changeset 73:20b37b004b8e

* src/java/org/classpath/icedtea/pulseaudio/PulseAudioMixer.java: modified getLine() to allow us to obtain a TargetDataLine
* src/java/org/classpath/icedtea/pulseaudio/PulseAudioTargetDataLine.java: added open(), read() and close()
* src/java/org/classpath/icedtea/pulseaudio/PulseAudioSourceDataLine.java: merged the streamListeners into a single listener
author iivan@town.yyz.redhat.com
date Thu, 14 Aug 2008 16:09:15 -0400
parents 3003bf4192f2
children 3cf90f7c6931
files ChangeLog build.xml src/java/org/classpath/icedtea/pulseaudio/PulseAudioMixer.java src/java/org/classpath/icedtea/pulseaudio/PulseAudioSourceDataLine.java src/java/org/classpath/icedtea/pulseaudio/PulseAudioTargetDataLine.java src/java/org/classpath/icedtea/pulseaudio/SimpleAudioRecorder.java src/native/Makefile.am src/native/org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine.c
diffstat 8 files changed, 623 insertions(+), 416 deletions(-)
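
The hunks below wire TargetDataLine support end to end: PulseAudioMixer.getLine() now returns a PulseAudioTargetDataLine for TargetDataLine.class, and the new open(), read() and close() make that line usable from client code. A minimal, hypothetical client sketch of this path (not part of the changeset; error handling omitted, the 44.1 kHz stereo format is only an example, and the mixer lookup follows the same pattern as the SimpleAudioRecorder.java added below):

    import javax.sound.sampled.*;

    public class TargetLineSketch {
        public static void main(String[] args) throws Exception {
            Mixer.Info selected = null;
            for (Mixer.Info info : AudioSystem.getMixerInfo()) {
                if (info.getName().contains("PulseAudio")) {
                    selected = info;
                }
            }
            Mixer mixer = AudioSystem.getMixer(selected);
            mixer.open();

            AudioFormat format = new AudioFormat(
                    AudioFormat.Encoding.PCM_SIGNED, 44100.0f, 16, 2, 4, 44100.0f, false);
            // getLine() now dispatches on TargetDataLine.class instead of the
            // previously commented-out branch.
            TargetDataLine line = (TargetDataLine) mixer.getLine(
                    new DataLine.Info(TargetDataLine.class, format));
            line.open(format);
            line.start();

            byte[] buffer = new byte[4096];              // 1024 frames at 4 bytes/frame
            int n = line.read(buffer, 0, buffer.length); // blocks until the buffer is full
            System.out.println("captured " + n + " bytes");

            line.close();
            mixer.close();
        }
    }

SimpleAudioRecorder.java in this changeset exercises the same path, but wraps the line in an AudioInputStream so that AudioSystem.write() drives the read loop.
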
--- a/ChangeLog	Wed Aug 13 14:22:35 2008 -0400
+++ b/ChangeLog	Thu Aug 14 16:09:15 2008 -0400
@@ -1,10 +1,21 @@
+2008-08-13 Ioana Ivan  <iivan@redhat.com>
+
+        * src/java/org/classpath/icedtea/pulseaudio/PulseAudioMixer.java:
+	modified getLine() to allow us to obtain a TargetDataLine
+	* src/java/org/classpath/icedtea/pulseaudio/PulseAudioTargetDataLine.java: 
+	added open(), read() and close()
+	* src/java/org/classpath/icedtea/pulseaudio/PulseAudioSourceDataLine.java:
+	merged the streamListeners into a single listener
+
+
+
 2008-08-13 Ioana Ivan  <iivan@redhat.com>
 
         * src/java/org/classpath/icedtea/pulseaudio/PulseAudioMixer.java: the
 	list of formats recognized by PulseAudio is being set here, so it can
 	be used by all DataLines. Also made some changes to get*LineInfo() and
 	isLineSupported()
-	*src/java/org/classpath/icedtea/pulseaudio/PulseAudioSourceDataLine.java:
+	* src/java/org/classpath/icedtea/pulseaudio/PulseAudioSourceDataLine.java:
 	changed the constructor 
 
 
--- a/build.xml	Wed Aug 13 14:22:35 2008 -0400
+++ b/build.xml	Thu Aug 14 16:09:15 2008 -0400
@@ -38,9 +38,11 @@
 			<class name="org.classpath.icedtea.pulseaudio.EventLoop"/>
 			<class name="org.classpath.icedtea.pulseaudio.Operation"/>
 			<class name="org.classpath.icedtea.pulseaudio.PulseAudioSourceDataLine"/>
+			<class name="org.classpath.icedtea.pulseaudio.PulseAudioTargetDataLine"/>
 			<class name="org.classpath.icedtea.pulseaudio.PulseAudioClip"/>
 			<class name="org.classpath.icedtea.pulseaudio.PulseAudioStreamVolumeControl"/>
 			<class name="org.classpath.icedtea.pulseaudio.Operation"/>
+			<class name="org.classpath.icedtea.pulseaudio.SimpleAudioRecorder"/>
 		</javah>
 	</target>
 
--- a/src/java/org/classpath/icedtea/pulseaudio/PulseAudioMixer.java	Wed Aug 13 14:22:35 2008 -0400
+++ b/src/java/org/classpath/icedtea/pulseaudio/PulseAudioMixer.java	Thu Aug 14 16:09:15 2008 -0400
@@ -317,9 +317,9 @@
 				return new PulseAudioSourceDataLine(eventLoop, formats, defaultFormat);
 	    }
 	        
-		/*if ((info.getLineClass() == TargetDataLine.class)) {
-	          return new PulseAudioTargetDataLine(this, (DataLine.Info) info);
-	    }*/
+		if ((info.getLineClass() == TargetDataLine.class)) {
+	          return new PulseAudioTargetDataLine(eventLoop, formats, defaultFormat);
+	    }
 		
 		PulseAudioClip clip = new PulseAudioClip();
 
--- a/src/java/org/classpath/icedtea/pulseaudio/PulseAudioSourceDataLine.java	Wed Aug 13 14:22:35 2008 -0400
+++ b/src/java/org/classpath/icedtea/pulseaudio/PulseAudioSourceDataLine.java	Thu Aug 14 16:09:15 2008 -0400
@@ -81,6 +81,8 @@
 	private boolean muted;
 	private float volume;
 	
+	private Semaphore semaphore = new Semaphore(0);
+	
 	private long currentFramePosition = 0;
 
 	/*
@@ -122,10 +124,7 @@
 	}
 
 	public PulseAudioSourceDataLine(EventLoop eventLoop, AudioFormat[] formats, AudioFormat defaultFormat) {
-		if (formats == null) {
-			
-		}
-		
+
 		supportedFormats = formats;
 		this.eventLoop = eventLoop;
 		this.lineListeners = new ArrayList<LineListener>();
@@ -182,10 +181,12 @@
 				if (e.getType() == StreamEvent.Type.READY) {
 					fireLineEvent(new LineEvent(PulseAudioSourceDataLine.this,
 							LineEvent.Type.OPEN, AudioSystem.NOT_SPECIFIED));
+					semaphore.release();
 				} else if (e.getType() == StreamEvent.Type.TERMINATED
 						|| e.getType() == StreamEvent.Type.FAILED) {
 					fireLineEvent((new LineEvent(PulseAudioSourceDataLine.this,
 							LineEvent.Type.CLOSE, AudioSystem.NOT_SPECIFIED)));
+					semaphore.release();
 				}
 			}
 
@@ -193,19 +194,10 @@
 
 		addStreamListener(openCloseListener);
 
-		final Semaphore semaphore = new Semaphore(0);
 
+		
 		synchronized (eventLoop.threadLock) {
 
-			this.addStreamListener(new StreamListener() {
-				@Override
-				public void update(StreamEvent e) {
-					if (e.getType() == StreamEvent.Type.READY) {
-						semaphore.release();
-					}
-				}
-			});
-
 			native_start();
 		}
 
@@ -264,7 +256,7 @@
 		while (remainingLength != 0) {
 
 			synchronized (eventLoop.threadLock) {
-				availableSize = native_get_writable_size();
+				availableSize = available();
 				if (availableSize < 0) {
 					return sizeWritten;
 				}
@@ -346,19 +338,6 @@
 	public void close() {
 		assert (isOpen);
 
-		final Semaphore semaphore = new Semaphore(0);
-
-		StreamListener closeListener = new StreamListener() {
-			@Override
-			public void update(StreamEvent e) {
-				if (e.getType() == StreamEvent.Type.TERMINATED
-						|| e.getType() == StreamEvent.Type.FAILED) {
-					semaphore.release();
-				}
-			}
-		};
-
-		addStreamListener(closeListener);
 
 		synchronized (eventLoop.threadLock) {
 			native_close();
@@ -367,7 +346,6 @@
 		try {
 			semaphore.acquire();
 			System.out.println("stream closed");
-			removeStreamListener(closeListener);
 		} catch (InterruptedException e) {
 			// throw new LineUnavailableException("unable to prepare
 			// stream");
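
The PulseAudioSourceDataLine hunks above fold the separate open and close StreamListeners into the single openCloseListener plus an instance-level semaphore, so the same permit unblocks whichever of open() or close() is currently waiting. A self-contained illustration of that handshake using plain JDK types (hypothetical class and names, not project code; the real release happens from the PulseAudio event-loop callback on READY or TERMINATED/FAILED rather than from an executor task):

    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Semaphore;

    public class OpenCloseHandshake {
        enum State { READY, TERMINATED }

        private final Semaphore semaphore = new Semaphore(0);
        private final ExecutorService eventLoop = Executors.newSingleThreadExecutor();

        // Stand-in for the merged stream listener: one callback for both transitions.
        private void onStateChange(State s) {
            System.out.println("stream state: " + s);
            semaphore.release();                          // unblocks open() or close()
        }

        public void open() throws InterruptedException {
            eventLoop.submit(() -> onStateChange(State.READY));      // stands in for native_start()
            semaphore.acquire();                                      // wait for READY
        }

        public void close() throws InterruptedException {
            eventLoop.submit(() -> onStateChange(State.TERMINATED)); // stands in for native_close()
            semaphore.acquire();                                      // wait for TERMINATED
            eventLoop.shutdown();
        }

        public static void main(String[] args) throws InterruptedException {
            OpenCloseHandshake line = new OpenCloseHandshake();
            line.open();
            line.close();
        }
    }
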
--- a/src/java/org/classpath/icedtea/pulseaudio/PulseAudioTargetDataLine.java	Wed Aug 13 14:22:35 2008 -0400
+++ b/src/java/org/classpath/icedtea/pulseaudio/PulseAudioTargetDataLine.java	Thu Aug 14 16:09:15 2008 -0400
@@ -37,41 +37,51 @@
 
 package org.classpath.icedtea.pulseaudio;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.Semaphore;
 
 import javax.sound.sampled.*;
 import javax.sound.sampled.AudioFormat.Encoding;
 import javax.sound.sampled.Control.Type;
+import javax.sound.sampled.Port.Info;
 
 public class PulseAudioTargetDataLine implements TargetDataLine {
 
 	
-	protected long contextPointer;
-	protected long mainLoopPointer;
-	protected long streamPointer;
+	
 	protected boolean isOpen = false;
 	protected boolean isPaused = false;
-	protected int defaultBufferSize;
+
+	private AudioFormat[] supportedFormats = null;
+	private AudioFormat currentFormat = null;
+	private AudioFormat defaultFormat = null;
+
+	private List<LineListener> lineListeners;
 	
+	private List<StreamListener> streamListeners = new ArrayList<StreamListener>();
 	
+	private String streamName = "Java Stream";
 	private static final int DEFAULT_BUFFER_SIZE = 1000;
 	private static final String PULSEAUDIO_FORMAT_KEY = "PulseAudioFormatKey";
-	private List<AudioFormat> supportedFormats = null;
-	private AudioFormat currentFormat = null;
+	
 	private EventLoop eventLoop = null;
 	
 	protected ArrayList<LineListener> listeners;
+	
+	private Semaphore semaphore = new Semaphore(0);
+	
+	@SuppressWarnings("unused")
+	private long streamPointer;
 
 	static {
 		try {
-			String library = new java.io.File(".").getCanonicalPath()
-					+ java.io.File.separatorChar + "lib"
-					+ java.io.File.separatorChar
+			String library = new java.io.File(".").getCanonicalPath()	+ java.io.File.separatorChar
 					+ System.mapLibraryName("pulse-java");
 			System.out.println(library);
 			System.load(library);
@@ -80,226 +90,148 @@
 		}
 	}
 	
-	public PulseAudioTargetDataLine(EventLoop eventLoop) {
-		this.eventLoop = eventLoop;
-		this.listeners = new ArrayList<LineListener>();
+	private native void native_open(long contextPointer, String streamName,
+			String encoding, int sampleRate, int channels, int bufferSize);
+	
+	private native void native_start();
+	
+	private native int native_get_readable_size();
+	
+	private native void native_close();
 	
-
-
-
-		/*
-		 * FIXME puselaudio supports any sample rate (it can covert between
-		 * sample rates without a problem). it calculates the frame size and the
-		 * frame rate based on that.
-		 * 
-		 * Java's AudioSystem interface accepts NOT_SPECIFIED only for sample
-		 * rate and frame rate. eg: cant say that it supports any number of
-		 * audio channels
-		 * 
-		 * sample size in bytes [PA_SAMPLE_U8] = 1, [PA_SAMPLE_ULAW] = 1,
-		 * [PA_SAMPLE_ALAW] = 1, [PA_SAMPLE_S16LE] = 2, [PA_SAMPLE_S16BE] = 2,
-		 * [PA_SAMPLE_FLOAT32LE] = 4, [PA_SAMPLE_FLOAT32BE] = 4,
-		 * [PA_SAMPLE_S32LE] = 4, [PA_SAMPLE_S32BE] = 4,
-		 * 
-		 * 
-		 */
-		
-		supportedFormats = new LinkedList<AudioFormat>();
-		
-		Map<String, Object> properties;
-
-		int[] channelSizes = new int[] { 1, 2, 5 };
-		for (int channelSize : channelSizes) {
-			properties = new HashMap<String, Object>();
-			properties.put(PULSEAUDIO_FORMAT_KEY, "PA_SAMPLE_U8");
-
-			// frameSize = sample size (in bytes, not bits) x # of channels
-			// ^ that's from PulseAudio sources, so it will pretty much break
-			// as soon as they change something
-			// FIXME ^
-			int sampleSize = 8; // in bits
-			AudioFormat PA_SAMPLE_U8 = new AudioFormat(
-					Encoding.PCM_UNSIGNED, // encoding
-					AudioSystem.NOT_SPECIFIED, // sample rate
-					sampleSize, // sample size
-					channelSize, // channels
-					sampleSize / 8 * channelSize, // frame size in bytes
-					AudioSystem.NOT_SPECIFIED, // frame rate
-					false, // big endian?
-					properties);
-
-			supportedFormats.add(PA_SAMPLE_U8);
-		}
-
-		for (int channelSize : channelSizes) {
-			properties = new HashMap<String, Object>();
-			properties.put(PULSEAUDIO_FORMAT_KEY, "PA_SAMPLE_ALAW");
-
-			// frameSize = sample size (in bytes, not bits) x # of channels
-			// ^ that's from PulseAudio sources, so it will pretty much break
-			// as soon as they change something
-			// FIXME ^
-
-			int sampleSize = 8;
-			final AudioFormat PA_SAMPLE_ALAW = new AudioFormat(Encoding.ALAW, // encoding
-					AudioSystem.NOT_SPECIFIED, // sample rate
-					sampleSize, // sample size
-					channelSize, // channels
-					sampleSize / 8 * channelSize, // frame size
-					AudioSystem.NOT_SPECIFIED, // frame rate
-					false, // big endian?
-					properties);
-
-			supportedFormats.add(PA_SAMPLE_ALAW);
-		}
-
-		for (int channelSize : channelSizes) {
-			properties = new HashMap<String, Object>();
-			properties.put(PULSEAUDIO_FORMAT_KEY, "PA_SAMPLE_ULAW");
-
-			// frameSize = sample size (in bytes, not bits) x # of channels
-			// ^ that's from PulseAudio sources, so it will pretty much break
-			// as soon as they change something
-			// FIXME ^
-
-			int sampleSize = 8;
-			final AudioFormat PA_SAMPLE_ULAW = new AudioFormat(Encoding.ULAW, // encoding
-					AudioSystem.NOT_SPECIFIED, // sample rate
-					sampleSize, // sample size
-					channelSize, // channels
-					sampleSize / 8 * channelSize, // frame size
-					AudioSystem.NOT_SPECIFIED, // frame rate
-					false, // big endian?
-					properties);
-
-			supportedFormats.add(PA_SAMPLE_ULAW);
-		}
-
-		for (int channelSize : channelSizes) {
-			properties = new HashMap<String, Object>();
-			properties.put(PULSEAUDIO_FORMAT_KEY, "PA_SAMPLE_S16BE");
-
-			// frameSize = sample size (in bytes, not bits) x # of channels
-			// ^ that's from PulseAudio sources, so it will pretty much break
-			// as soon as they change something
-			// FIXME ^
-
-			int sampleSize = 16;
-			final AudioFormat PA_SAMPLE_S16BE = new AudioFormat(
-					Encoding.PCM_SIGNED, // encoding
-					AudioSystem.NOT_SPECIFIED, // sample rate
-					sampleSize, // sample size
-					channelSize, // channels
-					sampleSize / 8 * channelSize, // frame size
-					AudioSystem.NOT_SPECIFIED, // frame rate
-					true, // big endian?
-					properties);
-
-			supportedFormats.add(PA_SAMPLE_S16BE);
-		}
-
-		for (int channelSize : channelSizes) {
-			properties = new HashMap<String, Object>();
-			properties.put(PULSEAUDIO_FORMAT_KEY, "PA_SAMPLE_S16LE");
-
-			// frameSize = sample size (in bytes, not bits) x # of channels
-			// ^ that's from PulseAudio sources, so it will pretty much break
-			// as soon as they change something
-			// FIXME ^
-
-			int sampleSize = 16;
-			final AudioFormat A_SAMPLE_S16LE = new AudioFormat(
-					Encoding.PCM_SIGNED, // encoding
-					AudioSystem.NOT_SPECIFIED, // sample rate
-					sampleSize, // sample size
-					channelSize, // channels
-					sampleSize / 8 * channelSize, // frame size
-					AudioSystem.NOT_SPECIFIED, // frame rate
-					false, // big endian?
-					properties);
-
-			supportedFormats.add(A_SAMPLE_S16LE);
-		}
-
-		for (int channelSize : channelSizes) {
-			properties = new HashMap<String, Object>();
-			properties.put(PULSEAUDIO_FORMAT_KEY, "PA_SAMPLE_S32BE");
-
-			// frameSize = sample size (in bytes, not bits) x # of channels
-			// ^ that's from PulseAudio sources, so it will pretty much break
-			// as soon as they change something
-			// FIXME ^
-
-			int sampleSize = 32;
-			final AudioFormat PA_SAMPLE_S32BE = new AudioFormat(
-					Encoding.PCM_SIGNED, // encoding
-					AudioSystem.NOT_SPECIFIED, // sample rate
-					sampleSize, // sample size
-					channelSize, // channels
-					sampleSize / 8 * channelSize, // frame size
-					AudioSystem.NOT_SPECIFIED, // frame rate
-					true, // big endian?
-					properties);
-
-			supportedFormats.add(PA_SAMPLE_S32BE);
-		}
-
-		for (int channelSize : channelSizes) {
-			properties = new HashMap<String, Object>();
-			properties.put(PULSEAUDIO_FORMAT_KEY, "PA_SAMPLE_S32LE");
-
-			// frameSize = sample size (in bytes, not bits) x # of channels
-			// ^ that's from PulseAudio sources, so it will pretty much break
-			// as soon as they change something
-			// FIXME ^
-
-			int sampleSize = 32;
-			final AudioFormat PA_SAMPLE_S32LE = new AudioFormat(
-					Encoding.PCM_SIGNED, // encoding
-					AudioSystem.NOT_SPECIFIED, // sample rate
-					sampleSize, // sample size
-					channelSize, // channels
-					sampleSize / 8 * channelSize, // frame size
-					AudioSystem.NOT_SPECIFIED, // frame rate
-					false, // big endian?
-					properties);
-
-			supportedFormats.add(PA_SAMPLE_S32LE);
-		}
-
-		currentFormat = null;
-
+	private native int native_read(byte[] array, int remaininglength, int position);
+	
+	public PulseAudioTargetDataLine(EventLoop eventLoop, AudioFormat[] formats, AudioFormat defaultFormat) {
+		supportedFormats = formats;
+		this.eventLoop = eventLoop;
+		this.lineListeners = new ArrayList<LineListener>();
+		this.defaultFormat = defaultFormat; 
+		this.currentFormat = defaultFormat;
 
 	}
 
 	public void open(AudioFormat format, int bufferSize)
 			throws LineUnavailableException {
-		isOpen = true;
+		System.out.println("OPEn CALLED");
+		if (isOpen) {
+			throw new IllegalStateException("Line is already open");
+		}
+
+		// ignore suggested buffer size
+
+		for (AudioFormat myFormat : supportedFormats) {
+			if (format.matches(myFormat)) {
+				native_open(eventLoop.getContextPointer(), streamName,
+						(String) myFormat.getProperty(PULSEAUDIO_FORMAT_KEY),
+						(int) format.getSampleRate(), format.getChannels(),
+						bufferSize);
+				currentFormat = format;
+				isOpen = true;
+			}
+		}
+		// no matches found
+		if (!isOpen) {
+			throw new IllegalArgumentException("Invalid format");
+		}
+
+		StreamListener openCloseListener = new StreamListener() {
 
-		int channels = format.getChannels();
-		float rate = format.getSampleRate();
-		int sampleSize = format.getSampleSizeInBits();
-		String encoding = format.getEncoding().toString();
-		boolean bigEndian = format.isBigEndian();
-		openStream(encoding, rate, sampleSize, channels, bigEndian, bufferSize);
+			@Override
+			public void update(StreamEvent e) {
+				if (e.getType() == StreamEvent.Type.READY) {
+					fireLineEvent(new LineEvent(PulseAudioTargetDataLine.this,
+							LineEvent.Type.OPEN, AudioSystem.NOT_SPECIFIED));
+					System.out.println("IN HERE");
+					semaphore.release();
+				} else if (e.getType() == StreamEvent.Type.TERMINATED
+						|| e.getType() == StreamEvent.Type.FAILED) {
+					fireLineEvent((new LineEvent(PulseAudioTargetDataLine.this,
+							LineEvent.Type.CLOSE, AudioSystem.NOT_SPECIFIED)));
+					semaphore.release();
+				}
+			}
+
+		};
+
+		addStreamListener(openCloseListener);
+
+
+		
+		synchronized (eventLoop.threadLock) {
+
+			native_start();
+		}
+
+		try {
+			semaphore.acquire();
+		} catch (InterruptedException e) {
+			// throw new LineUnavailableException("unable to prepare
+			// stream");
+		}
 	}
 
 	public void open(AudioFormat format) throws LineUnavailableException {
-		open(format, defaultBufferSize);
+		open(format, DEFAULT_BUFFER_SIZE);
 
 	}
 
 	public void open() throws LineUnavailableException {
-		openStream("PCM_SIGNED", 44100, 16, 2, false, defaultBufferSize);
+		// pick a random format
+		if (defaultFormat == null) {
+			defaultFormat = new AudioFormat(Encoding.PCM_UNSIGNED, 22050, 8, 2,
+					2, AudioSystem.NOT_SPECIFIED, false);
+		}
+
+		open(defaultFormat, DEFAULT_BUFFER_SIZE);
 	}
 
-	private native void openStream(String encoding, float rate, int size,
-			int channels, boolean bigEndian, int bufferSize);
 
 	@Override
-	public int read(byte[] b, int off, int len) {
-		readFromStream(b, off, len);
-		return len;
+	public int read(byte[] data, int offset, int length) {
+		int frameSize = currentFormat.getFrameSize();
+		
+		if (length % frameSize != 0) {
+			throw new IllegalArgumentException(
+					"amount of data to read does not represent an integral number of frames");
+		}
+		
+		if (length < 0) {
+			throw new IllegalArgumentException("length is negative");
+		}
+
+		if (length + offset > data.length) {
+			throw new ArrayIndexOutOfBoundsException(length + offset);
+		}
+
+		int position = offset;
+		int remainingLength = length;
+		int availableSize;
+
+		int sizeRead = 0;
+
+		while (remainingLength != 0) {
+
+			synchronized (eventLoop.threadLock) {
+				availableSize = available();
+				int toRead = native_read(data, remainingLength,  position);
+
+				sizeRead += toRead;
+				position += toRead;
+				remainingLength -= toRead;
+				
+
+			}
+		}
+
+		// all the data should have been read by now
+		assert (sizeRead == length);
+		//currentFramePosition += (sizeWritten/getFormat().getFrameSize());
+		/*
+		 * FIXME when the stream is flushed() etc, instead of returning length
+		 * this should unblock and return the size of data read so far
+		 */
+		return sizeRead;
+	
 	}
 
 	private native void readFromStream(byte[] b, int off, int len);
@@ -308,17 +240,17 @@
 		if (isPaused) {
 			resumeStream();
 			isPaused = false;
-		} else {
+		} /*else {
 			startStream();
-		}
+		}*/
 
-		for (LineListener l : listeners) {
+		/*for (LineListener l : listeners) {
 			l.update(new LineEvent(this, LineEvent.Type.START, 0));
-		}
+		}*/
 	}
 
 	public void stop() {
-		pauseStream();
+		//pauseStream();
 		isPaused = true;
 
 	}
@@ -353,12 +285,26 @@
 		return isOpen;
 	}
 
-	public native int available();
+	public int available() {
+		synchronized (eventLoop.threadLock) {
+			return native_get_readable_size();
+		}
+	}
 
 	public void close() {
-		closeStream();
-		for (LineListener l : listeners) {
-			l.update(new LineEvent(this, LineEvent.Type.CLOSE, 0));
+		assert (isOpen);
+
+
+		synchronized (eventLoop.threadLock) {
+			native_close();
+		}
+
+		try {
+			semaphore.acquire();
+			System.out.println("stream closed");
+		} catch (InterruptedException e) {
+			// throw new LineUnavailableException("unable to prepare
+			// stream");
 		}
 
 	}
@@ -371,8 +317,7 @@
 	}
 
 	public AudioFormat getFormat() {
-		// TODO Auto-generated method stub
-		return null;
+		return currentFormat;
 	}
 
 	public int getFramePosition() {
@@ -423,5 +368,56 @@
 		// TODO Auto-generated method stub
 		return false;
 	}
+	
+	public void update(int status) {
+		synchronized (eventLoop.threadLock) {
+			switch (status) {
+			case 0:
+				fireStreamEvent(new StreamEvent(StreamEvent.Type.UNCONNECTED));
+				break;
+			case 1:
+				fireStreamEvent(new StreamEvent(StreamEvent.Type.CREATING));
+				break;
+			case 2:
+				fireStreamEvent(new StreamEvent(StreamEvent.Type.READY));
+				break;
+			case 3:
+				fireStreamEvent(new StreamEvent(StreamEvent.Type.FAILED));
+				break;
+			case 4:
+				fireStreamEvent(new StreamEvent(StreamEvent.Type.TERMINATED));
+				break;
+			default:
+				assert ("not supposed to happen".indexOf("false") >= 0);
+			}
+		}
+	}
 
-}
+	private void fireLineEvent(LineEvent e) {
+		for (LineListener lineListener : lineListeners) {
+			lineListener.update(e);
+		}
+	}
+
+	private void fireStreamEvent(StreamEvent e) {
+		synchronized (streamListeners) {
+			for (StreamListener streamListener : streamListeners) {
+				streamListener.update(e);
+			}
+		}
+	}
+	
+	
+	private void addStreamListener(StreamListener listener) {
+		synchronized (streamListeners) {
+			this.streamListeners.add(listener);
+		}
+	}
+
+	private void removeStreamListener(StreamListener listener) {
+		synchronized (streamListeners) {
+			this.streamListeners.remove(listener);
+		}
+	}
+
+}
\ No newline at end of file
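
A note on the new read() above: the requested length must be a whole number of frames, where the frame size is the sample size in bytes times the channel count, otherwise an IllegalArgumentException is thrown. A small, assumed example of sizing a capture buffer accordingly (values are illustrative, not taken from the patch):

    import javax.sound.sampled.AudioFormat;

    public class FrameSizing {
        public static void main(String[] args) {
            // 16-bit signed, 2 channels, little endian: 2 bytes * 2 channels per frame
            AudioFormat fmt = new AudioFormat(44100.0f, 16, 2, true, false);
            int frameSize = fmt.getFrameSize();                // 4 bytes per frame
            int requested = 4098;                              // arbitrary byte count
            int usable = requested - (requested % frameSize);  // round down to a frame boundary
            byte[] buffer = new byte[usable];                  // 4096 bytes = 1024 frames
            System.out.println(frameSize + " bytes/frame, buffer of " + buffer.length + " bytes");
        }
    }
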
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/src/java/org/classpath/icedtea/pulseaudio/SimpleAudioRecorder.java	Thu Aug 14 16:09:15 2008 -0400
@@ -0,0 +1,248 @@
+package org.classpath.icedtea.pulseaudio;
+
+/*
+ *	SimpleAudioRecorder.java
+ *
+ *	This file is part of jsresources.org
+ */
+
+/*
+ * Copyright (c) 1999 - 2003 by Matthias Pfisterer
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *
+ * - Redistributions of source code must retain the above copyright notice,
+ *   this list of conditions and the following disclaimer.
+ * - Redistributions in binary form must reproduce the above copyright
+ *   notice, this list of conditions and the following disclaimer in the
+ *   documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+ * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
+ * OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+/*
+|<---            this code is formatted to fit into 80 columns             --->|
+*/
+
+import java.io.IOException;
+import java.io.File;
+
+import javax.sound.sampled.DataLine;
+import javax.sound.sampled.Mixer;
+import javax.sound.sampled.TargetDataLine;
+import javax.sound.sampled.AudioFormat;
+import javax.sound.sampled.AudioSystem;
+import javax.sound.sampled.AudioInputStream;
+import javax.sound.sampled.LineUnavailableException;
+import javax.sound.sampled.AudioFileFormat;
+
+
+
+public class SimpleAudioRecorder
+extends Thread
+{
+	private TargetDataLine		m_line;
+	private AudioFileFormat.Type	m_targetType;
+	private AudioInputStream	m_audioInputStream;
+	private File			m_outputFile;
+
+
+
+	public SimpleAudioRecorder(TargetDataLine line,
+				     AudioFileFormat.Type targetType,
+				     File file)
+	{
+		m_line = line;
+		m_audioInputStream = new AudioInputStream(line);
+		m_targetType = targetType;
+		m_outputFile = file;
+	}
+
+
+
+	/** Starts the recording.
+	    To accomplish this, (i) the line is started and (ii) the
+	    thread is started.
+	*/
+	public void start()
+	{
+		/* Starting the TargetDataLine. It tells the line that
+		   we now want to read data from it. If this method
+		   isn't called, we won't
+		   be able to read data from the line at all.
+		*/
+		m_line.start();
+
+		/* Starting the thread. This call results in the
+		   method 'run()' (see below) being called. There, the
+		   data is actually read from the line.
+		*/
+		super.start();
+	}
+
+
+	/** Stops the recording.
+
+	    Note that stopping the thread explicitly is not necessary. Once
+	    no more data can be read from the TargetDataLine, no more data
+	    can be read from our AudioInputStream. And if there is no more
+	    data from the AudioInputStream, the method 'AudioSystem.write()'
+	    (called in 'run()') returns. Returning from 'AudioSystem.write()'
+	    is followed by returning from 'run()', and thus, the thread
+	    is terminated automatically.
+
+	    It's not a good idea to call this method just 'stop()'
+	    because stop() is a (deprecated) method of the class 'Thread'.
+	    And we don't want to override this method.
+	*/
+	public void stopRecording()
+	{
+		m_line.stop();
+		m_line.close();
+		super.stop();
+	}
+
+
+
+
+	/** Main working method.
+	    You may be surprised that here, just 'AudioSystem.write()' is
+	    called. But internally, it works like this: AudioSystem.write()
+	    contains a loop that is trying to read from the passed
+	    AudioInputStream. Since we have a special AudioInputStream
+	    that gets its data from a TargetDataLine, reading from the
+	    AudioInputStream leads to reading from the TargetDataLine. The
+	    data read this way is then written to the passed File. Before
+	    writing of audio data starts, a header is written according
+	    to the desired audio file type. Reading continues until no
+	    more data can be read from the AudioInputStream. In our case,
+	    this happens if no more data can be read from the TargetDataLine.
+	    This, in turn, happens if the TargetDataLine is stopped or closed
+	    (which implies stopping). (Also see the comment above.) Then,
+	    the file is closed and 'AudioSystem.write()' returns.
+	*/
+	public void run()
+	{
+			try
+			{
+				AudioSystem.write(
+					m_audioInputStream,
+					m_targetType,
+					m_outputFile);
+			}
+			catch (IOException e)
+			{
+				e.printStackTrace();
+			}
+	}
+
+
+
+	public static void main(String[] args) throws Exception
+	{
+		Mixer.Info mixerInfos[] = AudioSystem.getMixerInfo();
+		Mixer.Info selectedMixerInfo = null;
+		// int i = 0;
+		for (Mixer.Info info : mixerInfos) {
+			// System.out.println("Mixer Line " + i++ + ": " + info.getName() +
+			// " " + info.getDescription());
+			if (info.getName().contains("PulseAudio")) {
+				selectedMixerInfo = info;
+				System.out.println(selectedMixerInfo);
+			}
+		}
+
+		PulseAudioMixer mixer = (PulseAudioMixer) AudioSystem
+				.getMixer(selectedMixerInfo);
+
+		mixer.open();
+		File outputFile = new File("recordingFile");
+		AudioFormat	audioFormat = new AudioFormat(
+				AudioFormat.Encoding.PCM_SIGNED,
+				44100.0F, 16, 2, 4, 44100.0F, false);
+		DataLine.Info	info = new DataLine.Info(TargetDataLine.class, audioFormat);
+		TargetDataLine targetDataLine = (TargetDataLine) mixer.getLine(info);
+		targetDataLine.open();
+
+		AudioFileFormat.Type	targetType = AudioFileFormat.Type.WAVE;
+		SimpleAudioRecorder	recorder = new SimpleAudioRecorder(
+			targetDataLine,
+			targetType,
+			outputFile);
+
+		/* We are waiting for the user to press ENTER to
+		   start the recording. (You might find it
+		   inconvenient if recording starts immediately.)
+		*/
+		out("Press ENTER to start the recording.");
+		try
+		{
+			System.in.read();
+		}
+		catch (IOException e)
+		{
+			e.printStackTrace();
+		}
+		/* Here, the recording is actually started.
+		 */
+		recorder.start();
+		out("Recording...");
+
+		/* And now, we are waiting again for the user to press ENTER,
+		   this time to signal that the recording should be stopped.
+		*/
+		out("Press ENTER to stop the recording.");
+		try
+		{
+			System.in.read();
+		}
+		catch (IOException e)
+		{
+			e.printStackTrace();
+		}
+
+		/* Here, the recording is actually stopped.
+		 */
+		recorder.stopRecording();
+		out("Recording stopped.");
+		
+		mixer.close();
+	}
+
+
+
+	private static void printUsageAndExit()
+	{
+		out("SimpleAudioRecorder: usage:");
+		out("\tjava SimpleAudioRecorder -h");
+		out("\tjava SimpleAudioRecorder <audiofile>");
+		System.exit(0);
+	}
+
+
+
+	private static void out(String strMessage)
+	{
+		System.out.println(strMessage);
+	}
+}
+
+
+
+/*** SimpleAudioRecorder.java ***/
+
+
--- a/src/native/Makefile.am	Wed Aug 13 14:22:35 2008 -0400
+++ b/src/native/Makefile.am	Thu Aug 14 16:09:15 2008 -0400
@@ -7,6 +7,8 @@
 	org_classpath_icedtea_pulseaudio_EventLoop.h \
 	org_classpath_icedtea_pulseaudio_PulseAudioSourceDataLine.c \
 	org_classpath_icedtea_pulseaudio_PulseAudioSourceDataLine.h \
+	org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine.c \
+        org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine.h \
 	org_classpath_icedtea_pulseaudio_PulseAudioStreamVolumeControl.c \
 	org_classpath_icedtea_pulseaudio_PulseAudioStreamVolumeControl.h \
 	org_classpath_icedtea_pulseaudio_Operation.h \
--- a/src/native/org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine.c	Wed Aug 13 14:22:35 2008 -0400
+++ b/src/native/org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine.c	Thu Aug 14 16:09:15 2008 -0400
@@ -47,7 +47,10 @@
 #include "org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine.h"
 #include "jni-common.h"
 
-static void stream_read_cb(pa_stream* stream, size_t length, void* userdata) {
+/* defined in EventLoop.c */
+extern JNIEnv* pulse_thread_env;
+
+/*static void stream_read_cb(pa_stream* stream, size_t length, void* userdata) {
 	pa_threaded_mainloop *mainloop = userdata;
 	pa_threaded_mainloop_signal(mainloop, 0);
 }
@@ -56,212 +59,179 @@
 		void* userdata) {
 	pa_threaded_mainloop *mainloop = userdata;
 	pa_threaded_mainloop_signal(mainloop, 0);
+}*/
+
+static void stream_state_change_callback(pa_stream* stream, void* userdata) {
+	assert(stream);
+	assert(userdata);
+
+	//	printf("entering stream_state_change_callback\n");
+
+	java_context_t* java_context = (java_context_t*)userdata;
+	JNIEnv* env;
+
+	/* needed so we can create a stream from another thread
+	 */
+	if (pa_stream_get_state(stream) == PA_STREAM_CREATING) {
+		env = java_context->env;
+	} else {
+		env = pulse_thread_env;
+	}
+
+	jobject obj = java_context->obj;
+
+	// printf("stream state changed to %d\n", pa_stream_get_state(stream));
+
+	/* Call the 'update' method in java
+	 * to handle all java-side events
+	 */
+	jclass cls = (*env)->GetObjectClass(env, obj);
+	if (cls == NULL) {
+		printf("unable to get class of object");
+		return;
+	}
+	jmethodID mid = (*env)->GetMethodID(env, cls, "update", "(I)V");
+	if (mid == NULL) {
+		printf("unable to get callback method\n");
+		return;
+
+	}
+	//printf("calling update on java\n");
+	(*env)->CallVoidMethod(env, obj, mid, pa_stream_get_state(stream));
+
+	//printf("returning form stream_state_change_callback\n");
+	return;
+
 }
 
-static void stream_state_cb(pa_stream* stream, void* userdata) {
-	assert(stream);
-	pa_threaded_mainloop *mainloop = userdata;
-	printf("stream state changed to %d\n", pa_stream_get_state(stream));
-	switch (pa_stream_get_state(stream)) {
-	case PA_STREAM_READY:
-	case PA_STREAM_FAILED:
-	case PA_STREAM_TERMINATED:
-		pa_threaded_mainloop_signal(mainloop, 0);
-		break;
-
-	default:
-		/* do nothing */
-		break;
-	}
-}
-
-JNIEXPORT void JNICALL Java_org_openjdk_sound_PulseAudioSourceDataLine_openStream
-(JNIEnv * env, jobject obj, jstring string, jfloat rate, jint size, jint channels, jboolean bigEndian, jint bufferSize) {
-
-	printf("entering native_open\n");
-
-	pa_context* context = (pa_context*) contextPointer;
-	assert(context != NULL);
-
-	obj = (*env)->NewGlobalRef(env, obj);
-
-	java_context_t* java_context = malloc(sizeof(java_context));
-	java_context->env = env;
-	java_context->obj = obj;
+/*
+ * Class:     org_classpath_icedtea_pulseaudio_PulseAudioSourceDataLine
+ * Method:    native_open
+ * Signature: (JLjava/lang/String;Ljava/lang/String;III)V
+ */
+JNIEXPORT void JNICALL Java_org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine_native_1open
+(JNIEnv* env, jobject obj, jlong contextPointer, jstring name, jstring encodingString, jint sampleRate, jint channels, jint bufferSize) {
 
 	//TODO: Need to deal with the buffer size. Currently ignored
 
+	//	printf("entering native_open\n");
+	java_context_t* java_context = malloc(sizeof(java_context));
+	java_context->env = env;
+	java_context->obj = (*env)->NewGlobalRef(env, obj);
+
+	pa_context* context = (pa_context*) convertJavaLongToPointer(contextPointer);
+	assert(context != NULL);
+
 	pa_sample_spec sample_spec;
 
-	char *encoding = GetStringUTFChars(env, string, NULL);
+	const char *encoding = (*env)->GetStringUTFChars(env, encodingString, NULL);
 
-	if( (strcmp(encoding, "PCM_UNSIGNED") == 0) && (size == 8)) {
+	if (strcmp(encoding, "PA_SAMPLE_U8") == 0) {
 		sample_spec.format = PA_SAMPLE_U8;
-	} else if( (strcmp(encoding, "ALAW") == 0) && (size == 8)) {
+	} else if (strcmp(encoding, "PA_SAMPLE_ALAW") == 0) {
 		sample_spec.format = PA_SAMPLE_ALAW;
-	} else if( (strcmp(encoding, "ULAW") == 0) && (size == 8)) {
+	} else if (strcmp(encoding, "PA_SAMPLE_ULAW;") == 0) {
 		sample_spec.format = PA_SAMPLE_ULAW;
-	} else if ( (strcmp(encoding, "PCM_SIGNED") == 0) && (size == 16) && (bigEndian == 1)) {
+	} else if (strcmp(encoding, "PA_SAMPLE_S16BE") == 0) {
 		sample_spec.format = PA_SAMPLE_S16BE;
-	} else if ( (strcmp(encoding, "PCM_SIGNED") == 0) && (size == 16) && (bigEndian == 0)) {
+	} else if (strcmp(encoding, "PA_SAMPLE_S16LE") == 0) {
 		sample_spec.format = PA_SAMPLE_S16LE;
-	} else if ( (strcmp(encoding, "PCM_SIGNED") == 0) && (size == 32) && (bigEndian == 1)) {
+	} else if (strcmp(encoding, "PA_SAMPLE_S32BE") == 0) {
 		sample_spec.format = PA_SAMPLE_S32BE;
-	} else if ( (strcmp(encoding, "PCM_SIGNED") == 0) && (size == 32) && (bigEndian == 0)) {
+	} else if (strcmp(encoding, "PA_SAMPLE_S32LE") == 0) {
 		sample_spec.format = PA_SAMPLE_S32LE;
 	} else {
-		//TODO: Invalid format :throw Exception;
+		printf("error in open: encoding is : %s\n", encoding);
+		throwByName(env, "java/lang/IllegalArgumentException", "Invalid format");
+		/* clean up */
+		free(java_context);
+		(*env)->DeleteGlobalRef(env, obj);
+		(*env)->ReleaseStringUTFChars(env, encodingString, encoding);
+		return;
 	}
 
-	sample_spec.rate = rate;
+	sample_spec.rate = sampleRate;
 	sample_spec.channels = channels;
 
+	printf("sample_spec.rate = %d\n", sample_spec.rate);
+	printf("sample_spec.channels = %d\n", sample_spec.channels);
+
+	if ( !pa_sample_spec_valid(&sample_spec)) {
+		printf("error: invalid format\n");
+		throwByName(env, "java/lang/IllegalArgumentException", "Invalid format");
+		/* clean up */
+		free(java_context);
+		(*env)->DeleteGlobalRef(env, obj);
+		(*env)->ReleaseStringUTFChars(env, encodingString, encoding);
+		return;
+	}
+
+	(*env)->ReleaseStringUTFChars(env, encodingString, encoding);
+
 	/* obtain the server from the caller */
-	const jbyte* stream_name = NULL;
+	const char* stream_name = NULL;
 	stream_name = (*env)->GetStringUTFChars(env, name, NULL);
 	if (stream_name == NULL) {
 		return; /* OutOfMemoryError */
 	}
-	printf("About to create stream: %s\n", stream_name);
+	//	printf("About to create stream: %s\n", stream_name);
 	pa_stream* stream = pa_stream_new(context, stream_name, &sample_spec, NULL);
 	assert(stream != NULL);
 	(*env)->ReleaseStringUTFChars(env, name, stream_name);
 
 	pa_stream_set_state_callback(stream, stream_state_change_callback, java_context);
 
-	jclass cls = (*env)->GetObjectClass(env,obj);
-	jfieldID fid = (*env)->GetFieldID(env, cls, "streamPointer", "I");
-	(*env)->SetIntField(env, obj, fid, (jint) stream);
-
-	printf("returning from native_open\n");
+	//	printf("seeting stream pointer: %d\n", (int)stream);
+	setJavaPointer(env, obj, "streamPointer", stream);
+	//	printf("returning from native_open\n");
 
 }
 
-JNIEXPORT void JNICALL
-Java_org_openjdk_sound_PulseAudioSourceDataLine_startStream(JNIEnv *env, jobject obj) {
-	pa_threaded_mainloop *mainloop = getJavaLongField (env, obj, "mainLoopPointer");
-	pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-	pa_threaded_mainloop_lock(mainloop);
-	pa_stream_connect_playback(stream, NULL, NULL, 0, NULL, NULL);
-	pa_threaded_mainloop_wait(mainloop);
-	if ( pa_stream_get_state(stream) != PA_STREAM_READY ) {
-		printf("stream initialization failed\n");
-	}
-	pa_threaded_mainloop_unlock(mainloop);
-}
-
-JNIEXPORT void Java_org_openjdk_sound_PulseAudioSourceDataLine_resumeStream(
-		JNIEnv *env, jobject obj) {
-	pa_threaded_mainloop *mainloop = getJavaLongField(env, obj,
-			"mainLoopPointer");
-	pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-	pa_threaded_mainloop_lock(mainloop);
-	pa_operation *o = pa_stream_cork(stream, 0, stream_operation_complete_cb,
-			mainloop);
-	while (pa_operation_get_state(o) != PA_OPERATION_DONE) {
-		pa_threaded_mainloop_wait(mainloop);
-	}
-	pa_operation_unref(o);
-	pa_threaded_mainloop_unlock(mainloop);
+/*
+ * Class:     org_classpath_icedtea_pulseaudio_PulseAudioSourceDataLine
+ * Method:    native_start
+ * Signature: ()V
+ */
+JNIEXPORT void JNICALL Java_org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine_native_1start
+(JNIEnv *env, jobject obj) {
+	pa_stream *stream = (pa_stream*)getJavaPointer(env, obj, "streamPointer");
+	assert(stream);
+	pa_stream_connect_record(stream, NULL, NULL, 0);
 
 }
 
-JNIEXPORT void Java_org_openjdk_sound_PulseAudioSourceDataLine__pauseStream(
-		JNIEnv *env, jobject obj) {
-	pa_threaded_mainloop *mainloop = getJavaLongField(env, obj,
-			"mainLoopPointer");
-	pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-	pa_threaded_mainloop_lock(mainloop);
-	pa_operation *o = pa_stream_cork(stream, 1, stream_operation_complete_cb,
-			mainloop);
-	while (pa_operation_get_state(o) != PA_OPERATION_DONE) {
-		pa_threaded_mainloop_wait(mainloop);
-	}
-	pa_operation_unref(o);
-	pa_threaded_mainloop_unlock(mainloop);
-}
+
+JNIEXPORT jint JNICALL Java_org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine_native_1get_1readable_1size
+(JNIEnv* env, jobject obj) {
+
 
-J
 
-JNIEXPORT void JNICALL
-Java_org_openjdk_sound_PulseAudioSourceDataLine__closeStream(JNIEnv *env, jobject obj, jint channels, jfloat rate) {
-
-	pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-	pa_strean_disconnect(stream);
-}
-
-JNIEXPORT jint JNICALL Java_org_openjdk_sound_PulseAudioSourceDataLine_available
-(JNIEnv *env, jobject obj) {
-	pa_threaded_mainloop *mainloop = getJavaLongField (env, obj, "mainLoopPointer");
-	pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-	pa_threaded_mainloop_lock(mainloop);
-	int available = pa_stream_writable_size(stream);
-	pa_threaded_mainloop_unlock(mainloop);
+	pa_stream *stream = (pa_stream*) getJavaPointer(env, obj, "streamPointer");
+	assert(stream);
+	int available = pa_stream_readable_size(stream);
 	return available;
 }
 
-JNIEXPORT void JNICALL Java_org_openjdk_sound_PulseAudioSourceDataLine_drain
-(JNIEnv *env, jobject obj) {
-	pa_threaded_mainloop *mainloop = getJavaLongField (env, obj, "mainLoopPointer");
-	pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-	pa_threaded_mainloop_lock(mainloop);
-	pa_operation *o = pa_stream_drain(stream, stream_operation_complete_cb, mainloop);
-	while(pa_operation_get_state(o) != PA_OPERATION_DONE) {
-		pa_threaded_mainloop_wait(mainloop);
+JNIEXPORT int JNICALL Java_org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine_native_1read
+  (JNIEnv *env, jobject obj, jbyteArray array, jint length,  jint offset) {
+	pa_stream *stream = getJavaPointer(env, obj, "streamPointer");
+	const void *read_data = NULL;
+	size_t  read_length = 0;
+	pa_stream_peek(stream, &read_data, &read_length);
+	if (length < read_length) {
+		read_length = length;
 	}
-	pa_operation_unref(o);
-	pa_threaded_mainloop_unlock(mainloop);
-}
 
-JNIEXPORT void JNICALL Java_org_openjdk_sound_PulseAudioSourceDataLine_flush
-(JNIEnv *env, jobject obj) {
-	pa_threaded_mainloop *mainloop = getJavaLongField (env, obj, "mainLoopPointer");
-	pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-	pa_threaded_mainloop_lock(mainloop);
-	pa_operation *o = pa_stream_flush(stream, stream_operation_complete_cb, mainloop);
-	while(pa_operation_get_state(o) != PA_OPERATION_DONE) {
-		pa_threaded_mainloop_wait(mainloop);
-	}
-	pa_operation_unref(o);
-	pa_threaded_mainloop_unlock(mainloop);
+	(*env)->SetByteArrayRegion(env, array, offset, read_length, read_data);
+	pa_stream_drop(stream);
+	return read_length;
 }
 
-JNIEXPORT void JNICALL Java_org_openjdk_sound_PulseAudioTargetDataLine_readFromStream
-(JNIEnv * env, jobject obj, jbyteArray array, jint length, jint offset);
-pa_threaded_mainloop *mainloop = getJavaLongField(env, obj, "mainLoopPointer");
-pa_stream *stream = getJavaLongField(env, obj, "streamPointer");
-pa_threaded_mainloop_lock(mainloop);
-char[length] data;
-while(length> 0) {
-	size_t l;
-	while(!read_data) {
-		int r = pa_stream_peek(_stream, &read_data, &read_length);
-		if(!read_data) {
-			pa_threaded_mainloop_wait(mainloop);
-		} else {
-			read_index = 0;
-		}
-	}
+JNIEXPORT void JNICALL Java_org_classpath_icedtea_pulseaudio_PulseAudioTargetDataLine_native_1close
+(JNIEnv* env, jobject obj) {
+	pa_stream* stream = (pa_stream*) getJavaPointer(env, obj, "streamPointer");
+	pa_stream_disconnect(stream);
 
-	l = read_length < length ? read_length : length;
-	memcpy(data, read_data+read_index, l);
-
-	data = data + l;
-	length -= l;
-	read_index +=l;
-	read_length-=l;
-
-	if(! read_length) {
-		int r = pa_stream_drop(stream);
-		read_data = NULL;
-		read_length = 0;
-		read_index = 0;
-
-	}
 
 }
 
-pa_threaded_mainloop_unlock(mainloop);
-SetByteArrayRegion(env, array, offset, initialLength, data);
-}
-