/*
 * @(#)DataLine.java 1.33 04/07/14
 *
 * Copyright 2004 Sun Microsystems, Inc. All rights reserved.
 * SUN PROPRIETARY/CONFIDENTIAL. Use is subject to license terms.
 */

package javax.sound.sampled;

/**
 * <code>DataLine</code> adds media-related functionality to its
 * superinterface, <code>{@link Line}</code>. This functionality includes
 * transport-control methods that start, stop, drain, and flush
 * the audio data that passes through the line. A data line can also
 * report the current position, volume, and audio format of the media.
 * Data lines are used for output of audio by means of the
 * subinterfaces <code>{@link SourceDataLine}</code> or
 * <code>{@link Clip}</code>, which allow an application program to write data.
 * Similarly, audio input is handled by the subinterface
 * <code>{@link TargetDataLine}</code>, which allows data to be read.
 * <p>
 * A data line has an internal buffer in which
 * the incoming or outgoing audio data is queued. The
 * <code>{@link #drain()}</code> method blocks until this internal buffer
 * becomes empty, usually because all queued data has been processed. The
 * <code>{@link #flush()}</code> method discards any available queued data
 * from the internal buffer.
 * <p>
 * A data line produces <code>{@link LineEvent.Type#START START}</code> and
 * <code>{@link LineEvent.Type#STOP STOP}</code> events whenever
 * it begins or ceases active presentation or capture of data. These events
 * can be generated in response to specific requests, or as a result of
 * less direct state changes. For example, if <code>{@link #start()}</code> is called
 * on an inactive data line, and data is available for capture or playback, a
 * <code>START</code> event will be generated shortly, when data playback
 * or capture actually begins. Or, if the flow of data to an active data
 * line is constricted so that a gap occurs in the presentation of data,
 * a <code>STOP</code> event is generated.
 * <p>
 * Mixers often support synchronized control of multiple data lines.
 * Synchronization can be established through the <code>Mixer</code> interface's
 * <code>{@link Mixer#synchronize synchronize}</code> method.
 * See the description of the <code>{@link Mixer Mixer}</code> interface
 * for more information.
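 * <p>
 * As a rough sketch of typical use, an application might play a buffer of
 * audio data through a <code>SourceDataLine</code> as follows. The audio
 * format, the way the data is obtained, and the omission of exception
 * handling are illustrative only:
 * <pre>
 *     // Assumes 44.1 kHz, 16-bit, stereo, signed, little-endian PCM data.
 *     AudioFormat format = new AudioFormat(44100.0f, 16, 2, true, false);
 *     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
 *     SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
 *     line.open(format);
 *     line.start();
 *     byte[] data = new byte[line.getBufferSize()];
 *     // ... fill data with audio in the format given above ...
 *     line.write(data, 0, data.length);
 *     line.drain();   // block until the queued data has been played
 *     line.stop();
 *     line.close();
 * </pre>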
 *
 * @author Kara Kytle
 * @version 1.33, 04/07/14
 * @see LineEvent
 * @since 1.3
 */
public interface DataLine extends Line {

    /**
     * Drains queued data from the line by continuing data I/O until the
     * data line's internal buffer has been emptied.
     * This method blocks until the draining is complete. Because this is a
     * blocking method, it should be used with care. If <code>drain()</code>
     * is invoked on a stopped line that has data in its queue, the method will
     * block until the line is running and the data queue becomes empty. If
     * <code>drain()</code> is invoked by one thread, and another continues to
     * fill the data queue, the operation will not complete.
     * This method always returns when the data line is closed.
     *
     * @see #flush()
     */
    public void drain();

    /**
     * Flushes queued data from the line. The flushed data is discarded.
     * In some cases, not all queued data can be discarded. For example, a
     * mixer can flush data from the buffer for a specific input line, but any
     * unplayed data already in the output buffer (the result of the mix) will
     * still be played. You can invoke this method after pausing a line (the
     * normal case) if you want to skip the "stale" data when you restart
     * playback or capture. (It is legal to flush a line that is not stopped,
     * but doing so on an active line is likely to cause a discontinuity in the
     * data, resulting in a perceptible click.)
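     * <p>
     * For example, a player that pauses playback and later repositions it
     * might proceed roughly as follows; the variable name is a placeholder:
     * <pre>
     *     sourceLine.stop();    // pause; queued data remains in the buffer
     *     sourceLine.flush();   // discard the now-stale queued data
     *     // ... queue data for the new playback position ...
     *     sourceLine.start();   // resume I/O with the new data
     * </pre>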
     *
     * @see #stop()
     * @see #drain()
     */
    public void flush();

    /**
     * Allows a line to engage in data I/O. If invoked on a line
     * that is already running, this method does nothing. Unless the data in
     * the buffer has been flushed, the line resumes I/O starting
     * with the first frame that was unprocessed at the time the line was
     * stopped. When audio capture or playback starts, a
     * <code>{@link LineEvent.Type#START START}</code> event is generated.
     *
     * @see #stop()
     * @see #isRunning()
     * @see LineEvent
     */
    public void start();

    /**
     * Stops the line. A stopped line should cease I/O activity.
     * If the line is open and running, however, it should retain the resources required
     * to resume activity. A stopped line should retain any audio data in its buffer
     * instead of discarding it, so that upon resumption the I/O can continue where it left off,
     * if possible. (This doesn't guarantee that there will never be discontinuities beyond the
     * current buffer, of course; if the stopped condition continues
     * for too long, input or output samples might be dropped.) If desired, the retained data can be
     * discarded by invoking the <code>flush</code> method.
     * When audio capture or playback stops, a <code>{@link LineEvent.Type#STOP STOP}</code> event is generated.
     *
     * @see #start()
     * @see #isRunning()
     * @see #flush()
     * @see LineEvent
     */
    public void stop();

    /**
     * Indicates whether the line is running. The default is <code>false</code>.
     * An open line begins running when the first data is presented in response to an
     * invocation of the <code>start</code> method, and continues
     * until presentation ceases in response to a call to <code>stop</code> or
     * because playback completes.
     *
     * @return <code>true</code> if the line is running, otherwise <code>false</code>
     * @see #start()
     * @see #stop()
     */
    public boolean isRunning();

    /**
     * Indicates whether the line is engaging in active I/O (such as playback
     * or capture). When an inactive line becomes active, it sends a
     * <code>{@link LineEvent.Type#START START}</code> event to its listeners. Similarly, when
     * an active line becomes inactive, it sends a
     * <code>{@link LineEvent.Type#STOP STOP}</code> event.
     *
     * @return <code>true</code> if the line is actively capturing or rendering
     * sound, otherwise <code>false</code>
     * @see #isOpen
     * @see #addLineListener
     * @see #removeLineListener
     * @see LineEvent
     * @see LineListener
     */
    public boolean isActive();

    /**
     * Obtains the current format (encoding, sample rate, number of channels,
     * etc.) of the data line's audio data.
     *
     * <p>If the line is not open and has never been opened, it returns
     * the default format. The default format is an implementation-specific
     * audio format, or, if the <code>DataLine.Info</code> object
     * that was used to retrieve this <code>DataLine</code>
     * specifies at least one fully qualified audio format, the
     * last one will be used as the default format. Opening the
     * line with a specific audio format (e.g.
     * {@link SourceDataLine#open(AudioFormat)}) will override the
     * default format.
     *
     * @return current audio data format
     * @see AudioFormat
     */
    public AudioFormat getFormat();

    /**
     * Obtains the maximum number of bytes of data that will fit in the data line's
     * internal buffer. For a source data line, this is the size of the buffer to
     * which data can be written. For a target data line, it is the size of
     * the buffer from which data can be read. Note that
     * the units used are bytes, but will always correspond to an integral
     * number of sample frames of audio data.
     *
     * @return the size of the buffer in bytes
     */
    public int getBufferSize();

    /**
     * Obtains the number of bytes of data currently available to the
     * application for processing in the data line's internal buffer. For a
     * source data line, this is the amount of data that can be written to the
     * buffer without blocking. For a target data line, this is the amount of data
     * available to be read by the application. For a clip, this value is always
     * 0 because the audio data is loaded into the buffer when the clip is opened,
     * and persists without modification until the clip is closed.
     * <p>
     * Note that the units used are bytes, but will always
     * correspond to an integral number of sample frames of audio data.
     * <p>
     * An application is guaranteed that a read or
     * write operation of up to the number of bytes returned from
     * <code>available()</code> will not block; however, there is no guarantee
     * that attempts to read or write more data will block.
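     * <p>
     * For example, a capture loop that never blocks on the line might read
     * only what is currently available; the names used here are placeholders:
     * <pre>
     *     byte[] buffer = new byte[targetLine.getBufferSize()];
     *     while (capturing) {
     *         int count = Math.min(targetLine.available(), buffer.length);
     *         if (count > 0) {
     *             int read = targetLine.read(buffer, 0, count);
     *             // ... process the first 'read' bytes of buffer ...
     *         }
     *     }
     * </pre>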
     *
     * @return the amount of data available, in bytes
     */
    public int available();

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
     * This return value will wrap around after 2<sup>31</sup> frames. It is recommended
     * to use <code>getLongFramePosition</code> instead.
     *
     * @return the number of frames already processed since the line was opened
     * @see #getLongFramePosition()
     */
    public int getFramePosition();

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
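     * <p>
     * For example, the elapsed media time in seconds can be estimated from the
     * frame position and the frame rate of the line's current format (assuming
     * the frame rate is specified):
     * <pre>
     *     double seconds = line.getLongFramePosition() / (double) line.getFormat().getFrameRate();
     * </pre>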
     *
     * @return the number of frames already processed since the line was opened
     * @since 1.5
     */
    public long getLongFramePosition();

    /**
     * Obtains the current position in the audio data, in microseconds.
     * The microsecond position measures the time corresponding to the number
     * of sample frames captured by, or rendered from, the line since it was opened.
     * The level of precision is not guaranteed. For example, an implementation
     * might calculate the microsecond position from the current frame position
     * and the audio sample frame rate. The precision in microseconds would
     * then be limited to the number of microseconds per sample frame.
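     * At a frame rate of 44100 frames per second, for instance, that
     * granularity would be roughly 1,000,000 / 44100, or about 23 microseconds
     * per frame.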
     *
     * @return the number of microseconds of data processed since the line was opened
     */
    public long getMicrosecondPosition();

    /**
     * Obtains the current volume level for the line. This level is a measure
     * of the signal's current amplitude, and should not be confused with the
     * current setting of a gain control. The range is from 0.0 (silence) to
     * 1.0 (maximum possible amplitude for the sound waveform). The units
     * measure linear amplitude, not decibels.
     *
     * @return the current amplitude of the signal in this line, or
     * <code>{@link AudioSystem#NOT_SPECIFIED}</code>
     */
    public float getLevel();

    /**
     * Besides the class information inherited from its superclass,
     * <code>DataLine.Info</code> provides additional information specific to data lines.
     * This information includes:
     * <ul>
     * <li> the audio formats supported by the data line
     * <li> the minimum and maximum sizes of its internal buffer
     * </ul>
     * Because a <code>Line.Info</code> knows the class of the line it describes, a
     * <code>DataLine.Info</code> object can describe <code>DataLine</code>
     * subinterfaces such as <code>{@link SourceDataLine}</code>,
     * <code>{@link TargetDataLine}</code>, and <code>{@link Clip}</code>.
     * You can query a mixer for lines of any of these types, passing an appropriate
     * instance of <code>DataLine.Info</code> as the argument to a method such as
     * <code>{@link Mixer#getLine Mixer.getLine(Line.Info)}</code>.
     *
     * @see Line.Info
     * @author Kara Kytle
     * @version 1.33, 04/07/14
     * @since 1.3
     */
    public static class Info extends Line.Info {

        private AudioFormat[] formats;
        private int minBufferSize;
        private int maxBufferSize;

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a set of supported audio formats and a range for the buffer size.
         * This constructor is typically used by mixer implementations
         * when returning information about a supported line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param formats set of formats supported
         * @param minBufferSize minimum buffer size supported by the data line, in bytes
         * @param maxBufferSize maximum buffer size supported by the data line, in bytes
         */
        public Info(Class<?> lineClass, AudioFormat[] formats, int minBufferSize, int maxBufferSize) {
            super(lineClass);
            if (formats == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = formats;
            }
            this.minBufferSize = minBufferSize;
            this.maxBufferSize = maxBufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format and a desired buffer size.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         * @param bufferSize desired buffer size in bytes
         */
        public Info(Class<?> lineClass, AudioFormat format, int bufferSize) {
            super(lineClass);
            if (format == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = new AudioFormat[] { format };
            }
            this.minBufferSize = bufferSize;
            this.maxBufferSize = bufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         */
        public Info(Class<?> lineClass, AudioFormat format) {
            this(lineClass, format, AudioSystem.NOT_SPECIFIED);
        }

        /**
         * Obtains a set of audio formats supported by the data line.
         * Note that <code>isFormatSupported(AudioFormat)</code> might return
         * <code>true</code> for certain additional formats that are missing from
         * the set returned by <code>getFormats()</code>. The reverse is not
         * the case: <code>isFormatSupported(AudioFormat)</code> is guaranteed to return
         * <code>true</code> for all formats returned by <code>getFormats()</code>.
         * <p>
         * Some fields in the <code>AudioFormat</code> instances can be set to
         * {@link javax.sound.sampled.AudioSystem#NOT_SPECIFIED NOT_SPECIFIED}
         * if that field does not apply to the format,
         * or if the format supports a wide range of values for that field.
         * For example, a multi-channel device supporting up to
         * 64 channels could set the channel field in the
         * <code>AudioFormat</code> instances returned by this
         * method to <code>NOT_SPECIFIED</code>.
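         * <p>
         * Code that examines the returned formats should therefore check for
         * such wildcard fields, along these lines (where <code>info</code> is
         * a <code>DataLine.Info</code> instance):
         * <pre>
         *     AudioFormat[] formats = info.getFormats();
         *     for (int i = 0; i &lt; formats.length; i++) {
         *         if (formats[i].getChannels() == AudioSystem.NOT_SPECIFIED) {
         *             // any number of channels is acceptable for this format
         *         }
         *     }
         * </pre>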
         *
         * @return a set of supported audio formats.
         * @see #isFormatSupported(AudioFormat)
         */
        public AudioFormat[] getFormats() {
            AudioFormat[] returnedArray = new AudioFormat[formats.length];
            System.arraycopy(formats, 0, returnedArray, 0, formats.length);
            return returnedArray;
        }

        /**
         * Indicates whether this data line supports a particular audio format.
         * The default implementation of this method simply returns <code>true</code> if
         * the specified format matches any of the supported formats.
         *
         * @param format the audio format for which support is queried.
         * @return <code>true</code> if the format is supported, otherwise <code>false</code>
         * @see #getFormats
         * @see AudioFormat#matches
         */
        public boolean isFormatSupported(AudioFormat format) {
            for (int i = 0; i < formats.length; i++) {
                if (format.matches(formats[i])) {
                    return true;
                }
            }
            return false;
        }

        /**
         * Obtains the minimum buffer size supported by the data line.
         *
         * @return minimum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMinBufferSize() {
            return minBufferSize;
        }

        /**
         * Obtains the maximum buffer size supported by the data line.
         *
         * @return maximum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMaxBufferSize() {
            return maxBufferSize;
        }

        /**
         * Determines whether the specified info object matches this one.
         * To match, the superclass match requirements must be met. In
         * addition, this object's minimum buffer size must be at least as
         * large as that of the object specified, its maximum buffer size must
         * be at most as large as that of the object specified, and all of its
         * formats must match formats supported by the object specified.
         *
         * @param info the info object which is being compared to this one
         * @return <code>true</code> if this object matches the one specified,
         * otherwise <code>false</code>
         */
        public boolean matches(Line.Info info) {

            if (!super.matches(info)) {
                return false;
            }

            Info dataLineInfo = (Info) info;

            // Treat anything < 0 as NOT_SPECIFIED. (Demo code in the old Java
            // Sound Demo used a wrong buffer calculation that could produce
            // arbitrary negative values.)
            if ((getMaxBufferSize() >= 0) && (dataLineInfo.getMaxBufferSize() >= 0)) {
                if (getMaxBufferSize() > dataLineInfo.getMaxBufferSize()) {
                    return false;
                }
            }

            if ((getMinBufferSize() >= 0) && (dataLineInfo.getMinBufferSize() >= 0)) {
                if (getMinBufferSize() < dataLineInfo.getMinBufferSize()) {
                    return false;
                }
            }

            AudioFormat[] localFormats = getFormats();
            if (localFormats != null) {
                for (int i = 0; i < localFormats.length; i++) {
                    if (localFormats[i] != null) {
                        if (!dataLineInfo.isFormatSupported(localFormats[i])) {
                            return false;
                        }
                    }
                }
            }

            return true;
        }

        /**
         * Obtains a textual description of the data line info.
         *
         * @return a string description
         */
        public String toString() {

            StringBuffer buf = new StringBuffer();

            if ((formats.length == 1) && (formats[0] != null)) {
                buf.append(" supporting format " + formats[0]);
            } else if (getFormats().length > 1) {
                buf.append(" supporting " + getFormats().length + " audio formats");
            }

            if ((minBufferSize != AudioSystem.NOT_SPECIFIED) && (maxBufferSize != AudioSystem.NOT_SPECIFIED)) {
                buf.append(", and buffers of " + minBufferSize + " to " + maxBufferSize + " bytes");
            } else if ((minBufferSize != AudioSystem.NOT_SPECIFIED) && (minBufferSize > 0)) {
                buf.append(", and buffers of at least " + minBufferSize + " bytes");
            } else if (maxBufferSize != AudioSystem.NOT_SPECIFIED) {
                buf.append(", and buffers of up to " + maxBufferSize + " bytes");
            }

            return super.toString() + buf;
        }
    } // class Info

} // interface DataLine