PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/openjdk-multiarch-jdk8u
Path: blob/aarch64-shenandoah-jdk8u272-b10/jdk/src/share/classes/javax/sound/sampled/DataLine.java
/*
 * Copyright (c) 1999, 2013, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package javax.sound.sampled;

import java.util.Arrays;

/**
 * <code>DataLine</code> adds media-related functionality to its
 * superinterface, <code>{@link Line}</code>. This functionality includes
 * transport-control methods that start, stop, drain, and flush
 * the audio data that passes through the line. A data line can also
 * report the current position, volume, and audio format of the media.
 * Data lines are used for output of audio by means of the
 * subinterfaces <code>{@link SourceDataLine}</code> or
 * <code>{@link Clip}</code>, which allow an application program to write data. Similarly,
 * audio input is handled by the subinterface <code>{@link TargetDataLine}</code>,
 * which allows data to be read.
 * <p>
 * A data line has an internal buffer in which
 * the incoming or outgoing audio data is queued. The
 * <code>{@link #drain()}</code> method blocks until this internal buffer
 * becomes empty, usually because all queued data has been processed. The
 * <code>{@link #flush()}</code> method discards any available queued data
 * from the internal buffer.
 * <p>
 * A data line produces <code>{@link LineEvent.Type#START START}</code> and
 * <code>{@link LineEvent.Type#STOP STOP}</code> events whenever
 * it begins or ceases active presentation or capture of data. These events
 * can be generated in response to specific requests, or as a result of
 * less direct state changes. For example, if <code>{@link #start()}</code> is called
 * on an inactive data line, and data is available for capture or playback, a
 * <code>START</code> event will be generated shortly, when data playback
 * or capture actually begins. Or, if the flow of data to an active data
 * line is constricted so that a gap occurs in the presentation of data,
 * a <code>STOP</code> event is generated.
 * <p>
 * Mixers often support synchronized control of multiple data lines.
 * Synchronization can be established through the Mixer interface's
 * <code>{@link Mixer#synchronize synchronize}</code> method.
 * See the description of the <code>{@link Mixer Mixer}</code> interface
 * for a more complete description.
 *
 * @author Kara Kytle
 * @see LineEvent
 * @since 1.3
 */
public interface DataLine extends Line {
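    /*
     * Illustrative usage sketch: a DataLine is usually obtained and driven
     * through one of its subinterfaces. The steps below open a SourceDataLine
     * with a known format, start it, write audio bytes, and drain it before
     * closing. The 8 kHz mono format and the one-second silence buffer are
     * arbitrary choices for the example; exception handling is omitted.
     *
     *     AudioFormat format = new AudioFormat(8000f, 16, 1, true, false);
     *     SourceDataLine line = AudioSystem.getSourceDataLine(format);
     *     line.open(format);
     *     line.start();                                  // a START event follows once data flows
     *     byte[] oneSecondOfSilence = new byte[16000];   // 8000 frames * 2 bytes per frame
     *     line.write(oneSecondOfSilence, 0, oneSecondOfSilence.length);
     *     line.drain();                                  // block until the internal buffer empties
     *     line.stop();                                   // a STOP event follows
     *     line.close();
     */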

    /**
     * Drains queued data from the line by continuing data I/O until the
     * data line's internal buffer has been emptied.
     * This method blocks until the draining is complete. Because this is a
     * blocking method, it should be used with care. If <code>drain()</code>
     * is invoked on a stopped line that has data in its queue, the method will
     * block until the line is running and the data queue becomes empty. If
     * <code>drain()</code> is invoked by one thread, and another continues to
     * fill the data queue, the operation will not complete.
     * This method always returns when the data line is closed.
     *
     * @see #flush()
     */
    public void drain();
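    /*
     * Illustrative sketch: drain() is typically the last call before closing
     * a playback line, so that audio already queued in the internal buffer is
     * heard in full. Assumes "line" is an open, started SourceDataLine and
     * "lastChunk" holds the final whole sample frames to play.
     *
     *     line.write(lastChunk, 0, lastChunk.length);
     *     line.drain();   // blocks until the queued data has been rendered
     *     line.stop();
     *     line.close();
     */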

    /**
     * Flushes queued data from the line. The flushed data is discarded.
     * In some cases, not all queued data can be discarded. For example, a
     * mixer can flush data from the buffer for a specific input line, but any
     * unplayed data already in the output buffer (the result of the mix) will
     * still be played. You can invoke this method after pausing a line (the
     * normal case) if you want to skip the "stale" data when you restart
     * playback or capture. (It is legal to flush a line that is not stopped,
     * but doing so on an active line is likely to cause a discontinuity in the
     * data, resulting in a perceptible click.)
     *
     * @see #stop()
     * @see #drain()
     */
    public void flush();
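    /*
     * Illustrative sketch: a typical "skip" or "seek" stops the line, flushes
     * the now-stale queued data, and restarts playback from the new position.
     * Assumes "line" is an open SourceDataLine and "newData" holds audio
     * starting at the new position.
     *
     *     line.stop();                             // pause presentation
     *     line.flush();                            // discard queued, stale data
     *     line.write(newData, 0, newData.length);  // queue data from the new position
     *     line.start();                            // resume playback
     */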

    /**
     * Allows a line to engage in data I/O. If invoked on a line
     * that is already running, this method does nothing. Unless the data in
     * the buffer has been flushed, the line resumes I/O starting
     * with the first frame that was unprocessed at the time the line was
     * stopped. When audio capture or playback starts, a
     * <code>{@link LineEvent.Type#START START}</code> event is generated.
     *
     * @see #stop()
     * @see #isRunning()
     * @see LineEvent
     */
    public void start();

    /**
     * Stops the line. A stopped line should cease I/O activity.
     * If the line is open and running, however, it should retain the resources required
     * to resume activity. A stopped line should retain any audio data in its buffer
     * instead of discarding it, so that upon resumption the I/O can continue where it left off,
     * if possible. (This doesn't guarantee that there will never be discontinuities beyond the
     * current buffer, of course; if the stopped condition continues
     * for too long, input or output samples might be dropped.) If desired, the retained data can be
     * discarded by invoking the <code>flush</code> method.
     * When audio capture or playback stops, a <code>{@link LineEvent.Type#STOP STOP}</code> event is generated.
     *
     * @see #start()
     * @see #isRunning()
     * @see #flush()
     * @see LineEvent
     */
    public void stop();
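    /*
     * Illustrative sketch: the START and STOP events described above can be
     * observed by registering a LineListener before starting the line.
     * Assumes "line" is an open DataLine.
     *
     *     line.addLineListener(event -> {
     *         if (event.getType() == LineEvent.Type.START) {
     *             System.out.println("started at frame " + event.getFramePosition());
     *         } else if (event.getType() == LineEvent.Type.STOP) {
     *             System.out.println("stopped at frame " + event.getFramePosition());
     *         }
     *     });
     *     line.start();
     */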

    /**
     * Indicates whether the line is running. The default is <code>false</code>.
     * An open line begins running when the first data is presented in response to an
     * invocation of the <code>start</code> method, and continues
     * until presentation ceases in response to a call to <code>stop</code> or
     * because playback completes.
     * @return <code>true</code> if the line is running, otherwise <code>false</code>
     * @see #start()
     * @see #stop()
     */
    public boolean isRunning();

    /**
     * Indicates whether the line is engaging in active I/O (such as playback
     * or capture). When an inactive line becomes active, it sends a
     * <code>{@link LineEvent.Type#START START}</code> event to its listeners. Similarly, when
     * an active line becomes inactive, it sends a
     * <code>{@link LineEvent.Type#STOP STOP}</code> event.
     * @return <code>true</code> if the line is actively capturing or rendering
     * sound, otherwise <code>false</code>
     * @see #isOpen
     * @see #addLineListener
     * @see #removeLineListener
     * @see LineEvent
     * @see LineListener
     */
    public boolean isActive();

    /**
     * Obtains the current format (encoding, sample rate, number of channels,
     * etc.) of the data line's audio data.
     *
     * <p>If the line is not open and has never been opened, it returns
     * the default format. The default format is an implementation
     * specific audio format, or, if the <code>DataLine.Info</code>
     * object, which was used to retrieve this <code>DataLine</code>,
     * specifies at least one fully qualified audio format, the
     * last one will be used as the default format. Opening the
     * line with a specific audio format (e.g.
     * {@link SourceDataLine#open(AudioFormat)}) will override the
     * default format.
     *
     * @return current audio data format
     * @see AudioFormat
     */
    public AudioFormat getFormat();

    /**
     * Obtains the maximum number of bytes of data that will fit in the data line's
     * internal buffer. For a source data line, this is the size of the buffer to
     * which data can be written. For a target data line, it is the size of
     * the buffer from which data can be read. Note that
     * the units used are bytes, but will always correspond to an integral
     * number of sample frames of audio data.
     *
     * @return the size of the buffer in bytes
     */
    public int getBufferSize();

    /**
     * Obtains the number of bytes of data currently available to the
     * application for processing in the data line's internal buffer. For a
     * source data line, this is the amount of data that can be written to the
     * buffer without blocking. For a target data line, this is the amount of data
     * available to be read by the application. For a clip, this value is always
     * 0 because the audio data is loaded into the buffer when the clip is opened,
     * and persists without modification until the clip is closed.
     * <p>
     * Note that the units used are bytes, but will always
     * correspond to an integral number of sample frames of audio data.
     * <p>
     * An application is guaranteed that a read or
     * write operation of up to the number of bytes returned from
     * <code>available()</code> will not block; however, there is no guarantee
     * that attempts to read or write more data will block.
     *
     * @return the amount of data available, in bytes
     */
    public int available();
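    /*
     * Illustrative sketch: available() can keep writes from blocking by never
     * handing the line more bytes than currently fit in its internal buffer.
     * Assumes "line" is an open, started SourceDataLine and "data" holds whole
     * sample frames; a real application would wait or do other work when
     * nothing is writable instead of spinning.
     *
     *     int offset = 0;
     *     while (offset < data.length) {
     *         int writable = Math.min(line.available(), data.length - offset);
     *         if (writable > 0) {
     *             offset += line.write(data, offset, writable);  // does not block
     *         }
     *     }
     */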

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
     * This return value will wrap around after 2^31 frames. It is recommended
     * to use <code>getLongFramePosition</code> instead.
     *
     * @return the number of frames already processed since the line was opened
     * @see #getLongFramePosition()
     */
    public int getFramePosition();

    /**
     * Obtains the current position in the audio data, in sample frames.
     * The frame position measures the number of sample
     * frames captured by, or rendered from, the line since it was opened.
     *
     * @return the number of frames already processed since the line was opened
     * @since 1.5
     */
    public long getLongFramePosition();

    /**
     * Obtains the current position in the audio data, in microseconds.
     * The microsecond position measures the time corresponding to the number
     * of sample frames captured by, or rendered from, the line since it was opened.
     * The level of precision is not guaranteed. For example, an implementation
     * might calculate the microsecond position from the current frame position
     * and the audio sample frame rate. The precision in microseconds would
     * then be limited to the number of microseconds per sample frame.
     *
     * @return the number of microseconds of data processed since the line was opened
     */
    public long getMicrosecondPosition();
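    /*
     * Illustrative sketch: the frame position and the microsecond position are
     * related through the frame rate of the line's format. Assumes "line" is an
     * open DataLine whose format has a fully specified frame rate.
     *
     *     long frames = line.getLongFramePosition();
     *     float frameRate = line.getFormat().getFrameRate();  // frames per second
     *     long approxMicros = (long) (frames / frameRate * 1_000_000);
     *     // approxMicros should roughly track line.getMicrosecondPosition()
     */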

    /**
     * Obtains the current volume level for the line. This level is a measure
     * of the signal's current amplitude, and should not be confused with the
     * current setting of a gain control. The range is from 0.0 (silence) to
     * 1.0 (maximum possible amplitude for the sound waveform). The units
     * measure linear amplitude, not decibels.
     *
     * @return the current amplitude of the signal in this line, or
     * <code>{@link AudioSystem#NOT_SPECIFIED}</code>
     */
    public float getLevel();

    /**
     * Besides the class information inherited from its superclass,
     * <code>DataLine.Info</code> provides additional information specific to data lines.
     * This information includes:
     * <ul>
     * <li> the audio formats supported by the data line
     * <li> the minimum and maximum sizes of its internal buffer
     * </ul>
     * Because a <code>Line.Info</code> knows the class of the line it describes, a
     * <code>DataLine.Info</code> object can describe <code>DataLine</code>
     * subinterfaces such as <code>{@link SourceDataLine}</code>,
     * <code>{@link TargetDataLine}</code>, and <code>{@link Clip}</code>.
     * You can query a mixer for lines of any of these types, passing an appropriate
     * instance of <code>DataLine.Info</code> as the argument to a method such as
     * <code>{@link Mixer#getLine Mixer.getLine(Line.Info)}</code>.
     *
     * @see Line.Info
     * @author Kara Kytle
     * @since 1.3
     */
    public static class Info extends Line.Info {

        private final AudioFormat[] formats;
        private final int minBufferSize;
        private final int maxBufferSize;

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a set of supported audio formats and a range for the buffer size.
         * This constructor is typically used by mixer implementations
         * when returning information about a supported line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param formats set of formats supported
         * @param minBufferSize minimum buffer size supported by the data line, in bytes
         * @param maxBufferSize maximum buffer size supported by the data line, in bytes
         */
        public Info(Class<?> lineClass, AudioFormat[] formats, int minBufferSize, int maxBufferSize) {

            super(lineClass);

            if (formats == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = Arrays.copyOf(formats, formats.length);
            }

            this.minBufferSize = minBufferSize;
            this.maxBufferSize = maxBufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format and a desired buffer size.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         * @param bufferSize desired buffer size in bytes
         */
        public Info(Class<?> lineClass, AudioFormat format, int bufferSize) {

            super(lineClass);

            if (format == null) {
                this.formats = new AudioFormat[0];
            } else {
                this.formats = new AudioFormat[]{format};
            }

            this.minBufferSize = bufferSize;
            this.maxBufferSize = bufferSize;
        }

        /**
         * Constructs a data line's info object from the specified information,
         * which includes a single audio format.
         * This constructor is typically used by an application to
         * describe a desired line.
         *
         * @param lineClass the class of the data line described by the info object
         * @param format desired format
         */
        public Info(Class<?> lineClass, AudioFormat format) {
            this(lineClass, format, AudioSystem.NOT_SPECIFIED);
        }
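        /*
         * Illustrative sketch: an application typically builds a DataLine.Info
         * with the desired line class and audio format, asks AudioSystem whether
         * such a line is available, and then obtains and opens it. The 44.1 kHz
         * stereo format is an arbitrary choice; exception handling is omitted.
         *
         *     AudioFormat format = new AudioFormat(44100f, 16, 2, true, false);
         *     DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
         *     if (AudioSystem.isLineSupported(info)) {
         *         SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
         *         line.open(format);
         *     }
         */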

        /**
         * Obtains a set of audio formats supported by the data line.
         * Note that <code>isFormatSupported(AudioFormat)</code> might return
         * <code>true</code> for certain additional formats that are missing from
         * the set returned by <code>getFormats()</code>. The reverse is not
         * the case: <code>isFormatSupported(AudioFormat)</code> is guaranteed to return
         * <code>true</code> for all formats returned by <code>getFormats()</code>.
         *
         * Some fields in the AudioFormat instances can be set to
         * {@link javax.sound.sampled.AudioSystem#NOT_SPECIFIED NOT_SPECIFIED}
         * if that field does not apply to the format,
         * or if the format supports a wide range of values for that field.
         * For example, a multi-channel device supporting up to
         * 64 channels could set the channel field in the
         * <code>AudioFormat</code> instances returned by this
         * method to <code>NOT_SPECIFIED</code>.
         *
         * @return a set of supported audio formats.
         * @see #isFormatSupported(AudioFormat)
         */
        public AudioFormat[] getFormats() {
            return Arrays.copyOf(formats, formats.length);
        }

        /**
         * Indicates whether this data line supports a particular audio format.
         * The default implementation of this method simply returns <code>true</code> if
         * the specified format matches any of the supported formats.
         *
         * @param format the audio format for which support is queried.
         * @return <code>true</code> if the format is supported, otherwise <code>false</code>
         * @see #getFormats
         * @see AudioFormat#matches
         */
        public boolean isFormatSupported(AudioFormat format) {

            for (int i = 0; i < formats.length; i++) {
                if (format.matches(formats[i])) {
                    return true;
                }
            }

            return false;
        }
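        /*
         * Illustrative sketch: given the DataLine.Info published by a line, an
         * application can test a candidate format before opening the line with
         * it. Assumes "line" is a DataLine; the candidate format is an
         * arbitrary choice.
         *
         *     DataLine.Info info = (DataLine.Info) line.getLineInfo();
         *     AudioFormat candidate = new AudioFormat(22050f, 8, 1, true, false);
         *     boolean usable = info.isFormatSupported(candidate);
         */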

        /**
         * Obtains the minimum buffer size supported by the data line.
         * @return minimum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMinBufferSize() {
            return minBufferSize;
        }

        /**
         * Obtains the maximum buffer size supported by the data line.
         * @return maximum buffer size in bytes, or <code>AudioSystem.NOT_SPECIFIED</code>
         */
        public int getMaxBufferSize() {
            return maxBufferSize;
        }

        /**
         * Determines whether the specified info object matches this one.
         * To match, the superclass match requirements must be met. In
         * addition, this object's minimum buffer size must be at least as
         * large as that of the object specified, its maximum buffer size must
         * be at most as large as that of the object specified, and all of its
         * formats must match formats supported by the object specified.
         * @return <code>true</code> if this object matches the one specified,
         * otherwise <code>false</code>.
         */
        public boolean matches(Line.Info info) {

            if (! (super.matches(info)) ) {
                return false;
            }

            Info dataLineInfo = (Info)info;

            // treat anything < 0 as NOT_SPECIFIED
            // demo code in old Java Sound Demo used a wrong buffer calculation
            // that would lead to arbitrary negative values
            if ((getMaxBufferSize() >= 0) && (dataLineInfo.getMaxBufferSize() >= 0)) {
                if (getMaxBufferSize() > dataLineInfo.getMaxBufferSize()) {
                    return false;
                }
            }

            if ((getMinBufferSize() >= 0) && (dataLineInfo.getMinBufferSize() >= 0)) {
                if (getMinBufferSize() < dataLineInfo.getMinBufferSize()) {
                    return false;
                }
            }

            AudioFormat[] localFormats = getFormats();

            if (localFormats != null) {

                for (int i = 0; i < localFormats.length; i++) {
                    if (! (localFormats[i] == null) ) {
                        if (! (dataLineInfo.isFormatSupported(localFormats[i])) ) {
                            return false;
                        }
                    }
                }
            }

            return true;
        }

        /**
         * Obtains a textual description of the data line info.
         * @return a string description
         */
        public String toString() {

            StringBuffer buf = new StringBuffer();

            if ( (formats.length == 1) && (formats[0] != null) ) {
                buf.append(" supporting format " + formats[0]);
            } else if (getFormats().length > 1) {
                buf.append(" supporting " + getFormats().length + " audio formats");
            }

            if ( (minBufferSize != AudioSystem.NOT_SPECIFIED) && (maxBufferSize != AudioSystem.NOT_SPECIFIED) ) {
                buf.append(", and buffers of " + minBufferSize + " to " + maxBufferSize + " bytes");
            } else if ( (minBufferSize != AudioSystem.NOT_SPECIFIED) && (minBufferSize > 0) ) {
                buf.append(", and buffers of at least " + minBufferSize + " bytes");
            } else if (maxBufferSize != AudioSystem.NOT_SPECIFIED) {
                // only the maximum buffer size is specified in this branch
                buf.append(", and buffers of up to " + maxBufferSize + " bytes");
            }

            return new String(super.toString() + buf);
        }
    } // class Info

} // interface DataLine