/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <common/all-versions/IncludeGuard.h>

//#define LOG_NDEBUG 0
#define ATRACE_TAG ATRACE_TAG_AUDIO

#include <memory>

#include <android/log.h>
#include <hardware/audio.h>
#include <utils/Trace.h>

namespace android {
namespace hardware {
namespace audio {
namespace AUDIO_HAL_VERSION {
namespace implementation {

using ::android::hardware::audio::common::AUDIO_HAL_VERSION::ThreadInfo;

namespace {

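// Thread that services the client's write requests. For each command read
// from the command queue (WRITE, GET_PRESENTATION_POSITION or GET_LATENCY)
// it writes one WriteStatus reply to the status queue and signals the
// NOT_FULL event flag; the audio payload for WRITE is taken from the data
// queue.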
class WriteThread : public Thread {
   public:
    // WriteThread's lifespan never exceeds StreamOut's lifespan.
    WriteThread(std::atomic<bool>* stop, audio_stream_out_t* stream,
                StreamOut::CommandMQ* commandMQ, StreamOut::DataMQ* dataMQ,
                StreamOut::StatusMQ* statusMQ, EventFlag* efGroup)
        : Thread(false /*canCallJava*/),
          mStop(stop),
          mStream(stream),
          mCommandMQ(commandMQ),
          mDataMQ(dataMQ),
          mStatusMQ(statusMQ),
          mEfGroup(efGroup),
          mBuffer(nullptr) {}
    bool init() {
        mBuffer.reset(new (std::nothrow) uint8_t[mDataMQ->getQuantumCount()]);
        return mBuffer != nullptr;
    }
    virtual ~WriteThread() {}

   private:
    std::atomic<bool>* mStop;
    audio_stream_out_t* mStream;
    StreamOut::CommandMQ* mCommandMQ;
    StreamOut::DataMQ* mDataMQ;
    StreamOut::StatusMQ* mStatusMQ;
    EventFlag* mEfGroup;
    std::unique_ptr<uint8_t[]> mBuffer;
    IStreamOut::WriteStatus mStatus;

    bool threadLoop() override;

    void doGetLatency();
    void doGetPresentationPosition();
    void doWrite();
};

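// The scratch buffer allocated in init() has the full capacity of the data
// queue, so reading availableToRead() bytes below always fits into it.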
void WriteThread::doWrite() {
    const size_t availToRead = mDataMQ->availableToRead();
    mStatus.retval = Result::OK;
    mStatus.reply.written = 0;
    if (mDataMQ->read(&mBuffer[0], availToRead)) {
        ssize_t writeResult = mStream->write(mStream, &mBuffer[0], availToRead);
        if (writeResult >= 0) {
            mStatus.reply.written = writeResult;
        } else {
            mStatus.retval = Stream::analyzeStatus("write", writeResult);
        }
    }
}

void WriteThread::doGetPresentationPosition() {
    mStatus.retval =
        StreamOut::getPresentationPositionImpl(mStream, &mStatus.reply.presentationPosition.frames,
                                               &mStatus.reply.presentationPosition.timeStamp);
}

void WriteThread::doGetLatency() {
    mStatus.retval = Result::OK;
    mStatus.reply.latencyMs = mStream->get_latency(mStream);
}

bool WriteThread::threadLoop() {
    // This implementation doesn't return control back to the Thread until it
    // decides to stop, as the Thread uses mutexes, and this can lead to
    // priority inversion.
    while (!std::atomic_load_explicit(mStop, std::memory_order_acquire)) {
        uint32_t efState = 0;
        mEfGroup->wait(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY), &efState);
        if (!(efState & static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY))) {
            continue;  // Nothing to do.
        }
        if (!mCommandMQ->read(&mStatus.replyTo)) {
            continue;  // Nothing to do.
        }
        switch (mStatus.replyTo) {
            case IStreamOut::WriteCommand::WRITE:
                doWrite();
                break;
            case IStreamOut::WriteCommand::GET_PRESENTATION_POSITION:
                doGetPresentationPosition();
                break;
            case IStreamOut::WriteCommand::GET_LATENCY:
                doGetLatency();
                break;
            default:
                ALOGE("Unknown write thread command code %d", mStatus.replyTo);
                mStatus.retval = Result::NOT_SUPPORTED;
                break;
        }
        if (!mStatusMQ->write(&mStatus)) {
            ALOGE("status message queue write failed");
        }
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_FULL));
    }

    return false;
}

}  // namespace

StreamOut::StreamOut(const sp<Device>& device, audio_stream_out_t* stream)
    : mIsClosed(false),
      mDevice(device),
      mStream(stream),
      mStreamCommon(new Stream(&stream->common)),
      mStreamMmap(new StreamMmap<audio_stream_out_t>(stream)),
      mEfGroup(nullptr),
      mStopWriteThread(false) {}

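// Teardown order: close() asks the write thread to stop and wakes it, the
// thread is joined, the MQ event flag is deleted, and only then is the
// legacy HAL stream closed and released.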
StreamOut::~StreamOut() {
    ATRACE_CALL();
    close();
    if (mWriteThread.get()) {
        ATRACE_NAME("mWriteThread->join");
        status_t status = mWriteThread->join();
        ALOGE_IF(status, "write thread exit error: %s", strerror(-status));
    }
    if (mEfGroup) {
        status_t status = EventFlag::deleteEventFlag(&mEfGroup);
        ALOGE_IF(status, "write MQ event flag deletion error: %s", strerror(-status));
    }
    mCallback.clear();
    mDevice->closeOutputStream(mStream);
    // Closing the output stream in the HAL waits for the callback to finish
    // and joins the callback thread. Thus it is guaranteed that the callback
    // thread will not be accessing our object anymore.
    mStream = nullptr;
}

// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStream follow.
Return<uint64_t> StreamOut::getFrameSize() {
    return audio_stream_out_frame_size(mStream);
}

Return<uint64_t> StreamOut::getFrameCount() {
    return mStreamCommon->getFrameCount();
}

Return<uint64_t> StreamOut::getBufferSize() {
    return mStreamCommon->getBufferSize();
}

Return<uint32_t> StreamOut::getSampleRate() {
    return mStreamCommon->getSampleRate();
}

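// The parameterless overloads below are compiled only for the V2.0 HAL
// interface; the overloads taking an AudioFormat are not version-guarded.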
#ifdef AUDIO_HAL_VERSION_2_0
Return<void> StreamOut::getSupportedChannelMasks(getSupportedChannelMasks_cb _hidl_cb) {
    return mStreamCommon->getSupportedChannelMasks(_hidl_cb);
}
Return<void> StreamOut::getSupportedSampleRates(getSupportedSampleRates_cb _hidl_cb) {
    return mStreamCommon->getSupportedSampleRates(_hidl_cb);
}
#endif

Return<void> StreamOut::getSupportedChannelMasks(AudioFormat format,
                                                 getSupportedChannelMasks_cb _hidl_cb) {
    return mStreamCommon->getSupportedChannelMasks(format, _hidl_cb);
}
Return<void> StreamOut::getSupportedSampleRates(AudioFormat format,
                                                getSupportedSampleRates_cb _hidl_cb) {
    return mStreamCommon->getSupportedSampleRates(format, _hidl_cb);
}

Return<Result> StreamOut::setSampleRate(uint32_t sampleRateHz) {
    return mStreamCommon->setSampleRate(sampleRateHz);
}

Return<AudioChannelBitfield> StreamOut::getChannelMask() {
    return mStreamCommon->getChannelMask();
}

Return<Result> StreamOut::setChannelMask(AudioChannelBitfield mask) {
    return mStreamCommon->setChannelMask(mask);
}

Return<AudioFormat> StreamOut::getFormat() {
    return mStreamCommon->getFormat();
}

Return<void> StreamOut::getSupportedFormats(getSupportedFormats_cb _hidl_cb) {
    return mStreamCommon->getSupportedFormats(_hidl_cb);
}

Return<Result> StreamOut::setFormat(AudioFormat format) {
    return mStreamCommon->setFormat(format);
}

Return<void> StreamOut::getAudioProperties(getAudioProperties_cb _hidl_cb) {
    return mStreamCommon->getAudioProperties(_hidl_cb);
}

Return<Result> StreamOut::addEffect(uint64_t effectId) {
    return mStreamCommon->addEffect(effectId);
}

Return<Result> StreamOut::removeEffect(uint64_t effectId) {
    return mStreamCommon->removeEffect(effectId);
}

Return<Result> StreamOut::standby() {
    return mStreamCommon->standby();
}

Return<Result> StreamOut::setHwAvSync(uint32_t hwAvSync) {
    return mStreamCommon->setHwAvSync(hwAvSync);
}

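// Device routing and key/value parameter methods have different signatures in
// V2.0 and V4.0; in both cases they simply forward to the shared Stream
// implementation.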
#ifdef AUDIO_HAL_VERSION_2_0
Return<Result> StreamOut::setConnectedState(const DeviceAddress& address, bool connected) {
    return mStreamCommon->setConnectedState(address, connected);
}

Return<AudioDevice> StreamOut::getDevice() {
    return mStreamCommon->getDevice();
}

Return<Result> StreamOut::setDevice(const DeviceAddress& address) {
    return mStreamCommon->setDevice(address);
}

Return<void> StreamOut::getParameters(const hidl_vec<hidl_string>& keys,
                                      getParameters_cb _hidl_cb) {
    return mStreamCommon->getParameters(keys, _hidl_cb);
}

Return<Result> StreamOut::setParameters(const hidl_vec<ParameterValue>& parameters) {
    return mStreamCommon->setParameters(parameters);
}

Return<void> StreamOut::debugDump(const hidl_handle& fd) {
    return mStreamCommon->debugDump(fd);
}
#elif defined(AUDIO_HAL_VERSION_4_0)
Return<void> StreamOut::getDevices(getDevices_cb _hidl_cb) {
    return mStreamCommon->getDevices(_hidl_cb);
}

Return<Result> StreamOut::setDevices(const hidl_vec<DeviceAddress>& devices) {
    return mStreamCommon->setDevices(devices);
}
Return<void> StreamOut::getParameters(const hidl_vec<ParameterValue>& context,
                                      const hidl_vec<hidl_string>& keys,
                                      getParameters_cb _hidl_cb) {
    return mStreamCommon->getParameters(context, keys, _hidl_cb);
}

Return<Result> StreamOut::setParameters(const hidl_vec<ParameterValue>& context,
                                        const hidl_vec<ParameterValue>& parameters) {
    return mStreamCommon->setParameters(context, parameters);
}
#endif

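// close() only marks the stream as closed and signals the write thread to
// stop; the thread is joined and the remaining resources are released in the
// destructor.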
Return<Result> StreamOut::close() {
    if (mIsClosed) return Result::INVALID_STATE;
    mIsClosed = true;
    if (mWriteThread.get()) {
        mStopWriteThread.store(true, std::memory_order_release);
    }
    if (mEfGroup) {
        mEfGroup->wake(static_cast<uint32_t>(MessageQueueFlagBits::NOT_EMPTY));
    }
    return Result::OK;
}

// Methods from ::android::hardware::audio::AUDIO_HAL_VERSION::IStreamOut follow.
Return<uint32_t> StreamOut::getLatency() {
    return mStream->get_latency(mStream);
}

Return<Result> StreamOut::setVolume(float left, float right) {
    if (mStream->set_volume == NULL) {
        return Result::NOT_SUPPORTED;
    }
    if (!isGainNormalized(left) || !isGainNormalized(right)) {
        ALOGW("Cannot set a stream output volume {%f, %f} outside [0,1]", left, right);
        return Result::INVALID_ARGUMENTS;
    }
    return Stream::analyzeStatus("set_volume", mStream->set_volume(mStream, left, right));
}

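// prepareForWriting() creates the fast message queues used by the write
// protocol and returns their descriptors to the client. A rough sketch of the
// client side of one transaction, mirroring WriteThread::threadLoop() above
// (the names here are illustrative only, not part of this implementation):
//
//     commandMQ.write(&command);                 // e.g. WriteCommand::WRITE
//     dataMQ.write(audioData, sizeInBytes);      // payload, WRITE only
//     efGroup->wake(NOT_EMPTY);                  // wake the write thread
//     efGroup->wait(NOT_FULL, &state);           // wait for the reply
//     statusMQ.read(&writeStatus);               // retval and reply payload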
Return<void> StreamOut::prepareForWriting(uint32_t frameSize, uint32_t framesCount,
                                          prepareForWriting_cb _hidl_cb) {
    status_t status;
    ThreadInfo threadInfo = {0, 0};

    // Wrap the _hidl_cb to return an error
    auto sendError = [&threadInfo, &_hidl_cb](Result result) {
        _hidl_cb(result, CommandMQ::Descriptor(), DataMQ::Descriptor(), StatusMQ::Descriptor(),
                 threadInfo);
    };

    // Create message queues.
    if (mDataMQ) {
        ALOGE("the client attempts to call prepareForWriting twice");
        sendError(Result::INVALID_STATE);
        return Void();
    }
    std::unique_ptr<CommandMQ> tempCommandMQ(new CommandMQ(1));

    // Check frameSize and framesCount
    if (frameSize == 0 || framesCount == 0) {
        ALOGE("Zero frameSize (%u) or framesCount (%u)", frameSize, framesCount);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    if (frameSize > Stream::MAX_BUFFER_SIZE / framesCount) {
        ALOGE("Buffer too big: %u*%u bytes > MAX_BUFFER_SIZE (%u)", frameSize, framesCount,
              Stream::MAX_BUFFER_SIZE);
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    std::unique_ptr<DataMQ> tempDataMQ(new DataMQ(frameSize * framesCount, true /* EventFlag */));

    std::unique_ptr<StatusMQ> tempStatusMQ(new StatusMQ(1));
    if (!tempCommandMQ->isValid() || !tempDataMQ->isValid() || !tempStatusMQ->isValid()) {
        ALOGE_IF(!tempCommandMQ->isValid(), "command MQ is invalid");
        ALOGE_IF(!tempDataMQ->isValid(), "data MQ is invalid");
        ALOGE_IF(!tempStatusMQ->isValid(), "status MQ is invalid");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    EventFlag* tempRawEfGroup{};
    status = EventFlag::createEventFlag(tempDataMQ->getEventFlagWord(), &tempRawEfGroup);
    std::unique_ptr<EventFlag, void (*)(EventFlag*)> tempEfGroup(
        tempRawEfGroup, [](auto* ef) { EventFlag::deleteEventFlag(&ef); });
    if (status != OK || !tempEfGroup) {
        ALOGE("failed creating event flag for data MQ: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

    // Create and launch the thread.
    auto tempWriteThread =
        std::make_unique<WriteThread>(&mStopWriteThread, mStream, tempCommandMQ.get(),
                                      tempDataMQ.get(), tempStatusMQ.get(), tempEfGroup.get());
    if (!tempWriteThread->init()) {
        ALOGW("failed to initialize writer thread");
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }
    status = tempWriteThread->run("writer", PRIORITY_URGENT_AUDIO);
    if (status != OK) {
        ALOGW("failed to start writer thread: %s", strerror(-status));
        sendError(Result::INVALID_ARGUMENTS);
        return Void();
    }

    mCommandMQ = std::move(tempCommandMQ);
    mDataMQ = std::move(tempDataMQ);
    mStatusMQ = std::move(tempStatusMQ);
    mWriteThread = tempWriteThread.release();
    mEfGroup = tempEfGroup.release();
    threadInfo.pid = getpid();
    threadInfo.tid = mWriteThread->getTid();
    _hidl_cb(Result::OK, *mCommandMQ->getDesc(), *mDataMQ->getDesc(), *mStatusMQ->getDesc(),
             threadInfo);
    return Void();
}

Return<void> StreamOut::getRenderPosition(getRenderPosition_cb _hidl_cb) {
    uint32_t halDspFrames;
    Result retval = Stream::analyzeStatus("get_render_position",
                                          mStream->get_render_position(mStream, &halDspFrames));
    _hidl_cb(retval, halDspFrames);
    return Void();
}

Return<void> StreamOut::getNextWriteTimestamp(getNextWriteTimestamp_cb _hidl_cb) {
    Result retval(Result::NOT_SUPPORTED);
    int64_t timestampUs = 0;
    if (mStream->get_next_write_timestamp != NULL) {
        retval = Stream::analyzeStatus("get_next_write_timestamp",
                                       mStream->get_next_write_timestamp(mStream, &timestampUs));
    }
    _hidl_cb(retval, timestampUs);
    return Void();
}

Return<Result> StreamOut::setCallback(const sp<IStreamOutCallback>& callback) {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    // Safe to pass 'this' because it is guaranteed that the callback thread
    // is joined prior to exit from StreamOut's destructor.
    int result = mStream->set_callback(mStream, StreamOut::asyncCallback, this);
    if (result == 0) {
        mCallback = callback;
    }
    return Stream::analyzeStatus("set_callback", result);
}

Return<Result> StreamOut::clearCallback() {
    if (mStream->set_callback == NULL) return Result::NOT_SUPPORTED;
    mCallback.clear();
    return Result::OK;
}

// static
int StreamOut::asyncCallback(stream_callback_event_t event, void*, void* cookie) {
    // It is guaranteed that the callback thread is joined prior
    // to exiting from StreamOut's destructor. Must *not* use sp<StreamOut>
    // here because it can make this code the last owner of StreamOut,
    // and an attempt to run the destructor on the callback thread
    // will cause a deadlock in the legacy HAL code.
    StreamOut* self = reinterpret_cast<StreamOut*>(cookie);
    // Hold a local sp<> to the callback because the reference held by the
    // StreamOut instance can be cleared in the meantime. It does not matter
    // on which thread IStreamOutCallback's destructor runs.
    sp<IStreamOutCallback> callback = self->mCallback;
    if (callback.get() == nullptr) return 0;
    ALOGV("asyncCallback() event %d", event);
    switch (event) {
        case STREAM_CBK_EVENT_WRITE_READY:
            callback->onWriteReady();
            break;
        case STREAM_CBK_EVENT_DRAIN_READY:
            callback->onDrainReady();
            break;
        case STREAM_CBK_EVENT_ERROR:
            callback->onError();
            break;
        default:
            ALOGW("asyncCallback() unknown event %d", event);
            break;
    }
    return 0;
}

Return<void> StreamOut::supportsPauseAndResume(supportsPauseAndResume_cb _hidl_cb) {
    _hidl_cb(mStream->pause != NULL, mStream->resume != NULL);
    return Void();
}

Return<Result> StreamOut::pause() {
    return mStream->pause != NULL ? Stream::analyzeStatus("pause", mStream->pause(mStream))
                                  : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::resume() {
    return mStream->resume != NULL ? Stream::analyzeStatus("resume", mStream->resume(mStream))
                                   : Result::NOT_SUPPORTED;
}

Return<bool> StreamOut::supportsDrain() {
    return mStream->drain != NULL;
}

Return<Result> StreamOut::drain(AudioDrain type) {
    return mStream->drain != NULL
               ? Stream::analyzeStatus(
                     "drain", mStream->drain(mStream, static_cast<audio_drain_type_t>(type)))
               : Result::NOT_SUPPORTED;
}

Return<Result> StreamOut::flush() {
    return mStream->flush != NULL ? Stream::analyzeStatus("flush", mStream->flush(mStream))
                                  : Result::NOT_SUPPORTED;
}

// static
Result StreamOut::getPresentationPositionImpl(audio_stream_out_t* stream, uint64_t* frames,
                                              TimeSpec* timeStamp) {
    // Don't spam the log on EINVAL: it's normal for get_presentation_position
    // to return it sometimes. EAGAIN may be returned by the A2DP audio HAL
    // implementation. ENODATA can also be reported when the stream has been
    // stopped while the writer keeps querying the position.
    static const std::vector<int> ignoredErrors{EINVAL, EAGAIN, ENODATA};
    Result retval(Result::NOT_SUPPORTED);
    if (stream->get_presentation_position == NULL) return retval;
    struct timespec halTimeStamp;
    retval = Stream::analyzeStatus("get_presentation_position",
                                   stream->get_presentation_position(stream, frames, &halTimeStamp),
                                   ignoredErrors);
    if (retval == Result::OK) {
        timeStamp->tvSec = halTimeStamp.tv_sec;
        timeStamp->tvNSec = halTimeStamp.tv_nsec;
    }
    return retval;
}

Return<void> StreamOut::getPresentationPosition(getPresentationPosition_cb _hidl_cb) {
    uint64_t frames = 0;
    TimeSpec timeStamp = {0, 0};
    Result retval = getPresentationPositionImpl(mStream, &frames, &timeStamp);
    _hidl_cb(retval, frames, timeStamp);
    return Void();
}

Return<Result> StreamOut::start() {
    return mStreamMmap->start();
}

Return<Result> StreamOut::stop() {
    return mStreamMmap->stop();
}

Return<void> StreamOut::createMmapBuffer(int32_t minSizeFrames, createMmapBuffer_cb _hidl_cb) {
    return mStreamMmap->createMmapBuffer(minSizeFrames, audio_stream_out_frame_size(mStream),
                                         _hidl_cb);
}

Return<void> StreamOut::getMmapPosition(getMmapPosition_cb _hidl_cb) {
    return mStreamMmap->getMmapPosition(_hidl_cb);
}

Return<void> StreamOut::debug(const hidl_handle& fd, const hidl_vec<hidl_string>& options) {
    return mStreamCommon->debug(fd, options);
}

#ifdef AUDIO_HAL_VERSION_4_0
Return<void> StreamOut::updateSourceMetadata(const SourceMetadata& sourceMetadata) {
    if (mStream->update_source_metadata == nullptr) {
        return Void();  // not supported by the HAL
    }
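    // Note: halTracks must outlive the update_source_metadata() call below
    // because halMetadata only points into its storage.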
    std::vector<playback_track_metadata> halTracks;
    halTracks.reserve(sourceMetadata.tracks.size());
    for (auto& metadata : sourceMetadata.tracks) {
        halTracks.push_back({
            .usage = static_cast<audio_usage_t>(metadata.usage),
            .content_type = static_cast<audio_content_type_t>(metadata.contentType),
            .gain = metadata.gain,
        });
    }
    const source_metadata_t halMetadata = {
        .track_count = halTracks.size(), .tracks = halTracks.data(),
    };
    mStream->update_source_metadata(mStream, &halMetadata);
    return Void();
}
Return<Result> StreamOut::selectPresentation(int32_t /*presentationId*/, int32_t /*programId*/) {
    return Result::NOT_SUPPORTED;  // TODO: propagate to legacy
}
#endif

}  // namespace implementation
}  // namespace AUDIO_HAL_VERSION
}  // namespace audio
}  // namespace hardware
}  // namespace android