/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_coding/main/test/Channel.h"

#include <assert.h>
#include <stdio.h>
#include <string.h>

#include <iostream>

#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
#include "webrtc/system_wrappers/interface/tick_util.h"

namespace webrtc {

int32_t Channel::SendData(const FrameType frameType, const uint8_t payloadType,
                          const uint32_t timeStamp, const uint8_t* payloadData,
                          const uint16_t payloadSize,
                          const RTPFragmentationHeader* fragmentation) {
  WebRtcRTPHeader rtpInfo;
  int32_t status;
  uint16_t payloadDataSize = payloadSize;

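  // Tests may override the outgoing RTP sequence number and timestamp via
  // external_sequence_number_ / external_send_timestamp_; negative values
  // mean "use the locally generated ones".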
  rtpInfo.header.markerBit = false;
  rtpInfo.header.ssrc = 0;
  rtpInfo.header.sequenceNumber = (external_sequence_number_ < 0) ?
      _seqNo++ : static_cast<uint16_t>(external_sequence_number_);
  rtpInfo.header.payloadType = payloadType;
  rtpInfo.header.timestamp = (external_send_timestamp_ < 0) ? timeStamp :
      static_cast<uint32_t>(external_send_timestamp_);

  if (frameType == kAudioFrameCN) {
    rtpInfo.type.Audio.isCNG = true;
  } else {
    rtpInfo.type.Audio.isCNG = false;
  }
  if (frameType == kFrameEmpty) {
    // Skip this frame.
    return 0;
  }

  rtpInfo.type.Audio.channel = 1;
  // Treat fragmentation (RED) separately.
  if (fragmentation != NULL) {
    // Pack both blocks only if there are two of them and the timestamp offset
    // of the redundant block fits in the 14-bit RED field; after a long
    // silence the offset overflows and only the newest block is sent.
    if ((fragmentation->fragmentationTimeDiff[1] <= 0x3fff) &&
        (fragmentation->fragmentationVectorSize == 2)) {
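      // Build an RFC 2198 RED packet:
      //   byte 0    : F bit set (0x80) | payload type of the redundant block
      //   bytes 1-3 : 14-bit timestamp offset followed by 10-bit block length
      //   byte 4    : F bit clear | payload type of the primary block
      //   bytes 5.. : redundant payload, then primary payload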
      // Only set the F bit (0x80) when another block header follows.
      _payloadData[0] = 0x80 + fragmentation->fragmentationPlType[1];
      uint32_t REDheader = (((uint32_t) fragmentation->fragmentationTimeDiff[1])
          << 10) + fragmentation->fragmentationLength[1];
      _payloadData[1] = uint8_t((REDheader >> 16) & 0x000000FF);
      _payloadData[2] = uint8_t((REDheader >> 8) & 0x000000FF);
      _payloadData[3] = uint8_t(REDheader & 0x000000FF);

      _payloadData[4] = fragmentation->fragmentationPlType[0];
      // Copy the redundant (RED) payload.
      memcpy(_payloadData + 5,
             payloadData + fragmentation->fragmentationOffset[1],
             fragmentation->fragmentationLength[1]);
      // Copy the primary payload.
      memcpy(_payloadData + 5 + fragmentation->fragmentationLength[1],
             payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      payloadDataSize += 5;
    } else {
      // Send a single block (the newest one).
      memcpy(_payloadData, payloadData + fragmentation->fragmentationOffset[0],
             fragmentation->fragmentationLength[0]);
      payloadDataSize = uint16_t(fragmentation->fragmentationLength[0]);
      rtpInfo.header.payloadType = fragmentation->fragmentationPlType[0];
    }
  } else {
    memcpy(_payloadData, payloadData, payloadDataSize);
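    // In stereo, the left and right channels arrive as two consecutive
    // SendData() calls. The RTP header built for the left-channel call is
    // cached in _rtpInfo and reused for the right-channel call, so both
    // halves share the same sequence number and timestamp.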
    if (_isStereo) {
      if (_leftChannel) {
        memcpy(&_rtpInfo, &rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = false;
        rtpInfo.type.Audio.channel = 1;
      } else {
        memcpy(&rtpInfo, &_rtpInfo, sizeof(WebRtcRTPHeader));
        _leftChannel = true;
        rtpInfo.type.Audio.channel = 2;
      }
    }
  }

  _channelCritSect->Enter();
  if (_saveBitStream) {
    //fwrite(payloadData, sizeof(uint8_t), payloadSize, _bitStreamFile);
  }

  if (!_isStereo) {
    CalcStatistics(rtpInfo, payloadSize);
  }
  _lastInTimestamp = timeStamp;
  _totalBytes += payloadDataSize;
  _channelCritSect->Leave();

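  // When the FEC test with packet loss is enabled, simulate roughly 33%
  // loss by dropping every third packet instead of delivering it.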
  if (_useFECTestWithPacketLoss) {
    _packetLoss += 1;
    if (_packetLoss == 3) {
      _packetLoss = 0;
      return 0;
    }
  }

  if (num_packets_to_drop_ > 0) {
    num_packets_to_drop_--;
    return 0;
  }

  status = _receiverACM->IncomingPacket(_payloadData, payloadDataSize, rtpInfo);

  return status;
}

// TODO(turajs): rewrite this method.
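// Updates the per-payload-type statistics kept in _payloadStats: for each
// payload type, packets are bucketed by frame size (derived from consecutive
// RTP timestamps), and the packet count, payload bytes and encoded samples
// are accumulated per bucket.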
void Channel::CalcStatistics(WebRtcRTPHeader& rtpInfo, uint16_t payloadSize) {
  int n;
  if ((rtpInfo.header.payloadType != _lastPayloadType)
      && (_lastPayloadType != -1)) {
    // The payload type has changed. Terminate the calculations on the previous
    // payload type; to keep things simple, the last packet of that payload
    // type is ignored.
    for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
      if (_lastPayloadType == _payloadStats[n].payloadType) {
        _payloadStats[n].newPacket = true;
        break;
      }
    }
  }
  _lastPayloadType = rtpInfo.header.payloadType;

  bool newPayload = true;
  ACMTestPayloadStats* currentPayloadStr = NULL;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    if (rtpInfo.header.payloadType == _payloadStats[n].payloadType) {
      newPayload = false;
      currentPayloadStr = &_payloadStats[n];
      break;
    }
  }

  if (!newPayload) {
    if (!currentPayloadStr->newPacket) {
      uint32_t lastFrameSizeSample = (uint32_t)(
          (uint32_t) rtpInfo.header.timestamp
              - (uint32_t) currentPayloadStr->lastTimestamp);
      assert(lastFrameSizeSample > 0);
      int k = 0;
      while ((currentPayloadStr->frameSizeStats[k].frameSizeSample
          != lastFrameSizeSample)
          && (currentPayloadStr->frameSizeStats[k].frameSizeSample != 0)) {
        k++;
      }
      ACMTestFrameSizeStats* currentFrameSizeStats = &(currentPayloadStr
          ->frameSizeStats[k]);
      currentFrameSizeStats->frameSizeSample = (int16_t) lastFrameSizeSample;

      // Increment the number of encoded samples.
      currentFrameSizeStats->totalEncodedSamples += lastFrameSizeSample;
      // Increment the number of received packets.
      currentFrameSizeStats->numPackets++;
      // Increment the total number of bytes. This is based on the previous
      // payload, since the frame size of the current payload is not yet known.
      currentFrameSizeStats->totalPayloadLenByte += currentPayloadStr
          ->lastPayloadLenByte;
      // Store the maximum payload size. This is also based on the previous
      // payload, for the same reason.
      if (currentFrameSizeStats->maxPayloadLen
          < currentPayloadStr->lastPayloadLenByte) {
        currentFrameSizeStats->maxPayloadLen = currentPayloadStr
            ->lastPayloadLenByte;
      }
      // Store the current values for the next call.
      currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
      currentPayloadStr->lastPayloadLenByte = payloadSize;
    } else {
      currentPayloadStr->newPacket = false;
      currentPayloadStr->lastPayloadLenByte = payloadSize;
      currentPayloadStr->lastTimestamp = rtpInfo.header.timestamp;
      currentPayloadStr->payloadType = rtpInfo.header.payloadType;
      memset(currentPayloadStr->frameSizeStats, 0, MAX_NUM_FRAMESIZES *
             sizeof(ACMTestFrameSizeStats));
    }
  } else {
    n = 0;
    while (_payloadStats[n].payloadType != -1) {
      n++;
    }
    // First packet of a new payload type.
    _payloadStats[n].newPacket = false;
    _payloadStats[n].lastPayloadLenByte = payloadSize;
    _payloadStats[n].lastTimestamp = rtpInfo.header.timestamp;
    _payloadStats[n].payloadType = rtpInfo.header.payloadType;
    memset(_payloadStats[n].frameSizeStats, 0, MAX_NUM_FRAMESIZES *
           sizeof(ACMTestFrameSizeStats));
  }
}

Channel::Channel(int16_t chID)
    : _receiverACM(NULL),
      _seqNo(0),
      _channelCritSect(CriticalSectionWrapper::CreateCriticalSection()),
      _bitStreamFile(NULL),
      _saveBitStream(false),
      _lastPayloadType(-1),
      _isStereo(false),
      _leftChannel(true),
      _lastInTimestamp(0),
      _packetLoss(0),
      _useFECTestWithPacketLoss(false),
      _beginTime(TickTime::MillisecondTimestamp()),
      _totalBytes(0),
      external_send_timestamp_(-1),
      external_sequence_number_(-1),
      num_packets_to_drop_(0) {
  int n;
  int k;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    _payloadStats[n].payloadType = -1;
    _payloadStats[n].newPacket = true;
    for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
      _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
      _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
      _payloadStats[n].frameSizeStats[k].numPackets = 0;
      _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
      _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
    }
  }
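  // A non-negative channel ID opens a per-channel bitstream dump file. Note
  // that the corresponding fwrite() in SendData() is currently commented out,
  // so nothing is actually written to it.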
  if (chID >= 0) {
    _saveBitStream = true;
    char bitStreamFileName[500];
    sprintf(bitStreamFileName, "bitStream_%d.dat", chID);
    _bitStreamFile = fopen(bitStreamFileName, "wb");
  } else {
    _saveBitStream = false;
  }
}

Channel::~Channel() {
  delete _channelCritSect;
}

void Channel::RegisterReceiverACM(AudioCodingModule* acm) {
  _receiverACM = acm;
  return;
}

void Channel::ResetStats() {
  int n;
  int k;
  _channelCritSect->Enter();
  _lastPayloadType = -1;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    _payloadStats[n].payloadType = -1;
    _payloadStats[n].newPacket = true;
    for (k = 0; k < MAX_NUM_FRAMESIZES; k++) {
      _payloadStats[n].frameSizeStats[k].frameSizeSample = 0;
      _payloadStats[n].frameSizeStats[k].maxPayloadLen = 0;
      _payloadStats[n].frameSizeStats[k].numPackets = 0;
      _payloadStats[n].frameSizeStats[k].totalPayloadLenByte = 0;
      _payloadStats[n].frameSizeStats[k].totalEncodedSamples = 0;
    }
  }
  _beginTime = TickTime::MillisecondTimestamp();
  _totalBytes = 0;
  _channelCritSect->Leave();
}

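// Copies the statistics collected for codecInst's payload type into
// payloadStats and derives, per frame size, the usage duration in seconds
// (encoded samples / sample rate) and the average rate in bits per second.
// Returns -1 if no packets with that payload type have been seen.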
int16_t Channel::Stats(CodecInst& codecInst,
                       ACMTestPayloadStats& payloadStats) {
  _channelCritSect->Enter();
  int n;
  payloadStats.payloadType = -1;
  for (n = 0; n < MAX_NUM_PAYLOADS; n++) {
    if (_payloadStats[n].payloadType == codecInst.pltype) {
      memcpy(&payloadStats, &_payloadStats[n], sizeof(ACMTestPayloadStats));
      break;
    }
  }
  if (payloadStats.payloadType == -1) {
    _channelCritSect->Leave();
    return -1;
  }
  for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
    if (payloadStats.frameSizeStats[n].frameSizeSample == 0) {
      _channelCritSect->Leave();
      return 0;
    }
    payloadStats.frameSizeStats[n].usageLenSec = (double) payloadStats
        .frameSizeStats[n].totalEncodedSamples / (double) codecInst.plfreq;

    payloadStats.frameSizeStats[n].rateBitPerSec =
        payloadStats.frameSizeStats[n].totalPayloadLenByte * 8
            / payloadStats.frameSizeStats[n].usageLenSec;
  }
  _channelCritSect->Leave();
  return 0;
}

void Channel::Stats(uint32_t* numPackets) {
  _channelCritSect->Enter();
  int k;
  int n;
  memset(numPackets, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
  for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
    if (_payloadStats[k].payloadType == -1) {
      break;
    }
    numPackets[k] = 0;
    for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
      if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
        break;
      }
      numPackets[k] += _payloadStats[k].frameSizeStats[n].numPackets;
    }
  }
  _channelCritSect->Leave();
}

void Channel::Stats(uint8_t* payloadType, uint32_t* payloadLenByte) {
  _channelCritSect->Enter();

  int k;
  int n;
  memset(payloadLenByte, 0, MAX_NUM_PAYLOADS * sizeof(uint32_t));
  for (k = 0; k < MAX_NUM_PAYLOADS; k++) {
    if (_payloadStats[k].payloadType == -1) {
      break;
    }
    payloadType[k] = (uint8_t) _payloadStats[k].payloadType;
    payloadLenByte[k] = 0;
    for (n = 0; n < MAX_NUM_FRAMESIZES; n++) {
      if (_payloadStats[k].frameSizeStats[n].frameSizeSample == 0) {
        break;
      }
      // Accumulate the full 32-bit byte count (no truncation to 16 bits).
      payloadLenByte[k] += _payloadStats[k].frameSizeStats[n]
          .totalPayloadLenByte;
    }
  }

  _channelCritSect->Leave();
}

void Channel::PrintStats(CodecInst& codecInst) {
  ACMTestPayloadStats payloadStats;
  Stats(codecInst, payloadStats);
  printf("%s %d kHz\n", codecInst.plname, codecInst.plfreq / 1000);
  printf("=====================================================\n");
  if (payloadStats.payloadType == -1) {
    printf("No packets were sent with payload-type %d (%s)\n\n",
           codecInst.pltype, codecInst.plname);
    return;
  }
  for (int k = 0; k < MAX_NUM_FRAMESIZES; k++) {
    if (payloadStats.frameSizeStats[k].frameSizeSample == 0) {
      break;
    }
    printf("Frame-size.................... %d samples\n",
           payloadStats.frameSizeStats[k].frameSizeSample);
    printf("Average Rate.................. %.0f bits/sec\n",
           payloadStats.frameSizeStats[k].rateBitPerSec);
    printf("Maximum Payload-Size.......... %d Bytes\n",
           payloadStats.frameSizeStats[k].maxPayloadLen);
    printf(
        "Maximum Instantaneous Rate.... %.0f bits/sec\n",
        ((double) payloadStats.frameSizeStats[k].maxPayloadLen * 8.0
            * (double) codecInst.plfreq)
            / (double) payloadStats.frameSizeStats[k].frameSizeSample);
    printf("Number of Packets............. %u\n",
           (unsigned int) payloadStats.frameSizeStats[k].numPackets);
    printf("Duration...................... %0.3f sec\n\n",
           payloadStats.frameSizeStats[k].usageLenSec);
  }
}

uint32_t Channel::LastInTimestamp() {
  uint32_t timestamp;
  _channelCritSect->Enter();
  timestamp = _lastInTimestamp;
  _channelCritSect->Leave();
  return timestamp;
}

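// Average send rate since _beginTime. _totalBytes * 8 divided by the elapsed
// time in milliseconds yields kilobits per second.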
double Channel::BitRate() {
  double rate;
  uint64_t currTime = TickTime::MillisecondTimestamp();
  _channelCritSect->Enter();
  rate = ((double) _totalBytes * 8.0) / (double) (currTime - _beginTime);
  _channelCritSect->Leave();
  return rate;
}

}  // namespace webrtc
