1/*
2 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 *  Use of this source code is governed by a BSD-style license
5 *  that can be found in the LICENSE file in the root of the source
6 *  tree. An additional intellectual property rights grant can be found
7 *  in the file PATENTS.  All contributing project authors may
8 *  be found in the AUTHORS file in the root of the source tree.
9 */
10
11#include "webrtc/engine_configurations.h"
12#if defined(COCOA_RENDERING)
13
14#include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
15#include "webrtc/modules/video_render/mac/video_render_nsopengl.h"
16#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
17#include "webrtc/system_wrappers/interface/event_wrapper.h"
18#include "webrtc/system_wrappers/interface/thread_wrapper.h"
19#include "webrtc/system_wrappers/interface/trace.h"
20
21namespace webrtc {
22
// Creates a rendering channel that draws into |owner|'s OpenGL context.
// All size/placement state starts zeroed; the pixel buffer and GL texture
// are allocated lazily by FrameSizeChange() once the first frame size is
// known. The channel does not take ownership of |nsglContext| or |owner|.
VideoChannelNSOpenGL::VideoChannelNSOpenGL(NSOpenGLContext *nsglContext, int iId, VideoRenderNSOpenGL* owner) :
_nsglContext( nsglContext),
_id( iId),
_owner( owner),
_width( 0),
_height( 0),
_startWidth( 0.0f),
_startHeight( 0.0f),
_stopWidth( 0.0f),
_stopHeight( 0.0f),
_stretchedWidth( 0),
_stretchedHeight( 0),
_oldStretchedHeight( 0),
_oldStretchedWidth( 0),
_buffer( 0),
_bufferSize( 0),
_incommingBufferSize( 0),
_bufferIsUpdated( false),
_numberOfStreams( 0),
_pixelFormat( GL_RGBA),                    // format handed to glTexImage2D
_pixelDataType( GL_UNSIGNED_INT_8_8_8_8),  // matches the ARGB conversion output
_texture( 0)
{

}
48
// Releases the conversion buffer and the GL texture.
// NOTE(review): runs without taking the owner's context lock — the owner is
// assumed to have stopped rendering before destroying channels; confirm
// against VideoRenderNSOpenGL teardown.
VideoChannelNSOpenGL::~VideoChannelNSOpenGL()
{
    if (_buffer)
    {
        delete [] _buffer;
        _buffer = NULL;
    }

    if (_texture != 0)
    {
        // Deleting a texture requires its context to be current on this thread.
        [_nsglContext makeCurrentContext];
        glDeleteTextures(1, (const GLuint*) &_texture);
        _texture = 0;
    }
}
64
// Rebinds this channel to a new OpenGL context and makes it current on the
// calling thread. Always succeeds.
int VideoChannelNSOpenGL::ChangeContext(NSOpenGLContext *nsglContext)
{
    _owner->LockAGLCntx();

    // Existing texture/buffer state is kept; only the context handle changes.
    _nsglContext = nsglContext;
    [_nsglContext makeCurrentContext];

    _owner->UnlockAGLCntx();
    return 0;
}
76
// Copies this channel's normalized (0.0 - 1.0) placement rectangle into the
// caller's out-parameters. Always succeeds.
int32_t VideoChannelNSOpenGL::GetChannelProperties(float& left, float& top,
                                                   float& right, float& bottom)
{
    _owner->LockAGLCntx();

    left   = _startWidth;
    top    = _startHeight;
    right  = _stopWidth;
    bottom = _stopHeight;

    _owner->UnlockAGLCntx();
    return 0;
}
91
// Delivers one decoded frame to the channel, reallocating the buffer and
// texture first if the incoming resolution changed.
// Returns DeliverFrame's result, or -1 if the reallocation failed.
int32_t VideoChannelNSOpenGL::RenderFrame(
  const uint32_t /*streamId*/, I420VideoFrame& videoFrame) {

  _owner->LockAGLCntx();

  const bool sizeChanged = (_width != videoFrame.width()) ||
                           (_height != videoFrame.height());
  if (sizeChanged &&
      FrameSizeChange(videoFrame.width(), videoFrame.height(), 1) == -1) {
    _owner->UnlockAGLCntx();
    return -1;
  }

  const int ret = DeliverFrame(videoFrame);

  _owner->UnlockAGLCntx();
  return ret;
}
109
// Records new frame dimensions under the context lock. Always succeeds.
int VideoChannelNSOpenGL::UpdateSize(int width, int height)
{
    _owner->LockAGLCntx();
    _width = width;
    _height = height;
    _owner->UnlockAGLCntx();
    return 0;
}
118
// Records the current on-screen (stretched) size of the render target.
// Note the parameter order: height first, then width. Always succeeds.
int VideoChannelNSOpenGL::UpdateStretchSize(int stretchHeight, int stretchWidth)
{
    _owner->LockAGLCntx();
    _stretchedHeight = stretchHeight;
    _stretchedWidth = stretchWidth;
    _owner->UnlockAGLCntx();
    return 0;
}
128
// Prepares the conversion buffer and GL texture for a new frame size.
// Returns 0 on success (or if the size is unchanged), -1 on failure.
int VideoChannelNSOpenGL::FrameSizeChange(int width, int height, int numberOfStreams)
{
    //  We got a new frame size from VideoAPI, prepare the buffer

    _owner->LockAGLCntx();

    if (width == _width && _height == height)
    {
        // We already have a correct buffer size
        _numberOfStreams = numberOfStreams;
        _owner->UnlockAGLCntx();
        return 0;
    }

    _width = width;
    _height = height;

    // Delete the old buffer, create a new one with correct size.
    if (_buffer)
    {
        delete [] _buffer;
        _bufferSize = 0;
    }

    _incommingBufferSize = CalcBufferSize(kI420, _width, _height);
    _bufferSize = CalcBufferSize(kARGB, _width, _height);
    _buffer = new unsigned char [_bufferSize];
    memset(_buffer, 0, _bufferSize * sizeof(unsigned char));

    [_nsglContext makeCurrentContext];

    // Drop any previous texture before creating one for the new size.
    if(glIsTexture(_texture))
    {
        glDeleteTextures(1, (const GLuint*) &_texture);
        _texture = 0;
    }

    // Create a new texture
    glGenTextures(1, (GLuint *) &_texture);

    GLenum glErr = glGetError();

    if (glErr != GL_NO_ERROR)
    {
        // BUG FIX: this error was previously swallowed by an empty block.
        // Log it so texture-creation failures are visible in the trace; we
        // still fall through, matching the original control flow.
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
                     "ERROR %d while calling glGenTextures", glErr);
    }

    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);

    // Refuse sizes the GL implementation cannot host in one texture.
    GLint texSize;
    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &texSize);

    if (texSize < _width || texSize < _height)
    {
        _owner->UnlockAGLCntx();
        return -1;
    }

    // Set up the texture type and size
    glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, // target
            0, // level
            GL_RGBA, // internal format
            _width, // width
            _height, // height
            0, // border 0/1 = off/on
            _pixelFormat, // format, GL_RGBA
            _pixelDataType, // data type, GL_UNSIGNED_INT_8_8_8_8
            _buffer); // pixel data

    glErr = glGetError();
    if (glErr != GL_NO_ERROR)
    {
        _owner->UnlockAGLCntx();
        return -1;
    }

    _owner->UnlockAGLCntx();
    return 0;
}
208
// Converts an incoming I420 frame to the texture's pixel layout and uploads
// it to the channel's GL texture. Returns 0 on success (including the no-op
// case where no texture exists yet), -1 on any failure.
int VideoChannelNSOpenGL::DeliverFrame(const I420VideoFrame& videoFrame) {

  _owner->LockAGLCntx();

  // No texture yet means FrameSizeChange() has not run; nothing to draw into.
  if (_texture == 0) {
    _owner->UnlockAGLCntx();
    return 0;
  }

  // Reject frames whose size does not match the buffer allocated by
  // FrameSizeChange(); writing a mismatched frame would overrun _buffer.
  int length = CalcBufferSize(kI420, videoFrame.width(), videoFrame.height());
  if (length != _incommingBufferSize) {
    _owner->UnlockAGLCntx();
    return -1;
  }

  // Using the I420VideoFrame for YV12: YV12 is YVU; I420 assumes
  // YUV.
  // TODO(mikhal) : Use appropriate functionality.
  // TODO(wu): See if we are using glTexSubImage2D correctly.
  int rgbRet = ConvertFromYV12(videoFrame, kBGRA, 0, _buffer);
  if (rgbRet < 0) {
    _owner->UnlockAGLCntx();
    return -1;
  }

  [_nsglContext makeCurrentContext];

  // Make sure this texture is the active one
  glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
  GLenum glErr = glGetError();
  if (glErr != GL_NO_ERROR) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
    "ERROR %d while calling glBindTexture", glErr);
    _owner->UnlockAGLCntx();
    return -1;
  }

  // Upload the converted pixels into the existing texture storage.
  glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT,
                  0, // Level, not use
                  0, // start point x, (low left of pic)
                  0, // start point y,
                  _width, // width
                  _height, // height
                  _pixelFormat, // pictue format for _buffer
                  _pixelDataType, // data type of _buffer
                  (const GLvoid*) _buffer); // the pixel data

  glErr = glGetError();
  if (glErr != GL_NO_ERROR) {
    WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id,
    "ERROR %d while calling glTexSubImage2d", glErr);
    _owner->UnlockAGLCntx();
    return -1;
  }

  // Mark the channel dirty so the screen-update thread repaints it.
  _bufferIsUpdated = true;

  _owner->UnlockAGLCntx();
  return 0;
}
269
// Draws this channel's texture as a quad at its configured placement.
// Returns 0 always (a missing texture is treated as "nothing to draw").
int VideoChannelNSOpenGL::RenderOffScreenBuffer()
{

    _owner->LockAGLCntx();

    if (_texture == 0)
    {
        _owner->UnlockAGLCntx();
        return 0;
    }

    //	if(_fullscreen)
    //	{
    // NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
    //		_width = mainDisplayRect.size.width;
    //		_height = mainDisplayRect.size.height;
    //		glViewport(0, 0, mainDisplayRect.size.width, mainDisplayRect.size.height);
    //		float newX = mainDisplayRect.size.width/_width;
    //		float newY = mainDisplayRect.size.height/_height;

    // convert from 0.0 <= size <= 1.0 to
    // open gl world -1.0 < size < 1.0
    GLfloat xStart = 2.0f * _startWidth - 1.0f;
    GLfloat xStop = 2.0f * _stopWidth - 1.0f;
    GLfloat yStart = 1.0f - 2.0f * _stopHeight;
    GLfloat yStop = 1.0f - 2.0f * _startHeight;

    [_nsglContext makeCurrentContext];

    glBindTexture(GL_TEXTURE_RECTANGLE_EXT, _texture);
    _oldStretchedHeight = _stretchedHeight;
    _oldStretchedWidth = _stretchedWidth;

    glLoadIdentity();
    glEnable(GL_TEXTURE_RECTANGLE_EXT);
    glBegin(GL_POLYGON);
    {
        // GL_TEXTURE_RECTANGLE_EXT uses non-normalized texel coordinates,
        // hence _width/_height rather than 1.0 at the far corners.
        glTexCoord2f(0.0, 0.0); glVertex2f(xStart, yStop);
        glTexCoord2f(_width, 0.0); glVertex2f(xStop, yStop);
        glTexCoord2f(_width, _height); glVertex2f(xStop, yStart);
        glTexCoord2f(0.0, _height); glVertex2f(xStart, yStart);
    }
    glEnd();

    glDisable(GL_TEXTURE_RECTANGLE_EXT);

    // Frame has been painted; clear the dirty flag until the next delivery.
    _bufferIsUpdated = false;

    _owner->UnlockAGLCntx();
    return 0;
}
321
// Reports whether a new frame has been delivered since the last repaint.
// Always returns 0.
int VideoChannelNSOpenGL::IsUpdated(bool& isUpdated)
{
    _owner->LockAGLCntx();
    isUpdated = _bufferIsUpdated;
    _owner->UnlockAGLCntx();
    return 0;
}
331
// Stores the stream's normalized placement, then forces a buffer/texture
// rebuild at the current frame size. Returns FrameSizeChange's result.
int VideoChannelNSOpenGL::SetStreamSettings(int /*streamId*/, float startWidth, float startHeight, float stopWidth, float stopHeight)
{
    _owner->LockAGLCntx();

    _startWidth = startWidth;
    _stopWidth = stopWidth;
    _startHeight = startHeight;
    _stopHeight = stopHeight;

    // Zero the cached dimensions so FrameSizeChange does not take its
    // "size unchanged" early-out, then restore them through the call.
    const int prevWidth = _width;
    const int prevHeight = _height;
    const int prevNumberOfStreams = _numberOfStreams;

    _width = 0;
    _height = 0;

    const int retVal = FrameSizeChange(prevWidth, prevHeight, prevNumberOfStreams);

    _owner->UnlockAGLCntx();
    return retVal;
}
353
// Cropping is not implemented for the NSOpenGL renderer; always fails.
int VideoChannelNSOpenGL::SetStreamCropSettings(int /*streamId*/, float /*startWidth*/, float /*startHeight*/, float /*stopWidth*/, float /*stopHeight*/)
{
    return -1;
}
358
359/*
360 *
361 *    VideoRenderNSOpenGL
362 *
363 */
364
// Creates the renderer for |windowRef|, binding the reference member to a
// heap-allocated critical section (CriticalSectionWrapper::CreateCriticalSection).
// The screen-update thread and timer event are created here but not started;
// Init() starts them.
VideoRenderNSOpenGL::VideoRenderNSOpenGL(CocoaRenderView *windowRef, bool fullScreen, int iId) :
_windowRef( (CocoaRenderView*)windowRef),
_fullScreen( fullScreen),
_id( iId),
_nsglContextCritSec( *CriticalSectionWrapper::CreateCriticalSection()),
_screenUpdateThread( 0),
_screenUpdateEvent( 0),
_nsglContext( 0),
_nsglFullScreenContext( 0),
_fullScreenWindow( nil),
_windowRect( ),
_windowWidth( 0),
_windowHeight( 0),
_nsglChannels( ),
_zOrderToChannel( ),
_threadID (0),
_renderingIsPaused (FALSE),
_windowRefSuperView(NULL),
_windowRefSuperViewFrame(NSMakeRect(0,0,0,0))
{
    _screenUpdateThread = ThreadWrapper::CreateThread(ScreenUpdateThreadProc, this, kRealtimePriority);
    _screenUpdateEvent = EventWrapper::Create();
}
388
// Switches rendering to a new Cocoa view: rebuilds the mixing context and
// rebinds every existing channel to it. Returns 0 on success, -1 otherwise.
int VideoRenderNSOpenGL::ChangeWindow(CocoaRenderView* newWindowRef)
{
    LockAGLCntx();

    _windowRef = newWindowRef;

    if (CreateMixingContext() == -1)
    {
        UnlockAGLCntx();
        return -1;
    }

    // Point every existing channel at the freshly created context.
    int error = 0;
    for (std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
         it != _nsglChannels.end(); ++it)
    {
        error |= it->second->ChangeContext(_nsglContext);
    }

    UnlockAGLCntx();
    return (error != 0) ? -1 : 0;
}
418
419/* Check if the thread and event already exist.
420 * If so then they will simply be restarted
421 * If not then create them and continue
422 */
// Starts (or restarts) rendering.
// If StopRender() previously paused us, the existing thread and timer are
// restarted. Otherwise this only verifies the thread exists — the thread and
// timer are actually started by Init(). NOTE(review): the header comment
// says "create them and continue", but no creation happens here; presumably
// Init() is required first — confirm against callers.
int32_t VideoRenderNSOpenGL::StartRender()
{

    LockAGLCntx();

    const unsigned int MONITOR_FREQ = 60;
    if(TRUE == _renderingIsPaused)
    {
        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "Restarting screenUpdateThread");

        // we already have the thread. Most likely StopRender() was called and they were paused
        if(FALSE == _screenUpdateThread->Start(_threadID) ||
                FALSE == _screenUpdateEvent->StartTimer(true, 1000/MONITOR_FREQ))
        {
            WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "Failed to restart screenUpdateThread or screenUpdateEvent");
            UnlockAGLCntx();
            return -1;
        }

        UnlockAGLCntx();
        return 0;
    }


    if (!_screenUpdateThread)
    {
        WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "failed start screenUpdateThread");
        UnlockAGLCntx();
        return -1;
    }


    UnlockAGLCntx();
    return 0;
}
// Pauses the screen-update thread and its timer event so StartRender() can
// later resume them. Returns 0 on success, -1 if stopping failed.
int32_t VideoRenderNSOpenGL::StopRender()
{
    LockAGLCntx();

    /* The code below is functional
     * but it pauses for several seconds
     */

    // Nothing running: just record the paused state.
    if (!_screenUpdateThread || !_screenUpdateEvent)
    {
        _renderingIsPaused = TRUE;
        UnlockAGLCntx();
        return 0;
    }

    // Stop the thread first; the timer is only stopped if that succeeded
    // (same short-circuit as before).
    const bool stopped = (FALSE != _screenUpdateThread->Stop()) &&
                         (FALSE != _screenUpdateEvent->StopTimer());
    if (!stopped)
    {
        _renderingIsPaused = FALSE;
        UnlockAGLCntx();
        return -1;
    }

    _renderingIsPaused = TRUE;

    UnlockAGLCntx();
    return 0;
}
489
// Intentional no-op: per-view configuration is handled by
// configureNSOpenGLEngine(). Kept for interface symmetry; always returns 0.
int VideoRenderNSOpenGL::configureNSOpenGLView()
{
    return 0;

}
495
// Configures global GL state for 2D video blitting: disables unneeded
// pipeline stages, sets rectangle-texture parameters, sizes the viewport to
// the window, and enables vsync. Returns 0 on success, -1 if the window
// rectangle could not be obtained.
int VideoRenderNSOpenGL::configureNSOpenGLEngine()
{

    LockAGLCntx();

    // Disable not needed functionality to increase performance
    glDisable(GL_DITHER);
    glDisable(GL_ALPHA_TEST);
    glDisable(GL_STENCIL_TEST);
    glDisable(GL_FOG);
    glDisable(GL_TEXTURE_2D);
    glPixelZoom(1.0, 1.0);
    glDisable(GL_BLEND);
    glDisable(GL_DEPTH_TEST);
    glDepthMask(GL_FALSE);
    glDisable(GL_CULL_FACE);

    // Set texture parameters
    glTexParameterf(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_PRIORITY, 1.0);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);

    if (GetWindowRect(_windowRect) == -1)
    {
        UnlockAGLCntx();
        // BUG FIX: this returned `true` (i.e. 1) from an int function; use
        // -1 to match the file's error convention. The only visible caller
        // (CreateMixingContext) ignores the return value.
        return -1;
    }

    if (_windowWidth != (_windowRect.right - _windowRect.left)
            || _windowHeight != (_windowRect.bottom - _windowRect.top))
    {
        _windowWidth = _windowRect.right - _windowRect.left;
        _windowHeight = _windowRect.bottom - _windowRect.top;
    }
    glViewport(0, 0, _windowWidth, _windowHeight);

    // Synchronize buffer swaps with vertical refresh rate
    GLint swapInt = 1;
    [_nsglContext setValues:&swapInt forParameter:NSOpenGLCPSwapInterval];

    UnlockAGLCntx();
    return 0;
}
544
// Initializes the windowed (non-fullscreen) render target: builds a pixel
// format, hands it to the Cocoa view, adopts the view's GL context, and
// clears the screen to black. Returns 0 on success, -1 if no view is set.
int VideoRenderNSOpenGL::setRenderTargetWindow()
{
    LockAGLCntx();

    // 24-bit color, 8-bit alpha, 16-bit depth, hardware accelerated.
    GLuint attribs[] =
    {
        NSOpenGLPFAColorSize, 24,
        NSOpenGLPFAAlphaSize, 8,
        NSOpenGLPFADepthSize, 16,
        NSOpenGLPFAAccelerated,
        0
    };

    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
                          (NSOpenGLPixelFormatAttribute*) attribs] autorelease];

    if (!_windowRef)
    {
        UnlockAGLCntx();
        return -1;
    }
    [_windowRef initCocoaRenderView:fmt];

    _nsglContext = [_windowRef nsOpenGLContext];
    [_nsglContext makeCurrentContext];

    // Start from a black frame.
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    DisplayBuffers();

    UnlockAGLCntx();
    return 0;
}
584
// Initializes the fullscreen render target: remembers the view's original
// superview/frame (restored in the destructor), resizes the view to the main
// screen, reparents it into a CocoaFullScreenWindow, and adopts its GL
// context. Returns 0 on success, -1 if no view is set.
int VideoRenderNSOpenGL::setRenderTargetFullScreen()
{
    LockAGLCntx();


    // 24-bit color, 8-bit alpha, 16-bit depth, hardware accelerated.
    GLuint attribs[] =
    {
        NSOpenGLPFAColorSize, 24,
        NSOpenGLPFAAlphaSize, 8,
        NSOpenGLPFADepthSize, 16,
        NSOpenGLPFAAccelerated,
        0
    };

    NSOpenGLPixelFormat* fmt = [[[NSOpenGLPixelFormat alloc] initWithAttributes:
                          (NSOpenGLPixelFormatAttribute*) attribs] autorelease];

    // Store original superview and frame for use when exiting full screens
    _windowRefSuperViewFrame = [_windowRef frame];
    _windowRefSuperView = [_windowRef superview];


    // create new fullscreen window
    NSRect screenRect = [[NSScreen mainScreen]frame];
    [_windowRef setFrame:screenRect];
    [_windowRef setBounds:screenRect];


    // NOTE(review): _windowRef is messaged above before the nil check below;
    // messaging nil is a no-op in Objective-C, so this is safe but the check
    // arguably belongs earlier.
    _fullScreenWindow = [[CocoaFullScreenWindow alloc]init];
    [_fullScreenWindow grabFullScreen];
    [[[_fullScreenWindow window] contentView] addSubview:_windowRef];

    if(_windowRef)
    {
        [_windowRef initCocoaRenderViewFullScreen:fmt];
    }
    else
    {
        UnlockAGLCntx();
        return -1;
    }

    _nsglContext = [_windowRef nsOpenGLContext];
    [_nsglContext makeCurrentContext];

    // Start from a black frame.
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    DisplayBuffers();

    UnlockAGLCntx();
    return 0;
}
638
// Tears down the renderer: restores the view from fullscreen, stops and
// deletes the screen-update thread and event, releases all channels, and
// frees the critical section allocated in the constructor.
VideoRenderNSOpenGL::~VideoRenderNSOpenGL()
{

    if(_fullScreen)
    {
        if(_fullScreenWindow)
        {
            // Detach CocoaRenderView from full screen view back to
            // it's original parent.
            [_windowRef removeFromSuperview];
            if(_windowRefSuperView)
            {
              [_windowRefSuperView addSubview:_windowRef];
              [_windowRef setFrame:_windowRefSuperViewFrame];
            }

            WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, 0, "%s:%d Attempting to release fullscreen window", __FUNCTION__, __LINE__);
            [_fullScreenWindow releaseFullScreen];

        }
    }

    // Signal event to exit thread, then delete it
    ThreadWrapper* tmpPtr = _screenUpdateThread;
    _screenUpdateThread = NULL;

    if (tmpPtr)
    {
        tmpPtr->SetNotAlive();
        _screenUpdateEvent->Set();
        _screenUpdateEvent->StopTimer();

        if (tmpPtr->Stop())
        {
            delete tmpPtr;
        }
        delete _screenUpdateEvent;
        _screenUpdateEvent = NULL;
    }

    if (_nsglContext != 0)
    {
        [_nsglContext makeCurrentContext];
        _nsglContext = nil;
    }

    // Delete all channels, then drop both lookup maps.
    for (std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
         it != _nsglChannels.end(); ++it)
    {
        delete it->second;
    }
    _nsglChannels.clear();
    _zOrderToChannel.clear();

    // BUG FIX: the constructor binds _nsglContextCritSec to an object
    // heap-allocated by CriticalSectionWrapper::CreateCriticalSection(),
    // which was never freed. Release it last, after all locking is done.
    delete &_nsglContextCritSec;
}
705
/* static */
// Version query is not implemented on this platform; always fails and
// leaves the out-parameters untouched.
int VideoRenderNSOpenGL::GetOpenGLVersion(int& /*nsglMajor*/, int& /*nsglMinor*/)
{
    return -1;
}
711
// Starts the screen-update thread and its timer event, then creates the GL
// mixing context. Returns 0 on success, -1 if the thread is missing or the
// context could not be created.
int VideoRenderNSOpenGL::Init()
{
    LockAGLCntx();

    if (!_screenUpdateThread)
    {
        UnlockAGLCntx();
        return -1;
    }

    _screenUpdateThread->Start(_threadID);

    // Fire the render event at roughly the monitor refresh rate.
    const unsigned int monitorFreq = 60;
    _screenUpdateEvent->StartTimer(true, 1000 / monitorFreq);

    const int ret = (CreateMixingContext() == -1) ? -1 : 0;

    UnlockAGLCntx();
    return ret;
}
737
// Creates and registers a new channel at |zOrder| with the given normalized
// placement. Returns the channel, or NULL if |channel| already exists or the
// stream settings could not be applied.
VideoChannelNSOpenGL* VideoRenderNSOpenGL::CreateNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    if (HasChannel(channel))
    {
        return NULL;
    }

    if (_zOrderToChannel.find(zOrder) != _zOrderToChannel.end())
    {
        // BUG FIX: this condition was previously detected and then silently
        // ignored with an empty block. The multimap insert below still
        // succeeds (duplicate keys are allowed); log the collision so it is
        // visible in the trace.
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "%s zOrder %d already in use", __FUNCTION__, zOrder);
    }

    VideoChannelNSOpenGL* newAGLChannel = new VideoChannelNSOpenGL(_nsglContext, _id, this);
    if (newAGLChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
    {
        // (Removed the redundant null check: operator new never returns NULL
        // here, it throws on failure.)
        delete newAGLChannel;
        return NULL;
    }

    _nsglChannels[channel] = newAGLChannel;
    _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));

    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s successfully created NSGL channel number %d", __FUNCTION__, channel);

    return newAGLChannel;
}
771
// Deletes every registered channel and empties the channel map.
// Always returns 0. (The zOrder map is intentionally left to the caller,
// matching the original behavior.)
int VideoRenderNSOpenGL::DeleteAllNSGLChannels()
{

    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it;
    for (it = _nsglChannels.begin(); it != _nsglChannels.end(); ++it)
    {
        // BUG FIX: the old trace passed the VideoChannelNSOpenGL pointer to
        // the %d specifier (undefined behavior in the varargs call); log the
        // channel id (the map key) instead.
        WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Deleting channel %d", __FUNCTION__, it->first);
        delete it->second;
    }
    _nsglChannels.clear();
    return 0;
}
790
// Deletes one channel and removes its zOrder entry.
// Returns 0 on success, -1 if the channel does not exist.
int32_t VideoRenderNSOpenGL::DeleteNSGLChannel(const uint32_t channel)
{
    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);
    if (it == _nsglChannels.end())
    {
        return -1;
    }
    delete it->second;
    _nsglChannels.erase(it);

    // Drop the first zOrder entry that refers to this channel, if any.
    for (std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
         zIt != _zOrderToChannel.end(); ++zIt)
    {
        if (zIt->second == (int)channel)
        {
            _zOrderToChannel.erase(zIt);
            break;
        }
    }

    return 0;
}
821
// Looks up |streamId| and returns its zOrder and normalized placement.
// Returns 0 on success, -1 if the stream is unknown.
int32_t VideoRenderNSOpenGL::GetChannelProperties(const uint16_t streamId,
                                                  uint32_t& zOrder,
                                                  float& left,
                                                  float& top,
                                                  float& right,
                                                  float& bottom)
{

    CriticalSectionScoped cs(&_nsglContextCritSec);

    // _zOrderToChannel maps zOrder (key) -> channel id (value). Walk it from
    // the highest zOrder down until the requested stream is found.
    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
            rIt != _zOrderToChannel.rend();
            rIt++)
    {
        if(streamId == rIt->second)
        {
            // BUG FIX: the zOrder is the map key (rIt->first); the old code
            // assigned the channel id (rIt->second) here.
            zOrder = rIt->first;

            // BUG FIX: renamed the inner iterator, which previously shadowed
            // the outer rIt, and guard against a missing channel entry
            // before dereferencing find()'s result.
            std::map<int, VideoChannelNSOpenGL*>::iterator cIt =
                _nsglChannels.find(streamId);
            if (cIt == _nsglChannels.end())
            {
                return -1;
            }

            VideoChannelNSOpenGL* tempChannel = cIt->second;
            if(-1 == tempChannel->GetChannelProperties(left, top, right, bottom) )
            {
                return -1;
            }
            return 0;
        }
    }

    // Stream id not present in any zOrder entry.
    return -1;
}
866
// Permanently stops and deletes the screen-update thread and its event
// (unlike StopRender, which only pauses them). Always returns 0.
int VideoRenderNSOpenGL::StopThread()
{

    ThreadWrapper* tmpPtr = _screenUpdateThread;
    // BUG FIX: the old trace passed _screenUpdateThread as an extra argument
    // with no matching format specifier; log it as a pointer explicitly.
    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id,
                 "%s Stopping thread %p", __FUNCTION__, _screenUpdateThread);
    _screenUpdateThread = NULL;

    if (tmpPtr)
    {
        tmpPtr->SetNotAlive();
        // Guard: the event may already have been deleted by an earlier call.
        if (_screenUpdateEvent)
        {
            _screenUpdateEvent->Set();
        }
        if (tmpPtr->Stop())
        {
            delete tmpPtr;
        }
    }

    delete _screenUpdateEvent;   // delete on NULL is a safe no-op
    _screenUpdateEvent = NULL;

    return 0;
}
889
// Returns whether this renderer was created in fullscreen mode.
bool VideoRenderNSOpenGL::IsFullScreen()
{
    CriticalSectionScoped lock(&_nsglContextCritSec);
    return _fullScreen;
}
896
// Returns true if at least one channel is registered.
bool VideoRenderNSOpenGL::HasChannels()
{
    CriticalSectionScoped lock(&_nsglContextCritSec);
    return !_nsglChannels.empty();
}
907
// Returns true if a channel with the given id is registered.
bool VideoRenderNSOpenGL::HasChannel(int channel)
{
    CriticalSectionScoped lock(&_nsglContextCritSec);
    return _nsglChannels.find(channel) != _nsglChannels.end();
}
921
// Appends every registered channel id to |channelList|. Always returns 0.
int VideoRenderNSOpenGL::GetChannels(std::list<int>& channelList)
{
    CriticalSectionScoped lock(&_nsglContextCritSec);

    for (std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
         it != _nsglChannels.end(); ++it)
    {
        channelList.push_back(it->first);
    }

    return 0;
}
937
// Reconfigures an existing channel's placement and zOrder.
// Returns the channel on success, NULL if it does not exist or the stream
// settings could not be applied.
VideoChannelNSOpenGL* VideoRenderNSOpenGL::ConfigureNSGLChannel(int channel, int zOrder, float startWidth, float startHeight, float stopWidth, float stopHeight)
{

    CriticalSectionScoped cs(&_nsglContextCritSec);

    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channel);

    if (it == _nsglChannels.end())
    {
        return NULL;
    }

    VideoChannelNSOpenGL* aglChannel = it->second;
    // BUG FIX (both traces below): the old format strings used %d for the
    // float arguments and contained one more specifier than arguments —
    // undefined behavior in the varargs call. Floats promote to double, so
    // %f is the correct specifier, and the counts now match.
    if (aglChannel->SetStreamSettings(0, startWidth, startHeight, stopWidth, stopHeight) == -1)
    {
        WEBRTC_TRACE(kTraceError, kTraceVideoRenderer, _id, "%s failed to set stream settings: channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
                __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);
        return NULL;
    }
    WEBRTC_TRACE(kTraceInfo, kTraceVideoRenderer, _id, "%s Configuring channel=%d zOrder=%d startWidth=%f startHeight=%f stopWidth=%f stopHeight=%f",
            __FUNCTION__, channel, zOrder, startWidth, startHeight, stopWidth, stopHeight);

    // Re-key the channel's zOrder entry if the zOrder changed. (Renamed the
    // iterator, which previously shadowed the outer `it`.)
    for (std::multimap<int, int>::iterator zIt = _zOrderToChannel.begin();
         zIt != _zOrderToChannel.end(); ++zIt)
    {
        if (zIt->second == channel)
        {
            if (zIt->first != zOrder)
            {
                _zOrderToChannel.erase(zIt);
                _zOrderToChannel.insert(std::pair<int, int>(zOrder, channel));
            }
            break;
        }
    }
    return aglChannel;
}
976
977/*
978 *
979 *    Rendering process
980 *
981 */
982
// Thread entry point: forwards to the instance's ScreenUpdateProcess().
// |obj| is the VideoRenderNSOpenGL* passed to ThreadWrapper::CreateThread.
bool VideoRenderNSOpenGL::ScreenUpdateThreadProc(void* obj)
{
    return static_cast<VideoRenderNSOpenGL*>(obj)->ScreenUpdateProcess();
}
987
// One iteration of the render loop, driven by _screenUpdateEvent.
// Tracks window resizes, then repaints if any channel has a new frame.
// Returns true to keep the thread running; false only when the thread has
// been torn down (_screenUpdateThread cleared by StopThread/destructor).
bool VideoRenderNSOpenGL::ScreenUpdateProcess()
{

    // Wake up on the timer event or after 10 ms, whichever comes first.
    _screenUpdateEvent->Wait(10);
    LockAGLCntx();

    if (!_screenUpdateThread)
    {
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id, "%s no screen update thread", __FUNCTION__);
        UnlockAGLCntx();
        return false;
    }

    [_nsglContext makeCurrentContext];

    // No window rect (e.g. no window yet) is not fatal: skip this iteration.
    if (GetWindowRect(_windowRect) == -1)
    {
        UnlockAGLCntx();
        return true;
    }

    // Resize the GL viewport if the window size changed since last pass.
    if (_windowWidth != (_windowRect.right - _windowRect.left)
            || _windowHeight != (_windowRect.bottom - _windowRect.top))
    {
        _windowWidth = _windowRect.right - _windowRect.left;
        _windowHeight = _windowRect.bottom - _windowRect.top;
        glViewport(0, 0, _windowWidth, _windowHeight);
    }

    // Check if there are any updated buffers
    bool updated = false;
    std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.begin();
    while (it != _nsglChannels.end())
    {

        VideoChannelNSOpenGL* aglChannel = it->second;
        aglChannel->UpdateStretchSize(_windowHeight, _windowWidth);
        aglChannel->IsUpdated(updated);
        if (updated)
        {
            break;
        }
        it++;
    }

    if (updated)
    {

        // At least on buffers is updated, we need to repaint the texture
        if (RenderOffScreenBuffers() != -1)
        {
            UnlockAGLCntx();
            return true;
        }
    }
    //    }
    UnlockAGLCntx();
    return true;
}
1047
1048/*
1049 *
1050 *    Functions for creating mixing buffers and screen settings
1051 *
1052 */
1053
// Creates the GL context for the current target (fullscreen or windowed)
// and applies the engine configuration. Returns 0 on success, -1 if the
// render target could not be set up.
int VideoRenderNSOpenGL::CreateMixingContext()
{

    CriticalSectionScoped cs(&_nsglContextCritSec);

    if(_fullScreen)
    {
        if(-1 == setRenderTargetFullScreen())
        {
            return -1;
        }
    }
    else
    {

        if(-1 == setRenderTargetWindow())
        {
            return -1;
        }
    }

    configureNSOpenGLEngine();

    DisplayBuffers();

    GLenum glErr = glGetError();
    if (glErr)
    {
        // BUG FIX: this error was previously swallowed by an empty block.
        // Log it; we still return success, matching the original behavior.
        WEBRTC_TRACE(kTraceWarning, kTraceVideoRenderer, _id,
                     "ERROR %d while creating mixing context", glErr);
    }

    return 0;
}
1086
1087/*
1088 *
1089 *    Rendering functions
1090 *
1091 */
1092
// Clears the screen and repaints every channel from highest to lowest
// zOrder, then swaps buffers. Returns 0 on success, -1 if the window
// rectangle could not be obtained.
int VideoRenderNSOpenGL::RenderOffScreenBuffers()
{
    LockAGLCntx();

    // Get the current window size, it might have changed since last render.
    if (GetWindowRect(_windowRect) == -1)
    {
        UnlockAGLCntx();
        return -1;
    }

    [_nsglContext makeCurrentContext];
    glClear(GL_COLOR_BUFFER_BIT);

    // Loop through all channels starting highest zOrder ending with lowest.
    for (std::multimap<int, int>::reverse_iterator rIt = _zOrderToChannel.rbegin();
            rIt != _zOrderToChannel.rend();
            rIt++)
    {
        int channelId = rIt->second;
        std::map<int, VideoChannelNSOpenGL*>::iterator it = _nsglChannels.find(channelId);

        // BUG FIX: a zOrder entry with no matching channel would previously
        // dereference end(); skip stale ids instead of crashing.
        if (it == _nsglChannels.end())
        {
            continue;
        }

        it->second->RenderOffScreenBuffer();
    }

    DisplayBuffers();

    UnlockAGLCntx();
    return 0;
}
1125
1126/*
1127 *
1128 * Help functions
1129 *
1130 * All help functions assumes external protections
1131 *
1132 */
1133
// Waits for all queued GL commands to finish, then swaps the context's
// buffers to present the frame. Always returns 0.
int VideoRenderNSOpenGL::DisplayBuffers()
{
    LockAGLCntx();

    glFinish();
    [_nsglContext flushBuffer];

    WEBRTC_TRACE(kTraceDebug, kTraceVideoRenderer, _id, "%s glFinish and [_nsglContext flushBuffer]", __FUNCTION__);

    UnlockAGLCntx();
    return 0;
}
1147
// Fills |rect| with the render view's rectangle: the full main display when
// fullscreen, otherwise the view's frame in superview coordinates.
// Returns 0 on success, -1 if there is no view.
int VideoRenderNSOpenGL::GetWindowRect(Rect& rect)
{
    CriticalSectionScoped lock(&_nsglContextCritSec);

    if (!_windowRef)
    {
        return -1;
    }

    if (_fullScreen)
    {
        NSRect mainDisplayRect = [[NSScreen mainScreen] frame];
        rect.bottom = 0;
        rect.left = 0;
        rect.right = mainDisplayRect.size.width;
        rect.top = mainDisplayRect.size.height;
    }
    else
    {
        NSRect frame = [_windowRef frame];
        rect.top = frame.origin.y;
        rect.left = frame.origin.x;
        rect.bottom = frame.origin.y + frame.size.height;
        rect.right = frame.origin.x + frame.size.width;
    }

    return 0;
}
1178
// Updates the trace/module id used for logging. Always returns 0.
int32_t VideoRenderNSOpenGL::ChangeUniqueID(int32_t id)
{
    CriticalSectionScoped lock(&_nsglContextCritSec);
    _id = id;
    return 0;
}
1186
// Text overlays are not implemented for the NSOpenGL renderer; this stub
// accepts and ignores all parameters, reporting success.
int32_t VideoRenderNSOpenGL::SetText(const uint8_t /*textId*/,
                                     const uint8_t* /*text*/,
                                     const int32_t /*textLength*/,
                                     const uint32_t /*textColorRef*/,
                                     const uint32_t /*backgroundColorRef*/,
                                     const float /*left*/,
                                     const float /*top*/,
                                     const float /*right*/,
                                     const float /*bottom*/)
{

    return 0;

}
1201
// Acquires the context critical section (also used by channels via _owner).
void VideoRenderNSOpenGL::LockAGLCntx()
{
    _nsglContextCritSec.Enter();
}
// Releases the context critical section acquired by LockAGLCntx().
void VideoRenderNSOpenGL::UnlockAGLCntx()
{
    _nsglContextCritSec.Leave();
}
1210
1211/*
1212
1213 bool VideoRenderNSOpenGL::SetFullScreen(bool fullscreen)
1214 {
1215 NSRect mainDisplayRect, viewRect;
1216
1217 // Create a screen-sized window on the display you want to take over
1218 // Note, mainDisplayRect has a non-zero origin if the key window is on a secondary display
1219 mainDisplayRect = [[NSScreen mainScreen] frame];
1220 fullScreenWindow = [[NSWindow alloc] initWithContentRect:mainDisplayRect styleMask:NSBorderlessWindowMask
1221 backing:NSBackingStoreBuffered defer:YES];
1222
1223 // Set the window level to be above the menu bar
1224 [fullScreenWindow setLevel:NSMainMenuWindowLevel+1];
1225
1226 // Perform any other window configuration you desire
1227 [fullScreenWindow setOpaque:YES];
1228 [fullScreenWindow setHidesOnDeactivate:YES];
1229
1230 // Create a view with a double-buffered OpenGL context and attach it to the window
1231 // By specifying the non-fullscreen context as the shareContext, we automatically inherit the OpenGL objects (textures, etc) it has defined
1232 viewRect = NSMakeRect(0.0, 0.0, mainDisplayRect.size.width, mainDisplayRect.size.height);
1233 fullScreenView = [[MyOpenGLView alloc] initWithFrame:viewRect shareContext:[openGLView openGLContext]];
1234 [fullScreenWindow setContentView:fullScreenView];
1235
1236 // Show the window
1237 [fullScreenWindow makeKeyAndOrderFront:self];
1238
1239 // Set the scene with the full-screen viewport and viewing transformation
1240 [scene setViewportRect:viewRect];
1241
1242 // Assign the view's MainController to self
1243 [fullScreenView setMainController:self];
1244
1245 if (!isAnimating) {
1246 // Mark the view as needing drawing to initalize its contents
1247 [fullScreenView setNeedsDisplay:YES];
1248 }
1249 else {
1250 // Start playing the animation
1251 [fullScreenView startAnimation];
1252 }
1253
1254 }
1255
1256
1257
1258 */
1259
1260
1261}  // namespace webrtc
1262
1263#endif // COCOA_RENDERING
1264