source: 3DVCSoftware/trunk/source/App/TAppRenderer/TAppRendererTop.cpp @ 1144

Last change on this file since 1144 was 608, checked in by tech, 11 years ago

Merged DEV-2.0-dev0@604.

  • Property svn:eol-style set to native
File size: 35.1 KB
/* The copyright in this software is being made available under the BSD
 * License, included below. This software may be subject to other third party
 * and contributor rights, including patent rights, and no such rights are
 * granted under this license.
 *
 * Copyright (c) 2010-2011, ISO/IEC
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *  * Neither the name of the ISO/IEC nor the names of its contributors may
 *    be used to endorse or promote products derived from this software without
 *    specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */


#include <list>
#include <stdio.h>
#include <fcntl.h>
#include <assert.h>
#include <math.h>

#include "TAppRendererTop.h"

#if H_3D

// ====================================================================================================================
// Constructor / destructor / initialization / destroy
// ====================================================================================================================

TAppRendererTop::TAppRendererTop()
{

}

TAppRendererTop::~TAppRendererTop()
{

}

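// Opens one YUV reader for the texture and one for the depth of every input view
// (skipping the first m_iFrameSkip frames up front) and one YUV writer per
// synthesized output view; the TVideoIOYuv objects are kept in the
// m_apcTVideoIOYuv* vectors and released again in xDestroyLib().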
Void TAppRendererTop::xCreateLib()
{
  m_pcRenTop = new TRenTop();

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfInputViews; iViewIdx++)
  {
    TVideoIOYuv* pcVideoInput = new TVideoIOYuv;
    TVideoIOYuv* pcDepthInput = new TVideoIOYuv;

    pcVideoInput->open( m_pchVideoInputFileList[iViewIdx], false, m_inputBitDepthY, m_inputBitDepthC, m_internalBitDepthY, m_internalBitDepthC );  // read mode
    pcDepthInput->open( m_pchDepthInputFileList[iViewIdx], false, m_inputBitDepthY, m_inputBitDepthC, m_internalBitDepthY, m_internalBitDepthC );  // read mode
    pcVideoInput->skipFrames(m_iFrameSkip, m_iSourceWidth, m_iSourceHeight );
    pcDepthInput->skipFrames(m_iFrameSkip, m_iSourceWidth, m_iSourceHeight );

    m_apcTVideoIOYuvVideoInput.push_back( pcVideoInput );
    m_apcTVideoIOYuvDepthInput.push_back( pcDepthInput );
  }

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfOutputViews; iViewIdx++)
  {
    TVideoIOYuv* pcSynthOutput = new TVideoIOYuv;
    pcSynthOutput->open( m_pchSynthOutputFileList[iViewIdx], true, m_outputBitDepthY, m_outputBitDepthC, m_internalBitDepthY, m_internalBitDepthC );  // write mode
    m_apcTVideoIOYuvSynthOutput.push_back( pcSynthOutput );
  }
}


Void TAppRendererTop::xDestroyLib()
{
  delete m_pcRenTop;

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvVideoInput[iViewIdx]->close();
    m_apcTVideoIOYuvDepthInput[iViewIdx]->close();

    delete m_apcTVideoIOYuvDepthInput[iViewIdx];
    delete m_apcTVideoIOYuvVideoInput[iViewIdx];
  }

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfOutputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvSynthOutput[iViewIdx]->close();
    delete m_apcTVideoIOYuvSynthOutput[iViewIdx];
  }
}

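// Forwards the renderer configuration (sampling factor, shift precision, blending,
// interpolation, hole-filling and post-processing settings) to the TRenTop instance.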
Void TAppRendererTop::xInitLib()
{
  m_pcRenTop->init(
    m_iSourceWidth,
    m_iSourceHeight,
    (m_iRenderDirection != 0),
    m_iLog2SamplingFactor,
    m_iLog2SamplingFactor+m_iShiftPrecision,
    m_bUVUp,
    m_iPreProcMode,
    m_iPreFilterSize,
    m_iBlendMode,
    m_iBlendZThresPerc,
    m_bBlendUseDistWeight,
    m_iBlendHoleMargin,
    m_iInterpolationMode,
    m_iHoleFillingMode,
    m_iPostProcMode,
    m_iUsedPelMapMarExt
  );
}

// ====================================================================================================================
// Public member functions
// ====================================================================================================================

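// Plain rendering path (RenderMode 0): per frame, the texture and depth of all input
// views are read (and optionally run through the VSRS temporal depth filter), and each
// output view is then either interpolated from its left and right base views or
// extrapolated from a single base view, depending on m_iRenderDirection.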
Void TAppRendererTop::render()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // Temporal improvement filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    // Temporal improvement filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)iFrame - m_iFrameSkip );

    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );
      Bool bRender   = true;

      Int  iBlendMode = m_iBlendMode;
      Int  iSimEnhBaseView = 0;

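      // m_iRenderDirection selects the synthesis mode: 0 = interpolate between the
      // left and right base views, 1 = extrapolate from the left base view,
      // 2 = extrapolate from the right base view.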
      switch( m_iRenderDirection )
      {
      /// INTERPOLATION
      case 0:
        AOF( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView && m_iBlendMode == 0 )
        {
          bRender = false;
        }
        else
        {
          if ( bIsBaseView )
          {
            AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
            Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];

            if ( m_iBlendMode == 1 )
            {
              if ( iSortedBaseViewIdx - 1 >= 0 )
              {
                iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
            else if ( m_iBlendMode == 2 )
            {
              if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
              {
                iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
          }

          if ( m_iBlendMode == 3 )
          {
            if ( bIsBaseView && (iLeftBaseViewIdx == 0) )
            {
              bRender = false;
            }
            else
            {
              Int iDistLeft  = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx ] );
              Int iDistRight = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iRightBaseViewIdx] );

              Int iFillViewIdx = iDistLeft > iDistRight ? iLeftBaseViewIdx : iRightBaseViewIdx;

              if( m_cCameraData.getBaseId2SortedId()[0] < m_cCameraData.getBaseId2SortedId() [iFillViewIdx] )
              {
                iBlendMode        = 1;
                iLeftBaseViewIdx  = 0;
                iRightBaseViewIdx = iFillViewIdx;
              }
              else
              {
                iBlendMode        = 2;
                iLeftBaseViewIdx  = iFillViewIdx;
                iRightBaseViewIdx = 0;
              }
            }
          }
          else
          {
            iBlendMode = m_iBlendMode;
          }
        }

        if ( m_bSimEnhance )
        {
          if ( m_iNumberOfInputViews == 3 && m_cCameraData.getRelSynthViewNumbers()[ iSynthViewIdx ] < VIEW_NUM_PREC )
          {
            iSimEnhBaseView = 2; // Take middle view
          }
          else
          {
            iSimEnhBaseView = 1; // Take left view
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame "    << iFrame
                    << " of View "           << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx    ] / VIEW_NUM_PREC
                    << "   Left BaseView: "  << (Double) m_cCameraData.getBaseViewNumbers() [iLeftBaseViewIdx ] / VIEW_NUM_PREC
                    << "   Right BaseView: " << (Double) m_cCameraData.getBaseViewNumbers() [iRightBaseViewIdx] / VIEW_NUM_PREC
                    << "   BlendMode: "      << iBlendMode
                    << std::endl;

          m_pcRenTop->setShiftLUTs(
            m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
            m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx ],
            iRelDistToLeft
          );

          m_pcRenTop->interpolateView(
            apcPicYuvBaseVideo[iLeftBaseViewIdx ],
            apcPicYuvBaseDepth[iLeftBaseViewIdx ],
            apcPicYuvBaseVideo[iRightBaseViewIdx],
            apcPicYuvBaseDepth[iRightBaseViewIdx],
            pcPicYuvSynthOut,
            iBlendMode,
            iSimEnhBaseView
          );
        }
        else
        {
          AOT( iLeftBaseViewIdx != iRightBaseViewIdx );
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut );
          std::cout << "Copied    Frame " << iFrame
                    << " of View "        << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC
                    << "   (BaseView)  "  << std::endl;
        }

        break;
      /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx - 1 >= 0 )
          {
            iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if (bRender)
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          m_pcRenTop->setShiftLUTs( m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, NULL, -1 );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iLeftBaseViewIdx ], apcPicYuvBaseDepth[iLeftBaseViewIdx ], pcPicYuvSynthOut, true );
        }
        break;
      /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
          {
            iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          m_pcRenTop->setShiftLUTs( NULL, NULL, NULL, m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, iRelDistToLeft );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iRightBaseViewIdx ], apcPicYuvBaseDepth[iRightBaseViewIdx ], pcPicYuvSynthOut, false );
        }
        break;
      }

      // Write Output
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }

  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();
}

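// Entry point: dispatches on m_iRenderMode (0 = regular rendering, 1 = render-model
// rendering when compiled with H_3D_VSO, 10 = used-pels map); other values are an error.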
Void TAppRendererTop::go()
{
  switch ( m_iRenderMode )
  {
  case 0:
    render();
    break;
#if H_3D_VSO
  case 1:
    renderModel();
    break;
#endif
  case 10:
    renderUsedPelsMap( );
    break;
  default:
    AOT(true);
  }

#if H_3D_REN_MAX_DEV_OUT
  Double dMaxDispDiff = m_cCameraData.getMaxShiftDeviation();

  if ( !(dMaxDispDiff < 0) )
  {
    printf("\n Max. possible shift error: %12.3f samples.\n", dMaxDispDiff );
  }
#endif
}

#if H_3D_VSO
Void TAppRendererTop::renderModel()
{
  if ( m_bUseSetupString )
  {
    xRenderModelFromString();
  }
  else
  {
    xRenderModelFromNums();
  }
}

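// Renders through the TRenModel using the configuration parsed from the render-model
// setup string (m_cRenModStrParser): one render model is created per parsed entry,
// fed each frame with the base-view texture and depth, and the synthesized views are
// written to the output files.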
Void TAppRendererTop::xRenderModelFromString()
{
    xCreateLib();
    xInitLib();

    // Create buffers for input views
    std::vector<TComPicYuv*> apcPicYuvBaseVideo;
    std::vector<TComPicYuv*> apcPicYuvBaseDepth;

    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      TComPicYuv* pcNewVideoPic = new TComPicYuv;
      TComPicYuv* pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseDepth.push_back(pcNewDepthPic);
    }

    Int aiPad[2] = { 0, 0 };

    // Init Model
    TRenModel cCurModel;

    AOT( m_iLog2SamplingFactor != 0 );
#if H_3D_VSO_EARLY_SKIP
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif

    cCurModel.setupPart( 0, m_iSourceHeight );

    for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      Int iNumOfModels   = m_cRenModStrParser.getNumOfModelsForView(iViewIdx, 1);

      for (Int iCurModel = 0; iCurModel < iNumOfModels; iCurModel++ )
      {
        Int iModelNum; Int iLeftViewNum; Int iRightViewNum; Int iDump; Int iOrgRefNum; Int iBlendMode;
        m_cRenModStrParser.getSingleModelData  ( iViewIdx, 1, iCurModel, iModelNum, iBlendMode, iLeftViewNum, iRightViewNum, iOrgRefNum, iDump );
        cCurModel         .createSingleModel   ( iViewIdx, 1, iModelNum, iLeftViewNum, iRightViewNum, false, iBlendMode );
      }
    }

    // Create Buffer for synthesized View
    TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
    pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

    Bool bAnyEOS = false;

    Int iNumOfRenderedFrames = 0;
    Int iFrame = 0;

    while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
    {
      if ( iFrame >= m_iFrameSkip )
      {
        // read in depth and video
        for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
        {
          m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
          bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

          m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
          bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();
        }
      }
      else
      {
        iFrame++;
        continue;
      }


      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        TComPicYuv* pcPicYuvVideo = apcPicYuvBaseVideo[iBaseViewIdx];
        TComPicYuv* pcPicYuvDepth = apcPicYuvBaseDepth[iBaseViewIdx];
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx ];
        cCurModel.setBaseView( iBaseViewSIdx, pcPicYuvVideo, pcPicYuvDepth, NULL, NULL );
      }

      m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ));

      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        // setup virtual views
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];

        cCurModel.setErrorMode( iBaseViewSIdx, 1, 0 );
        Int iNumOfSV  = m_cRenModStrParser.getNumOfModelsForView( iBaseViewSIdx, 1);
        for (Int iCurView = 0; iCurView < iNumOfSV; iCurView++ )
        {
          Int iOrgRefBaseViewSIdx;
          Int iLeftBaseViewSIdx;
          Int iRightBaseViewSIdx;
          Int iSynthViewRelNum;
          Int iModelNum;
          Int iBlendMode;

          m_cRenModStrParser.getSingleModelData(iBaseViewSIdx, 1, iCurView, iModelNum, iBlendMode, iLeftBaseViewSIdx, iRightBaseViewSIdx, iOrgRefBaseViewSIdx, iSynthViewRelNum );

          Int iLeftBaseViewIdx    = -1;
          Int iRightBaseViewIdx   = -1;

          TComPicYuv* pcPicYuvOrgRef  = NULL;
          Int**      ppiShiftLUTLeft  = NULL;
          Int**      ppiShiftLUTRight = NULL;
          Int**      ppiBaseShiftLUTLeft  = NULL;
          Int**      ppiBaseShiftLUTRight = NULL;

          Int        iDistToLeft      = -1;

          Int iSynthViewIdx = m_cCameraData.synthRelNum2Idx( iSynthViewRelNum );

          if ( iLeftBaseViewSIdx != -1 )
          {
            iLeftBaseViewIdx   = m_cCameraData.getBaseSortedId2Id()   [ iLeftBaseViewSIdx ];
            ppiShiftLUTLeft    = m_cCameraData.getSynthViewShiftLUTI()[ iLeftBaseViewIdx  ][ iSynthViewIdx  ];
          }

          if ( iRightBaseViewSIdx != -1 )
          {
            iRightBaseViewIdx  = m_cCameraData.getBaseSortedId2Id()   [iRightBaseViewSIdx ];
            ppiShiftLUTRight   = m_cCameraData.getSynthViewShiftLUTI()[ iRightBaseViewIdx ][ iSynthViewIdx ];
          }

          if ( iRightBaseViewSIdx != -1 && iLeftBaseViewSIdx != -1 )
          {
            ppiBaseShiftLUTLeft  = m_cCameraData.getBaseViewShiftLUTI() [ iLeftBaseViewIdx  ][ iRightBaseViewIdx ];
            ppiBaseShiftLUTRight = m_cCameraData.getBaseViewShiftLUTI() [ iRightBaseViewIdx ][ iLeftBaseViewIdx  ];
            iDistToLeft    = m_cCameraData.getRelDistLeft(  iSynthViewIdx , iLeftBaseViewIdx, iRightBaseViewIdx);
          }

          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;

          cCurModel.setSingleModel( iModelNum, ppiShiftLUTLeft, ppiBaseShiftLUTLeft, ppiShiftLUTRight, ppiBaseShiftLUTRight, iDistToLeft, pcPicYuvOrgRef );

          Int iViewPos;
          if (iLeftBaseViewSIdx != -1 && iRightBaseViewSIdx != -1)
          {
            iViewPos = VIEWPOS_MERGED;
          }
          else if ( iLeftBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_LEFT;
          }
          else if ( iRightBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_RIGHT;
          }
          else
          {
            AOT(true);
          }

          cCurModel.getSynthVideo ( iModelNum, iViewPos, pcPicYuvSynthOut );

          // Write Output
          m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iModelNum]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
        }
      }
      iFrame++;
      iNumOfRenderedFrames++;
    }

    // Delete Buffers
    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      apcPicYuvBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvBaseVideo[uiBaseView];

      apcPicYuvBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvBaseDepth[uiBaseView];
    }

    pcPicYuvSynthOut->destroy();
    delete pcPicYuvSynthOut;

    xDestroyLib();
}

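// Renders through the TRenModel using the configured view numbers directly (no setup
// string): one render model is created per output view and driven by the camera data,
// analogous to the plain render() path but evaluated via TRenModel.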
Void TAppRendererTop::xRenderModelFromNums()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  Int aiPad[2] = { 0, 0 };

  // Init Model
  TRenModel cCurModel;

  AOT( m_iLog2SamplingFactor != 0 );
  cCurModel.setupPart( 0, m_iSourceHeight );
#if H_3D_VSO_EARLY_SKIP
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);
  }

  for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
  {
    Int  iLeftBaseViewIdx  = -1;
    Int  iRightBaseViewIdx = -1;
    Bool bIsBaseView = false;

    Int iRelDistToLeft;
    m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );

    if (m_iRenderDirection == 1 )
    {
      iRightBaseViewIdx = -1;
      AOT( iLeftBaseViewIdx == -1);
    }

    if (m_iRenderDirection == 2 )
    {
      iLeftBaseViewIdx = -1;
      AOT( iRightBaseViewIdx == -1);
    }

    Int iLeftBaseViewSIdx  = -1;
    Int iRightBaseViewSIdx = -1;

    if (iLeftBaseViewIdx != -1 )
    {
      iLeftBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];
    }

    if (iRightBaseViewIdx != -1 )
    {
      iRightBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iRightBaseViewIdx];
    }
    cCurModel.createSingleModel(-1, -1, iSynthViewIdx, iLeftBaseViewSIdx, iRightBaseViewSIdx, false, m_iBlendMode );
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( iFrame >= m_iFrameSkip )
        {
          Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];
          cCurModel.setBaseView( iBaseViewSIdx, apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], NULL, NULL );
        }
      }
    }
    else
    {
      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) (iFrame - m_iFrameSkip ));
    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );

      switch( m_iRenderDirection )
      {
        /// INTERPOLATION
      case 0:
        assert( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx,
                                    m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx],
                                    m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
                                    m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx],
                                    m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx],
                                    iRelDistToLeft,
                                    NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_MERGED, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, -1, NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_LEFT, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          apcPicYuvBaseVideo[iRightBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, NULL, NULL, m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, -1, NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_RIGHT, pcPicYuvSynthOut );
        }
        break;
      }

      // Write Output
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();
}
#endif

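// For every input view other than the first one, renders a map of the samples of the
// first base view that are used when shifting towards that view (getUsedSamplesMap)
// and writes the map to the corresponding output file.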
Void TAppRendererTop::renderUsedPelsMap( )
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // Temporal improvement filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    // Temporal improvement filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ) );

    for(Int iViewIdx=1; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      std::cout << "Rendering UsedPelsMap for Frame " << iFrame << " of View " << (Double) m_cCameraData.getBaseViewNumbers()[iViewIdx] << std::endl;

      Int iViewSIdx      = m_cCameraData.getBaseId2SortedId()[iViewIdx];
      Int iFirstViewSIdx = m_cCameraData.getBaseId2SortedId()[0];

      AOT( iViewSIdx == iFirstViewSIdx );

      Bool bFirstIsLeft = (iFirstViewSIdx < iViewSIdx);

      m_pcRenTop->setShiftLUTs(
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        -1
        );

      m_pcRenTop->getUsedSamplesMap( apcPicYuvBaseDepth[0], pcPicYuvSynthOut, bFirstIsLeft );

      // Write Output
      m_apcTVideoIOYuvSynthOutput[iViewIdx-1]->write( pcPicYuvSynthOut, 0, 0, 0 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();
}
#endif