source: 3DVCSoftware/trunk/source/App/TAppRenderer/TAppRendererTop.cpp @ 140

Last change on this file since 140 was 124, checked in by tech, 12 years ago

Fixed VSO setup and compiler warning in getRefQP
( FIX_VSO_SETUP and FIX_COMP_WARNING_INIT )

  • Property svn:eol-style set to native
File size: 35.5 KB
/* The copyright in this software is being made available under the BSD
 * License, included below. This software may be subject to other third party
 * and contributor rights, including patent rights, and no such rights are
 * granted under this license.
 *
 * Copyright (c) 2010-2011, ISO/IEC
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *  * Neither the name of the ISO/IEC nor the names of its contributors may
 *    be used to endorse or promote products derived from this software without
 *    specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <list>
#include <stdio.h>
#include <fcntl.h>
#include <assert.h>
#include <math.h>

#include "TAppRendererTop.h"

// ====================================================================================================================
// Constructor / destructor / initialization / destroy
// ====================================================================================================================

TAppRendererTop::TAppRendererTop()
{

}

TAppRendererTop::~TAppRendererTop()
{

}

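// Opens one YUV video and one depth input file per input view in read mode (applying
// the configured frame skip) and one YUV output file per synthesized view in write
// mode, and creates the TRenTop renderer instance.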
Void TAppRendererTop::xCreateLib()
{
  Int iInternalBitDepth = g_uiBitDepth + g_uiBitIncrement;
  Int iFileBitDepth     = 8;
  m_pcRenTop = new TRenTop();

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfInputViews; iViewIdx++)
  {
    TVideoIOYuv* pcVideoInput = new TVideoIOYuv;
    TVideoIOYuv* pcDepthInput = new TVideoIOYuv;

    pcVideoInput->open( m_pchVideoInputFileList[iViewIdx], false, iFileBitDepth, iInternalBitDepth );  // read mode
    pcDepthInput->open( m_pchDepthInputFileList[iViewIdx], false, iFileBitDepth, iInternalBitDepth );  // read mode
    pcVideoInput->skipFrames( m_iFrameSkip, m_iSourceWidth, m_iSourceHeight );
    pcDepthInput->skipFrames( m_iFrameSkip, m_iSourceWidth, m_iSourceHeight );

    m_apcTVideoIOYuvVideoInput.push_back( pcVideoInput );
    m_apcTVideoIOYuvDepthInput.push_back( pcDepthInput );
  }

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfOutputViews; iViewIdx++)
  {
    TVideoIOYuv* pcSynthOutput = new TVideoIOYuv;
    pcSynthOutput->open( m_pchSynthOutputFileList[iViewIdx], true, iFileBitDepth, iInternalBitDepth );  // write mode
    m_apcTVideoIOYuvSynthOutput.push_back( pcSynthOutput );
  }
}

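// Closes and deletes all YUV input/output handles and the renderer instance.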
Void TAppRendererTop::xDestroyLib()
{
  delete m_pcRenTop;

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvVideoInput[iViewIdx]->close();
    m_apcTVideoIOYuvDepthInput[iViewIdx]->close();

    delete m_apcTVideoIOYuvDepthInput[iViewIdx];
    delete m_apcTVideoIOYuvVideoInput[iViewIdx];
  }

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfOutputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvSynthOutput[iViewIdx]->close();
    delete m_apcTVideoIOYuvSynthOutput[iViewIdx];
  }
}

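// Forwards the configured rendering parameters (sampling factor, shift precision,
// blending, hole filling, pre-/post-processing) to the TRenTop renderer core.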
Void TAppRendererTop::xInitLib()
{
  m_pcRenTop->init(
    m_iSourceWidth,
    m_iSourceHeight,
    (m_iRenderDirection != 0),
    m_iLog2SamplingFactor,
    m_iLog2SamplingFactor+m_iShiftPrecision,
    m_bUVUp,
    m_iPreProcMode,
    m_iPreFilterSize,
    m_iBlendMode,
    m_iBlendZThresPerc,
    m_bBlendUseDistWeight,
    m_iBlendHoleMargin,
    m_iInterpolationMode,
    m_iHoleFillingMode,
    m_iPostProcMode,
    m_iUsedPelMapMarExt
    );
}

// ====================================================================================================================
// Public member functions
// ====================================================================================================================

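// Frame loop for plain rendering (RenderMode 0): reads video and depth of all input
// views and then, per output view, either interpolates between the left and right base
// views (RenderDirection 0) or extrapolates from the left or right base view
// (RenderDirection 1/2) and writes the synthesized picture. m_iFramesToBeRendered == 0
// renders until one of the input sequences reaches its end.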
Void TAppRendererTop::render()
{
  xCreateLib();
  xInitLib();

  // Create Buffers Input Views;
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // TemporalImprovement Filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    // Temporal improvement Filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)iFrame - m_iFrameSkip );

    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );
      Bool bRender   = true;

      Int  iBlendMode = m_iBlendMode;
      Int  iSimEnhBaseView = 0;

      switch( m_iRenderDirection )
      {
      /// INTERPOLATION
      case 0:
        AOF( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView && m_iBlendMode == 0 )
        {
          bRender = false;
        }
        else
        {
          if ( bIsBaseView )
          {
            AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
            Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];

            if ( m_iBlendMode == 1 )
            {
              if ( iSortedBaseViewIdx - 1 >= 0 )
              {
                iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
            else if ( m_iBlendMode == 2 )
            {
              if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
              {
                iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
          }

          if ( m_iBlendMode == 3 )
          {
            if ( bIsBaseView && (iLeftBaseViewIdx == 0) )
            {
              bRender = false;
            }
            else
            {
              Int iDistLeft  = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx ] );
              Int iDistRight = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iRightBaseViewIdx] );

              Int iFillViewIdx = iDistLeft > iDistRight ? iLeftBaseViewIdx : iRightBaseViewIdx;

              if( m_cCameraData.getBaseId2SortedId()[0] < m_cCameraData.getBaseId2SortedId() [iFillViewIdx] )
              {
                iBlendMode        = 1;
                iLeftBaseViewIdx  = 0;
                iRightBaseViewIdx = iFillViewIdx;
              }
              else
              {
                iBlendMode        = 2;
                iLeftBaseViewIdx  = iFillViewIdx;
                iRightBaseViewIdx = 0;
              }
            }
          }
          else
          {
            iBlendMode = m_iBlendMode;
          }
        }

        if ( m_bSimEnhance )
        {
          if ( m_iNumberOfInputViews == 3 && m_cCameraData.getRelSynthViewNumbers()[ iSynthViewIdx ] < VIEW_NUM_PREC )
          {
            iSimEnhBaseView = 2; // Take middle view
          }
          else
          {
            iSimEnhBaseView = 1; // Take left view
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame "    << iFrame
                    << " of View "           << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx    ] / VIEW_NUM_PREC
                    << "   Left BaseView: "  << (Double) m_cCameraData.getBaseViewNumbers() [iLeftBaseViewIdx ] / VIEW_NUM_PREC
                    << "   Right BaseView: " << (Double) m_cCameraData.getBaseViewNumbers() [iRightBaseViewIdx] / VIEW_NUM_PREC
                    << "   BlendMode: "      << iBlendMode
                    << std::endl;

          m_pcRenTop->setShiftLUTs(
            m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
            m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx ],
            iRelDistToLeft
          );

          m_pcRenTop->interpolateView(
            apcPicYuvBaseVideo[iLeftBaseViewIdx ],
            apcPicYuvBaseDepth[iLeftBaseViewIdx ],
            apcPicYuvBaseVideo[iRightBaseViewIdx],
            apcPicYuvBaseDepth[iRightBaseViewIdx],
            pcPicYuvSynthOut,
            iBlendMode,
            iSimEnhBaseView
            );
        }
        else
        {
          AOT( iLeftBaseViewIdx != iRightBaseViewIdx );
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut );
          std::cout << "Copied    Frame " << iFrame
                    << " of View "        << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC
                    << "   (BaseView)  "  << std::endl;
        }

        break;
      /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx - 1 >= 0 )
          {
            iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, NULL, -1 );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iLeftBaseViewIdx ], apcPicYuvBaseDepth[iLeftBaseViewIdx ], pcPicYuvSynthOut, true );
        }
        break;
      /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
          {
            iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( NULL, NULL, NULL, m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, iRelDistToLeft );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iRightBaseViewIdx ], apcPicYuvBaseDepth[iRightBaseViewIdx ], pcPicYuvSynthOut, false );
        }
        break;
      }

      // Write Output
#if PIC_CROPPING
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
#else
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, aiPad );
#endif
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }

  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();
}

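// Dispatches according to the configured render mode: 0 renders with TRenTop,
// 1 uses the render model (VSO-style synthesis), 10 writes the used-pels maps.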
Void TAppRendererTop::go()
{
  switch ( m_iRenderMode )
  {
  case 0:
    render();
    break;
  case 1:
    renderModel();
    break;
  case 10:
    renderUsedPelsMap();
    break;

  default:
    AOT(true);
  }
}

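// Render-model based synthesis; the models are configured either from the render
// model setup string or directly from the synthesized view numbers.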
Void TAppRendererTop::renderModel()
{
  if ( m_bUseSetupString )
  {
    xRenderModelFromString();
  }
  else
  {
    xRenderModelFromNums();
  }
}

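// Builds the TRenModel from the parsed render model setup string (one model per
// entry) and synthesizes the views described there frame by frame.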
Void TAppRendererTop::xRenderModelFromString()
{
    xCreateLib();
    xInitLib();

    // Create Buffers Input Views;
    std::vector<TComPicYuv*> apcPicYuvBaseVideo;
    std::vector<TComPicYuv*> apcPicYuvBaseDepth;

    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      TComPicYuv* pcNewVideoPic = new TComPicYuv;
      TComPicYuv* pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseDepth.push_back(pcNewDepthPic);
    }

    Int aiPad[2] = { 0, 0 };

    // Init Model
    TRenModel cCurModel;

    AOT( m_iLog2SamplingFactor != 0 );
#if LGE_VSO_EARLY_SKIP_A0093
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif

#if HHI_VSO_SPEEDUP_A0033
#if FIX_VSO_SETUP
    cCurModel.setupPart( 0, m_iSourceHeight );
#else
    cCurModel.setHorOffset( 0 );
#endif
#endif

    for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      Int iNumOfModels = m_cRenModStrParser.getNumOfModelsForView(iViewIdx, 1);

      for (Int iCurModel = 0; iCurModel < iNumOfModels; iCurModel++ )
      {
        Int iModelNum; Int iLeftViewNum; Int iRightViewNum; Int iDump; Int iOrgRefNum; Int iBlendMode;
        m_cRenModStrParser.getSingleModelData( iViewIdx, 1, iCurModel, iModelNum, iBlendMode, iLeftViewNum, iRightViewNum, iOrgRefNum, iDump );
        cCurModel         .createSingleModel ( iViewIdx, 1, iModelNum, iLeftViewNum, iRightViewNum, false, iBlendMode );
      }
    }

    // Create Buffer for synthesized View
    TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
    pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

    Bool bAnyEOS = false;

    Int iNumOfRenderedFrames = 0;
    Int iFrame = 0;

    while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
    {
      if ( iFrame >= m_iFrameSkip )
      {
        // read in depth and video
        for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
        {
          m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
          bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

          m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
          bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();
        }
      }
      else
      {
        iFrame++;
        continue;
      }

      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        TComPicYuv* pcPicYuvVideo = apcPicYuvBaseVideo[iBaseViewIdx];
        TComPicYuv* pcPicYuvDepth = apcPicYuvBaseDepth[iBaseViewIdx];
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];
        cCurModel.setBaseView( iBaseViewSIdx, pcPicYuvVideo, pcPicYuvDepth, NULL, NULL );
      }

      m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ) );

      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        // setup virtual views
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];

        cCurModel.setErrorMode( iBaseViewSIdx, 1, 0 );
        Int iNumOfSV = m_cRenModStrParser.getNumOfModelsForView( iBaseViewSIdx, 1);
        for (Int iCurView = 0; iCurView < iNumOfSV; iCurView++ )
        {
          Int iOrgRefBaseViewSIdx;
          Int iLeftBaseViewSIdx;
          Int iRightBaseViewSIdx;
          Int iSynthViewRelNum;
          Int iModelNum;
          Int iBlendMode;

          m_cRenModStrParser.getSingleModelData( iBaseViewSIdx, 1, iCurView, iModelNum, iBlendMode, iLeftBaseViewSIdx, iRightBaseViewSIdx, iOrgRefBaseViewSIdx, iSynthViewRelNum );

          Int iLeftBaseViewIdx  = -1;
          Int iRightBaseViewIdx = -1;

          TComPicYuv* pcPicYuvOrgRef      = NULL;
          Int**      ppiShiftLUTLeft      = NULL;
          Int**      ppiShiftLUTRight     = NULL;
          Int**      ppiBaseShiftLUTLeft  = NULL;
          Int**      ppiBaseShiftLUTRight = NULL;

          Int        iDistToLeft          = -1;

          Int iSynthViewIdx = m_cCameraData.synthRelNum2Idx( iSynthViewRelNum );

          if ( iLeftBaseViewSIdx != -1 )
          {
            iLeftBaseViewIdx   = m_cCameraData.getBaseSortedId2Id()   [ iLeftBaseViewSIdx ];
            ppiShiftLUTLeft    = m_cCameraData.getSynthViewShiftLUTI()[ iLeftBaseViewIdx  ][ iSynthViewIdx  ];
          }

          if ( iRightBaseViewSIdx != -1 )
          {
            iRightBaseViewIdx  = m_cCameraData.getBaseSortedId2Id()   [ iRightBaseViewSIdx ];
            ppiShiftLUTRight   = m_cCameraData.getSynthViewShiftLUTI()[ iRightBaseViewIdx ][ iSynthViewIdx ];
          }

          if ( iRightBaseViewSIdx != -1 && iLeftBaseViewSIdx != -1 )
          {
            ppiBaseShiftLUTLeft  = m_cCameraData.getBaseViewShiftLUTI() [ iLeftBaseViewIdx  ][ iRightBaseViewIdx ];
            ppiBaseShiftLUTRight = m_cCameraData.getBaseViewShiftLUTI() [ iRightBaseViewIdx ][ iLeftBaseViewIdx  ];
            iDistToLeft          = m_cCameraData.getRelDistLeft( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx );
          }

          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;

          cCurModel.setSingleModel( iModelNum, ppiShiftLUTLeft, ppiBaseShiftLUTLeft, ppiShiftLUTRight, ppiBaseShiftLUTRight, iDistToLeft, pcPicYuvOrgRef );

          Int iViewPos;
          if (iLeftBaseViewSIdx != -1 && iRightBaseViewSIdx != -1)
          {
            iViewPos = VIEWPOS_MERGED;
          }
          else if ( iLeftBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_LEFT;
          }
          else if ( iRightBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_RIGHT;
          }
          else
          {
            AOT(true);
          }

          cCurModel.getSynthVideo( iModelNum, iViewPos, pcPicYuvSynthOut );

          // Write Output
#if PIC_CROPPING
          m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iModelNum]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
#else
          m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iModelNum]->write( pcPicYuvSynthOut, aiPad );
#endif
        }
      }
      iFrame++;
      iNumOfRenderedFrames++;
    }

    // Delete Buffers
    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      apcPicYuvBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvBaseVideo[uiBaseView];

      apcPicYuvBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvBaseDepth[uiBaseView];
    }

    pcPicYuvSynthOut->destroy();
    delete pcPicYuvSynthOut;

    xDestroyLib();
}

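// Builds the TRenModel directly from the configured view numbers (one model per
// output view) and synthesizes the output views frame by frame.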
Void TAppRendererTop::xRenderModelFromNums()
{
  xCreateLib();
  xInitLib();

  // Create Buffers Input Views;
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  Int aiPad[2] = { 0, 0 };

  // Init Model
  TRenModel cCurModel;

  AOT( m_iLog2SamplingFactor != 0 );
#if HHI_VSO_SPEEDUP_A0033
#if FIX_VSO_SETUP
  cCurModel.setupPart( 0, m_iSourceHeight );
#else
  cCurModel.setHorOffset( 0 );
#endif
#endif
#if LGE_VSO_EARLY_SKIP_A0093
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);
  }

  for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
  {
    Int  iLeftBaseViewIdx  = -1;
    Int  iRightBaseViewIdx = -1;
    Bool bIsBaseView = false;

    Int iRelDistToLeft;
    m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );

    if (m_iRenderDirection == 1 )
    {
      iRightBaseViewIdx = -1;
      AOT( iLeftBaseViewIdx == -1);
    }

    if (m_iRenderDirection == 2 )
    {
      iLeftBaseViewIdx = -1;
      AOT( iRightBaseViewIdx == -1);
    }

    Int iLeftBaseViewSIdx  = -1;
    Int iRightBaseViewSIdx = -1;

    if (iLeftBaseViewIdx != -1 )
    {
      iLeftBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];
    }

    if (iRightBaseViewIdx != -1 )
    {
      iRightBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iRightBaseViewIdx];
    }
    cCurModel.createSingleModel(-1, -1, iSynthViewIdx, iLeftBaseViewSIdx, iRightBaseViewSIdx, false, m_iBlendMode );
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( iFrame >= m_iFrameSkip )
        {
          Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];
          cCurModel.setBaseView( iBaseViewSIdx, apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], NULL, NULL );
        }
      }
    }
    else
    {
      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) (iFrame - m_iFrameSkip ));
    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );

      switch( m_iRenderDirection )
      {
        /// INTERPOLATION
      case 0:
        assert( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx,
                                    m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
                                    m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx] ,
                                    iRelDistToLeft,
                                    NULL );
          cCurModel.getSynthVideo( iSynthViewIdx, VIEWPOS_MERGED, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, -1, NULL );
          cCurModel.getSynthVideo( iSynthViewIdx, VIEWPOS_LEFT, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iRightBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, NULL, NULL, m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, -1, NULL );
          cCurModel.getSynthVideo( iSynthViewIdx, VIEWPOS_RIGHT, pcPicYuvSynthOut );
        }
        break;
      }

      // Write Output
#if PIC_CROPPING
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
#else
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, aiPad );
#endif
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();
}

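// For each input view other than the first one, renders a map marking which samples
// of the first base view are used when synthesizing that view, and writes it as a
// YUV sequence.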
Void TAppRendererTop::renderUsedPelsMap()
{
  xCreateLib();
  xInitLib();

  // Create Buffers Input Views;
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // TemporalImprovement Filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    // Temporal improvement Filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ) );

    for(Int iViewIdx=1; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      std::cout << "Rendering UsedPelsMap for Frame " << iFrame << " of View " << (Double) m_cCameraData.getBaseViewNumbers()[iViewIdx] << std::endl;

      Int iViewSIdx      = m_cCameraData.getBaseId2SortedId()[iViewIdx];
      Int iFirstViewSIdx = m_cCameraData.getBaseId2SortedId()[0];

      AOT( iViewSIdx == iFirstViewSIdx );

      Bool bFirstIsLeft = (iFirstViewSIdx < iViewSIdx);

      m_pcRenTop->setShiftLUTs(
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        -1
        );

      m_pcRenTop->getUsedSamplesMap( apcPicYuvBaseDepth[0], pcPicYuvSynthOut, bFirstIsLeft );

      // Write Output
#if PIC_CROPPING
      m_apcTVideoIOYuvSynthOutput[iViewIdx-1]->write( pcPicYuvSynthOut, 0, 0, 0 );
#else
      m_apcTVideoIOYuvSynthOutput[iViewIdx-1]->write( pcPicYuvSynthOut, aiPad );
#endif
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();
}