source: 3DVCSoftware/branches/HTM-14.1-update-dev1-HHI/source/App/TAppRenderer/TAppRendererTop.cpp @ 1203

Last change on this file since 1203 was 1200, checked in by tech, 10 years ago

Update to HM-16.5.
Starting point for further re-activation of 3D tools.

Includes:

active:

  • MV-HEVC
  • 3D-HLS (apart from DLT)
  • VSO

inactive:

  • remaining 3D-HEVC tools

Property svn:eol-style set to native
File size: 36.6 KB
/* The copyright in this software is being made available under the BSD
 * License, included below. This software may be subject to other third party
 * and contributor rights, including patent rights, and no such rights are
 * granted under this license.
 *
 * Copyright (c) 2010-2015, ITU/ISO/IEC
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *  * Neither the name of the ISO/IEC nor the names of its contributors may
 *    be used to endorse or promote products derived from this software without
 *    specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */


#include <list>
#include <stdio.h>
#include <fcntl.h>
#include <assert.h>
#include <math.h>

#include "TAppRendererTop.h"

#if NH_3D

// ====================================================================================================================
// Constructor / destructor / initialization / destroy
// ====================================================================================================================

TAppRendererTop::TAppRendererTop()
{

}

TAppRendererTop::~TAppRendererTop()
{

}

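// Create the TRenTop renderer and open one texture and one depth YUV reader per input view
// (skipping the first m_iFrameSkip frames), plus one YUV writer per synthesized output view.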
Void TAppRendererTop::xCreateLib()
{
  m_pcRenTop = new TRenTop();

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfInputViews; iViewIdx++)
  {
    TVideoIOYuv* pcVideoInput = new TVideoIOYuv;
    TVideoIOYuv* pcDepthInput = new TVideoIOYuv;

//    ( Char* pchFile, Bool bWriteMode, const Int fileBitDepth[MAX_NUM_CHANNEL_TYPE], const Int MSBExtendedBitDepth[MAX_NUM_CHANNEL_TYPE], const Int internalBitDepth[MAX_NUM_CHANNEL_TYPE] )

    pcVideoInput->open( m_pchVideoInputFileList[iViewIdx], false, m_inputBitDepth, m_internalBitDepth, m_internalBitDepth );  // read mode
    pcDepthInput->open( m_pchDepthInputFileList[iViewIdx], false, m_inputBitDepth, m_internalBitDepth, m_internalBitDepth );  // read mode
    pcVideoInput->skipFrames(m_iFrameSkip, m_iSourceWidth, m_iSourceHeight, CHROMA_420 );
    pcDepthInput->skipFrames(m_iFrameSkip, m_iSourceWidth, m_iSourceHeight, CHROMA_420 );

    m_apcTVideoIOYuvVideoInput.push_back( pcVideoInput );
    m_apcTVideoIOYuvDepthInput.push_back( pcDepthInput );
  }

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfOutputViews; iViewIdx++)
  {
    TVideoIOYuv* pcSynthOutput = new TVideoIOYuv;
    pcSynthOutput->open( m_pchSynthOutputFileList[iViewIdx], true, m_outputBitDepth, m_internalBitDepth, m_internalBitDepth );  // write mode
    m_apcTVideoIOYuvSynthOutput.push_back( pcSynthOutput );
  }
}

Void TAppRendererTop::xDestroyLib()
{
  delete m_pcRenTop;

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvVideoInput[iViewIdx]->close();
    m_apcTVideoIOYuvDepthInput[iViewIdx]->close();

    delete m_apcTVideoIOYuvDepthInput[iViewIdx];
    delete m_apcTVideoIOYuvVideoInput[iViewIdx];
  }

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfOutputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvSynthOutput[iViewIdx]->close();
    delete m_apcTVideoIOYuvSynthOutput[iViewIdx];
  }
}

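// Forward the configured rendering parameters (sampling, pre-processing, blending,
// interpolation, hole filling, post-processing) to the TRenTop renderer.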
Void TAppRendererTop::xInitLib()
{
  m_pcRenTop->init(
    m_iSourceWidth,
    m_iSourceHeight,
    (m_iRenderDirection != 0),
    m_iLog2SamplingFactor,
    m_iLog2SamplingFactor+m_iShiftPrecision,
    m_bUVUp,
    m_iPreProcMode,
    m_iPreFilterSize,
    m_iBlendMode,
    m_iBlendZThresPerc,
    m_bBlendUseDistWeight,
    m_iBlendHoleMargin,
    m_iInterpolationMode,
    m_iHoleFillingMode,
    m_iPostProcMode,
    m_iUsedPelMapMarExt
  );
}

// ====================================================================================================================
// Public member functions
// ====================================================================================================================

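// Direct rendering (m_iRenderMode == 0): per frame, texture and depth of all base views are read and,
// depending on m_iRenderDirection and m_iBlendMode, each output view is either interpolated from its
// left and right base views, extrapolated from a single base view, or copied from the original base view.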
Void TAppRendererTop::render()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // Temporal improvement filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  TComPicYuv* pcNewOrg = new TComPicYuv;
  pcNewOrg->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    // Temporal improvement filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

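  // Render frames until m_iFramesToBeRendered have been produced (0 means no limit)
  // or any input file reaches its end.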
  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx],pcNewOrg, IPCOLOURSPACE_UNCHANGED, aiPad, CHROMA_420  ) ;

        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();

        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx],pcNewOrg, IPCOLOURSPACE_UNCHANGED, aiPad, CHROMA_420  ) ;
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)iFrame - m_iFrameSkip );

    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );
      Bool bRender   = true;

      Int  iBlendMode = m_iBlendMode;
      Int  iSimEnhBaseView = 0;

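      // m_iRenderDirection: 0 = interpolate between the left and right base view,
      // 1 = extrapolate from the left base view, 2 = extrapolate from the right base view.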
      switch( m_iRenderDirection )
      {
      /// INTERPOLATION
      case 0:
        AOF( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView && m_iBlendMode == 0 )
        {
          bRender = false;
        }
        else
        {
          if ( bIsBaseView )
          {
            AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
            Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];

            if ( m_iBlendMode == 1 )
            {
              if ( iSortedBaseViewIdx - 1 >= 0 )
              {
                iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
            else if ( m_iBlendMode == 2 )
            {
              if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
              {
                iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
          }

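          // Blend mode 3: render from base view 0 together with the farther of the two
          // neighbouring base views; iBlendMode is switched to 1 or 2 depending on which
          // side base view 0 lies on.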
          if ( m_iBlendMode == 3 )
          {
            if ( bIsBaseView && (iLeftBaseViewIdx == 0) )
            {
              bRender = false;
            }
            else
            {
              Int iDistLeft  = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx ]  );
              Int iDistRight = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId() [iRightBaseViewIdx]  );

              Int iFillViewIdx = iDistLeft > iDistRight ? iLeftBaseViewIdx : iRightBaseViewIdx;

              if( m_cCameraData.getBaseId2SortedId()[0] < m_cCameraData.getBaseId2SortedId() [iFillViewIdx] )
              {
                iBlendMode        = 1;
                iLeftBaseViewIdx  = 0;
                iRightBaseViewIdx = iFillViewIdx;
              }
              else
              {
                iBlendMode        = 2;
                iLeftBaseViewIdx  = iFillViewIdx;
                iRightBaseViewIdx = 0;
              }

            }
          }
          else
          {
            iBlendMode = m_iBlendMode;
          }
        }

        if ( m_bSimEnhance )
        {
          if ( m_iNumberOfInputViews == 3 && m_cCameraData.getRelSynthViewNumbers()[ iSynthViewIdx ] < VIEW_NUM_PREC  )
          {
            iSimEnhBaseView = 2; // Take middle view
          }
          else
          {
            iSimEnhBaseView = 1; // Take left view
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame "    << iFrame
                    << " of View "           << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx    ] / VIEW_NUM_PREC
                    << "   Left BaseView: "  << (Double) m_cCameraData.getBaseViewNumbers() [iLeftBaseViewIdx ] / VIEW_NUM_PREC
                    << "   Right BaseView: " << (Double) m_cCameraData.getBaseViewNumbers() [iRightBaseViewIdx] / VIEW_NUM_PREC
                    << "   BlendMode: "      << iBlendMode
                    << std::endl;

          m_pcRenTop->setShiftLUTs(
            m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
            m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx ],

            iRelDistToLeft
          );

          m_pcRenTop->interpolateView(
            apcPicYuvBaseVideo[iLeftBaseViewIdx ],
            apcPicYuvBaseDepth[iLeftBaseViewIdx ],
            apcPicYuvBaseVideo[iRightBaseViewIdx],
            apcPicYuvBaseDepth[iRightBaseViewIdx],
            pcPicYuvSynthOut,
            iBlendMode,
            iSimEnhBaseView
            );
        }
        else
        {
          AOT(iLeftBaseViewIdx != iRightBaseViewIdx );
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut );
          std::cout << "Copied    Frame " << iFrame
                    << " of View "        << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC
                    << "   (BaseView)  "    << std::endl;
        }

        break;
      /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx - 1 >= 0 )
          {
            iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }


        if (bRender)
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, NULL, -1 );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iLeftBaseViewIdx ], apcPicYuvBaseDepth[iLeftBaseViewIdx ], pcPicYuvSynthOut, true );
        }
        break;
      /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {

          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId() [iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
          {
            iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( NULL, NULL,NULL, m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx],NULL, iRelDistToLeft);
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iRightBaseViewIdx ], apcPicYuvBaseDepth[iRightBaseViewIdx ], pcPicYuvSynthOut, false);
        }
        break;
      }

      // Write Output

      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, IPCOLOURSPACE_UNCHANGED, 0, 0, 0, 0, CHROMA_420 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  pcNewOrg->destroy();
  delete pcNewOrg;

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }

  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}

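// Dispatch on m_iRenderMode: 0 renders the output views directly, 1 uses the VSO render model,
// 10 writes the used-pels maps. Any other value is invalid.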
Void TAppRendererTop::go()
{
  switch ( m_iRenderMode )
  {
  case 0:
    render();
    break;
#if NH_3D_VSO
  case 1:
    renderModel();
    break;
#endif
  case 10:
    renderUsedPelsMap( );
    break;
  default:
    AOT(true);
  }

#if H_3D_REN_MAX_DEV_OUT
  Double dMaxDispDiff = m_cCameraData.getMaxShiftDeviation();

  if ( !(dMaxDispDiff < 0) )
  {
    printf("\n Max. possible shift error: %12.3f samples.\n", dMaxDispDiff );
  }
#endif
}

#if NH_3D_VSO
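// Render-model based synthesis (m_iRenderMode == 1): TRenModel is set up either from the
// render model setup string or from the configured view numbers.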
Void TAppRendererTop::renderModel()
{
  if ( m_bUseSetupString )
  {
    xRenderModelFromString();
  }
  else
  {
    xRenderModelFromNums();
  }
}


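// Build one TRenModel whose synthesis models are described by the render model setup string
// (m_cRenModStrParser); per frame, the base view textures and depths are passed to the model
// and every model's synthesized view is written out.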
Void TAppRendererTop::xRenderModelFromString()
{
    xCreateLib();
    xInitLib();

    // Create buffers for input views
    std::vector<TComPicYuv*> apcPicYuvBaseVideo;
    std::vector<TComPicYuv*> apcPicYuvBaseDepth;


    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      TComPicYuv* pcNewVideoPic = new TComPicYuv;
      TComPicYuv* pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
      apcPicYuvBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
      apcPicYuvBaseDepth.push_back(pcNewDepthPic);
    }

    Int aiPad[2] = { 0, 0 };
    TComPicYuv* pcNewOrg = new TComPicYuv;
    pcNewOrg->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );

    // Init Model
    TRenModel cCurModel;

    AOT( m_iLog2SamplingFactor != 0 );
#if H_3D_VSO_EARLY_SKIP
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif

    cCurModel.setupPart( 0, m_iSourceHeight  );

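    // Create one synthesis model per setup-string entry for each input view.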
    for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      Int iNumOfModels   = m_cRenModStrParser.getNumOfModelsForView(iViewIdx, 1);

      for (Int iCurModel = 0; iCurModel < iNumOfModels; iCurModel++ )
      {
        Int iModelNum; Int iLeftViewNum; Int iRightViewNum; Int iDump; Int iOrgRefNum; Int iBlendMode;
        m_cRenModStrParser.getSingleModelData  ( iViewIdx, 1, iCurModel, iModelNum, iBlendMode, iLeftViewNum, iRightViewNum, iOrgRefNum, iDump ) ;
        cCurModel         .createSingleModel   ( iViewIdx, 1, iModelNum, iLeftViewNum, iRightViewNum, false, iBlendMode );
      }
    }

    // Create Buffer for synthesized View
    TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
    pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );

    Bool bAnyEOS = false;

    Int iNumOfRenderedFrames = 0;
    Int iFrame = 0;

    while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
    {

      if ( iFrame >= m_iFrameSkip )
      {
        // read in depth and video
        for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
        {
          m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], pcNewOrg, IPCOLOURSPACE_UNCHANGED, aiPad, CHROMA_420  ) ;
          bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

          m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], pcNewOrg, IPCOLOURSPACE_UNCHANGED, aiPad, CHROMA_420  ) ;
          bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();
        }
      }
      else
      {
        iFrame++;
        continue;
      }


      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        TComPicYuv* pcPicYuvVideo = apcPicYuvBaseVideo[iBaseViewIdx];
        TComPicYuv* pcPicYuvDepth = apcPicYuvBaseDepth[iBaseViewIdx];
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx ];
        cCurModel.setBaseView( iBaseViewSIdx, pcPicYuvVideo, pcPicYuvDepth, NULL, NULL );
      }

      m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ));

      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        // setup virtual views
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];

        cCurModel.setErrorMode( iBaseViewSIdx, 1, 0 );
        Int iNumOfSV  = m_cRenModStrParser.getNumOfModelsForView( iBaseViewSIdx, 1);
        for (Int iCurView = 0; iCurView < iNumOfSV; iCurView++ )
        {
          Int iOrgRefBaseViewSIdx;
          Int iLeftBaseViewSIdx;
          Int iRightBaseViewSIdx;
          Int iSynthViewRelNum;
          Int iModelNum;
          Int iBlendMode;

          m_cRenModStrParser.getSingleModelData(iBaseViewSIdx, 1, iCurView, iModelNum, iBlendMode, iLeftBaseViewSIdx, iRightBaseViewSIdx, iOrgRefBaseViewSIdx, iSynthViewRelNum );

          Int iLeftBaseViewIdx    = -1;
          Int iRightBaseViewIdx   = -1;

          TComPicYuv* pcPicYuvOrgRef  = NULL;
          Int**      ppiShiftLUTLeft  = NULL;
          Int**      ppiShiftLUTRight = NULL;
          Int**      ppiBaseShiftLUTLeft  = NULL;
          Int**      ppiBaseShiftLUTRight = NULL;


          Int        iDistToLeft      = -1;

          Int iSynthViewIdx = m_cCameraData.synthRelNum2Idx( iSynthViewRelNum );

          if ( iLeftBaseViewSIdx != -1 )
          {
            iLeftBaseViewIdx   = m_cCameraData.getBaseSortedId2Id()   [ iLeftBaseViewSIdx ];
            ppiShiftLUTLeft    = m_cCameraData.getSynthViewShiftLUTI()[ iLeftBaseViewIdx  ][ iSynthViewIdx  ];
          }

          if ( iRightBaseViewSIdx != -1 )
          {
            iRightBaseViewIdx  = m_cCameraData.getBaseSortedId2Id()   [iRightBaseViewSIdx ];
            ppiShiftLUTRight   = m_cCameraData.getSynthViewShiftLUTI()[ iRightBaseViewIdx ][ iSynthViewIdx ];
          }

          if ( iRightBaseViewSIdx != -1 && iLeftBaseViewSIdx != -1 )
          {

            ppiBaseShiftLUTLeft  = m_cCameraData.getBaseViewShiftLUTI() [ iLeftBaseViewIdx  ][ iRightBaseViewIdx ];
            ppiBaseShiftLUTRight = m_cCameraData.getBaseViewShiftLUTI() [ iRightBaseViewIdx ][ iLeftBaseViewIdx  ];
            iDistToLeft    = m_cCameraData.getRelDistLeft(  iSynthViewIdx , iLeftBaseViewIdx, iRightBaseViewIdx);
          }

          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;

          cCurModel.setSingleModel( iModelNum, ppiShiftLUTLeft, ppiBaseShiftLUTLeft, ppiShiftLUTRight, ppiBaseShiftLUTRight, iDistToLeft, pcPicYuvOrgRef );

          Int iViewPos;
          if (iLeftBaseViewSIdx != -1 && iRightBaseViewSIdx != -1)
          {
            iViewPos = VIEWPOS_MERGED;
          }
          else if ( iLeftBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_LEFT;
          }
          else if ( iRightBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_RIGHT;
          }
          else
          {
            AOT(true);
          }

          cCurModel.getSynthVideo ( iModelNum, iViewPos, pcPicYuvSynthOut );

          // Write Output
          m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iModelNum]->write( pcPicYuvSynthOut, IPCOLOURSPACE_UNCHANGED,  0 ,0 ,0, 0, CHROMA_420 );
        }
      }
      iFrame++;
      iNumOfRenderedFrames++;
    }

    // Delete Buffers
    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      apcPicYuvBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvBaseVideo[uiBaseView];

      apcPicYuvBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvBaseDepth[uiBaseView];
    }
    pcPicYuvSynthOut->destroy();
    delete pcPicYuvSynthOut;

    xDestroyLib();
}

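// Like xRenderModelFromString(), but one synthesis model is created per output view directly
// from the configured view numbers and render direction instead of the setup string.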
Void TAppRendererTop::xRenderModelFromNums()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;


  Int aiPad[2] = { 0, 0 };
  TComPicYuv* pcNewOrg = new TComPicYuv;
  pcNewOrg->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );


  // Init Model
  TRenModel cCurModel;

  AOT( m_iLog2SamplingFactor != 0 );

#if H_3D_VSO_EARLY_SKIP
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin, false );
#else
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );
#endif
  cCurModel.setupPart( 0, m_iSourceHeight  );

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);
  }

  for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
  {
    Int  iLeftBaseViewIdx  = -1;
    Int  iRightBaseViewIdx = -1;
    Bool bIsBaseView = false;

    Int iRelDistToLeft;
    m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft,  bIsBaseView );

    if (m_iRenderDirection == 1 )
    {
      iRightBaseViewIdx = -1;
      AOT( iLeftBaseViewIdx == -1);
    }

    if (m_iRenderDirection == 2 )
    {
      iLeftBaseViewIdx = -1;
      AOT( iRightBaseViewIdx == -1);
    }

    Int iLeftBaseViewSIdx  = -1;
    Int iRightBaseViewSIdx = -1;

    if (iLeftBaseViewIdx != -1 )
    {
      iLeftBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];
    }

    if (iRightBaseViewIdx != -1 )
    {
      iRightBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iRightBaseViewIdx];
    }
    cCurModel.createSingleModel(-1, -1, iSynthViewIdx, iLeftBaseViewSIdx, iRightBaseViewSIdx, false, m_iBlendMode );
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {

    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], pcNewOrg, IPCOLOURSPACE_UNCHANGED, aiPad, CHROMA_420  ) ;
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], pcNewOrg, IPCOLOURSPACE_UNCHANGED, aiPad, CHROMA_420  ) ;
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( iFrame >= m_iFrameSkip )
        {
          Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];
          cCurModel.setBaseView( iBaseViewSIdx, apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], NULL, NULL );
        }
      }
    }
    else
    {
      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) (iFrame - m_iFrameSkip ));
    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {

      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );

      switch( m_iRenderDirection )
      {
        /// INTERPOLATION
      case 0:
        assert( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx,
                                    m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
                                    m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx] ,
                                    iRelDistToLeft,
                                    NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_MERGED, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM LEFT
      case 1:

        if ( !bHasLView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, -1,  NULL);
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_LEFT, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iRightBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, NULL , NULL, m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, -1, NULL);
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_RIGHT, pcPicYuvSynthOut );
        }
        break;
      }

      // Write Output
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, IPCOLOURSPACE_UNCHANGED,  0 ,0 ,0, 0, CHROMA_420 );
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}
#endif

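// m_iRenderMode 10: for every base view other than view 0, render a map marking which samples
// of base view 0 are used when it is shifted to that view's position, and write the map to the
// corresponding output file.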
Void TAppRendererTop::renderUsedPelsMap( )
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // Temporal improvement filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  TComPicYuv* pcNewOrg = new TComPicYuv;
  pcNewOrg->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);


    // Temporal improvement filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, CHROMA_420, 1, 1, 1, true );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    if ( iFrame >= m_iFrameSkip )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], pcNewOrg, IPCOLOURSPACE_UNCHANGED,  aiPad, CHROMA_420  ) ;
        apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], pcNewOrg, IPCOLOURSPACE_UNCHANGED,  aiPad, CHROMA_420  ) ;
        apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

        if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
        {
          m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
        }
      }
    }
    else
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }
    m_cCameraData.update( (UInt) ( iFrame - m_iFrameSkip ) );

    for(Int iViewIdx=1; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      std::cout << "Rendering UsedPelsMap for Frame " << iFrame << " of View " << (Double) m_cCameraData.getBaseViewNumbers()[iViewIdx] << std::endl;

      Int iViewSIdx      = m_cCameraData.getBaseId2SortedId()[iViewIdx];
      Int iFirstViewSIdx = m_cCameraData.getBaseId2SortedId()[0];

      AOT( iViewSIdx == iFirstViewSIdx );

      Bool bFirstIsLeft = (iFirstViewSIdx < iViewSIdx);

      m_pcRenTop->setShiftLUTs(
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        -1
        );

      m_pcRenTop->getUsedSamplesMap( apcPicYuvBaseDepth[0], pcPicYuvSynthOut, bFirstIsLeft );

      // Write Output
      m_apcTVideoIOYuvSynthOutput[iViewIdx-1]->write( pcPicYuvSynthOut,  IPCOLOURSPACE_UNCHANGED, 0, 0, 0, 0, CHROMA_420 );

    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers

  pcNewOrg->destroy();
  delete pcNewOrg;

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}
#endif
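
// ====================================================================================================================
// Illustrative sketch (not part of this file): the renderer executable typically drives this class from its main()
// roughly as below. The helper names create(), parseCfg() and destroy() are assumptions based on the other HTM
// applications (TAppEncoder/TAppDecoder) and may differ in the actual renderer main source and TAppRendererCfg.
//
//   Int main( Int argc, Char* argv[] )
//   {
//     TAppRendererTop cTAppRendererTop;
//     cTAppRendererTop.create();                          // hypothetical: allocate configuration storage
//     if ( !cTAppRendererTop.parseCfg( argc, argv ) )     // hypothetical: read command line / config file
//     {
//       cTAppRendererTop.destroy();
//       return 1;
//     }
//     cTAppRendererTop.go();                              // dispatches on RenderMode as implemented above
//     cTAppRendererTop.destroy();
//     return 0;
//   }
// ====================================================================================================================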