source: 3DVCSoftware/branches/HTM-3.0-LG/source/App/TAppRenderer/TAppRendererTop.cpp @ 89

Last change on this file since 89 was 56, checked in by hschwarz, 13 years ago

updated trunk (move to HM6.1)

  • Property svn:eol-style set to native
File size: 34.4 KB
/* The copyright in this software is being made available under the BSD
 * License, included below. This software may be subject to other third party
 * and contributor rights, including patent rights, and no such rights are
 * granted under this license.
 *
 * Copyright (c) 2010-2011, ISO/IEC
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  * Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 *  * Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *  * Neither the name of the ISO/IEC nor the names of its contributors may
 *    be used to endorse or promote products derived from this software without
 *    specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */



#include <list>
#include <stdio.h>
#include <fcntl.h>
#include <assert.h>
#include <math.h>

#include "TAppRendererTop.h"

// ====================================================================================================================
// Constructor / destructor / initialization / destroy
// ====================================================================================================================

TAppRendererTop::TAppRendererTop()
{

}

TAppRendererTop::~TAppRendererTop()
{

}

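// Open one TVideoIOYuv reader per input view for texture and depth and one
// writer per output view for the synthesized sequences (files at 8 bit,
// internal bit depth g_uiBitDepth + g_uiBitIncrement).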
Void TAppRendererTop::xCreateLib()
{
  Int iInternalBitDepth = g_uiBitDepth + g_uiBitIncrement;
  Int iFileBitDepth     = 8;
  m_pcRenTop = new TRenTop();

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfInputViews; iViewIdx++)
  {
    TVideoIOYuv* pcVideoInput = new TVideoIOYuv;
    TVideoIOYuv* pcDepthInput = new TVideoIOYuv;

    pcVideoInput->open( m_pchVideoInputFileList[iViewIdx], false, iFileBitDepth, iInternalBitDepth );  // read mode
    pcDepthInput->open( m_pchDepthInputFileList[iViewIdx], false, iFileBitDepth, iInternalBitDepth );  // read mode

    m_apcTVideoIOYuvVideoInput.push_back( pcVideoInput );
    m_apcTVideoIOYuvDepthInput.push_back( pcDepthInput );
  }

  for(Int iViewIdx=0; iViewIdx<m_iNumberOfOutputViews; iViewIdx++)
  {
    TVideoIOYuv* pcSynthOutput = new TVideoIOYuv;
    pcSynthOutput->open( m_pchSynthOutputFileList[iViewIdx], true, iFileBitDepth, iInternalBitDepth );  // write mode
    m_apcTVideoIOYuvSynthOutput.push_back( pcSynthOutput );
  }
}

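// Delete the renderer and close and delete all YUV readers and writers
// created in xCreateLib().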
Void TAppRendererTop::xDestroyLib()
{
  delete m_pcRenTop;

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvVideoInput[iViewIdx]->close();
    m_apcTVideoIOYuvDepthInput[iViewIdx]->close();

    delete m_apcTVideoIOYuvDepthInput[iViewIdx];
    delete m_apcTVideoIOYuvVideoInput[iViewIdx];
  }

  for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfOutputViews; iViewIdx++ )
  {
    m_apcTVideoIOYuvSynthOutput[iViewIdx]->close();
    delete m_apcTVideoIOYuvSynthOutput[iViewIdx];
  }
}
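// Forward the renderer configuration (resolution, render direction, sampling
// and shift precision, pre-/post-processing, blending and hole-filling
// settings) to TRenTop::init().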
Void TAppRendererTop::xInitLib()
{
  m_pcRenTop->init(
    m_iSourceWidth,
    m_iSourceHeight,
    (m_iRenderDirection != 0),
    m_iLog2SamplingFactor,
    m_iLog2SamplingFactor+m_iShiftPrecision,
    m_bUVUp,
    m_iPreProcMode,
    m_iPreFilterSize,
    m_iBlendMode,
    m_iBlendZThresPerc,
    m_bBlendUseDistWeight,
    m_iBlendHoleMargin,
    m_iInterpolationMode,
    m_iHoleFillingMode,
    m_iPostProcMode,
    m_iUsedPelMapMarExt
    );
}

// ====================================================================================================================
// Public member functions
// ====================================================================================================================

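// RenderMode 0: conventional rendering. For every frame, texture and depth of
// all input views are read and each output view is synthesized according to
// RenderDirection (interpolation between the two neighbouring base views, or
// extrapolation from the left or right one). Output views that coincide with
// a base view are copied unless a non-zero BlendMode requests re-rendering
// from a neighbouring base view.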
Void TAppRendererTop::render()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // TemporalImprovement Filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    //Temporal improvement Filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

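  // Frame loop: runs until the requested number of frames has been rendered
  // (m_iFramesToBeRendered == 0 means all frames) or any input file hits EOF.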
  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {

    // read in depth and video
    for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
    {
      m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );

      apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();

      bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

      m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
      apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
      bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

      if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
      {
        m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
      }
    }

    if ( iFrame < m_iFrameSkip ) // Skip Frames
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)iFrame - m_iFrameSkip );

    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {
      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );
      Bool bRender   = true;

      Int  iBlendMode = m_iBlendMode;
      Int  iSimEnhBaseView = 0;

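      // RenderDirection: 0 = interpolate between the left and right base
      // view, 1 = extrapolate from the left base view, 2 = extrapolate from
      // the right base view.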
      switch( m_iRenderDirection )
      {
      /// INTERPOLATION
      case 0:
        AOF( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView && m_iBlendMode == 0 )
        {
          bRender = false;
        }
        else
        {
          if ( bIsBaseView )
          {
            AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
            Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];

            if ( m_iBlendMode == 1 )
            {
              if ( iSortedBaseViewIdx - 1 >= 0 )
              {
                iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
            else if ( m_iBlendMode == 2 )
            {
              if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
              {
                iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
                bRender = true;
              }
              else
              {
                bRender = false;
              }
            }
          }

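          // BlendMode 3: render from base view 0 together with whichever of
          // the two neighbouring base views lies farther from it in sorted
          // camera order; the left/right roles follow from that order.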
          if ( m_iBlendMode == 3 )
          {
            if ( bIsBaseView && (iLeftBaseViewIdx == 0) )
            {
              bRender = false;
            }
            else
            {
              Int iDistLeft  = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx ] );
              Int iDistRight = abs( m_cCameraData.getBaseId2SortedId()[0] - m_cCameraData.getBaseId2SortedId()[iRightBaseViewIdx] );

              Int iFillViewIdx = iDistLeft > iDistRight ? iLeftBaseViewIdx : iRightBaseViewIdx;

              if( m_cCameraData.getBaseId2SortedId()[0] < m_cCameraData.getBaseId2SortedId()[iFillViewIdx] )
              {
                iBlendMode        = 1;
                iLeftBaseViewIdx  = 0;
                iRightBaseViewIdx = iFillViewIdx;
              }
              else
              {
                iBlendMode        = 2;
                iLeftBaseViewIdx  = iFillViewIdx;
                iRightBaseViewIdx = 0;
              }

            }
          }
          else
          {
            iBlendMode = m_iBlendMode;
          }
        }

        if ( m_bSimEnhance )
        {
          if ( m_iNumberOfInputViews == 3 && m_cCameraData.getRelSynthViewNumbers()[ iSynthViewIdx ] < VIEW_NUM_PREC )
          {
            iSimEnhBaseView = 2; // Take middle view
          }
          else
          {
            iSimEnhBaseView = 1; // Take left view
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame "    << iFrame
                    << " of View "           << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx    ] / VIEW_NUM_PREC
                    << "   Left BaseView: "  << (Double) m_cCameraData.getBaseViewNumbers() [iLeftBaseViewIdx ] / VIEW_NUM_PREC
                    << "   Right BaseView: " << (Double) m_cCameraData.getBaseViewNumbers() [iRightBaseViewIdx] / VIEW_NUM_PREC
                    << "   BlendMode: "      << iBlendMode
                    << std::endl;

          m_pcRenTop->setShiftLUTs(
            m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
            m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx],
            m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx ],
            iRelDistToLeft
          );

          m_pcRenTop->interpolateView(
            apcPicYuvBaseVideo[iLeftBaseViewIdx ],
            apcPicYuvBaseDepth[iLeftBaseViewIdx ],
            apcPicYuvBaseVideo[iRightBaseViewIdx],
            apcPicYuvBaseDepth[iRightBaseViewIdx],
            pcPicYuvSynthOut,
            iBlendMode,
            iSimEnhBaseView
            );
        }
        else
        {
          AOT( iLeftBaseViewIdx != iRightBaseViewIdx );
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut );
          std::cout << "Copied    Frame " << iFrame
                    << " of View "        << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC
                    << "   (BaseView)  "    << std::endl;
        }

        break;
      /// EXTRAPOLATION FROM LEFT
      case 1:
        if ( !bHasLView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx - 1 >= 0 )
          {
            iLeftBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx - 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }


        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( m_cCameraData.getSynthViewShiftLUTD()[iLeftBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, NULL, -1 );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iLeftBaseViewIdx ], apcPicYuvBaseDepth[iLeftBaseViewIdx ], pcPicYuvSynthOut, true );
        }
        break;
      /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          bRender = false;
        }

        if ( bIsBaseView )
        {
          AOF( iLeftBaseViewIdx == iRightBaseViewIdx );
          Int iSortedBaseViewIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];
          if ( iSortedBaseViewIdx + 1 < m_iNumberOfInputViews )
          {
            iRightBaseViewIdx = m_cCameraData.getBaseSortedId2Id()[ iSortedBaseViewIdx + 1];
          }
          else
          {
            std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
            apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
            bRender = false;
          }
        }

        if ( bRender )
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          m_pcRenTop->setShiftLUTs( NULL, NULL, NULL, m_cCameraData.getSynthViewShiftLUTD()[iRightBaseViewIdx ][iSynthViewIdx],
            m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, iRelDistToLeft );
          m_pcRenTop->extrapolateView( apcPicYuvBaseVideo[iRightBaseViewIdx ], apcPicYuvBaseDepth[iRightBaseViewIdx ], pcPicYuvSynthOut, false );
        }
        break;
      }

      // Write Output

#if PIC_CROPPING
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
#else
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, aiPad );
#endif
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }

  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}

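// Dispatch on RenderMode: 0 = conventional rendering (render()), 1 = render
// model based rendering (renderModel()), 10 = used-pels map generation
// (renderUsedPelsMap()); any other value triggers an assertion.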
Void TAppRendererTop::go()
{
  switch ( m_iRenderMode )
  {
  case 0:
    render();
    break;
  case 1:
    renderModel();
    break;
  case 10:
    renderUsedPelsMap();
    break;

  default:
    AOT(true);
  }
}

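// RenderMode 1: render-model based synthesis; the models are set up either
// from the render model setup string (xRenderModelFromString()) or from the
// configured view numbers (xRenderModelFromNums()).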
Void TAppRendererTop::renderModel()
{
  if ( m_bUseSetupString )
  {
    xRenderModelFromString();
  }
  else
  {
    xRenderModelFromNums();
  }
}

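// Model-based rendering configured by the setup string: cCurModel holds one
// sub-model per entry parsed by m_cRenModStrParser; each frame the base-view
// texture and depth are passed in and the synthesized views are written to
// the output files.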
Void TAppRendererTop::xRenderModelFromString()
{

    xCreateLib();
    xInitLib();

    // Create buffers for input views
    std::vector<TComPicYuv*> apcPicYuvBaseVideo;
    std::vector<TComPicYuv*> apcPicYuvBaseDepth;


    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      TComPicYuv* pcNewVideoPic = new TComPicYuv;
      TComPicYuv* pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvBaseDepth.push_back(pcNewDepthPic);
    }

    Int aiPad[2] = { 0, 0 };

    // Init Model
    TRenModel cCurModel;

    AOT( m_iLog2SamplingFactor != 0 );
    cCurModel.create( m_cRenModStrParser.getNumOfBaseViews(), m_cRenModStrParser.getNumOfModels(), m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );

    for ( Int iViewIdx = 0; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      Int iNumOfModels   = m_cRenModStrParser.getNumOfModelsForView(iViewIdx, 1);

      for (Int iCurModel = 0; iCurModel < iNumOfModels; iCurModel++ )
      {
        Int iModelNum; Int iLeftViewNum; Int iRightViewNum; Int iDump; Int iOrgRefNum; Int iBlendMode;
        m_cRenModStrParser.getSingleModelData  ( iViewIdx, 1, iCurModel, iModelNum, iBlendMode, iLeftViewNum, iRightViewNum, iOrgRefNum, iDump );
        cCurModel         .createSingleModel   ( iViewIdx, 1, iModelNum, iLeftViewNum, iRightViewNum, false, iBlendMode );

      }
    }

    // Create Buffer for synthesized View
    TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
    pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

    Bool bAnyEOS = false;

    Int iNumOfRenderedFrames = 0;
    Int iFrame = 0;

    while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
    {
      // read in depth and video
      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
        bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

        m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
        bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();
      }

      if ( iFrame < m_iFrameSkip ) // Skip Frames
      {
        iFrame++;
        continue;
      }


      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        TComPicYuv* pcPicYuvVideo = apcPicYuvBaseVideo[iBaseViewIdx];
        TComPicYuv* pcPicYuvDepth = apcPicYuvBaseDepth[iBaseViewIdx];
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];
        cCurModel.setBaseView( iBaseViewSIdx, pcPicYuvVideo, pcPicYuvDepth, NULL, NULL );
      }

      for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
      {
        m_cCameraData.update( (UInt)iFrame );

        // setup virtual views
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];

        cCurModel.setErrorMode( iBaseViewSIdx, 1, 0 );
        Int iNumOfSV  = m_cRenModStrParser.getNumOfModelsForView( iBaseViewSIdx, 1);
        for (Int iCurView = 0; iCurView < iNumOfSV; iCurView++ )
        {
          Int iOrgRefBaseViewSIdx;
          Int iLeftBaseViewSIdx;
          Int iRightBaseViewSIdx;
          Int iSynthViewRelNum;
          Int iModelNum;
          Int iBlendMode;

          m_cRenModStrParser.getSingleModelData(iBaseViewSIdx, 1, iCurView, iModelNum, iBlendMode, iLeftBaseViewSIdx, iRightBaseViewSIdx, iOrgRefBaseViewSIdx, iSynthViewRelNum );

          Int iLeftBaseViewIdx    = -1;
          Int iRightBaseViewIdx   = -1;

          TComPicYuv* pcPicYuvOrgRef  = NULL;
          Int**      ppiShiftLUTLeft  = NULL;
          Int**      ppiShiftLUTRight = NULL;
          Int**      ppiBaseShiftLUTLeft  = NULL;
          Int**      ppiBaseShiftLUTRight = NULL;


          Int        iDistToLeft      = -1;

          Int iSynthViewIdx = m_cCameraData.synthRelNum2Idx( iSynthViewRelNum );

          if ( iLeftBaseViewSIdx != -1 )
          {
            iLeftBaseViewIdx   = m_cCameraData.getBaseSortedId2Id()   [ iLeftBaseViewSIdx ];
            ppiShiftLUTLeft    = m_cCameraData.getSynthViewShiftLUTI()[ iLeftBaseViewIdx  ][ iSynthViewIdx  ];
          }

          if ( iRightBaseViewSIdx != -1 )
          {
            iRightBaseViewIdx  = m_cCameraData.getBaseSortedId2Id()   [iRightBaseViewSIdx ];
            ppiShiftLUTRight   = m_cCameraData.getSynthViewShiftLUTI()[ iRightBaseViewIdx ][ iSynthViewIdx ];
          }

          if ( iRightBaseViewSIdx != -1 && iLeftBaseViewSIdx != -1 )
          {
            ppiBaseShiftLUTLeft  = m_cCameraData.getBaseViewShiftLUTI() [ iLeftBaseViewIdx  ][ iRightBaseViewIdx ];
            ppiBaseShiftLUTRight = m_cCameraData.getBaseViewShiftLUTI() [ iRightBaseViewIdx ][ iLeftBaseViewIdx  ];
            iDistToLeft    = m_cCameraData.getRelDistLeft( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx );
          }

          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;

          cCurModel.setSingleModel( iModelNum, ppiShiftLUTLeft, ppiBaseShiftLUTLeft, ppiShiftLUTRight, ppiBaseShiftLUTRight, iDistToLeft, pcPicYuvOrgRef );

          Int iViewPos;
          if (iLeftBaseViewSIdx != -1 && iRightBaseViewSIdx != -1)
          {
            iViewPos = VIEWPOS_MERGED;
          }
          else if ( iLeftBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_LEFT;
          }
          else if ( iRightBaseViewSIdx != -1 )
          {
            iViewPos = VIEWPOS_RIGHT;
          }
          else
          {
            AOT(true);
          }

          cCurModel.getSynthVideo ( iModelNum, iViewPos, pcPicYuvSynthOut );

          // Write Output
#if PIC_CROPPING
          m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iModelNum]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
#else
          m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iModelNum]->write( pcPicYuvSynthOut, aiPad );
#endif
        }
      }
      iFrame++;
      iNumOfRenderedFrames++;
    }

    // Delete Buffers
    for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
    {
      apcPicYuvBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvBaseVideo[uiBaseView];

      apcPicYuvBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvBaseDepth[uiBaseView];
    }
    pcPicYuvSynthOut->destroy();
    delete pcPicYuvSynthOut;

    xDestroyLib();
}

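// Model-based rendering without a setup string: one sub-model is created per
// configured output view from the left/right base views given by the camera
// data; each frame is then synthesized and written out.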
Void TAppRendererTop::xRenderModelFromNums()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;


  Int aiPad[2] = { 0, 0 };

  // Init Model
  TRenModel cCurModel;

  AOT( m_iLog2SamplingFactor != 0 );
  cCurModel.create( m_iNumberOfInputViews, m_iNumberOfOutputViews, m_iSourceWidth, m_iSourceHeight, m_iShiftPrecision, m_iBlendHoleMargin );

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);
  }

  for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
  {
    Int  iLeftBaseViewIdx  = -1;
    Int  iRightBaseViewIdx = -1;
    Bool bIsBaseView = false;

    Int iRelDistToLeft;
    m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );

    if (m_iRenderDirection == 1 )
    {
      iRightBaseViewIdx = -1;
      AOT( iLeftBaseViewIdx == -1);
    }

    if (m_iRenderDirection == 2 )
    {
      iLeftBaseViewIdx = -1;
      AOT( iRightBaseViewIdx == -1);
    }

    Int iLeftBaseViewSIdx  = -1;
    Int iRightBaseViewSIdx = -1;

    if (iLeftBaseViewIdx != -1 )
    {
      iLeftBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iLeftBaseViewIdx];
    }

    if (iRightBaseViewIdx != -1 )
    {
      iRightBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iRightBaseViewIdx];
    }
    cCurModel.createSingleModel(-1, -1, iSynthViewIdx, iLeftBaseViewSIdx, iRightBaseViewSIdx, false, m_iBlendMode );
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    // read in depth and video
    for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
    {
      m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
      bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

      m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
      bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

      if ( iFrame >= m_iFrameSkip )
      {
        Int iBaseViewSIdx = m_cCameraData.getBaseId2SortedId()[iBaseViewIdx];
        cCurModel.setBaseView( iBaseViewSIdx, apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], NULL, NULL );
      }
    }

    if ( iFrame < m_iFrameSkip ) // Skip Frames
    {
      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)iFrame );

    for(Int iSynthViewIdx=0; iSynthViewIdx < m_iNumberOfOutputViews; iSynthViewIdx++ )
    {

      Int  iLeftBaseViewIdx  = -1;
      Int  iRightBaseViewIdx = -1;

      Bool bIsBaseView = false;

      Int iRelDistToLeft;
      Bool bHasLRView = m_cCameraData.getLeftRightBaseView( iSynthViewIdx, iLeftBaseViewIdx, iRightBaseViewIdx, iRelDistToLeft, bIsBaseView );
      Bool bHasLView = ( iLeftBaseViewIdx != -1 );
      Bool bHasRView = ( iRightBaseViewIdx != -1 );

      switch( m_iRenderDirection )
      {
        /// INTERPOLATION
      case 0:
        assert( bHasLRView || bIsBaseView );

        if ( !bHasLRView && bIsBaseView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx,
                                    m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iLeftBaseViewIdx ][iRightBaseViewIdx],
                                    m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx][iSynthViewIdx]    ,
                                    m_cCameraData.getBaseViewShiftLUTI ()[iRightBaseViewIdx][iLeftBaseViewIdx] ,
                                    iRelDistToLeft,
                                    NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_MERGED, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM LEFT
      case 1:

        if ( !bHasLView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iLeftBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, m_cCameraData.getSynthViewShiftLUTI()[iLeftBaseViewIdx ][iSynthViewIdx], NULL, NULL, NULL, -1, NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_LEFT, pcPicYuvSynthOut );
        }
        break;
        /// EXTRAPOLATION FROM RIGHT
      case 2:            // extrapolation from right
        if ( !bHasRView ) // View to render is BaseView
        {
          std::cout << "Copied    Frame " << iFrame << " of BaseView " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          apcPicYuvBaseVideo[iRightBaseViewIdx]->copyToPic( pcPicYuvSynthOut ); // Copy Original
        }
        else  // Render
        {
          std::cout << "Rendering Frame " << iFrame << " of View " << (Double) m_cCameraData.getSynthViewNumbers()[iSynthViewIdx] / VIEW_NUM_PREC  << std::endl;
          cCurModel.setSingleModel( iSynthViewIdx, NULL, NULL, m_cCameraData.getSynthViewShiftLUTI()[iRightBaseViewIdx ][iSynthViewIdx], NULL, -1, NULL );
          cCurModel.getSynthVideo ( iSynthViewIdx, VIEWPOS_RIGHT, pcPicYuvSynthOut );
        }
        break;
      }

      // Write Output
#if PIC_CROPPING
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, 0, 0, 0, 0 );
#else
      m_apcTVideoIOYuvSynthOutput[m_bSweep ? 0 : iSynthViewIdx]->write( pcPicYuvSynthOut, aiPad );
#endif
    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}

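// RenderMode 10: for each input view other than view 0, derive a used-pels
// map from the depth of base view 0 (TRenTop::getUsedSamplesMap()) using the
// shift LUTs between view 0 and that view, and write the map as YUV output.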
Void TAppRendererTop::renderUsedPelsMap()
{
  xCreateLib();
  xInitLib();

  // Create buffers for input views
  std::vector<TComPicYuv*> apcPicYuvBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvBaseDepth;

  // TemporalImprovement Filter
  std::vector<TComPicYuv*> apcPicYuvLastBaseVideo;
  std::vector<TComPicYuv*> apcPicYuvLastBaseDepth;

  Int aiPad[2] = { 0, 0 };

  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    TComPicYuv* pcNewVideoPic = new TComPicYuv;
    TComPicYuv* pcNewDepthPic = new TComPicYuv;

    pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseVideo.push_back(pcNewVideoPic);

    pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
    apcPicYuvBaseDepth.push_back(pcNewDepthPic);

    //Temporal improvement Filter
    if ( m_bTempDepthFilter )
    {
      pcNewVideoPic = new TComPicYuv;
      pcNewDepthPic = new TComPicYuv;

      pcNewVideoPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseVideo.push_back(pcNewVideoPic);

      pcNewDepthPic->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );
      apcPicYuvLastBaseDepth.push_back(pcNewDepthPic);
    }
  }

  // Create Buffer for synthesized View
  TComPicYuv* pcPicYuvSynthOut = new TComPicYuv;
  pcPicYuvSynthOut->create( m_iSourceWidth, m_iSourceHeight, 1, 1, 1 );

  Bool bAnyEOS = false;

  Int iNumOfRenderedFrames = 0;
  Int iFrame = 0;

  while ( ( ( iNumOfRenderedFrames < m_iFramesToBeRendered ) || ( m_iFramesToBeRendered == 0 ) ) && !bAnyEOS )
  {
    // set shift LUT

    // read in depth and video
    for(Int iBaseViewIdx=0; iBaseViewIdx < m_iNumberOfInputViews; iBaseViewIdx++ )
    {
      m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->read( apcPicYuvBaseVideo[iBaseViewIdx], aiPad );
      apcPicYuvBaseVideo[iBaseViewIdx]->extendPicBorder();
      bAnyEOS |= m_apcTVideoIOYuvVideoInput[iBaseViewIdx]->isEof();

      m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->read( apcPicYuvBaseDepth[iBaseViewIdx], aiPad );
      apcPicYuvBaseDepth[iBaseViewIdx]->extendPicBorder();
      bAnyEOS |= m_apcTVideoIOYuvDepthInput[iBaseViewIdx]->isEof();

      if ( m_bTempDepthFilter && (iFrame >= m_iFrameSkip) )
      {
        m_pcRenTop->temporalFilterVSRS( apcPicYuvBaseVideo[iBaseViewIdx], apcPicYuvBaseDepth[iBaseViewIdx], apcPicYuvLastBaseVideo[iBaseViewIdx], apcPicYuvLastBaseDepth[iBaseViewIdx], ( iFrame == m_iFrameSkip) );
      }
    }

    if ( iFrame < m_iFrameSkip ) // Skip Frames
    {
      std::cout << "Skipping Frame " << iFrame << std::endl;

      iFrame++;
      continue;
    }

    m_cCameraData.update( (UInt)iFrame );

    for(Int iViewIdx=1; iViewIdx < m_iNumberOfInputViews; iViewIdx++ )
    {
      std::cout << "Rendering UsedPelsMap for Frame " << iFrame << " of View " << (Double) m_cCameraData.getBaseViewNumbers()[iViewIdx] << std::endl;

      Int iViewSIdx      = m_cCameraData.getBaseId2SortedId()[iViewIdx];
      Int iFirstViewSIdx = m_cCameraData.getBaseId2SortedId()[0];

      AOT( iViewSIdx == iFirstViewSIdx );

      Bool bFirstIsLeft = (iFirstViewSIdx < iViewSIdx);

      m_pcRenTop->setShiftLUTs(
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTD()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        m_cCameraData.getBaseViewShiftLUTI()[0][iViewIdx],
        -1
        );

      m_pcRenTop->getUsedSamplesMap( apcPicYuvBaseDepth[0], pcPicYuvSynthOut, bFirstIsLeft );

      // Write Output
#if PIC_CROPPING
      m_apcTVideoIOYuvSynthOutput[iViewIdx-1]->write( pcPicYuvSynthOut, 0, 0, 0 );
#else
      m_apcTVideoIOYuvSynthOutput[iViewIdx-1]->write( pcPicYuvSynthOut, aiPad );
#endif

    }
    iFrame++;
    iNumOfRenderedFrames++;
  }

  // Delete Buffers
  for ( UInt uiBaseView = 0; uiBaseView < m_iNumberOfInputViews; uiBaseView++ )
  {
    apcPicYuvBaseVideo[uiBaseView]->destroy();
    delete apcPicYuvBaseVideo[uiBaseView];

    apcPicYuvBaseDepth[uiBaseView]->destroy();
    delete apcPicYuvBaseDepth[uiBaseView];

    // Temporal Filter
    if ( m_bTempDepthFilter )
    {
      apcPicYuvLastBaseVideo[uiBaseView]->destroy();
      delete apcPicYuvLastBaseVideo[uiBaseView];

      apcPicYuvLastBaseDepth[uiBaseView]->destroy();
      delete apcPicYuvLastBaseDepth[uiBaseView];
    }
  }
  pcPicYuvSynthOut->destroy();
  delete pcPicYuvSynthOut;

  xDestroyLib();

}