
vpraycasting.cpp

00001 //deprecated
00003 //
00004 //  PROJECT.....: vpat - Creating Virtual Patients
00005 //  RESPONSIBLE.: Carla Freitas and Luciana Nedel
00006 //
00007 //  FILE........: vpraycasting.cpp
00008 //  DESCRIPTION.: Contains the VPRayCasting class implementation.
00009 //
00010 //  AUTHOR......: Isabel Harb Manssour
00011 //  DATE........: August/08/2000
00012 //  DESCRIPTION.: Constructor definition and method implementations.
00013 //
00014 //  AUTHOR......: Isabel Harb Manssour
00015 //  DATE........: October/28/2002
00016 //  DESCRIPTION.: New method implementations. Code adjustments.
00017 //
00019 
00020 
00021 #include <vpraycasting.h>
00022 #include <vpimage.h>
00023 #include <vpvolume.h>
00024 #include <vpplane.h>
00025 #include <vpvector3d.h>
00026 #include <vpdirectionallight.h>
00027 #include <stdio.h>
00028 #include <cmath>
00029 #include <list>
00030 using namespace std;
00031 
00032 
00034 // Description: Class "VPRayCasting" parameterless constructor, 
00035 //              which sets the default ray casting visualization 
00036 //              values.
00037 // Parameters.: -
00038 // Return.....: -
00039 
00040 VPRayCasting::VPRayCasting () {
00041  char nomearq[30], aux[50];
00042  int tmp;
00043  FILE *fp;
00044 
00045  virtualYDimension = 0;
00046  dFront = dTop = dLeft = dBack = dBottom = dRight = 0.0;
00047  shadingMethod = LOCALSHADING; // GOURAUDSHADING;
00048  endOfSBand = 10;   
00049  endOfTBand = 18;
00050  light = NULL;
00051  lightVolumeComputation = true;
00052 
00053  printf("\nEnter the name of the parameter file: ");
00054  scanf("%29s", nomearq); // bounded read; gets() is unsafe
00055 
00056  fp = fopen(nomearq, "r"); 
00057 
00058  if(fp == NULL) {
00059     printf("\n Error opening the file!");
00060     return;
00061  }
00062 
00063  fscanf(fp, "%s\n", aux); //Sample Step
00064  fscanf(fp, "%f\n", &sampleStep);
00065 
00066  fscanf(fp, "%s\n", aux); //TypeOfCuttingTool
00067  fscanf(fp, "%d\n", &typeOfCuttingTool);
00068 
00069  fscanf(fp, "%s\n", aux); //ThreePointsOfTheFrontCuttingPlane
00070  fscanf(fp, "%f%f%f\n", &P1.x,&P1.y,&P1.z);
00071  fscanf(fp, "%f%f%f\n", &P2.x,&P2.y,&P2.z);
00072  fscanf(fp, "%f%f%f\n", &P3.x,&P3.y,&P3.z);
00073 
00074  fscanf(fp, "%s\n", aux); //OneVolumePointToSimulateABackCuttingPlane
00075  fscanf(fp, "%d\n", &backCuttingPlane);
00076 
00077  fscanf(fp, "%s\n", aux); //ThreePointsOfTheBackCuttingPlane
00078  fscanf(fp, "%f%f%f\n", &backP1.x,&backP1.y,&backP1.z);
00079  fscanf(fp, "%f%f%f\n", &backP2.x,&backP2.y,&backP2.z);
00080  fscanf(fp, "%f%f%f\n", &backP3.x,&backP3.y,&backP3.z);
00081 
00082  fscanf(fp, "%s\n", aux); //TwoPointsToDefineTheParallelepipedCuttingTool
00083  fscanf(fp, "%f%f%f\n", &p1Parallelepiped.x,&p1Parallelepiped.y,&p1Parallelepiped.z);
00084  fscanf(fp, "%f%f%f\n", &p2Parallelepiped.x,&p2Parallelepiped.y,&p2Parallelepiped.z);
00085 
00086  fscanf(fp, "%s\n", aux); //OneVolumePointToSimulateADistanceForAnObliquePlane
00087  fscanf(fp, "%d\n", &distanceObliquePlane);
00088 
00089  fscanf(fp, "%s\n", aux); //LevThreshold_LevWidth_ValuesForClassification
00090  fscanf(fp, "%f%f\n", &levThreshold, &levWidth);
00091  
00092  fscanf(fp, "%s\n", aux); //KindOfOpacityFunction
00093  fscanf(fp, "%d\n", &typeOfOpacityFunction);
00094 
00095  fscanf(fp, "%s\n", aux); //InitialValueForLinearOpacity
00096  fscanf(fp, "%d\n", &initialValueForLinearOpacity);
00097 
00098  fscanf(fp, "%s\n", aux); //OptionalCameraLocation
00099  fscanf(fp, "%f%f%f\n", &cameraLocation.x,&cameraLocation.y,&cameraLocation.z);
00100 
00101  fscanf(fp, "%s\n", aux); //AmbientLight
00102  fscanf(fp, "%f\n", &ambientLight);
00103 
00104  fscanf(fp, "%s\n", aux); //DiffuseLight
00105  fscanf(fp, "%f\n", &diffuseLight);
00106 
00107  fscanf(fp, "%s\n", aux); //SpecularExponent
00108  fscanf(fp, "%d\n", &specularExponent);
00109 
00110  fscanf(fp, "%s\n", aux); //UseSpecular?
00111  fscanf(fp, "%d\n", &tmp);
00112  if (tmp)
00113      specular = true;
00114  else
00115      specular = false;
00116 
00117  fscanf(fp, "%s\n", aux); //negativeLightContribution?
00118  fscanf(fp, "%d\n", &negativeLightContribution);
00119 
00120  fscanf(fp, "%s\n", aux); //KindOfIntegration(1-VolumesWithTheSameSize,2-FunctionalVolumeSmall)
00121  fscanf(fp, "%d\n", &integrationType);
00122 
00123  fscanf(fp, "%s\n", aux); //RangeOfSlices(ForIntegration=2)
00124  fscanf(fp, "%d%d\n", &firstSlice,&lastSlice);
00125 
00126  fscanf(fp, "%s\n", aux); //Density value used in CVP technique
00127  fscanf(fp, "%d\n", &valueCVP);
00128 
00129  fclose(fp);
00130 
00131 }
00132 
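// NOTE (illustrative): the constructor above reads a plain-text parameter file
// consisting of one label line followed by its value(s), in exactly the order
// of the fscanf calls. A hypothetical example file (all values are made up):
//
//   SampleStep
//   0.5
//   TypeOfCuttingTool
//   0
//   ThreePointsOfTheFrontCuttingPlane
//   0 15 0
//   112 15 0
//   0 70 127
//   OneVolumePointToSimulateABackCuttingPlane
//   60
//   ThreePointsOfTheBackCuttingPlane
//   0 90 0
//   112 90 0
//   0 120 127
//   TwoPointsToDefineTheParallelepipedCuttingTool
//   10 10 10
//   60 60 60
//   OneVolumePointToSimulateADistanceForAnObliquePlane
//   30
//   LevThreshold_LevWidth_ValuesForClassification
//   40 20
//   KindOfOpacityFunction
//   1
//   InitialValueForLinearOpacity
//   30
//   OptionalCameraLocation
//   0 0 300
//   AmbientLight
//   0.3
//   DiffuseLight
//   0.7
//   SpecularExponent
//   20
//   UseSpecular?
//   1
//   negativeLightContribution?
//   0
//   KindOfIntegration(1-VolumesWithTheSameSize,2-FunctionalVolumeSmall)
//   1
//   RangeOfSlices(ForIntegration=2)
//   0 127
//   DensityValueUsedInCVPTechnique
//   100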
00133 
00135 // Description: Method "vpSetDefaultParameters" sets default attribute
00136 //              values for volume visualization.
00137 // Parameters.: VPGraphicObj *volume, VPCamera *camera
00138 // Return.....: -
00139 
00140 void VPRayCasting::vpSetDefaultParameters (VPGraphicObj *volume, VPCamera *camera) {
00141 
00142     VPPoint3D point;
00143     // To find the smallest dimension and set virtualYDimension
00144     if ( ((VPImage *)volume)->vpGetXDimension() < ((VPVolume *)volume)->vpGetZDimension() )
00145     {
00146         if ( ((VPVolume *)volume)->vpGetYDimension() < (((VPVolume *)volume)->vpGetZDimension()/3) ) // if the number of slices is very small...
00147             virtualYDimension = ((VPVolume *)volume)->vpGetZDimension() / 2;
00148         else
00149             virtualYDimension = ((VPVolume *)volume)->vpGetZDimension();
00150     }
00151     else 
00152     {
00153         if ( ((VPVolume *)volume)->vpGetYDimension() < (((VPVolume *)volume)->vpGetXDimension()/3) ) // if the number of slices is very small...
00154             virtualYDimension = ((VPImage *)volume)->vpGetXDimension() / 2;
00155         else
00156             virtualYDimension = ((VPImage *)volume)->vpGetXDimension();
00157     }
00158     
00159     ((VPVolume *)volume)->vpAdjustScale(((VPImage *)volume)->vpGetXDimension(), ((VPImage *)volume)->vpGetYDimension(), ((VPVolume *)volume)->vpGetZDimension());
00160     volumeScale = ((VPVolume *)volume)->vpGetScale();
00161     vpSetCameraDefault(camera, volume);
00162     vpSetLightDefault(light, camera);
00163 
00164     // Initialization of inner structures visualization parameters
00165     point = ((VPVolume *)volume)->vpGetCenterFocalPoint(); // the center of the "object"
00166     point.z = 0;
00167     while ( ((VPVolume *)volume)->vpGetValue(point.x,point.y,point.z) <= 40 )
00168             point.z++;
00169     endOfSBand = point.z + ( ((VPVolume *)volume)->vpGetZDimension() * 0.06 ); // 6%    
00170     endOfTBand = endOfSBand + ( ((VPVolume *)volume)->vpGetZDimension() * 0.06 );
00171 }
00172 
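// The render methods below resample the volume at fractional ray positions
// through vpTrilinearInterpolation(), which is defined elsewhere in the
// library. The following stand-alone sketch shows the general technique only;
// the voxel accessor "getVoxel" is a placeholder, not part of VPRayCasting:

static float vpTrilerpSketch(float x, float y, float z,
                             float (*getVoxel)(int, int, int)) {
    int   i  = (int) x,  j  = (int) y,  k  = (int) z;    // cell origin
    float fx = x - i,    fy = y - j,    fz = z - k;       // fractional offsets
    // interpolate along x on the four edges of the surrounding cell
    float c00 = getVoxel(i,j,k)     * (1-fx) + getVoxel(i+1,j,k)     * fx;
    float c10 = getVoxel(i,j+1,k)   * (1-fx) + getVoxel(i+1,j+1,k)   * fx;
    float c01 = getVoxel(i,j,k+1)   * (1-fx) + getVoxel(i+1,j,k+1)   * fx;
    float c11 = getVoxel(i,j+1,k+1) * (1-fx) + getVoxel(i+1,j+1,k+1) * fx;
    // then along y, then along z
    float c0 = c00 * (1-fy) + c10 * fy;
    float c1 = c01 * (1-fy) + c11 * fy;
    return c0 * (1-fz) + c1 * fz;
}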
00173 
00175 // Description: Method "vpRenderLivroMonoGray" implements the brute  
00176 //              force ray casting visualization algorithm for gray 
00177 //              levels, as described in the "Introduction to Volume 
00178 //              Rendering" book.
00179 // Parameters.: VPScene *s, VPCamera *c (active camera), 
00180 //              int visualizationType, unsigned int **image 
00181 //              (pointer to the image);
00182 // Return.....: -
00183 
00184 void VPRayCasting::vpRenderLivroMonoGray(VPScene *s, VPCamera *c, int visualizationType, 
00185                                   int opacityComputation, unsigned int **image) {
00186  int line=0, column=0, depth=0, i=0, j=0, k=0, initialLineValue=0, 
00187      finalLineValue=0, initialColumnValue=0, finalColumnValue=0, flagCVP=0;
00188  short int planes[6];
00189  float yCorrection=0, sIn=0, sOut=0, sampleColor, antColor;
00190  VPVector3D projectionDirection, gradient, deltaX, deltaY, deltaZ, 
00191             vAux, correctProjectionDirection, antGradient;
00192  VPPoint3D p1, pIn, pOut, pAux, minPlaneProjection, volumeDimension, 
00193            origin, temp; 
00194  VPScene *scene = s;
00195  list<VPLight*> lights = scene->vpGetLights();
00196  list<VPGraphicObj*> objects = scene->vpGetObjects();
00197 
00198  // To get a pointer to the volume object
00199  VPGraphicObj *volume = *objects.begin();
00200 
00201  // Pointer to the right camera 
00202  VPCamera *camera = c; 
00203 
00204  // Get a pointer to the first light of the list 
00205  light = *lights.begin();
00206 
00207  // Get color and opacity tables
00208  VPTable controlTables = ((VPVolume *)volume)->vpGetControlTables();
00209 
00210  // If the visualization type is DEFAULTVIS, set the default parameters
00211  if(visualizationType==DEFAULTVIS) {
00212      vpSetDefaultParameters (volume, camera);
00213      visualizationType = MONOGRAYVIS;
00214  }
00215 
00216  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
00217                 volumeDimension, planes, finalLineValue, finalColumnValue,
00218                 deltaX, deltaY, deltaZ, volume);
00219  
00220  // For specular processing
00221  VPPoint3D observer = camera->vpGetLocation();   
00222  VPVector3D obsPoint;                           
00223 
00224  yCorrection = ((float) ((VPImage *)volume)->vpGetYDimension()) / ((float) virtualYDimension);
00225 
00226  switch(typeOfOpacityFunction)
00227  {
00228     case LEVOYSURFACEOPACITY: SetClassificationTable();
00229                         break;
00230     case LINEAROPACITY: SetClassificationTable2();
00231                         break;
00232  }
00233 
00234  float magnitudeMax = sqrt( (255*sqrt(3)) * (255*sqrt(3)) + (255*sqrt(3)) * (255*sqrt(3)) + (255*sqrt(3)) * (255*sqrt(3)) ); // equals 255*3 = 765; not referenced again in this method
00235 
00236  // To set the light with the same direction of the observer...
00237  correctProjectionDirection = projectionDirection;
00238  correctProjectionDirection.x = -correctProjectionDirection.x; 
00239  correctProjectionDirection.y = -correctProjectionDirection.y; 
00240  correctProjectionDirection.z = -correctProjectionDirection.z; 
00241  // To set light position
00242  //correctProjectionDirection.x = 60; 
00243  //correctProjectionDirection.y = -40; 
00244  //correctProjectionDirection.z = -30;
00245  //correctProjectionDirection.vpNormalize();
00246  vpSetLightDirection(correctProjectionDirection);
00247 
00248 
00249  // 2- Cutting technique: one cut plane for the sphere example
00250  // plane1 = P1(0,40,0) P2(99,40,0) P3(0,80,99)
00251  // vet1 (P1->P2) = (99,0,0) 
00252  // vet2 (P1->P3) = (0,40,99) 
00253  // 2- Cutting technique: one cut plane for the head.i example
00254  // plane1 = P1(0,15,0) P2(112,15,0) P3(0,70,127)
00255  // vet1 (P1->P2) = (112,0,0) 
00256  // vet2 (P1->P3) = (0,70,127) 
00257   VPVector3D vet1(P2.x-P1.x,P2.y-P1.y,P2.z-P1.z), vet2(P3.x-P1.x,P3.y-P1.y,P3.z-P1.z), plane1Normal; // Good result if "if(f>0&&g<0) pIn=pInNew;"
00258   vet1.vpNormalize(); vet2.vpNormalize(); 
00259   plane1Normal = vet1.vpCrossProduct(vet2);
00260   float d = - (plane1Normal.x*0 + plane1Normal.y*P1.y + plane1Normal.z*0); 
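  // Note: the "d" term above uses only P1.y, which implicitly assumes
  // P1.x == P1.z == 0 (true for the example planes listed in the comments);
  // for an arbitrary P1 the full dot product
  // -(plane1Normal.x*P1.x + plane1Normal.y*P1.y + plane1Normal.z*P1.z) would be needed.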
00261 
00262 
00263   // Scan view line for the ray tracing 
00264   for (line=0; line<finalLineValue; line++) { 
00265      // Plane point computation
00266      p1 = minPlaneProjection;
00267      p1 = p1 + deltaY * line; 
00268 
00269      // Scan view column
00270      for (column=0; column<finalColumnValue; column++) {
00271 
00272         // Volume and ray intersection point computation
00273         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
00274                                       projectionDirection, pIn, pOut, sIn, sOut) )
00275         { 
00276             // if the ray intersects the volume then we have to process the samples
00277 
00278             // variables initialization
00279             gradient.x = gradient.y = gradient.z = 0;
00280             lum   = (float) 0.0;
00281             a     = (float) 0.0;
00282             sampleColor   = (float) 0.0;
00283             depth = ivalue = flagCVP = 0;
00284             antColor = antGradient.x = antGradient.y = antGradient.z = 0;
00285 
00286             // BAKA cut: a=content(density value) b=r2
00287             bool FLAG=false;
00288 
00289             pIn.y = pIn.y*yCorrection;
00290 
00291             vpDefineA(pIn, pOut, p1, deltaZ, yCorrection, plane1Normal, volume, volumeDimension, d);
00292 
00293             // 7- Cutting technique: value range (a!=b: content) 
00294             //    Cutting by inclusion: a=first>=(Rp,150)  and b=first>=(Rp,250) 
00295             //while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) && sampleColor<135) { //200, 130
00296             //  i = (int) (pIn.x); //(column);
00297             //  j = (int) (pIn.y); //(line);
00298             //  k = (int) (pIn.z); //(depth);
00299             //  sampleColor = ((VPVolume *)volume)->vpGetValue(i,j,k);
00300             //  pIn = pIn + deltaZ; // projectionDirection; 
00301             //}
00302             //while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) && sampleColor>70) { //180, 120
00303             //  i = (int) (pIn.x); //(column);
00304             //  j = (int) (pIn.y); //(line);
00305             //  k = (int) (pIn.z); //(depth);
00306             //  sampleColor = ((VPVolume *)volume)->vpGetValue(i,j,k);
00307             //  pIn = pIn + deltaZ; // projectionDirection; 
00308             //}
00309 
00310 
00311             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension())) { 
00312                 if (typeOfCuttingTool==TWOCUTPLANESINCLUSIONOPAC) 
00313                     depth++;
00314 
00315                 i = (int) (pIn.x); //(column);
00316                 j = (int) (pIn.y); //(line);
00317                 k = (int) (pIn.z); //(depth);
00318 
00319 
00320                 // Sample processing in accordance with the shading method (LOCALSHADING)
00321                 GradientSobel (i, j, k, gradient, &gm, volume); 
00322 
00323                 sampleColor = vpTrilinearInterpolation(i,j,k,volume,pIn);
00324 
00325 
00326                 // BAKA cut: a=content(density value) b=r2
00327                 //if ( FLAG==false )            // process the samples only after finding
00328                 //  if ( sampleColor < 160 ) {  // the first sample with value 230
00329                 //       pIn = pIn + deltaZ; continue;
00330                 //  }
00331                 //  else
00332                 //      FLAG = true;
00333 
00334                 //if ( FLAG==false )                // process the samples only after finding
00335                 //  if ( sampleColor < 100 ) {  // the first sample with value 230
00336                 //       pIn = pIn + deltaZ; continue;
00337                 //  }
00338                 //  else 
00339                 //      FLAG = true;            // stop processing the samples after
00340                 //else if ( sampleColor > 250)  // the value 230 is found
00341                 //          break;
00342                 
00343                 // BAKA cut: a=content(density value) b=content(density value) 
00344                 //if ( FLAG==false )            // process the samples with density between 200 and 250
00345                 //  if ( sampleColor < 110 ) {  
00346                 //       pIn = pIn + deltaZ; continue;
00347                 //  }
00348                 //  else {
00349                 //      if ( sampleColor > 130 ) 
00350                 //          break;
00351                 //      else
00352                 //          FLAG = true;
00353                 //  }
00354 
00355                 // 7- BAKA for value range (a!=b: content)
00356                 // if ( sampleColor > 250 ) break;
00357 
00358                 // 1- BAKA for just one slice (line around the slice)
00359                 if (OBLIQUESLICE==typeOfCuttingTool) {
00360                     if ( ( (i<4) || (i>=(volumeDimension.x-4)) ) || ( (j<4) || (j>=(((VPImage *)volume)->vpGetYDimension()-4)) ) || ( (k<4) || (k>=(volumeDimension.z-4)) ) )
00361                         sampleColor = 255;
00362                     ivalue = (int) sampleColor;
00363                 }
00364                 if (sampleColor>255 || sampleColor<0)
00365                     printf("i=%d j=%d k=%d",i,j,k);
00366                 Classify (&sampleColor, &gm, &luminance, &alpha);
00367 
00368                 alpha = 1 - pow(1-alpha, sampleStep); // opacity correction for the sampling distance
00369                 if (specular) {
00370                     obsPoint.x = i - observer.x;
00371                     obsPoint.y = j - observer.y;
00372                     obsPoint.z = k - observer.z;
00373                     obsPoint.vpNormalize();
00374                     attenuate = ShadeSpecular(gradient,obsPoint);
00375                 }
00376                 else 
00377                     attenuate = Shade (gradient); 
00378                 luminance = luminance * attenuate;
00379 
00380                 if (typeOfCuttingTool == TWOCUTPLANESINCLUSIONOPAC) { // process the sample IN two cutting planes (needs a different "while" implementation)
00381                     if ( (alpha && depth<=1) || (alpha && ( (pIn.y<(backCuttingPlane+1)) && (pIn.y>(backCuttingPlane-1)) )) ) {
00382                             ivalue = (int) vpTrilinearInterpolation(i,j,k,volume,pIn);
00383                             break; 
00384                     }
00385                 }
00386                 else
00387                     vpDefineB(i, j, k, alpha, volume, pIn, depth, ivalue);
00388 
00389                 //*----- Front-to-back alpha blending compositing
00390                 if (alpha > 0.0 && typeOfCuttingTool != TWOCUTPLANESINCLUSIONOPAC)  {
00391                         at = alpha * (1.0f - a);
00392                         lum = lum + (luminance * at);
00393                         a = a + at;
00394                         //*----- do early ray termination 
00395                         if (a > 0.97)
00396                             break;
00397                 }
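                // (The block above is the standard front-to-back compositing
                // recurrence: lum += luminance * alpha * (1 - a) and
                // a += alpha * (1 - a), with the ray terminated early once the
                // accumulated opacity exceeds 0.97.)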
00398 
00399                 // Next sample
00400                 pIn = pIn + deltaZ; // projectionDirection; 
00401 
00402                 if(typeOfCuttingTool == CVP) {
00403                         if (flagCVP == 0 && sampleColor > valueCVP)
00404                             flagCVP = sampleColor;
00405                         else if (flagCVP > valueCVP && sampleColor < valueCVP)
00406                             break;
00407                 }
00408 
00409                 if (typeOfCuttingTool==OBLIQUESLICE)
00410                     pIn.x = pIn.y = pIn.z = -10;
00411 
00412             } // while (depth)
00413 
00414             if ( (typeOfCuttingTool != TWOCUTPLANESINCLUSIONOPAC) && (typeOfCuttingTool != OBLIQUESLICE))
00415                 ivalue = (int) lum;
00416             if (a < 0)
00417                     image[line][column] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
00418             else
00419                     image[line][column] = (unsigned int) (255<<24 | ivalue<<16 | ivalue<<8 | ivalue);
00420     
00421         } // if
00422 
00423         // Next plane point computation
00424         p1 = p1 + deltaX; 
00425 
00426     } // for (column)
00427 
00428  } // for (line)
00429 
00430 }
00431 
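// For reference, the per-ray compositing used throughout this file can be
// summarized by the stand-alone sketch below. The sample callbacks are
// placeholders standing in for the classification and shading steps above;
// they are not part of the VPRayCasting interface:

static float vpCompositeRaySketch(int numSamples,
                                  float (*sampleLuminance)(int),
                                  float (*sampleAlpha)(int)) {
    float lum = 0.0f;   // accumulated luminance
    float acc = 0.0f;   // accumulated opacity
    for (int s = 0; s < numSamples; s++) {
        float alpha = sampleAlpha(s);
        if (alpha > 0.0f) {
            float at = alpha * (1.0f - acc);  // weight left for this sample
            lum += sampleLuminance(s) * at;
            acc += at;
            if (acc > 0.97f)                  // early ray termination
                break;
        }
    }
    return lum;
}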
00432 
00434 // Description: Method "vpRenderLivroMultiModal" implements the brute  
00435 //              force ray casting visualization algorithm that 
00436 //              works with two volumes.
00437 // Parameters.: VPScene *s, VPCamera *c (active camera), 
00438 //              int visualizationType, unsigned int ***image 
00439 //              (pointer to the image);
00440 // Return.....: -
00441 
00442 void VPRayCasting::vpRenderLivroMultiModal(VPScene *s, VPCamera *c, int opacityComputation, unsigned int ***image) {
00443  int line=0, column=0, i=0, j=0, k=0, initialLineValue=0, ivalue1, ivalue2, ivalue3, depth,
00444      finalLineValue=0, initialColumnValue=0, finalColumnValue=0, i2=0, j2=0, k2=0, flagCVP=0;
00445  short int planes[6];
00446  float previousIlight=0, Ilight=0, sampleColor1=0, sampleColor2=0, 
00447        sIn=0, sOut=0, rayColorRed=0, rayColorGreen=0, rayColorBlue=0, 
00448        rayOpacity=0, sampleOpacity1=0, sampleOpacity2=0, yCorrection=0, 
00449        toRaio=0, auxLight=0, r, g, b;
00450  VPVector3D projectionDirection, gradient,
00451             deltaX, deltaY, deltaZ, vAux, correctProjectionDirection;
00452  VPPoint3D p1, pIn, pIn2, pOut, pAux, minPlaneProjection, volumeDimension, 
00453            volume2Dimension, origin, temp, point; 
00454  VPColor color1, color2;
00455  VPScene *scene = s;
00456  list<VPLight*> lights = scene->vpGetLights();
00457  list<VPGraphicObj*> objects = scene->vpGetObjects();
00458  list<VPGraphicObj*>::const_iterator iter;
00459 
00460  // To get a pointer to the first volume object
00461  VPGraphicObj *volume1 = *objects.begin();
00462  iter = objects.begin();
00463  iter++;
00464  // To get a pointer to the second volume object
00465  VPGraphicObj *volume2 = *iter;
00466 
00467  // Pointer to the right camera 
00468  VPCamera *camera = c; 
00469 
00470  // Get a pointer to the first light of the list 
00471  light = *lights.begin();
00472 
00473  // For specular processing
00474  VPPoint3D observer = camera->vpGetLocation();   
00475  VPVector3D obsPoint;   
00476 
00477  // Get color and opacity tables 
00478  VPTable controlTables1 = ((VPVolume *)volume1)->vpGetControlTables();
00479  VPTable controlTables2 = ((VPVolume *)volume2)->vpGetControlTables();
00480 
00481  // If this is the first call to the visualization algorithm, set the default parameters
00482  if(virtualYDimension==0) {
00483     // To find the smallest dimension and set virtualYDimension
00484     if ( ((VPImage *)volume1)->vpGetXDimension() < ((VPVolume *)volume1)->vpGetZDimension() )
00485         virtualYDimension = ((VPVolume *)volume1)->vpGetZDimension();
00486     else 
00487         virtualYDimension = ((VPImage *)volume1)->vpGetXDimension();    
00488 
00489     ((VPVolume *)volume1)->vpAdjustScale(((VPImage *)volume1)->vpGetXDimension(), ((VPImage *)volume1)->vpGetYDimension(), ((VPVolume *)volume1)->vpGetZDimension());
00490     ((VPVolume *)volume2)->vpAdjustScale(((VPImage *)volume2)->vpGetXDimension(), ((VPImage *)volume2)->vpGetYDimension(), ((VPVolume *)volume2)->vpGetZDimension());
00491     vpSetCameraDefault(camera, volume1);
00492     vpSetLightDefault(light, camera);
00493 
00494     // Initialization of inner structures visualization parameters 
00495     point = ((VPVolume *)volume1)->vpGetCenterFocalPoint(); // the center of the "object"
00496     point.z = 0;
00497     while ( ((VPVolume *)volume1)->vpGetValue(point.x,point.y,point.z) <= 40 )
00498             point.z++;
00499     endOfSBand = point.z + ( ((VPVolume *)volume1)->vpGetZDimension() * 0.06 ); // 6%   
00500     endOfTBand = endOfSBand + ( ((VPVolume *)volume1)->vpGetZDimension() * 0.06 );
00501  }
00502 
00503  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
00504                 volumeDimension, planes, finalLineValue, finalColumnValue,
00505                 deltaX, deltaY, deltaZ, volume1);
00506  volume2Dimension.x = ((VPImage *)volume2)->vpGetXDimension(); 
00507  volume2Dimension.y = ((VPImage *)volume2)->vpGetYDimension(); 
00508  volume2Dimension.z = ((VPVolume *)volume2)->vpGetZDimension(); 
00509  volume2Dimension -= 1; // because of the volume matrix index
00510 
00511  yCorrection = ((float) ((VPImage *)volume1)->vpGetYDimension()) / ((float) virtualYDimension);
00512 
00513  switch(typeOfOpacityFunction)
00514  {
00515     case LEVOYSURFACEOPACITY: SetClassificationTable();
00516                         break;
00517     case LINEAROPACITY: SetClassificationTable2();
00518                         break;
00519  }
00520 
00521  // To set the light with the same direction of the observer...
00522  correctProjectionDirection = projectionDirection;
00523  correctProjectionDirection.x = -correctProjectionDirection.x; 
00524  correctProjectionDirection.y = -correctProjectionDirection.y; 
00525  correctProjectionDirection.z = -correctProjectionDirection.z; 
00526  // To set a different light position
00527  //correctProjectionDirection.x = 0;
00528  //correctProjectionDirection.y = -40;
00529  //correctProjectionDirection.z = -30;
00530  //correctProjectionDirection.vpNormalize();
00531  vpSetLightDirection(correctProjectionDirection);
00532 
00533  // 2- Cutting technique: one cut plane 
00534   VPVector3D vet1(P2.x-P1.x,P2.y-P1.y,P2.z-P1.z), vet2(P3.x-P1.x,P3.y-P1.y,P3.z-P1.z), plane1Normal; // Good result if "if(f>0&&g<0) pIn=pInNew;"
00535   vet1.vpNormalize(); vet2.vpNormalize(); 
00536   plane1Normal = vet1.vpCrossProduct(vet2);
00537   float d = - (plane1Normal.x*0 + plane1Normal.y*P1.y + plane1Normal.z*0); 
00538 
00539   // Scan view line for the ray tracing 
00540   for (line=0; line<finalLineValue; line++) { 
00541 
00542      // Plane point computation
00543      p1 = minPlaneProjection;
00544      p1 = p1 + deltaY * line; 
00545 
00546      // Scan view column
00547      for (column=0; column<finalColumnValue; column++) {
00548 
00549         // Volume and radius intersection point computation
00550         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
00551                                       projectionDirection, pIn, pOut, sIn, sOut) )
00552         { 
00553             // if the ray intersects the volume then we have to process the samples
00554             sampleColor1 = sampleColor2 = sampleOpacity1 = sampleOpacity2 = (float) 0.0;
00555             rayColorRed = rayColorGreen = rayColorBlue = rayOpacity = (float) 0.0;
00556             gradient.x = gradient.y = gradient.z = (float) 0.0;
00557             lum = a = (float) 0.0;
00558             depth = ivalue = ivalue1 = ivalue2 = ivalue3 = flagCVP = 0;
00559 
00560             // BAKA cut: a=content(density value) b=r2
00561             bool FLAG=false;
00562             int intFLAG=0;
00563 
00564             pIn.y = pIn.y*yCorrection;
00565 
00566             vpDefineA(pIn, pOut, p1, deltaZ, yCorrection, plane1Normal, volume1, volumeDimension, d);
00567 
00568             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume1)->vpGetYDimension())) { 
00569 
00570                 if (typeOfCuttingTool==TWOCUTPLANESINCLUSIONOPAC) 
00571                     depth++;
00572 
00573                 i = (int) (pIn.x); //(column);
00574                 j = (int) (pIn.y); //(line);
00575                 k = (int) (pIn.z); //(depth);
00576 
00577                 // Sample processing in accordance with the shading method (LOCALSHADING)
00578                 GradientSobel (i, j, k, gradient, &gm, volume1); 
00579                 sampleColor1 = vpTrilinearInterpolation(i,j,k,volume1,pIn);
00580 
00581                 if ( ( volumeDimension.x != volume2Dimension.x ) ||
00582                      ( ((VPImage *)volume1)->vpGetYDimension() != volume2Dimension.y ) ||
00583                      ( volumeDimension.z != volume2Dimension.z ) ) {
00584                     switch(integrationType)
00585                     {
00586                         case 1:
00587                                 i2 = (volume2Dimension.x*i) / volumeDimension.x;
00588                                 k2 = (volume2Dimension.z*k) / volumeDimension.z;
00589                                 pIn2.x = (volume2Dimension.x*pIn.x) / volumeDimension.x;
00590                                 pIn2.y = (volume2Dimension.y*pIn.y) / ( ((VPImage *)volume1)->vpGetYDimension() - 1 );
00591                                 j2 = (int)(pIn2.y); // due to rounding issues...
00592                                 pIn2.z = (volume2Dimension.z*pIn.z) / volumeDimension.z;
00593                                 sampleColor2 = vpTrilinearInterpolation(i2,j2,k2,volume2,pIn2);
00594                                 break;
00595                         case 2:
00596                                 if ( (j>=firstSlice) && (j<=lastSlice) )
00597                                 {
00598                                     i2 = (volume2Dimension.x*i) / volumeDimension.x;
00599                                     k2 = (volume2Dimension.z*k) / volumeDimension.z;
00600                                     pIn2.x = (volume2Dimension.x*pIn.x) / volumeDimension.x;
00601                                     pIn2.y = ( volume2Dimension.y * (pIn.y-(float)(firstSlice))) / ((float)(lastSlice-firstSlice+1));
00602                                     j2 = (int)(pIn2.y); // due to precision issues this was the best way to compute j2
00603                                     pIn2.z = (volume2Dimension.z*pIn.z) / volumeDimension.z;
00604                                     sampleColor2 = vpTrilinearInterpolation(i2,j2,k2,volume2,pIn2);
00605                                 }
00606                                 break;
00607                     }
00608 
00609                 }
00610                 else
00611                     sampleColor2 = vpTrilinearInterpolation(i,j,k,volume2,pIn);
00612 
00613                 // BAKA cut: a=content(density value) b=r2
00614                 //if ( FLAG==false )            // process the samples only after finding
00615                 //  if ( sampleColor2 < 150 ) { // the first sample with value 100 in volume2 (functional)
00616                 //       pIn = pIn + deltaZ; continue;
00617                 //  }
00618                 //  else
00619                 //      FLAG = true;
00620 
00621                 // BAKA cut: a=content(density value) b=r2
00622                 //if ( FLAG==false )            // process the samples only after finding
00623                 //  if ( sampleColor1 < 230 ) { // the first sample with value 230 in volume1 (anatomy)
00624                 //       pIn = pIn + deltaZ; continue;
00625                 //  }
00626                 //  else
00627                 //      FLAG = true;
00628 
00629                 // BAKA cut: a=content(density value) b=content(density value) 
00630                 //if ( FLAG==false )            // process the samples with density between 100 and 170 
00631                 //                              // (considering the volume that holds the anatomy - sampleColor1)
00632                 //  if ( sampleColor1 < 100 ) { 
00633                 //       pIn = pIn + deltaZ; continue;
00634                 //}
00635                 //  else {
00636                 //      if ( sampleColor1 > 170 ) 
00637                 //          break;
00638                 //      else
00639                 //          FLAG = true;
00640                 //  }
00641 
00642                 // BAKA cut: a=content(density value) b=content(density value) 
00643                 //if ( intFLAG == 0 )           // do not process the samples with density between 100 and 170
00644                 //{                             // (considering the volume that holds the anatomy - sampleColor1)
00645                 //  if ( sampleColor1 > 100 ) { 
00646                 //       intFLAG = 1;
00647                 //       pIn = pIn + deltaZ; continue;
00648                 //  }
00649                 //}
00650                 //else {
00651                 //      if ( intFLAG == 1 ) 
00652                 //      {
00653                 //          if (sampleColor1 != 255) {
00654                 //              pIn = pIn + deltaZ; 
00655                 //              continue;
00656                 //          }
00657                 //          else
00658                 //              intFLAG = 2;
00659                 //      }
00660                 //}
00661 
00662                 // BAKA cut: a=geometry(r1) b=content
00663                 //if ( sampleColor2 >= 200 )    // stop processing the samples when the first
00664                 //  break;                  // sample with value 200 is found in volume2 ("functional")
00665 
00666                 // BAKA cut: a=geometry(r1) b=content
00667                 //if ( FLAG == false)
00668                 //  if ( sampleColor2 < 200 )   // start processing the samples when the first
00669                 //  {   pIn = pIn + deltaZ;     // sample with value 200 is found in volume2 ("functional")
00670                 //      continue;
00671                 //  }
00672                 //  else FLAG = TRUE;
00673 
00674                 color2 = controlTables2.vpGetColor(sampleColor2); //
00675 
00676                 ClassifyColor (&sampleColor1, &gm, &r, &g, &b, &alpha);
00677                 alpha = 1 - pow(1-alpha, sampleStep); // opacity correction for the sampling distance
00678                 if (specular) {
00679                     obsPoint.x = i - observer.x;
00680                     obsPoint.y = j - observer.y;
00681                     obsPoint.z = k - observer.z;
00682                     obsPoint.vpNormalize();
00683                     attenuate = ShadeSpecular(gradient,obsPoint);
00684                 }
00685                 else 
00686                     attenuate = Shade (gradient); 
00687 
00688                 if(integrationType==1)
00689                 {
00690                     r = color2.vpGetR() * attenuate; //r * attenuate;
00691                     g = color2.vpGetG() * attenuate; //g * attenuate;
00692                     b = color2.vpGetB() * attenuate; //b * attenuate;
00693                 }
00694                 else
00695                 {
00696                     if ( (j>=firstSlice) && (j<=lastSlice) )
00697                     {
00698                         r = color2.vpGetR() * attenuate; //r * attenuate;
00699                         g = color2.vpGetG() * attenuate; //g * attenuate;
00700                         b = color2.vpGetB() * attenuate; //b * attenuate;
00701                     }
00702                     else
00703                     {
00704                         Classify (&sampleColor1, &gm, &luminance, &alpha);
00705                         r = luminance * attenuate; //r * attenuate;
00706                         g = luminance * attenuate; //g * attenuate;
00707                         b = luminance * attenuate; //b * attenuate;
00708                     }
00709                 }
00710 
00711                 if (typeOfCuttingTool == TWOCUTPLANESINCLUSIONOPAC) { // process the sample IN two cutting planes (needs a different "while" implementation)
00712                     if ( (alpha && depth<=1) || (alpha && ( (pIn.y<(backCuttingPlane+1)) && (pIn.y>(backCuttingPlane-1)) )) ) {
00713                             ivalue1 = (int) r; //vpTrilinearInterpolation(i2,j2,k2,volume2,pIn2);
00714                             ivalue2 = (int) g;
00715                             ivalue3 = (int) b;
00716                             break; 
00717                     }
00718                 }
00719                 else 
00720                     vpDefineB(i, j, k, alpha, volume1, pIn, depth, ivalue);
00721 
00722                 //*----- Front-to-back alpha blending compositing
00723                 if (alpha > 0.0)
00724                 {
00725                         at = alpha * (1.0f - a);
00726                         rayColorRed   = rayColorRed   + (r * at);
00727                         rayColorGreen = rayColorGreen + (g * at);
00728                         rayColorBlue  = rayColorBlue  + (b * at);
00729                         a = a + at;
00730 
00731                         //*----- do early ray termination 
00732                         if (a > 0.97)
00733                             break;
00734                 }
00735 
00736                 // Next sample
00737                 pIn = pIn + deltaZ; // projectionDirection; 
00738 
00739                 if(typeOfCuttingTool == CVP) {
00740                         if (flagCVP == 0 && sampleColor1 > valueCVP)
00741                             flagCVP = sampleColor1;
00742                         else if (flagCVP > valueCVP && sampleColor1 < valueCVP)
00743                             break;
00744                 }
00745                 
00746                 if (typeOfCuttingTool == OBLIQUESLICE) {
00747                     if ( ( (i<4) || (i>=(volumeDimension.x-4)) ) && ( (j<4) || (j>=(((VPImage *)volume1)->vpGetYDimension()-4)) ) && ( (k<4) || (k>=(volumeDimension.z-4)) ) )
00748                         ivalue1 = ivalue2 = ivalue3 = 0;
00749                     else {
00750                         if (alpha > 0.0) {
00751                             ivalue1 = (int) r; 
00752                             ivalue2 = (int) g;
00753                             ivalue3 = (int) b;
00754                         }
00755                     }
00756                     pIn.x = pIn.y = pIn.z = -10;
00757                 }
00758 
00759             } // while (depth)
00760 
00761             if ( (typeOfCuttingTool != TWOCUTPLANESINCLUSIONOPAC) && (typeOfCuttingTool != OBLIQUESLICE))
00762             {
00763                 ivalue1 = (int) rayColorRed;
00764                 ivalue2 = (int) rayColorGreen;
00765                 ivalue3 = (int) rayColorBlue;
00766             }
00767 
00768 
00769             if (a < 0) {
00770                     image[line][column][red] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
00771                     image[line][column][green] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
00772                     image[line][column][blue] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
00773             }
00774             else {
00775                     image[line][column][red] = (unsigned int) (255<<24 | ivalue1<<16 | ivalue1<<8 | ivalue1);
00776                     image[line][column][green] = (unsigned int) (255<<24 | ivalue2<<16 | ivalue2<<8 | ivalue2);
00777                     image[line][column][blue] = (unsigned int) (255<<24 | ivalue3<<16 | ivalue3<<8 | ivalue3);
00778             }
00779 
00780         } // if
00781 
00782         // Next plane point computation
00783         p1 = p1 + deltaX; 
00784 
00785     } // for (column)
00786 
00787  } // for (line)
00788 
00789 }   
00790 
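// When the anatomical and functional volumes differ in size (integrationType
// == 1 above), the sample position is rescaled from the first volume's index
// space into the second's by simple proportion. A minimal stand-alone sketch
// of that mapping (names are illustrative only):

static void vpMapToSecondVolumeSketch(float x1, float y1, float z1,
                                      float dimX1, float dimY1, float dimZ1,
                                      float dimX2, float dimY2, float dimZ2,
                                      float &x2, float &y2, float &z2) {
    // proportional rescaling of each coordinate: p2 = p1 * (dim2 / dim1)
    x2 = (dimX2 * x1) / dimX1;
    y2 = (dimY2 * y1) / dimY1;
    z2 = (dimZ2 * z1) / dimZ1;
}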
00791 
00793 // Description: Method "vpRenderLivroMonoColor" implements the brute  
00794 //              force ray casting visualization algorithm that 
00795 //              works with colors.
00796 // Parameters.: VPScene *s, VPCamera *c (active camera), 
00797 //              int visualizationType, unsigned int ***image 
00798 //              (pointer to the image);
00799 // Return.....: -
00800 
00801 void VPRayCasting::vpRenderLivroMonoColor(VPScene *s, VPCamera *c, int opacityComputation, unsigned int ***image) {
00802  int line=0, column=0, i=0, j=0, k=0, initialLineValue=0, ivalue1, ivalue2, ivalue3,
00803      finalLineValue=0, initialColumnValue=0, finalColumnValue=0, depth, flagCVP=0;
00804  short int planes[6];
00805  float sampleColor=0, sIn=0, sOut=0, tmp,
00806        rayColorRed=0, rayColorGreen=0, rayColorBlue=0, rayOpacity=0, 
00807        sampleOpacity=0, yCorrection=0, toRaio=0, auxLight=0, r, g, b;
00808 
00809  VPVector3D projectionDirection, gradient,
00810             deltaX, deltaY, deltaZ, vAux, correctProjectionDirection;
00811  VPPoint3D p1, pIn, pOut, pAux, minPlaneProjection, volumeDimension, origin, temp; 
00812  VPColor color;
00813  VPScene *scene = s;
00814  list<VPLight*> lights = scene->vpGetLights();
00815  list<VPGraphicObj*> objects = scene->vpGetObjects();
00816 
00817  // To get a pointer to the volume object
00818  VPGraphicObj *volume = *objects.begin();
00819 
00820  // Pointer to the right camera 
00821  VPCamera *camera = c; 
00822 
00823  // Get a pointer to the first light of the list 
00824  light = *lights.begin();
00825 
00826  // For specular processing
00827  VPPoint3D observer = camera->vpGetLocation();   
00828  VPVector3D obsPoint;                           
00829 
00830  // Get color and opacity tables
00831  VPTable controlTables = ((VPVolume *)volume)->vpGetControlTables();
00832 
00833  // If this is the first call to the visualization algorithm, set the default parameters
00834  if(virtualYDimension==0) 
00835      vpSetDefaultParameters (volume, camera);
00836 
00837  if (lightVolumeComputation) {
00838     ((VPVolume *)volume)->vpProcessLightedVolume(((VPDirectionalLight *)light)->vpGetDirection(), ambientLight);
00839     lightVolumeComputation = false;
00840  }
00841 
00842  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
00843                 volumeDimension, planes, finalLineValue, finalColumnValue,
00844                 deltaX, deltaY, deltaZ, volume);
00845 
00846  yCorrection = ((float) ((VPImage *)volume)->vpGetYDimension()) / ((float) virtualYDimension);
00847 
00848  switch(typeOfOpacityFunction)
00849  {
00850     case LEVOYSURFACEOPACITY: SetClassificationTable();
00851                         break;
00852     case LINEAROPACITY: SetClassificationTable2();
00853                         break;
00854  }
00855 
00856  // To set the light with the same direction of the observer...
00857  correctProjectionDirection = projectionDirection;
00858  correctProjectionDirection.x = -correctProjectionDirection.x; 
00859  correctProjectionDirection.y = -correctProjectionDirection.y; 
00860  correctProjectionDirection.z = -correctProjectionDirection.z; 
00861  // To set light position
00862  //correctProjectionDirection.x = 60; 
00863  //correctProjectionDirection.y = -40; 
00864  //correctProjectionDirection.z = -30;
00865  correctProjectionDirection.vpNormalize();
00866 
00867  vpSetLightDirection(correctProjectionDirection);
00868 
00869  // 2- Cutting technique: one cut plane 
00870   VPVector3D vet1(P2.x-P1.x,P2.y-P1.y,P2.z-P1.z), vet2(P3.x-P1.x,P3.y-P1.y,P3.z-P1.z), plane1Normal; // Good result if "if(f>0&&g<0) pIn=pInNew;"
00871   vet1.vpNormalize(); vet2.vpNormalize(); 
00872   plane1Normal = vet1.vpCrossProduct(vet2);
00873   float d = - (plane1Normal.x*0 + plane1Normal.y*P1.y + plane1Normal.z*0); 
00874 
00875   // Scan view line for the ray tracing 
00876   for (line=0; line<finalLineValue; line++) { 
00877 
00878      // Plane point computation
00879      p1 = minPlaneProjection;
00880      p1 = p1 + deltaY * line; 
00881 
00882      // Scan view column
00883      for (column=0; column<finalColumnValue; column++) {
00884 
00885         // Volume and ray intersection point computation
00886         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
00887                                       projectionDirection, pIn, pOut, sIn, sOut) )
00888         { 
00889             // if the ray intersects the volume then we have to process the samples
00890             gradient.x = gradient.y = gradient.z = 0;
00891             lum   = (float) 0.0;
00892             a     = (float) 0.0;
00893             sampleColor = sampleOpacity = (float) 0.0;
00894             rayColorRed = rayColorGreen = rayColorBlue = rayOpacity = 0.0;
00895             depth = ivalue = ivalue1 = ivalue2 = ivalue3 = flagCVP = 0;
00896 
00897             pIn.y = pIn.y*yCorrection;
00898 
00899             vpDefineA(pIn, pOut, p1, deltaZ, yCorrection, plane1Normal, volume, volumeDimension, d);
00900 
00901             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension())) { 
00902                 if (typeOfCuttingTool==TWOCUTPLANESINCLUSIONOPAC) 
00903                     depth++;
00904 
00905                 i = (int) (pIn.x); //(column);
00906                 j = (int) (pIn.y); //(line);
00907                 k = (int) (pIn.z); //(depth);
00908 
00909                 // Sample processing in accordance with the shading method (LOCALSHADING)
00910                 GradientSobel (i, j, k, gradient, &gm, volume); 
00911                 tmp = vpTrilinearInterpolation(i,j,k,volume,pIn);
00912                 color = controlTables.vpGetColor(tmp); //
00913 
00914                 ClassifyColor (&tmp, &gm, &r, &g, &b, &alpha);
00915                 alpha = 1 - pow(1-alpha, sampleStep); // opacity correction for the sampling distance
00916 
00917                 if (specular) {
00918                     obsPoint.x = i - observer.x;
00919                     obsPoint.y = j - observer.y;
00920                     obsPoint.z = k - observer.z;
00921                     obsPoint.vpNormalize();
00922                     attenuate = ShadeSpecular(gradient,obsPoint);
00923                 }
00924                 else 
00925                     attenuate = Shade (gradient); 
00926                 r = color.vpGetR() * attenuate; //r * attenuate;
00927                 g = color.vpGetG() * attenuate; //g * attenuate;
00928                 b = color.vpGetB() * attenuate; //b * attenuate;
00929 
00930                 if (typeOfCuttingTool == TWOCUTPLANESINCLUSIONOPAC) { // process the sample IN two cutting planes (needs a different "while" implementation)
00931                     if ( (alpha && depth<=1) || (alpha && ( (pIn.y<(backCuttingPlane+1)) && (pIn.y>(backCuttingPlane-1)) )) ) {
00932                             ivalue1 = (int) r; //vpTrilinearInterpolation(i2,j2,k2,volume2,pIn2);
00933                             ivalue2 = (int) g;
00934                             ivalue3 = (int) b;
00935                             break; 
00936                     }
00937                 }
00938                 else
00939                     vpDefineB(i, j, k, alpha, volume, pIn, depth, ivalue);
00940 
00941                 //*----- Front-to-back alpha blending compositing
00942                 if (alpha > 0.0)
00943                 {
00944                         at = alpha * (1.0f - a);
00945                         rayColorRed   = rayColorRed   + (r * at);
00946                         rayColorGreen = rayColorGreen + (g * at);
00947                         rayColorBlue  = rayColorBlue  + (b * at);
00948                         a = a + at;
00949 
00950                         //*----- do early ray termination 
00951                         if (a > 0.97)
00952                             break;
00953                 }
00954 
00955                 // Next sample
00956                 pIn = pIn + deltaZ; // projectionDirection; 
00957 
00958                 if(typeOfCuttingTool == CVP) {
00959                         if (flagCVP == 0 && tmp > valueCVP)
00960                             flagCVP = tmp;
00961                         else if (flagCVP > valueCVP && tmp < valueCVP)
00962                             break;
00963                 }
00964 
00965                 if (typeOfCuttingTool==OBLIQUESLICE) {
00966                     if ( ( (i<4) || (i>=(volumeDimension.x-4)) ) && ( (j<4) || (j>=(((VPImage *)volume)->vpGetYDimension()-4)) ) && ( (k<4) || (k>=(volumeDimension.z-4)) ) )
00967                         ivalue1 = ivalue2 = ivalue3 = 0;
00968                     else {
00969                         if (alpha > 0.0) {
00970                             ivalue1 = (int) r; 
00971                             ivalue2 = (int) g;
00972                             ivalue3 = (int) b;
00973                         }
00974                     }
00975                     pIn.x = pIn.y = pIn.z = -10;
00976                 }
00977 
00978             } // while (depth)
00979 
00980             if ( (typeOfCuttingTool != TWOCUTPLANESINCLUSIONOPAC) && (typeOfCuttingTool != OBLIQUESLICE))
00981             {
00982                 ivalue1 = (int) rayColorRed;
00983                 ivalue2 = (int) rayColorGreen;
00984                 ivalue3 = (int) rayColorBlue;
00985             }
00986             if (a < 0) {
00987                     image[line][column][red] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
00988                     image[line][column][green] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
00989                     image[line][column][blue] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
00990             }
00991             else {
00992                     image[line][column][red] = (unsigned int) (255<<24 | ivalue1<<16 | ivalue1<<8 | ivalue1);
00993                     image[line][column][green] = (unsigned int) (255<<24 | ivalue2<<16 | ivalue2<<8 | ivalue2);
00994                     image[line][column][blue] = (unsigned int) (255<<24 | ivalue3<<16 | ivalue3<<8 | ivalue3);
00995             }
00996     
00997         } // if
00998 
00999         // Next plane point computation
01000         p1 = p1 + deltaX; 
01001 
01002     } // for (column)
01003 
01004  } // for (line)
01005 
01006 }
01007 
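// SetClassificationTable() and SetClassificationTable2(), selected by
// typeOfOpacityFunction above, are defined elsewhere. Given the levThreshold
// and levWidth parameters read in the constructor, LEVOYSURFACEOPACITY
// presumably follows Levoy's gradient-weighted surface classification; the
// sketch below is only an assumption about that formula, not the class's
// actual table (it relies on <cmath>, already included at the top of the file):

static float vpLevoyOpacitySketch(float value, float gradMag,
                                  float levThreshold, float levWidth) {
    if (gradMag == 0.0f)
        return (value == levThreshold) ? 1.0f : 0.0f;
    float dist = fabs(levThreshold - value) / gradMag;  // distance to the iso-surface
    if (dist <= levWidth)
        return 1.0f - dist / levWidth;                  // fades out across the transition width
    return 0.0f;
}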
01008 
01010 // Description: Method "vpRenderNoLightMonoGray" implements the brute  
01011 //              force ray casting visualization algorithm for gray 
01012 //              levels and without lighting.
01013 // Parameters.: VPScene *s, VPCamera *c (active camera), 
01014 //              unsigned int **image (pointer to the image);
01015 // Return.....: -
01016 
01017 void VPRayCasting::vpRenderNoLightMonoGray(VPScene *s, VPCamera *c, unsigned int **image) {
01018  int line=0, column=0, depth=0, i=0, j=0, k=0, initialLineValue=0, 
01019      finalLineValue=0, initialColumnValue=0, finalColumnValue=0;
01020  short int planes[6];
01021  float previousIlight=0, Ilight=0, sampleColor=0, rayColor=0, rayOpacity=0, 
01022        sampleOpacity=0, yCorrection=0, sIn=0, sOut=0;
01023 
01024  VPVector3D projectionDirection, deltaX, deltaY, deltaZ, vAux;
01025  VPPoint3D p1, pIn, pOut, pAux, minPlaneProjection, volumeDimension, origin, temp; 
01026  VPScene *scene = s;
01027  list<VPLight*> lights = scene->vpGetLights();
01028  list<VPGraphicObj*> objects = scene->vpGetObjects();
01029 
01030  // To get a pointer to the volume object
01031  VPGraphicObj *volume = *objects.begin();
01032 
01033  // Pointer to the right camera 
01034  VPCamera *camera = c; 
01035 
01036  // Get a pointer to the first light of the list 
01037  light = *lights.begin();
01038 
01039  // Get color and opacity tables
01040  VPTable controlTables = ((VPVolume *)volume)->vpGetControlTables();
01041 
01042  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
01043                 volumeDimension, planes, finalLineValue, finalColumnValue,
01044                 deltaX, deltaY, deltaZ, volume);
01045 
01046  yCorrection = ((float) ((VPImage *)volume)->vpGetYDimension()) / ((float) virtualYDimension);
01047 
01048   // Scan view line for the ray tracing 
01049   for (line=0; line<finalLineValue; line++) { 
01050 
01051      // Plane point computation
01052      p1 = minPlaneProjection;
01053      p1 = p1 + deltaY * line; 
01054 
01055      // Scan view column
01056      for (column=0; column<finalColumnValue; column++) {
01057 
01058         // Volume and ray intersection point computation
01059         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
01060                                       projectionDirection, pIn, pOut, sIn, sOut) )
01061         { 
01062             // if the ray intersects the volume then we have to process the samples
01063             sampleColor = 0;
01064             rayColor = rayOpacity = 0.0;
01065             previousIlight = Ilight = 0.0;
01066 
01067             pIn.y = pIn.y*yCorrection;
01068         
01069             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension())) { 
01070 
01071                 i = (int) (pIn.x); //(column);
01072                 j = (int) (pIn.y); //(line);
01073                 k = (int) (pIn.z); //(depth);
01074     
01075                 // Just interpolation...
01076                 sampleColor = vpTrilinearInterpolation(i,j,k,volume,pIn);
01077                 if (sampleColor > 255) 
01078                     sampleColor = 255;
01079                 sampleOpacity = controlTables.vpGetLinearOpacityValue( (unsigned int) sampleColor );
01080 
01081                 rayColor += (sampleColor * sampleOpacity);
01082                 rayOpacity += sampleOpacity;
01083             
01084                 if (sampleOpacity == 1) break;
01085 
01086                 // Next sample
01087                 pIn = pIn + deltaZ; // projectionDirection; 
01088             } // while (depth)
01089             
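            // The pixel value computed below is the opacity-weighted average
            // of the sampled densities along the ray (an X-ray-like
            // projection), in contrast to the front-to-back compositing used
            // by the shaded render methods above.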
01090             if ( (rayColor != 0.0) && (rayOpacity != 0.0) ) {
01091                 sampleColor = rayColor / rayOpacity;
01092                 // store pixel color
01093                 image[line][column] = (unsigned int) sampleColor;
01094             }
01095 
01096         } // if
01097 
01098         // Next plane point computation
01099         p1 = p1 + deltaX; 
01100 
01101     } // for (column)
01102 
01103  } // for (line)
01104 
01105 }
01106 
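// Each render method clips its rays against the volume's bounding box through
// vpFindIntersectionPoints(), defined elsewhere. As a general reference, the
// stand-alone slab-method sketch below intersects a ray with an axis-aligned
// box whose minimum corner is the origin; it illustrates the technique only
// and is not the class's actual code:

static bool vpRayBoxSketch(const float origin[3], const float dir[3],
                           const float boxMax[3], float &tIn, float &tOut) {
    tIn = -1e30f; tOut = 1e30f;
    for (int axis = 0; axis < 3; axis++) {
        if (dir[axis] != 0.0f) {
            float t0 = (0.0f         - origin[axis]) / dir[axis];
            float t1 = (boxMax[axis] - origin[axis]) / dir[axis];
            if (t0 > t1) { float tmp = t0; t0 = t1; t1 = tmp; }
            if (t0 > tIn)  tIn  = t0;    // entry: latest of the slab entries
            if (t1 < tOut) tOut = t1;    // exit: earliest of the slab exits
        }
        else if (origin[axis] < 0.0f || origin[axis] > boxMax[axis]) {
            return false;                // parallel to this slab and outside it
        }
    }
    return tIn <= tOut && tOut >= 0.0f;  // non-empty overlap in front of the origin
}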
01107 
01109 // Description: Method "vpRenderLivroMultiInnerStructures" implements the 
01110 //              inner structures visualization algorithm (based on  
01111 //              the brute force ray casting algorithm).
01112 // Parameters.: VPScene *s (scene object), 
01113 //              VPCamera *c (active camera), 
01114 //              int opacityComputation (opacity computation function),
01115 //              unsigned int ***image (pointer to the image)
01116 // Return.....: -
01117 
01118 void VPRayCasting::vpRenderLivroMultiInnerStructures(VPScene *s, VPCamera *c, int opacityComputation, unsigned int ***image) {
01119  int line=0, column=0, i=0, j=0, k=0, i2=0, j2=0, k2=0, initialLineValue=0, finalLineValue=0, 
01120      depth=0, initialColumnValue=0, finalColumnValue=0, sampleNumberSBand=0, sampleNumberTBand=0, 
01121      ivalue1, ivalue2, ivalue3;
01122  short int planes[6];
01123  float sampleColor1=0, sampleColor2=0, r, g, b, 
01124        sIn=0, sOut=0, rayColorRed=0, rayColorGreen=0, rayColorBlue=0, 
01125        rayOpacity=0, sampleOpacity1=0, sampleOpacity2=0, yCorrection=0, 
01126        toRaio=0, auxLight=0, distance1=0, distance2=0;
01127 
01128  VPVector3D projectionDirection, gradient, auxProjectionDirection, auxDelta,
01129             deltaX, deltaY, deltaZ, vAux, correctProjectionDirection;
01130  VPPoint3D p1, pIn, pIn2, pOut, pAux, minPlaneProjection, volumeDimension, 
01131            volume2Dimension, origin, temp, centerFocalPoint, auxSBand, auxTBand; 
01132  VPColor color1, color2;
01133  VPScene *scene = s;
01134  list<VPLight*> lights = scene->vpGetLights();
01135  list<VPGraphicObj*> objects = scene->vpGetObjects();
01136  list<VPGraphicObj*>::const_iterator iter;
01137 
01138  // To get a pointer to the first volume object
01139  VPGraphicObj *volume1 = *objects.begin();
01140  iter = objects.begin();
01141  iter++;
01142  // To get a pointer to the second volume object
01143  VPGraphicObj *volume2 = *iter;
01144 
01145  // Pointer to the right camera 
01146  VPCamera *camera = c; 
01147 
01148  // Get a pointer to the first light of the list 
01149  light = *lights.begin();
01150 
01151  // For specular processing
01152  VPPoint3D observer = camera->vpGetLocation();   
01153  VPVector3D obsPoint;                           
01154 
01155  VPTable controlTables1 = ((VPVolume *)volume1)->vpGetControlTables();
01156  VPTable controlTables2 = ((VPVolume *)volume2)->vpGetControlTables();
01157 
01158  // If this is the first call to the visualization algorithm, set the default parameters
01159  if(virtualYDimension==0) {
01160     // To find the smallest dimension and set virtualYDimension
01161     if ( ((VPImage *)volume1)->vpGetXDimension() < ((VPVolume *)volume1)->vpGetZDimension() )
01162         virtualYDimension = ((VPVolume *)volume1)->vpGetZDimension();
01163     else 
01164         virtualYDimension = ((VPImage *)volume1)->vpGetXDimension();    
01165 
01166     ((VPVolume *)volume1)->vpAdjustScale(((VPImage *)volume1)->vpGetXDimension(), ((VPImage *)volume1)->vpGetYDimension(), ((VPVolume *)volume1)->vpGetZDimension());
01167     ((VPVolume *)volume2)->vpAdjustScale(((VPImage *)volume2)->vpGetXDimension(), ((VPImage *)volume2)->vpGetYDimension(), ((VPVolume *)volume2)->vpGetZDimension());
01168     vpSetCameraDefault(camera, volume1);
01169     vpSetLightDefault(light, camera);
01170 
01171     // Initialization of inner structures visualization parameters 
01172     pAux = ((VPVolume *)volume1)->vpGetCenterFocalPoint(); // the center of the "object"
01173     pAux.z = 0;
01174     while ( ((VPVolume *)volume1)->vpGetValue(pAux.x,pAux.y,pAux.z) <= 40 )
01175             pAux.z++;
01176     endOfSBand = pAux.z + ( ((VPVolume *)volume1)->vpGetZDimension() * 0.06 ); // 6%    
01177     endOfTBand = endOfSBand + ( ((VPVolume *)volume1)->vpGetZDimension() * 0.06 );
01178  }
01179 
01180  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
01181                 volumeDimension, planes, finalLineValue, finalColumnValue,
01182                 deltaX, deltaY, deltaZ, volume1);
01183  volume2Dimension.x = ((VPImage *)volume2)->vpGetXDimension(); 
01184  volume2Dimension.y = ((VPImage *)volume2)->vpGetYDimension(); 
01185  volume2Dimension.z = ((VPVolume *)volume2)->vpGetZDimension(); 
01186  volume2Dimension -= 1; // because of the volume matrix index
01187 
01188  yCorrection = ((float) ((VPImage *)volume1)->vpGetYDimension()) / ((float) virtualYDimension);
01189 
01190 
01193 
01194   // To get the volume center focal point
01195  auxSBand = auxTBand = centerFocalPoint = ((VPVolume *)volume1)->vpGetCenterFocalPoint();
01196 
01197  // First, get the direction from endOfTBand to endOfSBand
01198  auxSBand.z = endOfSBand;
01199  auxTBand.z = endOfTBand;
01200  auxProjectionDirection.x = auxSBand.x - auxTBand.x;
01201  auxProjectionDirection.y = auxSBand.y - auxTBand.y;
01202  auxProjectionDirection.z = auxSBand.z - auxTBand.z;
01203  distance1 = auxProjectionDirection.vpModule();
01204  auxProjectionDirection.vpNormalize();
01205 
01206  // Then, compute the "delta" used to "walk" along the "ray"
01207  auxDelta = auxProjectionDirection * sampleStep;
01208 
01209  // The first point before "walking" through the volume
01210  p1 = auxTBand;
01211 
01212  // Loop to go from the endOfTBand (internal) to the endOfSBand (external)
01213  while (distance2 < distance1) {
01214      sampleNumberTBand++;
01215      p1 = p1 + auxDelta;
01216      vAux.x = p1.x - auxTBand.x;
01217      vAux.y = p1.y - auxTBand.y;
01218      vAux.z = p1.z - auxTBand.z;
01219      distance2 = vAux.vpModule();
01220  }
01221  // To find the limit of the volume 
01222  while ((p1<=volumeDimension) && (p1>=temp) && (p1.y<((VPImage *)volume1)->vpGetYDimension())) {
01223      p1 = p1 + auxDelta;
01224  }
01225 
01226  // p1 "adjustment"
01227  p1 = p1 - auxDelta;
01228 
01229  // Loop to find the object "edge" 
01230  while (sampleColor1<=40) { 
01231     i = (int) (p1.x); //(column);
01232     j = (int) (p1.y); //(line);
01233     k = (int) (p1.z); //(depth);
01234     sampleColor1 = ((VPVolume *)volume1)->vpGetValue(i,j,k);
01235     p1 = p1 - auxDelta; // inverse projectionDirection; 
01236  }
01237 
01238  // Loop to go from "the edge" to the endOfSBand
01239  do {
01240      sampleNumberSBand++;
01241      p1 = p1 - auxDelta;
01242      vAux.x = p1.x - auxSBand.x;
01243      vAux.y = p1.y - auxSBand.y;
01244      vAux.z = p1.z - auxSBand.z;
01245      distance2 = vAux.vpModule();
01246  } while (distance2 > sampleStep);
01247 
01250 
01251  switch(typeOfOpacityFunction)
01252  {
01253     case LEVOYSURFACEOPACITY: SetClassificationTable();
01254                         break;
01255     case LINEAROPACITY: SetClassificationTable2();
01256                         break;
01257  }
01258 
01259  // To set the light with the same direction as the observer...
01260  correctProjectionDirection = projectionDirection;
01261  correctProjectionDirection.x = -correctProjectionDirection.x; 
01262  correctProjectionDirection.y = -correctProjectionDirection.y; 
01263  correctProjectionDirection.z = -correctProjectionDirection.z; 
01264  vpSetLightDirection(correctProjectionDirection);
01265 
01266   // Scan view line for the ray tracing 
01267   for (line=0; line<finalLineValue; line++) { 
01268 
01269      // Plane point computation
01270      p1 = minPlaneProjection;
01271      p1 = p1 + deltaY * line; 
01272 
01273      // Scan view column
01274      for (column=0; column<finalColumnValue; column++) {
01275 
01276         // Volume and ray intersection point computation
01277         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
01278                                       projectionDirection, pIn, pOut, sIn, sOut) )
01279         { 
01280             // if the ray intersects the volume, process the samples
01281             sampleColor1 = sampleColor2 = sampleOpacity1 = sampleOpacity2 = 0;
01282             rayColorRed = rayColorGreen = rayColorBlue = rayOpacity = 0.0;
01283             a=0;
01284 
01285             pIn.y = pIn.y*yCorrection;
01286 
01287             // while the ray doesn't "hit" the organ (beginning of S-Band)
01288             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume1)->vpGetYDimension()) && (sampleColor1<=40)) { 
01289                 i = (int) (pIn.x); //(column);
01290                 j = (int) (pIn.y); //(line);
01291                 k = (int) (pIn.z); //(depth);
01292                 sampleColor1 = ((VPVolume *)volume1)->vpGetValue(i,j,k);
01293                 pIn = pIn + deltaZ; // Next sample 
01294             }
01295 
01296             // If still inside the volume (the ray may not "hit" the object at all)
01297             if ( (pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume1)->vpGetYDimension()) )
01298             {
01299                 // End of S-Band and beginning of T-Band
01300                 for (depth=0; depth<=sampleNumberSBand; depth++) { 
01301                     pIn = pIn + deltaZ; 
01302                 } 
01303             
01304                 // While inside the region of interest (until the end of T-Band)
01305                 depth=0;
01306                 while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume1)->vpGetYDimension()) && (depth<=sampleNumberTBand)) {
01307                     depth++;
01308                     i = (int) (pIn.x); //(column);
01309                     j = (int) (pIn.y); //(line);
01310                     k = (int) (pIn.z); //(depth);
01311                     
01312                     GradientSobel (i, j, k, gradient, &gm, volume1); 
01313                     sampleColor1 = vpTrilinearInterpolation(i,j,k,volume1,pIn);
01314 
01315 
01316                     if ( ( volumeDimension.x != volume2Dimension.x ) ||
01317                          ( ((VPImage *)volume1)->vpGetYDimension() != volume2Dimension.y ) ||
01318                          ( volumeDimension.z != volume2Dimension.z ) ) {
01319                         switch(integrationType)
01320                         {
01321                             case 1:
01322                                     i2 = (volume2Dimension.x*i) / volumeDimension.x;
01323                                     j2 = (volume2Dimension.y*j) / ( ((VPImage *)volume1)->vpGetYDimension() - 1 );
01324                                     k2 = (volume2Dimension.z*k) / volumeDimension.z;
01325                                     pIn2.x = (volume2Dimension.x*pIn.x) / volumeDimension.x;
01326                                     pIn2.y = (volume2Dimension.y*pIn.y) / ( ((VPImage *)volume1)->vpGetYDimension() - 1 );
01327                                     pIn2.z = (volume2Dimension.z*pIn.z) / volumeDimension.z;
01328                                     sampleColor2 = vpTrilinearInterpolation(i2,j2,k2,volume2,pIn2);
01329                                     break;
01330                             case 2:
01331                                     if ( (j>=firstSlice) && (j<=lastSlice) )
01332                                     {
01333                                         i2 = (volume2Dimension.x*i) / volumeDimension.x;
01334                                         k2 = (volume2Dimension.z*k) / volumeDimension.z;
01335                                         pIn2.x = (volume2Dimension.x*pIn.x) / volumeDimension.x;
01336                                         pIn2.y = ( volume2Dimension.y * (pIn.y-(float)(firstSlice))) / ((float)(lastSlice-firstSlice+1));
01337                                         j2 = (int)(pIn2.y); // due to precision issues, this was the best way to compute j2
01338                                         pIn2.z = (volume2Dimension.z*pIn.z) / volumeDimension.z;
01339                                         sampleColor2 = vpTrilinearInterpolation(i2,j2,k2,volume2,pIn2);
01340                                     }
01341                                     break;
01342                         }
01343 
01344                     }
01345                     else
01346                         sampleColor2 = vpTrilinearInterpolation(i,j,k,volume2,pIn);
01347 
01348                     color2 = controlTables2.vpGetColor(sampleColor2); //
01349 
01350                     ClassifyColor (&sampleColor1, &gm, &r, &g, &b, &alpha);
01351                     alpha = 1 - pow(1-alpha, (1 / (1/sampleStep)) );
01352                     if (specular) {
01353                         obsPoint.x = i - observer.x;
01354                         obsPoint.y = j - observer.y;
01355                         obsPoint.z = k - observer.z;
01356                         obsPoint.vpNormalize();
01357                         attenuate = ShadeSpecular(gradient,obsPoint);
01358                     }
01359                     else 
01360                         attenuate = Shade (gradient); 
01361 
01362                     if(integrationType==1)
01363                     {
01364                         r = color2.vpGetR() * attenuate; //r * attenuate;
01365                         g = color2.vpGetG() * attenuate; //g * attenuate;
01366                         b = color2.vpGetB() * attenuate; //b * attenuate;
01367                     }
01368                     else
01369                     {
01370                         if ( (j>=firstSlice) && (j<=lastSlice) )
01371                         {
01372                             r = color2.vpGetR() * attenuate; //r * attenuate;
01373                             g = color2.vpGetG() * attenuate; //g * attenuate;
01374                             b = color2.vpGetB() * attenuate; //b * attenuate;
01375                         }
01376                         else
01377                         {
01378                             Classify (&sampleColor1, &gm, &luminance, &alpha);
01379                             r = luminance * attenuate; //r * attenuate;
01380                             g = luminance * attenuate; //g * attenuate;
01381                             b = luminance * attenuate; //b * attenuate;
01382                         }
01383                     }
01384 
01385                     //*----- Front-to-back alpha blending compositing
01386                     if (alpha > 0.0)
01387                     {
01388                             at = alpha * (1.0f - a);
01389                             rayColorRed   = rayColorRed   + (r * at);
01390                             rayColorGreen = rayColorGreen + (g * at);
01391                             rayColorBlue  = rayColorBlue  + (b * at);
01392                             a = a + at;
01393 
01394                             //*----- do early ray termination 
01395                             if (a > 0.97)
01396                                 break;
01397                     }
01398 
01399                     // Next sample
01400                     pIn = pIn + deltaZ; // projectionDirection; 
01401 
01402                 } // while (depth)
01403 
01404             } // if
01405 
01406             ivalue1 = (int) rayColorRed;
01407             ivalue2 = (int) rayColorGreen;
01408             ivalue3 = (int) rayColorBlue;
01409             if (a < 0) {
01410                     image[line][column][red] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
01411                     image[line][column][green] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
01412                     image[line][column][blue] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
01413             }
01414             else {
01415                     image[line][column][red] = (unsigned int) (255<<24 | ivalue1<<16 | ivalue1<<8 | ivalue1);
01416                     image[line][column][green] = (unsigned int) (255<<24 | ivalue2<<16 | ivalue2<<8 | ivalue2);
01417                     image[line][column][blue] = (unsigned int) (255<<24 | ivalue3<<16 | ivalue3<<8 | ivalue3);
01418             }
01419     
01420         } // if find intersection points
01421 
01422         // Next plane point computation
01423         p1 = p1 + deltaX; 
01424 
01425     } // for (column)
01426 
01427  } // for (line)
01428 
01429 }
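// A minimal, self-contained sketch of the front-to-back compositing and early
// ray termination used in the sampling loops above. The type and helper names
// below (RaySample, compositeFrontToBack) are illustrative only and are not part
// of VPRayCasting. As in the code above, the classified opacity is assumed to
// refer to a unit sample distance and is rescaled to the actual sampleStep;
// note that the expression (1 / (1/sampleStep)) used above is simply sampleStep.

struct RaySample { float r, g, b, alpha; };   // one classified and shaded sample

static void compositeFrontToBack(const RaySample *samples, int sampleCount,
                                 float sampleStep,
                                 float &red, float &green, float &blue)
{
    float a = 0.0f;                           // accumulated opacity
    red = green = blue = 0.0f;
    for (int s = 0; s < sampleCount; s++) {
        // Rescale the classified opacity to the current sample spacing
        float alpha = 1.0f - (float) pow(1.0f - samples[s].alpha, sampleStep);
        if (alpha <= 0.0f) continue;
        float at = alpha * (1.0f - a);        // remaining transparency
        red   += samples[s].r * at;
        green += samples[s].g * at;
        blue  += samples[s].b * at;
        a += at;
        if (a > 0.97f)                        // early ray termination
            break;
    }
}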
01430 
01431 
01433 // Description: Method "vpRenderLivroInnerStructures" implements 
01434 //              the inner structures visualization algorithm   
01435 //              (based on the brute-force ray casting algorithm).
01436 // Parameters.: VPScene *s (scene object), 
01437 //              VPCamera *c (active camera), 
01438 //              int opacityComputation (opacity computation function),
01439 //              unsigned int **image (pointer to the image)
01440 // Return.....: -
01441 
01442 void VPRayCasting::vpRenderLivroInnerStructures(VPScene *s, VPCamera *c,  int opacityComputation, unsigned int **image) {
01443  int line=0, column=0, depth=0, i=0, j=0, k=0, initialLineValue=0, 
01444      finalLineValue=0, initialColumnValue=0, finalColumnValue=0,
01445      sampleNumberTBand=0, sampleNumberSBand=0;
01446  short int planes[6];
01447  float previousIlight=0, Ilight=0, sampleColor=0, rayColor=0, rayOpacity=0, tmp,
01448        sampleOpacity=0, yCorrection=0, sIn=0, sOut=0, distance1=0, distance2=0;
01449 
01450  VPVector3D projectionDirection, gradient, deltaX, deltaY, deltaZ, 
01451             vAux, correctProjectionDirection, auxProjectionDirection, auxDelta;
01452  VPPoint3D p1, pIn, pOut, pAux, minPlaneProjection, volumeDimension, 
01453            origin, temp, auxSBand, auxTBand; 
01454  VPScene *scene = s;
01455  list<VPLight*> lights = scene->vpGetLights();
01456  list<VPGraphicObj*> objects = scene->vpGetObjects();
01457 
01458  // To get a pointer to the volume object
01459  VPGraphicObj *volume = *objects.begin();
01460 
01461  // Pointer to the right camera 
01462  VPCamera *camera = c; 
01463 
01464  // Get a pointer to the first light of the list 
01465  light = *lights.begin();
01466 
01467  // For specular processing
01468  VPPoint3D observer = camera->vpGetLocation();   
01469  VPVector3D obsPoint;                           
01470 
01471  // Get color and opacity tables
01472  VPTable controlTables = ((VPVolume *)volume)->vpGetControlTables();
01473 
01474  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
01475                 volumeDimension, planes, finalLineValue, finalColumnValue,
01476                 deltaX, deltaY, deltaZ, volume);
01477 
01478  yCorrection = ((float) ((VPImage *)volume)->vpGetYDimension()) / ((float) virtualYDimension);
01479 
01480 
01483 
01484   // To get the volume center focal point
01485  auxSBand = auxTBand = ((VPVolume *)volume)->vpGetCenterFocalPoint();
01486 
01487  // First, get the direction from endOfTBand to endOfSBand
01488  auxSBand.z = endOfSBand;
01489  auxTBand.z = endOfTBand;
01490  auxProjectionDirection.x = auxSBand.x - auxTBand.x;
01491  auxProjectionDirection.y = auxSBand.y - auxTBand.y;
01492  auxProjectionDirection.z = auxSBand.z - auxTBand.z;
01493  distance1 = auxProjectionDirection.vpModule();
01494  auxProjectionDirection.vpNormalize();
01495 
01496  // Then, compute the "delta" used to "walk" along the "ray"
01497  auxDelta = auxProjectionDirection * sampleStep;
01498 
01499  // The first point before "walking" through the volume
01500  p1 = auxTBand;
01501 
01502  // Loop to go from the endOfTBand (internal) to the endOfSBand (external)
01503  while (distance2 < distance1) {
01504      sampleNumberTBand++;
01505      p1 = p1 + auxDelta;
01506      vAux.x = p1.x - auxTBand.x;
01507      vAux.y = p1.y - auxTBand.y;
01508      vAux.z = p1.z - auxTBand.z;
01509      distance2 = vAux.vpModule();
01510  }
01511  // To find the limit of the volume 
01512  while ((p1<=volumeDimension) && (p1>=temp) && (p1.y<((VPImage *)volume)->vpGetYDimension())) {
01513      p1 = p1 + auxDelta;
01514  }
01515 
01516  // p1 "adjustment"
01517  p1 = p1 - auxDelta;
01518 
01519  // Loop to find the object "edge" 
01520  while (sampleColor<=40) { 
01521     i = (int) (p1.x); //(column);
01522     j = (int) (p1.y); //(line);
01523     k = (int) (p1.z); //(depth);
01524     sampleColor = ((VPVolume *)volume)->vpGetValue(i,j,k);
01525     p1 = p1 - auxDelta; // inverse projectionDirection; 
01526  }
01527 
01528  // Loop to go from "the edge" to the endOfSBand
01529  do {
01530      sampleNumberSBand++;
01531      p1 = p1 - auxDelta;
01532      vAux.x = p1.x - auxSBand.x;
01533      vAux.y = p1.y - auxSBand.y;
01534      vAux.z = p1.z - auxSBand.z;
01535      distance2 = vAux.vpModule();
01536  } while (distance2 > sampleStep);
01537 
01540 
01541  switch(typeOfOpacityFunction)
01542  {
01543     case LEVOYSURFACEOPACITY: SetClassificationTable();
01544                         break;
01545     case LINEAROPACITY: SetClassificationTable2();
01546                         break;
01547  }
01548 
01549  // To set the light with the same direction as the observer...
01550  correctProjectionDirection = projectionDirection;
01551  correctProjectionDirection.x = -correctProjectionDirection.x; 
01552  correctProjectionDirection.y = -correctProjectionDirection.y; 
01553  correctProjectionDirection.z = -correctProjectionDirection.z; 
01554  vpSetLightDirection(correctProjectionDirection);
01555 
01556   // Scan view line for the ray tracing 
01557   for (line=0; line<finalLineValue; line++) { 
01558 
01559      // Plane point computation
01560      p1 = minPlaneProjection;
01561      p1 = p1 + deltaY * line; 
01562 
01563      // Scan view column
01564      for (column=0; column<finalColumnValue; column++) {
01565 
01566         // Volume and ray intersection point computation
01567         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
01568                                       projectionDirection, pIn, pOut, sIn, sOut) )
01569         { 
01570             // if the ray intersects the volume, process the samples
01571             gradient.x = gradient.y = gradient.z = 0;
01572             lum   = (float) 0.0;
01573             a     = (float) 0.0;
01574             sampleColor = rayColor = rayOpacity = 0.0;
01575 
01576             pIn.y = pIn.y*yCorrection;
01577 
01578             // while the ray doesn't "hit" the organ (beginning of S-Band)
01579             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) && (sampleColor<=40)) { 
01580                 i = (int) (pIn.x); //(column);
01581                 j = (int) (pIn.y); //(line);
01582                 k = (int) (pIn.z); //(depth);
01583                 sampleColor = ((VPVolume *)volume)->vpGetValue(i,j,k);
01584                 pIn = pIn + deltaZ; // Next sample 
01585             }
01586 
01587             // If still inside the volume (the ray may not "hit" the object at all)
01588             if ( (pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) )
01589             {
01590                 // End of S-Band and beginning of T-Band
01591                 for (depth=0; depth<sampleNumberSBand; depth++) { 
01592                     pIn = pIn + deltaZ; 
01593                 } 
01594             
01595                 // While inside the region of interest (until the end of T-Band)
01596                 depth=0;
01597                 while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) && (depth<sampleNumberTBand)) {
01598                     depth++;
01599                     i = (int) (pIn.x); //(column);
01600                     j = (int) (pIn.y); //(line);
01601                     k = (int) (pIn.z); //(depth);
01602         
01603                     // Sample processing in accordance with the shading method (LOCALSHADING)
01604                     GradientSobel (i, j, k, gradient, &gm, volume); 
01605                     tmp = vpTrilinearInterpolation(i,j,k,volume,pIn);
01606                     Classify (&tmp, &gm, &luminance, &alpha);
01607                     alpha = 1 - pow(1-alpha, (1 / (1/sampleStep)) );
01608                     if (specular) {
01609                         obsPoint.x = i - observer.x;
01610                         obsPoint.y = j - observer.y;
01611                         obsPoint.z = k - observer.z;
01612                         obsPoint.vpNormalize();
01613                         attenuate = ShadeSpecular(gradient,obsPoint);
01614                     }
01615                     else 
01616                         attenuate = Shade (gradient); 
01617                     luminance = luminance * attenuate;
01618 
01619                     //*----- Front-to-back alpha blending compositing
01620                     if (alpha > 0.0) 
01621                     {
01622                         at = alpha * (1.0f - a);
01623                         lum = lum + (luminance * at);
01624                         a = a + at;
01625 
01626                         if (a > 0.97) break;
01627                     }
01628 
01629                     // Next sample
01630                     pIn = pIn + deltaZ; // projectionDirection; 
01631                 } // while (depth)
01632             } // if
01633 
01634             ivalue = (int) lum;
01635             if (a < 0)
01636                     image[line][column] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
01637             else
01638                     image[line][column] = (unsigned int) (255<<24 | ivalue<<16 | ivalue<<8 | ivalue);
01639         } // if
01640 
01641         // Next plane point computation
01642         p1 = p1 + deltaX; 
01643 
01644     } // for (column)
01645 
01646  } // for (line)
01647 
01648 }
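// The two counting loops above turn the S-Band and T-Band thicknesses (given in
// volume units) into numbers of ray samples. A minimal sketch of that idea, with
// an illustrative helper name (samplesForBandLength) that is not part of
// VPRayCasting: stepping by sampleStep until the covered distance reaches the
// band length is effectively ceil(bandLength / sampleStep).

static int samplesForBandLength(float bandLength, float sampleStep)
{
    int samples = 0;
    float covered = 0.0f;
    while (covered < bandLength) {   // same stopping rule as the loops above
        samples++;
        covered += sampleStep;
    }
    return samples;
}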
01649 
01650 
01652 // Description: Method "vpRenderLivroMonoColorInnerStructures"  
01653 //              implements the inner structures visualization 
01654 //              algorithm using colors (based on the brute-force 
01655 //              ray casting algorithm).
01656 // Parameters.: VPScene *s (scene object), 
01657 //              VPCamera *c (active camera), 
01658 //              int opacityComputation (opacity computation function),
01659 //              unsigned int ***image (pointer to the image)
01660 // Return.....: -
01661 
01662 void  VPRayCasting::vpRenderLivroMonoColorInnerStructures(VPScene *s, VPCamera *c, int opacityComputation, unsigned int ***image) {
01663  int line=0, column=0, depth=0, i=0, j=0, k=0, initialLineValue=0, ivalue1, ivalue2, ivalue3, 
01664      finalLineValue=0, initialColumnValue=0, finalColumnValue=0,
01665      sampleNumberTBand=0, sampleNumberSBand=0;
01666  short int planes[6];
01667  float previousIlight=0, Ilight=0, sampleColor=0, rayColor=0, rayOpacity=0, r, g, b,
01668        sampleOpacity=0, yCorrection=0, sIn=0, sOut=0, distance1=0, distance2=0,
01669        rayColorRed=0, rayColorGreen=0, rayColorBlue=0, tmp;
01670 
01671  VPVector3D projectionDirection, gradient, deltaX, deltaY, deltaZ, 
01672             vAux, correctProjectionDirection, auxProjectionDirection, auxDelta;
01673  VPPoint3D p1, pIn, pOut, pAux, minPlaneProjection, volumeDimension, 
01674            origin, temp, auxSBand, auxTBand; 
01675  VPColor color;
01676  VPScene *scene = s;
01677  list<VPLight*> lights = scene->vpGetLights();
01678  list<VPGraphicObj*> objects = scene->vpGetObjects();
01679 
01680  // To get a pointer to the volume object
01681  VPGraphicObj *volume = *objects.begin();
01682 
01683  // Pointer to the right camera 
01684  VPCamera *camera = c; 
01685 
01686  // Get a pointer to the first light of the list 
01687  light = *lights.begin();
01688 
01689  // For specular processing
01690  VPPoint3D observer = camera->vpGetLocation();   
01691  VPVector3D obsPoint;                           
01692 
01693  // Get color and opacity tables
01694  VPTable controlTables = ((VPVolume *)volume)->vpGetControlTables();
01695 
01696  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
01697                 volumeDimension, planes, finalLineValue, finalColumnValue,
01698                 deltaX, deltaY, deltaZ, volume);
01699 
01700  yCorrection = ((float) ((VPImage *)volume)->vpGetYDimension()) / ((float) virtualYDimension);
01701 
01704 
01705   // To get the volume center focal point
01706  auxSBand = auxTBand = ((VPVolume *)volume)->vpGetCenterFocalPoint();
01707 
01708  // First, get the direction from endOfTBand to endOfSBand
01709  auxSBand.z = endOfSBand;
01710  auxTBand.z = endOfTBand;
01711  auxProjectionDirection.x = auxSBand.x - auxTBand.x;
01712  auxProjectionDirection.y = auxSBand.y - auxTBand.y;
01713  auxProjectionDirection.z = auxSBand.z - auxTBand.z;
01714  distance1 = auxProjectionDirection.vpModule();
01715  auxProjectionDirection.vpNormalize();
01716 
01717  // Then, compute the "delta" used to "walk" along the "ray"
01718  auxDelta = auxProjectionDirection * sampleStep;
01719 
01720  // The first point before "walking" through the volume
01721  p1 = auxTBand;
01722 
01723  // Loop to go from the endOfTBand (internal) to the endOfSBand (external)
01724  while (distance2 < distance1) {
01725      sampleNumberTBand++;
01726      p1 = p1 + auxDelta;
01727      vAux.x = p1.x - auxTBand.x;
01728      vAux.y = p1.y - auxTBand.y;
01729      vAux.z = p1.z - auxTBand.z;
01730      distance2 = vAux.vpModule();
01731  }
01732  // To find the limit of the volume 
01733  while ((p1<=volumeDimension) && (p1>=temp) && (p1.y<((VPImage *)volume)->vpGetYDimension())) {
01734      p1 = p1 + auxDelta;
01735  }
01736 
01737  // p1 "adjustment"
01738  p1 = p1 - auxDelta;
01739 
01740  // Loop to find the object "edge" 
01741  while (sampleColor<=40) { 
01742     i = (int) (p1.x); //(column);
01743     j = (int) (p1.y); //(line);
01744     k = (int) (p1.z); //(depth);
01745     sampleColor = ((VPVolume *)volume)->vpGetValue(i,j,k);
01746     p1 = p1 - auxDelta; // inverse projectionDirection; 
01747  }
01748 
01749  // Loop to go from "the edge" to the endOfSBand
01750  do {
01751      sampleNumberSBand++;
01752      p1 = p1 - auxDelta;
01753      vAux.x = p1.x - auxSBand.x;
01754      vAux.y = p1.y - auxSBand.y;
01755      vAux.z = p1.z - auxSBand.z;
01756      distance2 = vAux.vpModule();
01757  } while (distance2 > sampleStep);
01758 
01761 
01762  switch(typeOfOpacityFunction)
01763  {
01764     case LEVOYSURFACEOPACITY: SetClassificationTable();
01765                         break;
01766     case LINEAROPACITY: SetClassificationTable2();
01767                         break;
01768  }
01769 
01770  // To set the light with the same direction as the observer...
01771  correctProjectionDirection = projectionDirection;
01772  correctProjectionDirection.x = -correctProjectionDirection.x; 
01773  correctProjectionDirection.y = -correctProjectionDirection.y; 
01774  correctProjectionDirection.z = -correctProjectionDirection.z; 
01775  vpSetLightDirection(correctProjectionDirection);
01776 
01777   // Scan view line for the ray tracing 
01778   for (line=0; line<finalLineValue; line++) { 
01779 
01780      // Plane point computation
01781      p1 = minPlaneProjection;
01782      p1 = p1 + deltaY * line; 
01783 
01784      // Scan view column
01785      for (column=0; column<finalColumnValue; column++) {
01786 
01787         // Volume and ray intersection point computation
01788         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
01789                                       projectionDirection, pIn, pOut, sIn, sOut) )
01790         { 
01791             // if the ray intersects the volume, process the samples
01792             gradient.x = gradient.y = gradient.z = 0;
01793             lum   = (float) 0.0;
01794             a     = (float) 0.0;
01795             sampleColor = rayColor = rayOpacity = r = g = b = 0.0;
01796             rayColorRed = rayColorGreen = rayColorBlue = 0.0;
01797 
01798             pIn.y = pIn.y*yCorrection;
01799             
01800             // while the ray doesn't "hit" the organ (beginning of S-Band)
01801             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) && (sampleColor<=40)) { 
01802                 i = (int) (pIn.x); //(column);
01803                 j = (int) (pIn.y); //(line);
01804                 k = (int) (pIn.z); //(depth);
01805                 sampleColor = ((VPVolume *)volume)->vpGetValue(i,j,k);
01806                 pIn = pIn + deltaZ; // Next sample 
01807             }
01808 
01809             // If still inside the volume (the ray may not "hit" the object at all)
01810             if ( (pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) )
01811             {
01812                 // End of S-Band and beginning of T-Band
01813                 for (depth=0; depth<sampleNumberSBand; depth++) { 
01814                     pIn = pIn + deltaZ; 
01815                 } 
01816             
01817                 // While inside the region of interest (until the end of T-Band)
01818                 depth=0;
01819                 while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension()) && (depth<sampleNumberTBand)) {
01820                     depth++;
01821                     i = (int) (pIn.x); //(column);
01822                     j = (int) (pIn.y); //(line);
01823                     k = (int) (pIn.z); //(depth);
01824         
01825                     // Sample processing in accordance with the shading method (LOCALSHADING)
01826                     GradientSobel (i, j, k, gradient, &gm, volume); 
01827                     tmp = vpTrilinearInterpolation(i,j,k,volume,pIn);
01828                     color = controlTables.vpGetColor(tmp); //
01829 
01830                     ClassifyColor (&tmp, &gm, &r, &g, &b, &alpha);
01831                     alpha = 1 - pow(1-alpha, (1 / (1/sampleStep)) );
01832                     if (specular) {
01833                         obsPoint.x = i - observer.x;
01834                         obsPoint.y = j - observer.y;
01835                         obsPoint.z = k - observer.z;
01836                         obsPoint.vpNormalize();
01837                         attenuate = ShadeSpecular(gradient,obsPoint);
01838                     }
01839                     else 
01840                         attenuate = Shade (gradient); 
01841                     r = color.vpGetR() * attenuate; //r * attenuate;
01842                     g = color.vpGetG() * attenuate; //g * attenuate;
01843                     b = color.vpGetB() * attenuate; //b * attenuate;
01844 
01845                     //*----- Front-to-back alpha blending compositing
01846                     if (alpha > 0.0) 
01847                     {
01848                         at = alpha * (1.0f - a);
01849                         rayColorRed   = rayColorRed   + (r * at);
01850                         rayColorGreen = rayColorGreen + (g * at);
01851                         rayColorBlue  = rayColorBlue  + (b * at);
01852                         a = a + at;
01853 
01854                         if (a > 0.97) break;
01855                     }
01856 
01857                     // Next sample
01858                     pIn = pIn + deltaZ; // projectionDirection; 
01859                 } // while (depth)
01860             } // if
01861 
01862             ivalue1 = (int) rayColorRed;
01863             ivalue2 = (int) rayColorGreen;
01864             ivalue3 = (int) rayColorBlue;
01865             if (a < 0) {
01866                     image[line][column][red] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
01867                     image[line][column][green] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
01868                     image[line][column][blue] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
01869             }
01870             else {
01871                     image[line][column][red] = (unsigned int) (255<<24 | ivalue1<<16 | ivalue1<<8 | ivalue1);
01872                     image[line][column][green] = (unsigned int) (255<<24 | ivalue2<<16 | ivalue2<<8 | ivalue2);
01873                     image[line][column][blue] = (unsigned int) (255<<24 | ivalue3<<16 | ivalue3<<8 | ivalue3);
01874             }
01875         } // if
01876 
01877         // Next plane point computation
01878         p1 = p1 + deltaX; 
01879 
01880     } // for (column)
01881 
01882  } // for (line)
01883 
01884 }
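// The pixel writes above replicate a single 0..255 intensity into the R, G and B
// byte lanes of a 32-bit ARGB word, with the alpha byte forced to 255. A minimal
// sketch of that packing with an illustrative helper name (packGrayARGB) that is
// not part of VPRayCasting; the result must be kept as a full unsigned int,
// since truncating it to unsigned short would discard the alpha and red bytes.

static unsigned int packGrayARGB(int value)
{
    if (value < 0)   value = 0;      // clamp to the displayable range
    if (value > 255) value = 255;
    unsigned int v = (unsigned int) value;
    return 255u << 24 | v << 16 | v << 8 | v;
}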
01885 
01886 
01888 // Description: Method "vpRenderMIP" implements the MIP visualization  
01889 //              type using the brute-force ray casting algorithm for  
01890 //              gray levels.
01891 // Parameters.: VPScene *s, VPCamera *c (active camera), 
01892 //              unsigned int **image (pointer to the image);
01893 // Return.....: -
01894 
01895 void VPRayCasting::vpRenderMIP(VPScene *s, VPCamera *c, unsigned int **image) {
01896  int line=0, column=0, depth=0, i=0, j=0, k=0, initialLineValue=0, 
01897      finalLineValue=0, initialColumnValue=0, finalColumnValue=0;
01898  short int planes[6];
01899  float previousIlight=0, Ilight=0, sampleColor=0, rayColor=0, rayOpacity=0, 
01900        sampleOpacity=0, yCorrection=0, sIn=0, sOut=0,
01901        MIPcolor=0, MIPlight=0, MIPopacity=0;
01902 
01903  VPVector3D projectionDirection, deltaX, deltaY, deltaZ, vAux;
01904  VPPoint3D p1, pIn, pOut, pAux, minPlaneProjection, volumeDimension, origin, temp; 
01905  VPScene *scene = s;
01906  list<VPLight*> lights = scene->vpGetLights();
01907  list<VPGraphicObj*> objects = scene->vpGetObjects();
01908 
01909  // To get a pointer to the volume object
01910  VPGraphicObj *volume = *objects.begin();
01911 
01912  // Pointer to the right camera 
01913  VPCamera *camera = c; 
01914 
01915  // Get a pointer to the first light of the list 
01916  light = *lights.begin();
01917 
01918  // Get color and opacity tables
01919  VPTable controlTables = ((VPVolume *)volume)->vpGetControlTables();
01920 
01921  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
01922                 volumeDimension, planes, finalLineValue, finalColumnValue,
01923                 deltaX, deltaY, deltaZ, volume);
01924 
01925  yCorrection = ((float) ((VPImage *)volume)->vpGetYDimension()) / ((float) virtualYDimension);
01926 
01927   // Scan view line for the ray tracing 
01928   for (line=0; line<finalLineValue; line++) { 
01929 
01930      // Plane point computation
01931      p1 = minPlaneProjection;
01932      p1 = p1 + deltaY * line; 
01933 
01934      // Scan view column
01935      for (column=0; column<finalColumnValue; column++) {
01936 
01937         // Volume and ray intersection point computation
01938         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
01939                                       projectionDirection, pIn, pOut, sIn, sOut) )
01940         { 
01941             // if the ray intersects the volume, process the samples
01942             sampleColor = 0;
01943             rayColor = rayOpacity = 0.0;
01944             previousIlight = Ilight = 0.0;
01945             MIPcolor = -1.0;
01946 
01947             pIn.y = pIn.y*yCorrection;
01948 
01949             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension())) { 
01950 
01951                 i = (int) (pIn.x); //(column);
01952                 j = (int) (pIn.y); //(line);
01953                 k = (int) (pIn.z); //(depth);
01954 
01955 
01956                 // Interpolation 
01957                 sampleColor = vpTrilinearInterpolation(i,j,k,volume,pIn);
01958 
01959                 if (sampleColor > 255) 
01960                     sampleColor = 255;
01961 
01962                 if (MIPcolor < sampleColor)
01963                     MIPcolor = sampleColor;
01964 
01965                 // Next sample
01966                 pIn = pIn + deltaZ; // projectionDirection; 
01967             } // while (depth)
01968             
01969             sampleOpacity = controlTables.vpGetLinearOpacityValue( MIPcolor );
01970             
01971             // store pixel color
01972             image[line][column] = (unsigned int) MIPcolor*sampleOpacity;
01973 
01974         } // if
01975 
01976         // Next plane point computation
01977         p1 = p1 + deltaX; 
01978 
01979     } // for (column)
01980 
01981  } // for (line)
01982 
01983 }
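// A minimal sketch of the per-ray reduction performed by vpRenderMIP: the pixel
// value is the maximum interpolated sample found along the ray, which the method
// above then modulates by a linear opacity looked up for that maximum. The helper
// name (mipAlongRay) and the plain float array are illustrative only.

static float mipAlongRay(const float *samples, int sampleCount)
{
    float maxValue = -1.0f;                   // same sentinel as MIPcolor above
    for (int s = 0; s < sampleCount; s++) {
        float value = samples[s];
        if (value > 255.0f) value = 255.0f;   // clamp as in the loop above
        if (value > maxValue) maxValue = value;
    }
    return maxValue;
}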
01984 
01985 
01987 // Description: Method "vpRenderLivroMIP" implements the MIP   
01988 //              visualization type using the brute-force ray  
01989 //              casting algorithm for gray levels.
01990 // Parameters.: VPScene *s, VPCamera *c (active camera), 
01991 //              unsigned int **image (pointer to the image);
01992 // Return.....: -
01993 
01994 void VPRayCasting::vpRenderLivroMIP(VPScene *s, VPCamera *c, unsigned int **image) {
01995  int line=0, column=0, depth=0, i=0, j=0, k=0, initialLineValue=0, 
01996      finalLineValue=0, initialColumnValue=0, finalColumnValue=0;
01997  short int planes[6];
01998  float sampleOpacity=0, yCorrection=0, sIn=0, sOut=0, MIPcolor=0, MIPlight=0, 
01999      MIPopacity=0, MIPgm, tmp;
02000 
02001  VPVector3D projectionDirection, deltaX, deltaY, deltaZ, vAux, gradient, 
02002             MIPgradient, correctProjectionDirection;
02003  VPPoint3D p1, pIn, pOut, pAux, minPlaneProjection, volumeDimension, origin, temp; 
02004  VPScene *scene = s;
02005  list<VPLight*> lights = scene->vpGetLights();
02006  list<VPGraphicObj*> objects = scene->vpGetObjects();
02007 
02008  // To get a pointer to the volume object
02009  VPGraphicObj *volume = *objects.begin();
02010 
02011  // Pointer to the right camera 
02012  VPCamera *camera = c; 
02013 
02014  // Get a pointer to the first light of the list 
02015  light = *lights.begin();
02016 
02017  // Get color and opacity tables
02018  VPTable controlTables = ((VPVolume *)volume)->vpGetControlTables();
02019 
02020  vpSetVariables(camera, projectionDirection, minPlaneProjection, 
02021                 volumeDimension, planes, finalLineValue, finalColumnValue,
02022                 deltaX, deltaY, deltaZ, volume);
02023 
02024  yCorrection = ((float) ((VPImage *)volume)->vpGetYDimension()) / ((float) virtualYDimension);
02025 
02026  switch(typeOfOpacityFunction)
02027  {
02028     case LEVOYSURFACEOPACITY: SetClassificationTable();
02029                         break;
02030     case LINEAROPACITY: SetClassificationTable2();
02031                         break;
02032  }
02033 
02034   // Scan view line for the ray tracing 
02035   for (line=0; line<finalLineValue; line++) { 
02036 
02037      // Plane point computation
02038      p1 = minPlaneProjection;
02039      p1 = p1 + deltaY * line; 
02040 
02041      // Scan view column
02042      for (column=0; column<finalColumnValue; column++) {
02043 
02044         // Volume and ray intersection point computation
02045         if ( vpFindIntersectionPoints(planes, volumeDimension, p1, 
02046                                       projectionDirection, pIn, pOut, sIn, sOut) )
02047         { 
02048             // if the ray intersects the volume, process the samples
02049             gradient.x = gradient.y = gradient.z = (float) 0.0;
02050             MIPgradient = gradient;
02051             MIPgm = (float) 0.0;
02052             lum   = (float) 0.0;
02053             a     = (float) 0.0;
02054             MIPcolor = (float) -1.0;
02055 
02056             pIn.y = pIn.y*yCorrection;
02057         
02058             while ((pIn<=volumeDimension) && (pIn>=temp) && (pIn.y<((VPImage *)volume)->vpGetYDimension())) { 
02059 
02060                 i = (int) (pIn.x); //(column);
02061                 j = (int) (pIn.y); //(line);
02062                 k = (int) (pIn.z); //(depth);
02063 
02064                 Gradient (i, j, k, gradient, &gm, volume); 
02065                 tmp = vpTrilinearInterpolation(i,j,k,volume,pIn);
02066 
02067                 if (MIPcolor < tmp) {
02068                     MIPcolor = tmp;
02069                     MIPgm = gm;
02070                     MIPgradient = gradient;
02071                 }
02072 
02073                 // Next sample
02074                 pIn = pIn + deltaZ; // projectionDirection; 
02075             } // while (depth)
02076 
02077             // To set the light with the same direction as the observer...
02078             correctProjectionDirection = projectionDirection;
02079             correctProjectionDirection.x = -correctProjectionDirection.x; 
02080             correctProjectionDirection.y = -correctProjectionDirection.y; 
02081             correctProjectionDirection.z = -correctProjectionDirection.z; 
02082             vpSetLightDirection(correctProjectionDirection);
02083             
02084             attenuate = Shade (MIPgradient);
02085             luminance = MIPcolor * attenuate;
02086 
02087             // store pixel color
02088             ivalue = (int) luminance;
02089             if (a < 0)
02090                     image[line][column] = (unsigned int) (255<<24 | 102<<16 | 179<<8 | 230);
02091             else
02092                     image[line][column] = (unsigned int) (255<<24 | ivalue<<16 | ivalue<<8 | ivalue);
02093 
02094         } // if
02095 
02096         // Next plane point computation
02097         p1 = p1 + deltaX; 
02098 
02099     } // for (column)
02100 
02101  } // for (line)
02102 
02103 }
02104 
02105 
02107 // Description: Method "vpSetVariables" sets the values of some
02108 //              ray casting variables.
02109 // Parameters.: VPCamera *camera, a pointer to the camera object 
02110 //              VPVector3D &projectionDirection
02111 //              VPPoint3D &minPlaneProjection, point where the scan begins
02112 //              VPPoint3D &volumeDimension
02113 //              short int planes[], intersection candidate planes
02114 //              int &finalLineValue, view height
02115 //              int &finalColumnValue, view width
02116 //              VPVector3D &deltaX, x step
02117 //              VPVector3D &deltaY, y step
02118 //              VPVector3D &deltaZ, z step
02119 //              VPGraphicObj *volume, volume that will be visualized
02120 // Return.....: -
02121 
02122 void VPRayCasting::vpSetVariables(VPCamera *camera, VPVector3D &projectionDirection,
02123                                   VPPoint3D &minPlaneProjection, VPPoint3D &volumeDimension, 
02124                                   short int planes[], int &finalLineValue, int &finalColumnValue,
02125                                   VPVector3D &deltaX, VPVector3D &deltaY, VPVector3D &deltaZ,
02126                                   VPGraphicObj *volume) 
02127 {
02128  float ratioX=0, ratioY=0;
02129  VPPoint2D winTopRightValue;
02130  VPPoint3D location = camera->vpGetLocation();
02131  VPPoint3D target = camera->vpGetTarget();
02132  VPVector3D vectorH, vectorV, auxUp(0,1,0);
02133                 
02134  // Set the projection direction (from the camera location to the target)
02135  projectionDirection.vpSetVector3D(target - location);
02136  projectionDirection.vpNormalize();
02137 
02138  if ((projectionDirection==camera->vpGetUp()) || (projectionDirection==auxUp)) {
02139     VPVector3D v(0,0,-1);
02140     camera->vpSetUp(v);
02141  }
02142 
02143  // winTopRightValue has the projection plane size (x,y)
02144  winTopRightValue = camera->vpGetWinTopRight();
02145 
02146  // volumeDimension has the volume dimension
02147  volumeDimension.x = ((VPImage *)volume)->vpGetXDimension();
02148  volumeDimension.y = virtualYDimension;
02149  volumeDimension.z = ((VPVolume *)volume)->vpGetZDimension();
02150  volumeDimension -= 1; // because of the volume matrix index
02151  
02152  // "D" component of orthogonal planes equation...
02153  dFront = 0;                    // Normal(0,0,-1)  Point(0,0,0)
02154  dTop = 0;                      // Normal(0,-1,0)   Point(0,0,0)
02155  dLeft = 0;                     // Normal(-1,0,0)  Point(0,0,0)
02156  dBack = -volumeDimension.z;    // Normal(0,0,1)   Point = volumeDimension
02157  dBottom = -volumeDimension.y;  // Normal(0,1,0)  Point = volumeDimension
02158  dRight = -volumeDimension.x;   // Normal(1,0,0)   Point = volumeDimension
02159 
02160  // Set "h" and "v" vectors 
02161  VPVector3D BAKA = camera->vpGetUp();
02162  vectorH = BAKA.vpCrossProduct(projectionDirection);
02163  vectorV = vectorH.vpCrossProduct(projectionDirection);
02164 
02165  //vectorH = projectionDirection.vpCrossProduct(camera->vpGetUp());
02166  //vectorV = projectionDirection.vpCrossProduct(vectorH);
02167 
02168 
02169  // Possible intersection planes computation
02170  vpFindIntersectionCandidatePlanes(planes, projectionDirection);
02171 
02172  // Set scan window corner (where the scan begins)
02173  minPlaneProjection = (location - vectorH*(winTopRightValue.vpGetX()/2));
02174  minPlaneProjection = (minPlaneProjection - vectorV*(winTopRightValue.vpGetY()/2));
02175 
02176 // Set variables to determine the end of the view scan
02177  finalLineValue = camera->vpGetViewHeight();
02178  finalColumnValue = camera->vpGetViewWidth();
02179 
02180  // Window/Viewport ratio
02181  ratioX =  winTopRightValue.vpGetX()/finalColumnValue;
02182  ratioY = winTopRightValue.vpGetY()/finalLineValue;
02183 
02184  // Line, column and sample step 
02185  deltaX = vectorH * ratioX;
02186  deltaY = vectorV * ratioY;
02187  deltaZ = projectionDirection * sampleStep;
02188 }
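// The scan loops in the render methods recover the projection-plane point of
// pixel (line, column) from the values produced by vpSetVariables. A minimal
// sketch of that relation, reusing the VPPoint3D/VPVector3D operators already
// exercised above; the helper name (vpPlanePointForPixel) is illustrative only
// and not part of VPRayCasting.

static VPPoint3D vpPlanePointForPixel(VPPoint3D minPlaneProjection,
                                      VPVector3D deltaX, VPVector3D deltaY,
                                      int line, int column)
{
    VPPoint3D p = minPlaneProjection;
    p = p + deltaY * line;      // step "line" rows along the vertical axis
    p = p + deltaX * column;    // step "column" columns along the horizontal axis
    return p;                   // ray origin; its direction is projectionDirection
}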
02189 
02190 
02192 // Description: Method "vpSetCameraDefault" sets the initial camera
02193 //              attribute values, in accordance with the volume.
02194 // Parameters.: *camera, a pointer to the camera object 
02195 //              *volume, a pointer to the volume object that will 
02196 //                       be visualized.
02197 // Return.....: -
02198 
02199 void VPRayCasting::vpSetCameraDefault(VPCamera *camera, VPGraphicObj *volume) {
02200 
02201  float h=0, w=0;
02202  VPPoint3D center = ((VPVolume *)volume)->vpGetCenterFocalPoint();
02203 
02204  // Middle of the object (gives camera direction)
02205  VPPoint3D target((float) center.x, 
02206                   (float) virtualYDimension/2,
02207                   (float) center.z);
02208 
02209  // Camera location
02210  VPPoint3D location ((float) center.x, 
02211                      (float) virtualYDimension/2,
02212                      (float) 0 );
02213 
02214  VPVector3D up(0.0,1.0,0.0);
02215 
02216  if (cameraLocation.x==0 && cameraLocation.y==0 && cameraLocation.z==0)
02217     camera->vpSetLocation(location);
02218  else
02219     camera->vpSetLocation(cameraLocation);
02220  camera->vpSetTarget(target);
02221  camera->vpSetProjectionType(ORTHOGRAPHIC);
02222  camera->vpSetUp(up);
02223 
02224  // Set the parallelepiped or frustum view, in accordance with the
02225  // selected projection (winTopRight is initialized before)
02226  VPPoint2D winBottomLeftValue(0,0);
02227  
02228  camera->vpSetNearPlane(0);
02229  if ( ((VPImage *)volume)->vpGetYDimension() > ((VPVolume *)volume)->vpGetZDimension() )
02230     camera->vpSetFarPlane(((VPImage *)volume)->vpGetYDimension()); 
02231  else
02232     camera->vpSetFarPlane(((VPVolume *)volume)->vpGetZDimension());
02233 
02234  camera->vpSetWinBottomLeft(winBottomLeftValue);
02235 
02236  ((VPVolume *)volume)->vpSetCameraLocationForInnerStructure(location);
02237 
02238 }
02239 
02240 
02242 // Description: Method "vpSetLightDefault" sets the initial light
02243 //              attribute values, in accordance with the observer
02244 //              position.
02245 // Parameters.: *light, a pointer to the light object  
02246 //              *camera, a pointer to the camera object
02247 // Return.....: -
02248 
02249 void VPRayCasting::vpSetLightDefault(VPLight *light, VPCamera *camera) {
02250  VPVector3D direction;
02251  VPPoint3D location = camera->vpGetLocation();
02252  VPPoint3D target = camera->vpGetTarget();
02253 
02254  //location.x += 15;
02255  //location.y += 15;
02256  direction.vpSetVector3D(location-target); 
02257  direction.vpNormalize();
02258 
02259  ((VPDirectionalLight *)light)->vpSetDirection(direction);
02260 }
02261 
02262 
02264 // Description: Method "vpFindIntersectionCandidatePlanes" determines   
02265 //              the possible intersection planes.
02266 // Parameters.: short int planes[] (information about the possible 
02267 //                                  plane intersections)
02268 //              VPVector3D dir (observer-target ray direction) 
02269 // Return.....: -
02270 
02271 void VPRayCasting::vpFindIntersectionCandidatePlanes(short int planes[], 
02272                                                      VPVector3D dir) {
02273  // Identification of possible intersection planes
02274  if ( dir.x != 0 ) { // If it could intersect YZ plane...
02275     if ( dir.x > 0) {
02276         planes[PLEFT] = PIN;
02277         planes[PRIGHT] = POUT;
02278     }
02279     else {
02280         planes[PLEFT] = POUT;
02281         planes[PRIGHT] = PIN;
02282     }
02283  }
02284  else {
02285     planes[PLEFT] = planes[PRIGHT] = NOTIO;
02286  }
02287 
02288  if ( dir.y != 0 ) { // If it could intersect XZ plane...
02289     if ( dir.y > 0) { 
02290         planes[PTOP] = PIN;
02291         planes[PBOTTOM] = POUT;
02292     }
02293     else {
02294         planes[PTOP] = POUT;
02295         planes[PBOTTOM] = PIN;
02296     }
02297  }
02298  else {
02299     planes[PTOP] = planes[PBOTTOM] = NOTIO;
02300  }
02301 
02302  if ( dir.z != 0 ) { // If it could intersect XY plane...
02303     if ( dir.z > 0) { 
02304         planes[PFRONT] = PIN;
02305         planes[PBACK] = POUT;
02306     }
02307     else {
02308         planes[PFRONT] = POUT;
02309         planes[PBACK] = PIN;
02310     }
02311  }
02312  else {
02313     planes[PFRONT] = planes[PBACK] = NOTIO;
02314  }
02315 
02316 }
02317 
02318 
02320 // Description: Method "vpFindIntersectionPoints" computes the  
02321 //              intersection between a ray and the volume.
02322 // Parameters.: short int planes[] (candidate intersection planes),
02323 //              VPPoint3D maxB (volume dimension), 
02324 //              VPPoint3D origin (ray origin), VPVector3D dir (ray direction), 
02325 //              VPPoint3D &in (intersection point on the entry plane),
02326 //              VPPoint3D &out (intersection point on the exit plane),
02327 //              float &tIn, &tOut (ray parameters at the entry/exit points)
02328 // Return.....: true (ray intersects the volume), false otherwise
02329 
02330 bool VPRayCasting::vpFindIntersectionPoints(short int planes[], VPPoint3D maxB, 
02331                                             VPPoint3D origin, VPVector3D dir, 
02332                                             VPPoint3D &in, VPPoint3D &out, 
02333                                             float &tIn, float &tOut) {
02334  bool noIn=true, noOut=true;
02335  short int planeIn=0, planeOut=0;
02336  float d=0, t[6]={0,0,0,0,0,0};
02337  VPPoint3D minB;
02338 
02339  // Equations to find the intersection between a line and a plane
02340  // f = originX - endX, g = originY - endY, h = originZ - endZ
02341  // d=A*f+B*g+C*h  T=-(A*x2+B*y2+C*z2+D)/d
02342  // origin - end = dir, so (f,g,h)=dir - it's not necessary to process "f,g,h"
02343 
02344  // "T" processing for all planes (distance between the ray origin and the plane)
02345  if (planes[PFRONT] != NOTIO) { // N(0,0,-1)
02346     d = -dir.z;
02347     t[PFRONT] = origin.z / d;
02348  }
02349  if (planes[PTOP] != NOTIO) { // N(0,-1,0)
02350     d = -dir.y;
02351     t[PTOP] = origin.y / d;
02352  }
02353  if (planes[PLEFT] != NOTIO) { // N(-1,0,0)
02354     d = -dir.x;
02355     t[PLEFT] = origin.x / d;
02356  }
02357  if (planes[PBACK] != NOTIO) { // N(0,0,1)
02358     d = dir.z;
02359     t[PBACK] = -(origin.z + dBack) / d;
02360  }
02361  if (planes[PBOTTOM] != NOTIO) { // N(0,1,0)
02362     d = dir.y;
02363     t[PBOTTOM] = -(origin.y + dBottom) / d;
02364  }
02365  if (planes[PRIGHT] != NOTIO) { // N(1,0,0)
02366     d = dir.x;
02367     t[PRIGHT] = -(origin.x + dRight) / d;
02368  }
02369 
02370  // Keep the largest "T" among the "in plane" candidates and the smallest among the "out plane" candidates
02371  for (int i=0; i<6; i++) {
02372      if (planes[i] == NOTIO) continue;
02373      if (planes[i] == PIN) {
02374          if (noIn) {
02375             tIn = t[i];
02376             planeIn = i;
02377             noIn = false;
02378          }
02379          else
02380             if (t[i] > tIn) {
02381                  tIn=t[i];
02382                  planeIn = i;
02383             }
02384      }
02385      else { // planes[i] == POUT
02386          if (noOut) {
02387             tOut = t[i];
02388             planeOut = i;
02389             noOut = false;
02390          }
02391          else
02392             if (t[i] < tOut) {
02393                  tOut=t[i];
02394                  planeOut = i;
02395             }
02396      }
02397  }
02398  
02399  // Intersection point
02400  in = origin + (dir * tIn);
02401  out = origin + (dir * tOut);
02402 
02403  // Rounding error adjustment
02404  switch (planeIn) {
02405     case PFRONT:in.z = 0;
02406                 break;
02407     case PBACK: in.z = maxB.z;
02408                 break;
02409     case PTOP:  in.y = 0;
02410                 break;
02411     case PBOTTOM:in.y = maxB.y;
02412                 break;
02413     case PLEFT: in.x = 0;
02414                 break;
02415     case PRIGHT: in.x = maxB.x;
02416                 break;
02417  }
02418  switch (planeOut) {
02419     case PFRONT:out.z = 0;
02420                 break;
02421     case PBACK: out.z = maxB.z;
02422                 //if (tOut > maxB.z) 
02423                 //  tOut = maxB.z;
02424                 break;
02425     case PTOP:  out.y = 0;
02426                 break;
02427     case PBOTTOM:out.y = maxB.y; break;
02428     case PLEFT: out.x = 0;
02429                 break;
02430     case PRIGHT: out.x = maxB.x;
02431                 break;
02432  }
02433 
02434  if ( ((in<=maxB) && (in>=minB)) && ((out<=maxB) && (out>=minB)) )
02435      return true;
02436  else 
02437      return false;
02438 
02439 }
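// vpFindIntersectionPoints is a face-by-face variant of the classic "slab" test
// against the box [0, maxB]. A compact sketch of the same idea, assuming (as the
// candidate-plane flags do above) that axes with a zero direction component are
// only checked for containment; the helper name (slabIntersect) is illustrative
// only and not part of VPRayCasting.

static bool slabIntersect(const float origin[3], const float dir[3],
                          const float maxB[3], float &tIn, float &tOut)
{
    tIn = -1e30f;  tOut = 1e30f;
    for (int axis = 0; axis < 3; axis++) {
        if (dir[axis] == 0.0f) {
            // Ray parallel to this slab: it must already lie between the faces
            if (origin[axis] < 0.0f || origin[axis] > maxB[axis]) return false;
            continue;
        }
        float t0 = (0.0f       - origin[axis]) / dir[axis];   // near face
        float t1 = (maxB[axis] - origin[axis]) / dir[axis];   // far face
        if (t0 > t1) { float swap = t0; t0 = t1; t1 = swap; }
        if (t0 > tIn)  tIn  = t0;    // latest entry over all slabs
        if (t1 < tOut) tOut = t1;    // earliest exit over all slabs
    }
    return tIn <= tOut;              // entry before exit => the ray hits the box
}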
02440 
02441 
02443 // Description: Method "vpFindSphereIntersectionPoints" computes the  
02444 //              intersection between a ray and a sphere.
02445 // Parameters.: VPPoint3D origin (origin of the ray) 
02446 //              VPPoint3D center (center of the sphere) 
02447 //              float ray (sphere radius) 
02448 //              VPVector3D dir (direction of the ray) 
02449 //              VPPoint3D &point (nearest intersection point)
02450 // Return.....: true (ray intersects the sphere)
02451 //              false (ray doesn't intersect the sphere)
02452 
02453 bool VPRayCasting::vpFindSphereIntersectionPoints(VPPoint3D origin, VPPoint3D center, float ray,
02454                                                   VPVector3D dir, VPPoint3D &point) {
02455  VPVector3D v1;
02456  float v, disc;
02457 
02458  // v1 = vector from the origin to the center of the sphere
02459  v1.x = center.x - origin.x;
02460  v1.y = center.y - origin.y;
02461  v1.z = center.z - origin.z;
02462  
02463  // dot product between v1 and the ray direction
02464  v = v1.vpDotProduct(dir);
02465 
02466  // "E" is the starting point of the ray and "O" is the center of the sphere
02467  // "EO" is the vector from E to O (thus, EO = O - E)
02468  // disc = r^2 - ((EO · EO) - v^2);
02469  disc = ray * ray - ( (v1.vpDotProduct(v1)) - (v*v) );
02470 
02471  if (disc < 0)
02472      return false; // no intersection
02473  else {
02474      disc = sqrt(disc);
02475      point.x = origin.x + (v - disc) * dir.x;
02476      point.y = origin.y + (v - disc) * dir.y;
02477      point.z = origin.z + (v - disc) * dir.z;
02478      return true;
02479  }
02480 
02481 }
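// The geometric test above returns the near intersection at parameter
// t = v - sqrt(disc); the far intersection of the same ray lies at
// t = v + sqrt(disc). A minimal sketch reporting both parameters, with an
// illustrative helper name (sphereHitParameters) that is not part of
// VPRayCasting; dir is assumed to be normalized, as above.

static bool sphereHitParameters(VPPoint3D origin, VPPoint3D center, float radius,
                                VPVector3D dir, float &tNear, float &tFar)
{
    VPVector3D v1;                       // vector from the ray origin to the center
    v1.x = center.x - origin.x;
    v1.y = center.y - origin.y;
    v1.z = center.z - origin.z;
    float v = v1.vpDotProduct(dir);      // projection of v1 onto the ray
    float disc = radius * radius - (v1.vpDotProduct(v1) - v * v);
    if (disc < 0)
        return false;                    // the ray misses the sphere
    disc = sqrt(disc);
    tNear = v - disc;
    tFar  = v + disc;
    return true;
}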
02482 
02483 
02485 // Description: Method "vpTrilinearInterpolation" computes the voxel 
02486 //              color (gray) using trilinear interpolation.
02487 // Parameters.: int i, int j, int k, VPGraphicObj *volume,
02488 //              VPPoint3D p (i,j,k = volume matrix indices; 
02489 //              volume = object that holds all the 
02490 //              information about the volume; p = current sample
02491 //              point along the ray) 
02492 // Return.....: float color (interpolated voxel color)
02493 
02494 float VPRayCasting::vpTrilinearInterpolation(int i, int j, int k,  
02495                                              VPGraphicObj *volume, VPPoint3D p) {
02496 
02497  int C1, C2, C3, C4, C5, C6, C7, C8, ii, jj, kk;
02498  float i1, i2, i3, i4, i5, i6, i7,  // interpolated values
02499        dx, dy, dz;                  // distance (delta)
02500 
02501  // First, read the values of the eight neighboring voxels
02502  ii = ((i+1) < ((VPImage *)volume)->vpGetXDimension()) ? i+1 : i;
02503  jj = ((j+1) < ((VPImage *)volume)->vpGetYDimension()) ? j+1 : j;
02504  kk = ((k+1) < ((VPVolume *)volume)->vpGetZDimension()) ? k+1 : k;
02505  C1 = ((VPVolume *)volume)->vpGetValue(i,j,k);
02506  C2 = ((VPVolume *)volume)->vpGetValue(ii,j,k);
02507  C3 = ((VPVolume *)volume)->vpGetValue(ii,jj,k);
02508  C4 = ((VPVolume *)volume)->vpGetValue(i,jj,k);
02509  C5 = ((VPVolume *)volume)->vpGetValue(i,j,kk);
02510  C6 = ((VPVolume *)volume)->vpGetValue(ii,j,kk);
02511  C7 = ((VPVolume *)volume)->vpGetValue(ii,jj,kk);
02512  C8 = ((VPVolume *)volume)->vpGetValue(i,jj,kk);
02513  
02514  // If all voxels have the same color, it is not necessary to interpolate  
02515  if ((C1==C2) && (C2==C3) && (C3==C4) && (C4==C5) && (C5==C6) && (C6==C7) && (C7==C8))
02516     return( (float)(C1) );
02517 
02518  // Trilinear interpolation (seven linear interpolations)
02519  //                 
02520  //       5-----------------6       
02521  //      /|                /|       
02522  //     i1-------i5-------i2|       
02523  //    /  |      |       /  |       
02524  //   1-----------------2   |       
02525  //   |   |      |      |   |       
02526  //   |   |      i7     |   |       
02527  //   |   |      |      |   |       
02528  //   |   8------|i3----|---7       
02529  //   |  /       |/     |  /        
02530  //   | /        i6     | /     
02531  //   |/        /       |/      
02532  //   4--------i4-------3       
02533  //                 
02534 
02535  // Distance from the voxel 1
02536  dx = p.x-(float)i;
02537  dy = p.y-(float)j;
02538  dz = p.z-(float)k;
02539 
02540  // Interpolation values
02541  i1 = ( (C5-C1) * dz + C1);
02542  i2 = ( (C6-C2) * dz + C2);
02543  i3 = ( (C7-C8) * dx + C8);
02544  i4 = ( (C3-C4) * dx + C4);
02545  i5 = ( (i2-i1) * dx + i1);
02546  i6 = ( (i3-i4) * dz + i4);
02547  i7 = ( (i6-i5) * dy + i5);
02548 
02549  return(i7);
02550 }
02551 
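// Illustrative sketch (editor's addition): the routine above performs the
// textbook seven-lerp trilinear interpolation with the corner numbering of
// the ASCII diagram. The compact formulation, assuming the eight corner
// samples are stored as c[z][y][x] and dx,dy,dz are the fractional offsets
// inside the cell (helper names are hypothetical):
#if 0
static inline float exLerp(float a, float b, float t) { return a + (b - a) * t; }

static float exTrilerp(const float c[2][2][2], float dx, float dy, float dz) {
    float c00 = exLerp(c[0][0][0], c[0][0][1], dx);  // four lerps along x
    float c01 = exLerp(c[0][1][0], c[0][1][1], dx);
    float c10 = exLerp(c[1][0][0], c[1][0][1], dx);
    float c11 = exLerp(c[1][1][0], c[1][1][1], dx);
    float c0  = exLerp(c00, c01, dy);                // two lerps along y
    float c1  = exLerp(c10, c11, dy);
    return exLerp(c0, c1, dz);                       // final lerp along z
}
#endif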
02552 
02554 // Description: Method "vpTriLinearGouraudInterpolation" computes
02555 //              the voxel color (gray level) and the light value using
02556 //              trilinear interpolation.
02557 // Parameters.: int i, int j, int k = volume matrix index;
02558 //              VPGraphicObj *volume = object that holds all the
02559 //                                     information about the volume;
02560 //              VPPoint3D p = sample position along the ray;
02561 //              float &color = interpolated voxel color;
02562 //              float &light = interpolated light value;
02563 //              bool &interpolate = set to TRUE when the eight voxels
02564 //                                  differ and color interpolation is
02565 //                                  performed, FALSE otherwise
02566 // Return.....: -
02567 
02568 void VPRayCasting::vpTriLinearGouraudInterpolation(int i, int j, int k,  
02569                                        VPGraphicObj *volume, VPPoint3D p, 
02570                                        float &color, float &light, bool &interpolate) {
02571  int C1, C2, C3, C4, C5, C6, C7, C8, ii, jj, kk;
02572  float l1, l2, l3, l4, l5, l6, l7, l8,
02573        i1, i2, i3, i4, i5, i6,  // interpolated values 
02574        dx, dy, dz;              // distance (delta)
02575 
02576  // First, voxels color initialization
02577  ii = ((i+1) < ((VPImage *)volume)->vpGetXDimension()) ? i+1 : i;
02578  jj = ((j+1) < ((VPImage *)volume)->vpGetYDimension()) ? j+1 : j;
02579  kk = ((k+1) < ((VPVolume *)volume)->vpGetZDimension()) ? k+1 : k;
02580 
02581  C1 = ((VPVolume *)volume)->vpGetValue(i,j,k);
02582  C2 = ((VPVolume *)volume)->vpGetValue(ii,j,k);
02583  C3 = ((VPVolume *)volume)->vpGetValue(ii,jj,k);
02584  C4 = ((VPVolume *)volume)->vpGetValue(i,jj,k);
02585  C5 = ((VPVolume *)volume)->vpGetValue(i,j,kk);
02586  C6 = ((VPVolume *)volume)->vpGetValue(ii,j,kk);
02587  C7 = ((VPVolume *)volume)->vpGetValue(ii,jj,kk);
02588  C8 = ((VPVolume *)volume)->vpGetValue(i,jj,kk);
02589  
02590 
02591  // Trilinear interpolation for color and light
02592  // (seven linear interpolations)
02593  //                 
02594  //       5-----------------6       
02595  //      /|                /|       
02596  //     i1-------i5-------i2|       
02597  //    /  |      |       /  |       
02598  //   1-----------------2   |       
02599  //   |   |      |      |   |       
02600  //   |   |    color    |   |       
02601  //   |   |      |      |   |       
02602  //   |   8------|i3----|---7       
02603  //   |  /       |/     |  /        
02604  //   | /        i6     | /     
02605  //   |/        /       |/      
02606  //   4--------i4-------3       
02607  //                 
02608 
02609  // Distance from the voxel 1
02610  dx = p.x-i;
02611  dy = p.y-j;
02612  dz = p.z-k;
02613 
02614 
02615  // If all voxels have the same color, it is not necessary to interpolate  
02616  // FUTURE WORK: CHECK A FLAG BIT HERE ???
02617  if ((C1==C2) && (C2==C3) && (C3==C4) && (C4==C5) && (C5==C6) && (C6==C7) && (C7==C8)) {
02618     color = (float)(C1);
02619     interpolate = false;
02620  }
02621  else {
02622      // Voxel interpolation values
02623     i1 = ( (C5-C1) * dz + C1);
02624     i2 = ( (C6-C2) * dz + C2);
02625     i3 = ( (C7-C8) * dx + C8);
02626     i4 = ( (C3-C4) * dx + C4);
02627     i5 = ( (i2-i1) * dx + i1);
02628     i6 = ( (i3-i4) * dz + i4);
02629     color = ( (i6-i5) * dy + i5);
02630     interpolate = true;
02631  }
02632 
02633   // Second, light color initialization
02634  l1 = ((VPVolume *)volume)->vpGetLightValue(i,j,k);
02635  l2 = ((VPVolume *)volume)->vpGetLightValue(ii,j,k);
02636  l3 = ((VPVolume *)volume)->vpGetLightValue(ii,jj,k);
02637  l4 = ((VPVolume *)volume)->vpGetLightValue(i,jj,k);
02638  l5 = ((VPVolume *)volume)->vpGetLightValue(i,j,kk);
02639  l6 = ((VPVolume *)volume)->vpGetLightValue(ii,j,kk);
02640  l7 = ((VPVolume *)volume)->vpGetLightValue(ii,jj,kk);
02641  l8 = ((VPVolume *)volume)->vpGetLightValue(i,jj,kk);
02642 
02643  // Light interpolation values
02644  i1 = ( (l5-l1) * dz + l1);
02645  i2 = ( (l6-l2) * dz + l2);
02646  i3 = ( (l7-l8) * dx + l8);
02647  i4 = ( (l3-l4) * dx + l4);
02648  i5 = ( (i2-i1) * dx + i1);
02649  i6 = ( (i3-i4) * dz + i4);
02650  light = ( (i6-i5) * dy + i5);
02651 
02652  return;
02653 }
02654 
02655 
02657 // Description: Method "vpTriLinearInterpolationAndGradientComputation"
02658 //              computes the voxel color (gray level) using trilinear
02659 //              interpolation and approximates the gradient.
02660 //              The gradient calculation technique used
02661 //              is "interpolated points difference".
02662 // Parameters.: int i, int j, int k = volume matrix index;
02663 //              VPGraphicObj *volume = object that holds all the
02664 //              information about the volume;
02665 //              VPPoint3D p = sample position along the ray;
02666 //              float &color = interpolated voxel color;
02667 //              VPVector3D &g = gradient vector approximation;
02668 //              bool &interpolate = set to TRUE when the eight
02669 //              voxels differ and interpolation is performed,
02670 //              FALSE otherwise
02671 // Return.....: -
02672 
02673 void VPRayCasting::vpTriLinearInterpolationAndGradientComputation(int i, int j, int k,  
02674                                             VPGraphicObj *volume, VPPoint3D p, 
02675                                             float &color, VPVector3D &g, bool &interpolate) {
02676 
02677  int C1, C2, C3, C4, C5, C6, C7, C8, ii, jj, kk;
02678  float i1, i2, i3, i4, i5, i6, i7, i8,  // interpolated values 
02679        PX0, PX1, PY0, PY1, PZ0, PZ1,    // interpolated values 
02680        dx, dy, dz;                      // distance (delta)
02681  
02682  // First, voxels color initialization
02683  ii = ((i+1) < ((VPImage *)volume)->vpGetXDimension()) ? i+1 : i;
02684  jj = ((j+1) < ((VPImage *)volume)->vpGetYDimension()) ? j+1 : j;
02685  kk = ((k+1) < ((VPVolume *)volume)->vpGetZDimension()) ? k+1 : k;
02686  C1 = ((VPVolume *)volume)->vpGetValue(i,j,k);
02687  C2 = ((VPVolume *)volume)->vpGetValue(ii,j,k);
02688  C3 = ((VPVolume *)volume)->vpGetValue(ii,jj,k);
02689  C4 = ((VPVolume *)volume)->vpGetValue(i,jj,k);
02690  C5 = ((VPVolume *)volume)->vpGetValue(i,j,kk);
02691  C6 = ((VPVolume *)volume)->vpGetValue(ii,j,kk);
02692  C7 = ((VPVolume *)volume)->vpGetValue(ii,jj,kk);
02693  C8 = ((VPVolume *)volume)->vpGetValue(i,jj,kk);
02694 
02695  // If all voxels have the same color, it is not necessary to interpolate  
02696  if ((C1==C2) && (C2==C3) && (C3==C4) && (C4==C5) && (C5==C6) && (C6==C7) && (C7==C8)) {
02697     color = (float)(C1);
02698     interpolate = false;
02699  }
02700  else {
02701     // Trilinear interpolation (seven linear interpolations)
02702     //                  
02703     //       5-----------------6        
02704     //      /|                /|        
02705     //     i1------PY0-------i2|        
02706     //    /  |      |       /  |        
02707     //   1-----------------2   |        
02708     //   |   |      |      |   |        
02709     //   |   |    color    |   |        
02710     //   |   |      |      |   |        
02711     //   |   8------|i3----|---7        
02712     //   |  /       |/     |  /     
02713     //   | /       PY1     | /      
02714     //   |/        /       |/       
02715     //   4--------i4-------3        
02716     //                  
02717 
02718     // Distance from the voxel 1
02719     dx = p.x-i;
02720     dy = p.y-j;
02721     dz = p.z-k;
02722 
02723     // Interpolation values
02724     i1  = ( (C5-C1) * dz + C1);
02725     i2  = ( (C6-C2) * dz + C2);
02726     i3  = ( (C7-C8) * dx + C8);
02727     i4  = ( (C3-C4) * dx + C4);
02728     PY0 = ( (i2-i1) * dx + i1);
02729     PY1 = ( (i3-i4) * dz + i4);
02730     color  = ( (PY1-PY0) * dy + PY0); // final voxel color
02731     interpolate = true;
02732 
02733 
02734     // More interpolations for gradient approximation
02735     // Gradient calculation technique: interpolated points difference
02736     // 
02737     //       5-------i7--------6        
02738     //      /|                /|        
02739     //     i1------PY0-------i2|        
02740     //    /| |      |       /| |        
02741     //   1--------i8-------2 | |        
02742     //   | | |      |      | | |        
02743     //   |PX0-------G------|PX1|        
02744     //   | | |      |      | | |        
02745     //   | | 8------|i3----|-|-7    -> PZ1      
02746     //   | |/       |/     | |/     
02747     //   | i5      PY1     | i6     
02748     //   |/        /       |/       
02749     //   4--------i4-------3    ->PZ0   
02750     //                  
02751 
02752     VPPoint3D scale = ((VPVolume *)volume)->vpGetScale();
02753 
02754     // Interpolation values 
02755     i5  = ( (C8-C4) * dz + C4); 
02756     i6  = ( (C7-C3) * dz + C3); 
02757     i7  = ( (C6-C5) * dx + C5); 
02758     i8  = ( (C2-C1) * dx + C1); 
02759     PX0 = ( (i5-i1) * dy + i1); 
02760     PX1 = ( (i6-i2) * dy + i2); 
02761     PZ0 = ( (i4-i8) * dy + i8); 
02762     PZ1 = ( (i3-i7) * dy + i7); 
02763     g.x = (PX0-PX1) * scale.x; 
02764     g.y = (PY0-PY1) * scale.y; 
02765     g.z = (PZ0-PZ1) * scale.z; // gradient approximation
02766     
02767     g.vpNormalize();
02768 
02769  }
02770 
02771 }
02772 
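// Illustrative sketch (editor's addition): the "interpolated points
// difference" gradient above differences values interpolated on opposite
// cell faces (PX0/PX1, PY0/PY1, PZ0/PZ1) and scales them by the voxel
// spacing. The same idea, assuming a hypothetical trilinear sampler
// f(x,y,z) and samples taken one voxel apart around the point:
#if 0
static void exFaceDifferenceGradient(float (*f)(float, float, float),
                                     float x, float y, float z,
                                     float sx, float sy, float sz,
                                     float g[3]) {
    // lower-coordinate sample minus higher-coordinate sample, as above,
    // scaled by the per-axis voxel size; the caller normalizes the result
    g[0] = (f(x - 0.5f, y, z) - f(x + 0.5f, y, z)) * sx;
    g[1] = (f(x, y - 0.5f, z) - f(x, y + 0.5f, z)) * sy;
    g[2] = (f(x, y, z - 0.5f) - f(x, y, z + 0.5f)) * sz;
}
#endif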
02773 
02775 // Description: Method "vpOtherSampleColorAndShading" computes the voxel
02776 //              color (gray level) using trilinear interpolation, the
02777 //              gradient approximation and the voxel light
02778 //              intensity. The gradient calculation technique used
02779 //              is "central difference".
02780 // Parameters.: int i, int j, int k = volume matrix index;
02781 //              VPGraphicObj *volume = object that holds all the
02782 //              information about the volume;
02783 //              VPPoint3D p = sample position along the ray;
02784 //              float previousIlight = previous voxel light intensity
02785 //              (reused when the eight voxels have the same value);
02786 //              VPLight *l = pointer to the light;
02787 //              float &color = interpolated voxel color;
02788 //              float &Ilight = voxel light intensity
02789 // Return.....: -
02790 
02791 void VPRayCasting::vpOtherSampleColorAndShading(int i, int j, int k,  
02792                                             VPGraphicObj *volume, 
02793                                             VPPoint3D p, float previousIlight,
02794                                             VPLight *l, float &color, float &Ilight) {
02795 
02796  int C1, C2, C3, C4, C5, C6, C7, C8, ii, jj, kk, Caux;
02797  float i1, i2, i3, i4, PY0, PY1,        // interpolated values 
02798        dx, dy, dz;                      // distance (delta)
02799  VPVector3D gradient;                   // gradient vector approximation
02800 
02801  // First, voxels color initialization
02802  ii = ((i+1) < ((VPImage *)volume)->vpGetXDimension()) ? i+1 : i;
02803  jj = ((j+1) < ((VPImage *)volume)->vpGetYDimension()) ? j+1 : j;
02804  kk = ((k+1) < ((VPVolume *)volume)->vpGetZDimension()) ? k+1 : k;
02805  C1 = ((VPVolume *)volume)->vpGetValue(i,j,k);
02806  C2 = ((VPVolume *)volume)->vpGetValue(ii,j,k);
02807  C3 = ((VPVolume *)volume)->vpGetValue(ii,jj,k);
02808  C4 = ((VPVolume *)volume)->vpGetValue(i,jj,k);
02809  C5 = ((VPVolume *)volume)->vpGetValue(i,j,kk);
02810  C6 = ((VPVolume *)volume)->vpGetValue(ii,j,kk);
02811  C7 = ((VPVolume *)volume)->vpGetValue(ii,jj,kk);
02812  C8 = ((VPVolume *)volume)->vpGetValue(i,jj,kk);
02813  
02814  // If all voxels have the same color, it is not necessary to interpolate  
02815  if ((C1==C2) && (C2==C3) && (C3==C4) && (C4==C5) && (C5==C6) && (C6==C7) && (C7==C8)) {
02816     color = (float)(C1);
02817     Ilight = previousIlight;
02818  }
02819  else {
02820     // Trilinear interpolation (seven linear interpolations)
02821     //                  
02822     //       5-----------------6        
02823     //      /|                /|        
02824     //     i1------PY0-------i2|        
02825     //    /  |      |       /  |        
02826     //   1-----------------2   |        
02827     //   |   |      |      |   |        
02828     //   |   |    color    |   |        
02829     //   |   |      |      |   |        
02830     //   |   8------|i3----|---7        
02831     //   |  /       |/     |  /     
02832     //   | /       PY1     | /      
02833     //   |/        /       |/       
02834     //   4--------i4-------3        
02835     //                  
02836 
02837     // Distance from the voxel 1
02838     dx = p.x-i;
02839     dy = p.y-j;
02840     dz = p.z-k;
02841 
02842     // Interpolation values
02843     i1  = ( (C5-C1) * dz + C1);
02844     i2  = ( (C6-C2) * dz + C2);
02845     i3  = ( (C7-C8) * dx + C8);
02846     i4  = ( (C3-C4) * dx + C4);
02847     PY0 = ( (i2-i1) * dx + i1);
02848     PY1 = ( (i3-i4) * dz + i4);
02849     color = ( (PY1-PY0) * dy + PY0 ); // final voxel color
02850 
02851 
02852     // More processing for gradient approximation
02853     // Technique: Central Difference Gradient Estimator
02854     // 
02855     //        5-----------------6       
02856     //       /|                /|       
02857     //      / |               / |       
02858     //     /  |              /  |       
02859     // C(x,y,z)-------------2   |       
02860     //    |   |             |   |       
02861     //    |   |             |   |       
02862     //    |   |             |   |       
02863     //    |   8-------------|---C(x+1,y+1,z+1)          
02864     //    |  /              |  /        
02865     //    | /               | /     
02866     //    |/                |/      
02867     //    4-----------------3       
02868     //                  
02869     // Gx = C(x+1,y,z) - C(x-1,y,z)
02870     // Gy = C(x,y+1,z) - C(x,y-1,z)
02871     // Gz = C(x,y,z+1) - C(x,y,z-1)
02872 
02873     if ( (i-1) >= 0  ) {
02874         Caux = ((VPVolume *)volume)->vpGetValue(i-1,j,k);
02875         gradient.x = C2-Caux; // gradient approximation
02876     }
02877     else {
02878         gradient.x = C2-C1; // gradient approximation
02879     }
02880 
02881     if ( (j-1) >= 0  ) {
02882         Caux = ((VPVolume *)volume)->vpGetValue(i,j-1,k);
02883         gradient.y = C4-Caux; // gradient approximation
02884     }
02885     else {
02886         gradient.y = C4-C1; // gradient approximation
02887     }
02888     
02889     if ( (k-1) >= 0  ) {
02890         Caux = ((VPVolume *)volume)->vpGetValue(i,j,k-1);
02891         gradient.z = C5-Caux; // gradient approximation
02892     }
02893     else {
02894         gradient.z = C5-C1; // gradient approximation
02895     }
02896 
02897     gradient.vpNormalize();
02898     Ilight = vpProcessILight(gradient, l);
02899  
02900  }
02901 
02902 }
02903 
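// Illustrative sketch (editor's addition): the central-difference gradient
// estimator used above, assuming a hypothetical clamped voxel accessor
// V(i,j,k); at the volume border the routine above falls back to a
// one-sided difference, which the clamping reproduces.
#if 0
static void exCentralDifference(int (*V)(int, int, int),
                                int i, int j, int k, float g[3]) {
    g[0] = (float) (V(i + 1, j, k) - V(i - 1, j, k));  // Gx
    g[1] = (float) (V(i, j + 1, k) - V(i, j - 1, k));  // Gy
    g[2] = (float) (V(i, j, k + 1) - V(i, j, k - 1));  // Gz
    // normalize g before shading, as vpOtherSampleColorAndShading does
}
#endif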
02904 
02906 // Description: Method "vpGradientComputation" computes the gradient
02907 //              approximation. The gradient calculation technique
02908 //              used is "interpolated points difference".
02909 // Parameters.: int i, int j, int k = volume matrix index;
02910 //              VPGraphicObj *volume = object that holds all the
02911 //              information about the volume;
02912 //              VPPoint3D p = sample position along the ray;
02913 //              VPVector3D &g = gradient vector approximation;
02914 //              bool &interpolate = set to TRUE when the eight voxels
02915 //              differ and the gradient is computed, FALSE otherwise
02916 // Return.....: -
02917 
02918 void VPRayCasting::vpGradientComputation(int i, int j, int k,  
02919                                         VPGraphicObj *volume, VPPoint3D p, 
02920                                         VPVector3D &g, bool &interpolate) {
02921 
02922  int C1, C2, C3, C4, C5, C6, C7, C8, ii, jj, kk;
02923  float i1, i2, i3, i4, i5, i6, i7, i8,  // interpolated values 
02924        PX0, PX1, PY0, PY1, PZ0, PZ1,    // interpolated values 
02925        dx, dy, dz;                      // distance (delta)
02926  
02927  // First, voxels color initialization
02928  ii = ((i+1) < ((VPImage *)volume)->vpGetXDimension()) ? i+1 : i;
02929  jj = ((j+1) < ((VPImage *)volume)->vpGetYDimension()) ? j+1 : j;
02930  kk = ((k+1) < ((VPVolume *)volume)->vpGetZDimension()) ? k+1 : k;
02931  C1 = ((VPVolume *)volume)->vpGetValue(i,j,k);
02932  C2 = ((VPVolume *)volume)->vpGetValue(ii,j,k);
02933  C3 = ((VPVolume *)volume)->vpGetValue(ii,jj,k);
02934  C4 = ((VPVolume *)volume)->vpGetValue(i,jj,k);
02935  C5 = ((VPVolume *)volume)->vpGetValue(i,j,kk);
02936  C6 = ((VPVolume *)volume)->vpGetValue(ii,j,kk);
02937  C7 = ((VPVolume *)volume)->vpGetValue(ii,jj,kk);
02938  C8 = ((VPVolume *)volume)->vpGetValue(i,jj,kk);
02939 
02940  // If all voxels have the same color, it is not necessary to interpolate  
02941  if ((C1==C2) && (C2==C3) && (C3==C4) && (C4==C5) && (C5==C6) && (C6==C7) && (C7==C8)) {
02942     interpolate = false;
02943  }
02944  else {
02945     // More interpolations for gradient approximation
02946     // Gradient calculation technique: interpolated points difference
02947     // 
02948     //       5-------i7--------6        
02949     //      /|                /|        
02950     //     i1------PY0-------i2|        
02951     //    /| |      |       /| |        
02952     //   1--------i8-------2 | |        
02953     //   | | |      |      | | |        
02954     //   |PX0-------G------|PX1|        
02955     //   | | |      |      | | |        
02956     //   | | 8------|i3----|-|-7    -> PZ1      
02957     //   | |/       |/     | |/     
02958     //   | i5      PY1     | i6     
02959     //   |/        /       |/       
02960     //   4--------i4-------3    ->PZ0   
02961     //                  
02962 
02963     // Distance from the voxel 1
02964     dx = p.x-i;
02965     dy = p.y-j;
02966     dz = p.z-k;
02967     VPPoint3D scale = ((VPVolume *)volume)->vpGetScale();
02968 
02969     // Interpolation values
02970     i1  = ( (C5-C1) * dz + C1);
02971     i2  = ( (C6-C2) * dz + C2);
02972     i3  = ( (C7-C8) * dx + C8);
02973     i4  = ( (C3-C4) * dx + C4);
02974     i5  = ( (C8-C4) * dz + C4); 
02975     i6  = ( (C7-C3) * dz + C3); 
02976     i7  = ( (C6-C5) * dx + C5); 
02977     i8  = ( (C2-C1) * dx + C1); 
02978     PY0 = ( (i2-i1) * dx + i1);
02979     PY1 = ( (i3-i4) * dz + i4);
02980     PX0 = ( (i5-i1) * dy + i1); 
02981     PX1 = ( (i6-i2) * dy + i2); 
02982     PZ0 = ( (i4-i8) * dy + i8); 
02983     PZ1 = ( (i3-i7) * dy + i7); 
02984     g.x = (PX0-PX1) * scale.x; 
02985     g.y = (PY0-PY1) * scale.y; 
02986     g.z = (PZ0-PZ1) * scale.z; // gradient approximation
02987     
02988     g.vpNormalize();
02989     interpolate = true;
02990  }
02991 
02992 }
02993 
02994 
02996 // Description: Method "vpProcessILight" computes the light
02997 //              intensity (ambient plus diffuse term).
02998 // Parameters.: VPVector3D g (gradient approximation)
02999 //              VPLight *light (pointer to the light, used as directional)
03000 // Return.....: float Ilight (voxel light intensity)
03001 
03002 float VPRayCasting::vpProcessILight(VPVector3D g, VPLight *light) {
03003     float Ilight=0, dot=0;
03004     VPVector3D l(1,0,0);
03005 
03006     // According to the formula from the book...
03007     l = ((VPDirectionalLight *)light)->vpGetDirection();
03008     dot = g.vpDotProduct(l); 
03009           
03010     if (dot < 0.0)
03011         dot = -dot;
03012         //dot = (float) 0.0;    // no negative contributions
03013 
03014     Ilight = ambientLight + (diffuseLight * dot); 
03015 
03016     if (Ilight > 1) 
03017         Ilight = 1;
03018 
03019     return Ilight;
03020 }
03021 
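// Illustrative sketch (editor's addition): the shading model above is
// ambient plus a Lambertian diffuse term, with negative N.L contributions
// folded in by taking the absolute value and the result clamped to 1.
// It uses the vpDotProduct method seen above (exampleDiffuse is hypothetical).
#if 0
static float exampleDiffuse(VPVector3D n, VPVector3D l,
                            float ambient, float diffuse) {
    float nDotL = (float) fabs(n.vpDotProduct(l)); // |N.L|
    float I = ambient + diffuse * nDotL;
    return (I > 1.0f) ? 1.0f : I;                  // clamp to 1, as above
}
#endif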
03022 
03024 // Description: Method "vpProcessSpecularILight" computes the light
03025 //              intensity including a specular term.
03026 // Parameters.: VPVector3D g (gradient approximation); VPLight *light
03027 //              (pointer to the light); VPVector3D o (viewing direction)
03028 // Return.....: float Ilight (voxel light intensity)
03029 
03030 float VPRayCasting::vpProcessSpecularILight(VPVector3D g, VPLight *light, VPVector3D o) {
03031     float Ilight=0, dot1=0, dot2=0, dot3=0;
03032 
03033     // According to the formula from the book...
03034     VPVector3D l = ((VPDirectionalLight *)light)->vpGetDirection();
03035     dot1 = g.vpDotProduct(l); // g . l
03036     dot2 = g.vpDotProduct(o); // g . o
03037     dot3 = o.vpDotProduct(l); // o . l
03038          
03039     dot1 = fabs(dot1);
03040     dot2 = fabs(dot2);
03041     dot3 = fabs(dot3);
03042 
03043     Ilight = ambientLight + (diffuseLight * dot1) + (float) ( pow( ((2 * dot1 * dot2) - dot3), 50 ) ); 
03044 
03045     if (Ilight > 1) 
03046         Ilight = 1;
03047 
03048     return Ilight;
03049 }
03050 
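// Illustrative sketch (editor's addition): the specular term above relies on
// the identity R.V = 2(N.L)(N.V) - L.V, where R = 2(N.L)N - L is the light
// direction reflected about the normal, so no reflection vector has to be
// built explicitly. Note that the exponent is hard-coded to 50 above rather
// than taken from the specularExponent attribute. Sketch with an explicit
// exponent (exampleSpecular is hypothetical):
#if 0
static float exampleSpecular(VPVector3D n, VPVector3D l, VPVector3D v,
                             float ambient, float diffuse, int exponent) {
    float nl = (float) fabs(n.vpDotProduct(l));       // |N.L|
    float nv = (float) fabs(n.vpDotProduct(v));       // |N.V|
    float lv = (float) fabs(v.vpDotProduct(l));       // |L.V|
    float rDotV = 2.0f * nl * nv - lv;                // R.V without building R
    float I = ambient + diffuse * nl + (float) pow(rDotV, exponent);
    return (I > 1.0f) ? 1.0f : I;                     // clamp to 1
}
#endif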
03051 
03053 // Description: Method "vpSetEndSBand" sends a new value to the
03054 //              endOfSBand attribute (used for inner structures
03055 //              visualization).
03056 // Parameters.: VPPoint3D sb (view proportional variation for x and z)
03057 //              VPGraphicObj *volume,
03058 //              int vt (visualization type)
03059 // Return.....: -
03060 
03061 void VPRayCasting::vpSetEndSBand(VPPoint3D sb, VPGraphicObj *volume, int vt) {
03062 
03063     VPVector3D scanDirection;
03064     VPPoint3D volumeCameraLocation = ((VPVolume *)volume)->vpGetCameraLocationForInnerStructure();
03065 
03066     // "Assuming" that the target is always the central focal point
03067     VPPoint3D target = ((VPVolume *)volume)->vpGetCenterFocalPoint();
03068     target.y = (float) virtualYDimension/2; 
03069 
03070      if (vt == INNERSTRTOPSLICE) {
03071             // To verify scan direction
03072             scanDirection.vpSetVector3D(target - volumeCameraLocation);
03073             scanDirection.vpNormalize();
03074             
03075             if (scanDirection.x==0 && scanDirection.z<0)
03076             {
03077                 if ( (sb.x==0 && sb.z<0) || (sb.x>0 && sb.z<0) || (sb.x<0 && sb.z<0) )
03078                     endOfSBand += abs(sb.z);
03079                 else if ( (sb.x==0 && sb.z>0) || (sb.x<0 && sb.z>0) || (sb.x>0 && sb.z>0)  )
03080                     endOfSBand -= abs(sb.z);
03081             }
03082             else if (scanDirection.x==0 && scanDirection.z>0)
03083             {
03084                 if ( (sb.x==0 && sb.z<0) || (sb.x>0 && sb.z<0) || (sb.x<0 && sb.z<0) )
03085                     endOfSBand -= abs(sb.z);
03086                 else if ( (sb.x==0 && sb.z>0) || (sb.x<0 && sb.z>0) || (sb.x>0 && sb.z>0)  )
03087                     endOfSBand += abs(sb.z);
03088             }
03089             else if (scanDirection.x<0 && scanDirection.z==0)
03090             {
03091                 if ( (sb.x<0 && sb.z==0) || (sb.x<0 && sb.z>0) || (sb.x<0 && sb.z<0) )
03092                     endOfSBand -= abs(sb.x);
03093                 else if ( (sb.x>0 && sb.z==0) || (sb.x>0 && sb.z>0) || (sb.x>0 && sb.z<0)  )
03094                     endOfSBand += abs(sb.x);
03095             }
03096             else if (scanDirection.x>0 && scanDirection.z==0)
03097             {
03098                 if ( (sb.x<0 && sb.z==0) || (sb.x<0 && sb.z>0) || (sb.x<0 && sb.z<0) )
03099                     endOfSBand += abs(sb.x);
03100                 else if ( (sb.x>0 && sb.z==0) || (sb.x>0 && sb.z>0) || (sb.x>0 && sb.z<0)  )
03101                     endOfSBand -= abs(sb.x);
03102             }
03103             else if (scanDirection.x<0 && scanDirection.z<0)
03104             {
03105                 if ( (sb.x==0 && sb.z<0) || (sb.x>0 && sb.z<0) || (sb.x>0 && sb.z==0) || (sb.x<0 && sb.z<0) )
03106                     endOfSBand += (abs(sb.z)+abs(sb.x)) / 2.0;
03107                 else if ( (sb.x==0 && sb.z>0) || (sb.x<0 && sb.z>0) || (sb.x<0 && sb.z==0) || (sb.x>0 && sb.z>0)  )
03108                     endOfSBand -= (abs(sb.z)+abs(sb.x)) / 2.0;
03109             }
03110             else if (scanDirection.x>0 && scanDirection.z>0)
03111             {
03112                 if ( (sb.x==0 && sb.z<0) || (sb.x>0 && sb.z<0) || (sb.x>0 && sb.z==0) || (sb.x<0 && sb.z<0) )
03113                     endOfSBand -= (abs(sb.z)+abs(sb.x)) / 2.0;
03114                 else if ( (sb.x==0 && sb.z>0) || (sb.x<0 && sb.z>0) || (sb.x<0 && sb.z==0) || (sb.x>0 && sb.z>0)  )
03115                     endOfSBand += (abs(sb.z)+abs(sb.x)) / 2.0;
03116             }
03117             else if (scanDirection.x<0 && scanDirection.z>0)
03118             {
03119                 if ( (sb.x==0 && sb.z<0) || (sb.x>0 && sb.z<0) || (sb.x<0 && sb.z==0) || (sb.x<0 && sb.z<0) )
03120                     endOfSBand -= (abs(sb.z)+abs(sb.x)) / 2.0;
03121                 else if ( (sb.x==0 && sb.z>0) || (sb.x<0 && sb.z>0) || (sb.x>0 && sb.z==0) || (sb.x>0 && sb.z>0)  )
03122                     endOfSBand += (abs(sb.z)+abs(sb.x)) / 2.0;
03123             }
03124             else if (scanDirection.x>0 && scanDirection.z<0)
03125             {
03126                 if ( (sb.x==0 && sb.z<0) || (sb.x>0 && sb.z<0) || (sb.x<0 && sb.z==0) || (sb.x<0 && sb.z<0) )
03127                     endOfSBand += (abs(sb.z)+abs(sb.x)) / 2.0;
03128                 else if ( (sb.x==0 && sb.z>0) || (sb.x<0 && sb.z>0) || (sb.x>0 && sb.z==0) || (sb.x>0 && sb.z>0)  )
03129                     endOfSBand -= (abs(sb.z)+abs(sb.x)) / 2.0;
03130             }
03131                         
03132             // Note: the point is always in the middle from bottom to top
03133      }
03134 }
03135 
03136 
03138 // Description: Method "vpGetEndSBand" returns the value of the 
03139 //              endOfSBand attribute (used for inner structures 
03140 //              visualization).
03141 // Parameters.: -
03142 // Return.....: int endOfSBand 
03143 
03144 int VPRayCasting::vpGetEndSBand() { 
03145     return endOfSBand;
03146 }
03147 
03148 
03150 // Description: Method "vpSetEndTBand" sends a new value for the 
03151 //              endOfTBand attribute (used for inner structures 
03152 //              visualization).
03153 // Parameters.: VPPoint3D tb (view proportional variation for x and z)
03154 //              VPGraphicObj *volume,
03155 //              int vt (visualization type)
03156 // Return.....: -
03157 
03158 void VPRayCasting::vpSetEndTBand(VPPoint3D tb, VPGraphicObj *volume, int vt) { 
03159 
03160  VPVector3D scanDirection;
03161  VPPoint3D volumeCameraLocation = ((VPVolume *)volume)->vpGetCameraLocationForInnerStructure();
03162 
03163  // "Assuming" that the target is always the central focal point
03164  VPPoint3D target = ((VPVolume *)volume)->vpGetCenterFocalPoint();
03165  target.y = (float) virtualYDimension/2;    
03166 
03167  if (vt == INNERSTRTOPSLICE) {
03168         // To verify scan direction
03169         scanDirection.vpSetVector3D(target - volumeCameraLocation);
03170         scanDirection.vpNormalize();
03171 
03172         if (scanDirection.x==0 && scanDirection.z<0)
03173         {
03174             if ( (tb.x==0 && tb.z<0) || (tb.x>0 && tb.z<0) || (tb.x<0 && tb.z<0) )
03175                 endOfTBand += abs(tb.z);
03176             else if ( (tb.x==0 && tb.z>0) || (tb.x<0 && tb.z>0) || (tb.x>0 && tb.z>0)  )
03177                 endOfTBand -= abs(tb.z);
03178         }
03179         else if (scanDirection.x==0 && scanDirection.z>0)
03180         {
03181             if ( (tb.x==0 && tb.z<0) || (tb.x>0 && tb.z<0) || (tb.x<0 && tb.z<0) )
03182                 endOfTBand -= abs(tb.z);
03183             else if ( (tb.x==0 && tb.z>0) || (tb.x<0 && tb.z>0) || (tb.x>0 && tb.z>0)  )
03184                 endOfTBand += abs(tb.z);
03185         }
03186         else if (scanDirection.x<0 && scanDirection.z==0)
03187         {
03188             if ( (tb.x<0 && tb.z==0) || (tb.x<0 && tb.z>0) || (tb.x<0 && tb.z<0) )
03189                 endOfTBand -= abs(tb.x);
03190             else if ( (tb.x>0 && tb.z==0) || (tb.x>0 && tb.z>0) || (tb.x>0 && tb.z<0)  )
03191                 endOfTBand += abs(tb.x);
03192         }
03193         else if (scanDirection.x>0 && scanDirection.z==0)
03194         {
03195             if ( (tb.x<0 && tb.z==0) || (tb.x<0 && tb.z>0) || (tb.x<0 && tb.z<0) )
03196                 endOfTBand += abs(tb.x);
03197             else if ( (tb.x>0 && tb.z==0) || (tb.x>0 && tb.z>0) || (tb.x>0 && tb.z<0)  )
03198                 endOfTBand -= abs(tb.x);
03199         }
03200         else if (scanDirection.x<0 && scanDirection.z<0)
03201         {
03202             if ( (tb.x==0 && tb.z<0) || (tb.x>0 && tb.z<0) || (tb.x>0 && tb.z==0) || (tb.x<0 && tb.z<0) )
03203                 endOfTBand += (abs(tb.z)+abs(tb.x)) / 2.0;
03204             else if ( (tb.x==0 && tb.z>0) || (tb.x<0 && tb.z>0) || (tb.x<0 && tb.z==0) || (tb.x>0 && tb.z>0)  )
03205                 endOfTBand -= (abs(tb.z)+abs(tb.x)) / 2.0;
03206         }
03207         else if (scanDirection.x>0 && scanDirection.z>0)
03208         {
03209             if ( (tb.x==0 && tb.z<0) || (tb.x>0 && tb.z<0) || (tb.x>0 && tb.z==0) || (tb.x<0 && tb.z<0) )
03210                 endOfTBand -= (abs(tb.z)+abs(tb.x)) / 2.0;
03211             else if ( (tb.x==0 && tb.z>0) || (tb.x<0 && tb.z>0) || (tb.x<0 && tb.z==0) || (tb.x>0 && tb.z>0)  )
03212                 endOfTBand += (abs(tb.z)+abs(tb.x)) / 2.0;
03213         }
03214         else if (scanDirection.x<0 && scanDirection.z>0)
03215         {
03216             if ( (tb.x==0 && tb.z<0) || (tb.x>0 && tb.z<0) || (tb.x<0 && tb.z==0) || (tb.x<0 && tb.z<0) )
03217                 endOfTBand -= (abs(tb.z)+abs(tb.x)) / 2.0;
03218             else if ( (tb.x==0 && tb.z>0) || (tb.x<0 && tb.z>0) || (tb.x>0 && tb.z==0) || (tb.x>0 && tb.z>0)  )
03219                 endOfTBand += (abs(tb.z)+abs(tb.x)) / 2.0;
03220         }
03221         else if (scanDirection.x>0 && scanDirection.z<0)
03222         {
03223             if ( (tb.x==0 && tb.z<0) || (tb.x>0 && tb.z<0) || (tb.x<0 && tb.z==0) || (tb.x<0 && tb.z<0) )
03224                 endOfTBand += (abs(tb.z)+abs(tb.x)) / 2.0;
03225             else if ( (tb.x==0 && tb.z>0) || (tb.x<0 && tb.z>0) || (tb.x>0 && tb.z==0) || (tb.x>0 && tb.z>0)  )
03226                 endOfTBand -= (abs(tb.z)+abs(tb.x)) / 2.0;
03227         }
03228         
03229         // Note: the point is always in the middle from bottom to top
03230  }
03231 
03232 }
03233 
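// Editor's note: vpSetEndSBand and vpSetEndTBand above are identical except
// for the attribute they update. A possible refactor (sketch only; adjustBand
// is hypothetical and would contain the scan-direction tests verbatim,
// writing into the band passed by reference):
#if 0
void VPRayCasting::adjustBand(int &band, VPPoint3D var, VPGraphicObj *volume, int vt) {
    // ... same scanDirection / variation sign tests as above,
    //     updating 'band' instead of endOfSBand / endOfTBand ...
}

void VPRayCasting::vpSetEndSBand(VPPoint3D sb, VPGraphicObj *volume, int vt) {
    adjustBand(endOfSBand, sb, volume, vt);
}

void VPRayCasting::vpSetEndTBand(VPPoint3D tb, VPGraphicObj *volume, int vt) {
    adjustBand(endOfTBand, tb, volume, vt);
}
#endif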
03234 
03236 // Description: Method "vpGetEndTBand" returns the value of the 
03237 //              endOfTBand attribute (used for inner structures 
03238 //              visualization).
03239 // Parameters.: -
03240 // Return.....: int endOfTBand
03241 
03242 int VPRayCasting::vpGetEndTBand() {
03243     return endOfTBand;
03244 }
03245 
03246 
03248 // Description: Method "vpSetSampleStep" sends a new value 
03249 //              to the sampleStep attribute.
03250 // Parameters.: float s (new value)
03251 // Return.....: -
03252 
03253 void VPRayCasting::vpSetSampleStep(float s) {
03254     sampleStep = s;
03255 }
03256 
03257 
03259 // Description: Method "vpGetSampleStep" returns the  
03260 //              sampleStep attribute value.
03261 // Parameters.: -
03262 // Return.....: sampleStep attribute value
03263 
03264 float VPRayCasting::vpGetSampleStep() {
03265     return sampleStep;
03266 }
03267 
03268 
03270 // Description: Method "vpSetAmbientLight" sends a new value 
03271 //              to the ambientLight attribute.
03272 // Parameters.: float a (new value)
03273 // Return.....: -
03274 
03275 void VPRayCasting::vpSetAmbientLight(float a) {
03276     ambientLight = a;
03277     lightVolumeComputation = true;
03278 }
03279 
03280 
03282 // Description: Method "vpGetAmbientLight" returns the ambientLight
03283 //              attribute value.
03284 // Parameters.: -
03285 // Return.....: ambientLight attribute value
03286 
03287 float VPRayCasting::vpGetAmbientLight() {
03288     return ambientLight;
03289 }
03290 
03291 
03293 // Description: Method "vpSetDiffuseLight" sends a new value 
03294 //              to the diffuseLight attribute.
03295 // Parameters.: float d (new value)
03296 // Return.....: -
03297 
03298 void VPRayCasting::vpSetDiffuseLight(float d) {
03299     diffuseLight = d;
03300 }
03301 
03302 
03304 // Description: Method "vpGetDiffuseLight" returns the diffuseLight
03305 //              attribute value.
03306 // Parameters.: -
03307 // Return.....: diffuseLight attribute value
03308 
03309 float VPRayCasting::vpGetDiffuseLight() {
03310     return diffuseLight;
03311 }
03312 
03313 
03315 // Description: Method "vpSetSpecularExponent" sends a new value 
03316 //              to the specularExponent attribute.
03317 // Parameters.: int s (new value)
03318 // Return.....: -
03319 
03320 void  VPRayCasting::vpSetSpecularExponent(int s) {
03321     specularExponent = s;
03322 }
03323 
03324 
03326 // Description: Method "vpGetSpecularExponent" returns the 
03327 //              specularExponent attribute value.
03328 // Parameters.: -
03329 // Return.....: specularExponent attribute value
03330 
03331 int   VPRayCasting::vpGetSpecularExponent() {
03332     return specularExponent;
03333 }
03334 
03335 
03337 // Description: Method "vpSetSpecular" sends a new value 
03338 //              to the specular attribute.
03339 // Parameters.: bool s (new value)
03340 // Return.....: -
03341 
03342 void  VPRayCasting::vpSetSpecular(bool s) {
03343     specular = s;
03344 }
03345 
03346 
03348 // Description: Method "vpGetSpecular" returns the 
03349 //              specular attribute value.
03350 // Parameters.: -
03351 // Return.....: specular attribute value
03352 
03353 bool  VPRayCasting::vpGetSpecular() {
03354     return specular;
03355 }
03356 
03357 
03359 // Description: Method "vpSetShadingMethod" sends a new value 
03360 //              to the shadingMethod attribute.
03361 // Parameters.: int sm (new value)
03362 // Return.....: -
03363 
03364 void VPRayCasting::vpSetShadingMethod(int sm) {
03365     shadingMethod = sm;
03366 }
03367 
03368 
03370 // Description: Method "vpGetShadingMethod" returns the 
03371 //              shadingMethod attribute value.
03372 // Parameters.: -
03373 // Return.....: shadingMethod attribute value
03374 
03375 int VPRayCasting::vpGetShadingMethod() {
03376     return shadingMethod;
03377 }
03378 
03379 
03381 // Description: Method "vpSetLightDirection" sends a new value 
03382 //              to the light direction. The light attribute is
03383 //              used to store the updated direction.
03384 // Parameters.: VPVector3D ld (new value)
03385 // Return.....: -
03386 
03387 void VPRayCasting::vpSetLightDirection(VPVector3D ld) {
03388     lightVolumeComputation = true;
03389     ((VPDirectionalLight *)light)->vpSetDirection(ld);
03390 }
03391 
03392 
03394 // Description: Method "vpGetLightDirection" returns the light
03395 //              direction in accordance with the light attribute 
03396 //              value.
03397 // Parameters.: -
03398 // Return.....: VPVector3D ld (light direction)
03399 
03400 VPVector3D VPRayCasting::vpGetLightDirection() {
03401     return ((VPDirectionalLight *)light)->vpGetDirection();
03402 }
03403 
03404 
03406 // Description: Method "vpSetTypeOfCuttingTool" sends a new value 
03407 //              to the typeOfCuttingTool attribute.
03408 // Parameters.: int t (new value)
03409 // Return.....: -
03410 
03411 void VPRayCasting::vpSetTypeOfCuttingTool(int t) {
03412     typeOfCuttingTool = t;
03413 }
03414 
03415 
03417 // Description: Method "vpGetTypeOfCuttingTool" returns the 
03418 //              typeOfCuttingTool attribute value.
03419 // Parameters.: -
03420 // Return.....: int typeOfCuttingTool
03421 
03422 int VPRayCasting::vpGetTypeOfCuttingTool() {
03423     return typeOfCuttingTool;
03424 }
03425 
03426 
03428 // Description: Method "vpDefineA" processes the "a" variable (the ray
03429 //              entry point pIn) when a cutting tool is being used.
03430 // Parameters.: see the method signature (pIn is updated in place)
03431 // Return.....: -
03432 
03433 void VPRayCasting::vpDefineA(VPPoint3D &pIn, VPPoint3D pOut, VPPoint3D p1, VPVector3D deltaZ, float yCorrection, VPVector3D plane1Normal, VPGraphicObj *volume, VPPoint3D volumeDimension, float d) {
03434  int i;
03435  float denom, f, g, h;
03436  VPPoint3D pInNew, pInAux, temp;
03437 
03438  switch(typeOfCuttingTool)
03439  {
03440     case OBLIQUESLICE:  // Cutting technique: just one slice (orthogonal or oblique, according to camera position)
03441                         pIn = p1;
03442                         pIn.y = pIn.y*yCorrection;
03443                         for(i=0; i<(sampleStep*distanceObliquePlane); i++) //sampleStep*80 or *53
03444                             pIn = pIn + deltaZ; // projectionDirection; 
03445                         pIn = pIn + deltaZ;
03446                         break;
03447     case TWOCUTPLANES:  
03448     case ONECUTPLANE:   // Cutting technique: one cut plane (from the cut plane INTERSECTION 
03449                         //                  to the end of the volume (back) ) - plane1Normal, d, ray=pIn-pOut 
03450                         f = pIn.x - pOut.x;
03451                         g = pIn.y - pOut.y;
03452                         h = pIn.z - pOut.z;
03453                         denom = plane1Normal.x*f + plane1Normal.y*g + plane1Normal.z*h;
03454                         denom = - ( (plane1Normal.x*pOut.x + plane1Normal.y*pOut.y + plane1Normal.z*pOut.z + d) / denom );
03455                         pInNew.x = pOut.x + f*denom;
03456                         pInNew.y = pOut.y + g*denom;
03457                         pInNew.z = pOut.z + h*denom; // pInNew = intersection point
03458                         pInAux = pInNew + deltaZ;
03459                         f = plane1Normal.x*pInAux.x + plane1Normal.y*pInAux.y + plane1Normal.z*pInAux.z + d;
03460                         g = plane1Normal.x*pIn.x    + plane1Normal.y*pIn.y    + plane1Normal.z*pIn.z    + d;
03461                         if ( f < 0 && g > 0 )
03462                             pIn = pInNew;
03463                         break;
03464     case TWOCUTPLANESINCLUSIONOPAC: // Cutting technique: just if the samples are IN the cut planes 
03465                                     //                      (plane1Normal, d, ray=pIn-pOut)
03466                         f = pIn.x - pOut.x;
03467                         g = pIn.y - pOut.y;
03468                         h = pIn.z - pOut.z;
03469                         denom = plane1Normal.x*f + plane1Normal.y*g + plane1Normal.z*h;
03470                         denom = - ( (plane1Normal.x*pOut.x + plane1Normal.y*pOut.y + plane1Normal.z*pOut.z + d) / denom );
03471                         pInNew.x = pOut.x + f*denom;
03472                         pInNew.y = pOut.y + g*denom;
03473                         pInNew.z = pOut.z + h*denom; // pInNew = intersection point
03474                         pInAux = pInNew + deltaZ;
03475                         f = plane1Normal.x*pInAux.x + plane1Normal.y*pInAux.y + plane1Normal.z*pInAux.z + d;
03476                         g = plane1Normal.x*pIn.x    + plane1Normal.y*pIn.y    + plane1Normal.z*pIn.z    + d;
03477                         if ( f < 0 && g > 0 )
03478                             pIn = pInNew;
03479                         break;
03480  }
03481 
03482 }
03483 
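// Illustrative sketch (editor's addition): the ONECUTPLANE/TWOCUTPLANES
// cases above intersect the ray with the cutting plane N.P + d = 0 by
// writing the ray as P(t) = pOut + t*(pIn - pOut) and solving
// t = -(N.pOut + d) / (N.(pIn - pOut)). Sketch (exRayPlane is hypothetical;
// a near-zero denominator means the ray is parallel to the plane):
#if 0
static bool exRayPlane(VPPoint3D pOut, VPPoint3D pIn,
                       VPVector3D n, float d, VPPoint3D &hit) {
    float fx = pIn.x - pOut.x, fy = pIn.y - pOut.y, fz = pIn.z - pOut.z;
    float denom = n.x*fx + n.y*fy + n.z*fz;
    if (fabs(denom) < 1e-6) return false;          // ray parallel to the plane
    float t = -(n.x*pOut.x + n.y*pOut.y + n.z*pOut.z + d) / denom;
    hit.x = pOut.x + fx*t;
    hit.y = pOut.y + fy*t;
    hit.z = pOut.z + fz*t;
    return true;
}
#endif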
03484 
03486 // Description: Method "vpDefineB" processes the "b" variable (the sample
03487 //              opacity alpha) when a cutting tool is being used.
03488 // Parameters.: see the method signature (alpha is set to 0 for cut samples)
03489 // Return.....: -
03490 
03491 void VPRayCasting::vpDefineB(int i, int j, int k, float &alpha, VPGraphicObj *volume, VPPoint3D &pIn, int depth, int &ivalue) {
03492 
03493  switch(typeOfCuttingTool)
03494  {
03495     case TWOCUTPLANES:  // Hack ("BAKA") to simulate a back cutting plane 
03496                         if (j > backCuttingPlane)
03497                             alpha = 0.0;
03498                         break;
03499     case CUBEBYINCLUSION: // Cutting technique: a parallelepiped is used as a selection tool 
03500                           // Discard everything that is outside the parallelepiped
03501                         if ( ( (i>p2Parallelepiped.x) || (i<p1Parallelepiped.x) ) ||
03502                              ( (j>p2Parallelepiped.y) || (j<p1Parallelepiped.y) ) || 
03503                              ( (k>p2Parallelepiped.z) || (k<p1Parallelepiped.z) ) )
03504                             alpha = 0.0;
03505                         break;
03506     case CUBEBYEXCLUSION: // Cutting technique: with a parallelepiped as a cut volume
03507                         if ( ( (i<p2Parallelepiped.x) && (i>p1Parallelepiped.x) ) &&
03508                              ( (j<p2Parallelepiped.y) && (j>p1Parallelepiped.y) ) && 
03509                              ( (k<p2Parallelepiped.z) && (k>p1Parallelepiped.z) ) )
03510                             alpha = 0.0;
03511                             //if (alpha != 0) alpha = 0.05;
03512                         break;
03513     case SPHEREBYINCLUSION:
03514                         break;
03515     case SPHEREBYEXCLUSION:
03516                         break;
03517 
03518  }
03519 
03520 }
03521 
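// Editor's note: the SPHEREBYINCLUSION / SPHEREBYEXCLUSION cases above are
// left empty in this file. A purely hypothetical test (sphereCenter and
// sphereRadius are NOT attributes of this class; they are placeholders for
// whatever the tool would use) could mirror the parallelepiped cases:
#if 0
float dx2 = (i - sphereCenter.x) * (i - sphereCenter.x);
float dy2 = (j - sphereCenter.y) * (j - sphereCenter.y);
float dz2 = (k - sphereCenter.z) * (k - sphereCenter.z);
bool inside = (dx2 + dy2 + dz2) <= sphereRadius * sphereRadius;
if (typeOfCuttingTool == SPHEREBYINCLUSION && !inside) alpha = 0.0; // keep only the sphere
if (typeOfCuttingTool == SPHEREBYEXCLUSION &&  inside) alpha = 0.0; // carve the sphere out
#endif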
03522 
03523 /*+-------------------------------------------------------------------*/
03524 /*+--------------+
03525   |   Classify   |
03526   +--------------+*/
03527 
03528 /* This routine performs the actual classification */
03529 
03530 void    VPRayCasting::Classify
03531 (
03532     float   *intensity,
03533     float   *gm,
03534     float   *luminance,
03535     float   *alpha
03536 )
03537 
03538 {
03539     int     iGm;
03540     int     composite;
03541 
03542     /* 442 is maximum gm value. SQRT(255^2 + 255^2 + 255^2), since we only
03543        work on 8 bit data. The gradient magnitude is converted into six
03544        bits.
03545     */
03546 
03547     iGm = (int) (*gm / 442.0f * 64.0f); 
03548     composite = (iGm<<8) | (unsigned char) *intensity;
03549 
03550     *alpha = (float) alphaTable[composite] / 255.0f;
03551     *luminance = (float) luminanceTable[composite];
03552 }
03553 
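/* Editor's note (illustrative, not part of the original source): the
   classification tables are addressed by a 16-bit composite key laid out as
     bits 0..7   intensity (8 bits)
     bits 8..13  gradient magnitude quantised to 6 bits (0..63)
     bits 14..15 material index (0..3), used when the tables are built.
   Classify/ClassifyColor above build the key with the index bits left at 0.
   Sketch of the packing (exPackKey is hypothetical): */
#if 0
static int exPackKey(int index, int gm6bits, int intensity) {
    return (index << 14) | (gm6bits << 8) | (intensity & 0xFF);
}
/* e.g. exPackKey(0, (int)(gm / 442.0f * 64.0f), (int)(unsigned char)intensity) */
#endif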
03554 
03555 /*+-------------------------------------------------------------------*/
03556 /*+-------------------+
03557   |   ClassifyColor   |
03558   +-------------------+*/
03559 
03560 void    VPRayCasting::ClassifyColor
03561 (
03562     float   *intensity,
03563     float   *gm,
03564     float   *r,
03565     float   *g,
03566     float   *b,
03567     float   *alpha
03568 )
03569 
03570 {
03571     int     iGm;
03572     int     composite;
03573 
03574     /* 442 is maximum gm value. SQRT(255^2 + 255^2 + 255^2), since we only
03575        work on 8 bit data. The gradient magnitude is converted into six
03576        bits.
03577     */
03578 
03579     iGm = (int) (*gm / 442.0f * 64.0f); 
03580     composite = (iGm<<8) | (unsigned char) *intensity;
03581 
03582     *alpha = (float) alphaTable[composite] / 255.0f;
03583     *r = (float) RGBtable[3*composite];
03584     *g = (float) RGBtable[3*composite+1];
03585     *b = (float) RGBtable[3*composite+2];
03586 
03587 }
03588 
03589 
03590 /*+-------------------------------------------------------------------*/
03591 /*+----------------------------+
03592   |   SetClassificationTable   |
03593   +----------------------------+*/
03594 
03595 void    VPRayCasting::SetClassificationTable (void)
03596 
03597 /* Precompute the opacity lookup table and the color lookup table */
03598 
03599 {
03600         float           alpha;
03601         float           denominator;
03602         int             index, magnitude, intensity, ind;
03603 
03604 
03605 /* The luminance table is only used when the final image should be
03606    a grayscale. */
03607 
03608 /* The format that is assumed here is 2 bits for index information,
03609    6 bits for gradient magnitude and 8 bits for intensity values,
03610    in that order */
03611   
03612     /* The levThreshold and levWidth variables determine the position
03613        of the classification function. Experiment with different 
03614        values to see their effect. The current settings are reasonable
03615        for both the ramp classification function and the levoy 'tent'
03616        classification function. But they are dataset dependent, and
03617        also will differ depending on what you want to make translucent
03618        in a dataset.
03619 
03620         Now, these values are read from the parameters file and the 
03621         following commands are no longer necessary...
03622         levThreshold = (float) 180.0; // 128.0 for the engine dataset; 88.0 for cthead.vol
03623         levWidth = (float) 3.0; // 2.0 for the engine dataset; 4.0 for cthead.vol
03624 
03625      */
03626 
03627     for (index = 0; index < 4; index++)
03628     {
03629         for (magnitude = 0; magnitude < 64; magnitude++)
03630         {
03631             for (intensity = 0; intensity < 256; intensity++)
03632             {
03633                 ind = intensity + (magnitude << 8) + (index << 14);
03634 
03635             /*----- Simple ramp classification function -----*/
03636 
03637 #if 0
03638                 if ((intensity >= levThreshold) &&
03639                     (intensity < (levThreshold + levWidth)))
03640                     alpha = ((float) intensity - levThreshold) / levWidth;
03641                 else if (intensity < levThreshold)
03642                     alpha= (float) 0.0;
03643                 else
03644                     alpha= (float) 1.0;
03645 
03646 #endif
03647 
03648             /*----- Slight variation on Levoy's 'tent' classification function -----*/
03649 
03650                 denominator = levWidth * (float) (magnitude<<2);
03651                 if (denominator > EPSILON)
03652                     alpha = (float) (1.0 -
03653                         (double) fabs ((double) ((levThreshold-intensity) /
03654                         denominator)));
03655                 else
03656                     alpha = (float) 0.0;
03657 
03658 
03659             /*----- Encode alpha value into unsigned char table -----*/
03660 
03661                 if (alpha>0.0)
03662                     alphaTable[ind] = (unsigned char) (255.0 * alpha);
03663                 else
03664                     alphaTable[ind] = (unsigned char) 0;
03665 
03666             /*----- Set luminance value in unsigned char table -----*/
03667 
03668                 luminanceTable[ind] = 255;
03669 
03670             /*----- For index values other than zero, set a color -----*/
03671 
03672                 if (index==0)
03673                 {
03674                     RGBtable[3*ind  ] = 255;
03675                     RGBtable[3*ind+1] = 255;
03676                     RGBtable[3*ind+2] = 255;
03677                 }
03678                 else if (index==2)
03679                 {
03680                     RGBtable[3*ind  ] = 243;
03681                     RGBtable[3*ind+1] = 208;
03682                     RGBtable[3*ind+2] = 190;
03683                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03684                 }
03685                 else if (index==1)
03686                 {
03687                     RGBtable[3*ind  ] = (unsigned char) (255.0 *0.95);
03688                     RGBtable[3*ind+1] = (unsigned char) (255.0 *0.10);
03689                     RGBtable[3*ind+2] = (unsigned char) (255.0 *0.10);
03690                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03691                 }
03692                 else if (index==3)
03693                 {
03694                     RGBtable[3*ind  ] = (unsigned char) (255.0 *0.38);
03695                     RGBtable[3*ind+1] = (unsigned char) (255.0 *0.50);
03696                     RGBtable[3*ind+2] = (unsigned char) (255.0 *0.90);
03697                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03698                 }
03699             }
03700         }
03701     }
03702 }
03703 
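/* Editor's note (illustrative): the opacity assigned above is a variant of
   Levoy's iso-value "tent" function,
     alpha(v, |g|) = max(0, 1 - |levThreshold - v| / (levWidth * |g|)),
   so samples are most opaque where the intensity is close to levThreshold
   and the gradient magnitude is large. Sketch, with the magnitude already
   quantised to 6 bits as in the loop above (exLevoyTent is hypothetical): */
#if 0
static float exLevoyTent(float intensity, float magnitude6, float threshold, float width) {
    float denom = width * (magnitude6 * 4.0f);     // same rescale as (magnitude << 2)
    if (denom <= EPSILON) return 0.0f;             // flat regions get zero opacity
    float a = 1.0f - (float) fabs((threshold - intensity) / denom);
    return (a > 0.0f) ? a : 0.0f;                  // negative values are clamped to 0
}
#endif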
03704 
03705 /*+-------------------------------------------------------------------*/
03706 /*+-----------------------------+
03707   |   SetClassificationTable2   |
03708   +-----------------------------+*/
03709 
03710 void    VPRayCasting::SetClassificationTable2 (void)
03711 
03712 /* Precompute the opacity lookup table and the color lookup table */
03713 
03714 {
03715         float           alpha;
03716         int             index, magnitude, intensity, ind;
03717 
03718 /* The luminance table is only used when the final image should be
03719    a grayscale. */
03720 
03721 /* The format that is assumed here is 2 bits for index information,
03722    6 bits for gradient magnitude and 8 bits for intensity values,
03723    in that order */
03724   
03725     /* The levThreshold and levWidth variables determine the position
03726        of the classification function. Experiment with different 
03727        values to see their effect. The current settings are reasonable
03728        for both the ramp classification function and the levoy 'tent'
03729        classification function. But they are dataset dependent, and
03730        also will differ depending on what you want to make translucent
03731        in a dataset.
03732 
03733         Now, these values are read from the parameters file and the 
03734         following commands are no longer necessary...
03735         levThreshold = (float) 252.0; // 128.0 for the engine dataset
03736         levWidth = (float) 4.0; //2.0
03737      */
03738 
03739 
03740     for (index = 0; index < 4; index++)
03741     {
03742         for (magnitude = 0; magnitude < 64; magnitude++)
03743         {
03744             for (intensity = 0; intensity < 256; intensity++)
03745             {
03746                 ind = intensity + (magnitude << 8) + (index << 14);
03747 
03748             /*----- Simple ramp classification function -----*/
03749 
03750 #if 0
03751                 if ((intensity >= levThreshold) &&
03752                     (intensity < (levThreshold + levWidth)))
03753                     alpha = ((float) intensity - levThreshold) / levWidth;
03754                 else if (intensity < levThreshold)
03755                     alpha= (float) 0.0;
03756                 else
03757                     alpha= (float) 1.0;
03758 
03759 #endif
03760 
03761             /*----- Linear function (discards values below 40 or 8) -----*/
03762                 if ( intensity < initialValueForLinearOpacity)
03763                     alpha = ( (float) 0);
03764                 else
03765                     alpha = ((float) (intensity-initialValueForLinearOpacity) / (float)(255-initialValueForLinearOpacity) );
03766 
03767             /*----- Linear function (block: 80=0.06, 240-255=1) -----*/
03768             //if (intensity == 80)
03769             //  alpha = ( (float) 0.06 );
03770             //else if ( intensity >=240 )
03771             //  alpha = ( (float) 1.0 );
03772             //else
03773             //  alpha = ( (float) 0.0 );
03774 
03775             /*----- Linear function (ramp) -----*/
03776             //float ic = (float)255.0 / (float)2.0;
03777             //if (intensity > ic)
03778             //  alpha =  ( ((float)225.0) - intensity ) / ic;
03779             //else
03780             //  alpha =  intensity / ic;
03781 
03782             /*----- Linear function (exponential) - DID NOT WORK!!! -----*/
03783             //float aux = ( (9.0f * intensity) / 255.0f ) + 1.0f;
03784             //aux = (float) log(aux);
03785 
03786             /*----- Encode alpha value into unsigned char table -----*/
03787 
03788                 if (alpha>0.0)
03789                     alphaTable[ind] = (unsigned char) (255.0 * alpha);
03790                 else
03791                     alphaTable[ind] = (unsigned char) 0;
03792 
03793             /*----- Set luminance value in unsigned char table -----*/
03794 
03795                 luminanceTable[ind] = 255;
03796 
03797             /*----- For index values other than zero, set a color -----*/
03798 
03799                 if (index==0)
03800                 {
03801                     RGBtable[3*ind  ] = 255;
03802                     RGBtable[3*ind+1] = 255;
03803                     RGBtable[3*ind+2] = 255;
03804                 }
03805                 else if (index==2)
03806                 {
03807                     RGBtable[3*ind  ] = 243;
03808                     RGBtable[3*ind+1] = 208;
03809                     RGBtable[3*ind+2] = 190;
03810                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03811                 }
03812                 else if (index==1)
03813                 {
03814                     RGBtable[3*ind  ] = (unsigned char) (255.0 *0.95);
03815                     RGBtable[3*ind+1] = (unsigned char) (255.0 *0.10);
03816                     RGBtable[3*ind+2] = (unsigned char) (255.0 *0.10);
03817                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03818                 }
03819                 else if (index==3)
03820                 {
03821                     RGBtable[3*ind  ] = (unsigned char) (255.0 *0.38);
03822                     RGBtable[3*ind+1] = (unsigned char) (255.0 *0.50);
03823                     RGBtable[3*ind+2] = (unsigned char) (255.0 *0.90);
03824                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03825                 }
03826             }
03827         }
03828     }
03829 }
03830 
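/* Editor's note (illustrative): the active opacity mapping in
   SetClassificationTable2 is a linear ramp that is fully transparent below
   initialValueForLinearOpacity (v0) and reaches 1.0 at intensity 255:
     alpha(v) = 0                      if v <  v0
     alpha(v) = (v - v0) / (255 - v0)  if v >= v0
   Sketch (exLinearRamp is hypothetical): */
#if 0
static float exLinearRamp(float v, float v0) {
    return (v < v0) ? 0.0f : (v - v0) / (255.0f - v0);
}
#endif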
03831 
03832 /*+-------------------------------------------------------------------*/
03833 /*+-----------------------------+
03834   |   SetClassificationTable3   |
03835   +-----------------------------+*/
03836 
03837 void    VPRayCasting::SetClassificationTable3 (void)
03838 
03839 /* Precompute the opacity lookup table and the color lookup table */
03840 
03841 {
03842         float           alpha;
03843         int             index, magnitude, intensity, ind;
03844 
03845 /* The luminance table is only used when the final image should be
03846    a grayscale. */
03847 
03848 /* The format that is assumed here is 2 bits for index information,
03849    6 bits for gradient magnitude and 8 bits for intensity values,
03850    in that order */
03851   
03852     /* The levThreshold and levWidth variables determine the position
03853        of the classification function. Experiment with different 
03854        values to see their effect. The current settings are reasonable
03855        for both the ramp classification function and the levoy 'tent'
03856        classification function. But they are dataset dependent, and
03857        also will differ depending on what you want to make translucent
03858        in a dataset.
03859 
03860        Now, these values are read from the parameters file and the 
03861        following commands are no longer necessary...
03862        levThreshold = (float) 252.0; // 128.0 for the engine dataset
03863        levWidth = (float) 4.0; //2.0
03864      */
03865 
03866     for (index = 0; index < 4; index++)
03867     {
03868         for (magnitude = 0; magnitude < 64; magnitude++)
03869         {
03870             for (intensity = 0; intensity < 256; intensity++)
03871             {
03872                 ind = intensity + (magnitude << 8) + (index << 14);
03873 
03874             /*----- Simple ramp classification function -----*/
03875 
03876 #if 0
03877                 if ((intensity >= levThreshold) &&
03878                     (intensity < (levThreshold + levWidth)))
03879                     alpha = ((float) intensity - levThreshold) / levWidth;
03880                 else if (intensity < levThreshold)
03881                     alpha= (float) 0.0;
03882                 else
03883                     alpha= (float) 1.0;
03884 
03885 #endif
03886 
03887             /*----- Block-type opacity table (79-81 -> 0.05, 240-255 -> 1.0, 0 otherwise) -----*/
03888                 if ( intensity >= 79 && intensity <= 81 ) 
03889                     alpha = ( (float) 0.05);
03890                 else if ( intensity >= 240 )
03891                     alpha = ((float) 1 );
03892                 else
03893                     alpha = ((float) 0 );
03894 
03895             /*----- Encode alpha value into unsigned char table -----*/
03896 
03897                 if (alpha>0.0)
03898                     alphaTable[ind] = (unsigned char) (255.0 * alpha);
03899                 else
03900                     alphaTable[ind] = (unsigned char) 0;
03901 
03902             /*----- Set luminance value in unsigned char table -----*/
03903 
03904                 luminanceTable[ind] = 255;
03905 
03906             /*----- Set a color for each index value (indices other than zero also get a fixed opacity) -----*/
03907 
03908                 if (index==0)
03909                 {
03910                     RGBtable[3*ind  ] = 255;
03911                     RGBtable[3*ind+1] = 255;
03912                     RGBtable[3*ind+2] = 255;
03913                 }
03914                 else if (index==2)
03915                 {
03916                     RGBtable[3*ind  ] = 243;
03917                     RGBtable[3*ind+1] = 208;
03918                     RGBtable[3*ind+2] = 190;
03919                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03920                 }
03921                 else if (index==1)
03922                 {
03923                     RGBtable[3*ind  ] = (unsigned char) (255.0 *0.95);
03924                     RGBtable[3*ind+1] = (unsigned char) (255.0 *0.10);
03925                     RGBtable[3*ind+2] = (unsigned char) (255.0 *0.10);
03926                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03927                 }
03928                 else if (index==3)
03929                 {
03930                     RGBtable[3*ind  ] = (unsigned char) (255.0 *0.38);
03931                     RGBtable[3*ind+1] = (unsigned char) (255.0 *0.50);
03932                     RGBtable[3*ind+2] = (unsigned char) (255.0 *0.90);
03933                     alphaTable[ind]   = (unsigned char) (255.0 *0.5);
03934                 }
03935             }
03936         }
03937     }
03938 }
03939 
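For readers following the table construction above, the sketch below (not part of vpraycasting.cpp) shows how the 16-bit lookup index combines the 8-bit intensity, the 6-bit gradient magnitude and the 2-bit segmentation index, and what a Levoy-style 'tent' opacity function, mentioned in the comment about levThreshold and levWidth, could look like. The helper names are hypothetical; only the bit layout and the roles of levThreshold and levWidth come from the code above, and the tent shown here assumes a unit gradient-magnitude term.

    // Illustrative sketch only -- not part of the VPRayCasting class.
    #include <cmath>

    // Pack the 8-bit intensity, 6-bit gradient magnitude and 2-bit segmentation
    // index exactly as the loops above do: ind = intensity + (magnitude<<8) + (index<<14).
    inline int packClassificationIndex(int intensity, int magnitude, int index)
    {
        return (intensity & 0xFF) | ((magnitude & 0x3F) << 8) | ((index & 0x03) << 14);
    }

    // A Levoy-style "tent" opacity centered on levThreshold: fully opaque at the
    // threshold, falling off linearly to zero over +/- levWidth.  This simplified
    // form assumes a unit gradient-magnitude term.
    inline float levoyTentOpacity(float intensity, float levThreshold, float levWidth)
    {
        float d = (float) std::fabs(intensity - levThreshold);
        return (d <= levWidth) ? (1.0f - d / levWidth) : 0.0f;
    }

With these helpers, alphaTable[packClassificationIndex(intensity, magnitude, index)] would return the precomputed opacity of a classified sample during ray traversal.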
03940 
03941 /*+-------------------------------------------------------------------*/
03942 /*+-----------+
03943   |   Shade   |
03944   +-----------+*/
03945 
03946 float   VPRayCasting::Shade
03947 (
03948     VPVector3D gradient
03949 )
03950 
03951 {
03952     VPVector3D  light;
03953     float       dot;
03954     float       intensity;  // returned attenuation factor
03955 
03956     //                        // head front; // sphere;  // engine
03957     //light.x = (float)     0;  // -1.0;      //  0.0;    //  2.0
03958     //light.y = (float)     0;  // -0.25;     //  0.25;   // -1.25
03959     //light.z = (float)  -1.0;  //  2.0;      // -4/2.0;  //  2.0
03960 
03961     light = vpGetLightDirection();
03962 
03963     light.vpNormalize();
03964 
03965     dot = gradient.x * light.x +
03966           gradient.y * light.y +
03967           gradient.z * light.z;
03968           
03969     if (dot < 0.0) {
03970         if (negativeLightContribution)
03971             dot = -dot; 
03972         else
03973             dot = (float) 0.0;  // no negative contributions
03974     }
03975 
03976     intensity = ambientLight + (diffuseLight * dot);
03977 
03978     if (intensity > 1)
03979         intensity = 1;
03980 
03981     return (intensity);
03982 }
03983 
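As a usage illustration only: the scalar returned by Shade is an attenuation factor, so a typical front-to-back compositing loop would multiply it into the color fetched from RGBtable before accumulation. The sketch below assumes that convention; the accumulator structure and function name are hypothetical and not part of this class.

    // Illustrative only: one way the factor returned by Shade could be applied
    // during front-to-back compositing.  Only the table layout (3 bytes per RGB
    // entry, 1 byte per alpha entry) comes from the code above.
    struct RayAccumulator { float r, g, b, alpha; };

    inline void compositeSample(RayAccumulator &acc,
                                const unsigned char *RGBtable,
                                const unsigned char *alphaTable,
                                int ind, float shadeFactor)
    {
        float sampleAlpha = alphaTable[ind] / 255.0f;      // opacity from the lookup table
        float weight = (1.0f - acc.alpha) * sampleAlpha;   // remaining transparency of the ray

        acc.r     += weight * shadeFactor * RGBtable[3 * ind    ];
        acc.g     += weight * shadeFactor * RGBtable[3 * ind + 1];
        acc.b     += weight * shadeFactor * RGBtable[3 * ind + 2];
        acc.alpha += weight;                               // the ray can stop when this nears 1
    }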
03984 
03985 /*+-------------------------------------------------------------------*/
03986 /*+-------------------+
03987   |   ShadeSpecular   |
03988   +-------------------+*/
03989 
03990 float   VPRayCasting::ShadeSpecular
03991 (
03992     VPVector3D gradient,
03993     VPVector3D obsPoint
03994 )
03995 
03996 {
03997     VPVector3D  light;
03998     float       dot1, dot2, dot3;
03999     float       intensity;  // returned attenuation factor
04000 
04001     light = vpGetLightDirection();
04002 
04003     light.vpNormalize();
04004 
04005     dot1 = gradient.vpDotProduct(light);    // g . l
04006     dot2 = gradient.vpDotProduct(obsPoint); // g . o
04007     dot3 = obsPoint.vpDotProduct(light);    // o . l
04008          
04009     dot1 = fabs(dot1);
04010     dot2 = fabs(dot2);
04011     dot3 = fabs(dot3);
04012           
04013     intensity = ambientLight + (diffuseLight * dot1) + (float) ( pow( ((2 * dot1 * dot2) - dot3), specularExponent ) ); //50-30
04014 
04015     if (intensity > 1)
04016         intensity = 1;
04017 
04018     return (intensity);
04019 }
04020 
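The term (2 * dot1 * dot2 - dot3) in ShadeSpecular can be read as the cosine between the reflected light direction and the viewing direction, via the identity R.V = 2(N.L)(N.V) - (L.V) for unit vectors, which lets the method avoid building the reflection vector explicitly. A minimal sketch of that equivalence follows; the small vector type and helper names are hypothetical and only demonstrate the algebra.

    // Illustrative only: the algebra behind (2*dot1*dot2 - dot3) in ShadeSpecular.
    struct Vec3 { float x, y, z; };

    inline float dot(const Vec3 &a, const Vec3 &b)
    { return a.x * b.x + a.y * b.y + a.z * b.z; }

    // Explicit reflection of l about n (both assumed normalized).
    inline Vec3 reflect(const Vec3 &l, const Vec3 &n)
    {
        float d = dot(n, l);
        return { 2.0f * d * n.x - l.x, 2.0f * d * n.y - l.y, 2.0f * d * n.z - l.z };
    }

    // For unit n, l, v:  dot(reflect(l, n), v) == 2*dot(n,l)*dot(n,v) - dot(l,v),
    // which is exactly the term raised to specularExponent above.
    inline float specularCosine(const Vec3 &n, const Vec3 &l, const Vec3 &v)
    {
        return 2.0f * dot(n, l) * dot(n, v) - dot(l, v);
    }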
04021 
04022 /*+-------------------------------------------------------------------*/
04023 /*+--------------+
04024   |   Gradient   |
04025   +--------------+*/
04026 
04027 void    VPRayCasting::Gradient (int i, int j, int k,      // The location where to compute the gradient
04028                   VPVector3D &gradient,    // The gradient
04029                   float *gm,              // The gradient magnitude
04030                   VPGraphicObj *volume
04031                   )
04032 
04033 {
04034  int ii,jj,kk,i_,j_,k_;
04035 
04036 /*----- Note that the gradient being computed is the nearest
04037         neighbor for the sample point.  It is estimated this way
04038         to keep the example straightforward.  The x, y and z
04039         gradients should be interpolated for the sample point
04040         using the eight neighboring gradients.  It would be
04041         very time-consuming to calculate eight gradients for
04042         each x, y and z direction and then interpolate amongst
04043         them.  Please see the chapter that discusses system
04044         design and trade-offs and then adapt those ideas to
04045         create your own high-performance, high-quality renderer. -----*/
04046 
04047      ii = ((i+1) < ((VPImage *)volume)->vpGetXDimension()) ? i+1 : i;
04048      jj = ((j+1) < ((VPImage *)volume)->vpGetYDimension()) ? j+1 : j;
04049      kk = ((k+1) < ((VPVolume *)volume)->vpGetZDimension()) ? k+1 : k;
04050      i_ = ((i-1) < 0) ? i : i-1;
04051      j_ = ((j-1) < 0) ? j : j-1;
04052      k_ = ((k-1) < 0) ? k : k-1;
04053 
04054     gradient.x = (float) ((VPVolume *)volume)->vpGetValue(i_,j,k) - (float) ((VPVolume *)volume)->vpGetValue(ii,j,k);
04055     gradient.y = (float) ((VPVolume *)volume)->vpGetValue(i,j_,k) -
04056         (float) ((VPVolume *)volume)->vpGetValue(i,jj,k);
04057     gradient.z = (float) ((VPVolume *)volume)->vpGetValue(i,j,k_) -
04058         (float) ((VPVolume *)volume)->vpGetValue(i,j,kk);
04059 
04060     *gm = ((gradient.x * gradient.x) + (gradient.y * gradient.y) + (gradient.z * gradient.z));
04061     *gm = (float) sqrt ((double) *gm);
04062 
04063     gradient.vpNormalize();
04064 }
04065 
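The comment in Gradient recommends interpolating the eight neighboring gradients instead of taking the nearest neighbor. A minimal sketch of that trilinear blend is given below. It is not a method of VPRayCasting: it assumes access to Gradient(), a sample position strictly inside the volume (so every corner index is valid), the <cmath> and vpat headers already included by this file, and it blends the per-corner magnitudes separately because Gradient returns normalized vectors.

    // Illustrative only -- InterpolatedGradient is NOT part of VPRayCasting.
    void InterpolatedGradient(VPRayCasting &rc, float x, float y, float z,
                              VPVector3D &gradient, float *gm, VPGraphicObj *volume)
    {
        int i0 = (int) floor(x), j0 = (int) floor(y), k0 = (int) floor(z);
        float fx = x - i0, fy = y - j0, fz = z - k0;   // fractional offsets in [0,1)

        float gx = 0, gy = 0, gz = 0, mag = 0;
        for (int dk = 0; dk <= 1; dk++)
          for (int dj = 0; dj <= 1; dj++)
            for (int di = 0; di <= 1; di++)
            {
                VPVector3D g;
                float m;
                rc.Gradient(i0 + di, j0 + dj, k0 + dk, g, &m, volume);

                // Trilinear weight of this corner for the sample position
                float w = (di ? fx : 1 - fx) * (dj ? fy : 1 - fy) * (dk ? fz : 1 - fz);
                gx += w * g.x;  gy += w * g.y;  gz += w * g.z;
                mag += w * m;   // blend magnitudes separately (Gradient normalizes g)
            }

        gradient.x = gx;  gradient.y = gy;  gradient.z = gz;
        gradient.vpNormalize();
        *gm = mag;
    }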
04066 
04067 /*+-------------------------------------------------------------------*/
04068 /*+-------------------+
04069   |   GradientSobel   |
04070   +-------------------+*/
04071 
04072 void    VPRayCasting::GradientSobel (int i, int j, int k,      // The location where to compute the gradient
04073                   VPVector3D &gradient,    // The gradient
04074                   float *gm,              // The gradient magnitude
04075                   VPGraphicObj *volume
04076                   )
04077 {
04078     gradient = ((VPVolume *)volume)->vpGetSobelGradient(i, j, k);
04079 
04080     *gm = ((gradient.x * gradient.x) + (gradient.y * gradient.y) + (gradient.z * gradient.z));
04081     *gm = (float) sqrt ((double) *gm);
04082 
04083     gradient.vpNormalize();
04084 }
04085 
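GradientSobel delegates entirely to VPVolume::vpGetSobelGradient, so the kernel itself is not visible in this listing. For reference, a generic 3-D Sobel gradient with the standard separable weights (smoothing [1 2 1] along two axes, derivative [-1 0 1] along the third) is sketched below; the library's actual kernel, and the memory layout assumed here (8-bit voxels, x varying fastest), may differ.

    // Illustrative only: a generic 3-D Sobel gradient estimate.
    static void sobelGradient3D(const unsigned char *vol, int dimX, int dimY, int dimZ,
                                int i, int j, int k, float g[3])
    {
        const int smooth[3] = { 1, 2, 1 };   // smoothing weights for offsets -1, 0, +1
        const int deriv[3]  = { -1, 0, 1 };  // derivative weights for offsets -1, 0, +1
        g[0] = g[1] = g[2] = 0.0f;

        for (int dz = -1; dz <= 1; dz++)
            for (int dy = -1; dy <= 1; dy++)
                for (int dx = -1; dx <= 1; dx++)
                {
                    // Clamp to the volume borders
                    int x = i + dx; if (x < 0) x = 0; if (x >= dimX) x = dimX - 1;
                    int y = j + dy; if (y < 0) y = 0; if (y >= dimY) y = dimY - 1;
                    int z = k + dz; if (z < 0) z = 0; if (z >= dimZ) z = dimZ - 1;
                    float v = (float) vol[x + dimX * (y + dimY * z)];

                    g[0] += v * deriv[dx + 1] * smooth[dy + 1] * smooth[dz + 1];
                    g[1] += v * smooth[dx + 1] * deriv[dy + 1] * smooth[dz + 1];
                    g[2] += v * smooth[dx + 1] * smooth[dy + 1] * deriv[dz + 1];
                }
    }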
04086 
