diff --git a/Cassiopee/CPlot/CPlot/PyTree.py b/Cassiopee/CPlot/CPlot/PyTree.py
index e42883ee0..dac8064bc 100644
--- a/Cassiopee/CPlot/CPlot/PyTree.py
+++ b/Cassiopee/CPlot/CPlot/PyTree.py
@@ -1046,82 +1046,105 @@ def loadImageFiles(t, offscreen=0):
     CPlot.setState(billBoards=out, offscreen=offscreen)
     return None
 
-# subfunction of display 360. DIsplay the 6 views.
-def display360__(t, posCam, posEye, dirCam, offscreen, locRez, kwargs):
+# subfunction of display 360. Display the 6 views.
+def display360__(t, posCam, posEye, dirCam, offscreen, exportRez, kwargs):
     import KCore.Vector as Vector
+    # resolution for the square view images
+    locRez = exportRez.split('x')[1]
+    locRez = int(locRez)//2
+    locRez = max(locRez, 100) # minimum 100 pixels
+    locRez = min(locRez, 8192) # maximum 8192 pixels, generally the max texture size
+    locRez = "%dx%d"%(locRez, locRez)
+
     # Compute all view vectors
-    v1 = Vector.sub(posEye, posCam)
+    v1 = Vector.sub(posEye, posCam) # view vector
     vz = Vector.normalize(dirCam)
     # orthogonalisation de v1
     s = Vector.dot(v1, vz)
     v1 = Vector.sub(v1, Vector.mul(s, vz))
-    v2 = Vector.cross(vz, v1)
+    v2 = Vector.cross(vz, v1) # second view vector
     n = Vector.norm(v1)
-    v3 = Vector.mul(n, vz)
+    v3 = Vector.mul(n, vz) # third view vector
 
-    # right
-    posEye0 = Vector.sub(posCam, v2); dirCam0 = dirCam
     lkwargs = kwargs.copy()
-    lkwargs['posCam'] = posCam
+    fov = 90.
+
+    # right
+    posCam0 = posCam
+    posEye0 = Vector.sub(posCam, v2)
+    dirCam0 = dirCam
+    print('right', posCam, posCam0, posEye0, dirCam0)
+    lkwargs['posCam'] = posCam0
     lkwargs['posEye'] = posEye0
     lkwargs['dirCam'] = dirCam0
-    lkwargs['viewAngle'] = 90.
+    lkwargs['viewAngle'] = fov
     lkwargs['exportResolution'] = locRez
     lkwargs['export'] = 'cube_right.png'
     display(t, **lkwargs)
     finalizeExport(offscreen)
 
     # left
-    posEye0 = Vector.add(posCam, v2); dirCam0 = dirCam
-    lkwargs['posCam'] = posCam
+    posCam0 = posCam
+    posEye0 = Vector.add(posCam, v2)
+    dirCam0 = dirCam
+    print('left', posCam, posCam0, posEye0, dirCam0)
+    lkwargs['posCam'] = posCam0
     lkwargs['posEye'] = posEye0
     lkwargs['dirCam'] = dirCam0
-    lkwargs['viewAngle'] = 90.
+    lkwargs['viewAngle'] = fov
     lkwargs['exportResolution'] = locRez
     lkwargs['export'] = 'cube_left.png'
     display(t, **lkwargs)
-    finalizeExport(offscreen)
+    finalizeExport(offscreen)
 
     # front
-    posEye0 = posEye; dirCam0 = dirCam
-    lkwargs['posCam'] = posCam
+    posCam0 = posCam
+    posEye0 = posEye
+    dirCam0 = dirCam
+    print('front', posCam, posCam0, posEye0, dirCam0)
+    lkwargs['posCam'] = posCam0
     lkwargs['posEye'] = posEye0
     lkwargs['dirCam'] = dirCam0
-    lkwargs['viewAngle'] = 90.
+    lkwargs['viewAngle'] = fov
     lkwargs['exportResolution'] = locRez
     lkwargs['export'] = 'cube_front.png'
     display(t, **lkwargs)
     finalizeExport(offscreen)
 
     # back
-    posEye0 = Vector.sub(posCam, v1); dirCam0 = dirCam
-    lkwargs['posCam'] = posCam
+    posCam0 = posCam
+    posEye0 = Vector.sub(posCam, v1)
+    dirCam0 = dirCam
+    print('back', posCam, posCam0, posEye0, dirCam0)
+    lkwargs['posCam'] = posCam0
     lkwargs['posEye'] = posEye0
     lkwargs['dirCam'] = dirCam0
-    lkwargs['viewAngle'] = 90.
+    lkwargs['viewAngle'] = fov
     lkwargs['exportResolution'] = locRez
     lkwargs['export'] = 'cube_back.png'
     display(t, **lkwargs)
     finalizeExport(offscreen)
 
     # top
-    posEye0 = Vector.add(posCam, v3); dirCam0 = Vector.mul(-1, v1)
+    posEye0 = Vector.add(posCam, v3)
+    dirCam0 = Vector.mul(-1, v1)
     lkwargs['posCam'] = posCam
     lkwargs['posEye'] = posEye0
     lkwargs['dirCam'] = dirCam0
-    lkwargs['viewAngle'] = 90.
+    lkwargs['viewAngle'] = fov
    lkwargs['exportResolution'] = locRez
     lkwargs['export'] = 'cube_top.png'
     display(t, **lkwargs)
     finalizeExport(offscreen)
 
     # bot
-    posEye0 = Vector.sub(posCam, v3); dirCam0 = Vector.mul(+1, v1)
+    posEye0 = Vector.sub(posCam, v3)
+    dirCam0 = Vector.mul(+1, v1)
     lkwargs['posCam'] = posCam
     lkwargs['posEye'] = posEye0
     lkwargs['dirCam'] = dirCam0
-    lkwargs['viewAngle'] = 90.
+    lkwargs['viewAngle'] = fov
     lkwargs['exportResolution'] = locRez
     lkwargs['export'] = 'cube_bottom.png'
     display(t, **lkwargs)
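For reference, the six cube faces rendered above all share one camera frame derived from posCam, posEye and dirCam: v1 is the eye-to-target vector with its component along the up direction removed, v2 the lateral axis, and v3 the up axis rescaled to the length of v1; each face then looks along one of ±v1, ±v2, ±v3 with a 90 degree viewAngle so the six frusta tile the sphere. A minimal standalone sketch of that basis construction (the helper name cubeBasis is illustrative, not part of CPlot):

    import KCore.Vector as Vector

    def cubeBasis(posCam, posEye, dirCam):
        # forward vector, made orthogonal to the up direction
        v1 = Vector.sub(posEye, posCam)
        vz = Vector.normalize(dirCam)
        s = Vector.dot(v1, vz)
        v1 = Vector.sub(v1, Vector.mul(s, vz))   # forward
        v2 = Vector.cross(vz, v1)                # lateral axis
        v3 = Vector.mul(Vector.norm(v1), vz)     # up axis, same length as v1
        return v1, v2, v3

    v1, v2, v3 = cubeBasis((0.,0.,0.), (1.,0.,0.), (0.,0.,1.))
    print(v1, v2, v3)   # [1,0,0], [0,1,0], [0,0,1]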
@@ -1129,6 +1152,64 @@ def display360__(t, posCam, posEye, dirCam, offscreen, locRez, kwargs):
 
     return None
 
+# subfunction of display 360. Display the n views.
+def display360ODS__(t, posCam, posEye, dirCam, offscreen, exportRez, stereoShift, kwargs):
+
+    import KCore.Vector as Vector
+    lkwargs = kwargs.copy()
+
+    # number of images, 1 per pixel
+    nangles = exportRez.split('x')[0]
+    nangles = int(nangles)
+    # fov of each image
+    fov = 90.
+
+    # locRez of each image
+    locRez = exportRez.split('x')[1]
+    locRez1 = 2
+    locRez2 = int(locRez)
+    locRez = "%dx%d"%(locRez1, locRez2)
+
+    # Compute all front view vectors
+    v1 = Vector.sub(posEye, posCam) # view vector
+    vz = Vector.normalize(dirCam) # third view vector
+    v2 = Vector.cross(vz, v1) # second view vector
+    v2 = Vector.normalize(v2)
+
+    import Geom.PyTree as D
+    import Transform.PyTree as T
+
+    # start from -pi to pi and rotate left
+    for i in range(nangles):
+
+        theta = i*360./nangles-180.
+
+        point = D.point(v1)
+        point = T.rotate(point, (0,0,0), vz, theta)
+        v1p = C.getValue(point, 'GridCoordinates', 0)
+        point = D.point(v2)
+        point = T.rotate(point, (0,0,0), vz, theta)
+        v2p = C.getValue(point, 'GridCoordinates', 0)
+        dv = Vector.mul(stereoShift, v2p)
+
+        posCam0 = Vector.add(posCam, dv)
+        posEye0 = Vector.add(v1p, posCam)
+        dirCam0 = dirCam
+        print('init', posCam, posEye)
+        print('rot', posCam0, posEye0)
+        print('image', i, posCam, posCam0, posEye0, dirCam0)
+
+        lkwargs['posCam'] = posCam0
+        lkwargs['posEye'] = posEye0
+        lkwargs['dirCam'] = dirCam0
+        lkwargs['viewAngle'] = fov
+        lkwargs['exportResolution'] = locRez
+        lkwargs['export'] = 'cube_%03d.png'%i
+        display(t, **lkwargs)
+        finalizeExport(offscreen)
+
+    return None
+
 #==============================================================================
 # display360 (offscreen=1, 2 or 7)
 # type360=0 (360 degres), =1 (180 degres)
@@ -1148,91 +1229,48 @@ def display360(t, type360=0, **kwargs):
     stereoDist = kwargs.get("stereoDist", 0.07) # stereoDist is in real world distance
     if stereo == 1: kwargs['stereo'] = 0
 
-    # resolution for the 6 view images
-    locRez = exportRez.split('x')[1]
-    locRez = int(locRez)//2
-    locRez = max(locRez, 100) # minimum 100 pixels
-    locRez = min(locRez, 8192) # maximum 8192 pixels, generally the max texture size
-    locRez = "%dx%d"%(locRez, locRez)
+    import Converter.Mpi as Cmpi
+
+    # display
+    if stereo == 0:
+        # display 6 views
+        display360__(t, posCam, posEye, dirCam, offscreen, exportRez, kwargs)
+        # Create the 360 image from cube images
+        if Cmpi.rank == 0:
+            panorama(export, exportRez, type360=type360)
+        Cmpi.barrier() # wait for completion
 
-    # display 6 views
-    display360__(t, posCam, posEye, dirCam, offscreen, locRez, kwargs)
+    else: # stereo (ODS)
 
-    # Create the 360 image from cube images
-    import Converter.Mpi as Cmpi
-    if Cmpi.rank == 0:
-        #if offscreen == 7: foffscreen = 1
-        #else: foffscreen = offscreen
-        #a = C.newPyTree(['Base'])
-        #display(a, panorama=1,
-        #        offscreen=foffscreen, export=export, exportResolution=exportRez)
-        #finalizeExport(foffscreen)
-        panorama(export, exportRez, type360=type360)
-    Cmpi.barrier() # wait for completion
-
-    if stereo == 1: # left eye
-        import Generator.PyTree as G
+        export1 = export.rsplit('.', 1)
+        if len(export1) == 2: export1 = export1[0]+'_1.'+export1[1]
+        else: export1 = export+'_1'
         export2 = export.rsplit('.', 1)
         if len(export2) == 2: export2 = export2[0]+'_2.'+export2[1]
         else: export2 = export+'_2'
 
-        # common
-        v1 = Vector.sub(posEye, posCam)
-        vz = Vector.normalize(dirCam)
-        v2 = Vector.cross(vz, v1)
-        v2 = Vector.normalize(v2)
+        # right eye
+        #stereoDist = 0. # forced to 0 for debug
+        #display360ODS__(t, posCam, posEye, dirCam, offscreen, exportRez, stereoDist/2., kwargs)
+        #if Cmpi.rank == 0:
+        #    panoramaODS(export1, exportRez, type360=type360)
+        #Cmpi.barrier() # wait for completion
 
-        # stereo mode: convergent: decalage de la posCam a gauche, puis rotation du centre decale
-        v3 = Vector.mul(stereoDist, v2)
-        posCam = Vector.add(posCam, v3)
+        # left eye
+        #display360ODS__(t, posCam, posEye, dirCam, offscreen, exportRez, -stereoDist/2., kwargs)
+        #if Cmpi.rank == 0:
+        #    panoramaODS(export2, exportRez, type360=type360)
+        #Cmpi.barrier() # wait for completion
 
-        display360__(t, posCam, posEye, dirCam, offscreen, locRez, kwargs)
+        # stitch
+        panoramaStereo(export, export1, export2, exportRez, type360=type360)
 
-        if Cmpi.rank == 0:
-            panorama(export2, exportRez, type360=type360)
-
-            # assemble images
-            a1 = C.convertFile2PyTree(export)
-            a2 = C.convertFile2PyTree(export2)
-            a1 = Internal.getZones(a1)[0]
-            a2 = Internal.getZones(a2)[0]
-            a1[0] = "right"; a2[0] = "left"
-            locRez = exportRez.split('x')
-            if type360 == 0: # 360
-                ni = int(locRez[0]); nj = int(locRez[1])
-                a = G.cart((0,0,0), (1,1,1), (ni,2*nj,1))
-                C._addVars(a, ['r','g','b','a'])
-                for v in ['r','g','b','a']:
-                    pr = Internal.getNodeFromName2(a, v)[1]
-                    pr1 = Internal.getNodeFromName2(a1, v)
-                    pr2 = Internal.getNodeFromName2(a2, v)
-                    if pr1 is not None and pr2 is not None:
-                        pr1 = pr1[1]; pr2 = pr2[1]
-                        pr[0:ni,0:nj] = pr1[0:ni,0:nj]
-                        pr[0:ni,nj:2*nj] = pr2[0:ni,0:nj]
-                    else:
-                        pr[0:ni, 0:2*nj] = 1.
-            else: # 180
-                ni = int(locRez[1]); nj = int(locRez[1])
-                a = G.cart((0,0,0), (1,1,1), (2*ni,nj,1))
-                C._addVars(a, ['r','g','b','a'])
-                for v in ['r','g','b','a']:
-                    pr = Internal.getNodeFromName2(a, v)[1]
-                    pr1 = Internal.getNodeFromName2(a1, v)
-                    pr2 = Internal.getNodeFromName2(a2, v)
-                    if pr1 is not None and pr2 is not None:
-                        pr1 = pr1[1]; pr2 = pr2[1]
-                        pr[0:ni,0:nj] = pr1[0:ni,0:nj]
-                        pr[ni:2*ni,0:nj] = pr2[0:ni,0:nj]
-                    else:
-                        pr[0:2*ni,0:nj] = 1.
-            C.convertPyTree2File(a, export) # finale
-        Cmpi.barrier() # wait for completion
     return None
 
-# type360=0 -> 360, mode=1 -> 180
-def panorama(export, exportResolution, type360=0):
-    res = exportResolution.split('x')
+# assemble 6 cube images into a panoramic image
+# type360=0 -> 360, type360=1 -> 180
+def panorama(export, exportRez, type360=0):
+    res = exportRez.split('x')
     if type360 == 0: resx = int(res[0]); resy = int(res[1])
     else: resx = int(res[1]); resy = int(res[1])
     import Generator.PyTree as G
@@ -1254,4 +1292,74 @@ def panorama(export, exportResolution, type360=0):
     a7f = C.getFields('nodes', a7, api=3)[0]
     CPlot.cplot.panorama(a1, a2, a3, a4, a5, a6, a7f, type360)
     C.convertPyTree2File(a7, export)
-    return a7
\ No newline at end of file
+    return a7
+
+# assemble 2 panoramic images into a stereo panoramic image
+def panoramaStereo(export, export1, export2, exportRez, type360=0):
+    import Generator.PyTree as G
+    # assemble 2 panoramic images in a single stereo image
+    a1 = C.convertFile2PyTree(export1)
+    a2 = C.convertFile2PyTree(export2)
+    a1 = Internal.getZones(a1)[0]
+    a2 = Internal.getZones(a2)[0]
+    a1[0] = "right"; a2[0] = "left"
+    locRez = exportRez.split('x')
+    if type360 == 0: # 360
+        ni = int(locRez[0]); nj = int(locRez[1])
+        a = G.cart((0,0,0), (1,1,1), (ni,2*nj,1))
+        C._addVars(a, ['r','g','b','a'])
+        for v in ['r','g','b','a']:
+            pr = Internal.getNodeFromName2(a, v)[1]
+            pr1 = Internal.getNodeFromName2(a1, v)
+            pr2 = Internal.getNodeFromName2(a2, v)
+            if pr1 is not None and pr2 is not None:
+                pr1 = pr1[1]; pr2 = pr2[1]
+                pr1 = pr1[:,::-1]
+                pr2 = pr2[:,::-1]
+                pr[0:ni,0:nj] = pr1[0:ni, 0:nj]
+                pr[0:ni,nj:2*nj] = pr2[0:ni, 0:nj]
+            else:
+                if v == 'a': pr[0:ni, 0:2*nj] = 255
+                else: pr[0:ni, 0:2*nj] = 0
+    else: # 180
+        ni = int(locRez[1]); nj = int(locRez[1])
+        a = G.cart((0,0,0), (1,1,1), (2*ni,nj,1))
+        C._addVars(a, ['r','g','b','a'])
+        for v in ['r','g','b','a']:
+            pr = Internal.getNodeFromName2(a, v)[1]
+            pr1 = Internal.getNodeFromName2(a1, v)
+            pr2 = Internal.getNodeFromName2(a2, v)
+            if pr1 is not None and pr2 is not None:
+                pr1 = pr1[1]; pr2 = pr2[1]
+                pr[0:ni,0:nj] = pr1[0:ni,0:nj]
+                pr[ni:2*ni,0:nj] = pr2[0:ni,0:nj]
+            else:
+                if v == 'a': pr[0:2*ni,0:nj] = 255
+                else: pr[0:2*ni,0:nj] = 0
+
+    C.convertPyTree2File(a, export) # final
+
+# assemble n cube images into a panoramic image
+# type360=0 -> 360, type360=1 -> 180
+def panoramaODS(export, exportRez, type360=0):
+
+    res = exportRez.split('x')
+    if type360 == 0: resx = int(res[0]); resy = int(res[1])
+    else: resx = int(res[1]); resy = int(res[1])
+    import Generator.PyTree as G
+    import CPlot.cplot
+
+    nangles = exportRez.split('x')[0]
+    nangles = int(nangles)
+
+    a = []
+    for i in range(nangles):
+        a1 = C.convertFile2PyTree('cube_%03d.png'%i)
+        a1 = C.getFields('nodes', a1, api=3)[0]
+        a.append(a1)
+    a7 = G.cart((0,0,0), (1,1,1), (resx, resy,1))
+    C._addVars(a7, ['r','g','b','a'])
+    a7f = C.getFields('nodes', a7, api=3)[0]
+    CPlot.cplot.panoramaODS(a, a7f, type360)
+    C.convertPyTree2File(a7, export)
+
\ No newline at end of file
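panoramaStereo above packs the two eye panoramas into one canvas: for type360=0 the right-eye image fills the first half of the j direction and the left-eye image the second half (each flipped along j), while for type360=1 they sit side by side along i. Per colour channel this amounts to the following numpy sketch (stackEyes is an illustrative name; the PyTree version operates on the 'r','g','b','a' node arrays):

    import numpy as np

    def stackEyes(right, left, over_under=True):
        # right, left: (ni, nj) arrays holding one channel of each eye panorama
        ni, nj = right.shape
        if over_under:                       # 360 case: stack along j
            out = np.zeros((ni, 2*nj), dtype=right.dtype)
            out[:, 0:nj] = right[:, ::-1]    # right eye, j flipped
            out[:, nj:2*nj] = left[:, ::-1]  # left eye, j flipped
        else:                                # 180 case: stack along i
            out = np.zeros((2*ni, nj), dtype=right.dtype)
            out[0:ni, :] = right
            out[ni:2*ni, :] = left
        return out

    r = np.zeros((16, 8)); l = np.ones((16, 8))
    print(stackEyes(r, l).shape)   # (16, 16)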
diff --git a/Cassiopee/CPlot/CPlot/cplot.cpp b/Cassiopee/CPlot/CPlot/cplot.cpp
index 66b76b67a..883098a17 100644
--- a/Cassiopee/CPlot/CPlot/cplot.cpp
+++ b/Cassiopee/CPlot/CPlot/cplot.cpp
@@ -69,6 +69,7 @@ static PyMethodDef Pycplot [] =
   {"display1D", K_CPLOT::display1D, METH_VARARGS},
   {"configure", K_CPLOT::configure, METH_VARARGS},
   {"panorama", K_CPLOT::panorama, METH_VARARGS},
+  {"panoramaODS", K_CPLOT::panoramaODS, METH_VARARGS},
   {NULL, NULL}
 };
 
diff --git a/Cassiopee/CPlot/CPlot/cplot.h b/Cassiopee/CPlot/CPlot/cplot.h
index ae70ee26a..fb1104403 100644
--- a/Cassiopee/CPlot/CPlot/cplot.h
+++ b/Cassiopee/CPlot/CPlot/cplot.h
@@ -76,6 +76,7 @@ PyObject* show(PyObject* self, PyObject* args);
 PyObject* display1D(PyObject* self, PyObject* args);
 PyObject* configure(PyObject* self, PyObject* args);
 PyObject* panorama(PyObject* self, PyObject* args);
+PyObject* panoramaODS(PyObject* self, PyObject* args);
 }
 
 E_Int getMode(PyObject* modeObject);
 
diff --git a/Cassiopee/CPlot/CPlot/panorama.cpp b/Cassiopee/CPlot/CPlot/panorama.cpp
index fb37a58fe..c8c65b56d 100644
--- a/Cassiopee/CPlot/CPlot/panorama.cpp
+++ b/Cassiopee/CPlot/CPlot/panorama.cpp
@@ -18,11 +18,12 @@
 */
 #include "cplot.h"
 #include "Data.h"
+#include <cassert>
 
 #define M_PI 3.1415926535897932384626433832795
 
 // in texture interpolation
-void interp(E_Int ind, 
+void interp(E_Int ind,
             E_Float* final1, E_Float* final2, E_Float* final3, E_Float* final4,
             E_Float* im1, E_Float* im2, E_Float* im3, E_Float* im4,
             E_Float px, E_Float py, E_Int ni1, E_Int nj1)
@@ -47,6 +48,7 @@
 }
 
+// Create 1D half gaussian kernel coefficients
 // IN: sigma: in pixels
 // IN: n: half size of kernel
 // OUT: c: half kernel coefficients
@@ -63,6 +65,7 @@ void createGaussFilter(E_Float sigma, E_Int n, E_Float* c)
 }
 
+// Apply gaussian blur to in
 // IN: in: color array
 // IN: ni,nj: image size
 // IN: c: kernel coef
@@ -101,6 +104,8 @@ void gaussianBlur(E_Float* in, E_Int ni, E_Int nj, E_Float* c, E_Int n, E_Float*
 // it doesnt have the texture size limit
 // si type360=0 -> 360 deg
 // si type360=1 -> 180 deg
+// shift: eye shift
+// fov2: fov enlargement
 PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
 {
   // Get the 4 arrays of cube images (left, right, bottom, top, back, front)
@@ -110,7 +115,8 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
   PyObject* finalArray;
   E_Int type360;
   if (!PYPARSETUPLE_(args, OOOO_ OOO_ I_, &leftArray, &rightArray,
-    &bottomArray, &topArray, &backArray, &frontArray, &finalArray, &type360))
+    &bottomArray, &topArray, &backArray, &frontArray, &finalArray,
+    &type360))
   {
     return NULL;
   }
@@ -233,11 +239,15 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
   if (type360 == 0) { tinf = -M_PI; tsup = 2*M_PI; } // 360
   else { tinf = -M_PI/2.; tsup = M_PI; } // 180
 
+  // fov of each image
+  E_Float fov = 90.;
+  printf("fov=%g\n", fov);
+
 #pragma omp parallel
   {
     E_Int ii, jj;
     E_Float tx, ty, px, py;
-    E_Float theta, phi, x, y, z; 
+    E_Float theta, phi, x, y, z;
     E_Float scale;
 # pragma omp for
     for (E_Int ind = 0; ind < nijl; ind++)
@@ -248,7 +258,7 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
       ty = (1.*jj) / njl1;
 
       theta = tinf + tx * tsup; // between -pi and pi
-      phi = -M_PI/2. + ty * M_PI; // between -pi/2 and pi/2
+      phi = M_PI/2. - ty * M_PI; // between pi/2 and -pi/2
 
       x = cos(phi) * sin(theta);
       y = sin(phi);
@@ -262,7 +272,6 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
         scale = -1.0 / x;
         px = ( z*scale + 1.0) / 2.0;
         py = ( y*scale + 1.0) / 2.0;
-        //printf("1px=%g %g\n", px, py);
         interp(ind, final1, final2, final3, final4,
                left1, left2, left3, left4,
                px, py, ni1, nj1);
@@ -272,7 +281,6 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
         scale = 1.0 / x;
         px = (-z*scale + 1.0) / 2.0;
         py = ( y*scale + 1.0) / 2.0;
-        //printf("2px=%g %g\n", px, py);
         interp(ind, final1, final2, final3, final4,
                right1, right2, right3, right4,
                px, py, ni1, nj1);
@@ -283,9 +291,8 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
       if (y > 0.0)
       {
         scale = -1.0 / y;
-        px = (-x*scale + 1.0) / 2.0; // a shifter a droite
+        px = (-x*scale + 1.0) / 2.0;
         py = ( z*scale + 1.0) / 2.0;
-        //printf("3px=%g %g\n", px, py);
         interp(ind, final1, final2, final3, final4,
                bottom1, bottom2, bottom3, bottom4,
                px, py, ni1, nj1);
@@ -295,7 +302,6 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
         scale = 1.0 / y;
         px = (-x*scale + 1.0) / 2.0;
         py = (-z*scale + 1.0) / 2.0;
-        //printf("4px=%g %g\n", px, py);
         interp(ind, final1, final2, final3, final4,
                top1, top2, top3, top4,
                px, py, ni1, nj1);
@@ -308,7 +314,6 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
         scale = -1.0 / z;
         px = (-x*scale + 1.0) / 2.0;
         py = ( y*scale + 1.0) / 2.0;
-        //printf("5px=%g %g\n", px, py);
         interp(ind, final1, final2, final3, final4,
                back1, back2, back3, back4,
                px, py, ni1, nj1);
@@ -316,9 +321,8 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
       else
       {
         scale = 1.0 / z;
-        px = ( x*scale + 1.0) / 2.0; 
+        px = ( x*scale + 1.0) / 2.0;
         py = ( y*scale + 1.0) / 2.0;
-        //printf("6px=%g %g\n", px, py);
         interp(ind, final1, final2, final3, final4,
                front1, front2, front3, front4,
                px, py, ni1, nj1);
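The hunks above only change the vertical angle convention (phi now runs from +pi/2 at the top row to -pi/2 at the bottom) and drop debug output; the surrounding code turns each panorama pixel into a direction and samples one cube face. The face-selection tests themselves sit in unchanged context lines, so the dominant-axis test in this sketch is an assumption; only the front/back projection formulas are taken from the code above:

    import math

    def equirectToFace(tx, ty, type360=0):
        # tx, ty in [0,1]: normalized pixel position in the output panorama
        if type360 == 0: theta = -math.pi + tx * 2.*math.pi  # 360 deg
        else: theta = -math.pi/2. + tx * math.pi             # 180 deg
        phi = math.pi/2. - ty * math.pi                      # +pi/2 (top) .. -pi/2 (bottom)
        x = math.cos(phi)*math.sin(theta)
        y = math.sin(phi)
        z = math.cos(phi)*math.cos(theta)
        # assumed dominant-axis test; panorama.cpp handles all six faces
        if abs(z) >= abs(x) and abs(z) >= abs(y):
            if z > 0.:
                scale = 1.0/z
                return 'front', (x*scale + 1.)/2., (y*scale + 1.)/2.
            else:
                scale = -1.0/z
                return 'back', (-x*scale + 1.)/2., (y*scale + 1.)/2.
        return None  # left/right/top/bottom omitted in this sketch

    print(equirectToFace(0.5, 0.5))  # centre pixel -> ('front', 0.5, 0.5)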
@@ -358,6 +362,155 @@ PyObject* K_CPLOT::panorama(PyObject* self, PyObject* args)
     delete [] c;
   }
 
+  RELEASESHAREDS(frontArray, front);
+  RELEASESHAREDS(backArray, back);
+  RELEASESHAREDS(leftArray, left);
+  RELEASESHAREDS(rightArray, right);
+  RELEASESHAREDS(topArray, top);
+  RELEASESHAREDS(bottomArray, bottom);
+  RELEASESHAREDS(finalArray, final);
+
   return Py_None;
 }
+
+//===========================================================================
+
+PyObject* K_CPLOT::panoramaODS(PyObject* self, PyObject* args)
+{
+  // Get the n arrays of cube images
+  PyObject* arrays;
+  PyObject* finalArray;
+  E_Int type360;
+  if (!PYPARSETUPLE_(args, OO_ I_, &arrays, &finalArray, &type360))
+  {
+    return NULL;
+  }
+
+  E_Int nangles = PyList_Size(arrays);
+  printf("nangles=%d\n", nangles);
+
+  char* varString;
+  E_Int ni, nj, nk, res;
+  FldArrayF* array; FldArrayI* cn; char* eltType;
+  std::vector<FldArrayF*> images(nangles);
+  for (E_Int i = 0; i < nangles; i++)
+  {
+    res = K_ARRAY::getFromArray3(PyList_GetItem(arrays, i), varString, images[i],
+                                 ni, nj, nk, cn, eltType);
+    if (res != 1)
+    {
+      PyErr_SetString(PyExc_TypeError,
+                      "panorama: requires a structured array.");
+      return NULL;
+    }
+  }
+
+  FldArrayF* final;
+  E_Int nil, njl, nkl;
+  res = K_ARRAY::getFromArray3(finalArray, varString, final,
+                               nil, njl, nkl, cn, eltType);
+  if (res != 1)
+  {
+    PyErr_SetString(PyExc_TypeError,
+                    "panorama: requires a structured array (final).");
+    return NULL;
+  }
+
+  assert(njl == nj);
+
+  // cube image pointers
+  std::vector<E_Float*> im1(nangles);
+  std::vector<E_Float*> im2(nangles);
+  std::vector<E_Float*> im3(nangles);
+  std::vector<E_Float*> im4(nangles);
+  for (E_Int i = 0; i < nangles; i++)
+  {
+    im1[i] = images[i]->begin(4);
+    im2[i] = images[i]->begin(5);
+    im3[i] = images[i]->begin(6);
+    im4[i] = images[i]->begin(7);
+  }
+
+  // final image pointers
+  E_Float* final1 = final->begin(4); // r,g,b,a
+  E_Float* final2 = final->begin(5);
+  E_Float* final3 = final->begin(6);
+  E_Float* final4 = final->begin(7);
+
+  E_Float tinf, tsup;
+
+  if (type360 == 0) { tinf = -M_PI; tsup = 2*M_PI; } // 360
+  else { tinf = -M_PI/2.; tsup = M_PI; } // 180
+
+  // fov of each cube image
+  E_Float fov = 90.;
+
+  E_Int nijl = nil*njl; // final image
+  E_Int nil1 = nil-1;
+  E_Int njl1 = njl-1;
+  E_Int ni1 = ni-1; // cube image
+  E_Int nj1 = nj-1;
+  printf("ni=%d, nj=%d\n", ni, nj);
+
+  E_Int ind;
+  E_Int mid = ni/2;
+
+  // direct slitting
+  for (E_Int i = 0; i < nangles; i++)
+  {
+    for (E_Int j = 0; j < njl; j++)
+    {
+      ind = i + j*nil;
+      final1[ind] = im1[i][mid+j*ni];
+      final2[ind] = im2[i][mid+j*ni];
+      final3[ind] = im3[i][mid+j*ni];
+      final4[ind] = im4[i][mid+j*ni];
+    }
+  }
+
+  // transformation
+  E_Float theta = 0.;
+  E_Float x, y, z, scale, py, phi, ty;
+
+  for (E_Int i = 0; i < nangles; i++)
+  {
+    // 1D interpolation
+    for (E_Int j = 0; j < njl; j++)
+    {
+      ty = (1.*j)/nj1;
+      phi = M_PI/2. - ty * M_PI; // between pi/2 and -pi/2
+
+      ind = i + j*nil;
+
+      x = 0.;
+      y = sin(phi); // between -1 and 1
+      z = cos(phi); // between 0 and 1
+
+      if (phi > -M_PI/4. && phi < M_PI/4.)
+      {
+        scale = 1.0 / z;
+        py = ( y*scale + 1.0) / 2.0;
+        printf("%g %g\n", py, phi);
+        interp(ind, final1, final2, final3, final4,
+               im1[i], im2[i], im3[i], im4[i],
+               0.5, py, ni1, nj1);
+      }
+      else
+      {
+        final1[ind] = 0.;
+        final2[ind] = 0.;
+        final3[ind] = 0.;
+        final4[ind] = 255.;
+      }
+    }
+  }
+
+  for (E_Int i = 0; i < nangles; i++)
+  {
+    RELEASESHAREDS(PyList_GetItem(arrays, i), images[i]);
+  }
+
+  RELEASESHAREDS(finalArray, final);
+
+  return Py_None;
+}
diff --git a/Cassiopee/Converter/Converter/Internal.py b/Cassiopee/Converter/Converter/Internal.py
index b044aebad..a5000202c 100644
--- a/Cassiopee/Converter/Converter/Internal.py
+++ b/Cassiopee/Converter/Converter/Internal.py
@@ -1,8 +1,6 @@
 # -- Internal --
 # -- PyTree node manipulations --
 from sys import version_info
-try: range = xrange
-except: pass
 import numpy
 import fnmatch # unix wildcards
 
diff --git a/Cassiopee/KCore/KCore/Vector.py b/Cassiopee/KCore/KCore/Vector.py
index 43d2cf844..b6d714e9d 100644
--- a/Cassiopee/KCore/KCore/Vector.py
+++ b/Cassiopee/KCore/KCore/Vector.py
@@ -61,3 +61,12 @@ def dist(p1, p2):
     dy = p1[1]-p2[1]
     dz = p1[2]-p2[2]
     return math.sqrt(dx*dx+dy*dy+dz*dz)
+
+# the matrix is given by rows
+def matprod(m, v):
+    """Matrix-vector product."""
+    [l1,l2,l3] = m # get each row of the matrix
+    vx = l1[0]*v[0]+l1[1]*v[1]+l1[2]*v[2]
+    vy = l2[0]*v[0]+l2[1]*v[1]+l2[2]*v[2]
+    vz = l3[0]*v[0]+l3[1]*v[1]+l3[2]*v[2]
+    return [ vx, vy, vz ]
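The new Vector.matprod applies a 3x3 matrix, given as a list of rows, to a vector; it could for instance replace the D.point/T.rotate round trip that display360ODS__ uses to spin the view vector around dirCam. A small sketch for a rotation about the z axis (rotateZ is an illustrative helper; a general axis would need the full Rodrigues matrix):

    import math
    import KCore.Vector as Vector

    def rotateZ(v, theta):
        # row-major rotation matrix about the z axis, applied with Vector.matprod
        c = math.cos(theta); s = math.sin(theta)
        m = [[c, -s, 0.],
             [s,  c, 0.],
             [0., 0., 1.]]
        return Vector.matprod(m, v)

    print(rotateZ([1., 0., 0.], math.pi/2.))  # ~[0., 1., 0.]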
diff --git a/docs/index.html b/docs/index.html
index 87d609590..f537c00a1 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -40,7 +40,7 @@
-	  Current release: 4.0 (07/2024) [release notes].
+	  Current release: 4.0 (07/2024) [Release notes].