#include <Inventor/nodes/SoMaterial.h>
#include <Inventor/nodes/SoPerspectiveCamera.h>
#include <Inventor/nodes/SoOrthographicCamera.h>
#include <Inventor/nodes/SoGroup.h>
#include <Inventor/nodes/SoNurbsCurve.h>
#include <Inventor/nodes/SoCoordinate4.h>
#include <Inventor/SoPath.h>
#include <Inventor/SoOffscreenRenderer.h>
#include <Inventor/Qt/SoQtRenderArea.h>
#include <Inventor/actions/SoSearchAction.h>
#include <Inventor/SoDB.h>
#include <Inventor/actions/SoWriteAction.h>
#include <Inventor/nodes/SoLineSet.h>
#include <Inventor/nodes/SoVertexProperty.h>
#include <Inventor/VRMLnodes/SoVRMLGroup.h>
#include <Inventor/actions/SoToVRML2Action.h>
#include <QTextStream>
#include <QtCoreVersion>
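
// The helpers below appear to come from the classic SGI ".rgb" image loader
// bundled with this file: latorgba() expands a luminance+alpha scanline into
// packed RGBA bytes in 'l', and the companion routines that follow do the same
// for RGB and RGBA scanlines.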
static void latorgba(unsigned char *b, unsigned char *a, unsigned char *l, int n)
                     unsigned char *b, unsigned char *l, int n)

    l += 4; ++r; ++g; ++b;
                      unsigned char *b, unsigned char *a, unsigned char *l, int n)

    l += 4; ++r; ++g; ++b; ++a;
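
// Byte-order handling: the statements below assemble big-endian 16-bit and
// 32-bit words (as stored in the .rgb file) from individual bytes b1..b4 into
// host-order unsigned values.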
  *array++ = (b1 << 8) | (b2);

  unsigned b1, b2, b3, b4;

  *array++ = (b1 << 24) | (b2 << 16) | (b3 << 8) | (b4);
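
// Runtime endianness check: a word is set to 1 and its first byte inspected
// (endianTest is presumably a union of a word and a byte array); on a
// little-endian host the values read from the big-endian file need the byte
// swaps above.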
  endianTest.testWord = 1;
  if (endianTest.testByte[0] == 1) {
    fprintf(stderr, "Out of memory!\n");

  int bytesRead = fread(image, 1, 12, image->file);

    fprintf(stderr, "fread failed!\n");
  const unsigned int colourBuffSize=image->xsize*256u;
  image->tmp = (unsigned char *)malloc(colourBuffSize);
  image->tmpR = (unsigned char *)malloc(colourBuffSize);
  image->tmpG = (unsigned char *)malloc(colourBuffSize);
  image->tmpB = (unsigned char *)malloc(colourBuffSize);
  if (image->tmp == NULL || image->tmpR == NULL || image->tmpG == NULL ||
      image->tmpB == NULL) {
    fprintf(stderr, "Out of memory!\n");
  // SGI .rgb RLE layout: run-length encoded images (type 0x01xx) store per-row
  // start offsets and sizes right after the 512-byte header; they are read
  // into image->rowStart / image->rowSize here.
  if ((image->type & 0xFF00) == 0x0100) {
    size_t x = ((size_t)image->ysize * (size_t)image->zsize) * sizeof(unsigned);

    if (image->rowStart == NULL || image->rowSize == NULL) {
      fprintf(stderr, "Out of memory!\n");

    image->rleEnd = 512 + (2 * x);
    const int fseekRetVal= fseek(image->file, 512, SEEK_SET);
    if (fseekRetVal !=0){
      fprintf(stderr, "Something very wrong with fseek near line 205 of VP1QtInventorUtils.cxx");

    size_t bytesRead = 0;
    bytesRead = fread(image->rowStart, 1, x, image->file);

    bytesRead = fread(image->rowSize, 1, x, image->file);

    image->rowStart = NULL;
    image->rowSize = NULL;

  free(image->rowSize);
  free(image->rowStart);
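
// ImageGetRow(): reads one scanline, either by seeking to the row's RLE record
// via rowStart/rowSize and decoding it, or, for verbatim images, by seeking to
// a fixed offset past the 512-byte header and reading xsize bytes directly.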
                        unsigned char *buf, int y, int z)

  unsigned char *iPtr, *oPtr, pixel;

  if ((image->type & 0xFF00) == 0x0100) {

    int okseek = fseek(image->file, (long)image->rowStart[y+z*image->ysize], SEEK_SET);
    int okread = fread(image->tmp, 1, (unsigned int)image->rowSize[y+z*image->ysize],

    int okstatus = fseek(image->file, 512u+yDim+zDim, SEEK_SET);

    size_t bytesRead = 0;
    bytesRead = fread(buf, 1, image->xsize, image->file);
    std::cout << "Warning! ImageGetRow() - no 'image'..." << std::endl;
  unsigned *base, *lptr;
  unsigned char *rbuf, *gbuf, *bbuf, *abuf;

  (*width)=image->xsize;
  (*height)=image->ysize;
  (*components)=image->zsize;
  const unsigned int imageWidth = image->xsize;
  const unsigned int imageHeight = image->ysize;
  const unsigned int uintSize(sizeof(unsigned)), ucharSize(sizeof(unsigned char));
  const unsigned int colourBufSize=imageWidth*ucharSize;
  base = (unsigned *)malloc(imageWidth*imageHeight*uintSize);
  rbuf = (unsigned char *)malloc(colourBufSize);
  gbuf = (unsigned char *)malloc(colourBufSize);
  bbuf = (unsigned char *)malloc(colourBufSize);
  abuf = (unsigned char *)malloc(colourBufSize);
  if(!base || !rbuf || !gbuf || !bbuf || !abuf) {
    if (rbuf) free(rbuf);
    if (gbuf) free(gbuf);
    if (bbuf) free(bbuf);
    if (abuf) free(abuf);

    if (image->zsize>=4) {

      lptr += image->xsize;
    } else if(image->zsize==3) {

      lptr += image->xsize;
    } else if(image->zsize==2) {

      lptr += image->xsize;

      lptr += image->xsize;

  return (unsigned *)base;
  if(width == 0 || height == 0 ) std::cout << "VP1QtInventorUtils::imageFromRGBFile - read_texture failed?" << std::endl;

  unsigned char *data = reinterpret_cast<unsigned char*>(imagedata);

  QImage im(width,height, ( components <= 3 ? QImage::Format_RGB32 : QImage::Format_ARGB32 ) );

  for (y=0; y<height; ++y) {
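
// renderToImage(): when a transparent background is requested the scene is
// rendered twice, once over a black and once over a white background, and the
// two renders are combined further below to recover per-pixel alpha.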
                                         bool transparent_background, double actualRenderedSizeFact )

  if (actualRenderedSizeFact!=1.0&&!transparent_background) {
      static_cast<int>(pixels_x*actualRenderedSizeFact+0.5),
      static_cast<int>(pixels_y*actualRenderedSizeFact+0.5),
      .scaled(pixels_x,pixels_y,Qt::IgnoreAspectRatio,Qt::SmoothTransformation);

  if (transparent_background) {

    SbColor save_bgd = ra->getBackgroundColor();
    SbBool save_redraw = ra->isAutoRedraw();
    ra->setAutoRedraw(false);

    QImage im_black_bgd, im_white_bgd;
    if (actualRenderedSizeFact==1.0) {
      ra->setBackgroundColor(SbColor(0.0,0.0,0.0));
      im_black_bgd = renderToImage(ra, pixels_x, pixels_y, false,1.0);
      ra->setBackgroundColor(SbColor(1.0,1.0,1.0));
      im_white_bgd = renderToImage(ra, pixels_x, pixels_y, false,1.0);

      ra->setBackgroundColor(SbColor(0.0,0.0,0.0));
      im_black_bgd = renderToImage(ra,
                                   static_cast<int>(pixels_x*actualRenderedSizeFact+0.5),
                                   static_cast<int>(pixels_y*actualRenderedSizeFact+0.5),
                                   false,1.0);
      ra->setBackgroundColor(SbColor(1.0,1.0,1.0));
      im_white_bgd = renderToImage(ra,
                                   static_cast<int>(pixels_x*actualRenderedSizeFact+0.5),
                                   static_cast<int>(pixels_y*actualRenderedSizeFact+0.5),
                                   false,1.0);

    ra->setBackgroundColor(save_bgd);
    ra->setAutoRedraw(save_redraw);

    if (actualRenderedSizeFact==1.0)

      .scaled(pixels_x,pixels_y,Qt::IgnoreAspectRatio,Qt::SmoothTransformation);
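
// Offscreen rendering path: an SoOffscreenRenderer is configured to match the
// render area's background colour, transparency type and antialiasing
// settings; the scene (and any overlay scene graph) is rendered and written to
// a temporary SGI .rgb file via writeToRGB(), presumably to be read back with
// imageFromRGBFile() before the temporary file is removed.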
  SoNode *rootOverlay = ra->getOverlaySceneGraph();

  SbViewportRegion myViewport;
  myViewport.setWindowSize(SbVec2s(pixels_x,pixels_y));

  QString tmppath(QDir::tempPath());
  if (!tmppath.endsWith(QDir::separator()))
    tmppath+=QDir::separator();
  tmppath += "vp1tmpfileXXXXXX.rgb";
  std::string stmppath = tmppath.toStdString();
  int tmpfd = mkstemps (stmppath.data(), 4);
  FILE* tmpf = fdopen (tmpfd, "w");
  QString tmpfile (stmppath.c_str());

  SoOffscreenRenderer *myRenderer = new SoOffscreenRenderer(myViewport);

  myRenderer->setBackgroundColor(ra->getBackgroundColor());

  myRenderer->setComponents(SoOffscreenRenderer::RGB_TRANSPARENCY);
  myRenderer->getGLRenderAction()->setTransparencyType(ra->getTransparencyType());

  SbBool smoothing; int numPasses;
  ra->getAntialiasing (smoothing, numPasses);
  myRenderer->getGLRenderAction()->setSmoothing (smoothing);
  myRenderer->getGLRenderAction()->setNumPasses(numPasses);

  if (!myRenderer->render(root)) {

  bool okOver = myRenderer->render(rootOverlay);

  if (!myRenderer->writeToRGB(tmpf)) {
    QFile(tmpfile).remove();

  QFile(tmpfile).remove();
                                           bool transparent_background, double actualRenderedSizeFact )

  return QPixmap::fromImage(renderToImage(ra, pixels_x, pixels_y, transparent_background, actualRenderedSizeFact));
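
// Recovering transparency from the two renders: for a pixel with true colour C
// and alpha a, compositing over black gives a*C and over white gives a*C+(1-a),
// so a = 1-(white-black) and C = black/a. The loop below applies exactly that,
// estimating alpha from the red channel, keeping identical pixels opaque and
// marking pixels that are black on black and white on white as fully
// transparent.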
  if (im_black_bgd.isNull()||im_white_bgd.isNull()||im_black_bgd.size()!=im_white_bgd.size())

  QImage im(im_black_bgd.size(),QImage::Format_ARGB32);

  int height = im.height();
  QRgb white = qRgba(255,255,255,255);
  QRgb black = qRgba(0,0,0,255);

    for (int y = 0; y < height; ++y) {
      if (im_black_bgd.pixel(x,y)==im_white_bgd.pixel(x,y)) {
        im.setPixel(x,y,im_white_bgd.pixel(x,y));
      } else if (im_black_bgd.pixel(x,y)==black&&im_white_bgd.pixel(x,y)==white) {
        im.setPixel(x,y,Qt::transparent);

        QColor pix_b = QColor(im_black_bgd.pixel(x,y));
        QColor pix_w = QColor(im_white_bgd.pixel(x,y));
        qreal alpha = 1.0 - pix_w.redF() + pix_b.redF();

          im.setPixel(x,y,Qt::transparent);

          im.setPixel(x,y,qRgba(static_cast<int>(pix_b.redF()/alpha*255+0.5),
                                static_cast<int>(pix_b.greenF()/alpha*255+0.5),
                                static_cast<int>(pix_b.blueF()/alpha*255+0.5),
                                static_cast<int>(alpha*255+0.5)));
  return SoGLRenderAction::DELAYED_BLEND;

  QList<SoGLRenderAction::TransparencyType> l;
    << SoGLRenderAction::SCREEN_DOOR
    << SoGLRenderAction::DELAYED_ADD
    << SoGLRenderAction::SORTED_OBJECT_ADD
    << SoGLRenderAction::BLEND
    << SoGLRenderAction::DELAYED_BLEND
    << SoGLRenderAction::SORTED_OBJECT_BLEND
    << SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_ADD
    << SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_BLEND
    << SoGLRenderAction::SORTED_LAYERS_BLEND;
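
// The integer codes below appear to follow the order of Coin's
// SoGLRenderAction::TransparencyType enum (ADD and NONE would correspondingly
// map to 1 and 9).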
  case SoGLRenderAction::SCREEN_DOOR: return 0;
  case SoGLRenderAction::DELAYED_ADD: return 2;
  case SoGLRenderAction::SORTED_OBJECT_ADD: return 3;
  case SoGLRenderAction::BLEND: return 4;
  case SoGLRenderAction::DELAYED_BLEND: return 5;
  case SoGLRenderAction::SORTED_OBJECT_BLEND: return 6;
  case SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_ADD: return 7;
  case SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_BLEND: return 8;
  case SoGLRenderAction::SORTED_LAYERS_BLEND: return 10;

  VP1Msg::messageDebug("VP1QtInventorUtils::transparencyTypeToInt ERROR: Unknown transparency type");
  case 0: return SoGLRenderAction::SCREEN_DOOR;
  case 2: return SoGLRenderAction::DELAYED_ADD;
  case 3: return SoGLRenderAction::SORTED_OBJECT_ADD;
  case 4: return SoGLRenderAction::BLEND;
  case 5: return SoGLRenderAction::DELAYED_BLEND;
  case 6: return SoGLRenderAction::SORTED_OBJECT_BLEND;
  case 7: return SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_ADD;
  case 8: return SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_BLEND;
  case 10: return SoGLRenderAction::SORTED_LAYERS_BLEND;

  return SoGLRenderAction::DELAYED_BLEND;
  case SoGLRenderAction::DELAYED_BLEND: return "Delayed blend"; break;
  case SoGLRenderAction::SCREEN_DOOR: return "Screen door"; break;
  case SoGLRenderAction::DELAYED_ADD: return "Delayed add"; break;
  case SoGLRenderAction::SORTED_OBJECT_ADD: return "Sorted object add"; break;
  case SoGLRenderAction::BLEND: return "Blend (Best for Geo volumes)"; break;
  case SoGLRenderAction::SORTED_OBJECT_BLEND: return "Sorted object blend (Best for physics objects: jets, tracks, ...)"; break;
  case SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_ADD: return "Sorted object sorted triangle add"; break;
  case SoGLRenderAction::SORTED_OBJECT_SORTED_TRIANGLE_BLEND: return "Sorted object sorted triangle blend"; break;
  case SoGLRenderAction::SORTED_LAYERS_BLEND: return "Sorted layers blend"; break;
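
// The (de)serialisation helpers below stream Inventor types (SbRotation,
// SbVec3f, camera and material parameters) through a QDataStream on a
// QBuffer-backed QByteArray; note the explicit QDataStream::SinglePrecision
// setting when handling single-precision payloads.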
  QByteArray byteArray;
  QBuffer buffer(&byteArray);
  buffer.open(QIODevice::WriteOnly);

  rot.getValue (q0,q1,q2,q3);

  buffer.open(QIODevice::ReadOnly);
  QDataStream state(&buffer);

  state.setFloatingPointPrecision(QDataStream::SinglePrecision);

  rot.setValue (q0,q1,q2,q3);

  rot.setValue (q0,q1,q2,q3);

  QByteArray byteArray;
  QBuffer buffer(&byteArray);
  buffer.open(QIODevice::WriteOnly);

  buffer.open(QIODevice::ReadOnly);
  QDataStream state(&buffer);

  state.setFloatingPointPrecision(QDataStream::SinglePrecision);
  QByteArray byteArray;
  QBuffer buffer(&byteArray);
  buffer.open(QIODevice::WriteOnly);

  SbRotation camrot = cam.orientation.getValue();

  SbVec3f campos = cam.position.getValue();

  float f_aspectRatio(cam.aspectRatio.getValue());
  float f_nearDistance(cam.nearDistance.getValue());
  float f_farDistance(cam.farDistance.getValue());
  float f_focalDistance(cam.focalDistance.getValue());

  switch (cam.viewportMapping.getValue()) {
  case SoCamera::CROP_VIEWPORT_FILL_FRAME: viewportmap = 0; break;
  case SoCamera::CROP_VIEWPORT_LINE_FRAME: viewportmap = 1; break;
  case SoCamera::CROP_VIEWPORT_NO_FRAME: viewportmap = 2; break;
  case SoCamera::ADJUST_CAMERA: viewportmap = 3; break;
  case SoCamera::LEAVE_ALONE: viewportmap = 4; break;

  if (cam.getTypeId().isDerivedFrom(SoPerspectiveCamera::getClassTypeId()))
  else if (cam.getTypeId().isDerivedFrom(SoOrthographicCamera::getClassTypeId()))

    out << (double)static_cast<const SoPerspectiveCamera*>(&cam)->heightAngle.getValue();
  } else if (camtype==1) {
    out << (double)static_cast<const SoOrthographicCamera*>(&cam)->height.getValue();

    +QString(camtype==0?"perspective":(camtype==1?"orthographic":"unknown")));

    +QString::number(static_cast<const SoPerspectiveCamera*>(&cam)->heightAngle.getValue()));

    +QString::number(static_cast<const SoOrthographicCamera*>(&cam)->height.getValue()));
  if (ba==QByteArray())

  buffer.open(QIODevice::ReadOnly);
  QDataStream state(&buffer);

  state.setFloatingPointPrecision(QDataStream::SinglePrecision);

  SbRotation rot; QByteArray ba_rot; state >> ba_rot;

  SbVec3f pos; QByteArray ba_pos; state >> ba_pos;

  bool save = cam.enableNotify(false);

  cam.orientation.setValue(rot);
  cam.position.setValue(pos);

  float f_aspectRatio, f_nearDistance, f_farDistance, f_focalDistance;

  state >> f_aspectRatio; cam.aspectRatio.setValue(f_aspectRatio);
  state >> f_nearDistance; cam.nearDistance.setValue(f_nearDistance);
  state >> f_farDistance; cam.farDistance.setValue(f_farDistance);
  state >> f_focalDistance; cam.focalDistance.setValue(f_focalDistance);

  switch (viewportmap) {
  case 0: cam.viewportMapping.setValue(SoCamera::CROP_VIEWPORT_FILL_FRAME); break;
  case 1: cam.viewportMapping.setValue(SoCamera::CROP_VIEWPORT_LINE_FRAME); break;
  case 2: cam.viewportMapping.setValue(SoCamera::CROP_VIEWPORT_NO_FRAME); break;
  case 3: cam.viewportMapping.setValue(SoCamera::ADJUST_CAMERA); break;
  case 4: cam.viewportMapping.setValue(SoCamera::LEAVE_ALONE); break;

  bool passedcameraisperspective = cam.getTypeId().isDerivedFrom(SoPerspectiveCamera::getClassTypeId());

  float f_orthopersp_heightpar(-999);

    if (!passedcameraisperspective)

    state >> f_orthopersp_heightpar;
    static_cast<SoPerspectiveCamera*>(&cam)->heightAngle.setValue(f_orthopersp_heightpar);
  } else if (camtype==1) {

    if (passedcameraisperspective)

    state >> f_orthopersp_heightpar;
    static_cast<SoOrthographicCamera*>(&cam)->height.setValue(f_orthopersp_heightpar);

  cam.enableNotify(true);

    +QString(camtype==0?"perspective":(camtype==1?"orthographic":"unknown")));
  SbRotation rot; QByteArray ba_rot; state >> ba_rot;

  SbVec3f pos; QByteArray ba_pos; state >> ba_pos;

  bool save = cam.enableNotify(false);

  cam.orientation.setValue(rot);
  cam.position.setValue(pos);

  double f_aspectRatio, f_nearDistance, f_farDistance, f_focalDistance;

  state >> f_aspectRatio; cam.aspectRatio.setValue(f_aspectRatio);
  state >> f_nearDistance; cam.nearDistance.setValue(f_nearDistance);
  state >> f_farDistance; cam.farDistance.setValue(f_farDistance);
  state >> f_focalDistance; cam.focalDistance.setValue(f_focalDistance);

  switch (viewportmap) {
  case 0: cam.viewportMapping.setValue(SoCamera::CROP_VIEWPORT_FILL_FRAME); break;
  case 1: cam.viewportMapping.setValue(SoCamera::CROP_VIEWPORT_LINE_FRAME); break;
  case 2: cam.viewportMapping.setValue(SoCamera::CROP_VIEWPORT_NO_FRAME); break;
  case 3: cam.viewportMapping.setValue(SoCamera::ADJUST_CAMERA); break;
  case 4: cam.viewportMapping.setValue(SoCamera::LEAVE_ALONE); break;

  bool passedcameraisperspective = cam.getTypeId().isDerivedFrom(SoPerspectiveCamera::getClassTypeId());

  double f_orthopersp_heightpar(-999);

    if (!passedcameraisperspective)

    state >> f_orthopersp_heightpar;
    static_cast<SoPerspectiveCamera*>(&cam)->heightAngle.setValue(f_orthopersp_heightpar);
  } else if (camtype==1) {

    if (passedcameraisperspective)

    state >> f_orthopersp_heightpar;
    static_cast<SoOrthographicCamera*>(&cam)->height.setValue(f_orthopersp_heightpar);

  cam.enableNotify(true);

    +QString(camtype==0?"perspective":(camtype==1?"orthographic":"unknown")));
  cam.unrefNoDelete();

  return SbColor( col.red()/255.0, col.green()/255.0, col.blue()/255.0 );

  return QColor::fromRgbF( r, g, b );
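
// SoMaterial (de)serialisation: the byte stream is framed by the strings
// "somat_v1_begin" / "somat_end" and carries the ambient, diffuse, specular
// and emissive colours plus shininess and transparency; the reader accepts
// both a single-precision (106-byte) layout and a double-precision one.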
  if (!m||m->ambientColor.getNum()!=1
      ||m->diffuseColor.getNum()!=1
      ||m->specularColor.getNum()!=1
      ||m->emissiveColor.getNum()!=1
      ||m->transparency.getNum()!=1
      ||m->shininess.getNum()!=1) {
      "Passed material must have exactly one value in each of the 6 fields!!");
    return QByteArray();

  QByteArray byteArray;
  QBuffer buffer(&byteArray);
  buffer.open(QIODevice::WriteOnly);

  out << QString("somat_v1_begin");

  out << (double)m->transparency[0];
  out << QString("somat_end");
  if (!m||m->ambientColor.getNum()!=1
      ||m->diffuseColor.getNum()!=1
      ||m->specularColor.getNum()!=1
      ||m->emissiveColor.getNum()!=1
      ||m->transparency.getNum()!=1
      ||m->shininess.getNum()!=1) {
      "Passed material must have exactly one value in each of the 6 fields!!");

  buffer.open(QIODevice::ReadOnly);

  stream.setFloatingPointPrecision(QDataStream::SinglePrecision);

  if (str!="somat_v1_begin")

  QColor ambientcol; stream >> ambientcol;
  if (!ambientcol.isValid())

  QColor diffusecol; stream >> diffusecol;
  if (!diffusecol.isValid())

  QColor specularcol; stream >> specularcol;
  if (!specularcol.isValid())

  QColor emissivecol; stream >> emissivecol;
  if (!emissivecol.isValid())

  if(ba.size()==106) {

    float shininess; stream >> shininess;
    if (shininess<0.0f||shininess>1.0f)

    float transparency; stream >> transparency;
    if (transparency<0.0f||transparency>1.0f)

    if (str!="somat_end")

    m->specularColor.setValue(qcol2sbcol(specularcol));
    m->emissiveColor.setValue(qcol2sbcol(emissivecol));
    m->shininess.setValue(shininess);
    m->transparency.setValue(transparency);

    double shininess; stream >> shininess;
    if (shininess<0.0||shininess>1.0)

    double transparency; stream >> transparency;
    if (transparency<0.0||transparency>1.0)

    if (str!="somat_end")

    m->specularColor.setValue(qcol2sbcol(specularcol));
    m->emissiveColor.setValue(qcol2sbcol(emissivecol));
    m->shininess.setValue(shininess);
    m->transparency.setValue(transparency);
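
// Circle as a rational NURBS curve: nine homogeneous SbVec4f control points
// around the circle, with the four diagonal points weighted by 1/sqrt(2) (the
// standard four-arc rational representation of a circle) and the knot vector
// 0,0,0,1,1,2,2,3,3,4,4,4.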
  SoGroup* grp = new SoGroup;

  SoCoordinate4 *coord = new SoCoordinate4;
  const double invsqrttwo=0.707106781186547;

  coord->point.set1Value(icoord++,SbVec4f(1*radius,0,0,1));
  coord->point.set1Value(icoord++,SbVec4f(invsqrttwo*radius,invsqrttwo*radius,0,invsqrttwo));
  coord->point.set1Value(icoord++,SbVec4f(0,1*radius,0,1));
  coord->point.set1Value(icoord++,SbVec4f(-invsqrttwo*radius,invsqrttwo*radius,0,invsqrttwo));
  coord->point.set1Value(icoord++,SbVec4f(-1*radius,0,0,1));
  coord->point.set1Value(icoord++,SbVec4f(-invsqrttwo*radius,-invsqrttwo*radius,0,invsqrttwo));
  coord->point.set1Value(icoord++,SbVec4f(0,-1*radius,0,1));
  coord->point.set1Value(icoord++,SbVec4f(invsqrttwo*radius,-invsqrttwo*radius,0,invsqrttwo));
  coord->point.set1Value(icoord++,SbVec4f(1*radius,0,0,1));

  SoNurbsCurve * curve = new SoNurbsCurve;
  curve->numControlPoints = icoord;

  curve->knotVector.set1Value(iknot++,0);
  curve->knotVector.set1Value(iknot++,0);
  curve->knotVector.set1Value(iknot++,0);
  curve->knotVector.set1Value(iknot++,1);
  curve->knotVector.set1Value(iknot++,1);
  curve->knotVector.set1Value(iknot++,2);
  curve->knotVector.set1Value(iknot++,2);
  curve->knotVector.set1Value(iknot++,3);
  curve->knotVector.set1Value(iknot++,3);
  curve->knotVector.set1Value(iknot++,4);
  curve->knotVector.set1Value(iknot++,4);
  curve->knotVector.set1Value(iknot++,4);
  grp->addChild(coord);
  grp->addChild(curve);

  grp->unrefNoDelete();
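
// Ellipse drawn as a closed SoLineSet: vertices are sampled at angles
// 2*pi*i/numnodes, scaled by radiusX and radiusY, with the starting point
// repeated at the end to close the curve.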
  SoVertexProperty *vertices = new SoVertexProperty();

  vertices->vertex.set1Value(iver++,radiusX,0.0,0.0);
  for (int i = 1; i < numnodes; i++)
    vertices->vertex.set1Value(iver++,
                               cos(2.0*static_cast<double>(i)*M_PI/static_cast<double>(numnodes))*radiusX,
                               sin(2.0*static_cast<double>(i)*M_PI/static_cast<double>(numnodes))*radiusY,0.0);
  vertices->vertex.set1Value(iver++,radiusX,0.0,0.0);

  SoLineSet * ellipse = new SoLineSet();
  ellipse->numVertices = iver;
  ellipse->vertexProperty = vertices;
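
// Re-pointing a path: an SoSearchAction locates 'newtail' underneath
// 'commonBranchPoint'; the incoming path is truncated just after the branch
// point and the found sub-path is appended.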
  if (!path||!commonBranchPoint||!newtail)

  sa.setInterest(SoSearchAction::FIRST);
  sa.setNode(newtail);
  sa.apply(commonBranchPoint);

  SoPath * newpath = sa.getPath();

  for (int i=0; i<path->getLength(); ++i) {
    if (path->getNode(i)==commonBranchPoint) {
      path->truncate(i+1);

  path->append(newpath);
  out.setBuffer(buffer, buffer_size, buffer_realloc);

  SoWriteAction wa(&out);

  SoToVRML2Action vwa;

  SoVRMLGroup * newroot = vwa.getVRML2SceneGraph();

  out.setHeaderString("#VRML V2.0 utf8");
  SoWriteAction wra(&out);

  root->unrefNoDelete();

  if (data.open(QFile::WriteOnly | QFile::Truncate)) {

#if QTCORE_VERSION >= 0x050E00
    out << s << Qt::endl;

  if (!in.openFile(filename.toStdString().c_str()))

  return SoDB::readAll(&in);

  root->unrefNoDelete();
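
// The remaining helpers adjust material colours by a brightness/transparency
// factor (judging by the signature fragments) and query the hardware-supported
// line-width and point-size ranges and granularity from an SoQtRenderArea,
// reporting them via VP1Msg.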
                             const double& brightness, const double& transp )

                             const double& brightness, const double& transp )

                        "purpose of getting supported line widths and point sizes!");

  SbVec2f range;
  float granularity;
  ra->getLineWidthLimits(range, granularity);

  VP1Msg::messageVerbose("VP1QtInventorUtils Determined line widths supported by hardware (min,max,granularity) = ("

  ra->getPointSizeLimits(range, granularity);

  VP1Msg::messageVerbose("VP1QtInventorUtils Determined point sizes supported by hardware (min,max,granularity) = ("
  // Slider helpers: the code below maps a floating-point parameter range onto
  // an integer QSlider range (capped at 1000 steps), with the page step set to
  // the number of slider steps per parameter unit.
  slider->setRange(0,nsteps);
  slider->setSingleStep(1);
  slider->setPageStep(stepsPerUnit);

  int nsteps = std::min(1000,std::max<int>(0,
  int stepsPerUnit = std::min(nsteps,std::max<int>(1,
  slider->setRange(0,nsteps);
  slider->setSingleStep(1);
  slider->setPageStep(stepsPerUnit);

  int itarget = std::min(slider->maximum(),std::max<int>(slider->minimum(),
  if (slider->value()!=itarget)
    slider->setValue(itarget);

  int itarget = std::min(slider->maximum(),std::max<int>(slider->minimum(),
  if (slider->value()!=itarget)
    slider->setValue(itarget);