Announcement

Collapse
No announcement yet.

V-Ray SDK BitmapInterface getting color crashes V-ray & Maya

Collapse
X
 
  • Filter
  • Time
  • Show
Clear All
new posts

  • #16
    Thanks, looking forward to it!

    I tried using the context and changing the sample point with setRay() or spawnNewContext from the context and setting the ray on that, but it never seemed to give the results of actually sampling the UV point specified.
    So I wonder what the correct workaround is.

    Comment


    • #17
      Here is the modified code. Here I removed the "uvwgen" parameter as I assume you don't need it.

      Code:
      #include "vrayplugins.h"
      #include "vrayinterface.h"
      #include "vrayrenderer.h"
      #include "vraytexutils.h"
      #include <math.h>
      
      #include "globalnewdelete.cpp"
      
      using namespace VR;
      
      // Parameters
      // Parameter-list description for the SphereMapTex plugin: declares the
      // parameters this texture exposes to .vrscene files / the UI.
      struct SphereMapTex_Params: VRayParameterListDesc {
      	SphereMapTex_Params(void) {
      		// Add the parameters
      		// "texture": the wrapped texture that will be re-sampled at custom UVs.
      		addParamTexture("texture");
      	}
      };
      
      
      // The texture plugin itself. It wraps another texture (the "texture"
      // parameter) and evaluates it at UV coordinates computed here rather than
      // at the UVs coming from the surface's own mapping.
      struct SphereMapTex: VRayTexture {
      	SphereMapTex(VRayPluginDesc *pluginDesc):VRayTexture(pluginDesc) {
      		// Set parameter caches
      		// Bind the "texture" parameter to the cached pointer below; it is
      		// refreshed automatically when frameBegin() runs.
      		paramList->setParamCache("texture", &texture);
      	}
      
      	// Render/frame lifecycle hooks; each forwards to the base class and to
      	// the shade cache.
      	void renderBegin(VR::VRayRenderer *vray);
      	void renderEnd(VR::VRayRenderer *vray);
      	void frameBegin(VR::VRayRenderer *vray);
      	void frameEnd(VR::VRayRenderer *vray);
      
      	// Texture evaluation interface.
      	AColor getTexColor(const VRayContext &rc);
      	void getTexColorBounds(AColor &cmin, AColor &cmax);
      	Vector getColorBumpGradient(const VRayContext &rc);
      private:
      	// Cached parameters
      	TextureInterface *texture; // the wrapped texture; may be NULL if unset
      
      	// The color cache
      	// NOTE(review): only the lifecycle forwarding calls touch this cache in
      	// the code visible here; getTexColor() does not consult it.
      	ShadeCache<AColor, true, false> shadeCache;
      };
      
      // Unique plugin ID plus the boilerplate that registers SphereMapTex (with
      // its parameter description) in the plugin library as a texture plugin.
      #define SphereMapTex_PluginID PluginID(LARGE_CONST(20141228000))
      SIMPLE_PLUGIN_LIBRARY(SphereMapTex_PluginID, EXT_TEXTURE, "SphereMapTex", "SphereMap texture for VRay", SphereMapTex, SphereMapTex_Params);
      
      // Called when rendering starts; forwards the event to the base class and
      // then to the shade cache.
      void SphereMapTex::renderBegin(VR::VRayRenderer *vray) {
      	VRayTexture::renderBegin(vray);
      	shadeCache.renderBegin(vray);
      }
      
      // Called when rendering ends; mirrors renderBegin() — base class first,
      // then the shade cache.
      void SphereMapTex::renderEnd(VR::VRayRenderer *vray) {
      	VRayTexture::renderEnd(vray);
      	shadeCache.renderEnd(vray);
      }
      
      // Called at the start of each frame. The base-class call refreshes the
      // cached parameters (including 'texture'), so the NULL check below must
      // come after it.
      void SphereMapTex::frameBegin(VR::VRayRenderer *vray) {
      	// Call the base class to update the parameter caches
      	VRayTexture::frameBegin(vray);
      
      	// Call frameBegin on the shade cache
      	shadeCache.frameBegin(vray);
      
      	// Warn if no texture was connected (only when a progress callback exists).
      	const VRaySequenceData &sdata=vray->getSequenceData();
      	if (!texture && sdata.progress) sdata.progress->warning("[SphereMapTex] No texture specified.");
      }
      
      // Called at the end of each frame; forwards the event to the base class
      // and then to the shade cache.
      void SphereMapTex::frameEnd(VR::VRayRenderer *vray) {
      	VRayTexture::frameEnd(vray);
      	shadeCache.frameEnd(vray);
      }
      
      // A structure that replaces the normal shade data in the ray context so
      // that we can evaluate textures at arbitrary UVs.
      //
      // It acts as a scope guard: the constructor swaps this object in as the
      // context's shade data and the destructor restores the previous pointer.
      // While installed it answers the MappedSurface interface query and reports
      // the fixed (u, v) pair as the UVW offset, so a texture evaluated through
      // the context samples at exactly those coordinates.
      struct UVSurface: public MappedSurface, public VRayShadeData {
      public:
          // Install this object as rc.rayresult.sd, remembering the previous
          // shade data. NOTE(review): the const_cast means the context is
          // mutated despite the const reference; keep the guard's lifetime
          // strictly local to the evaluation.
          UVSurface(float u, float v, const VRayContext& rc):uvCoords(u,v) {
              oldShadeData=rc.rayresult.sd;
              rcc=const_cast<VRayContext*>( &rc );
              rcc->rayresult.sd=static_cast<VRayShadeData*>(this);
          }
      
          // Restore the shade data that was active before this guard was created.
          ~UVSurface() {
              rcc->rayresult.sd=oldShadeData;
          }
      
          PluginBase *getPlugin(void) {
              return static_cast<PluginBase*>(this);
          }
      
          // Only the MappedSurface extension interface is exposed; every other
          // interface ID gets NULL.
          PluginInterface *newInterface(InterfaceID id) {
              if (id==EXT_MAPPED_SURFACE) {
                  return static_cast<MappedSurface*>(this);
              }
              return NULL;
          }
      
      	// From MappedSurface
          virtual Transform getLocalUVWTransform(const VRayContext& rc, int channel) {
              Transform t;
      
              // Matrix that describes how a change in the shading point, in world coordinates,
              // is converted to a change in UV coordinates. This is used for texture filtering.
              // If you do not know how to compute this matrix analytically, you can set it
              // to zero, but this will remove any filtering from the sampled texture. For
              // bitmap textures, this will cause the most detailed mip-map level to be always
              // loaded. It is a good idea to spend some time and compute the correct matrix here.
              t.m.makeZero();
      
              // Replace the UVs with our own
              t.offs.set(uvCoords.x, uvCoords.y, 0.0f);
              return t;
          }
      
      private:
          VRayContext* rcc;            // the (de-const-ed) context we patched
          VRayShadeData* oldShadeData; // shade data restored by the destructor
      
          Vector2 uvCoords;            // the fixed UV pair reported to samplers
      };
      
      // Evaluate the wrapped texture at UVs derived from the shading point.
      // Falls back to mid-grey when no texture is connected.
      AColor SphereMapTex::getTexColor(const VRayContext &rc) {
      	if (!texture) return AColor(0.5, 0.5, 0.5, 1.0);
      
      	// Derive a demo UV pair from the world-space shading position.
      	const float uCoord=(float) rc.rayresult.wpoint.x*0.1f;
      	const float vCoord=(float) rc.rayresult.wpoint.y*0.1f;
      
      	// Temporarily install our own shade data so the wrapped texture is
      	// sampled at (uCoord, vCoord); the guard's destructor restores the
      	// original shade data when this function returns.
      	UVSurface shadeDataGuard(uCoord, vCoord, rc);
      	return texture->getTexColor(rc);
      }
      
      // Report the color bounds of the wrapped texture, or the mid-grey
      // fallback (matching getTexColor) when no texture is connected.
      void SphereMapTex::getTexColorBounds(AColor &cmin, AColor &cmax) {
      	if (texture) {
      		texture->getTexColorBounds(cmin, cmax);
      	} else {
      		cmin=cmax=AColor(0.5f, 0.5f, 0.5f, 1.0f);
      	}
      }
      
      // Bump mapping is intentionally disabled: always report a zero gradient.
      // (Delegating to texture->getColorBumpGradient(rc) would be the way to
      // re-enable it.)
      Vector SphereMapTex::getColorBumpGradient(const VRayContext &rc) {
      	return Vector(0.0f, 0.0f, 0.0f);
      }
      Let me know if you have any questions.

      Best regards,
      Vlado
      I only act like I know everything, Rogers.

      Comment


      • #18
        Thanks Vlado!

        I'll definitely start working with this. So what exactly is the need of the BitmapInterface and why doesn't that work with Maya's UI connected textures? At least it didn't seem to work.
        The workaround you provided here seems a bit odd. You're overriding the sample point in the context to sample a different UV point.
        Does that mean that if I would have multiple texture slots that I want to sample at a different UV I just need to change the context with the surface class after each other?

        Also I saw this topic:
        http://forums.chaosgroup.com/showthr...kup-equivalent

        Looking at this it seemed that he was able to connect the LUT texture somehow and have it render correctly.
        Does that mean he connected it outside of Maya's UI? Or what is exactly the reason we need this workaround with the texture parameters?

        Thanks for all the information! This really helps to understand more about the V-ray SDK!

        Cheers,
        Roy

        Comment


        • #19
          Originally posted by colorbleed View Post
          So what exactly is the need of the BitmapInterface and why doesn't that work with Maya's UI connected textures?
          It is an internal interface that V-Ray uses to represent bitmap buffers. Those may come from bitmap files on the disk, but we also have a bitmap buffer that can specify the raw pixel data directly. We have never meant for BitmapInterface plugins to be directly accessible to the user inside Maya.

          The workaround you provided here seems a bit odd. You're overriding the sample point in the context to sample a different UV point.
          Yes. What is odd about that?

          Does that mean that if I would have multiple texture slots that I want to sample at a different UV I just need to change the context with the surface class after each other?
          Yes, just be careful to restore the original shade data pointer in the context on exit.

          Also I saw this topic: http://forums.chaosgroup.com/showthr...kup-equivalent Looking at this it seemed that he was able to connect the LUT texture somehow and have it render correctly. Does that mean he connected it outside of Maya's UI?
          Yes, the BitmapBuffer plugin is created internally by the V-Ray plugin so it does not appear directly in the Maya UI. Perhaps in Maya the user just put a string parameter to specify the file name.

          Or what is exactly the reason we need this workaround with the texture parameters?
          Well, I don't know what you really want to do, so I'm just giving you the simplest something that I know will work. It is of course possible to use a BitmapBuffer plugin, but you will most likely have to create it yourself based on a user-supplied string, or something else.

          Best regards,
          Vlado
          I only act like I know everything, Rogers.

          Comment


          • #20
            Thanks for the explanation, makes perfect sense.
            Not sure how to calculate a matrix for the subsampling, but I don't think I need that in this particular case.

            Basically what I'm doing is sample a spherical map based on the surface normal.
            So I ended up doing:
            Code:
            	Vector nrm = rc.rayresult.normal;
            	float u = 1- (0.5 + (atan2(nrm.z, nrm.x) / (2*pi())));
            	float v = 1 - (0.5 - (asin(nrm.y) / pi()));
            With the current code it seems to be working nicely.
            Now all that's left is create a Maya node equivalent so I can handle it nicely in Maya, write a shader relation file and figure out how to nicely distribute this on the server.
            We were unable to figure out how to do the above with built-in nodes in Maya/V-ray.

            Cheers,
            Roy

            Comment


            • #21
              Originally posted by colorbleed View Post
              Now all that's left is create a Maya node equivalent so I can handle it nicely in Maya, write a shader relation file
              In the latest nightly builds (3.x and 2.x), the V-Ray for Maya translator can automatically generate a Maya node based on the shader description file only. Check the vraysdk/docs/maya_sdk.html file, in the "Automatic generation of shading nodes" section - if you have that section then your version supports this.

              We were unable to figure out how to do the above with built-in nodes in Maya/V-ray.
              Which part did you not figure out? How to put the shader on a shared network place?

              Best regards,
              Vlado
              I only act like I know everything, Rogers.

              Comment


              • #22
                The functionality you're referring to from 3.x and 2.x versions is the Create Texture From Plug-In functionality right? It doesn't just show in the hypershade as a material so it can be easily created? That's basically the only reason why I would implement it otherwise.

                We were unable to figure out how to do the above with built-in nodes in Maya/V-ray.
                I meant that I was unable to perform the functionality from the sample code I posted with built-in nodes. Like sampling an envBall directly from the normal angle.
                Would be nice to also implement the same supporting reflection, so I think that would be the next step for my plug-in.

                How to put the shader on a shared network place?
                According to the docs I would have to:
                Set up the VRAY_FOR_MAYA_SHADERS environment path to point at a server directory that contains the conversion description files for V-ray for Maya (the .txt files).
                Set up the VRAY_FOR_MAYAnnnn_PLUGINS_pp environment path to point at a server directory that contains the V-ray plug-in dlls to be loaded within Maya for V-ray for Maya (the .dll files).
                Set up the VRAY_PLUGINS_pp environment path to point at a server directory that contains the V-ray plug-in dlls to be loaded within V-ray Standalone (the .dll files). (This could be the same directory)
                Then of course if I would compile a custom Maya node that gets converted that would be somewhere in the MAYA_PLUG_IN_PATH to load it. (the .mll file)

                The documentation also states:
                For materials, the classification (i.e. the string returned for its outApiClassification attribute) must be "shader/surface/utility/:swatch/VRayMtlSwatchGen" (without the quotes) so that the material swatch is rendered with V-Ray;
                Does that need to be the exact string, and would I use the same for a Texture? Or how would that differ?
                Could I still classify it in a way so that it shows up under Maya > Env Textures in the Hypershade and still have it work correctly?

                So yeah, there's definitely some extra checking out I need to do to work this all through!

                Comment


                • #23
                  Originally posted by colorbleed View Post
                  The functionality you're referring to from 3.x and 2.x versions is the Create Texture From Plug-In functionality right?
                  No, that's not what I meant (though this will certainly work too).

                  It doesn't just show in the hypershade as a material so it can be easily created? That's basically the only reason why I would implement it otherwise.
                  It does show up in the Hypershade as a texture node, if created in this way.

                  I meant that I was unable to perform the functionality from the sample code I posted with built-in nodes.
                  Well, yes, can't code nodes for every occasion, that's why the SDK is there...

                  Like sampling an envBall directly from the normal angle. Would be nice to also implement the same supporting reflection, so I think that would be the next step for my plug-in.
                  I didn't get that... but we can talk about it when you get there.

                  According to the docs I would have to:
                  Set up the VRAY_FOR_MAYA_SHADERS environment path to point at a server directory that contains the conversion description files for V-ray for Maya (the .txt files).

                  Set up the VRAY_FOR_MAYAnnnn_PLUGINS_pp environment path to point at a server directory that contains the V-ray plug-in dlls to be loaded within Maya for V-ray for Maya (the .dll files).

                  Set up the VRAY_PLUGINS_pp environment path to point at a server directory that contains the V-ray plug-in dlls to be loaded within V-ray Standalone (the .dll files). (This could be same directory)

                  Then of course if I would compile a custom Maya node that gets converted that would be somewhere in the MAYA_PLUG_IN_PATH to load it. (the .mll file)
                  This is correct, yes.

                  The documentation also states:
                  For materials, the classification (i.e. the string returned for its outApiClassification attribute) must be "shader/surface/utility/:swatch/VRayMtlSwatchGen" (without the quotes) so that the material swatch is rendered with V-Ray;
                  Does that need to be the exact string, and would I use the same for a Texture? Or how would that differ?
                  It needs to be slightly different in the V-Ray part for the swatch. See the example for the Max noise texture. (Though I think we may have some issue with the swatches...)

                  Could I still classify it in a way so that it shows up under Maya > Env Textures in the Hypershade and still have it work correctly?
                  Yes, of course you can classify it as whatever you want.

                  Best regards,
                  Vlado
                  I only act like I know everything, Rogers.

                  Comment

                  Working...
                  X