Mitsuba Renderer 0.5.0
scene.h
1 /*
2  This file is part of Mitsuba, a physically based rendering system.
3 
4  Copyright (c) 2007-2014 by Wenzel Jakob and others.
5 
6  Mitsuba is free software; you can redistribute it and/or modify
7  it under the terms of the GNU General Public License Version 3
8  as published by the Free Software Foundation.
9 
10  Mitsuba is distributed in the hope that it will be useful,
11  but WITHOUT ANY WARRANTY; without even the implied warranty of
12  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13  GNU General Public License for more details.
14 
15  You should have received a copy of the GNU General Public License
16  along with this program. If not, see <http://www.gnu.org/licenses/>.
17 */
18 
19 #pragma once
20 #if !defined(__MITSUBA_RENDER_SCENE_H_)
21 #define __MITSUBA_RENDER_SCENE_H_
22 
23 #include <mitsuba/core/netobject.h>
24 #include <mitsuba/core/pmf.h>
25 #include <mitsuba/core/aabb.h>
26 #include <mitsuba/render/trimesh.h>
27 #include <mitsuba/render/skdtree.h>
28 #include <mitsuba/render/sensor.h>
29 #include <mitsuba/render/emitter.h>
30 #include <mitsuba/render/bsdf.h>
31 #include <mitsuba/render/subsurface.h>
32 #include <mitsuba/render/texture.h>
33 #include <mitsuba/render/medium.h>
34 #include <mitsuba/render/volume.h>
35 #include <mitsuba/render/phase.h>
36 
37 MTS_NAMESPACE_BEGIN
38 
39 /**
40  * \brief Principal scene data structure
41  *
42  * This class holds information on surfaces, emitters and participating media
43  * and coordinates rendering jobs. It also provides useful query routines that
44  * are mostly used by the \ref Integrator implementations.
45  *
46  * \ingroup librender
47  * \ingroup libpython
48  */
49 class MTS_EXPORT_RENDER Scene : public NetworkedObject {
50 public:
51  // =============================================================
52  //! @{ \name Initialization and rendering
53  // =============================================================
54 
55  /// Construct a new, empty scene (with the default properties)
56  Scene();
57 
58  /// Construct a new, empty scene
59  Scene(const Properties &props);
60 
61  /// Create a shallow clone of a scene
62  Scene(Scene *scene);
63 
64  /// Unserialize a scene from a binary data stream
65  Scene(Stream *stream, InstanceManager *manager);
66 
67  /**
68  * \brief Initialize the scene
69  *
70  * This function \a must be called before using any
71  * of the methods in this class.
72  */
73  void initialize();
74 
75  /**
76  * \brief Invalidate the kd-tree
77  *
78  * This function must be called if, after running \ref initialize(),
79  * additional geometry is added to the scene.
80  */
81  void invalidate();
82 
83  /**
84  * \brief Initialize the scene for bidirectional rendering algorithms.
85  *
86  * This ensures that certain "special" shapes (such as the aperture
87  * of the sensor) are added to the scene. This function should be called
88  * before using any of the methods in this class.
89  */
90  void initializeBidirectional();
91 
92  /**
93  * \brief Perform any pre-processing steps before rendering
94  *
95  * This function should be called after \ref initialize() and
96  * before rendering the scene. It might do a variety of things,
97  * such as constructing photon maps or executing distributed overture
98  * passes.
99  *
100  * Progress is tracked by sending status messages to a provided
101  * render queue (the parameter \c job is required to discern multiple
102  * render jobs occurring in parallel).
103  *
104  * The last three parameters are resource IDs of the associated scene,
105  * sensor and sample generator, which have been made available to all
106  * local and remote workers.
107  *
108  * \return \c true upon successful completion.
109  */
110  bool preprocess(RenderQueue *queue, const RenderJob *job,
111  int sceneResID, int sensorResID, int samplerResID);
112 
113  /**
114  * \brief Render the scene as seen by the scene's main sensor.
115  *
116  * Progress is tracked by sending status messages to a provided
117  * render queue (the parameter \c job is required to discern multiple
118  * render jobs occurring in parallel).
119  *
120  * The last three parameters are resource IDs of the associated scene,
121  * sensor and sample generator, which have been made available to all
122  * local and remote workers.
123  *
124  * \return \c true upon successful completion.
125  */
126  bool render(RenderQueue *queue, const RenderJob *job,
127  int sceneResID, int sensorResID, int samplerResID);
128 
129  /**
130  * \brief Perform any post-processing steps after rendering
131  *
132  * Progress is tracked by sending status messages to a provided
133  * render queue (the parameter \c job is required to discern multiple
134  * render jobs occurring in parallel).
135  *
136  * The last three parameters are resource IDs of the associated scene,
137  * sensor and sample generator, which have been made available to all
138  * local and remote workers.
139  */
140  void postprocess(RenderQueue *queue, const RenderJob *job,
141  int sceneResID, int sensorResID, int samplerResID);
142 
143  /// Write out the current (partially rendered) image
144  void flush(RenderQueue *queue, const RenderJob *job);
145 
146  /**
147  * \brief Cancel a running rendering job
148  *
149  * This function can be called asynchronously, e.g. from a GUI.
150  * In this case, \ref render() will quit with a return value of
151  * \c false.
152  */
153  void cancel();
154 
155  /// Add a child node to the scene
156  void addChild(const std::string &name, ConfigurableObject *child);
157 
158  /// Add an unnamed child
159  inline void addChild(ConfigurableObject *child) { addChild("", child); }
160 
161  /** \brief Configure this object (called \a once after construction
162  and addition of all child \ref ConfigurableObject instances). */
163  void configure();
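 /* Usage sketch (illustrative, not part of the original header): the typical
    lifecycle of a Scene inside a render job. 'props', 'shape', 'queue', 'job'
    and the three resource IDs are assumed to have been created elsewhere
    (e.g. by the scene loader and the scheduler).

      ref<Scene> scene = new Scene(props);
      scene->addChild(shape);              // register geometry, emitters, sensor, ...
      scene->configure();                  // called once after all children were added
      scene->initialize();                 // builds the kd-tree; required before queries

      if (scene->preprocess(queue, job, sceneResID, sensorResID, samplerResID))
          scene->render(queue, job, sceneResID, sensorResID, samplerResID);
      scene->postprocess(queue, job, sceneResID, sensorResID, samplerResID);
 */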
164 
165  //! @}
166  // =============================================================
167 
168  // =============================================================
169  //! @{ \name Ray tracing
170  // =============================================================
171 
172  /**
173  * \brief Intersect a ray against all primitives stored in the scene
174  * and return detailed intersection information
175  *
176  * \param ray
177  * A 3-dimensional ray data structure with minimum/maximum
178  * extent information, as well as a time value (which applies
179  * when the shapes are in motion)
180  *
181  * \param its
182  * A detailed intersection record, which will be filled by the
183  * intersection query
184  *
185  * \return \c true if an intersection was found
186  */
187  inline bool rayIntersect(const Ray &ray, Intersection &its) const {
188  return m_kdtree->rayIntersect(ray, its);
189  }
190 
191  /**
192  * \brief Intersect a ray against all primitives stored in the scene
193  * and return the traveled distance and intersected shape
194  *
195  * This function represents a performance improvement when the
196  * intersected shape must be known, but there is no need for
197  * a detailed intersection record.
198  *
199  * \param ray
200  * A 3-dimensional ray data structure with minimum/maximum
201  * extent information, as well as a time value (which applies
202  * when the shapes are in motion)
203  *
204  * \param t
205  * The traveled ray distance will be stored in this parameter
206  *
207  * \param shape
208  * A pointer to the intersected shape will be stored in this
209  * parameter
210  *
211  * \param n
212  * The geometric surface normal will be stored in this parameter
213  *
214  * \param uv
215  * The UV coordinates associated with the intersection will
216  * be stored here.
217  *
218  * \return \c true if an intersection was found
219  */
220  inline bool rayIntersect(const Ray &ray, Float &t,
221  ConstShapePtr &shape, Normal &n, Point2 &uv) const {
222  return m_kdtree->rayIntersect(ray, t, shape, n, uv);
223  }
224 
225  /**
226  * \brief Intersect a ray against all primitives stored in the scene
227  * and \a only determine whether or not there is an intersection.
228  *
229  * This is by far the fastest ray tracing method. This performance
230  * improvement comes with a major limitation though: this function
231  * cannot provide any additional information about the detected
232  * intersection (not even its position).
233  *
234  * \param ray
235  * A 3-dimensional ray data structure with minimum/maximum
236  * extent information, as well as a time value (which applies
237  * when the shapes are in motion)
238  *
239  * \return \c true if an intersection was found
240  */
241  inline bool rayIntersect(const Ray &ray) const {
242  return m_kdtree->rayIntersect(ray);
243  }
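 /* Usage sketch (illustrative): choosing among the three rayIntersect()
    variants. 'scene', 'ray' and 'shadowRay' are assumed to exist in the
    calling integrator.

      Intersection its;
      if (scene->rayIntersect(ray, its)) {
          // full intersection record available, e.g. for shading
      }

      Float t; ConstShapePtr shape; Normal n; Point2 uv;
      if (scene->rayIntersect(ray, t, shape, n, uv)) {
          // cheaper query: distance, shape, normal and UV coordinates only
      }

      if (scene->rayIntersect(shadowRay)) {
          // fastest variant: pure visibility test, no intersection details
      }
 */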
244 
245  /**
246  * \brief Return the transmittance between \c p1 and \c p2 at the
247  * specified time.
248  *
249  * This function is essentially a continuous version of \ref isOccluded(),
250  * which additionally accounts for the presence of participating media
251  * and surface interactions that attenuate a ray without changing
252  * its direction (i.e. geometry with an alpha mask)
253  *
254  * The implementation correctly handles arbitrary numbers of index-matched
255  * medium transitions. The \c interactions parameter can be used to
256  * specify a maximum number of possible surface interactions and medium
257  * transitions between \c p1 and \c p2. When this number is exceeded,
258  * the function returns zero.
259  *
260  * Note that index-mismatched boundaries (i.e. a transition from air to
261  * water) are not supported by this function. The integrator needs to take
262  * care of these in some other way.
263  *
264  * \param p1
265  * Source position
266  * \param p2
267  * Target position
268  * \param p1OnSurface
269  * Is the source position located on a surface? This information is
270  * necessary to set up the right ray epsilons for the kd-tree traversal
271  * \param p2OnSurface
272  * Is the target position located on a surface?
273  * \param medium
274  * The medium at \c p1
275  * \param interactions
276  * Specifies the maximum permissible number of index-matched medium
277  * transitions or \ref BSDF::ENull scattering events on the way
278  * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
279  * When the function returns a nonzero result, this parameter will
280  * additionally be used to return the actual number of intermediate
281  * interactions.
282  * \param time
283  * Associated scene time value for the transmittance computation
284  * \param sampler
285  * Optional: A sample generator. This may be used
286  * to compute a random unbiased estimate of the transmission.
287  * \return A spectral-valued transmittance with components
288  * between zero and one.
289  */
290  Spectrum evalTransmittance(const Point &p1, bool p1OnSurface,
291  const Point &p2, bool p2OnSurface, Float time, const Medium *medium,
292  int &interactions, Sampler *sampler = NULL) const;
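 /* Usage sketch (illustrative): estimating the transmittance along the segment
    between a surface point 'its.p' and an emitter sample 'dRec.p'. Both records,
    the medium pointer and the sampler are assumed to be set up by the caller;
    whether 'dRec.p' lies on a surface depends on the emitter type.

      int interactions = -1;   // allow arbitrarily many index-matched transitions
      Spectrum Tr = scene->evalTransmittance(its.p, true, dRec.p, true,
          its.time, medium, interactions, sampler);
      // Tr becomes zero if the segment is blocked by opaque geometry
 */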
293 
294  //! @}
295  // =============================================================
296 
297  // =============================================================
298  //! @{ \name Ray tracing support for bidirectional algorithms
299  // =============================================================
300 
301  /**
302  * \brief Intersect a ray against all scene primitives \a and
303  * "special" primitives, such as the aperture of a sensor.
304  *
305  * This function does exactly the same thing as \ref rayIntersect,
306  * except that it additionally performs intersections against a
307  * list of "special" shapes that are intentionally kept outside
308  * of the main scene kd-tree (e.g. because they are not static
309  * and might change from rendering to rendering). This is needed
310  * by some bidirectional techniques that e.g. care about
311  * intersections with the sensor aperture.
312  *
313  * \param ray
314  * A 3-dimensional ray data structure with minimum/maximum
315  * extent information, as well as a time value (which applies
316  * when the shapes are in motion)
317  *
318  * \param its
319  * A detailed intersection record, which will be filled by the
320  * intersection query
321  *
322  * \return \c true if an intersection was found
323  */
324  bool rayIntersectAll(const Ray &ray, Intersection &its) const;
325 
326  /**
327  * \brief Intersect a ray against all normal and "special" primitives
328  * and only return the traveled distance and intersected shape
329  *
330  * This function represents a performance improvement when the
331  * intersected shape must be known, but there is no need for
332  * a detailed intersection record.
333  *
334  * This function does exactly the same thing as \ref rayIntersect,
335  * except that it additionally performs intersections against a
336  * list of "special" shapes that are intentionally kept outside
337  * of the main scene kd-tree (e.g. because they are not static
338  * and might change from rendering to rendering). This is needed
339  * by some bidirectional techniques that e.g. care about
340  * intersections with the sensor aperture.
341  *
342  * \param ray
343  * A 3-dimensional ray data structure with minimum/maximum
344  * extent information, as well as a time value (which applies
345  * when the shapes are in motion)
346  *
347  * \param t
348  * The traveled ray distance will be stored in this parameter
349  *
350  * \param shape
351  * A pointer to the intersected shape will be stored in this
352  * parameter
353  *
354  * \param n
355  * The geometric surface normal will be stored in this parameter
356  *
357  * \param uv
358  * The UV coordinates associated with the intersection will
359  * be stored here.
360  *
361  * \return \c true if an intersection was found
362  */
363  bool rayIntersectAll(const Ray &ray, Float &t,
364  ConstShapePtr &shape, Normal &n, Point2 &uv) const;
365 
366  /**
367  * \brief Intersect a ray against all normal and "special" primitives
368  * and \a only determine whether or not there is an intersection.
369  *
370  * This is by far the fastest ray tracing method. This performance
371  * improvement comes with a major limitation though: this function
372  * cannot provide any additional information about the detected
373  * intersection (not even its position).
374  *
375  * This function does exactly the same thing as \ref rayIntersect,
376  * except that it additionally performs intersections against a
377  * list of "special" shapes that are intentionally kept outside
378  * of the main scene kd-tree (e.g. because they are not static
379  * and might change from rendering to rendering). This is needed
380  * by some bidirectional techniques that e.g. care about
381  * intersections with the sensor aperture.
382  *
383  * \param ray
384  * A 3-dimensional ray data structure with minimum/maximum
385  * extent information, as well as a time value (which applies
386  * when the shapes are in motion)
387  *
388  * \return \c true if an intersection was found
389  */
390  bool rayIntersectAll(const Ray &ray) const;
391 
392  /**
393  * \brief Return the transmittance between \c p1 and \c p2 at the
394  * specified time (and account for "special" primitives).
395  *
396  * This function is essentially a continuous version of \ref isOccluded(),
397  * which additionally accounts for the presence of participating media
398  * and surface interactions that attenuate a ray without changing
399  * its direction (i.e. geometry with an alpha mask)
400  *
401  * The implementation correctly handles arbitrary numbers of index-matched
402  * medium transitions. The \c interactions parameter can be used to
403  * specify a maximum number of possible surface interactions and medium
404  * transitions between \c p1 and \c p2. When this number is exceeded,
405  * the function returns zero.
406  *
407  * Note that index-mismatched boundaries (i.e. a transition from air to
408  * water) are not supported by this function. The integrator needs to take
409  * care of these in some other way.
410  *
411  * This function does exactly the same thing as \ref evalTransmittance,
412  * except that it additionally performs intersections against a
413  * list of "special" shapes that are intentionally kept outside
414  * of the main scene kd-tree (e.g. because they are not static
415  * and might change from rendering to rendering). This is needed
416  * by some bidirectional techniques that care about intersections
417  * with the sensor aperture, etc.
418  *
419  * \param p1
420  * Source position
421  * \param p2
422  * Target position
423  * \param p1OnSurface
424  * Is the source position located on a surface? This information is
425  * necessary to set up the right ray epsilons for the kd-tree traversal
426  * \param p2OnSurface
427  * Is the target position located on a surface?
428  * \param medium
429  * The medium at \c p1
430  * \param interactions
431  * Specifies the maximum permissible number of index-matched medium
432  * transitions or \ref BSDF::ENull scattering events on the way
433  * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
434  * When the function returns a nonzero result, this parameter will
435  * additionally be used to return the actual number of intermediate
436  * interactions.
437  * \param time
438  * Associated scene time value for the transmittance computation
439  * \param sampler
440  * Optional: A sample generator. This may be used
441  * to compute a random unbiased estimate of the transmission.
442  * \return A spectral-valued transmittance with components
443  * between zero and one.
444  */
445  Spectrum evalTransmittanceAll(const Point &p1, bool p1OnSurface,
446  const Point &p2, bool p2OnSurface, Float time, const Medium *medium,
447  int &interactions, Sampler *sampler = NULL) const;
448 
449  //! @}
450  // =============================================================
451 
452  // =============================================================
453  //! @{ \name Direct sampling techniques
454  // =============================================================
455 
456  /**
457  * \brief Direct illumination sampling routine
458  *
459  * Given an arbitrary reference point in the scene, this method samples a
460  * position on an emitter that has a nonzero contribution towards that point.
461  *
462  * Ideally, the implementation should importance sample the product of
463  * the emission profile and the geometry term between the reference point
464  * and the position on the emitter.
465  *
466  * \param dRec
467  * A direct illumination sampling record that specifies the
468  * reference point and a time value. After the function terminates,
469  * it will be populated with the position sample and related information
470  *
471  * \param sample
472  * A uniformly distributed 2D vector
473  *
474  * \param testVisibility
475  * When set to \c true, a shadow ray will be cast to ensure that the
476  * sampled emitter position and the reference point are mutually visible.
477  *
478  * \return
479  * An importance weight given by the radiance received along
480  * the sampled ray divided by the sample probability.
481  */
482  Spectrum sampleEmitterDirect(DirectSamplingRecord &dRec,
483  const Point2 &sample, bool testVisibility = true) const;
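 /* Usage sketch (illustrative): next-event estimation from a surface
    intersection 'its'. The DirectSamplingRecord(const Intersection&)
    convenience constructor and the variables 'sampler', 'throughput' and
    'bsdfVal' are assumptions of this sketch.

      DirectSamplingRecord dRec(its);
      Spectrum value = scene->sampleEmitterDirect(dRec, sampler->next2D());
      if (!value.isZero()) {
          // 'value' already contains the visibility test and the 1/pdf factor;
          // accumulate throughput * bsdfVal * value
      }
 */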
484 
485  /**
486  * \brief "Direct illumination" sampling routine for the main scene sensor
487  *
488  * Given an arbitrary reference point in the scene, this method samples a
489  * position on a sensor that has a nonzero contribution towards that point.
490  * This function can be interpreted as a generalization of a direct
491  * illumination sampling strategy to sensors.
492  *
493  * Ideally, the implementation should importance sample the product of
494  * the response profile and the geometry term between the reference point
495  * and the position on the sensor.
496  *
497  * \param dRec
498  * A direct illumination sampling record that specifies the
499  * reference point and a time value. After the function terminates,
500  * it will be populated with the position sample and related information
501  *
502  * \param sample
503  * A uniformly distributed 2D vector
504  *
505  * \param testVisibility
506  * When set to \c true, a shadow ray will be cast to ensure that the
507  * sampled sensor position and the reference point are mutually visible.
508  *
509  * \return
510  * An importance weight given by the importance emitted along
511  * the sampled ray divided by the sample probability.
512  */
513  Spectrum sampleSensorDirect(DirectSamplingRecord &dRec,
514  const Point2 &sample, bool testVisibility = true) const;
515 
516  /**
517  * \brief Direct illumination sampling with support for participating
518  * media (medium variant)
519  *
520  * Given an arbitrary reference point in the scene, this method samples a
521  * position on an emitter that has a nonzero contribution towards that point.
522  * In comparison to \ref sampleEmitterDirect, this version also accounts for
523  * attenuation by participating media and should be used when \c dRec.p
524  * lies \a inside a medium, i.e. \a not on a surface!
525  *
526  * Ideally, the implementation should importance sample the product of
527  * the emission profile and the geometry term between the reference point
528  * and the position on the emitter.
529  *
530  * \param dRec
531  * A direct illumination sampling record that specifies the
532  * reference point and a time value. After the function terminates,
533  * it will be populated with the position sample and related information
534  *
535  * \param medium
536  * The medium located at the reference point (or \c NULL for vacuum).
537  *
538  * \param interactions
539  * Specifies the maximum permissible number of index-matched medium
540  * transitions or \ref BSDF::ENull scattering events on the way
541  * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
542  * When the function returns a nonzero result, this parameter will
543  * additionally be used to return the actual number of intermediate
544  * interactions.
545  *
546  * \param sample
547  * A uniformly distributed 2D vector
548  *
549  * \param sampler
550  * Optional: a pointer to a sample generator. Some particular
551  * implementations can do a better job at sampling when they have
552  * access to additional random numbers.
553  *
554  * \return
555  * An importance weight given by the radiance received along
556  * the sampled ray divided by the sample probability.
557  */
558  Spectrum sampleAttenuatedEmitterDirect(DirectSamplingRecord &dRec,
559  const Medium *medium, int &interactions, const Point2 &sample,
560  Sampler *sampler = NULL) const;
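 /* Usage sketch (illustrative): next-event estimation from a scattering event
    inside a participating medium. 'dRec' is assumed to already reference the
    scattering point and its time value; 'medium' is the medium containing it.

      int interactions = -1;   // no limit on index-matched transitions
      Spectrum value = scene->sampleAttenuatedEmitterDirect(dRec, medium,
          interactions, sampler->next2D(), sampler);
      // multiply by the phase function value towards dRec.d before accumulating
 */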
561 
562  /**
563  * \brief "Direct illumination" sampling routine for the main scene sensor
564  * with support for participating media (medium variant)
565  *
566  * Given an arbitrary reference point in the scene, this method samples a
567  * position on a sensor that has a nonzero response towards that point.
568  * In comparison to \ref sampleSensorDirect, this version also accounts for
569  * attenuation by participating media and should be used when \c dRec.p
570  * lies \a inside a medium, i.e. \a not on a surface!
571  * This function can be interpreted as a generalization of a direct
572  * illumination sampling strategy to sensors.
573  *
574  * Ideally, the implementation should importance sample the product of
575  * the response profile and the geometry term between the reference point
576  * and the position on the sensor.
577  *
578  * \param dRec
579  * A direct illumination sampling record that specifies the
580  * reference point and a time value. After the function terminates,
581  * it will be populated with the position sample and related information
582  *
583  * \param medium
584  * The medium located at the reference point (or \c NULL for vacuum).
585  *
586  * \param interactions
587  * Specifies the maximum permissible number of index-matched medium
588  * transitions or \ref BSDF::ENull scattering events on the way
589  * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
590  * When the function returns a nonzero result, this parameter will
591  * additionally be used to return the actual number of intermediate
592  * interactions.
593  *
594  * \param sample
595  * A uniformly distributed 2D vector
596  *
597  * \param sampler
598  * Optional: a pointer to a sample generator. Some particular
599  * implementations can do a better job at sampling when they have
600  * access to additional random numbers.
601  *
602  * \return
603  * An importance weight given by the importance emitted along
604  * the sampled ray divided by the sample probability.
605  */
606  Spectrum sampleAttenuatedSensorDirect(DirectSamplingRecord &dRec,
607  const Medium *medium, int &interactions, const Point2 &sample,
608  Sampler *sampler = NULL) const;
609 
610  /**
611  * \brief Direct illumination sampling with support for participating
612  * media (surface variant)
613  *
614  * Given an arbitrary reference point in the scene, this method samples a
615  * position on an emitter that has a nonzero contribution towards that point.
616  * In comparison to \ref sampleEmitterDirect, this version also accounts for
617  * attenuation by participating media and should be used when the target
618  * position lies on a surface.
619  *
620  * Ideally, the implementation should importance sample the product of
621  * the emission profile and the geometry term between the reference point
622  * and the position on the emitter.
623  *
624  * \param dRec
625  * A direct illumination sampling record that specifies the
626  * reference point and a time value. After the function terminates,
627  * it will be populated with the position sample and related information
628  *
629  * \param its
630  * An intersection record associated with the reference point in
631  * \c dRec. This record is needed to determine the participating
632  * medium between the emitter sample and the reference point
633  * when \c its marks a medium transition.
634  *
635  * \param medium
636  * The medium located at \c its (or \c NULL for vacuum). When the shape
637  * associated with \c its marks a medium transition, it does not matter
638  * which of the two media is specified.
639  *
640  * \param interactions
641  * Specifies the maximum permissible number of index-matched medium
642  * transitions or \ref BSDF::ENull scattering events on the way
643  * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
644  * When the function returns a nonzero result, this parameter will
645  * additionally be used to return the actual number of intermediate
646  * interactions.
647  *
648  * \param sample
649  * A uniformly distributed 2D vector
650  *
651  * \param sampler
652  * Optional: a pointer to a sample generator. Some particular
653  * implementations can do a better job at sampling when they have
654  * access to additional random numbers.
655  *
656  * \return
657  * An importance weight given by the radiance received along
658  * the sampled ray divided by the sample probability.
659  */
660  Spectrum sampleAttenuatedEmitterDirect(DirectSamplingRecord &dRec,
661  const Intersection &its, const Medium *medium, int &interactions,
662  const Point2 &sample, Sampler *sampler = NULL) const;
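 /* Usage sketch (illustrative): the same strategy at a surface vertex in a
    scene with media. 'its' is the current intersection, 'medium' the medium
    on the shading point's side of the surface; the remaining-depth budget
    'interactions' is a hypothetical choice of this sketch.

      DirectSamplingRecord dRec(its);
      int interactions = maxDepth - depth - 1;
      Spectrum value = scene->sampleAttenuatedEmitterDirect(dRec, its, medium,
          interactions, sampler->next2D(), sampler);
      // 'value' additionally accounts for transmittance along the shadow ray
 */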
663 
664  /**
665  * \brief "Direct illumination" sampling routine for the main scene sensor
666  * with support for participating media (surface variant)
667  *
668  * Given an arbitrary reference point in the scene, this method samples a
669  * position on a sensor that has a nonzero response towards that point.
670  * In comparison to \ref sampleSensorDirect, this version also accounts for
671  * attenuation by participating media and should be used when the target
672  * position lies on a surface.
673  *
674  * Ideally, the implementation should importance sample the product of
675  * the response profile and the geometry term between the reference point
676  * and the position on the sensor.
677  *
678  * \param dRec
679  * A direct illumination sampling record that specifies the
680  * reference point and a time value. After the function terminates,
681  * it will be populated with the position sample and related information
682  *
683  * \param its
684  * An intersection record associated with the reference point in
685  * \c dRec. This record is needed to determine the participating
686  * medium between the sensor sample and the reference point
687  * when \c its marks a medium transition.
688  *
689  * \param medium
690  * The medium located at \c its (or \c NULL for vacuum). When the shape
691  * associated with \c its marks a medium transition, it does not matter
692  * which of the two media is specified.
693  *
694  * \param interactions
695  * Specifies the maximum permissible number of index-matched medium
696  * transitions or \ref BSDF::ENull scattering events on the way
697  * to the light source. (<tt>interactions<0</tt> means arbitrarily many).
698  * When the function returns a nonzero result, this parameter will
699  * additionally be used to return the actual number of intermediate
700  * interactions.
701  *
702  * \param sample
703  * A uniformly distributed 2D vector
704  *
705  * \param sampler
706  * Optional: a pointer to a sample generator. Some particular
707  * implementations can do a better job at sampling when they have
708  * access to additional random numbers.
709  *
710  * \return
711  * An importance weight given by the importance emitted along
712  * the sampled ray divided by the sample probability.
713  */
714  Spectrum sampleAttenuatedSensorDirect(DirectSamplingRecord &dRec,
715  const Intersection &its, const Medium *medium, int &interactions,
716  const Point2 &sample, Sampler *sampler = NULL) const;
717 
718  /**
719  * \brief Evaluate the probability density of the \a direct sampling
720  * method implemented by the \ref sampleEmitterDirect() method.
721  *
722  * \param dRec
723  * A direct sampling record, which specifies the query
724  * location. Note that this record need not be completely
725  * filled out. The important fields are \c p, \c n, \c ref,
726  * \c dist, \c d, \c measure, and \c uv.
727  *
732  * \return
733  * The density expressed with respect to the requested measure
734  * (usually \ref ESolidAngle)
735  */
736  Float pdfEmitterDirect(const DirectSamplingRecord &dRec) const;
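 /* Usage sketch (illustrative): multiple importance sampling in a path tracer.
    When a BSDF-sampled ray happens to hit an emitter, the density that emitter
    sampling would have assigned to the same connection is needed for the power
    heuristic. Filling 'dRec' from the emitter hit (e.g. via a setQuery-style
    helper) is assumed to happen before this snippet.

      Float lumPdf = scene->pdfEmitterDirect(dRec);   // emitter-sampling density
      Float weight = (bsdfPdf * bsdfPdf) /
                     (bsdfPdf * bsdfPdf + lumPdf * lumPdf);   // power heuristic, beta = 2
 */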
737 
738  /**
739  * \brief Evaluate the probability density of the \a direct sampling
740  * method implemented by the \ref sampleSensorDirect() method.
741  *
742  * \param dRec
743  * A direct sampling record, which specifies the query
744  * location. Note that this record need not be completely
745  * filled out. The important fields are \c p, \c n, \c ref,
746  * \c dist, \c d, \c measure, and \c uv.
747  *
752  * \return
753  * The density expressed with respect to the requested measure
754  * (usually \ref ESolidAngle)
755  */
756  Float pdfSensorDirect(const DirectSamplingRecord &dRec) const;
757 
758  //! @}
759  // =============================================================
760 
761  // =============================================================
762  //! @{ \name Emission sampling techniques
763  // =============================================================
764 
765  /**
766  * \brief Sample a position according to the emission profile
767  * defined by the emitters in the scene.
768  *
769  * To sample the directional component, please use the
770  * \ref Emitter::sampleDirection() method.
771  *
772  * \param pRec
773  * A position record to be populated with the sampled
774  * position and related information
775  *
776  * \param sample
777  * A uniformly distributed 2D vector
778  *
779  * \return
780  * An importance weight associated with the sampled position.
781  * This accounts for the difference in the spatial part of the
782  * emission profile and the density function.
783  */
784  Spectrum sampleEmitterPosition(PositionSamplingRecord &pRec,
785  const Point2 &sample) const;
786 
787  /**
788  * \brief Sample a position on the main sensor of the scene.
789  *
790  * This function is provided here mainly for symmetry
791  * with respect to \ref sampleEmitterPosition().
792  *
793  * To sample the directional component, please use the
794  * \ref Sensor::sampleDirection() method.
795  *
796  * \param pRec
797  * A position record to be populated with the sampled
798  * position and related information
799  *
800  * \param sample
801  * A uniformly distributed 2D vector
802  *
803  * \param extra
804  * An additional 2D vector provided to the sampling
805  * routine -- its use is implementation-dependent.
806  *
807  * \return
808  * An importance weight associated with the sampled position.
809  * This accounts for the difference in the spatial part of the
810  * response profile and the density function.
811  */
812  inline Spectrum sampleSensorPosition(PositionSamplingRecord &pRec,
813  const Point2 &sample, const Point2 *extra = NULL) const {
814  pRec.object = m_sensor.get();
815  return m_sensor->samplePosition(pRec, sample, extra);
816  }
817 
818  /**
819  * \brief Evaluate the spatial component of the sampling density
820  * implemented by the \ref sampleEmitterPosition() method
821  *
822  * \param pRec
823  * A position sampling record, which specifies the query location
824  *
825  * \return
826  * The area density at the supplied position
827  */
828  Float pdfEmitterPosition(const PositionSamplingRecord &pRec) const;
829 
830  /**
831  * \brief Evaluate the spatial component of the sampling density
832  * implemented by the \ref sampleSensorPosition() method
833  *
834  * \param pRec
835  * A position sampling record, which specifies the query location
836  *
837  * \return
838  * The area density at the supplied position
839  */
840  inline Float pdfSensorPosition(const PositionSamplingRecord &pRec) const {
841  return m_sensor->pdfPosition(pRec);
842  }
843 
844  /**
845  * \brief Return the discrete probability of choosing a
846  * certain emitter in <tt>sampleEmitter*</tt>
847  */
848  inline Float pdfEmitterDiscrete(const Emitter *emitter) const {
849  return emitter->getSamplingWeight() * m_emitterPDF.getNormalization();
850  }
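 /* Illustration: the emitter PDF is built from the emitters' sampling weights,
    and getNormalization() returns one over their sum. With two emitters of
    weights 1 and 3, pdfEmitterDiscrete() therefore returns 0.25 and 0.75,
    respectively. */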
851 
852  /**
853  * \brief Importance sample a ray according to the emission profile
854  * defined by the emitters in the scene
855  *
856  * This function combines both steps of choosing a ray origin and
857  * direction value. It does not return any auxiliary sampling
858  * information and is mainly meant to be used by unidirectional
859  * rendering techniques.
860  *
861  * Note that this function potentially uses a different sampling
862  * strategy compared to the sequence of running \ref sampleEmitterPosition()
863  * and \ref Emitter::sampleDirection(). The reason for this is that it may
864  * be possible to switch to a better technique when sampling both
865  * position and direction at the same time.
866  *
867  * \param ray
868  * A ray data structure to be populated with a position
869  * and direction value
870  *
871  * \param spatialSample
872  * Denotes the sample that is used to choose the spatial component
873  *
874  * \param directionalSample
875  * Denotes the sample that is used to choose the directional component
876  *
877  * \param time
878  * Scene time value to be associated with the sample
879  *
880  * \return
881  * An importance weight associated with the sampled ray.
882  * This accounts for the difference between the emission profile
883  * and the sampling density function.
884  */
885  Spectrum sampleEmitterRay(Ray &ray,
886  const Emitter* &emitter,
887  const Point2 &spatialSample,
888  const Point2 &directionalSample,
889  Float time) const;
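 /* Usage sketch (illustrative): starting a light path in a particle tracer.
    'sampler' and 'time' are assumed to come from the surrounding code.

      Ray ray;
      const Emitter *emitter = NULL;
      Spectrum power = scene->sampleEmitterRay(ray, emitter,
          sampler->next2D(), sampler->next2D(), time);
      if (!power.isZero()) {
          // trace 'ray' through the scene, depositing energy weighted by 'power'
      }
 */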
890 
891  //! @}
892  // =============================================================
893 
894  // =============================================================
895  //! @{ \name Environment emitters
896  // =============================================================
897 
898  /// Return the scene's environment emitter (if there is one)
899  inline const Emitter *getEnvironmentEmitter() const { return m_environmentEmitter.get(); }
900 
901  /// Does the scene have an environment emitter?
902  inline bool hasEnvironmentEmitter() const { return m_environmentEmitter.get() != NULL; }
903 
904  /**
905  * \brief Return the environment radiance for a ray that did not intersect
906  * any of the scene objects.
907  *
908  * This is primarily meant for path tracing-style integrators.
909  */
910  inline Spectrum evalEnvironment(const RayDifferential &ray) const {
911  return hasEnvironmentEmitter() ?
912  m_environmentEmitter->evalEnvironment(ray) : Spectrum(0.0f);
913  }
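 /* Usage sketch (illustrative): handling an escaped ray in a path tracer-style
    integrator. 'Li', 'throughput' and the RayDifferential 'ray' are assumed
    to belong to the surrounding integrator loop.

      if (!scene->rayIntersect(ray, its)) {
          Li += throughput * scene->evalEnvironment(ray);
          break;   // the path has left the scene
      }
 */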
914 
915  /**
916  * \brief Return the environment radiance for a ray that did not intersect
917  * any of the scene objects. This method additionally considers
918  * transmittance by participating media
919  *
920  * This is primarily meant for path tracing-style integrators.
921  */
922  inline Spectrum evalAttenuatedEnvironment(const RayDifferential &ray,
923  const Medium *medium, Sampler *sampler) const {
924  if (!m_environmentEmitter)
925  return Spectrum(0.0f);
926  Spectrum result = evalEnvironment(ray);
927  if (medium)
928  result *= medium->evalTransmittance(ray, sampler);
929  return result;
930  }
931 
932  //! @}
933  // =============================================================
934 
935  // =============================================================
936  //! @{ \name Miscellaneous
937  // =============================================================
938 
939  /// Return an axis-aligned bounding box containing the whole scene
940  inline const AABB &getAABB() const {
941  return m_aabb;
942  }
943 
944  /**
945  * \brief Is the main scene sensor degenerate? (i.e. has it
946  * collapsed to a point or line)
947  *
948  * Note that this function only cares about the spatial component
949  * of the sensor -- its value does not depend on whether the directional
950  * response function is degenerate.
951  */
952  inline bool hasDegenerateSensor() const { return m_degenerateSensor; }
953 
954  /**
955  * \brief Are \a all emitters in this scene degenerate?
956  * (i.e. have they collapsed to a point or line)
957  *
958  * Note that this function only cares about the spatial component
959  * of the emitters -- its value does not depend on whether the
960  * directional emission profile is degenerate.
961  */
962  inline bool hasDegenerateEmitters() const { return m_degenerateEmitters; }
963 
964  /// Return a bounding sphere containing the whole scene
965  inline BSphere getBSphere() const {
966  // todo: switch to something smarter at some point
967  return m_aabb.getBSphere();
968  }
969 
970  /// Does the scene contain participating media?
971  inline bool hasMedia() const { return !m_media.empty(); }
972 
973  /**
974  * \brief Set the main scene sensor.
975  *
976  * Note that the main sensor is not included when this Scene instance
977  * is serialized -- the sensor field will be \c NULL after
978  * unserialization. This is intentional so that the sensor can
979  * be changed without having to re-transmit the whole scene.
980  * Hence, it needs to be submitted separately and re-attached
981  * on the remote side using \ref setSensor().
982  **/
983  void setSensor(Sensor *sensor);
984 
985  /// \brief Remove a sensor from the scene's sensor list
986  void removeSensor(Sensor *sensor);
987 
988  /// \brief Add a sensor to the scene's sensor list
989  void addSensor(Sensor *sensor);
990 
991  /// Return the scene's sensor
992  inline Sensor *getSensor() { return m_sensor; }
993 
994  /// Return the scene's sensor (const version)
995  inline const Sensor *getSensor() const { return m_sensor.get(); }
996 
997  /**
998  * \brief Return the list of sensors that are specified
999  * by the scene.
1000  *
1001  * A scene can have multiple sensors -- however, during
1002  * a rendering, there will always be one "main" sensor that
1003  * is currently active.
1004  *
1005  * \sa getSensor
1006  */
1007  inline ref_vector<Sensor> &getSensors() { return m_sensors; }
1008 
1009  /**
1010  * \brief Return the list of sensors that are specified
1011  * by the scene (const version)
1012  *
1013  * A scene can have multiple sensors -- however, during
1014  * a rendering, there will always be one "main" sensor that
1015  * is currently active.
1016  *
1017  * \sa getSensor
1018  */
1019  inline const ref_vector<Sensor> &getSensors() const { return m_sensors; }
1020 
1021  /**
1022  * \brief Set the scene's integrator.
1023  *
1024  * Note that the integrator is not included when this Scene instance
1025  * is serialized -- the integrator field will be \c NULL after
1026  * unserialization. This is intentional so that the integrator can
1027  * be changed without having to re-transmit the whole scene. Hence,
1028  * the integrator needs to be submitted separately and re-attached
1029  * on the remote side using \ref setIntegrator().
1030  **/
1031  inline void setIntegrator(Integrator *integrator) { m_integrator = integrator; }
1032 
1033  /// Return the scene's integrator
1034  inline Integrator *getIntegrator() { return m_integrator; }
1035  /// Return the scene's integrator (const version)
1036  inline const Integrator *getIntegrator() const { return m_integrator.get(); }
1037 
1038  /**
1039  * \brief Set the scene's sampler.
1040  *
1041  * Note that the sampler is not included when this Scene instance
1042  * is serialized -- the sampler field will be \c NULL after
1043  * unserialization. This is intentional so that the sampler can
1044  * be changed without having to re-transmit the whole scene.
1045  * Hence, the sampler needs to be submitted separately
1046  * and re-attached on the remote side using \ref setSampler().
1047  **/
1048  inline void setSampler(Sampler *sampler) { m_sampler = sampler; }
1049 
1050  /**
1051  * \brief Return the scene's sampler.
1052  *
1053  * Note that when rendering using multiple different threads, each
1054  * thread will be passed a shallow copy of the scene, which has a
1055  * different sampler instance. This helps to avoid locking/contention
1056  * issues and ensures that different threads render with different
1057  * random number sequences. The sampler instance provided here is a
1058  * clone of the original sampler specified in the sensor.
1059  */
1060  inline Sampler *getSampler() { return m_sampler; }
1061  /// Return the scene's sampler
1062  inline const Sampler *getSampler() const { return m_sampler.get(); }
1063 
1064  /// Return the scene's film
1065  inline Film *getFilm() { return m_sensor->getFilm(); }
1066  /// Return the scene's film
1067  inline const Film *getFilm() const { return m_sensor->getFilm(); }
1068 
1069  /// Return the scene's kd-tree accelerator
1070  inline ShapeKDTree *getKDTree() { return m_kdtree; }
1071  /// Return the scene's kd-tree accelerator
1072  inline const ShapeKDTree *getKDTree() const { return m_kdtree.get(); }
1073 
1074  /// Return a list of all subsurface integrators
1075  inline ref_vector<Subsurface> &getSubsurfaceIntegrators() { return m_ssIntegrators; }
1076  /// Return a list of all subsurface integrators
1077  inline const ref_vector<Subsurface> &getSubsurfaceIntegrators() const { return m_ssIntegrators; }
1078 
1079  /// Return the scene's triangular meshes (a subset of \ref getShapes())
1080  inline std::vector<TriMesh *> &getMeshes() { return m_meshes; }
1081  /// Return the scene's triangular meshes (a subset of \ref getShapes())
1082  inline const std::vector<TriMesh *> &getMeshes() const { return m_meshes; }
1083  /// Return the scene's normal shapes (including triangular meshes)
1084  inline ref_vector<Shape> &getShapes() { return m_shapes; }
1085  /// Return the scene's normal shapes (including triangular meshes)
1086  inline const ref_vector<Shape> &getShapes() const { return m_shapes; }
1087 
1088  /// Return a set of special shapes related to emitter/sensor geometry in bidirectional renderings
1089  inline ref_vector<Shape> &getSpecialShapes() { return m_specialShapes; }
1090  /// Return a set of special shapes related to emitter/sensor geometry in bidirectional renderings
1091  inline const ref_vector<Shape> &getSpecialShapes() const { return m_specialShapes; }
1092 
1093  /// Return the scene's emitters
1094  inline ref_vector<Emitter> &getEmitters() { return m_emitters; }
1095  /// Return the scene's emitters
1096  inline const ref_vector<Emitter> &getEmitters() const { return m_emitters; }
1097  /// Return the scene's participating media
1098  inline ref_vector<Medium> &getMedia() { return m_media; }
1099  /// Return the scene's participating media
1100  inline const ref_vector<Medium> &getMedia() const { return m_media; }
1101  /// Return referenced objects (such as textures, BSDFs)
1102  inline ref_vector<ConfigurableObject> &getReferencedObjects() { return m_objects; }
1103  /// Return referenced objects (such as textures, BSDFs)
1104  inline const ref_vector<ConfigurableObject> &getReferencedObjects() const { return m_objects; }
1105 
1106  /// Return the name of the file containing the original description of this scene
1107  inline const fs::path &getSourceFile() const { return *m_sourceFile; }
1108  /// Set the name of the file containing the original description of this scene
1109  void setSourceFile(const fs::path &name);
1110  /// Return the render output filename
1111  inline const fs::path &getDestinationFile() const { return *m_destinationFile; }
1112  /// Set the render output filename
1113  void setDestinationFile(const fs::path &name);
1114 
1115  /// Does the destination file already exist?
1116  inline bool destinationExists() const { return m_sensor->getFilm()->destinationExists(*m_destinationFile); }
1117 
1118  /// Set the block resolution used to split images into parallel workloads
1119  inline void setBlockSize(uint32_t size) { m_blockSize = size; }
1120  /// Return the block resolution used to split images into parallel workloads
1121  inline uint32_t getBlockSize() const { return m_blockSize; }
1122 
1123  /// Serialize the whole scene to a network/file stream
1124  void serialize(Stream *stream, InstanceManager *manager) const;
1125 
1126  /* NetworkedObject implementation */
1127  void bindUsedResources(ParallelProcess *proc) const;
1128  void wakeup(ConfigurableObject *parent,
1129  std::map<std::string, SerializableObject *> &params);
1130 
1131  /// Return a string representation
1132  std::string toString() const;
1133 
1134  //! @}
1135  // =============================================================
1136 
1137  MTS_DECLARE_CLASS()
1138 protected:
1139  /// Virtual destructor
1140  virtual ~Scene();
1141 
1142  /// \cond
1143  /// Add a shape to the scene
1144  void addShape(Shape *shape);
1145  /// \endcond
1146 private:
1147  ref<ShapeKDTree> m_kdtree;
1148  ref<Sensor> m_sensor;
1149  ref<Integrator> m_integrator;
1150  ref<Sampler> m_sampler;
1151  ref<Emitter> m_environmentEmitter;
1152  ref_vector<Shape> m_shapes;
1153  ref_vector<Shape> m_specialShapes;
1154  ref_vector<Sensor> m_sensors;
1155  ref_vector<Emitter> m_emitters;
1156  ref_vector<ConfigurableObject> m_objects;
1157  ref_vector<NetworkedObject> m_netObjects;
1158  ref_vector<Subsurface> m_ssIntegrators;
1159  ref_vector<Medium> m_media;
1160  std::vector<TriMesh *> m_meshes;
1161  fs::path *m_sourceFile;
1162  fs::path *m_destinationFile;
1163  DiscreteDistribution m_emitterPDF;
1164  AABB m_aabb;
1165  uint32_t m_blockSize;
1166  bool m_degenerateSensor;
1167  bool m_degenerateEmitters;
1168 };
1169 
1170 MTS_NAMESPACE_END
1171 
1172 #include <mitsuba/render/records.inl>
1173 
1174 #endif /* __MITSUBA_RENDER_SCENE_H_ */