37void Film::init_aovs(
const Set<std::string> &passes_used_by_viewport_compositor)
39 Vector<ViewLayerAOV *> aovs;
45 if (inst_.is_viewport()) {
67 if (passes_used_by_viewport_compositor.
contains(aov->
name)) {
81 inst_.info_append_i18n(
"Error: Too many AOVs");
93 if (!aovs.is_empty()) {
102 if (pass_tx ==
nullptr) {
114 Texture &accum_tx = is_value ? value_accum_tx_ : color_accum_tx_;
122 for (
uint4 candidate_hash : aovs_hash) {
123 if (candidate_hash.x ==
hash) {
130 if (aov_index == -1) {
136 int index = aov_index + (is_value ? data_.aov_value_id : data_.aov_color_id);
146void Film::sync_mist()
167 data_.
mist_bias = -mist_start / mist_distance;
/* Map a legacy view-layer pass flag (`SCE_PASS_<name_legacy>`) onto the matching
 * EEVEE render-pass bit (`EEVEE_RENDER_PASS_<name_eevee>`) in `result`.
 * NOTE: expands unhygienically — it assumes local variables `result` and
 * `view_layer` exist at the expansion site; the macro is `#undef`-ed right
 * after the enable-pass translation table that uses it. */
#define ENABLE_FROM_LEGACY(name_legacy, name_eevee) \
  SET_FLAG_FROM_TEST(result, \
                     (view_layer->passflag & SCE_PASS_##name_legacy) != 0, \
                     EEVEE_RENDER_PASS_##name_eevee);
222#undef ENABLE_FROM_LEGACY
250 if (pass_type == 0) {
255 if (viewport_compositor_needed_passes.
contains(pass_name)) {
256 viewport_compositor_enabled_passes |= pass_type;
261 return viewport_compositor_enabled_passes;
266 using namespace math;
268 Sampling &sampling = inst_.sampling;
269 Scene &scene = *inst_.scene;
276 passes_used_by_viewport_compositor, inst_.view_layer);
280 init_aovs(passes_used_by_viewport_compositor);
284 if (inst_.is_viewport()) {
287 viewport_compositor_enabled_passes_;
289 if (inst_.overlays_enabled() || inst_.gpencil_engine_enabled()) {
309 data_.scaling_factor = 1;
310 if (inst_.is_viewport()) {
314 data_.texture_lod_bias = 1.0f / (data_.scaling_factor * 1.5f);
320 output_rect = &fallback_rect;
323 display_extent = extent;
326 data_.offset =
int2(output_rect->
xmin, output_rect->
ymin);
327 data_.extent_inv = 1.0f /
float2(data_.extent);
328 data_.render_extent =
divide_ceil(data_.extent,
int2(data_.scaling_factor));
330 data_.render_extent += data_.overscan * 2;
333 data_.filter_radius = (sampling.
sample_count() == 1) ? 0.0f :
335 data_.cryptomatte_samples_len = inst_.view_layer->cryptomatte_levels;
338 if (inst_.is_viewport() &&
false ) {
339 data_.background_opacity = inst_.v3d->shading.studiolight_background;
357 if (enabled_passes_ & data_passes) {
360 if (enabled_passes_ & color_passes_1) {
363 if (enabled_passes_ & color_passes_2) {
366 if (enabled_passes_ & color_passes_3) {
387 int index = (enabled_passes_ & pass_type) ?
390 if (inst_.is_viewport() && inst_.v3d->shading.render_pass == pass_type) {
391 data_.display_id = index;
392 data_.display_storage_type = storage_type;
412 data_.aov_color_id = data_.color_len;
413 data_.aov_value_id = data_.value_len;
415 data_.aov_color_len =
aovs_info.color_len;
416 data_.aov_value_len =
aovs_info.value_len;
418 data_.color_len += data_.aov_color_len;
419 data_.value_len += data_.aov_value_len;
421 int cryptomatte_id = 0;
424 if (enabled_passes_ & pass_type) {
425 index = cryptomatte_id;
426 cryptomatte_id +=
divide_ceil_u(data_.cryptomatte_samples_len, 2u);
428 if (inst_.is_viewport() && inst_.v3d->shading.render_pass == pass_type) {
429 data_.display_id = index;
439 if ((enabled_passes_ &
447 int2 weight_extent = (inst_.camera.is_panoramic() || (data_.scaling_factor > 1)) ?
458 reset += depth_tx_.ensure_2d(depth_format, data_.extent);
459 reset += combined_tx_.current().ensure_2d(color_format, data_.extent);
460 reset += combined_tx_.next().ensure_2d(color_format, data_.extent);
462 reset += weight_tx_.current().ensure_2d_array(weight_format, weight_extent, 2);
463 reset += weight_tx_.next().ensure_2d_array(weight_format, weight_extent, 2);
464 reset += color_accum_tx_.ensure_2d_array(color_format,
465 (data_.color_len > 0) ? data_.extent :
int2(1),
466 (data_.color_len > 0) ? data_.color_len : 1);
467 reset += value_accum_tx_.ensure_2d_array(float_format,
468 (data_.value_len > 0) ? data_.extent :
int2(1),
469 (data_.value_len > 0) ? data_.value_len : 1);
473 reset += cryptomatte_tx_.ensure_2d_array(cryptomatte_format,
474 (cryptomatte_array_len > 0) ? data_.extent :
int2(1),
475 (cryptomatte_array_len > 0) ? cryptomatte_array_len :
479 data_.use_history = 0;
480 use_reprojection_ =
false;
483 color_accum_tx_.clear(
float4(0.0f));
484 value_accum_tx_.clear(
float4(0.0f));
485 combined_tx_.current().clear(
float4(0.0f));
486 weight_tx_.current().clear(
float4(0.0f));
487 depth_tx_.clear(
float4(0.0f));
488 cryptomatte_tx_.clear(
float4(0.0f));
500 use_compute_ = !inst_.is_viewport() ||
507 GPUShader *sh = inst_.shaders.static_shader_get(shader);
508 accumulate_ps_.init();
509 init_pass(accumulate_ps_, sh);
520 if (use_compute_ && inst_.is_viewport()) {
521 init_pass(copy_ps_, inst_.shaders.static_shader_get(
FILM_COPY));
526 const bool is_cryptomatte_pass_enabled = cryptomatte_layer_count > 0;
527 const bool do_cryptomatte_sorting = !inst_.is_viewport() ||
529 cryptomatte_post_ps_.init();
530 if (is_cryptomatte_pass_enabled && do_cryptomatte_sorting) {
533 cryptomatte_post_ps_.bind_image(
"cryptomatte_img", &cryptomatte_tx_);
534 cryptomatte_post_ps_.bind_resources(inst_.uniform_data);
535 cryptomatte_post_ps_.push_constant(
"cryptomatte_layer_len", cryptomatte_layer_count);
536 cryptomatte_post_ps_.push_constant(
"cryptomatte_samples_per_layer",
537 inst_.view_layer->cryptomatte_levels);
540 cryptomatte_post_ps_.dispatch(
int3(
UNPACK2(dispatch_size), 1));
548 VelocityModule &velocity = inst_.
velocity;
566 pass.
bind_ubo(
"camera_next", &(*velocity.camera_steps[step_next]));
570 pass.
bind_texture(
"rp_color_tx", &rbuffers.rp_color_tx);
571 pass.
bind_texture(
"rp_value_tx", &rbuffers.rp_value_tx);
572 pass.
bind_texture(
"cryptomatte_tx", &rbuffers.cryptomatte_tx);
576 pass.
bind_image(
"in_weight_img", &weight_tx_.current());
577 pass.
bind_image(
"out_weight_img", &weight_tx_.next());
579 pass.
bind_image(
"out_combined_img", &combined_tx_.next());
581 pass.
bind_image(
"color_accum_img", &color_accum_tx_);
582 pass.
bind_image(
"value_accum_img", &value_accum_tx_);
583 pass.
bind_image(
"cryptomatte_img", &cryptomatte_tx_);
589 use_reprojection_ = inst_.sampling.interactive_mode();
592 if (!use_reprojection_ && inst_.sampling.is_reset()) {
593 use_reprojection_ =
false;
594 data_.use_history =
false;
613 const float bias = 0.5f;
621 if (data_.scaling_factor > 1) {
626 const int scale = data_.scaling_factor;
630 int prime = (render_pixel_per_final_pixel / 2) - 1;
632 uint64_t index = (inst_.sampling.sample_index() * prime) % render_pixel_per_final_pixel;
637 jitter += ((
float2(pixel_co) + 0.5f) /
scale) - 0.5f;
644 if (inst_.is_viewport() && use_reprojection_) {
648 return enabled_passes_;
654 result += data_.cryptomatte_object_id == -1 ? 0 : 1;
655 result += data_.cryptomatte_asset_id == -1 ? 0 : 1;
656 result += data_.cryptomatte_material_id == -1 ? 0 : 1;
662 if (data_.cryptomatte_material_id != -1) {
665 if (data_.cryptomatte_asset_id != -1) {
668 if (data_.cryptomatte_object_id != -1) {
674void Film::update_sample_table()
688 for (
int y = 0;
y <= 1;
y++) {
689 for (
int x = 0;
x <= 1;
x++) {
700 data_.samples[0].texel =
int2(0, 0);
701 data_.samples[0].weight = 1.0f;
702 data_.samples_weight_total = 1.0f;
703 data_.samples_len = 1;
706 else if (data_.filter_radius < 2.20f) {
708 int closest_index = 0;
709 float closest_distance =
FLT_MAX;
710 data_.samples_weight_total = 0.0f;
712 for (
int y = -filter_radius_ceil;
y <= filter_radius_ceil;
y++) {
713 for (
int x = -filter_radius_ceil;
x <= filter_radius_ceil;
x++) {
716 if (distance_sqr < filter_radius_sqr) {
721 FilmSample &
sample = data_.samples[data_.samples_len];
724 data_.samples_weight_total +=
sample.weight;
726 if (distance_sqr < closest_distance) {
727 closest_distance = distance_sqr;
728 closest_index = data_.samples_len;
735 if (closest_index != 0) {
736 std::swap(data_.samples[closest_index], data_.samples[0]);
745 data_.samples_len = sample_table.size();
746 data_.samples_weight_total = 0.0f;
749 for (FilmSample &
sample : sample_table) {
761 data_.samples_weight_total +=
sample.weight;
769 if (inst_.is_viewport()) {
775 float4 clear_color = {0.0f, 0.0f, 0.0f, 0.0f};
781 update_sample_table();
783 combined_final_tx_ = combined_final_tx;
785 data_.display_only =
false;
786 inst_.uniform_data.push_update();
788 inst_.manager->submit(accumulate_ps_,
view);
789 inst_.manager->submit(copy_ps_,
view);
795 if (data_.use_history == 0) {
796 data_.use_history = 1;
805 inst_.render_buffers.acquire(
int2(1));
811 combined_final_tx_ = inst_.render_buffers.combined_tx;
813 data_.display_only =
true;
814 inst_.uniform_data.push_update();
820 inst_.render_buffers.release();
855 combined_tx_.current() :
858 (is_cryptomatte ? cryptomatte_tx_ :
859 (is_value ? value_accum_tx_ : color_accum_tx_));
867 return accum_tx.
layer_view(index + layer_offset);
920 viewport_compositor_enabled_passes_ & (1 << i));
921 if (pass_type == 0) {
945 const char *pass_name = pass_names[pass_offset].c_str();
949 PassSimple write_pass_ps = {
"Film.WriteViewportCompositorPass"};
951 write_pass_ps.
shader_set(inst_.shaders.static_shader_get(write_shader_type));
954 write_pass_ps.
bind_image(
"output_img", output_pass_texture);
957 inst_.manager->submit(write_pass_ps);
975 PassSimple write_pass_ps = {
"Film.WriteViewportCompositorPass"};
977 write_pass_ps.
shader_set(inst_.shaders.static_shader_get(write_shader_type));
980 write_pass_ps.
bind_image(
"output_img", output_pass_texture);
983 inst_.manager->submit(write_pass_ps);
int BKE_render_preview_pixel_size(const RenderData *r)
#define BLI_assert_msg(a, msg)
BLI_INLINE unsigned int BLI_hash_string(const char *str)
void * BLI_findstring(const struct ListBase *listbase, const char *id, int offset) ATTR_WARN_UNUSED_RESULT ATTR_NONNULL(1)
#define LISTBASE_FOREACH(type, var, list)
MINLINE uint divide_ceil_u(uint a, uint b)
MINLINE float pow2f(float x)
MINLINE float clamp_f(float value, float min, float max)
MINLINE int square_i(int a)
MINLINE float square_f(float a)
BLI_INLINE int BLI_rcti_size_y(const struct rcti *rct)
void BLI_rcti_init(struct rcti *rect, int xmin, int xmax, int ymin, int ymax)
BLI_INLINE int BLI_rcti_size_x(const struct rcti *rct)
bool BLI_rcti_is_empty(const struct rcti *rect)
#define SET_FLAG_FROM_TEST(value, test, flag)
@ VIEW_LAYER_CRYPTOMATTE_MATERIAL
@ VIEW_LAYER_CRYPTOMATTE_ASSET
@ VIEW_LAYER_CRYPTOMATTE_OBJECT
#define EEVEE_RENDER_PASS_MAX_BIT
@ EEVEE_RENDER_PASS_UNUSED_8
@ EEVEE_RENDER_PASS_CRYPTOMATTE_MATERIAL
@ EEVEE_RENDER_PASS_NORMAL
@ EEVEE_RENDER_PASS_UNUSED_14
@ EEVEE_RENDER_PASS_CRYPTOMATTE_OBJECT
@ EEVEE_RENDER_PASS_DIFFUSE_LIGHT
@ EEVEE_RENDER_PASS_VOLUME_LIGHT
@ EEVEE_RENDER_PASS_DIFFUSE_COLOR
@ EEVEE_RENDER_PASS_CRYPTOMATTE_ASSET
@ EEVEE_RENDER_PASS_ENVIRONMENT
@ EEVEE_RENDER_PASS_COMBINED
@ EEVEE_RENDER_PASS_SPECULAR_LIGHT
@ EEVEE_RENDER_PASS_VECTOR
@ EEVEE_RENDER_PASS_SPECULAR_COLOR
@ EEVEE_RENDER_PASS_TRANSPARENT
@ EEVEE_RENDER_PASS_SHADOW
@ EEVEE_RENDER_PASS_POSITION
struct ViewLayerAOV ViewLayerAOV
@ WO_MIST_INVERSE_QUADRATIC
void GPU_framebuffer_bind(GPUFrameBuffer *framebuffer)
void GPU_framebuffer_clear_color(GPUFrameBuffer *fb, const float clear_col[4])
void GPU_framebuffer_viewport_set(GPUFrameBuffer *framebuffer, int x, int y, int width, int height)
void GPU_memory_barrier(eGPUBarrier barrier)
@ GPU_BARRIER_TEXTURE_FETCH
@ GPU_BARRIER_SHADER_IMAGE_ACCESS
@ GPU_BARRIER_TEXTURE_UPDATE
int GPU_texture_height(const GPUTexture *texture)
int GPU_texture_width(const GPUTexture *texture)
void * GPU_texture_read(GPUTexture *texture, eGPUDataFormat data_format, int mip_level)
@ GPU_SAMPLER_FILTERING_LINEAR
eGPUTextureFormat GPU_texture_format(const GPUTexture *texture)
in reality light always falls off quadratically Particle Retrieve the data of the particle that spawned the object for example to give variation to multiple instances of an object Point Retrieve information about points in a point cloud Retrieve the edges of an object as it appears to Cycles topology will always appear triangulated Convert a blackbody temperature to an RGB value Normal Generate a perturbed normal from an RGB normal map image Typically used for faking highly detailed surfaces Generate an OSL shader from a file or text data block Image Sample an image file as a texture Gabor Generate Gabor noise Gradient Generate interpolated color and intensity values based on the input vector Magic Generate a psychedelic color texture Voronoi Generate Worley noise based on the distance to random points Typically used to generate textures such as or biological cells Brick Generate a procedural texture producing bricks Texture Retrieve multiple types of texture coordinates nTypically used as inputs for texture nodes Vector Convert a or normal between world
Group Output data from inside of a node group A color picker Mix two input colors RGB to Convert a color s luminance to a grayscale value NORMAL
struct GPUShader GPUShader
void reset()
clear internal cached data and reset random seed
bool contains(const Key &key) const
void append(const T &value)
const T & last(const int64_t n=0) const
bool contains(const Key &key) const
void submit(PassSimple &pass, View &view)
void acquire(int2 extent, eGPUTextureFormat format, eGPUTextureUsage usage=GPU_TEXTURE_USAGE_GENERAL)
bool ensure_layer_views(bool cube_as_array=false)
GPUTexture * layer_view(int layer)
void bind_texture(const char *name, GPUTexture *texture, GPUSamplerState state=sampler_auto)
void bind_resources(U &resources)
void bind_image(const char *name, GPUTexture *image)
void specialize_constant(GPUShader *shader, const char *name, const float &data)
void dispatch(int group_len)
void barrier(eGPUBarrier type)
void state_set(DRWState state, int clip_plane_count=0)
void bind_ubo(const char *name, GPUUniformBuf *buffer)
void push_constant(const char *name, const float &data)
void shader_set(GPUShader *shader)
const CameraData & data_get() const
float2 pixel_jitter_get() const
int pass_id_get(eViewLayerEEVEEPassType pass_type) const
static bool pass_is_float3(eViewLayerEEVEEPassType pass_type)
GPUTexture * get_aov_texture(ViewLayerAOV *aov)
void accumulate(View &view, GPUTexture *combined_final_tx)
static const Vector< std::string > pass_to_render_pass_names(eViewLayerEEVEEPassType pass_type, const ViewLayer *view_layer)
int cryptomatte_layer_max_get() const
int cryptomatte_layer_len_get() const
static ePassStorageType pass_storage_type(eViewLayerEEVEEPassType pass_type)
float * read_pass(eViewLayerEEVEEPassType pass_type, int layer_offset)
void write_viewport_compositor_passes()
static int overscan_pixels_get(float overscan, int2 extent)
bool is_viewport_compositor_enabled() const
AOVsInfoDataBuf aovs_info
void init(const int2 &full_extent, const rcti *output_rect)
float * read_aov(ViewLayerAOV *aov)
GPUTexture * get_pass_texture(eViewLayerEEVEEPassType pass_type, int layer_offset)
eViewLayerEEVEEPassType enabled_passes_get() const
static constexpr bool use_box_filter
RenderBuffers render_buffers
UniformDataModule uniform_data
static float2 sample_disk(const float2 &rand)
uint64_t sample_count() const
static float2 sample_spiral(const float2 &rand)
local_group_size(16, 16) .push_constant(Type b
additional_info("compositor_sum_float_shared") .push_constant(Type additional_info("compositor_sum_float_shared") .push_constant(Type GPU_RGBA32F
blender::draw::Manager * DRW_manager_get()
DefaultFramebufferList * DRW_viewport_framebuffer_list_get()
blender::draw::TextureFromPool & DRW_viewport_pass_texture_get(const char *pass_name)
DefaultTextureList * DRW_viewport_texture_list_get()
bool DRW_is_viewport_compositor_enabled()
const DRWView * DRW_view_default_get()
#define ENABLE_FROM_LEGACY(name_legacy, name_eevee)
draw_view in_light_buf[] float
draw_view push_constant(Type::INT, "radiance_src") .push_constant(Type capture_info_buf storage_buf(1, Qualifier::READ, "ObjectBounds", "bounds_buf[]") .push_constant(Type draw_view int
#define FILM_PRECOMP_SAMPLE_MAX
DO_INLINE void filter(lfVector *V, fmatrix3x3 *S)
Set< std::string > get_used_passes(const Scene &scene, const ViewLayer *view_layer)
detail::Pass< command::DrawCommandBuf > PassSimple
@ FILM_PASS_CONVERT_COMBINED
@ FILM_PASS_CONVERT_CRYPTOMATTE
@ FILM_PASS_CONVERT_COLOR
@ FILM_PASS_CONVERT_VALUE
@ FILM_PASS_CONVERT_DEPTH
static eShaderType get_write_pass_shader_type(eViewLayerEEVEEPassType pass_type)
static float film_filter_weight(float filter_radius, float sample_distance_sqr)
bool operator!=(const CameraData &a, const CameraData &b)
static eShaderType get_aov_write_pass_shader_type(const ViewLayerAOV *aov)
static eViewLayerEEVEEPassType enabled_passes(const ViewLayer *view_layer)
bool operator==(const CameraData &a, const CameraData &b)
static eViewLayerEEVEEPassType get_viewport_compositor_enabled_passes(const Set< std::string > &viewport_compositor_needed_passes, const ViewLayer *view_layer)
@ PASS_STORAGE_CRYPTOMATTE
@ PASS_CATEGORY_CRYPTOMATTE
T length_squared(const VecBase< T, Size > &a)
MatBase< T, NumCol, NumRow > scale(const MatBase< T, NumCol, NumRow > &mat, const VectorT &scale)
VecBase< T, Size > divide_ceil(const VecBase< T, Size > &a, const VecBase< T, Size > &b)
VecBase< uint32_t, 4 > uint4
VecBase< float, 4 > float4
VecBase< int32_t, 2 > int2
VecBase< float, 2 > float2
VecBase< int32_t, 3 > int3
VecBase< float, 3 > float3
unsigned __int64 uint64_t
GPUFrameBuffer * default_fb
ColorManagedViewSettings view_settings
struct ViewLayerEEVEE eevee
float samples_weight_total
FilmSample samples[FILM_PRECOMP_SAMPLE_MAX]