Render to offscreen buffer only one node works

I’m rendering to an offscreen buffer, but only one node works at a time. I have a second ortho camera set up to render my_render_node with one textured card. This works, but when I add a second card, that second card is invisible. If I remove the first card, the second one renders.

If I do this to the second card, it renders the second node all white.

        card2.setTransparency(TransparencyAttrib.M_alpha)
        card2.setAlphaScale(blend_amount)

Full code

from direct.showbase.ShowBase import ShowBase
from panda3d.core import Filename, TransparencyAttrib
from panda3d.core import FrameBufferProperties, WindowProperties, GraphicsPipe, Texture, GraphicsOutput, LColor, \
    CardMaker, GraphicsPipeSelection, Shader, PNMImage, PNMImageHeader, NodePath, Camera, OrthographicLens, LTexCoord
import time
class MixingImages(ShowBase):
    """Blend two images two different ways and time each approach.

    1. CPU blend: ``PNMImage.blend_sub_image`` (implemented in C++).
    2. GPU blend: render textured cards into an offscreen buffer, copy the
       framebuffer back to RAM, and save it once it becomes available.
    """

    def __init__(self):
        ShowBase.__init__(self)

        # --- First blend: on the CPU, via PNMImage (C++ under the hood) ---
        blend_amount = 0.5
        img = PNMImage()
        img_filename = "assets/write_in_shader_read_in_cpu.png"
        print(img.read(img_filename))  # read() reports whether loading succeeded
        img2 = PNMImage()
        img2_filename = "assets/PNMImagePerlinNoise.png"
        img2.read(img2_filename)

        start_time = time.perf_counter()
        img.blend_sub_image(img2, 0, 0, pixel_scale=blend_amount)
        end_time = time.perf_counter()
        print(f"{end_time - start_time} seconds to blend images in c++")
        img.write("assets/MixingImages.png")

        # --- Second blend: on the GPU, rendering into an offscreen buffer ---
        img.read(img_filename)  # read it again to clear out the changes
        # 1 set up the buffer
        # https://docs.panda3d.org/1.10/python/programming/rendering-process/creating-windows-and-buffers
        pipe = self.pipe  # GraphicsPipeSelection.get_global_ptr().make_module_pipe("pandagl")
        name = "shader buffer"
        # FIX: outputs are rendered in ascending sort order and the main
        # window's sort is 0, so give the buffer a negative sort to make sure
        # it is rendered before the window each frame.
        sort = -100
        fb_prop = FrameBufferProperties()  # getDefault() results in an error
        # Single 16-bit red channel.  NOTE(review): no alpha bits are
        # requested here, so alpha blending inside this buffer has no
        # destination alpha to work with — likely related to the "renders all
        # white" symptom; confirm against the shader's output format.
        fb_prop.set_rgba_bits(16, 0, 0, 0)

        # Size the buffer to match the source image, pixel for pixel.
        img_size_x = img.get_x_size()
        img_size_y = img.get_y_size()
        win_prop = WindowProperties(size=(img_size_x, img_size_y))
        flags = GraphicsPipe.BFRefuseWindow  # offscreen buffer, never a window
        # gsg and host are optional, but give more options for the buffer properties
        buffer = self.graphicsEngine.makeOutput(pipe, name=name, sort=sort, fb_prop=fb_prop, win_prop=win_prop,
                                                flags=flags, gsg=self.win.get_gsg(), host=self.win)

        # 2 create a texture that receives the render and mirrors it in RAM
        texture = Texture("texture_in_memory")
        buffer.add_render_texture(texture, GraphicsOutput.RTM_copy_ram)

        # Root of the scene graph that will be rendered into the buffer.
        my_render_node = NodePath("my_render_node")

        # Set up a camera with an orthographic lens (2D).
        camera_class = Camera("camera")
        lens = OrthographicLens()
        # Camera zoom: one film unit per pixel of the source image.
        lens.set_film_size(img_size_x, img_size_y)
        lens.set_near_far(-1000, 1000)
        camera_class.set_lens(lens)
        # Attach the camera to the rendering node
        buffer_camera = NodePath(camera_class)
        buffer_camera.reparentTo(my_render_node)
        # Create a display region in the buffer and point it at the camera.
        render_region = buffer.make_display_region()
        render_region.camera = buffer_camera

        # 3 make some geometry to render
        cm = CardMaker('card')
        cm.set_frame(0.0, img_size_x, 0.0, img_size_y)
        img_max = max(img_size_x, img_size_y)
        cm.set_uv_range(LTexCoord(0.0, 0.0), LTexCoord(img_size_x / img_max, img_size_y / img_max))
        card = my_render_node.attachNewNode(cm.generate())
        # card.setTexture(texture)
        card.setPos(-img_size_x / 2.0, 25.0, -img_size_y / 2.0)  # center the card on the camera axis

        # 4 load the shaders for the first card
        shader = Shader.load(Shader.SL_GLSL,
                             vertex="shaders/write_in_shader_read_in_cpu.vert",
                             fragment="shaders/write_in_shader_read_in_cpu.frag")
        card.setShader(shader)

        # make a second card with a texture to blend
        cm2 = CardMaker('card2')
        cm2.set_frame(0.0, img_size_x, 0.0, img_size_y)
        cm2.set_uv_range(LTexCoord(0.0, 0.0), LTexCoord(img_size_x / img_max, img_size_y / img_max))
        print(f"Is img2 valid? {img2.is_valid()}")
        # img2.add_alpha()
        # img2.alpha_fill(blend_amount)
        card2 = my_render_node.attachNewNode(cm2.generate())
        # card2.setTransparency(TransparencyAttrib.M_alpha)
        # card2.setAlphaScale(blend_amount)
        texture2 = Texture("texture_from_image")
        texture2.load(img2)
        card2.setTexture(texture2)
        card2.setPos(-img_size_x / 4.0, 15.0, -img_size_y / 4.0)

        start_time = time.perf_counter()

        def readTexture(task):
            """Poll until the buffer's texture has data in RAM, then save it.

            Returns task.cont while waiting and task.done once the image has
            been written out.
            """
            peek = texture.peek()
            if peek:
                # FIX: use a distinct name instead of shadowing the outer `img`.
                out_img = PNMImage(img_size_x, img_size_y, num_channels=1, maxval=65535)
                out_img.set_color_type(PNMImageHeader.CT_grayscale)
                texture.store(out_img)
                end_time = time.perf_counter()
                print(f"{end_time - start_time} seconds to render the blended images in the GPU")
                out_img.write('assets/MixingImagesGPU.png')
                print(buffer.get_fb_properties())
                print(texture.get_format())
                print(buffer.get_texture(0))
                return task.done
            return task.cont

        self.taskMgr.add(readTexture)

# Standard script-entry guard: only launch the app (which opens a window and
# starts the event loop) when run directly, not when imported.
if __name__ == "__main__":
    app = MixingImages()
    app.run()

If you want to render the second card, then you need a second rendering pass.

Or create another node and configure its display region by analogy with the first node.

1 Like

Thanks for the help.

I thought the camera would render the whole second scene. What I’m trying to do is blend the two images in the GPU. The front will be partially transparent. Can multi-pass do that?

It sounds like one pass will overwrite the other, if I use the same buffer. If I don’t use the same buffer, I’m back to blending two buffers in C++, which is slow on larger images.

The question of how to implement what you want is a broader one, but the bottom line is that you must have both textures ready before mixing them. Which specific path you choose is up to you, because all of the textures being mixed must be passed to the shader at the same time.

1 Like