How to use python api for denoising


I am currently using the Blender 2.81a Python API to render the barbershop demo on a server. Although I have set the sample count to 3000, the output images are very noisy.

I tried to set use_denoising with:

bpy.context.scene.view_layers['View Layer'].cycles.use_denoising = True

However, when I start rendering, it prints:

Fra:1 Mem:5062.57M (0.00M, Peak 5481.01M) | Time:03:00.34 | Remaining:01:31.03 | Mem:2138.12M, Peak:2138.12M | Scene, RenderLayer | Rendered 0/2 Tiles, Sample 28/32, Denoised 0 tiles

and the output images are still very noisy.

So, I was wondering how I could use the denoising API correctly. The following is my Python script for your consideration.

import bpy
import os
import numpy as np

# Remove all the cameras in the scene.
for i, item in enumerate(
    if item.type == 'CAMERA':[i].select_set(True)

# Create a new camera. Like every newly created object, the camera is automatically assigned to     `bpy.context.object`.
cam = bpy.context.object

# Rename the new camera (not necessary). = 'Camera_360'

# Turn the camera into an omnidirectional one. = 'PANO' = 'EQUIRECTANGULAR'

# Set the rendering range: every object between the two bounds below will be rendered. = 0.0 = 1000.0
# The upper bound should be specified for each blender project:
# `1000` may represent a large distance in one project and a small one somewhere else.

# Camera resolution (e.g., (720, 480), (1920, 1080)).
resolutions = [(1024, 512)]

# Camera position/s in the scene.
camera_coordinates = [(1.5, 6.0, 1)]

# Define the camera field of view. = -np.pi/2 = np.pi/2 = -np.pi = np.pi

# Define the camera rotation.
# The rotation follows the rule of the right hand.
# TODO: Check the previous statement.
cam.rotation_euler[0] = np.pi/2      # Along x.
cam.rotation_euler[1] = 0            # Along y.
cam.rotation_euler[2] = 0            # Along z.

# Specify the device.
computation_type = 'CUDA'
gpu_id = (0,)

# Set the number of rendering samples.
bpy.context.scene.cycles.samples = 3000

# Select the computing device.
prefs = bpy.context.preferences.addons['cycles'].preferences
devices = prefs.get_devices()
if computation_type == 'CUDA':
    bpy.context.scene.cycles.device = 'GPU'
    prefs.compute_device_type = 'CUDA'
for i, gpu in enumerate(devices[0]):
    gpu.use = (i in gpu_id)

# Assign the new camera to the scene. = cam

# Activate the use of nodes.
bpy.context.scene.use_nodes = True

# Render an image for any pair ((width, height), (x, y, z)).
for width, height in resolutions:
    for x, y, z in camera_coordinates:
        # Set the camera parameters.
        bpy.context.scene.render.resolution_percentage = 100
        bpy.context.scene.render.resolution_x = width
        bpy.context.scene.render.resolution_y = height
        bpy.context.scene.render.use_compositing = True
        bpy.context.scene.view_layers["RenderLayer"].cycles.use_denoising = True = x = y = z
        tree = bpy.context.scene.node_tree
        links = tree.links
        rl ="CompositorNodeRLayers")
        # Depth map.
        fileDepthOutput ="CompositorNodeOutputFile")
        fileDepthOutput.format.file_format = 'OPEN_EXR'
        fileDepthOutput.base_path = 'outputImages/{w}_{h}'.format(w=width, h=height)
        fileDepthId = 'test_{x}_{y}_{z}_{w}_{h}_depth_'.format(x=x, y=y, z=z, w=width, h=height)
        fileDepthPath = '{}/{}.exr'.format(fileDepthOutput.base_path, fileDepthId)
        fileDepthOutputSocket =['Depth'], fileDepthOutputSocket)
        # Texture.
        fileTextureOutput ="CompositorNodeOutputFile")
        fileTextureOutput.format.file_format = 'PNG'
        fileTextureOutput.base_path = 'outputImages/{w}_{h}'.format(w=width, h=height)
        fileTextureOutputId = 'test_{x}_{y}_{z}_{w}_{h}_'.format(x=x, y=y, z=z, w=width, h=height)
        fileTextureOutputPath = '{}/{}.png'.format(fileTextureOutput.base_path, fileTextureOutputId)
        fileTextureOutputSocket =['Image'], fileTextureOutputSocket)
        # Launch the rendering.
        # Clean the created nodes.

Any suggestions would be greatly appreciated.

Best regards,