I’m porting an OSL line shader to Cycles SVM, and while I got depth- and object-based line detection working, I can’t figure out how to retrieve the normal from a scene-ray intersection test.
The Intersection struct filled by scene_intersect() doesn’t write anything to Intersection.Ng.
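For reference, this is roughly what the Intersection struct looks like in kernel/kernel_types.h (loosely transcribed from my checkout, so it may not match other versions exactly); as far as I can tell, the Ng member only exists, and only gets filled in, on the Embree code path:

typedef struct Intersection {
#ifdef __EMBREE__
  float3 Ng; /* Geometric normal, written by the Embree intersection code only. */
#endif
  float t, u, v; /* Hit distance along the ray and barycentric coordinates. */
  int prim;      /* Primitive index. */
  int object;    /* Object index, or OBJECT_NONE. */
  int type;
} Intersection;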
I have tried to retrieve the normal with the triangle_point_normal() function instead, but I get “primitive index out of range” errors.
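The signature I’m calling (from kernel/geom/geom_triangle.h, again paraphrased from my checkout) is:

ccl_device_inline void triangle_point_normal(KernelGlobals *kg,
                                             int object,
                                             int prim,
                                             float u,
                                             float v,
                                             float3 *P,
                                             float3 *Ng,
                                             int *shader);

Internally it seems to use prim directly to look up the triangle’s vertex indices, which is presumably where the out-of-range fetch happens.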
Here’s the SVM shader:
CCL_NAMESPACE_BEGIN

#ifdef __SHADER_RAYTRACE__

typedef struct OutlineResult {
  float max_depth;
  float min_dot;
  float contour;
} OutlineResult;

ccl_device_noinline OutlineResult svm_outline(KernelGlobals *kg,
                                              ShaderData *sd,
                                              ccl_addr_space PathState *state,
                                              float width)
{
  OutlineResult result;
  result.max_depth = 0.0f;
  result.min_dot = 1.0f;
  result.contour = 0.0f;

  /* Early out if no sampling needed. */
  if (/*num_samples < 1 ||*/ sd->object == OBJECT_NONE) {
    return result;
  }

  /* Can't raytrace from shaders like displacement, before BVH exists. */
  if (kernel_data.bvh.bvh_layout == BVH_LAYOUT_NONE) {
    return result;
  }

  /* Screen-space sample offsets for the four probe rays around P. */
  float3 offsets[4] = {make_float3(-1, -1, 0),
                       make_float3(-1, 1, 0),
                       make_float3(1, -1, 0),
                       make_float3(1, 1, 0)};

  float max_depth = 0.0f;
  float min_dot = 1.0f;
  bool contour = false;

  /* Reconstruct the incoming ray origin from the shading point. */
  float3 trace_start = sd->P + sd->I * sd->ray_length;
  float P_distance = sd->ray_length;

  for (int i = 0; i < 4; i++) {
    /* Offset the target around P along the ray differentials, scaled by the line width. */
    float3 trace_target = sd->P + offsets[i].x * width * sd->dI.dx +
                          offsets[i].y * width * sd->dI.dy;
    float3 trace_direction = normalize(trace_target - trace_start);

    Ray ray;
    ray.P = trace_start;
    ray.D = trace_direction;
    ray.t = 1e12f; /* TODO */
    ray.time = sd->time;
    ray.dP = sd->dP;
    ray.dD = differential3_zero();

    Intersection intersection;
    if (scene_intersect(kg, &ray, PATH_RAY_CAMERA, &intersection)) {
      float delta_depth = intersection.t - P_distance;
      max_depth = max(max_depth, delta_depth);

      // if (dot(trace_direction, sd->Ng) > dot(trace_direction, intersection.Ng))
      // if (delta_depth > 0)
      {
        Transform hit_transform = object_fetch_transform(
            kg, intersection.object, OBJECT_TRANSFORM);

        /* Never filled in by scene_intersect(), so this prints uninitialized values. */
        printf("Ng: %f, %f, %f\n", intersection.Ng.x, intersection.Ng.y, intersection.Ng.z);

        /* This is the part that triggers "primitive index out of range" when enabled. */
        /*
        float3 hit_P;
        float3 hit_Ng;
        int hit_shader;
        triangle_point_normal(kg,
                              intersection.object,
                              intersection.prim,
                              intersection.u,
                              intersection.v,
                              &hit_P,
                              &hit_Ng,
                              &hit_shader);
        float3 hit_normal = transform_direction(&hit_transform, hit_Ng);
        hit_normal = normalize(hit_normal);
        min_dot = min(min_dot, dot(sd->Ng, hit_normal));
        */
        /*
        printf("Ng: %f, %f, %f, WNg: %f, %f, %f\n",
               hit_Ng.x,
               hit_Ng.y,
               hit_Ng.z,
               hit_normal.x,
               hit_normal.y,
               hit_normal.z);
        */

        if (intersection.object != sd->object) {
          contour = true;
        }
      }
    }
    else {
      /* Ray escaped the scene: treat as a maximal depth/normal discontinuity. */
      max_depth = 1e12f;
      min_dot = -1.0f;
      contour = true;
    }
  }

  result.max_depth = max_depth;
  result.min_dot = min_dot;
  if (contour) {
    result.contour = 1.0f;
  }
  return result;
}

ccl_device void svm_node_outline(
    KernelGlobals *kg, ShaderData *sd, ccl_addr_space PathState *state, float *stack, uint4 node)
{
  uint width_offset, depth_offset, dot_offset, object_offset;
  svm_unpack_node_uchar4(node.y, &width_offset, &depth_offset, &dot_offset, &object_offset);

  float width = stack_load_float(stack, width_offset);
  OutlineResult result = svm_outline(kg, sd, state, width);

  if (stack_valid(depth_offset)) {
    stack_store_float(stack, depth_offset, result.max_depth);
  }
  if (stack_valid(dot_offset)) {
    stack_store_float(stack, dot_offset, result.min_dot);
  }
  if (stack_valid(object_offset)) {
    stack_store_float(stack, object_offset, result.contour);
  }
}

#endif /* __SHADER_RAYTRACE__ */

CCL_NAMESPACE_END
I’m using the AO and Bevel nodes as reference, but I guess I’m missing something.
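One thing I did notice in kernel/svm/svm_bevel.h is that it doesn’t pass the raw intersection indices straight to triangle_point_normal: it first remaps them through the __prim_index and __prim_object tables. Loosely transcribed (so the exact code may differ), it does something like this:

/* Remap the BVH-local hit indices to scene indices before fetching
 * triangle data, the way svm_bevel appears to do it. */
int hit_object = (intersection.object == OBJECT_NONE) ?
                     kernel_tex_fetch(__prim_object, intersection.prim) :
                     intersection.object;
int hit_prim = kernel_tex_fetch(__prim_index, intersection.prim);

float3 hit_P, hit_Ng;
int hit_shader;
triangle_point_normal(
    kg, hit_object, hit_prim, intersection.u, intersection.v, &hit_P, &hit_Ng, &hit_shader);

Is that remap what I’m missing, or is there a more direct way to get the geometric normal at the hit point?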