Spaces: Running on Zero
fix output of inference
app.py CHANGED
@@ -129,9 +129,10 @@ def run_model(target_dir, model) -> dict:
 
     # Generate world points from depth map
     print("Computing world points from depth map...")
-    depth_map = predictions["depth"]  # (S, H, W, 1)
-    world_points = unproject_depth_map_to_point_map(depth_map, predictions["extrinsic"], predictions["intrinsic"])
-    predictions["world_points_from_depth"] = world_points
+    #depth_map = predictions["depth"]  # (S, H, W, 1)
+    #world_points = unproject_depth_map_to_point_map(depth_map, predictions["extrinsic"], predictions["intrinsic"])
+    #predictions["world_points_from_depth"] = world_points
+    predictions["world_points_from_depth"] = predictions["world_points"]
 
     # Clean up
     torch.cuda.empty_cache()
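For context, the call this commit comments out back-projects the predicted depth map into world coordinates using the camera intrinsics and extrinsics; the fix instead reuses the model's directly predicted points map, predictions["world_points"], as the "world_points_from_depth" output. Below is a minimal, hypothetical sketch of that kind of pinhole unprojection. The input shapes follow the (S, H, W, 1) comment in the diff, but the extrinsic layout and camera conventions are assumptions for illustration, not the actual contract of unproject_depth_map_to_point_map.

# Sketch only: illustrates the depth-to-world-points unprojection that the
# Space no longer performs. Conventions (world-to-camera extrinsics [R | t],
# extrinsic shape (S, 3, 4), intrinsic shape (S, 3, 3)) are assumptions.
import numpy as np

def unproject_depth_sketch(depth_map, extrinsic, intrinsic):
    """depth_map: (S, H, W, 1) -> world points of shape (S, H, W, 3)."""
    S, H, W, _ = depth_map.shape
    # Pixel grid in homogeneous coordinates, shape (3, H*W).
    u, v = np.meshgrid(np.arange(W), np.arange(H))
    pix = np.stack([u.ravel(), v.ravel(), np.ones(H * W)], axis=0)

    out = np.empty((S, H, W, 3))
    for s in range(S):
        K_inv = np.linalg.inv(intrinsic[s])
        # Back-project pixels into camera space, scaled by per-pixel depth.
        cam = (K_inv @ pix) * depth_map[s].reshape(1, -1)   # (3, H*W)
        R, t = extrinsic[s][:, :3], extrinsic[s][:, 3:]
        # Camera -> world: X_w = R^T (X_c - t) for world-to-camera extrinsics.
        world = R.T @ (cam - t)                              # (3, H*W)
        out[s] = world.T.reshape(H, W, 3)
    return out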