Dataset Viewer
Auto-converted to Parquet

Columns (from the viewer's schema row):
- images: list of images (length 1 per row)
- data_source: string (1 distinct value)
- reward_model: dict
- extra_info: dict
- prompt: list of chat messages (length 2 per row)
- ability: string (1 distinct value)
- env_name: string (1 distinct value)
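The records below are easier to inspect programmatically than in the viewer. A minimal sketch using the Hugging Face `datasets` library, assuming the dataset is published on the Hub; the repository ID below is a placeholder, not the real name:

```python
from datasets import load_dataset

# Placeholder Hub ID -- substitute the actual repository name.
ds = load_dataset("org-name/sealvqa-spatial", split="train")

row = ds[0]
print(row["data_source"])                      # "sealvqa_spatial"
print(row["reward_model"]["ground_truth"])     # reference answer string
print(row["extra_info"]["question"])           # the question asked in the prompt
for message in row["prompt"]:                  # list of {"content", "role"} messages
    print(message["role"], message["content"][:80])
```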
sealvqa_spatial
{ "ground_truth": "No, the backpack is not below the chair." }
{ "answer": "No, the backpack is not below the chair.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000139500.jpg", "index": "sealvqa_spatial_8217", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the backpack on the left or right side of the chair?" }, { "from": "gpt", "value": "The backpack is on the left side of the chair." }, { "from": "human", "value": "Is the backpack below the chair?" }, { "from": "gpt", "value": "No, the backpack is not below the chair." } ], "question": "Is the backpack below the chair?", "question_id": "sealvqa_spatial_8217", "search": 1, "target_instances": [ { "bbox": [ 16.37, 172.33, 34.04, 45.97 ], "instance_id": "", "name": "backpack" }, { "bbox": [ 114.02, 220.25, 49.44, 52.49 ], "instance_id": "", "name": "chair" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the backpack below the chair?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
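Each row's ground truth can be sanity-checked against the boxes in extra_info.target_instances. A minimal sketch, assuming the usual image convention of a top-left origin (y grows downward) and, per bbox_format_description, boxes in absolute-pixel [x, y, width, height]; the values are copied from the record above:

```python
def center(bbox):
    # Center point of an [x, y, width, height] box.
    x, y, w, h = bbox
    return (x + w / 2, y + h / 2)

backpack = [16.37, 172.33, 34.04, 45.97]
chair = [114.02, 220.25, 49.44, 52.49]

bx, by = center(backpack)   # (33.39, 195.315)
cx, cy = center(chair)      # (138.74, 246.495)

# Larger y means lower in the image.
print("backpack below chair:", by > cy)    # False -> "No, the backpack is not below the chair."
print("backpack left of chair:", bx < cx)  # True  -> matches the first conversation turn
```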
sealvqa_spatial
{ "ground_truth": "Yes, the chair is below the bird." }
{ "answer": "Yes, the chair is below the bird.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000470378.jpg", "index": "sealvqa_spatial_11512", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the chair to the left or right of the bird?" }, { "from": "gpt", "value": "The chair is to the right of the bird." }, { "from": "human", "value": "Is the chair below the bird?" }, { "from": "gpt", "value": "Yes, the chair is below the bird." } ], "question": "Is the chair below the bird?", "question_id": "sealvqa_spatial_11512", "search": 1, "target_instances": [ { "bbox": [ 99.66, 173.83, 31.79, 20.35 ], "instance_id": "", "name": "bird" }, { "bbox": [ 260.43, 226.84, 98.63, 78.75 ], "instance_id": "", "name": "chair" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the chair below the bird?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
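The user prompt asks the model to reply in a strict tagged format: <think>...</think>, optional <tool_call>...</tool_call> blocks, then <answer>...</answer>. A minimal sketch of one way to parse such a reply; this parser is an assumption about downstream consumption, not something the dataset ships:

```python
import re

def parse_response(text: str) -> dict:
    # Extract the three tagged segments the prompt format requires.
    think = re.search(r"<think>(.*?)</think>", text, re.DOTALL)
    calls = re.findall(r"<tool_call>(.*?)</tool_call>", text, re.DOTALL)
    answer = re.search(r"<answer>(.*?)</answer>", text, re.DOTALL)
    return {
        "think": think.group(1).strip() if think else None,
        "tool_calls": [c.strip() for c in calls],
        "answer": answer.group(1).strip() if answer else None,
    }

sample = "<think>The chair sits lower in the frame.</think> <answer>Yes.</answer>"
print(parse_response(sample))
# {'think': 'The chair sits lower in the frame.', 'tool_calls': [], 'answer': 'Yes.'}
```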
sealvqa_spatial
{ "ground_truth": "No, the parking meter is not below the book." }
{ "answer": "No, the parking meter is not below the book.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000116663.jpg", "index": "sealvqa_spatial_8485", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the parking meter on the left or right side of the book?" }, { "from": "gpt", "value": "The parking meter is on the left side of the book." }, { "from": "human", "value": "Is the parking meter below the book?" }, { "from": "gpt", "value": "No, the parking meter is not below the book." } ], "question": "Is the parking meter below the book?", "question_id": "sealvqa_spatial_8485", "search": 1, "target_instances": [ { "bbox": [ 526.79, 244.69, 113.21, 69.08 ], "instance_id": "", "name": "book" }, { "bbox": [ 39.08, 24.07, 23.39, 91.85 ], "instance_id": "", "name": "parking meter" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the parking meter below the book?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the parking meter is to the left of the bench." }
{ "answer": "Yes, the parking meter is to the left of the bench.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000150267.jpg", "index": "sealvqa_spatial_1927", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the parking meter to the left of the bench?" }, { "from": "gpt", "value": "Yes, the parking meter is to the left of the bench." } ], "question": "Is the parking meter to the left of the bench?", "question_id": "sealvqa_spatial_1927", "search": 1, "target_instances": [ { "bbox": [ 242.1, 215.55, 47.49, 9.11 ], "instance_id": "", "name": "bench" }, { "bbox": [ 167.1, 213.87, 42.85, 65.11 ], "instance_id": "", "name": "parking meter" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the parking meter to the left of the bench?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the car is not on the left side of the person." }
{ "answer": "No, the car is not on the left side of the person.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000228461.jpg", "index": "sealvqa_spatial_21971", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the car on the left side of the person?" }, { "from": "gpt", "value": "No, the car is not on the left side of the person." } ], "question": "Is the car on the left side of the person?", "question_id": "sealvqa_spatial_21971", "search": 1, "target_instances": [ { "bbox": [ 16.01, 337.8, 6.55, 7.06 ], "instance_id": "", "name": "car" }, { "bbox": [ 3.85, 336.58, 4.44, 13.33 ], "instance_id": "", "name": "person" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the car on the left side of the person?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The skateboard is on the left side of the suitcase." }
{ "answer": "The skateboard is on the left side of the suitcase.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000239580.jpg", "index": "sealvqa_spatial_25290", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the skateboard on the left or right side of the suitcase?" }, { "from": "gpt", "value": "The skateboard is on the left side of the suitcase." } ], "question": "Is the skateboard on the left or right side of the suitcase?", "question_id": "sealvqa_spatial_25290", "search": 1, "target_instances": [ { "bbox": [ 188.21, 342.4, 70.29, 19.28 ], "instance_id": "", "name": "skateboard" }, { "bbox": [ 294.19, 341.63, 22.99, 28.85 ], "instance_id": "", "name": "suitcase" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the skateboard on the left or right side of the suitcase?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the backpack is not above the stop sign." }
{ "answer": "No, the backpack is not above the stop sign.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000341645.jpg", "index": "sealvqa_spatial_4204", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the stop sign to the left or right of the backpack?" }, { "from": "gpt", "value": "The stop sign is to the left of the backpack." }, { "from": "human", "value": "Is the backpack above the stop sign?" }, { "from": "gpt", "value": "No, the backpack is not above the stop sign." } ], "question": "Is the backpack above the stop sign?", "question_id": "sealvqa_spatial_4204", "search": 1, "target_instances": [ { "bbox": [ 384.74, 56.08, 82.98, 104.01 ], "instance_id": "", "name": "backpack" }, { "bbox": [ 182.18, 0.32, 61.3, 23.91 ], "instance_id": "", "name": "stop sign" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the backpack above the stop sign?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the cup is above the dining table." }
{ "answer": "Yes, the cup is above the dining table.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000432519.jpg", "index": "sealvqa_spatial_4137", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup to the right of the dining table?" }, { "from": "gpt", "value": "Yes, the cup is to the right of the dining table." }, { "from": "human", "value": "Is the cup above the dining table?" }, { "from": "gpt", "value": "Yes, the cup is above the dining table." } ], "question": "Is the cup above the dining table?", "question_id": "sealvqa_spatial_4137", "search": 1, "target_instances": [ { "bbox": [ 392.15, 87.3, 27.42, 32.98 ], "instance_id": "", "name": "cup" }, { "bbox": [ 242.6, 190.34, 90.89, 96.63 ], "instance_id": "", "name": "dining table" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup above the dining table?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The cell phone is below the backpack." }
{ "answer": "The cell phone is below the backpack.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000012236.jpg", "index": "sealvqa_spatial_3012", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the backpack on the right side of the cell phone?" }, { "from": "gpt", "value": "No, the backpack is not on the right side of the cell phone." }, { "from": "human", "value": "Is the cell phone above or below the backpack?" }, { "from": "gpt", "value": "The cell phone is below the backpack." } ], "question": "Is the cell phone above or below the backpack?", "question_id": "sealvqa_spatial_3012", "search": 1, "target_instances": [ { "bbox": [ 492.68, 203.19, 8.64, 15.21 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 81.75, 87.81, 35.56, 54.23 ], "instance_id": "", "name": "backpack" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cell phone above or below the backpack?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the handbag is to the left of the backpack." }
{ "answer": "Yes, the handbag is to the left of the backpack.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000349098.jpg", "index": "sealvqa_spatial_2445", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the handbag to the left of the backpack?" }, { "from": "gpt", "value": "Yes, the handbag is to the left of the backpack." } ], "question": "Is the handbag to the left of the backpack?", "question_id": "sealvqa_spatial_2445", "search": 1, "target_instances": [ { "bbox": [ 376.12, 247.75, 21.1, 44.19 ], "instance_id": "", "name": "backpack" }, { "bbox": [ 229.11, 288.06, 21.52, 9.37 ], "instance_id": "", "name": "handbag" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the handbag to the left of the backpack?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the knife is not above the fork." }
{ "answer": "No, the knife is not above the fork.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000117466.jpg", "index": "sealvqa_spatial_4333", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the fork to the left or right of the knife?" }, { "from": "gpt", "value": "The fork is to the right of the knife." }, { "from": "human", "value": "Is the knife above the fork?" }, { "from": "gpt", "value": "No, the knife is not above the fork." } ], "question": "Is the knife above the fork?", "question_id": "sealvqa_spatial_4333", "search": 1, "target_instances": [ { "bbox": [ 13.38, 373.87, 95.86, 23 ], "instance_id": "", "name": "knife" }, { "bbox": [ 415.18, 252.41, 37.57, 99.22 ], "instance_id": "", "name": "fork" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the knife above the fork?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the remote is not above the sink." }
{ "answer": "No, the remote is not above the sink.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000127495.jpg", "index": "sealvqa_spatial_26622", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the remote to the right of the sink?" }, { "from": "gpt", "value": "No, the remote is not to the right of the sink." }, { "from": "human", "value": "Is the remote above the sink?" }, { "from": "gpt", "value": "No, the remote is not above the sink." } ], "question": "Is the remote above the sink?", "question_id": "sealvqa_spatial_26622", "search": 1, "target_instances": [ { "bbox": [ 99.77, 326.24, 19.64, 6.54 ], "instance_id": "", "name": "remote" }, { "bbox": [ 247.52, 277.59, 172.47, 37.64 ], "instance_id": "", "name": "sink" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the remote above the sink?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the cell phone is below the mouse." }
{ "answer": "Yes, the cell phone is below the mouse.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000260651.jpg", "index": "sealvqa_spatial_17418", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the mouse on the left or right side of the cell phone?" }, { "from": "gpt", "value": "The mouse is on the right side of the cell phone." }, { "from": "human", "value": "Is the cell phone below the mouse?" }, { "from": "gpt", "value": "Yes, the cell phone is below the mouse." } ], "question": "Is the cell phone below the mouse?", "question_id": "sealvqa_spatial_17418", "search": 1, "target_instances": [ { "bbox": [ 81.2, 324.2, 48.03, 12.14 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 265.84, 299.53, 29.01, 14.8 ], "instance_id": "", "name": "mouse" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cell phone below the mouse?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cup is not below the cell phone." }
{ "answer": "No, the cup is not below the cell phone.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000042804.jpg", "index": "sealvqa_spatial_20659", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cell phone to the left or right of the cup?" }, { "from": "gpt", "value": "The cell phone is to the right of the cup." }, { "from": "human", "value": "Is the cup below the cell phone?" }, { "from": "gpt", "value": "No, the cup is not below the cell phone." } ], "question": "Is the cup below the cell phone?", "question_id": "sealvqa_spatial_20659", "search": 1, "target_instances": [ { "bbox": [ 456.64, 182.53, 24.41, 29.92 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 48.24, 98.65, 16.11, 11.84 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup below the cell phone?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the potted plant is below the truck." }
{ "answer": "Yes, the potted plant is below the truck.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000345786.jpg", "index": "sealvqa_spatial_22072", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the truck on the left side of the potted plant?" }, { "from": "gpt", "value": "No, the truck is not on the left side of the potted plant." }, { "from": "human", "value": "Is the potted plant below the truck?" }, { "from": "gpt", "value": "Yes, the potted plant is below the truck." } ], "question": "Is the potted plant below the truck?", "question_id": "sealvqa_spatial_22072", "search": 1, "target_instances": [ { "bbox": [ 127.91, 414.24, 26.98, 32.07 ], "instance_id": "", "name": "potted plant" }, { "bbox": [ 274.6, 362.8, 46.67, 42.33 ], "instance_id": "", "name": "truck" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the potted plant below the truck?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The wine glass is to the right of the clock." }
{ "answer": "The wine glass is to the right of the clock.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000511031.jpg", "index": "sealvqa_spatial_12723", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the wine glass to the left or right of the clock?" }, { "from": "gpt", "value": "The wine glass is to the right of the clock." } ], "question": "Is the wine glass to the left or right of the clock?", "question_id": "sealvqa_spatial_12723", "search": 1, "target_instances": [ { "bbox": [ 397.65, 172.07, 24.69, 24.07 ], "instance_id": "", "name": "clock" }, { "bbox": [ 626.61, 190.63, 13.07, 37.2 ], "instance_id": "", "name": "wine glass" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the wine glass to the left or right of the clock?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The handbag is to the right of the keyboard." }
{ "answer": "The handbag is to the right of the keyboard.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000417373.jpg", "index": "sealvqa_spatial_5953", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the handbag to the left or right of the keyboard?" }, { "from": "gpt", "value": "The handbag is to the right of the keyboard." } ], "question": "Is the handbag to the left or right of the keyboard?", "question_id": "sealvqa_spatial_5953", "search": 1, "target_instances": [ { "bbox": [ 212.91, 182.56, 102.29, 46.26 ], "instance_id": "", "name": "keyboard" }, { "bbox": [ 374.51, 104.43, 107.68, 81.78 ], "instance_id": "", "name": "handbag" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the handbag to the left or right of the keyboard?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the backpack is on the right side of the handbag." }
{ "answer": "Yes, the backpack is on the right side of the handbag.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000331289.jpg", "index": "sealvqa_spatial_1734", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the backpack on the right side of the handbag?" }, { "from": "gpt", "value": "Yes, the backpack is on the right side of the handbag." } ], "question": "Is the backpack on the right side of the handbag?", "question_id": "sealvqa_spatial_1734", "search": 1, "target_instances": [ { "bbox": [ 371.15, 239.93, 16.55, 36.6 ], "instance_id": "", "name": "handbag" }, { "bbox": [ 477.99, 204.22, 85, 113.7 ], "instance_id": "", "name": "backpack" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the backpack on the right side of the handbag?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the cup is below the knife." }
{ "answer": "Yes, the cup is below the knife.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000206751.jpg", "index": "sealvqa_spatial_4147", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the knife on the left side of the cup?" }, { "from": "gpt", "value": "Yes, the knife is on the left side of the cup." }, { "from": "human", "value": "Is the cup below the knife?" }, { "from": "gpt", "value": "Yes, the cup is below the knife." } ], "question": "Is the cup below the knife?", "question_id": "sealvqa_spatial_4147", "search": 1, "target_instances": [ { "bbox": [ 552.12, 381.8, 87.88, 97.4 ], "instance_id": "", "name": "cup" }, { "bbox": [ 527.44, 254.63, 22.6, 99.19 ], "instance_id": "", "name": "knife" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup below the knife?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the refrigerator is below the microwave." }
{ "answer": "Yes, the refrigerator is below the microwave.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000337354.jpg", "index": "sealvqa_spatial_2257", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the refrigerator to the left of the microwave?" }, { "from": "gpt", "value": "No, the refrigerator is not to the left of the microwave." }, { "from": "human", "value": "Is the refrigerator below the microwave?" }, { "from": "gpt", "value": "Yes, the refrigerator is below the microwave." } ], "question": "Is the refrigerator below the microwave?", "question_id": "sealvqa_spatial_2257", "search": 1, "target_instances": [ { "bbox": [ 351.4, 153.02, 115.03, 62.68 ], "instance_id": "", "name": "microwave" }, { "bbox": [ 604.04, 281.53, 35.6, 193.08 ], "instance_id": "", "name": "refrigerator" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the refrigerator below the microwave?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The couch is above the remote." }
{ "answer": "The couch is above the remote.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000012825.jpg", "index": "sealvqa_spatial_17492", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the couch to the right of the remote?" }, { "from": "gpt", "value": "No, the couch is not to the right of the remote." }, { "from": "human", "value": "Is the couch above or below the remote?" }, { "from": "gpt", "value": "The couch is above the remote." } ], "question": "Is the couch above or below the remote?", "question_id": "sealvqa_spatial_17492", "search": 1, "target_instances": [ { "bbox": [ 574.3, 428.11, 26.13, 15.23 ], "instance_id": "", "name": "remote" }, { "bbox": [ 27.87, 291.3, 86.29, 75.69 ], "instance_id": "", "name": "couch" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the couch above or below the remote?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the bottle is on the right side of the toothbrush." }
{ "answer": "Yes, the bottle is on the right side of the toothbrush.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000341437.jpg", "index": "sealvqa_spatial_21602", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bottle on the right side of the toothbrush?" }, { "from": "gpt", "value": "Yes, the bottle is on the right side of the toothbrush." } ], "question": "Is the bottle on the right side of the toothbrush?", "question_id": "sealvqa_spatial_21602", "search": 1, "target_instances": [ { "bbox": [ 250.69, 50.96, 62.48, 97.87 ], "instance_id": "", "name": "toothbrush" }, { "bbox": [ 314.51, 131.51, 45.72, 153.83 ], "instance_id": "", "name": "bottle" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bottle on the right side of the toothbrush?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the bowl is to the right of the spoon." }
{ "answer": "Yes, the bowl is to the right of the spoon.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000439906.jpg", "index": "sealvqa_spatial_14743", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bowl to the right of the spoon?" }, { "from": "gpt", "value": "Yes, the bowl is to the right of the spoon." } ], "question": "Is the bowl to the right of the spoon?", "question_id": "sealvqa_spatial_14743", "search": 1, "target_instances": [ { "bbox": [ 420.04, 315.06, 45.33, 37.18 ], "instance_id": "", "name": "spoon" }, { "bbox": [ 526.32, 282.51, 67.97, 76.16 ], "instance_id": "", "name": "bowl" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bowl to the right of the spoon?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the spoon is to the right of the sink." }
{ "answer": "Yes, the spoon is to the right of the sink.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000402233.jpg", "index": "sealvqa_spatial_141", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the spoon to the right of the sink?" }, { "from": "gpt", "value": "Yes, the spoon is to the right of the sink." } ], "question": "Is the spoon to the right of the sink?", "question_id": "sealvqa_spatial_141", "search": 1, "target_instances": [ { "bbox": [ 208.55, 328.34, 16.82, 11.65 ], "instance_id": "", "name": "spoon" }, { "bbox": [ 0.24, 327.52, 38.38, 37.03 ], "instance_id": "", "name": "sink" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the spoon to the right of the sink?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The tv is to the right of the chair." }
{ "answer": "The tv is to the right of the chair.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000053361.jpg", "index": "sealvqa_spatial_10525", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the tv to the left or right of the chair?" }, { "from": "gpt", "value": "The tv is to the right of the chair." } ], "question": "Is the tv to the left or right of the chair?", "question_id": "sealvqa_spatial_10525", "search": 1, "target_instances": [ { "bbox": [ 553.68, 167.67, 86.32, 66.1 ], "instance_id": "", "name": "tv" }, { "bbox": [ 447.59, 205.39, 59.54, 102.78 ], "instance_id": "", "name": "chair" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the tv to the left or right of the chair?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the oven is to the left of the microwave." }
{ "answer": "Yes, the oven is to the left of the microwave.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000121232.jpg", "index": "sealvqa_spatial_14340", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the oven to the left of the microwave?" }, { "from": "gpt", "value": "Yes, the oven is to the left of the microwave." } ], "question": "Is the oven to the left of the microwave?", "question_id": "sealvqa_spatial_14340", "search": 1, "target_instances": [ { "bbox": [ 321.76, 249.24, 64.02, 27.26 ], "instance_id": "", "name": "microwave" }, { "bbox": [ 0, 229.38, 30.52, 94.36 ], "instance_id": "", "name": "oven" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the oven to the left of the microwave?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the keyboard is not below the book." }
{ "answer": "No, the keyboard is not below the book.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000335688.jpg", "index": "sealvqa_spatial_11381", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the keyboard on the right side of the book?" }, { "from": "gpt", "value": "Yes, the keyboard is on the right side of the book." }, { "from": "human", "value": "Is the keyboard below the book?" }, { "from": "gpt", "value": "No, the keyboard is not below the book." } ], "question": "Is the keyboard below the book?", "question_id": "sealvqa_spatial_11381", "search": 1, "target_instances": [ { "bbox": [ 148.47, 373.03, 169.94, 34.61 ], "instance_id": "", "name": "keyboard" }, { "bbox": [ 1.05, 409.14, 89.25, 39.22 ], "instance_id": "", "name": "book" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the keyboard below the book?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the dog is below the couch." }
{ "answer": "Yes, the dog is below the couch.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000394404.jpg", "index": "sealvqa_spatial_8169", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the couch on the right side of the dog?" }, { "from": "gpt", "value": "No, the couch is not on the right side of the dog." }, { "from": "human", "value": "Is the dog below the couch?" }, { "from": "gpt", "value": "Yes, the dog is below the couch." } ], "question": "Is the dog below the couch?", "question_id": "sealvqa_spatial_8169", "search": 1, "target_instances": [ { "bbox": [ 151.13, 407.89, 70.78, 72.11 ], "instance_id": "", "name": "dog" }, { "bbox": [ 2.06, 250.95, 46.46, 145.55 ], "instance_id": "", "name": "couch" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the dog below the couch?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the baseball bat is above the baseball glove." }
{ "answer": "Yes, the baseball bat is above the baseball glove.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000026498.jpg", "index": "sealvqa_spatial_19562", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the baseball glove to the left of the baseball bat?" }, { "from": "gpt", "value": "No, the baseball glove is not to the left of the baseball bat." }, { "from": "human", "value": "Is the baseball bat above the baseball glove?" }, { "from": "gpt", "value": "Yes, the baseball bat is above the baseball glove." } ], "question": "Is the baseball bat above the baseball glove?", "question_id": "sealvqa_spatial_19562", "search": 1, "target_instances": [ { "bbox": [ 170.27, 282.96, 141.29, 26.58 ], "instance_id": "", "name": "baseball bat" }, { "bbox": [ 623.37, 360.67, 16.63, 54.32 ], "instance_id": "", "name": "baseball glove" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the baseball bat above the baseball glove?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The book is below the cell phone." }
{ "answer": "The book is below the cell phone.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000397569.jpg", "index": "sealvqa_spatial_10875", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the book on the left side of the cell phone?" }, { "from": "gpt", "value": "Yes, the book is on the left side of the cell phone." }, { "from": "human", "value": "Is the book above or below the cell phone?" }, { "from": "gpt", "value": "The book is below the cell phone." } ], "question": "Is the book above or below the cell phone?", "question_id": "sealvqa_spatial_10875", "search": 1, "target_instances": [ { "bbox": [ 381.36, 331.61, 51.74, 25.1 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 0.96, 378.19, 147.49, 45.97 ], "instance_id": "", "name": "book" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the book above or below the cell phone?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the cup is above the apple." }
{ "answer": "Yes, the cup is above the apple.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000546952.jpg", "index": "sealvqa_spatial_14164", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup on the left or right side of the apple?" }, { "from": "gpt", "value": "The cup is on the left side of the apple." }, { "from": "human", "value": "Is the cup above the apple?" }, { "from": "gpt", "value": "Yes, the cup is above the apple." } ], "question": "Is the cup above the apple?", "question_id": "sealvqa_spatial_14164", "search": 1, "target_instances": [ { "bbox": [ 313.24, 185.28, 39.74, 40.47 ], "instance_id": "", "name": "apple" }, { "bbox": [ 104.42, 65.72, 31.99, 39.8 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup above the apple?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the clock is on the right side of the person." }
{ "answer": "Yes, the clock is on the right side of the person.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000454086.jpg", "index": "sealvqa_spatial_21263", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the clock on the right side of the person?" }, { "from": "gpt", "value": "Yes, the clock is on the right side of the person." } ], "question": "Is the clock on the right side of the person?", "question_id": "sealvqa_spatial_21263", "search": 1, "target_instances": [ { "bbox": [ 573.76, 204.4, 26.5, 24.63 ], "instance_id": "", "name": "clock" }, { "bbox": [ 493.8, 186.55, 26.41, 81.39 ], "instance_id": "", "name": "person" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the clock on the right side of the person?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The spoon is below the chair." }
{ "answer": "The spoon is below the chair.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000426152.jpg", "index": "sealvqa_spatial_25872", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the chair on the left or right side of the spoon?" }, { "from": "gpt", "value": "The chair is on the left side of the spoon." }, { "from": "human", "value": "Is the spoon above or below the chair?" }, { "from": "gpt", "value": "The spoon is below the chair." } ], "question": "Is the spoon above or below the chair?", "question_id": "sealvqa_spatial_25872", "search": 1, "target_instances": [ { "bbox": [ 153.85, 471.56, 29.69, 54.07 ], "instance_id": "", "name": "spoon" }, { "bbox": [ 112.3, 379.57, 17.94, 47 ], "instance_id": "", "name": "chair" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the spoon above or below the chair?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the umbrella is below the backpack." }
{ "answer": "Yes, the umbrella is below the backpack.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000083736.jpg", "index": "sealvqa_spatial_1883", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the backpack on the left side of the umbrella?" }, { "from": "gpt", "value": "No, the backpack is not on the left side of the umbrella." }, { "from": "human", "value": "Is the umbrella below the backpack?" }, { "from": "gpt", "value": "Yes, the umbrella is below the backpack." } ], "question": "Is the umbrella below the backpack?", "question_id": "sealvqa_spatial_1883", "search": 1, "target_instances": [ { "bbox": [ 27.01, 487.74, 16.96, 94.06 ], "instance_id": "", "name": "umbrella" }, { "bbox": [ 79.9, 419.85, 31.16, 31.93 ], "instance_id": "", "name": "backpack" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the umbrella below the backpack?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the dog is below the car." }
{ "answer": "Yes, the dog is below the car.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000089340.jpg", "index": "sealvqa_spatial_8109", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the dog on the left side of the car?" }, { "from": "gpt", "value": "No, the dog is not on the left side of the car." }, { "from": "human", "value": "Is the dog below the car?" }, { "from": "gpt", "value": "Yes, the dog is below the car." } ], "question": "Is the dog below the car?", "question_id": "sealvqa_spatial_8109", "search": 1, "target_instances": [ { "bbox": [ 152.64, 220.31, 116.31, 48.72 ], "instance_id": "", "name": "dog" }, { "bbox": [ 0.27, 131.74, 28.84, 68.01 ], "instance_id": "", "name": "car" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the dog below the car?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The chair is to the left of the remote." }
{ "answer": "The chair is to the left of the remote.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000103509.jpg", "index": "sealvqa_spatial_5707", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the chair to the left or right of the remote?" }, { "from": "gpt", "value": "The chair is to the left of the remote." } ], "question": "Is the chair to the left or right of the remote?", "question_id": "sealvqa_spatial_5707", "search": 1, "target_instances": [ { "bbox": [ 0, 454.11, 49.42, 25.54 ], "instance_id": "", "name": "chair" }, { "bbox": [ 146.71, 364.73, 90.2, 97.38 ], "instance_id": "", "name": "remote" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the chair to the left or right of the remote?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The hair drier is above the suitcase." }
{ "answer": "The hair drier is above the suitcase.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000130492.jpg", "index": "sealvqa_spatial_14243", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the suitcase to the left of the hair drier?" }, { "from": "gpt", "value": "Yes, the suitcase is to the left of the hair drier." }, { "from": "human", "value": "Is the hair drier above or below the suitcase?" }, { "from": "gpt", "value": "The hair drier is above the suitcase." } ], "question": "Is the hair drier above or below the suitcase?", "question_id": "sealvqa_spatial_14243", "search": 1, "target_instances": [ { "bbox": [ 76.58, 419.21, 75.78, 60.63 ], "instance_id": "", "name": "suitcase" }, { "bbox": [ 544.8, 345.67, 53.64, 38.76 ], "instance_id": "", "name": "hair drier" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the hair drier above or below the suitcase?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The scissors is below the bottle." }
{ "answer": "The scissors is below the bottle.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000208386.jpg", "index": "sealvqa_spatial_17422", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the scissors to the left or right of the bottle?" }, { "from": "gpt", "value": "The scissors is to the left of the bottle." }, { "from": "human", "value": "Is the scissors above or below the bottle?" }, { "from": "gpt", "value": "The scissors is below the bottle." } ], "question": "Is the scissors above or below the bottle?", "question_id": "sealvqa_spatial_17422", "search": 1, "target_instances": [ { "bbox": [ 254.77, 166.08, 21.97, 47.17 ], "instance_id": "", "name": "bottle" }, { "bbox": [ 36.8, 374.4, 34.25, 30.14 ], "instance_id": "", "name": "scissors" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the scissors above or below the bottle?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the parking meter is on the left side of the bicycle." }
{ "answer": "Yes, the parking meter is on the left side of the bicycle.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000208067.jpg", "index": "sealvqa_spatial_15922", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the parking meter on the left side of the bicycle?" }, { "from": "gpt", "value": "Yes, the parking meter is on the left side of the bicycle." } ], "question": "Is the parking meter on the left side of the bicycle?", "question_id": "sealvqa_spatial_15922", "search": 1, "target_instances": [ { "bbox": [ 337.87, 243.9, 10.36, 26.12 ], "instance_id": "", "name": "bicycle" }, { "bbox": [ 5.49, 238.14, 5.91, 8.93 ], "instance_id": "", "name": "parking meter" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the parking meter on the left side of the bicycle?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
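The [x, y, width, height] convention these rows declare differs from the [x1, y1, x2, y2] corner format many detection tools expect, which is an easy thing to get wrong when re-plotting these instances. A one-line conversion, using the bicycle box from the row above as the worked example (the function name is ours):

```python
def xywh_to_xyxy(bbox):
    """Absolute [x, y, width, height] -> absolute [x1, y1, x2, y2]."""
    x, y, w, h = bbox
    return [x, y, x + w, y + h]

# Bicycle box from the parking meter / bicycle row above:
x1, y1, x2, y2 = xywh_to_xyxy([337.87, 243.9, 10.36, 26.12])
# -> roughly [337.87, 243.9, 348.23, 270.02]
```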
sealvqa_spatial
{ "ground_truth": "The broccoli is below the fork." }
{ "answer": "The broccoli is below the fork.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000473762.jpg", "index": "sealvqa_spatial_14383", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the fork to the left of the broccoli?" }, { "from": "gpt", "value": "No, the fork is not to the left of the broccoli." }, { "from": "human", "value": "Is the broccoli above or below the fork?" }, { "from": "gpt", "value": "The broccoli is below the fork." } ], "question": "Is the broccoli above or below the fork?", "question_id": "sealvqa_spatial_14383", "search": 1, "target_instances": [ { "bbox": [ 502.39, 350.79, 55.18, 14.18 ], "instance_id": "", "name": "fork" }, { "bbox": [ 220.52, 370.66, 114.15, 34.42 ], "instance_id": "", "name": "broccoli" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the broccoli above or below the fork?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cake is not to the left of the chair." }
{ "answer": "No, the cake is not to the left of the chair.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000092553.jpg", "index": "sealvqa_spatial_13243", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cake to the left of the chair?" }, { "from": "gpt", "value": "No, the cake is not to the left of the chair." } ], "question": "Is the cake to the left of the chair?", "question_id": "sealvqa_spatial_13243", "search": 1, "target_instances": [ { "bbox": [ 1.92, 351.2, 62.37, 68.12 ], "instance_id": "", "name": "chair" }, { "bbox": [ 308.38, 317.87, 107.26, 76.31 ], "instance_id": "", "name": "cake" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cake to the left of the chair?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the backpack is not on the right side of the suitcase." }
{ "answer": "No, the backpack is not on the right side of the suitcase.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000432268.jpg", "index": "sealvqa_spatial_25103", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the backpack on the right side of the suitcase?" }, { "from": "gpt", "value": "No, the backpack is not on the right side of the suitcase." } ], "question": "Is the backpack on the right side of the suitcase?", "question_id": "sealvqa_spatial_25103", "search": 1, "target_instances": [ { "bbox": [ 85.45, 87.54, 25.27, 31.08 ], "instance_id": "", "name": "backpack" }, { "bbox": [ 138.85, 104.54, 17.25, 20.83 ], "instance_id": "", "name": "suitcase" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the backpack on the right side of the suitcase?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The person is on the left side of the truck." }
{ "answer": "The person is on the left side of the truck.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000001451.jpg", "index": "sealvqa_spatial_12026", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the person on the left or right side of the truck?" }, { "from": "gpt", "value": "The person is on the left side of the truck." } ], "question": "Is the person on the left or right side of the truck?", "question_id": "sealvqa_spatial_12026", "search": 1, "target_instances": [ { "bbox": [ 279.84, 150.41, 28.19, 59.15 ], "instance_id": "", "name": "person" }, { "bbox": [ 384.72, 120.22, 125.99, 60.6 ], "instance_id": "", "name": "truck" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the person on the left or right side of the truck?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the sink is on the right side of the cup." }
{ "answer": "Yes, the sink is on the right side of the cup.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000475546.jpg", "index": "sealvqa_spatial_23957", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the sink on the right side of the cup?" }, { "from": "gpt", "value": "Yes, the sink is on the right side of the cup." } ], "question": "Is the sink on the right side of the cup?", "question_id": "sealvqa_spatial_23957", "search": 1, "target_instances": [ { "bbox": [ 195.73, 267.76, 13.14, 25.15 ], "instance_id": "", "name": "cup" }, { "bbox": [ 270.32, 260.22, 114.92, 67.4 ], "instance_id": "", "name": "sink" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the sink on the right side of the cup?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cell phone is not to the left of the bench." }
{ "answer": "No, the cell phone is not to the left of the bench.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000152886.jpg", "index": "sealvqa_spatial_239", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cell phone to the left of the bench?" }, { "from": "gpt", "value": "No, the cell phone is not to the left of the bench." } ], "question": "Is the cell phone to the left of the bench?", "question_id": "sealvqa_spatial_239", "search": 1, "target_instances": [ { "bbox": [ 256.66, 88.26, 5.99, 11.44 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 119.37, 99.02, 14.02, 7.01 ], "instance_id": "", "name": "bench" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cell phone to the left of the bench?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the bottle is not to the right of the cup." }
{ "answer": "No, the bottle is not to the right of the cup.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000462712.jpg", "index": "sealvqa_spatial_12398", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bottle to the right of the cup?" }, { "from": "gpt", "value": "No, the bottle is not to the right of the cup." } ], "question": "Is the bottle to the right of the cup?", "question_id": "sealvqa_spatial_12398", "search": 1, "target_instances": [ { "bbox": [ 72.71, 206.01, 7.45, 20.37 ], "instance_id": "", "name": "bottle" }, { "bbox": [ 176.9, 213.56, 9.09, 14.55 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bottle to the right of the cup?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the skateboard is not above the bench." }
{ "answer": "No, the skateboard is not above the bench.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000400913.jpg", "index": "sealvqa_spatial_11574", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bench to the left of the skateboard?" }, { "from": "gpt", "value": "Yes, the bench is to the left of the skateboard." }, { "from": "human", "value": "Is the skateboard above the bench?" }, { "from": "gpt", "value": "No, the skateboard is not above the bench." } ], "question": "Is the skateboard above the bench?", "question_id": "sealvqa_spatial_11574", "search": 1, "target_instances": [ { "bbox": [ 0.05, 242.38, 133.88, 35.27 ], "instance_id": "", "name": "bench" }, { "bbox": [ 429.53, 304.96, 49.32, 10.81 ], "instance_id": "", "name": "skateboard" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the skateboard above the bench?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the traffic light is on the right side of the bus." }
{ "answer": "Yes, the traffic light is on the right side of the bus.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000556345.jpg", "index": "sealvqa_spatial_884", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the traffic light on the right side of the bus?" }, { "from": "gpt", "value": "Yes, the traffic light is on the right side of the bus." } ], "question": "Is the traffic light on the right side of the bus?", "question_id": "sealvqa_spatial_884", "search": 1, "target_instances": [ { "bbox": [ 326.3, 246.26, 100.7, 90.05 ], "instance_id": "", "name": "bus" }, { "bbox": [ 429.2, 279.13, 6.19, 12.21 ], "instance_id": "", "name": "traffic light" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the traffic light on the right side of the bus?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the bowl is not on the left side of the cup." }
{ "answer": "No, the bowl is not on the left side of the cup.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000467314.jpg", "index": "sealvqa_spatial_1376", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bowl on the left side of the cup?" }, { "from": "gpt", "value": "No, the bowl is not on the left side of the cup." } ], "question": "Is the bowl on the left side of the cup?", "question_id": "sealvqa_spatial_1376", "search": 1, "target_instances": [ { "bbox": [ 524.49, 205.14, 59.73, 15.57 ], "instance_id": "", "name": "bowl" }, { "bbox": [ 192.73, 192.13, 14.65, 17.94 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bowl on the left side of the cup?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the dog is not below the person." }
{ "answer": "No, the dog is not below the person.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000038447.jpg", "index": "sealvqa_spatial_8157", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the person on the left side of the dog?" }, { "from": "gpt", "value": "No, the person is not on the left side of the dog." }, { "from": "human", "value": "Is the dog below the person?" }, { "from": "gpt", "value": "No, the dog is not below the person." } ], "question": "Is the dog below the person?", "question_id": "sealvqa_spatial_8157", "search": 1, "target_instances": [ { "bbox": [ 142.14, 114.24, 74.34, 46.46 ], "instance_id": "", "name": "dog" }, { "bbox": [ 334.28, 325.54, 29.98, 119.93 ], "instance_id": "", "name": "person" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the dog below the person?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the teddy bear is above the tie." }
{ "answer": "Yes, the teddy bear is above the tie.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000221223.jpg", "index": "sealvqa_spatial_9687", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the teddy bear on the right side of the tie?" }, { "from": "gpt", "value": "Yes, the teddy bear is on the right side of the tie." }, { "from": "human", "value": "Is the teddy bear above the tie?" }, { "from": "gpt", "value": "Yes, the teddy bear is above the tie." } ], "question": "Is the teddy bear above the tie?", "question_id": "sealvqa_spatial_9687", "search": 1, "target_instances": [ { "bbox": [ 274.31, 181.61, 22.2, 58.1 ], "instance_id": "", "name": "tie" }, { "bbox": [ 488.95, 126.08, 49.94, 39.54 ], "instance_id": "", "name": "teddy bear" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the teddy bear above the tie?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the mouse is not to the left of the keyboard." }
{ "answer": "No, the mouse is not to the left of the keyboard.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000312435.jpg", "index": "sealvqa_spatial_14360", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the mouse to the left of the keyboard?" }, { "from": "gpt", "value": "No, the mouse is not to the left of the keyboard." } ], "question": "Is the mouse to the left of the keyboard?", "question_id": "sealvqa_spatial_14360", "search": 1, "target_instances": [ { "bbox": [ 108.39, 115.06, 85.39, 20.46 ], "instance_id": "", "name": "keyboard" }, { "bbox": [ 212.84, 119.55, 16.13, 11.25 ], "instance_id": "", "name": "mouse" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the mouse to the left of the keyboard?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the car is not to the right of the fire hydrant." }
{ "answer": "No, the car is not to the right of the fire hydrant.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000272925.jpg", "index": "sealvqa_spatial_2104", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the car to the right of the fire hydrant?" }, { "from": "gpt", "value": "No, the car is not to the right of the fire hydrant." } ], "question": "Is the car to the right of the fire hydrant?", "question_id": "sealvqa_spatial_2104", "search": 1, "target_instances": [ { "bbox": [ 149.84, 1.11, 55.75, 175.02 ], "instance_id": "", "name": "fire hydrant" }, { "bbox": [ 0.37, 0.38, 48.39, 72.21 ], "instance_id": "", "name": "car" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the car to the right of the fire hydrant?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The sports ball is above the tie." }
{ "answer": "The sports ball is above the tie.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000050975.jpg", "index": "sealvqa_spatial_4060", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the sports ball on the left side of the tie?" }, { "from": "gpt", "value": "Yes, the sports ball is on the left side of the tie." }, { "from": "human", "value": "Is the sports ball above or below the tie?" }, { "from": "gpt", "value": "The sports ball is above the tie." } ], "question": "Is the sports ball above or below the tie?", "question_id": "sealvqa_spatial_4060", "search": 1, "target_instances": [ { "bbox": [ 370.3, 115.28, 2.92, 8.48 ], "instance_id": "", "name": "tie" }, { "bbox": [ 180.26, 85.41, 15.8, 15.42 ], "instance_id": "", "name": "sports ball" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the sports ball above or below the tie?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The cup is below the cell phone." }
{ "answer": "The cup is below the cell phone.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000535770.jpg", "index": "sealvqa_spatial_5885", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cell phone to the right of the cup?" }, { "from": "gpt", "value": "No, the cell phone is not to the right of the cup." }, { "from": "human", "value": "Is the cup above or below the cell phone?" }, { "from": "gpt", "value": "The cup is below the cell phone." } ], "question": "Is the cup above or below the cell phone?", "question_id": "sealvqa_spatial_5885", "search": 1, "target_instances": [ { "bbox": [ 234.63, 242.25, 19.65, 6.23 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 353.71, 281.04, 52.52, 63.81 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup above or below the cell phone?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the handbag is not to the right of the refrigerator." }
{ "answer": "No, the handbag is not to the right of the refrigerator.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000202782.jpg", "index": "sealvqa_spatial_22474", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the handbag to the right of the refrigerator?" }, { "from": "gpt", "value": "No, the handbag is not to the right of the refrigerator." } ], "question": "Is the handbag to the right of the refrigerator?", "question_id": "sealvqa_spatial_22474", "search": 1, "target_instances": [ { "bbox": [ 347.44, 99.28, 25.23, 122.33 ], "instance_id": "", "name": "refrigerator" }, { "bbox": [ 187.04, 213.83, 20.87, 26.89 ], "instance_id": "", "name": "handbag" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the handbag to the right of the refrigerator?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cup is not to the right of the bottle." }
{ "answer": "No, the cup is not to the right of the bottle.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000379357.jpg", "index": "sealvqa_spatial_9438", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup to the right of the bottle?" }, { "from": "gpt", "value": "No, the cup is not to the right of the bottle." } ], "question": "Is the cup to the right of the bottle?", "question_id": "sealvqa_spatial_9438", "search": 1, "target_instances": [ { "bbox": [ 376.54, 165.91, 22.33, 83.42 ], "instance_id": "", "name": "bottle" }, { "bbox": [ 83.02, 185.64, 31.3, 61.42 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup to the right of the bottle?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cell phone is not above the vase." }
{ "answer": "No, the cell phone is not above the vase.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000556516.jpg", "index": "sealvqa_spatial_4827", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cell phone on the left or right side of the vase?" }, { "from": "gpt", "value": "The cell phone is on the left side of the vase." }, { "from": "human", "value": "Is the cell phone above the vase?" }, { "from": "gpt", "value": "No, the cell phone is not above the vase." } ], "question": "Is the cell phone above the vase?", "question_id": "sealvqa_spatial_4827", "search": 1, "target_instances": [ { "bbox": [ 487.34, 393.57, 25.76, 10.43 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 617.71, 343.19, 22.29, 41.66 ], "instance_id": "", "name": "vase" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cell phone above the vase?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the fork is on the left side of the cake." }
{ "answer": "Yes, the fork is on the left side of the cake.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000265196.jpg", "index": "sealvqa_spatial_8293", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the fork on the left side of the cake?" }, { "from": "gpt", "value": "Yes, the fork is on the left side of the cake." } ], "question": "Is the fork on the left side of the cake?", "question_id": "sealvqa_spatial_8293", "search": 1, "target_instances": [ { "bbox": [ 114.96, 268.3, 99.01, 70.28 ], "instance_id": "", "name": "fork" }, { "bbox": [ 291.11, 280.9, 50.88, 17.29 ], "instance_id": "", "name": "cake" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the fork on the left side of the cake?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The cup is to the right of the spoon." }
{ "answer": "The cup is to the right of the spoon.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000213453.jpg", "index": "sealvqa_spatial_4443", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup to the left or right of the spoon?" }, { "from": "gpt", "value": "The cup is to the right of the spoon." } ], "question": "Is the cup to the left or right of the spoon?", "question_id": "sealvqa_spatial_4443", "search": 1, "target_instances": [ { "bbox": [ 468.15, 0.14, 99.66, 52.64 ], "instance_id": "", "name": "cup" }, { "bbox": [ 406.17, 0.4, 57.14, 37.25 ], "instance_id": "", "name": "spoon" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup to the left or right of the spoon?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the sports ball is not above the baseball bat." }
{ "answer": "No, the sports ball is not above the baseball bat.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000247378.jpg", "index": "sealvqa_spatial_3790", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the baseball bat on the left side of the sports ball?" }, { "from": "gpt", "value": "Yes, the baseball bat is on the left side of the sports ball." }, { "from": "human", "value": "Is the sports ball above the baseball bat?" }, { "from": "gpt", "value": "No, the sports ball is not above the baseball bat." } ], "question": "Is the sports ball above the baseball bat?", "question_id": "sealvqa_spatial_3790", "search": 1, "target_instances": [ { "bbox": [ 395.92, 190.51, 19.51, 35.38 ], "instance_id": "", "name": "baseball bat" }, { "bbox": [ 516.97, 307.06, 12.51, 10.02 ], "instance_id": "", "name": "sports ball" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the sports ball above the baseball bat?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the bottle is not to the right of the chair." }
{ "answer": "No, the bottle is not to the right of the chair.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000265625.jpg", "index": "sealvqa_spatial_12901", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bottle to the right of the chair?" }, { "from": "gpt", "value": "No, the bottle is not to the right of the chair." } ], "question": "Is the bottle to the right of the chair?", "question_id": "sealvqa_spatial_12901", "search": 1, "target_instances": [ { "bbox": [ 383.79, 286.03, 34.14, 127.25 ], "instance_id": "", "name": "bottle" }, { "bbox": [ 522.51, 404.61, 48.85, 74.02 ], "instance_id": "", "name": "chair" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bottle to the right of the chair?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the bottle is not above the bench." }
{ "answer": "No, the bottle is not above the bench.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000571051.jpg", "index": "sealvqa_spatial_24722", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bottle to the left or right of the bench?" }, { "from": "gpt", "value": "The bottle is to the right of the bench." }, { "from": "human", "value": "Is the bottle above the bench?" }, { "from": "gpt", "value": "No, the bottle is not above the bench." } ], "question": "Is the bottle above the bench?", "question_id": "sealvqa_spatial_24722", "search": 1, "target_instances": [ { "bbox": [ 353.12, 75.26, 86.4, 10.8 ], "instance_id": "", "name": "bench" }, { "bbox": [ 466.64, 185.19, 25.82, 50.29 ], "instance_id": "", "name": "bottle" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bottle above the bench?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the bench is below the person." }
{ "answer": "Yes, the bench is below the person.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000082174.jpg", "index": "sealvqa_spatial_2224", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the person on the left or right side of the bench?" }, { "from": "gpt", "value": "The person is on the left side of the bench." }, { "from": "human", "value": "Is the bench below the person?" }, { "from": "gpt", "value": "Yes, the bench is below the person." } ], "question": "Is the bench below the person?", "question_id": "sealvqa_spatial_2224", "search": 1, "target_instances": [ { "bbox": [ 122.35, 37.07, 17.03, 14.62 ], "instance_id": "", "name": "person" }, { "bbox": [ 606.3, 129.93, 33.7, 89.88 ], "instance_id": "", "name": "bench" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bench below the person?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the cup is on the right side of the remote." }
{ "answer": "Yes, the cup is on the right side of the remote.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000034708.jpg", "index": "sealvqa_spatial_5211", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup on the right side of the remote?" }, { "from": "gpt", "value": "Yes, the cup is on the right side of the remote." } ], "question": "Is the cup on the right side of the remote?", "question_id": "sealvqa_spatial_5211", "search": 1, "target_instances": [ { "bbox": [ 384.57, 217.61, 44.77, 63.65 ], "instance_id": "", "name": "cup" }, { "bbox": [ 57.67, 235.65, 51.12, 39.19 ], "instance_id": "", "name": "remote" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup on the right side of the remote?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the horse is not below the car." }
{ "answer": "No, the horse is not below the car.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000480950.jpg", "index": "sealvqa_spatial_11996", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the horse to the left or right of the car?" }, { "from": "gpt", "value": "The horse is to the left of the car." }, { "from": "human", "value": "Is the horse below the car?" }, { "from": "gpt", "value": "No, the horse is not below the car." } ], "question": "Is the horse below the car?", "question_id": "sealvqa_spatial_11996", "search": 1, "target_instances": [ { "bbox": [ 464.09, 73.47, 36.35, 8.93 ], "instance_id": "", "name": "car" }, { "bbox": [ 225.5, 38.8, 15.86, 10.65 ], "instance_id": "", "name": "horse" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the horse below the car?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the tie is not on the right side of the potted plant." }
{ "answer": "No, the tie is not on the right side of the potted plant.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000182236.jpg", "index": "sealvqa_spatial_7137", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the tie on the right side of the potted plant?" }, { "from": "gpt", "value": "No, the tie is not on the right side of the potted plant." } ], "question": "Is the tie on the right side of the potted plant?", "question_id": "sealvqa_spatial_7137", "search": 1, "target_instances": [ { "bbox": [ 321.59, 208.71, 23.85, 58.85 ], "instance_id": "", "name": "tie" }, { "bbox": [ 409.41, 201.18, 33.41, 55.05 ], "instance_id": "", "name": "potted plant" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the tie on the right side of the potted plant?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the book is to the right of the cell phone." }
{ "answer": "Yes, the book is to the right of the cell phone.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000017273.jpg", "index": "sealvqa_spatial_5548", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the book to the right of the cell phone?" }, { "from": "gpt", "value": "Yes, the book is to the right of the cell phone." } ], "question": "Is the book to the right of the cell phone?", "question_id": "sealvqa_spatial_5548", "search": 1, "target_instances": [ { "bbox": [ 499.71, 338.86, 59.47, 48.07 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 587.23, 294.17, 52.77, 128.14 ], "instance_id": "", "name": "book" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the book to the right of the cell phone?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the keyboard is to the left of the chair." }
{ "answer": "Yes, the keyboard is to the left of the chair.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000254007.jpg", "index": "sealvqa_spatial_10690", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the keyboard to the left of the chair?" }, { "from": "gpt", "value": "Yes, the keyboard is to the left of the chair." } ], "question": "Is the keyboard to the left of the chair?", "question_id": "sealvqa_spatial_10690", "search": 1, "target_instances": [ { "bbox": [ 451.06, 91.29, 89.16, 17.22 ], "instance_id": "", "name": "keyboard" }, { "bbox": [ 601.82, 82.42, 38.18, 219.62 ], "instance_id": "", "name": "chair" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the keyboard to the left of the chair?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the fire hydrant is below the stop sign." }
{ "answer": "Yes, the fire hydrant is below the stop sign.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000142129.jpg", "index": "sealvqa_spatial_938", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the stop sign to the left of the fire hydrant?" }, { "from": "gpt", "value": "No, the stop sign is not to the left of the fire hydrant." }, { "from": "human", "value": "Is the fire hydrant below the stop sign?" }, { "from": "gpt", "value": "Yes, the fire hydrant is below the stop sign." } ], "question": "Is the fire hydrant below the stop sign?", "question_id": "sealvqa_spatial_938", "search": 1, "target_instances": [ { "bbox": [ 365.75, 0, 32.61, 27.55 ], "instance_id": "", "name": "stop sign" }, { "bbox": [ 114.96, 71.52, 12.61, 30.87 ], "instance_id": "", "name": "fire hydrant" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the fire hydrant below the stop sign?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cup is not above the knife." }
{ "answer": "No, the cup is not above the knife.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000351380.jpg", "index": "sealvqa_spatial_24744", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup on the left side of the knife?" }, { "from": "gpt", "value": "No, the cup is not on the left side of the knife." }, { "from": "human", "value": "Is the cup above the knife?" }, { "from": "gpt", "value": "No, the cup is not above the knife." } ], "question": "Is the cup above the knife?", "question_id": "sealvqa_spatial_24744", "search": 1, "target_instances": [ { "bbox": [ 499.7, 442.92, 140.3, 37.08 ], "instance_id": "", "name": "cup" }, { "bbox": [ 0, 163.32, 73.56, 130.91 ], "instance_id": "", "name": "knife" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup above the knife?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the oven is on the right side of the bowl." }
{ "answer": "Yes, the oven is on the right side of the bowl.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000453680.jpg", "index": "sealvqa_spatial_13917", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the oven on the right side of the bowl?" }, { "from": "gpt", "value": "Yes, the oven is on the right side of the bowl." } ], "question": "Is the oven on the right side of the bowl?", "question_id": "sealvqa_spatial_13917", "search": 1, "target_instances": [ { "bbox": [ 332.29, 156.55, 16.59, 9.26 ], "instance_id": "", "name": "bowl" }, { "bbox": [ 362.52, 141.54, 75.7, 93.11 ], "instance_id": "", "name": "oven" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the oven on the right side of the bowl?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The bird is on the left side of the vase." }
{ "answer": "The bird is on the left side of the vase.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000532239.jpg", "index": "sealvqa_spatial_13763", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bird on the left or right side of the vase?" }, { "from": "gpt", "value": "The bird is on the left side of the vase." } ], "question": "Is the bird on the left or right side of the vase?", "question_id": "sealvqa_spatial_13763", "search": 1, "target_instances": [ { "bbox": [ 496.4, 199.21, 20.15, 23.71 ], "instance_id": "", "name": "vase" }, { "bbox": [ 240.86, 212.12, 24.54, 10.91 ], "instance_id": "", "name": "bird" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bird on the left or right side of the vase?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the horse is to the right of the car." }
{ "answer": "Yes, the horse is to the right of the car.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000043279.jpg", "index": "sealvqa_spatial_16220", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the horse to the right of the car?" }, { "from": "gpt", "value": "Yes, the horse is to the right of the car." } ], "question": "Is the horse to the right of the car?", "question_id": "sealvqa_spatial_16220", "search": 1, "target_instances": [ { "bbox": [ 100.53, 252.34, 57.34, 46.55 ], "instance_id": "", "name": "car" }, { "bbox": [ 168.27, 252.99, 39.27, 62.05 ], "instance_id": "", "name": "horse" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the horse to the right of the car?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The bicycle is to the left of the bench." }
{ "answer": "The bicycle is to the left of the bench.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000271038.jpg", "index": "sealvqa_spatial_15679", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bicycle to the left or right of the bench?" }, { "from": "gpt", "value": "The bicycle is to the left of the bench." } ], "question": "Is the bicycle to the left or right of the bench?", "question_id": "sealvqa_spatial_15679", "search": 1, "target_instances": [ { "bbox": [ 69.66, 532.24, 21.5, 45.54 ], "instance_id": "", "name": "bicycle" }, { "bbox": [ 174.98, 532.98, 49.26, 5.52 ], "instance_id": "", "name": "bench" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bicycle to the left or right of the bench?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The potted plant is to the right of the remote." }
{ "answer": "The potted plant is to the right of the remote.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000055344.jpg", "index": "sealvqa_spatial_26626", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the potted plant to the left or right of the remote?" }, { "from": "gpt", "value": "The potted plant is to the right of the remote." } ], "question": "Is the potted plant to the left or right of the remote?", "question_id": "sealvqa_spatial_26626", "search": 1, "target_instances": [ { "bbox": [ 218.95, 237.24, 15.4, 11.77 ], "instance_id": "", "name": "remote" }, { "bbox": [ 408.54, 211.35, 35.68, 56.39 ], "instance_id": "", "name": "potted plant" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the potted plant to the left or right of the remote?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cell phone is not to the right of the mouse." }
{ "answer": "No, the cell phone is not to the right of the mouse.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000064659.jpg", "index": "sealvqa_spatial_13564", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cell phone to the right of the mouse?" }, { "from": "gpt", "value": "No, the cell phone is not to the right of the mouse." } ], "question": "Is the cell phone to the right of the mouse?", "question_id": "sealvqa_spatial_13564", "search": 1, "target_instances": [ { "bbox": [ 67.57, 345.2, 29.01, 15.94 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 444.76, 324.45, 32.36, 22.08 ], "instance_id": "", "name": "mouse" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cell phone to the right of the mouse?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The car is to the left of the skateboard." }
{ "answer": "The car is to the left of the skateboard.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000142360.jpg", "index": "sealvqa_spatial_3610", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the car to the left or right of the skateboard?" }, { "from": "gpt", "value": "The car is to the left of the skateboard." } ], "question": "Is the car to the left or right of the skateboard?", "question_id": "sealvqa_spatial_3610", "search": 1, "target_instances": [ { "bbox": [ 139.64, 323.2, 111.49, 69.82 ], "instance_id": "", "name": "skateboard" }, { "bbox": [ 33.49, 292.17, 7.43, 45.61 ], "instance_id": "", "name": "car" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the car to the left or right of the skateboard?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the bowl is above the cell phone." }
{ "answer": "Yes, the bowl is above the cell phone.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000227851.jpg", "index": "sealvqa_spatial_7322", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bowl on the left side of the cell phone?" }, { "from": "gpt", "value": "No, the bowl is not on the left side of the cell phone." }, { "from": "human", "value": "Is the bowl above the cell phone?" }, { "from": "gpt", "value": "Yes, the bowl is above the cell phone." } ], "question": "Is the bowl above the cell phone?", "question_id": "sealvqa_spatial_7322", "search": 1, "target_instances": [ { "bbox": [ 227.79, 171.08, 38.56, 41.54 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 488.01, 47.37, 65.52, 24.71 ], "instance_id": "", "name": "bowl" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bowl above the cell phone?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the book is below the bottle." }
{ "answer": "Yes, the book is below the bottle.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000368980.jpg", "index": "sealvqa_spatial_6176", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bottle on the left side of the book?" }, { "from": "gpt", "value": "Yes, the bottle is on the left side of the book." }, { "from": "human", "value": "Is the book below the bottle?" }, { "from": "gpt", "value": "Yes, the book is below the bottle." } ], "question": "Is the book below the bottle?", "question_id": "sealvqa_spatial_6176", "search": 1, "target_instances": [ { "bbox": [ 56.87, 217.66, 27.07, 38.31 ], "instance_id": "", "name": "bottle" }, { "bbox": [ 86.99, 292.58, 54.96, 29.26 ], "instance_id": "", "name": "book" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the book below the bottle?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the stop sign is above the backpack." }
{ "answer": "Yes, the stop sign is above the backpack.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000405800.jpg", "index": "sealvqa_spatial_16909", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the backpack on the right side of the stop sign?" }, { "from": "gpt", "value": "Yes, the backpack is on the right side of the stop sign." }, { "from": "human", "value": "Is the stop sign above the backpack?" }, { "from": "gpt", "value": "Yes, the stop sign is above the backpack." } ], "question": "Is the stop sign above the backpack?", "question_id": "sealvqa_spatial_16909", "search": 1, "target_instances": [ { "bbox": [ 524.16, 262.44, 9.2, 11.44 ], "instance_id": "", "name": "backpack" }, { "bbox": [ 511.75, 212.38, 7.46, 17.89 ], "instance_id": "", "name": "stop sign" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the stop sign above the backpack?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the backpack is on the left side of the bicycle." }
{ "answer": "Yes, the backpack is on the left side of the bicycle.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000274413.jpg", "index": "sealvqa_spatial_17381", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the backpack on the left side of the bicycle?" }, { "from": "gpt", "value": "Yes, the backpack is on the left side of the bicycle." } ], "question": "Is the backpack on the left side of the bicycle?", "question_id": "sealvqa_spatial_17381", "search": 1, "target_instances": [ { "bbox": [ 584.87, 222.88, 27.51, 14.92 ], "instance_id": "", "name": "bicycle" }, { "bbox": [ 563.57, 212.28, 9.63, 15.59 ], "instance_id": "", "name": "backpack" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the backpack on the left side of the bicycle?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the sink is to the left of the bowl." }
{ "answer": "Yes, the sink is to the left of the bowl.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000180141.jpg", "index": "sealvqa_spatial_13211", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the sink to the left of the bowl?" }, { "from": "gpt", "value": "Yes, the sink is to the left of the bowl." } ], "question": "Is the sink to the left of the bowl?", "question_id": "sealvqa_spatial_13211", "search": 1, "target_instances": [ { "bbox": [ 556.1, 82.77, 49.7, 28.44 ], "instance_id": "", "name": "bowl" }, { "bbox": [ 314.07, 106.3, 45.27, 6.93 ], "instance_id": "", "name": "sink" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the sink to the left of the bowl?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the sports ball is above the tennis racket." }
{ "answer": "Yes, the sports ball is above the tennis racket.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000018041.jpg", "index": "sealvqa_spatial_12184", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the tennis racket to the left or right of the sports ball?" }, { "from": "gpt", "value": "The tennis racket is to the right of the sports ball." }, { "from": "human", "value": "Is the sports ball above the tennis racket?" }, { "from": "gpt", "value": "Yes, the sports ball is above the tennis racket." } ], "question": "Is the sports ball above the tennis racket?", "question_id": "sealvqa_spatial_12184", "search": 1, "target_instances": [ { "bbox": [ 495.24, 235.9, 17.71, 9.55 ], "instance_id": "", "name": "tennis racket" }, { "bbox": [ 468.86, 161.74, 3.26, 3.32 ], "instance_id": "", "name": "sports ball" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the sports ball above the tennis racket?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The chair is on the right side of the teddy bear." }
{ "answer": "The chair is on the right side of the teddy bear.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000073418.jpg", "index": "sealvqa_spatial_20062", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the chair on the left or right side of the teddy bear?" }, { "from": "gpt", "value": "The chair is on the right side of the teddy bear." } ], "question": "Is the chair on the left or right side of the teddy bear?", "question_id": "sealvqa_spatial_20062", "search": 1, "target_instances": [ { "bbox": [ 604.45, 275.39, 33.59, 204.54 ], "instance_id": "", "name": "chair" }, { "bbox": [ 15.39, 311.68, 60.41, 62.26 ], "instance_id": "", "name": "teddy bear" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the chair on the left or right side of the teddy bear?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The sports ball is below the clock." }
{ "answer": "The sports ball is below the clock.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000570848.jpg", "index": "sealvqa_spatial_6352", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the sports ball to the right of the clock?" }, { "from": "gpt", "value": "No, the sports ball is not to the right of the clock." }, { "from": "human", "value": "Is the sports ball above or below the clock?" }, { "from": "gpt", "value": "The sports ball is below the clock." } ], "question": "Is the sports ball above or below the clock?", "question_id": "sealvqa_spatial_6352", "search": 1, "target_instances": [ { "bbox": [ 484.17, 434.8, 6.69, 9.86 ], "instance_id": "", "name": "sports ball" }, { "bbox": [ 497.66, 398.61, 5.59, 7.44 ], "instance_id": "", "name": "clock" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the sports ball above or below the clock?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the spoon is not above the cup." }
{ "answer": "No, the spoon is not above the cup.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000275339.jpg", "index": "sealvqa_spatial_25271", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the spoon to the left of the cup?" }, { "from": "gpt", "value": "Yes, the spoon is to the left of the cup." }, { "from": "human", "value": "Is the spoon above the cup?" }, { "from": "gpt", "value": "No, the spoon is not above the cup." } ], "question": "Is the spoon above the cup?", "question_id": "sealvqa_spatial_25271", "search": 1, "target_instances": [ { "bbox": [ 297.3, 197.73, 21.12, 28.25 ], "instance_id": "", "name": "spoon" }, { "bbox": [ 339.47, 136.56, 9.97, 14.84 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the spoon above the cup?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The handbag is below the umbrella." }
{ "answer": "The handbag is below the umbrella.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000450666.jpg", "index": "sealvqa_spatial_16196", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the handbag to the left or right of the umbrella?" }, { "from": "gpt", "value": "The handbag is to the left of the umbrella." }, { "from": "human", "value": "Is the handbag above or below the umbrella?" }, { "from": "gpt", "value": "The handbag is below the umbrella." } ], "question": "Is the handbag above or below the umbrella?", "question_id": "sealvqa_spatial_16196", "search": 1, "target_instances": [ { "bbox": [ 228.34, 257.03, 3.86, 5.12 ], "instance_id": "", "name": "handbag" }, { "bbox": [ 303.51, 233.53, 19.39, 7.93 ], "instance_id": "", "name": "umbrella" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the handbag above or below the umbrella?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The baseball bat is above the bench." }
{ "answer": "The baseball bat is above the bench.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000383390.jpg", "index": "sealvqa_spatial_25567", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bench to the left of the baseball bat?" }, { "from": "gpt", "value": "Yes, the bench is to the left of the baseball bat." }, { "from": "human", "value": "Is the baseball bat above or below the bench?" }, { "from": "gpt", "value": "The baseball bat is above the bench." } ], "question": "Is the baseball bat above or below the bench?", "question_id": "sealvqa_spatial_25567", "search": 1, "target_instances": [ { "bbox": [ 382.31, 212.16, 29.8, 10.84 ], "instance_id": "", "name": "baseball bat" }, { "bbox": [ 67.61, 224.05, 15.92, 3.39 ], "instance_id": "", "name": "bench" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the baseball bat above or below the bench?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the mouse is on the left side of the laptop." }
{ "answer": "Yes, the mouse is on the left side of the laptop.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000415694.jpg", "index": "sealvqa_spatial_23024", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the mouse on the left side of the laptop?" }, { "from": "gpt", "value": "Yes, the mouse is on the left side of the laptop." } ], "question": "Is the mouse on the left side of the laptop?", "question_id": "sealvqa_spatial_23024", "search": 1, "target_instances": [ { "bbox": [ 275.67, 124.34, 17.69, 9.59 ], "instance_id": "", "name": "mouse" }, { "bbox": [ 334.34, 132.28, 57.22, 22.99 ], "instance_id": "", "name": "laptop" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the mouse on the left side of the laptop?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The knife is below the cup." }
{ "answer": "The knife is below the cup.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000087569.jpg", "index": "sealvqa_spatial_23092", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the knife to the right of the cup?" }, { "from": "gpt", "value": "Yes, the knife is to the right of the cup." }, { "from": "human", "value": "Is the knife above or below the cup?" }, { "from": "gpt", "value": "The knife is below the cup." } ], "question": "Is the knife above or below the cup?", "question_id": "sealvqa_spatial_23092", "search": 1, "target_instances": [ { "bbox": [ 3.54, 252.44, 23.57, 28.09 ], "instance_id": "", "name": "cup" }, { "bbox": [ 276.16, 337.34, 11.68, 55.62 ], "instance_id": "", "name": "knife" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the knife above or below the cup?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the chair is not on the left side of the orange." }
{ "answer": "No, the chair is not on the left side of the orange.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000197934.jpg", "index": "sealvqa_spatial_9686", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the chair on the left side of the orange?" }, { "from": "gpt", "value": "No, the chair is not on the left side of the orange." } ], "question": "Is the chair on the left side of the orange?", "question_id": "sealvqa_spatial_9686", "search": 1, "target_instances": [ { "bbox": [ 508.84, 290.06, 50.47, 184.38 ], "instance_id": "", "name": "chair" }, { "bbox": [ 295.46, 303.77, 24.06, 22.03 ], "instance_id": "", "name": "orange" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the chair on the left side of the orange?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the cell phone is not on the left side of the backpack." }
{ "answer": "No, the cell phone is not on the left side of the backpack.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000386204.jpg", "index": "sealvqa_spatial_3556", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cell phone on the left side of the backpack?" }, { "from": "gpt", "value": "No, the cell phone is not on the left side of the backpack." } ], "question": "Is the cell phone on the left side of the backpack?", "question_id": "sealvqa_spatial_3556", "search": 1, "target_instances": [ { "bbox": [ 196.27, 259.1, 21.54, 25.29 ], "instance_id": "", "name": "cell phone" }, { "bbox": [ 79.63, 195.21, 58.12, 154.2 ], "instance_id": "", "name": "backpack" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cell phone on the left side of the backpack?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the motorcycle is not to the left of the baseball bat." }
{ "answer": "No, the motorcycle is not to the left of the baseball bat.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000413128.jpg", "index": "sealvqa_spatial_16755", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the motorcycle to the left of the baseball bat?" }, { "from": "gpt", "value": "No, the motorcycle is not to the left of the baseball bat." } ], "question": "Is the motorcycle to the left of the baseball bat?", "question_id": "sealvqa_spatial_16755", "search": 1, "target_instances": [ { "bbox": [ 248.65, 229.13, 14.24, 21 ], "instance_id": "", "name": "motorcycle" }, { "bbox": [ 83.47, 220.13, 8.26, 11.1 ], "instance_id": "", "name": "baseball bat" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the motorcycle to the left of the baseball bat?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the broccoli is not above the cup." }
{ "answer": "No, the broccoli is not above the cup.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000399618.jpg", "index": "sealvqa_spatial_26302", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup on the right side of the broccoli?" }, { "from": "gpt", "value": "No, the cup is not on the right side of the broccoli." }, { "from": "human", "value": "Is the broccoli above the cup?" }, { "from": "gpt", "value": "No, the broccoli is not above the cup." } ], "question": "Is the broccoli above the cup?", "question_id": "sealvqa_spatial_26302", "search": 1, "target_instances": [ { "bbox": [ 303.85, 77.14, 79.17, 83.25 ], "instance_id": "", "name": "broccoli" }, { "bbox": [ 206.1, 0.75, 83.45, 55.13 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the broccoli above the cup?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the cup is to the right of the toothbrush." }
{ "answer": "Yes, the cup is to the right of the toothbrush.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000184555.jpg", "index": "sealvqa_spatial_13043", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the cup to the right of the toothbrush?" }, { "from": "gpt", "value": "Yes, the cup is to the right of the toothbrush." } ], "question": "Is the cup to the right of the toothbrush?", "question_id": "sealvqa_spatial_13043", "search": 1, "target_instances": [ { "bbox": [ 85.81, 220.89, 43.59, 57.85 ], "instance_id": "", "name": "toothbrush" }, { "bbox": [ 247.81, 274.36, 55.75, 56.64 ], "instance_id": "", "name": "cup" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the cup to the right of the toothbrush?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the laptop is not on the right side of the person." }
{ "answer": "No, the laptop is not on the right side of the person.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000432604.jpg", "index": "sealvqa_spatial_8226", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the laptop on the right side of the person?" }, { "from": "gpt", "value": "No, the laptop is not on the right side of the person." } ], "question": "Is the laptop on the right side of the person?", "question_id": "sealvqa_spatial_8226", "search": 1, "target_instances": [ { "bbox": [ 197.15, 129.7, 49.48, 12.62 ], "instance_id": "", "name": "laptop" }, { "bbox": [ 418.37, 21.76, 67.8, 146.74 ], "instance_id": "", "name": "person" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the laptop on the right side of the person?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "No, the knife is not above the bowl." }
{ "answer": "No, the knife is not above the bowl.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000243482.jpg", "index": "sealvqa_spatial_7342", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bowl on the left side of the knife?" }, { "from": "gpt", "value": "No, the bowl is not on the left side of the knife." }, { "from": "human", "value": "Is the knife above the bowl?" }, { "from": "gpt", "value": "No, the knife is not above the bowl." } ], "question": "Is the knife above the bowl?", "question_id": "sealvqa_spatial_7342", "search": 1, "target_instances": [ { "bbox": [ 366.09, 294.96, 34.79, 14.37 ], "instance_id": "", "name": "bowl" }, { "bbox": [ 106.93, 360.01, 17.76, 25.83 ], "instance_id": "", "name": "knife" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the knife above the bowl?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "The knife is below the bowl." }
{ "answer": "The knife is below the bowl.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 4, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000416786.jpg", "index": "sealvqa_spatial_4275", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the knife on the left or right side of the bowl?" }, { "from": "gpt", "value": "The knife is on the right side of the bowl." }, { "from": "human", "value": "Is the knife above or below the bowl?" }, { "from": "gpt", "value": "The knife is below the bowl." } ], "question": "Is the knife above or below the bowl?", "question_id": "sealvqa_spatial_4275", "search": 1, "target_instances": [ { "bbox": [ 6.08, 238.79, 116.01, 54.88 ], "instance_id": "", "name": "bowl" }, { "bbox": [ 155.82, 349.64, 65.81, 25.89 ], "instance_id": "", "name": "knife" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the knife above or below the bowl?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
sealvqa_spatial
{ "ground_truth": "Yes, the bus is to the left of the potted plant." }
{ "answer": "Yes, the bus is to the left of the potted plant.", "bbox_format": "absolute_xywh", "bbox_format_description": "Bounding boxes use absolute pixel coordinates in [x, y, width, height] format", "conversation_turns": 2, "data_split": "train", "dataset_type": "spatial_relation", "has_targets": true, "image_path": "coco2017/train2017/000000374174.jpg", "index": "sealvqa_spatial_8901", "original_conversations": [ { "from": "human", "value": "<image>\nAdditional visual information to focus on: <object>; <object>.\nIs the bus to the left of the potted plant?" }, { "from": "gpt", "value": "Yes, the bus is to the left of the potted plant." } ], "question": "Is the bus to the left of the potted plant?", "question_id": "sealvqa_spatial_8901", "search": 1, "target_instances": [ { "bbox": [ 386.05, 119.36, 45.17, 33.7 ], "instance_id": "", "name": "potted plant" }, { "bbox": [ 37.94, 118.71, 122.15, 78.44 ], "instance_id": "", "name": "bus" } ] }
[ { "content": "", "role": "system" }, { "content": "<image>\nIs the bus to the left of the potted plant?\nThink first, call **image_zoom_in_tool** if needed, then answer. Format strictly as: <think>...</think> <tool_call>...</tool_call> (if tools needed) <answer>...</answer> ", "role": "user" } ]
sealvqa_spatial_spatial
visual_toolbox_v2
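A note on reading the rows above: every record stores its objects in `target_instances` as `[x, y, width, height]` boxes in absolute pixels, per the `bbox_format_description` field. The sketch below is a minimal, illustrative way to sanity-check a ground-truth string against those boxes. It assumes the standard image convention of a top-left origin (smaller y means higher in the image) and compares box centers; the helper names and the center-comparison rule are assumptions for illustration, not the dataset's documented labeling logic.

```python
# Minimal sketch (assumptions, not the dataset's documented rule):
# interpret [x, y, width, height] pixel boxes from `target_instances`
# and derive left/right and above/below relations by comparing centers.
# Assumes a top-left image origin, so a smaller y-coordinate is "above".

def center(bbox):
    """Return the (cx, cy) center of an [x, y, w, h] pixel box."""
    x, y, w, h = bbox
    return (x + w / 2.0, y + h / 2.0)

def horizontal_relation(a, b):
    """'left' if box a's center lies left of box b's center, else 'right'."""
    return "left" if center(a)[0] < center(b)[0] else "right"

def vertical_relation(a, b):
    """'above' if box a's center lies above box b's center, else 'below'."""
    return "above" if center(a)[1] < center(b)[1] else "below"

# Example row from the preview above (question_id sealvqa_spatial_3610,
# image coco2017/train2017/000000142360.jpg):
car = [33.49, 292.17, 7.43, 45.61]
skateboard = [139.64, 323.2, 111.49, 69.82]

print(horizontal_relation(car, skateboard))
# -> "left", matching the ground truth
#    "The car is to the left of the skateboard."
```

A fuller checker would also need a tolerance for near-ties and a policy for heavily overlapping boxes, neither of which the preview metadata specifies.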