Upload 21 files
Added plots and training script and YAML
- .gitattributes +17 -0
- BoxF1_curve.png +3 -0
- BoxPR_curve.png +3 -0
- BoxP_curve.png +3 -0
- BoxR_curve.png +3 -0
- args.yaml +105 -0
- benthic_supercategory_detector.yaml +33 -0
- confusion_matrix.png +3 -0
- confusion_matrix_normalized.png +3 -0
- labels.jpg +3 -0
- results.csv +87 -0
- results.png +3 -0
- train.py +25 -0
- train_batch0.jpg +3 -0
- train_batch1.jpg +3 -0
- train_batch2.jpg +3 -0
- val_batch0_labels.jpg +3 -0
- val_batch0_pred.jpg +3 -0
- val_batch1_labels.jpg +3 -0
- val_batch1_pred.jpg +3 -0
- val_batch2_labels.jpg +3 -0
- val_batch2_pred.jpg +3 -0
.gitattributes
CHANGED
@@ -33,3 +33,20 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+BoxF1_curve.png filter=lfs diff=lfs merge=lfs -text
+BoxP_curve.png filter=lfs diff=lfs merge=lfs -text
+BoxPR_curve.png filter=lfs diff=lfs merge=lfs -text
+BoxR_curve.png filter=lfs diff=lfs merge=lfs -text
+confusion_matrix_normalized.png filter=lfs diff=lfs merge=lfs -text
+confusion_matrix.png filter=lfs diff=lfs merge=lfs -text
+labels.jpg filter=lfs diff=lfs merge=lfs -text
+results.png filter=lfs diff=lfs merge=lfs -text
+train_batch0.jpg filter=lfs diff=lfs merge=lfs -text
+train_batch1.jpg filter=lfs diff=lfs merge=lfs -text
+train_batch2.jpg filter=lfs diff=lfs merge=lfs -text
+val_batch0_labels.jpg filter=lfs diff=lfs merge=lfs -text
+val_batch0_pred.jpg filter=lfs diff=lfs merge=lfs -text
+val_batch1_labels.jpg filter=lfs diff=lfs merge=lfs -text
+val_batch1_pred.jpg filter=lfs diff=lfs merge=lfs -text
+val_batch2_labels.jpg filter=lfs diff=lfs merge=lfs -text
+val_batch2_pred.jpg filter=lfs diff=lfs merge=lfs -text
BoxF1_curve.png ADDED (Git LFS)
BoxPR_curve.png ADDED (Git LFS)
BoxP_curve.png ADDED (Git LFS)
BoxR_curve.png ADDED (Git LFS)
args.yaml
ADDED
@@ -0,0 +1,105 @@
+task: detect
+mode: train
+model: yolo11x.yaml
+data: /data/james/reduced_experiment/data6/benthic_supercategory_detector.yaml
+epochs: 100
+time: null
+patience: 15
+batch: 32
+imgsz: 640
+save: true
+save_period: -1
+cache: false
+device: '0'
+workers: 8
+project: null
+name: train
+exist_ok: false
+pretrained: yolo11x.pt
+optimizer: auto
+verbose: true
+seed: 0
+deterministic: true
+single_cls: false
+rect: false
+cos_lr: false
+close_mosaic: 10
+resume: false
+amp: true
+fraction: 1.0
+profile: false
+freeze: null
+multi_scale: false
+overlap_mask: true
+mask_ratio: 4
+dropout: 0.0
+val: true
+split: val
+save_json: false
+conf: null
+iou: 0.7
+max_det: 300
+half: false
+dnn: false
+plots: true
+source: null
+vid_stride: 1
+stream_buffer: false
+visualize: false
+augment: false
+agnostic_nms: false
+classes: null
+retina_masks: false
+embed: null
+show: false
+save_frames: false
+save_txt: false
+save_conf: false
+save_crop: false
+show_labels: true
+show_conf: true
+show_boxes: true
+line_width: null
+format: torchscript
+keras: false
+optimize: false
+int8: false
+dynamic: false
+simplify: true
+opset: null
+workspace: null
+nms: false
+lr0: 0.01
+lrf: 0.01
+momentum: 0.937
+weight_decay: 0.0005
+warmup_epochs: 3.0
+warmup_momentum: 0.8
+warmup_bias_lr: 0.1
+box: 7.5
+cls: 0.5
+dfl: 1.5
+pose: 12.0
+kobj: 1.0
+nbs: 64
+hsv_h: 0.015
+hsv_s: 0.7
+hsv_v: 0.4
+degrees: 0.0
+translate: 0.1
+scale: 0.5
+shear: 0.0
+perspective: 0.0
+flipud: 0.0
+fliplr: 0.5
+bgr: 0.0
+mosaic: 1.0
+mixup: 0.0
+cutmix: 0.0
+copy_paste: 0.0
+copy_paste_mode: flip
+auto_augment: randaugment
+erasing: 0.4
+cfg: null
+tracker: botsort.yaml
+save_dir: runs/detect/train
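Note: args.yaml is the full argument dump Ultralytics writes for the run; only the handful of values set in train.py (data, epochs, imgsz, batch, patience, device, plots) were passed explicitly, and the rest were filled in automatically by the library. A minimal sketch of relaunching the run from this file follows; it assumes ultralytics and PyYAML are installed and that the data: path is reachable on the machine doing the training.

# Minimal sketch: re-launch training with the key settings recorded in args.yaml.
# Assumes ultralytics + PyYAML are installed and the dataset path exists locally.
import yaml
from ultralytics import YOLO

with open("args.yaml") as f:
    args = yaml.safe_load(f)

model = YOLO("yolo11x.yaml").load("yolo11x.pt")  # same initialization as train.py
model.train(
    data=args["data"],          # benthic_supercategory_detector.yaml
    epochs=args["epochs"],      # 100
    imgsz=args["imgsz"],        # 640
    batch=args["batch"],        # 32
    patience=args["patience"],  # 15
    device=args["device"],      # '0'
    plots=args["plots"],        # True
)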
benthic_supercategory_detector.yaml
ADDED
@@ -0,0 +1,33 @@
+train: /data/james/reduced_experiment/data6/images/train
+val: /data/james/reduced_experiment/data6/images/val
+test: /data/james/reduced_experiment/data6/images/test
+names:
+  0: Sea Anemones
+  1: Bony fishes
+  2: Flatfish
+  3: Eels
+  4: Gastropods
+  5: Sharks
+  6: Rays and Skates
+  7: Chimaeras
+  8: Sea stars
+  9: Feather stars and sea lilies
+  10: Sea cucumbers
+  11: Urchins
+  12: Glass sponges
+  13: Sea fans
+  14: Soft corals
+  15: Sea pens
+  16: Stony corals
+  17: Black corals
+  18: Crabs
+  19: Shrimps
+  20: Squat lobsters
+  21: Barnacles
+  22: Sea spiders
+  23: Worms
+  24: Brittle Stars
+  25: Tube-Dwelling Anemones
+  26: Demosponges
+  27: Zoanthids
+  28: Clams
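Note: the dataset YAML defines 29 benthic supercategory classes and absolute paths to the train/val/test image folders. A quick sanity-check sketch is shown below; the /data/james/... paths live on the training machine, so adjust them if you run this elsewhere.

# Sketch: verify the dataset YAML (assumes PyYAML; paths are machine-specific).
import os
import yaml

with open("benthic_supercategory_detector.yaml") as f:
    cfg = yaml.safe_load(f)

print(f"{len(cfg['names'])} classes, e.g. {list(cfg['names'].values())[:5]} ...")
for split in ("train", "val", "test"):
    path = cfg[split]
    status = "ok" if os.path.isdir(path) else "MISSING"
    print(f"{split}: {path} [{status}]")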
confusion_matrix.png ADDED (Git LFS)
confusion_matrix_normalized.png ADDED (Git LFS)
labels.jpg ADDED (Git LFS)
results.csv
ADDED
@@ -0,0 +1,87 @@
+epoch,time,train/box_loss,train/cls_loss,train/dfl_loss,metrics/precision(B),metrics/recall(B),metrics/mAP50(B),metrics/mAP50-95(B),val/box_loss,val/cls_loss,val/dfl_loss,lr/pg0,lr/pg1,lr/pg2
+1,510.554,1.30685,2.15905,1.16091,0.44353,0.41404,0.37588,0.24114,1.28053,1.59709,1.12829,0.00332991,0.00332991,0.00332991
+2,1017.27,1.28684,1.49255,1.12629,0.53222,0.46377,0.45652,0.29443,1.2561,1.35019,1.10536,0.00659728,0.00659728,0.00659728
+3,1519.22,1.36116,1.58479,1.16414,0.3943,0.27602,0.2387,0.14081,1.4931,2.03336,1.26294,0.00979865,0.00979865,0.00979865
+4,2022.53,1.40732,1.65977,1.19716,0.51103,0.40739,0.40502,0.25371,1.32934,1.45307,1.16041,0.009703,0.009703,0.009703
+5,2524.71,1.33949,1.49103,1.16751,0.48828,0.45338,0.43642,0.27284,1.28413,1.37177,1.13542,0.009604,0.009604,0.009604
+6,3027.32,1.29554,1.38126,1.1538,0.53645,0.49798,0.50462,0.32757,1.20891,1.22063,1.10976,0.009505,0.009505,0.009505
+7,3529.75,1.26947,1.30713,1.13668,0.54285,0.53327,0.5251,0.35011,1.18996,1.15897,1.10379,0.009406,0.009406,0.009406
+8,4034.11,1.24768,1.25562,1.13022,0.60596,0.5362,0.55886,0.37178,1.16186,1.10814,1.09093,0.009307,0.009307,0.009307
+9,4536.44,1.23034,1.22038,1.12064,0.56352,0.54574,0.5692,0.38294,1.15313,1.07449,1.08059,0.009208,0.009208,0.009208
+10,5038.76,1.21648,1.1786,1.113,0.62196,0.57598,0.59508,0.41022,1.12363,1.03143,1.07176,0.009109,0.009109,0.009109
+11,5541.38,1.19754,1.14579,1.1059,0.5835,0.57695,0.59718,0.40969,1.12129,1.02792,1.07083,0.00901,0.00901,0.00901
+12,6044.15,1.1868,1.12417,1.10507,0.58934,0.59779,0.6155,0.42634,1.10641,0.98573,1.06432,0.008911,0.008911,0.008911
+13,6546.74,1.17396,1.09692,1.09831,0.61435,0.59539,0.62902,0.43504,1.09882,0.96664,1.06171,0.008812,0.008812,0.008812
+14,7050.07,1.16975,1.07866,1.0933,0.60353,0.62017,0.63471,0.44337,1.09238,0.95521,1.05389,0.008713,0.008713,0.008713
+15,7552.98,1.15805,1.06649,1.08789,0.62283,0.62513,0.64287,0.45053,1.0836,0.93621,1.05452,0.008614,0.008614,0.008614
+16,8056.31,1.15431,1.0454,1.08718,0.66376,0.60903,0.65264,0.45855,1.07368,0.9262,1.05265,0.008515,0.008515,0.008515
+17,8559.8,1.14377,1.02509,1.08117,0.65963,0.6216,0.64874,0.45766,1.06676,0.91424,1.04807,0.008416,0.008416,0.008416
+18,9063.86,1.13752,1.01792,1.08028,0.6665,0.62769,0.66195,0.46888,1.05973,0.90473,1.04425,0.008317,0.008317,0.008317
+19,9567.09,1.13613,1.0084,1.07918,0.63458,0.62841,0.66378,0.47306,1.05678,0.89485,1.04546,0.008218,0.008218,0.008218
+20,10071.2,1.12076,0.9888,1.07386,0.63218,0.63397,0.66521,0.47502,1.05656,0.88554,1.04396,0.008119,0.008119,0.008119
+21,10575.2,1.11559,0.97077,1.07101,0.65344,0.63715,0.67231,0.48005,1.05116,0.87498,1.04015,0.00802,0.00802,0.00802
+22,11079,1.11356,0.9633,1.06862,0.64702,0.63885,0.67195,0.48193,1.04521,0.87449,1.04102,0.007921,0.007921,0.007921
+23,11581.9,1.1101,0.95435,1.06664,0.63486,0.64412,0.67473,0.48423,1.04586,0.86954,1.04058,0.007822,0.007822,0.007822
+24,12086.4,1.10676,0.94542,1.06272,0.64733,0.64905,0.67977,0.48922,1.04256,0.86422,1.03846,0.007723,0.007723,0.007723
+25,12590.6,1.09599,0.93515,1.05843,0.65794,0.64682,0.67949,0.48827,1.04221,0.8617,1.03735,0.007624,0.007624,0.007624
+26,13094.1,1.09235,0.92512,1.05701,0.63741,0.66174,0.68231,0.48988,1.04043,0.85807,1.03593,0.007525,0.007525,0.007525
+27,13598.5,1.08785,0.91546,1.05384,0.62026,0.67472,0.68482,0.49345,1.03731,0.85311,1.03512,0.007426,0.007426,0.007426
+28,14102.9,1.08087,0.90302,1.04947,0.62051,0.67608,0.68471,0.49376,1.0361,0.85043,1.03326,0.007327,0.007327,0.007327
+29,14605.2,1.08046,0.89752,1.05356,0.62819,0.67914,0.68569,0.49519,1.0361,0.85112,1.03425,0.007228,0.007228,0.007228
+30,15106.8,1.07106,0.88671,1.04584,0.65487,0.65729,0.68619,0.49574,1.03416,0.85078,1.03282,0.007129,0.007129,0.007129
+31,15607.6,1.06974,0.88124,1.04637,0.65928,0.65613,0.6863,0.49723,1.03398,0.85012,1.03261,0.00703,0.00703,0.00703
+32,16108.9,1.06477,0.87268,1.04426,0.65815,0.66032,0.68678,0.49856,1.03361,0.84915,1.03294,0.006931,0.006931,0.006931
+33,16609.8,1.06225,0.86577,1.0412,0.66294,0.65978,0.68715,0.49847,1.03342,0.84755,1.03317,0.006832,0.006832,0.006832
+34,17111.5,1.05375,0.85422,1.03709,0.66591,0.66197,0.68842,0.50029,1.03273,0.84595,1.03246,0.006733,0.006733,0.006733
+35,17612.4,1.04974,0.84999,1.0371,0.66308,0.66147,0.68922,0.50078,1.03166,0.84467,1.0321,0.006634,0.006634,0.006634
+36,18114.5,1.04413,0.8383,1.0363,0.66837,0.65858,0.68955,0.50131,1.03134,0.84395,1.03206,0.006535,0.006535,0.006535
+37,18615.8,1.04017,0.8319,1.03218,0.66578,0.66159,0.69003,0.50231,1.03121,0.84268,1.03221,0.006436,0.006436,0.006436
+38,19118.2,1.03658,0.82061,1.02887,0.67257,0.65711,0.69048,0.50193,1.03056,0.84151,1.03197,0.006337,0.006337,0.006337
+39,19619.4,1.03593,0.82018,1.02986,0.67072,0.65973,0.69102,0.50235,1.03043,0.84057,1.03206,0.006238,0.006238,0.006238
+40,20120.8,1.02588,0.81129,1.02611,0.6526,0.67487,0.69088,0.50273,1.03016,0.83991,1.03195,0.006139,0.006139,0.006139
+41,20622,1.02525,0.79951,1.02294,0.65118,0.67735,0.69085,0.50256,1.03008,0.83948,1.03187,0.00604,0.00604,0.00604
+42,21123.8,1.02172,0.79641,1.0229,0.6572,0.67094,0.69107,0.50269,1.02945,0.83909,1.03146,0.005941,0.005941,0.005941
+43,21625,1.01192,0.78632,1.01897,0.6611,0.66762,0.69123,0.50268,1.02944,0.83868,1.03148,0.005842,0.005842,0.005842
+44,22126.3,1.01056,0.77964,1.01938,0.66352,0.66294,0.69115,0.50246,1.02917,0.83793,1.03162,0.005743,0.005743,0.005743
+45,22627,1.00724,0.7758,1.01725,0.65565,0.66823,0.69196,0.50299,1.0291,0.83781,1.03193,0.005644,0.005644,0.005644
+46,23129.8,1.00286,0.7697,1.0171,0.6574,0.66802,0.69199,0.50345,1.0283,0.83685,1.03187,0.005545,0.005545,0.005545
+47,23631.4,0.99786,0.75934,1.01523,0.64982,0.67067,0.69199,0.50421,1.02803,0.83618,1.03194,0.005446,0.005446,0.005446
+48,24133.6,0.9946,0.75655,1.01117,0.65606,0.67289,0.69214,0.50451,1.02815,0.83567,1.03191,0.005347,0.005347,0.005347
+49,24635.2,0.98705,0.7457,1.01128,0.65131,0.67489,0.69213,0.50508,1.02824,0.83474,1.03217,0.005248,0.005248,0.005248
+50,25137.1,0.98114,0.74137,1.00628,0.65281,0.67363,0.69235,0.50493,1.02816,0.83429,1.03207,0.005149,0.005149,0.005149
+51,25638.5,0.97807,0.72968,1.00383,0.65615,0.67532,0.69231,0.50515,1.02811,0.83359,1.03229,0.00505,0.00505,0.00505
+52,26140.4,0.97351,0.7209,0.99916,0.65387,0.67617,0.69284,0.50556,1.02819,0.83309,1.03236,0.004951,0.004951,0.004951
+53,26642.3,0.9681,0.71641,1.00145,0.66045,0.67171,0.69318,0.50591,1.02827,0.83259,1.03255,0.004852,0.004852,0.004852
+54,27145.2,0.96786,0.71513,1.0004,0.66386,0.66794,0.69358,0.50604,1.02847,0.83189,1.0328,0.004753,0.004753,0.004753
+55,27647,0.95948,0.70065,0.99483,0.66463,0.66884,0.69346,0.50588,1.02828,0.83126,1.0329,0.004654,0.004654,0.004654
+56,28148.4,0.95033,0.69389,0.99554,0.66562,0.67007,0.69344,0.50611,1.02805,0.83043,1.03291,0.004555,0.004555,0.004555
+57,28649.5,0.94768,0.68838,0.99279,0.66689,0.67135,0.69361,0.50673,1.02803,0.82963,1.03319,0.004456,0.004456,0.004456
+58,29151.6,0.94205,0.6821,0.98967,0.66805,0.6716,0.6938,0.50672,1.0282,0.82896,1.03333,0.004357,0.004357,0.004357
+59,29654.8,0.93964,0.67908,0.98769,0.67152,0.66761,0.69442,0.50747,1.02792,0.82863,1.03312,0.004258,0.004258,0.004258
+60,30158.7,0.93193,0.66423,0.98654,0.6787,0.66411,0.6946,0.50785,1.02799,0.82823,1.03351,0.004159,0.004159,0.004159
+61,30662,0.92707,0.66051,0.9856,0.68397,0.6637,0.69488,0.50829,1.02851,0.82775,1.03412,0.00406,0.00406,0.00406
+62,31165.7,0.92242,0.65154,0.98049,0.68331,0.66612,0.69496,0.50823,1.02883,0.82718,1.03454,0.003961,0.003961,0.003961
+63,31668.3,0.91847,0.64737,0.97977,0.67874,0.67037,0.69513,0.50862,1.02905,0.8272,1.03493,0.003862,0.003862,0.003862
+64,32171.9,0.91175,0.63783,0.97596,0.68245,0.66632,0.6954,0.50858,1.02927,0.82665,1.0351,0.003763,0.003763,0.003763
+65,32674.8,0.90749,0.63245,0.97551,0.68432,0.66377,0.69516,0.50835,1.02993,0.82654,1.03539,0.003664,0.003664,0.003664
+66,33178,0.90323,0.62658,0.97675,0.67974,0.66752,0.69505,0.50819,1.03026,0.82656,1.036,0.003565,0.003565,0.003565
+67,33681.2,0.89664,0.61974,0.96986,0.67276,0.67294,0.6951,0.50849,1.03052,0.82663,1.03641,0.003466,0.003466,0.003466
+68,34185.1,0.89004,0.6136,0.96812,0.67439,0.67397,0.69531,0.50824,1.03084,0.82694,1.03663,0.003367,0.003367,0.003367
+69,34688,0.88526,0.60789,0.96877,0.67402,0.67407,0.69527,0.50899,1.03097,0.82716,1.03693,0.003268,0.003268,0.003268
+70,35191.6,0.87945,0.60033,0.96206,0.66435,0.67416,0.69525,0.50885,1.03124,0.8281,1.037,0.003169,0.003169,0.003169
+71,35695,0.87607,0.59462,0.96056,0.66396,0.6745,0.69555,0.50947,1.03163,0.82885,1.03711,0.00307,0.00307,0.00307
+72,36198.5,0.86472,0.58419,0.95745,0.6672,0.67195,0.69537,0.50903,1.03215,0.82959,1.03741,0.002971,0.002971,0.002971
+73,36702,0.8621,0.57787,0.95596,0.66833,0.6744,0.69535,0.50892,1.0324,0.83019,1.03756,0.002872,0.002872,0.002872
+74,37205.8,0.85442,0.56731,0.95428,0.6722,0.67286,0.69487,0.50873,1.03286,0.83105,1.0377,0.002773,0.002773,0.002773
+75,37709.6,0.84689,0.56383,0.95088,0.6772,0.67005,0.69473,0.50885,1.03352,0.83235,1.03814,0.002674,0.002674,0.002674
+76,38213.9,0.83959,0.55903,0.94793,0.67615,0.66983,0.69462,0.50872,1.0339,0.83386,1.0384,0.002575,0.002575,0.002575
+77,38716.9,0.83483,0.54678,0.94539,0.67405,0.66972,0.69429,0.50839,1.03452,0.83588,1.03897,0.002476,0.002476,0.002476
+78,39220.3,0.82763,0.54378,0.94146,0.67456,0.66655,0.6943,0.50865,1.03505,0.83764,1.03894,0.002377,0.002377,0.002377
+79,39724.6,0.82625,0.54034,0.93981,0.67129,0.66825,0.69374,0.50847,1.03558,0.83948,1.03891,0.002278,0.002278,0.002278
+80,40228.6,0.81373,0.52765,0.93614,0.66974,0.67239,0.69367,0.50877,1.03617,0.84189,1.03887,0.002179,0.002179,0.002179
+81,40731.6,0.81166,0.52264,0.93616,0.67166,0.66865,0.69333,0.50872,1.03681,0.84447,1.0391,0.00208,0.00208,0.00208
+82,41235.6,0.80028,0.51377,0.93028,0.67609,0.66681,0.69317,0.50798,1.03787,0.84739,1.03964,0.001981,0.001981,0.001981
+83,41739,0.79979,0.51254,0.93376,0.67428,0.66829,0.69293,0.50795,1.03873,0.85094,1.04023,0.001882,0.001882,0.001882
+84,42242.5,0.79001,0.50439,0.92741,0.67557,0.66592,0.69234,0.50754,1.03973,0.85417,1.04065,0.001783,0.001783,0.001783
+85,42745.9,0.78282,0.49551,0.9286,0.67609,0.6652,0.69187,0.50714,1.04067,0.85733,1.04136,0.001684,0.001684,0.001684
+86,43249.3,0.77527,0.49049,0.92442,0.68484,0.65681,0.69147,0.50689,1.04178,0.86059,1.04207,0.001585,0.001585,0.001585
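Note: results.csv records per-epoch losses and validation metrics. Validation mAP50-95 peaks at about 0.509 at epoch 71 and does not improve over the following 15 epochs, which is consistent with the patience: 15 setting ending the run at epoch 86. A small sketch for plotting the mAP curves from this file is below; it assumes pandas and matplotlib are available.

# Sketch: plot validation mAP curves from results.csv (assumes pandas + matplotlib).
import pandas as pd
import matplotlib.pyplot as plt

df = pd.read_csv("results.csv")
df.columns = df.columns.str.strip()  # guard against stray whitespace in column names

best = df.loc[df["metrics/mAP50-95(B)"].idxmax()]
print(f"Best epoch: {int(best['epoch'])}  mAP50-95: {best['metrics/mAP50-95(B)']:.3f}")

plt.plot(df["epoch"], df["metrics/mAP50(B)"], label="mAP50")
plt.plot(df["epoch"], df["metrics/mAP50-95(B)"], label="mAP50-95")
plt.xlabel("epoch")
plt.ylabel("mAP")
plt.legend()
plt.savefig("map_curves.png", dpi=150)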
results.png ADDED (Git LFS)
train.py
ADDED
@@ -0,0 +1,25 @@
+# %pip install comet_ml --quiet
+import comet_ml
+import torch
+from ultralytics import YOLO
+import ultralytics.data as data
+import ultralytics.data.dataset as dataset
+import ultralytics.data.build as build
+
+import numpy as np
+comet_ml.login(project_name='reduced_images_benthic_supercategory_detector6')
+#comet_ml.start(mode="get", experiment_keyd="87329baa84f547feb8f249cd3991b51d")
+
+import os
+os.environ["CUDA_VISIBLE_DEVICES"] = "1"
+
+print("CUDA Available:", torch.cuda.is_available())
+if torch.cuda.is_available():
+    print("GPU Name:", torch.cuda.get_device_name(0))
+
+
+model = YOLO("yolo11x.yaml")
+model = YOLO("yolo11x.pt")  # Load a pretrained model
+model = YOLO("yolo11x.yaml").load("yolo11x.pt")
+
+results = model.train(data='/data/james/reduced_experiment/data6/benthic_supercategory_detector.yaml', batch=32, epochs=100, imgsz=640, patience=15, val=True, device=0, plots=True)
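Note: the final model assignment in train.py (building the yolo11x architecture and loading the pretrained yolo11x.pt weights) supersedes the two lines above it, and the run writes its outputs to the save_dir recorded in args.yaml. A sketch of evaluating the resulting checkpoint on the held-out test split is below; the best.pt path follows the usual Ultralytics layout and is an assumption if the run directory was moved or renamed.

# Sketch: evaluate the best checkpoint on the test split from the dataset YAML.
# Assumes the default Ultralytics output layout (runs/detect/train/weights/best.pt).
from ultralytics import YOLO

model = YOLO("runs/detect/train/weights/best.pt")
metrics = model.val(
    data="/data/james/reduced_experiment/data6/benthic_supercategory_detector.yaml",
    split="test",   # use the test: entry instead of the default val split
    imgsz=640,
    device=0,
)
print("test mAP50-95:", metrics.box.map)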
train_batch0.jpg ADDED (Git LFS)
train_batch1.jpg ADDED (Git LFS)
train_batch2.jpg ADDED (Git LFS)
val_batch0_labels.jpg ADDED (Git LFS)
val_batch0_pred.jpg ADDED (Git LFS)
val_batch1_labels.jpg ADDED (Git LFS)
val_batch1_pred.jpg ADDED (Git LFS)
val_batch2_labels.jpg ADDED (Git LFS)
val_batch2_pred.jpg ADDED (Git LFS)