Added cfg, labels, tree ... files for Yolo9000

This commit is contained in:
AlexeyAB 2017-07-28 00:24:10 +03:00
parent 576dbe12e6
commit 1fd398da4f
12 changed files with 29003 additions and 1 deletion

View File

@ -0,0 +1,5 @@
:: Run YOLO9000 single-image detection: combine9k dataset descriptor,
:: yolo9000 network definition and weights, applied to data/dog.jpg.
darknet.exe detector test cfg/combine9k.data yolo9000.cfg yolo9000.weights data/dog.jpg
:: Keep the console window open so the output can be read.
pause

View File

@ -0,0 +1,7 @@
:: Run YOLO9000 in demo (video) mode on street4k.mp4, using the combine9k
:: dataset descriptor and the yolo9000 network definition and weights.
darknet.exe detector demo cfg/combine9k.data yolo9000.cfg yolo9000.weights street4k.mp4
:: Keep the console window open so the output can be read.
pause

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,80 @@
5177
3768
3802
3800
4107
4072
4071
3797
4097
2645
5150
2644
3257
2523
6527
6866
6912
7342
7255
7271
7217
6858
7343
7233
3704
4374
3641
5001
3899
2999
2631
5141
2015
1133
1935
1930
5144
5143
2371
3916
3745
3640
4749
4736
4735
3678
58
42
771
81
152
141
786
700
218
791
2518
2521
3637
2458
2505
2519
3499
2837
3503
2597
3430
2080
5103
5111
5102
3013
5096
1102
3218
4010
2266
1127
5122
2360

View File

@ -0,0 +1,10 @@
# Dataset descriptor for the combined 9k (9418-class) YOLO9000 setup.
classes= 9418
#train = /home/pjreddie/data/coco/trainvalno5k.txt
# Training image list; the commented-out line above is the original COCO list.
train = data/combine9k.train.list
# Validation file list -- absolute path from the original author's machine;
# NOTE(review): must be adapted for a local setup.
valid = /home/pjreddie/data/imagenet/det.val.files
labels = data/9k.labels
names = data/9k.names
# Presumably the directory where training checkpoints are written -- TODO confirm.
backup = backup/
map = data/inet9k.map
eval = imagenet
results = results

View File

@ -0,0 +1,200 @@
2687
4107
8407
7254
42
6797
127
2268
2442
3704
260
1970
58
4443
2661
2043
2039
4858
4007
6858
8408
166
2523
3768
4347
6527
2446
5005
3274
3678
4918
709
4072
8428
7223
2251
3802
3848
7271
2677
8267
2849
2518
2738
3746
5105
3430
3503
2249
1841
2032
2358
122
3984
4865
3246
5095
6912
6878
8467
2741
1973
3057
7217
1872
44
2452
3637
2704
6917
2715
6734
2325
6864
6677
2035
1949
338
2664
5122
1844
784
2223
7188
2719
2670
4830
158
4818
7228
1965
7342
786
2095
8281
8258
7406
3915
8382
2437
2837
82
6871
1876
7447
8285
5007
2740
3463
5103
3755
4910
6809
3800
118
3396
3092
2709
81
7105
4036
2366
1846
5177
2684
64
2041
3919
700
3724
1742
39
807
7184
2256
235
2778
2996
2030
3714
7167
2369
6705
6861
5096
2597
2166
2036
3228
3747
2711
8300
2226
7153
7255
2631
7109
8242
7445
3776
3803
3690
2025
2521
2316
7190
8249
3352
2639
2887
100
4219
3344
5008
7224
3351
2434
2074
2034
8304
5004
6868
5102
2645
4071
2716
2717
7420
3499
3763
5084
2676
2046
5107
5097
3944
4097
7132
3956
7343

View File

@ -0,0 +1,218 @@
# YOLO9000 network definition (Darknet cfg format); '#' lines are comments.
[net]
# Testing
batch=1
subdivisions=1
# Training
# batch=64
# subdivisions=8
# NOTE(review): batch/subdivisions appear twice in this section -- the lines
# below repeat the testing values already set above; confirm which occurrence
# the parser honours.
batch=1
subdivisions=1
# Network input resolution and channel count.
height=544
width=544
channels=3
# SGD hyper-parameters.
momentum=0.9
decay=0.0005
learning_rate=0.001
burn_in=1000
max_batches = 500200
# Step schedule: multiply the learning rate by .1 at 400k and 450k batches.
policy=steps
steps=400000,450000
scales=.1,.1
# Data-augmentation ranges.
hue=.1
saturation=.75
exposure=.75
# --- Feature extractor: 3x3 convolutions interleaved with 1x1 squeeze
# layers and 2x2 max-pooling downsampling ---
[convolutional]
batch_normalize=1
filters=32
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=64
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky
# --- Detection head: 1x1 linear convolution.
# 28269 output filters = num*(coords+1+classes) = 3*(4+1+9418),
# consistent with the [region] settings below.
[convolutional]
filters=28269
size=1
stride=1
pad=1
activation=linear
# Region layer: 3 anchor boxes, 9418 classes organised by the
# data/9k.tree hierarchy referenced at the bottom.
[region]
anchors = 0.77871, 1.14074, 3.00525, 4.31277, 9.22725, 9.61974
bias_match=1
classes=9418
coords=4
num=3
softmax=1
jitter=.2
rescore=1
object_scale=5
noobject_scale=1
class_scale=1
coord_scale=1
thresh = .6
absolute=1
random=1
tree=data/9k.tree
map = data/coco9k.map

10
cfg/combine9k.data Normal file
View File

@ -0,0 +1,10 @@
# Dataset descriptor for the combined 9k (9418-class) YOLO9000 setup.
classes= 9418
#train = /home/pjreddie/data/coco/trainvalno5k.txt
# Training image list; the commented-out line above is the original COCO list.
train = data/combine9k.train.list
# Validation file list -- absolute path from the original author's machine;
# NOTE(review): must be adapted for a local setup.
valid = /home/pjreddie/data/imagenet/det.val.files
labels = data/9k.labels
names = data/9k.names
# Presumably the directory where training checkpoints are written -- TODO confirm.
backup = backup/
map = data/inet9k.map
eval = imagenet
results = results

218
cfg/yolo9000.cfg Normal file
View File

@ -0,0 +1,218 @@
# YOLO9000 network definition (Darknet cfg format); '#' lines are comments.
[net]
# Testing
batch=1
subdivisions=1
# Training
# batch=64
# subdivisions=8
# NOTE(review): batch/subdivisions appear twice in this section -- the lines
# below repeat the testing values already set above; confirm which occurrence
# the parser honours.
batch=1
subdivisions=1
# Network input resolution and channel count.
height=544
width=544
channels=3
# SGD hyper-parameters.
momentum=0.9
decay=0.0005
learning_rate=0.001
burn_in=1000
max_batches = 500200
# Step schedule: multiply the learning rate by .1 at 400k and 450k batches.
policy=steps
steps=400000,450000
scales=.1,.1
# Data-augmentation ranges.
hue=.1
saturation=.75
exposure=.75
# --- Feature extractor: 3x3 convolutions interleaved with 1x1 squeeze
# layers and 2x2 max-pooling downsampling ---
[convolutional]
batch_normalize=1
filters=32
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=64
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=64
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=128
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=128
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=256
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=256
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=3
stride=1
pad=1
activation=leaky
[maxpool]
size=2
stride=2
[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=512
size=1
stride=1
pad=1
activation=leaky
[convolutional]
batch_normalize=1
filters=1024
size=3
stride=1
pad=1
activation=leaky
# --- Detection head: 1x1 linear convolution.
# 28269 output filters = num*(coords+1+classes) = 3*(4+1+9418),
# consistent with the [region] settings below.
[convolutional]
filters=28269
size=1
stride=1
pad=1
activation=linear
# Region layer: 3 anchor boxes, 9418 classes organised by the
# data/9k.tree hierarchy referenced at the bottom.
[region]
anchors = 0.77871, 1.14074, 3.00525, 4.31277, 9.22725, 9.61974
bias_match=1
classes=9418
coords=4
num=3
softmax=1
jitter=.2
rescore=1
object_scale=5
noobject_scale=1
class_scale=1
coord_scale=1
thresh = .6
absolute=1
random=1
tree=data/9k.tree
map = data/coco9k.map

View File

@ -543,7 +543,7 @@ void show_image_cv_ipl(IplImage *disp, const char *name)
//printf("\n cvCreateVideoWriter, DST output_video = %p \n", output_video);
}
//cvWriteFrame(output_video, disp); // comment this line to improve FPS !!!
cvWriteFrame(output_video, disp); // comment this line to improve FPS !!!
printf("\n cvWriteFrame \n");
}