ai-face-detection
This is a simple face-detection project built with face-api.js and TensorFlow.js.
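For context, here is a minimal sketch of how such a pipeline is usually wired up in the browser with face-api.js. The /models directory, the video element id, and the choice of the tiny face detector plus the tiny 68-point landmark net are illustrative assumptions, not settings taken from this repository.

JavaScript
import * as faceapi from 'face-api.js';

const MODEL_URL = '/models'; // assumed location of the manifest + shard files

async function run() {
  // Load the tiny face detector and the tiny 68-point landmark net.
  await faceapi.nets.tinyFaceDetector.loadFromUri(MODEL_URL);
  await faceapi.nets.faceLandmark68TinyNet.loadFromUri(MODEL_URL);

  // Assumed <video id="cam" autoplay muted playsinline> element on the page.
  const video = document.getElementById('cam');
  video.srcObject = await navigator.mediaDevices.getUserMedia({ video: true });
  await video.play();

  // Detect every face and its 68 landmarks (true = use the tiny landmark net).
  const results = await faceapi
    .detectAllFaces(video, new faceapi.TinyFaceDetectorOptions())
    .withFaceLandmarks(true);

  console.log(`${results.length} face(s) found`);
}

run().catch(console.error);

loadFromUri fetches each model's weights manifest (such as the one below) and then the binary shard files it points to.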
The JSON below is the TensorFlow.js weights manifest for the tiny 68-point face landmark model. Each entry records a tensor's name, shape, dtype, and the uint8 quantization parameters (scale and min) used to compress it; "paths" names the binary shard that holds the raw bytes.

JSON
[
{
"weights": [
{
"name": "dense0/conv0/filters",
"shape": [3, 3, 3, 32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008194216092427571,
"min": -0.9423348506291708
}
},
{
"name": "dense0/conv0/bias",
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006839508168837603,
"min": -0.8412595047670252
}
},
{
"name": "dense0/conv1/depthwise_filter",
"shape": [3, 3, 32, 1],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.009194007106855804,
"min": -1.2779669878529567
}
},
{
"name": "dense0/conv1/pointwise_filter",
"shape": [1, 1, 32, 32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0036026100317637128,
"min": -0.3170296827952067
}
},
{
"name": "dense0/conv1/bias",
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.000740380117706224,
"min": -0.06367269012273527
}
},
{
"name": "dense0/conv2/depthwise_filter",
"shape": [3, 3, 32, 1],
"dtype": "float32",
"quantization": { "dtype": "uint8", "scale": 1, "min": 0 }
},
{
"name": "dense0/conv2/pointwise_filter",
"shape": [1, 1, 32, 32],
"dtype": "float32",
"quantization": { "dtype": "uint8", "scale": 1, "min": 0 }
},
{
"name": "dense0/conv2/bias",
"shape": [32],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0037702228508743585,
"min": -0.6220867703942692
}
},
{
"name": "dense1/conv0/depthwise_filter",
"shape": [3, 3, 32, 1],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0033707996209462483,
"min": -0.421349952618281
}
},
{
"name": "dense1/conv0/pointwise_filter",
"shape": [1, 1, 32, 64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014611541991140328,
"min": -1.8556658328748217
}
},
{
"name": "dense1/conv0/bias",
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002832523046755323,
"min": -0.30307996600281956
}
},
{
"name": "dense1/conv1/depthwise_filter",
"shape": [3, 3, 64, 1],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.006593170586754294,
"min": -0.6329443763284123
}
},
{
"name": "dense1/conv1/pointwise_filter",
"shape": [1, 1, 64, 64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.012215249211180444,
"min": -1.6001976466646382
}
},
{
"name": "dense1/conv1/bias",
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002384825547536214,
"min": -0.3028728445370992
}
},
{
"name": "dense1/conv2/depthwise_filter",
"shape": [3, 3, 64, 1],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005859645441466687,
"min": -0.7617539073906693
}
},
{
"name": "dense1/conv2/pointwise_filter",
"shape": [1, 1, 64, 64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.013121426806730382,
"min": -1.7845140457153321
}
},
{
"name": "dense1/conv2/bias",
"shape": [64],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032247188044529336,
"min": -0.46435950784122243
}
},
{
"name": "dense2/conv0/depthwise_filter",
"shape": [3, 3, 64, 1],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.002659512618008782,
"min": -0.32977956463308894
}
},
{
"name": "dense2/conv0/pointwise_filter",
"shape": [1, 1, 64, 128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.015499923743453681,
"min": -1.9839902391620712
}
},
{
"name": "dense2/conv0/bias",
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0032450980999890497,
"min": -0.522460794098237
}
},
{
"name": "dense2/conv1/depthwise_filter",
"shape": [3, 3, 128, 1],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.005911862382701799,
"min": -0.792189559282041
}
},
{
"name": "dense2/conv1/pointwise_filter",
"shape": [1, 1, 128, 128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.021025861478319356,
"min": -2.2077154552235325
}
},
{
"name": "dense2/conv1/bias",
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.00349616945958605,
"min": -0.46149436866535865
}
},
{
"name": "dense2/conv2/depthwise_filter",
"shape": [3, 3, 128, 1],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.008104994250278847,
"min": -1.013124281284856
}
},
{
"name": "dense2/conv2/pointwise_filter",
"shape": [1, 1, 128, 128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.029337059282789044,
"min": -3.5791212325002633
}
},
{
"name": "dense2/conv2/bias",
"shape": [128],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0038808938334969913,
"min": -0.4230174278511721
}
},
{
"name": "fc/weights",
"shape": [128, 136],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.014016061670639936,
"min": -1.8921683255363912
}
},
{
"name": "fc/bias",
"shape": [136],
"dtype": "float32",
"quantization": {
"dtype": "uint8",
"scale": 0.0029505149698724935,
"min": 0.088760145008564
}
}
],
"paths": ["face_landmark_68_tiny_model-shard1"]
}
]
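Each tensor above is stored as uint8 bytes in the shard named under "paths"; when TensorFlow.js reads it back, every byte is dequantized to float32 as value = byte * scale + min. The sketch below walks that decoding step by hand. The manifest URL, the single-shard layout, and the assumption that tensors are stored contiguously in manifest order are illustrative, not guarantees from this file.

JavaScript
// Decode one quantized tensor from this manifest by hand (illustrative only).
async function loadDequantizedWeight(manifestUrl, weightName) {
  const [group] = await (await fetch(manifestUrl)).json();

  // Walk the weight list, tracking each tensor's byte offset in the shard
  // (uint8 quantization => one byte per value).
  let offset = 0;
  let entry = null;
  for (const w of group.weights) {
    const size = w.shape.reduce((a, b) => a * b, 1);
    if (w.name === weightName) { entry = { ...w, offset, size }; break; }
    offset += size;
  }
  if (!entry) throw new Error(`${weightName} not found in manifest`);

  // Fetch the binary shard referenced by "paths", relative to the manifest.
  const base = manifestUrl.slice(0, manifestUrl.lastIndexOf('/') + 1);
  const buf = await (await fetch(base + group.paths[0])).arrayBuffer();
  const bytes = new Uint8Array(buf, entry.offset, entry.size);

  // Dequantize: float32 value = uint8 byte * scale + min.
  const { scale, min } = entry.quantization;
  return Float32Array.from(bytes, (b) => b * scale + min);
}

// Example (assumed URL): "fc/bias" has shape [136], i.e. an (x, y) offset for
// each of the 68 landmark points.
// loadDequantizedWeight('/models/face_landmark_68_tiny_model-weights_manifest.json', 'fc/bias')
//   .then((v) => console.log(v.length)); // 136

In practice face-api.js and TensorFlow.js perform this decoding internally when the model is loaded; the manual walk above only illustrates what the scale/min pairs in the manifest mean.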