ai-face-detection

A simple face detection package built on face-api.js and TensorFlow.js.
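
For context, here is a minimal browser-side sketch of the kind of detection call the description implies, written directly against face-api.js. It assumes the MTCNN weight files (the manifest below plus mtcnn_model-shard1) are served from a hypothetical /models folder; the ai-face-detection package's own exports are not shown on this page, so the snippet is illustrative only.

import * as faceapi from 'face-api.js';

async function detectFaces(video: HTMLVideoElement) {
  // Load the MTCNN detector weights described by the manifest below.
  await faceapi.nets.mtcnn.loadFromUri('/models');

  // Run the three-stage detector (PNet -> RNet -> ONet) on the current frame.
  // minFaceSize is an illustrative value, not a package default.
  const detections = await faceapi.detectAllFaces(
    video,
    new faceapi.MtcnnOptions({ minFaceSize: 40 })
  );

  console.log(`Found ${detections.length} face(s)`);
  return detections;
}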

The file shown below is the model's weights manifest: it lists every tensor stored in the binary shard mtcnn_model-shard1, grouped into the PNet, RNet, and ONet stages of the MTCNN detector.

[ { "paths": ["mtcnn_model-shard1"], "weights": [ { "dtype": "float32", "name": "pnet/conv1/weights", "shape": [3, 3, 3, 10] }, { "dtype": "float32", "name": "pnet/conv1/bias", "shape": [10] }, { "dtype": "float32", "name": "pnet/prelu1_alpha", "shape": [10] }, { "dtype": "float32", "name": "pnet/conv2/weights", "shape": [3, 3, 10, 16] }, { "dtype": "float32", "name": "pnet/conv2/bias", "shape": [16] }, { "dtype": "float32", "name": "pnet/prelu2_alpha", "shape": [16] }, { "dtype": "float32", "name": "pnet/conv3/weights", "shape": [3, 3, 16, 32] }, { "dtype": "float32", "name": "pnet/conv3/bias", "shape": [32] }, { "dtype": "float32", "name": "pnet/prelu3_alpha", "shape": [32] }, { "dtype": "float32", "name": "pnet/conv4_1/weights", "shape": [1, 1, 32, 2] }, { "dtype": "float32", "name": "pnet/conv4_1/bias", "shape": [2] }, { "dtype": "float32", "name": "pnet/conv4_2/weights", "shape": [1, 1, 32, 4] }, { "dtype": "float32", "name": "pnet/conv4_2/bias", "shape": [4] }, { "dtype": "float32", "name": "rnet/conv1/weights", "shape": [3, 3, 3, 28] }, { "dtype": "float32", "name": "rnet/conv1/bias", "shape": [28] }, { "dtype": "float32", "name": "rnet/prelu1_alpha", "shape": [28] }, { "dtype": "float32", "name": "rnet/conv2/weights", "shape": [3, 3, 28, 48] }, { "dtype": "float32", "name": "rnet/conv2/bias", "shape": [48] }, { "dtype": "float32", "name": "rnet/prelu2_alpha", "shape": [48] }, { "dtype": "float32", "name": "rnet/conv3/weights", "shape": [2, 2, 48, 64] }, { "dtype": "float32", "name": "rnet/conv3/bias", "shape": [64] }, { "dtype": "float32", "name": "rnet/prelu3_alpha", "shape": [64] }, { "dtype": "float32", "name": "rnet/fc1/weights", "shape": [576, 128] }, { "dtype": "float32", "name": "rnet/fc1/bias", "shape": [128] }, { "dtype": "float32", "name": "rnet/prelu4_alpha", "shape": [128] }, { "dtype": "float32", "name": "rnet/fc2_1/weights", "shape": [128, 2] }, { "dtype": "float32", "name": "rnet/fc2_1/bias", "shape": [2] }, { "dtype": "float32", "name": "rnet/fc2_2/weights", "shape": [128, 4] }, { "dtype": "float32", "name": "rnet/fc2_2/bias", "shape": [4] }, { "dtype": "float32", "name": "onet/conv1/weights", "shape": [3, 3, 3, 32] }, { "dtype": "float32", "name": "onet/conv1/bias", "shape": [32] }, { "dtype": "float32", "name": "onet/prelu1_alpha", "shape": [32] }, { "dtype": "float32", "name": "onet/conv2/weights", "shape": [3, 3, 32, 64] }, { "dtype": "float32", "name": "onet/conv2/bias", "shape": [64] }, { "dtype": "float32", "name": "onet/prelu2_alpha", "shape": [64] }, { "dtype": "float32", "name": "onet/conv3/weights", "shape": [3, 3, 64, 64] }, { "dtype": "float32", "name": "onet/conv3/bias", "shape": [64] }, { "dtype": "float32", "name": "onet/prelu3_alpha", "shape": [64] }, { "dtype": "float32", "name": "onet/conv4/weights", "shape": [2, 2, 64, 128] }, { "dtype": "float32", "name": "onet/conv4/bias", "shape": [128] }, { "dtype": "float32", "name": "onet/prelu4_alpha", "shape": [128] }, { "dtype": "float32", "name": "onet/fc1/weights", "shape": [1152, 256] }, { "dtype": "float32", "name": "onet/fc1/bias", "shape": [256] }, { "dtype": "float32", "name": "onet/prelu5_alpha", "shape": [256] }, { "dtype": "float32", "name": "onet/fc2_1/weights", "shape": [256, 2] }, { "dtype": "float32", "name": "onet/fc2_1/bias", "shape": [2] }, { "dtype": "float32", "name": "onet/fc2_2/weights", "shape": [256, 4] }, { "dtype": "float32", "name": "onet/fc2_2/bias", "shape": [4] }, { "dtype": "float32", "name": "onet/fc2_3/weights", "shape": [256, 10] }, { "dtype": "float32", "name": 
"onet/fc2_3/bias", "shape": [10] } ] } ]