Commit

Merge branch 'refactor'
alessiosavi committed May 22, 2019
2 parents 73055f1 + 0fab053 commit a9f8bb3
Showing 15 changed files with 371 additions and 201 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -76,3 +76,4 @@ dataset
*.pyc
flask_monitoringdashboard.db
uploads
conf
114 changes: 41 additions & 73 deletions api/Api.py
@@ -2,15 +2,13 @@
"""
Custom function that will be wrapped for be HTTP compliant
"""
import os
import pickle

import time
import zipfile
from datetime import datetime
from logging import getLogger
from os.path import join as path_join

from datastructure.Response import Response
from utils.util import print_prediction_on_image, random_string, remove_dir, unzip_data
from utils.util import print_prediction_on_image, random_string, retrieve_dataset

log = getLogger()

@@ -24,21 +22,22 @@ def predict_image(img_path, clf, PREDICTION_PATH):
:return: Response dictionary jsonizable
"""
response = Response()
log.debug("predict_image | Predicting {}".format(img_path))
if clf is None:
log.error("predict_image | FATAL | Classifier is None!")
prediction = None
else:
log.debug("predict_image | Predicting {}".format(img_path))
prediction = clf.predict(img_path)
log.debug("predict_image | Image analyzed!")
log.debug("predict_image | Result: {}".format(prediction))
# Manage success
if prediction is not None and isinstance(prediction, list) and len(prediction) == 1:
if prediction and isinstance(prediction["predictions"], list):
img_name = random_string() + ".png"
log.debug("predict_image | Generated a random name: {}".format(img_path))
log.debug("predict_image | Generated a random name: {}".format(img_name))
log.debug("predict_image | Visualizing face recognition ...")
print_prediction_on_image(img_path, prediction, PREDICTION_PATH, img_name)
response.status = "OK"
response.description = img_name
response.data = prediction[0][0]
print_prediction_on_image(img_path, prediction["predictions"], PREDICTION_PATH, img_name)
return Response(status="OK", description=img_name, data={"name": prediction["predictions"][0][0],
"distance": prediction[
"score"]}).__dict__

# Manage error
elif prediction is None:
@@ -61,7 +60,7 @@ def predict_image(img_path, clf, PREDICTION_PATH):
# TODO: Add custom algorithm that "try to understand" who has never been recognized
response.error = "FACE_NOT_RECOGNIZED"
response.description = "Seems that this face is related to nobody that i've seen before ..."
log.error("predict_image | Seems that this face is lated to nobody that i've seen before ...")
log.error("predict_image | Seems that this face is related to nobody that i've seen before ...")

elif prediction == -2:
response.error = "FILE_NOT_VALID"
@@ -79,23 +78,23 @@ def train_network(folder_uncompress, zip_file, clf):
:param clf:
:return:
"""
log.debug("train_network | uncompressing zip file ...")
folder_name = path_join(folder_uncompress, random_string())
zip_ref = zipfile.ZipFile(zip_file)
zip_ref.extractall(folder_name)
zip_ref.close()
log.debug("train_network | zip file uncompressed!")
clf.init_peoples_list(peoples_path=folder_name)
dataset = clf.init_dataset()
neural_model_file = clf.train(dataset["X"], dataset["Y"])
log.debug("train_network | Removing unzipped files")
remove_dir(folder_name)
response = Response()
response.status = "OK"
response.data = neural_model_file
response.description = "Model succesfully trained!"

return response.__dict__
log.debug("train_network | Starting training phase ...")
dataset = retrieve_dataset(folder_uncompress, zip_file, clf)

if dataset is None:
return Response(error="ERROR DURING LOADING DAT", description="Seems that the dataset is not valid").__dict__

else:
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
neural_model_file, elapsed_time = clf.train(dataset["X"], dataset["Y"], timestamp)

response = Response(status="OK", data=neural_model_file)
response.description = "Model successfully trained! | {}".format(
time.strftime("%H:%M:%S.%f", time.gmtime(elapsed_time)))
log.debug("train_network | Tuning phase finihsed! | {}".format(response.description))

return response.__dict__


def tune_network(folder_uncompress, zip_file, clf):
@@ -106,50 +105,19 @@ def tune_network(folder_uncompress, zip_file, clf):
:param clf:
:return:
"""
log.debug("tune_network | uncompressing zip file ...")
check = verify_extension(zip_file.filename)
if check == "zip": # Image provided
folder_name = unzip_data(folder_uncompress, zip_file)
log.debug("tune_network | zip file uncompressed!")
clf.init_peoples_list(peoples_path=folder_name)
dataset = clf.init_dataset()
elif check == "dat":
dataset = pickle.load(zip_file)
log.debug("tune_network | Starting tuning phase ...")
dataset = retrieve_dataset(folder_uncompress, zip_file, clf)

if dataset is None:
return Response(error="ERROR DURING LOADING DAT", description="Seems that the dataset is not valid").__dict__

else:
dataset = None

if dataset is not None:
start_time = time.time()
neural_model_file = clf.tuning(dataset["X"], dataset["Y"])
elapsed_time = time.time() - start_time

log.debug("tune_network | Removing unzipped files")
if check == "zip":
# TODO: Refactor this method :/
remove_dir(folder_name)
response = Response()
response.status = "OK"
response.data = neural_model_file
timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
neural_model_file, elapsed_time = clf.tuning(dataset["X"], dataset["Y"], timestamp)

response = Response(status="OK", data=neural_model_file)
response.description = "Model successfully trained! | {}".format(
time.strftime("%H:%M:%S.%f", time.gmtime(elapsed_time)))
else:
response = Response()
response.error = "ERROR DURING LOADING DAT"
return response.__dict__
log.debug("train_network | Tuning phase finihsed! | {}".format(response.description))


def verify_extension(file):
"""
Wrapper for validate file
:param file:
:return:
"""
extension = os.path.splitext(file)[1]
log.debug("verify_extension | File: {} | Ext: {}".format(file, extension))
if extension == ".zip":
# In this case we have to analyze the photos
return "zip"
elif extension == ".dat":
# Photos have been alredy analyzed, dataset is ready!
return "dat"
return None
return response.__dict__
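
For reference, the refactored success branch in predict_image expects clf.predict to return a dictionary with a "predictions" list and a "score", and it serializes a Response carrying the recognized name and the distance. The snippet below is a minimal, self-contained sketch of that data flow; the example prediction values and the face-box tuple are illustrative assumptions, since the classifier's exact output format is not shown in this diff.

# Illustrative payload shape assumed by the refactored success branch in predict_image.
example_prediction = {
    "predictions": [("alessio", (10, 20, 30, 40))],  # (name, face box) -- made-up values
    "score": 0.42,                                    # reported back to the client as "distance"
}

if example_prediction and isinstance(example_prediction["predictions"], list):
    payload = {
        "status": "OK",
        "description": "random_name.png",            # the randomly generated image name
        "data": {
            "name": example_prediction["predictions"][0][0],
            "distance": example_prediction["score"],
        },
    }
    print(payload)
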
1 change: 1 addition & 0 deletions api/templates/train.html
@@ -10,6 +10,7 @@ <h1>Upload a zip file with all person that you want to save!</h1>
<form enctype=multipart/form-data method=post>
<input name=file type=file>
<input type=submit value=Upload>
<input name=_csrf_token type=hidden value="{{ csrf_token() }}">
</form>
{% with messages = get_flashed_messages() %}
{% if messages %}
1 change: 1 addition & 0 deletions api/templates/upload.html
@@ -10,6 +10,7 @@ <h1>Upload new File</h1>
<form enctype=multipart/form-data method=post>
<input name=file type=file>
<input type=submit value=Upload>
<input name=_csrf_token type=hidden value="{{ csrf_token() }}">
</form>
{% with messages = get_flashed_messages() %}
{% if messages %}
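
Both templates above now post a hidden _csrf_token field rendered by csrf_token(). The server-side half of that pattern is not included in this commit, so the sketch below is an assumption: it shows one common way a Flask app can provide the csrf_token() template global and reject POSTs whose token does not match the session. All names other than _csrf_token and csrf_token() are placeholders.

# Hypothetical server-side counterpart to the _csrf_token hidden field; the project's
# actual CSRF check is not part of this diff.
import secrets
from flask import Flask, abort, request, session

app = Flask(__name__)
app.secret_key = "change-me"  # placeholder; any per-deployment secret

def csrf_token():
    # One token per session, exposed to Jinja templates as csrf_token().
    if "_csrf_token" not in session:
        session["_csrf_token"] = secrets.token_hex(32)
    return session["_csrf_token"]

app.jinja_env.globals["csrf_token"] = csrf_token

@app.before_request
def check_csrf():
    # Compare the submitted token with the one stored in the session.
    if request.method == "POST":
        token = session.get("_csrf_token")
        if not token or token != request.form.get("_csrf_token"):
            abort(403)
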
2 changes: 1 addition & 1 deletion conf/dashboard.ini
@@ -15,4 +15,4 @@ GUEST_PASSWORD = ['guest', 'password']

[database]
TABLE_PREFIX = fmd
DATABASE = sqlite:///log/flask_monitoringdashboard.db
DATABASE = sqlite:///conf/flask_monitoringdashboard.db
18 changes: 18 additions & 0 deletions conf/ssl/localhost.crt
@@ -0,0 +1,18 @@
-----BEGIN CERTIFICATE-----
MIIC5TCCAc2gAwIBAgIJAJPNi4jjHSy3MA0GCSqGSIb3DQEBCwUAMBQxEjAQBgNV
BAMMCWxvY2FsaG9zdDAeFw0xOTA1MjIxNjMxMDJaFw0xOTA2MjExNjMxMDJaMBQx
EjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
ggEBANskAjz6LENzhnpGkyJHztmIf3Pno8h/k70fjEI13osonv7W5alA3vgQ9az3
ivD7cp6YPXkv5lK+mTx6dKccrdAPQLWQDZBqaotasTX1hBxaqILqNvh25QY5gjbz
jdfK27E+82QDZUzdYsFDyZQ4ORQ8qVUz0k42ulS4WMpluBEaLk8rHkDIyZSM4psv
EK+IcI7mN8z1YI8mS3jOW2ouQQVwRb60ZOe4b9wcFPYR7+NdNQM7rCR9UQU9ymjC
U4VmTUrIonmXML1gRPHs0Z694AsQe+Mr5O3OxeYhbsFb7d1Ry4WcZiPM+ugJJiNS
Fkpf4SDT7nHAcHbqFzibpSJPP7cCAwEAAaM6MDgwFAYDVR0RBA0wC4IJbG9jYWxo
b3N0MAsGA1UdDwQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0B
AQsFAAOCAQEADz/YL1DOV8n/15/ApaWCQhzcFGOPSv1DcnI6sY46I4zRKyG9yuHE
N11XqkCmQuKF9UnowhFFMLIfxzlqkUTWjKtaWKasnOdAd/LOqO9Eh4cnsyC4yEBB
aMO00YdUAdFb0eV3bR/UY3srji6LjRy9215Ad3eXYxjdTTB/btIsN75XTTsZLnbR
F0V3TRkZlxCQXcYh/lpfPHG9xWLxPZ8g8e+hrwJhsmW3a0BMzYNF8nJdzhZi7Dls
ldR2V8IqVP/Ip6dpsygn/CzbDlZVcZVV4jqhec8bbijsXdSizwm8bfc57TssRA1C
HlvLlwAsoiDj6PZ4PwRCvc5k6ydDbXNftw==
-----END CERTIFICATE-----
28 changes: 28 additions & 0 deletions conf/ssl/localhost.key
@@ -0,0 +1,28 @@
-----BEGIN PRIVATE KEY-----
MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDbJAI8+ixDc4Z6
RpMiR87ZiH9z56PIf5O9H4xCNd6LKJ7+1uWpQN74EPWs94rw+3KemD15L+ZSvpk8
enSnHK3QD0C1kA2QamqLWrE19YQcWqiC6jb4duUGOYI2843XytuxPvNkA2VM3WLB
Q8mUODkUPKlVM9JONrpUuFjKZbgRGi5PKx5AyMmUjOKbLxCviHCO5jfM9WCPJkt4
zltqLkEFcEW+tGTnuG/cHBT2Ee/jXTUDO6wkfVEFPcpowlOFZk1KyKJ5lzC9YETx
7NGeveALEHvjK+TtzsXmIW7BW+3dUcuFnGYjzProCSYjUhZKX+Eg0+5xwHB26hc4
m6UiTz+3AgMBAAECggEBAIMpqFVK/9dXfDQPrd0k0cAOHQsIqFVHVuwpx8+RYqQ0
KgYqJcgKVepwbDuc5oKaXd5jDNhOPTNldV5nhQ7I8ZfIqViC4juAFklWfR7o1qwJ
7zZ8bW6F60qwfSna2RlCCACsxw0joyxAje1TX4HhrPhZ3phqrgO2agxvUmXCQEur
HmZXEXP2grR0XdWiXazWI5jlG0MsX6J+qsMHFCApGR/9KcsB8Lwe8RAiszc1SPPp
TNGZopojkH1GK8DAXMFvODmTdwlStpDh1g711cX5KoINKlX5ppJjsoqcGOLhbEee
uCsfckXGrHJm51GbJePPZ16x7Op/BUdyKjYvSL31fuECgYEA7mumpBMDq4NQ1gju
n7kmU75k2ddrXSycvFJ5yxKCCec+hdJBtKm6WrGGD+uchjxFhZP37JRTimV/F5RL
Ps6xVwgwX3DtSLpwyOelLR8Zo2wT1cDFKp6EfD4ltDVbTsOW2X8yyKeJHac23/wT
HIRyv+8DUUo0GU4JMl4VAW9PwWkCgYEA60xv/8c0AfjOZIGlxdk2RCKWnZas6Rdk
STChPXoIOj5T75B7OfxJukY4R8d7jzXOwX5WX3wS/rtEuom5tFW5+fLl16HWUyz5
pXa7/QW5dQa7GLB3K6HBKhfTm7/fDkaFKDu/c+sF46RWoP7vxqct1ir0L0Z1BFnk
/qSpSbhBtB8CgYA1/ajR9QBawbT3kzQ+dVYplq8N6cuFYQnpV5//DaTnCzfMZC2+
9MSfrx3V0xwyBcoUksqNB5XXfF6If2t+wJ3GQLN7mX4Sfy31QQfVrPpIWLwxJqM/
oIAOBqDRK1gPARnTDQv6Bn51eZ1ioZnOVmwJ7N1KdkxQAqzwe/+zwHpGKQKBgQCH
e/Pha2pe2Ey/QoeZbID6qo/fHatia72rBv1Q0Lt8Dfd2sdLCiKpLP7OYYRycUXdD
ouNJB8BIPLxOTI9JbzMu4NXHW8B1FCiLRdrozisDX2TLypBT50e6XQ3TWJ+vMJvr
lruem21ArpfTC/g0gn66GvGPZxpp7vkURuvTLu1mMQKBgQDI0yvH+FqxiXmnZjY6
4rqoq7shenmrHxbywHOCJbXMVlFMhFovZUCKZtJ0G14e3yGystA3wkNj8CJtBYj4
/R1ucQIXBeiGJHKY9lVuRuJI258jUrIQ8z6hNv8zXVW/2oM0R58dJXL2UJVFHDpU
ETwkYWrY5QeX4J4mxX2AfsrZ8Q==
-----END PRIVATE KEY-----
21 changes: 13 additions & 8 deletions conf/test.json
@@ -1,6 +1,6 @@
{
"PyRecognizer": {
"Version": "0.0.1",
"Version": "0.1.2",
"temp_upload_training": "uploads/training/",
"temp_upload_predict": "uploads/predict/",
"temp_upload": "uploads/upload"
@@ -11,21 +11,26 @@
"level": "debug"
},
"network": {
"host": "locahost",
"host": "0.0.0.0",
"port": 11001,
"templates": "api/templates/",
"SSL": {
"enabled": false,
"cert.pub": "/dev/null",
"cert.priv": "/dev/null"
"enabled": true,
"cert.pub": "conf/ssl/localhost.crt",
"cert.priv": "conf/ssl/localhost.key"
}
},
"classifier": {
"trainin_dir": "dataset/images/",
"model_path": "dataset/model/",
"model": "model-20190518_191827.clf",
"n_neighbors": "",
"knn_algo": ""
"timestamp": "20190522_170246",
"params": {
"algorithm": "ball_tree",
"metric": "minkowski",
"n_neighbors": 80,
"p": 2,
"weights": "distance"
}
},
"data": {
"test_data": "/tmp/test_data/"
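
The configuration now enables SSL and points at the certificate and key added under conf/ssl. How the project's startup code consumes these values is not shown in this commit; the sketch below assumes the common Flask approach of passing a (certificate, key) tuple as ssl_context to app.run.

# Sketch: feeding the new "network"/"SSL" section of conf/test.json to Flask.
import json
from flask import Flask

app = Flask(__name__)

with open("conf/test.json") as f:
    cfg = json.load(f)

net = cfg["network"]
ssl_ctx = None
if net["SSL"]["enabled"]:
    # Werkzeug accepts a (certificate file, private key file) tuple as ssl_context.
    ssl_ctx = (net["SSL"]["cert.pub"], net["SSL"]["cert.priv"])

if __name__ == "__main__":
    app.run(host=net["host"], port=net["port"], ssl_context=ssl_ctx)
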
@@ -1,9 +1,9 @@
{
"classifier_file": "dataset/model/model-20190519_210950",
"classifier_file": "20190522_170246/model.clf",
"params": {
"algorithm": "ball_tree",
"metric": "minkowski",
"n_neighbors": 78,
"n_neighbors": 80,
"p": 2,
"weights": "distance"
}
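
The stored parameter names (algorithm, metric, n_neighbors, p, weights) match scikit-learn's KNeighborsClassifier constructor. Assuming that is the underlying model (the diff itself does not show which classifier class is used), the persisted parameters can be applied as follows.

# Assumed mapping of the persisted params onto scikit-learn's KNeighborsClassifier.
from sklearn.neighbors import KNeighborsClassifier

params = {
    "algorithm": "ball_tree",
    "metric": "minkowski",
    "n_neighbors": 80,
    "p": 2,
    "weights": "distance",
}

clf = KNeighborsClassifier(**params)
# clf.fit(dataset["X"], dataset["Y"]) would then train it on the face encodings and
# labels produced by the train/tune flow in api/Api.py.
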
Binary file removed dataset/model/model-20190519_210950.clf
