Mirror of https://github.com/arnaucube/objectImageIdentifierAI.git (synced 2026-02-07 03:36:51 +01:00)

Commit: jupyter notebook cleaned, started the upload of the images in the smartphone app
Changed: README.md (25 lines)
@@ -1,18 +1,7 @@
# objectImageIdentifierAI

- imagesToDataset
  - Reads all the images from the two directories ('object' and 'noobject') and generates the dataset
- nnTrain
  - Trains the Neural Network from the dataset file generated in the previous step
- serverPredictor
  - Runs a server API that classifies the incoming images with the Neural Network
- smartphoneApp
  - Takes a photo and uploads it to the server to get the response (object or no object)

## Real steps
## Run
- download images
  - for example, this can be done with https://github.com/arnaucode/imgDownloader.git
- In the /serverPredictor directory
@@ -23,3 +12,15 @@ This will generate the model.pkl. Then, run the serverPredictor.py
```
python serverPredictor.py
```
- The tests can be run with:
```
bash test.sh
```
- deploy.sh will execute classifierChooser.py and then serverPredictor.py:
```
bash deploy.sh
```
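For reference, the same request that test.sh sends with curl can also be made from Python. This is a minimal sketch, assuming the server from serverPredictor.py is listening on http://127.0.0.1:3200/predict and that the `requests` package is available:

```
# Minimal sketch: POST an image to the prediction API, like test.sh does with curl.
# Assumes serverPredictor.py is running locally on port 3200 (see its app.run call).
import requests

with open("test1.png", "rb") as f:
    resp = requests.post("http://127.0.0.1:3200/predict", files={"file": f})

print(resp.status_code)
print(resp.text)  # the body should contain the classification result (object / no object)
```

The multipart field name `file` matches what test.sh sends.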
@@ -17,7 +17,6 @@
"source": [
"from PIL import Image, ImageOps\n",
"import numpy, os\n",
"from sklearn.ensemble import AdaBoostClassifier\n",
"from sklearn.cross_validation import cross_val_score\n",
"import numpy as np\n",
"import pandas as pd"
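A side note on the imports above: `sklearn.cross_validation` is the old module path, removed in scikit-learn 0.20; on newer versions the equivalent import is:

```
# Equivalent import on recent scikit-learn versions (the notebook uses the
# pre-0.20 sklearn.cross_validation path, which has since been removed).
from sklearn.model_selection import cross_val_score
```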
@@ -46,182 +45,15 @@
"name": "stdout",
"output_type": "stream",
"text": [
[... removed notebook cell output: a long stdout listing of every dataset image file read, dataset/object/*.png and dataset/noobject/*.jpg ...]
"reading dataset images files\n"
]
}
],
"source": [
"print(\"reading dataset images files\")\n",
"for directory in os.listdir(path):\n",
" for file in os.listdir(path+directory):\n",
" print(path+directory+\"/\"+file)\n",
" #print(path+directory+\"/\"+file)\n",
" img=Image.open(path+directory+\"/\"+file)\n",
" #resize\n",
" thumb = ImageOps.fit(img, size, Image.ANTIALIAS)\n",
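The source cell above only prints and resizes each file. As a rough sketch of how this loop turns the two directories into training arrays: the `path`, `size`, `Xlist` and `Ylist` names follow the notebook, while the grayscale flattening and the label encoding are assumptions, not the exact notebook code:

```
# Sketch of the dataset-building loop (assumptions noted inline).
import os
import numpy as np
from PIL import Image, ImageOps

path = "dataset/"      # contains the 'object' and 'noobject' subdirectories
size = (100, 100)      # same thumbnail size the predictor uses

Xlist, Ylist = [], []
for directory in os.listdir(path):
    for file in os.listdir(path + directory):
        img = Image.open(path + directory + "/" + file)
        # resize/crop every image to a fixed shape (Image.LANCZOS on newer Pillow)
        thumb = ImageOps.fit(img, size, Image.ANTIALIAS)
        # assumption: grayscale + flatten to get one feature vector per image
        Xlist.append(np.asarray(thumb.convert("L"), dtype=np.float64).flatten())
        # assumption: label 1 for 'object', 0 for 'noobject'
        Ylist.append(1 if directory == "object" else 0)

X_train, y_train = np.array(Xlist), np.array(Ylist)
```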
@@ -253,39 +85,20 @@
{
"cell_type": "code",
"execution_count": 5,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"clf=AdaBoostClassifier(n_estimators=100)"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"scores = cross_val_score(clf, X_train, y_train, cv=3)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.77037037037\n"
"0.762399355878\n"
]
}
],
"source": [
"from sklearn.ensemble import AdaBoostClassifier\n",
"clf=AdaBoostClassifier(n_estimators=100)\n",
"scores = cross_val_score(clf, X_train, y_train, cv=3)\n",
"print(scores.mean())"
]
},
@@ -298,40 +111,7 @@
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.naive_bayes import GaussianNB"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"clf = GaussianNB()"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"scores = cross_val_score(clf, Xlist, Ylist)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"execution_count": 6,
"metadata": {},
"outputs": [
{
@@ -343,6 +123,9 @@
}
],
"source": [
"from sklearn.naive_bayes import GaussianNB\n",
"clf = GaussianNB()\n",
"scores = cross_val_score(clf, Xlist, Ylist)\n",
"print(scores.mean())"
]
},
@@ -355,40 +138,7 @@
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.neighbors import KNeighborsClassifier"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"clf = KNeighborsClassifier(n_neighbors=10)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"scores = cross_val_score(clf, Xlist, Ylist)"
]
},
{
"cell_type": "code",
"execution_count": 15,
"execution_count": 7,
"metadata": {},
"outputs": [
{
@@ -400,6 +150,9 @@
}
],
"source": [
"from sklearn.neighbors import KNeighborsClassifier\n",
"clf = KNeighborsClassifier(n_neighbors=10)\n",
"scores = cross_val_score(clf, Xlist, Ylist)\n",
"print(scores.mean())"
]
},
@@ -412,51 +165,21 @@
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.svm import LinearSVC"
]
},
{
"cell_type": "code",
"execution_count": 17,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"clf = LinearSVC()"
]
},
{
"cell_type": "code",
"execution_count": 18,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"scores = cross_val_score(clf, Xlist, Ylist)"
]
},
{
"cell_type": "code",
"execution_count": 19,
"execution_count": 8,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.638575605681\n"
"0.66238512949\n"
]
}
],
"source": [
"from sklearn.svm import LinearSVC\n",
"clf = LinearSVC()\n",
"scores = cross_val_score(clf, Xlist, Ylist)\n",
"print(scores.mean())"
]
},
@@ -469,40 +192,7 @@
},
{
"cell_type": "code",
"execution_count": 20,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.svm import SVC"
]
},
{
"cell_type": "code",
"execution_count": 21,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"clf = SVC()"
]
},
{
"cell_type": "code",
"execution_count": 22,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"scores = cross_val_score(clf, Xlist, Ylist)"
]
},
{
"cell_type": "code",
"execution_count": 23,
"execution_count": 9,
"metadata": {},
"outputs": [
{
@@ -514,6 +204,9 @@
}
],
"source": [
"from sklearn.svm import SVC\n",
"clf = SVC()\n",
"scores = cross_val_score(clf, Xlist, Ylist)\n",
"print(scores.mean())"
]
},
@@ -526,40 +219,7 @@
},
{
"cell_type": "code",
"execution_count": 24,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.gaussian_process import GaussianProcessClassifier"
]
},
{
"cell_type": "code",
"execution_count": 25,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"clf = GaussianProcessClassifier()"
]
},
{
"cell_type": "code",
"execution_count": 26,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"scores = cross_val_score(clf, Xlist, Ylist)"
]
},
{
"cell_type": "code",
"execution_count": 27,
"execution_count": 10,
"metadata": {},
"outputs": [
{
@@ -571,6 +231,9 @@
}
],
"source": [
"from sklearn.gaussian_process import GaussianProcessClassifier\n",
"clf = GaussianProcessClassifier()\n",
"scores = cross_val_score(clf, Xlist, Ylist)\n",
"print(scores.mean())"
]
},
@@ -583,51 +246,21 @@
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"from sklearn.ensemble import RandomForestClassifier"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"clf = RandomForestClassifier()"
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"scores = cross_val_score(clf, Xlist, Ylist)"
]
},
{
"cell_type": "code",
"execution_count": 31,
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"0.710317460317\n"
"0.775793650794\n"
]
}
],
"source": [
"from sklearn.ensemble import RandomForestClassifier\n",
"clf = RandomForestClassifier()\n",
"scores = cross_val_score(clf, Xlist, Ylist)\n",
"print(scores.mean())"
]
},
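The notebook repeats the same import / construct / cross_val_score / print pattern for each classifier. A compact sketch of that comparison loop (assuming `Xlist` and `Ylist` are the feature matrix and labels built earlier; scores will of course differ from the ones recorded above):

```
# Sketch of the classifier comparison the notebook does cell by cell.
# Assumes Xlist, Ylist are the arrays produced by the dataset-building loop.
from sklearn.model_selection import cross_val_score
from sklearn.ensemble import AdaBoostClassifier, RandomForestClassifier
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import LinearSVC, SVC
from sklearn.gaussian_process import GaussianProcessClassifier

classifiers = {
    "AdaBoost": AdaBoostClassifier(n_estimators=100),
    "GaussianNB": GaussianNB(),
    "KNN": KNeighborsClassifier(n_neighbors=10),
    "LinearSVC": LinearSVC(),
    "SVC": SVC(),
    "GaussianProcess": GaussianProcessClassifier(),
    "RandomForest": RandomForestClassifier(),
}

for name, clf in classifiers.items():
    scores = cross_val_score(clf, Xlist, Ylist, cv=3)
    print(name, scores.mean())
```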
@@ -640,7 +273,7 @@
},
{
"cell_type": "code",
"execution_count": 32,
"execution_count": 12,
"metadata": {
"collapsed": true
},
@@ -653,7 +286,7 @@
},
{
"cell_type": "code",
"execution_count": 33,
"execution_count": 13,
"metadata": {
"collapsed": true
},
@@ -679,7 +312,7 @@
},
{
"cell_type": "code",
"execution_count": 34,
"execution_count": 14,
"metadata": {
"collapsed": true
},
@@ -695,7 +328,7 @@
},
{
"cell_type": "code",
"execution_count": 35,
"execution_count": 15,
"metadata": {
"collapsed": true
},
@@ -706,18 +339,7 @@
},
{
"cell_type": "code",
"execution_count": 36,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"#grid_search.fit(Xlist, Ylist)"
]
},
{
"cell_type": "code",
"execution_count": 37,
"execution_count": 16,
"metadata": {
"collapsed": true
},
@@ -738,7 +360,7 @@
},
{
"cell_type": "code",
"execution_count": 38,
"execution_count": 17,
"metadata": {
"scrolled": false
},
@@ -763,26 +385,26 @@
"name": "stderr",
"output_type": "stream",
"text": [
"[Parallel(n_jobs=-1)]: Done 9 out of 9 | elapsed: 1.5s finished\n"
"[Parallel(n_jobs=-1)]: Done 9 out of 9 | elapsed: 0.8s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"GridSearchCV took 2.38 seconds for 3 candidate parameter settings.\n",
"GridSearchCV took 1.45 seconds for 3 candidate parameter settings.\n",
"finished GridSearch\n",
"Model with rank: 1\n",
"Mean validation score: 0.815 (std: 0.073)\n",
"Mean validation score: 0.800 (std: 0.085)\n",
"Parameters: {'clf__n_estimators': 100}\n",
"\n",
"Model with rank: 2\n",
"Mean validation score: 0.763 (std: 0.093)\n",
"Parameters: {'clf__n_estimators': 10}\n",
"Mean validation score: 0.778 (std: 0.035)\n",
"Parameters: {'clf__n_estimators': 3}\n",
"\n",
"Model with rank: 3\n",
"Mean validation score: 0.756 (std: 0.110)\n",
"Parameters: {'clf__n_estimators': 3}\n",
"Mean validation score: 0.741 (std: 0.046)\n",
"Parameters: {'clf__n_estimators': 10}\n",
"\n",
"-----\n",
"classifier:\n",
@@ -790,15 +412,15 @@
" metric_params=None, n_jobs=1, n_neighbors=5, p=2,\n",
" weights='uniform')\n",
"Fitting 3 folds for each of 2 candidates, totalling 6 fits\n",
"GridSearchCV took 0.23 seconds for 2 candidate parameter settings.\n",
"GridSearchCV took 0.35 seconds for 2 candidate parameter settings.\n",
"finished GridSearch\n",
"Model with rank: 1\n",
"Mean validation score: 0.778 (std: 0.048)\n",
"Parameters: {'clf__n_neighbors': 3}\n",
"Mean validation score: 0.756 (std: 0.056)\n",
"Parameters: {'clf__n_neighbors': 10}\n",
"\n",
"Model with rank: 2\n",
"Mean validation score: 0.704 (std: 0.010)\n",
"Parameters: {'clf__n_neighbors': 10}\n",
"Mean validation score: 0.748 (std: 0.111)\n",
"Parameters: {'clf__n_neighbors': 3}\n",
"\n",
"-----\n",
"classifier:\n",
@@ -821,14 +443,14 @@
"name": "stdout",
"output_type": "stream",
"text": [
"GridSearchCV took 0.36 seconds for 2 candidate parameter settings.\n",
"GridSearchCV took 0.47 seconds for 2 candidate parameter settings.\n",
"finished GridSearch\n",
"Model with rank: 1\n",
"Mean validation score: 0.489 (std: 0.000)\n",
"Mean validation score: 0.496 (std: 0.005)\n",
"Parameters: {'clf__n_restarts_optimizer': 0}\n",
"\n",
"Model with rank: 1\n",
"Mean validation score: 0.489 (std: 0.000)\n",
"Mean validation score: 0.496 (std: 0.005)\n",
"Parameters: {'clf__n_restarts_optimizer': 1}\n",
"\n",
"-----\n",
@@ -842,28 +464,27 @@
"name": "stderr",
"output_type": "stream",
"text": [
"[Parallel(n_jobs=-1)]: Done 6 out of 6 | elapsed: 0.2s remaining: 0.0s\n",
"[Parallel(n_jobs=-1)]: Done 6 out of 6 | elapsed: 0.2s finished\n",
"[Parallel(n_jobs=-1)]: Done 9 out of 9 | elapsed: 0.9s finished\n"
"[Parallel(n_jobs=-1)]: Done 6 out of 6 | elapsed: 0.3s remaining: 0.0s\n",
"[Parallel(n_jobs=-1)]: Done 6 out of 6 | elapsed: 0.3s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"GridSearchCV took 1.16 seconds for 3 candidate parameter settings.\n",
"GridSearchCV took 1.06 seconds for 3 candidate parameter settings.\n",
"finished GridSearch\n",
"Model with rank: 1\n",
"Mean validation score: 0.807 (std: 0.093)\n",
"Mean validation score: 0.793 (std: 0.088)\n",
"Parameters: {'clf__n_estimators': 3}\n",
"\n",
"Model with rank: 2\n",
"Mean validation score: 0.756 (std: 0.048)\n",
"Parameters: {'clf__n_estimators': 100}\n",
"Mean validation score: 0.785 (std: 0.084)\n",
"Parameters: {'clf__n_estimators': 10}\n",
"\n",
"Model with rank: 3\n",
"Mean validation score: 0.733 (std: 0.054)\n",
"Parameters: {'clf__n_estimators': 10}\n",
"Mean validation score: 0.763 (std: 0.048)\n",
"Parameters: {'clf__n_estimators': 100}\n",
"\n",
"-----\n",
"classifier:\n",
@@ -871,19 +492,32 @@
" decision_function_shape='ovr', degree=3, gamma='auto', kernel='rbf',\n",
" max_iter=-1, probability=False, random_state=None, shrinking=True,\n",
" tol=0.001, verbose=False)\n",
"Fitting 3 folds for each of 3 candidates, totalling 9 fits\n",
"GridSearchCV took 0.35 seconds for 3 candidate parameter settings.\n",
"Fitting 3 folds for each of 3 candidates, totalling 9 fits\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"[Parallel(n_jobs=-1)]: Done 9 out of 9 | elapsed: 0.9s finished\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"GridSearchCV took 0.36 seconds for 3 candidate parameter settings.\n",
"finished GridSearch\n",
"Model with rank: 1\n",
"Mean validation score: 0.689 (std: 0.031)\n",
"Mean validation score: 0.689 (std: 0.067)\n",
"Parameters: {'clf__C': 3}\n",
"\n",
"Model with rank: 1\n",
"Mean validation score: 0.689 (std: 0.031)\n",
"Mean validation score: 0.689 (std: 0.067)\n",
"Parameters: {'clf__C': 10}\n",
"\n",
"Model with rank: 1\n",
"Mean validation score: 0.689 (std: 0.031)\n",
"Mean validation score: 0.689 (std: 0.067)\n",
"Parameters: {'clf__C': 100}\n",
"\n"
]
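The grid-search output above prints `clf__`-prefixed parameters, which points to a Pipeline wrapping each classifier. A hedged sketch of how such a search could be set up; the step names, the preprocessing step, the parameter grid and the report loop are assumptions inferred from the printed parameter names and the "Model with rank" output, not the notebook's exact code:

```
# Sketch of a Pipeline + GridSearchCV setup consistent with the printed output
# (parameter names like 'clf__n_estimators' imply a pipeline step called 'clf').
from sklearn.model_selection import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.ensemble import RandomForestClassifier

pipe = Pipeline([
    ("scale", StandardScaler()),          # assumption: some preprocessing step
    ("clf", RandomForestClassifier()),
])
param_grid = {"clf__n_estimators": [3, 10, 100]}

grid_search = GridSearchCV(pipe, param_grid, cv=3, n_jobs=-1, verbose=1)
grid_search.fit(Xlist, Ylist)             # Xlist, Ylist from the dataset-building step

# report the candidates ranked by mean validation score, like the notebook output
for rank, idx in enumerate(grid_search.cv_results_["rank_test_score"].argsort(), start=1):
    print("Model with rank:", rank)
    print("Mean validation score: %.3f (std: %.3f)" % (
        grid_search.cv_results_["mean_test_score"][idx],
        grid_search.cv_results_["std_test_score"][idx]))
    print("Parameters:", grid_search.cv_results_["params"][idx])
```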
@@ -10,6 +10,7 @@ import numpy as np

from flask import Flask
from flask_restful import Resource, Api, request
from flask_cors import CORS

from PIL import Image, ImageOps
@@ -23,6 +24,7 @@ size = 100, 100

app = Flask(__name__)
CORS(app)
api = Api(app)

@@ -61,4 +63,4 @@ if __name__ == '__main__':
logging.basicConfig(filename=logfilename,level=logging.DEBUG)

print("server running")
app.run(port='3000')
app.run(port='3200')
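The hunks above only show the imports, the CORS/Api wiring and the port change to 3200; the /predict resource itself is not part of this diff. A minimal sketch of what such a flask_restful endpoint might look like, where the resource class, the model.pkl loading and the feature preparation are assumptions (only the `{"result": ...}` response shape is suggested by the app code, which reads `data.data.result`):

```
# Hypothetical sketch of the /predict endpoint (not shown in this diff).
# Assumes a pickled classifier in model.pkl and 100x100 grayscale features,
# matching the dataset-building step.
import pickle
import numpy as np
from flask import Flask
from flask_restful import Resource, Api, request
from flask_cors import CORS
from PIL import Image, ImageOps

size = 100, 100
app = Flask(__name__)
CORS(app)
api = Api(app)

with open("model.pkl", "rb") as f:
    clf = pickle.load(f)

class Predict(Resource):
    def post(self):
        img = Image.open(request.files["file"])
        thumb = ImageOps.fit(img, size, Image.ANTIALIAS)  # Image.LANCZOS on newer Pillow
        features = np.asarray(thumb.convert("L")).flatten().reshape(1, -1)
        label = clf.predict(features)[0]
        # assumption: the smartphone app reads data.data.result on the client side
        return {"result": "object" if label == 1 else "no object"}

api.add_resource(Predict, "/predict")

if __name__ == "__main__":
    print("server running")
    app.run(port='3200')
```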
@@ -1,14 +1,14 @@
echo "sending img1 to server"
echo "server response:"
curl -X POST -F file=@./test1.png http://127.0.0.1:3000/predict
curl -X POST -F file=@./test1.png http://127.0.0.1:3200/predict
echo ""

echo "sending img2 to server"
echo "server response:"
curl -X POST -F file=@./test2.png http://127.0.0.1:3000/predict
curl -X POST -F file=@./test2.png http://127.0.0.1:3200/predict
echo ""

echo "sending img to server"
echo "server response:"
curl -X POST -F file=@./test3.png http://127.0.0.1:3000/predict
curl -X POST -F file=@./test3.png http://127.0.0.1:3200/predict
echo ""
@@ -1,3 +1,4 @@
var urlapi = "http://127.0.0.1:3200/";

angular.module('app', [
  'ionic',
@@ -35,7 +36,8 @@ angular.module('app', [
      url: '/main',
      views: {
        'menuContent': {
          templateUrl: 'templates/main.html'
          templateUrl: 'templates/main.html',
          controller: 'MainCtrl'
        }
      }
    });
@@ -1,5 +1,80 @@
angular.module('app.main', [])

.controller('MainCtrl', function($scope) {
.controller('MainCtrl', function($scope, $http) {
  $scope.response = "";
  $scope.model_file = {};

  $scope.uploadFile = function() {
    console.log("$scope.img_file");
    console.log($scope.img_file);
    var fd = new FormData();
    //Take the first selected file
    fd.append("file", $scope.img_file);
    console.log(fd);
    $http({
      url: urlapi + 'predict',
      method: "POST",
      headers: {
        "Content-Type": undefined
      },
      data: fd
    })
    .then(function(data) {
      console.log("response: ");
      console.log(data.data);
      // response reaction
      $scope.response = data.data.result;
    },
    function(response) { // optional
      // failed
      console.log(response);
    });
  };

});
/*$scope.takePhoto = function() {
  alert("a");
  console.log("take photo");
  var options = {
    quality: 100,
    destinationType: Camera.DestinationType.DATA_URL,
    sourceType: Camera.sourceType,
    allowEdit: true,
    encodingType: Camera.EncodingType.PNG,
    targetWidth: 500,
    targetHeight: 500,
    popoverOptions: CameraPopoverOptions,
    saveToPhotoAlbum: false,
    correctOrientation: true
  };

  $cordovaCamera.getPicture(options).then(function(imageData) {
    //$scope.user.newAvatar = "data:image/jpeg;base64," + imageData;
    $scope.img.imgdata = "data:image/jpeg;base64," + imageData;
    $scope.img.img = imageData;
  }, function(err) {
    console.log(err);
  });
};*/
})
.directive('fileModel', [
  '$parse',
  function($parse) {
    return {
      restrict: 'A',
      link: function(scope, element, attrs) {
        var model = $parse(attrs.fileModel);
        var modelSetter = model.assign;

        element.bind('change', function() {
          scope.$apply(function() {
            if (attrs.multiple) {
              modelSetter(scope, element[0].files);
            } else {
              modelSetter(scope, element[0].files[0]);
            }
          });
        });
      }
    };
  }
]);
@@ -2,7 +2,7 @@
<ion-content>
  <div class="row">
    <div class="col">
      <div class="button button-full button-stable">
      <div ng-click="takePhoto()" class="button button-full">
        <i class="icon ion-camera"></i> Take Photo
      </div>
    </div>
@@ -12,14 +12,19 @@
      </div>
    </div>
  </div>
  <input type='file' file-model='img_file'>

  <div class="list card">
    <div class="item item-image">
      <img src="http://www.trueactivist.com/wp-content/uploads/2015/07/hotdog-300x300.jpg">
      <img ng-src="{{img_file}}">
    </div>
    <a class="item item-icon-left balanced" href="#">
    <div ng-click="uploadFile()" class="button button-royal" href="#">
      Send
    </div>
    </div>
    <a ng-show="response!=''" class="item item-icon-left balanced" href="#">
      <i class="icon ion-checkmark"></i>
      Is a Hotdog
      Is a {{response}}
    </a>
  </div>
</ion-content>