Mirror of https://github.com/arnaucube/objectImageIdentifierAI.git
Commit: training the neural network works, but it needs too much RAM; started the implementation of the server.
.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
serverGo
cropObjects/.gitignore (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
object
noobject
*.jpeg
*.png
cropObjects/README.md (new file, 37 lines)
@@ -0,0 +1,37 @@
# imagesToDataset

Gets all the images from the directories 'object' and 'noobject', and puts them into a dataset file.
The dataset is a dataset.npy file that contains 2 columns:
- image pixel arrays
- 0 or 1, depending on whether the image comes from the 'noobject' or the 'object' directory


First, install the libraries.

### install scikit-learn
http://scikit-learn.org/stable/install.html
pip install -U scikit-learn

### install scikit-image
http://scikit-image.org/download
pip install -U scikit-image

### install numpy
https://www.scipy.org/install.html
python -m pip install --upgrade pip
pip install --user numpy scipy matplotlib ipython jupyter pandas sympy nose

### install Pillow
http://pillow.readthedocs.io/en/3.0.x/installation.html
(sudo) pip install Pillow

### install matplotlib
https://matplotlib.org/users/installing.html
python -m pip install -U pip
python -m pip install -U matplotlib

You may also need to install python-tk:
sudo apt-get install python-tk


## to run
python readDataset.py
cropObjects/detectObject.py (new file, 57 lines)
@@ -0,0 +1,57 @@
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt

import os
from skimage import io
from skimage import color
from skimage import filters


def imgFileToData(path):
    # read an image file into a numpy array using PIL
    image = Image.open(path)
    image_data = np.asarray(image)
    return image_data


def imgFileToData2(path):
    # read an image file into a numpy array using skimage
    img = io.imread(path)
    return img


def detectObj(image_data):
    # convert to greyscale and take as "object" the pixels darker than a
    # threshold derived from the median intensity
    #image_data_blue = image_data[:,:,2]
    image_data_blue = color.rgb2grey(image_data)
    #image_data_blue = threshold(image_data)

    median_blue = np.median(image_data_blue)
    print(median_blue)
    median_blue = median_blue - median_blue/1.5
    print(median_blue)
    print(image_data_blue)

    non_empty_columns = np.where(image_data_blue.min(axis=0) < median_blue)[0]
    non_empty_rows = np.where(image_data_blue.min(axis=1) < median_blue)[0]

    # bounding box of the detected object: (row_min, row_max, col_min, col_max)
    boundingBox = (min(non_empty_rows), max(non_empty_rows), min(non_empty_columns), max(non_empty_columns))
    print(boundingBox)
    return boundingBox


def threshold(img):
    # binarize the image using the mean threshold
    #img = color.rgb2grey(img)
    #img = img[:,:,2]
    img = color.rgb2grey(img)
    thresh = filters.threshold_mean(img)
    binary = img > thresh
    return binary


def prova(img):
    # experiment helper, currently a no-op passthrough
    #return color.rgb2grey(img)
    return img


def crop(image_data, box):
    # crop the image to the given bounding box
    return image_data[box[0]:box[1], box[2]:box[3]]


def saveDataToImageFile(data, filename):
    image = Image.fromarray(data)
    image.save(filename)
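The helpers above are meant to chain together as in cropObjects/main.py further down; a minimal usage sketch (the image path is a placeholder):

    import detectObject as do

    # load an image, locate the object's bounding box, crop it and save the result
    image_data = do.imgFileToData2("object/example.png")  # placeholder path
    box = do.detectObj(image_data)  # (row_min, row_max, col_min, col_max)
    cropped = do.crop(image_data, box)
    do.saveDataToImageFile(cropped, "out.png")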
cropObjects/detectObject.pyc (new binary file, not shown)
cropObjects/detectObjects.py (new file, 24 lines)
@@ -0,0 +1,24 @@
import cv2

#reading the image
#image = cv2.imread("demo.jpeg")

def detectObjects(image):
    # detect edges and show them
    edged = cv2.Canny(image, 10, 250)
    cv2.imshow("Edges", edged)
    cv2.waitKey(0)

    # apply a closing function to join broken edge segments
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (7, 7))
    closed = cv2.morphologyEx(edged, cv2.MORPH_CLOSE, kernel)
    cv2.imshow("Closed", closed)
    cv2.waitKey(0)

    # find contours (OpenCV 3.x returns 3 values here)
    (_, cnts, _) = cv2.findContours(closed.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

    # approximate each contour and draw it on the original image
    for c in cnts:
        peri = cv2.arcLength(c, True)
        approx = cv2.approxPolyDP(c, 0.02 * peri, True)
        cv2.drawContours(image, [approx], -1, (0, 255, 0), 2)
    cv2.imshow("Output", image)
    cv2.waitKey(0)
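A short sketch of calling detectObjects on its own, following the commented-out cv2.imread line above ("demo.jpeg" is a placeholder path):

    import cv2
    import detectObjects as dos

    # read an image and step through the edge, closing and contour windows
    image = cv2.imread("demo.jpeg")  # placeholder path
    dos.detectObjects(image)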
cropObjects/detectObjects.pyc (new binary file, not shown)
cropObjects/main.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from os import walk
import detectObject as do
import detectObjects as dos
import matplotlib.pyplot as plt


# load the image and detect the object's bounding box
#image_data = do.imgFileToData("imgs/34.png")
image_data = do.imgFileToData2("object/25.png")

boundingBox = do.detectObj(image_data)
image_data = do.prova(image_data)
r = do.crop(image_data, boundingBox)


# run the contour-based detector on the cropped region
r_copy = r
dos.detectObjects(r_copy)
#do.saveDataToImageFile(image_data, "out.png")

#r = do.prova(image_data)

# show the original image and the cropped result side by side
fig = plt.figure()
ax = fig.add_subplot(121)
ax.set_title("Original")
ax.imshow(image_data)

ax1 = fig.add_subplot(122)
ax1.set_title("Result")
ax1.imshow(r)

plt.show()

'''
f = []
for (dirpath, dirnames, filenames) in walk("imgs"):
    for filename in filenames:
        print(filename)
        image_data = do.imgFileToData("imgs/" + filename)
        boundingBox = do.detectObj(image_data)
        print(boundingBox)
'''
imagesToDataset/.gitignore (vendored, new file, 2 lines)
@@ -0,0 +1,2 @@
object
noobject
imagesToDataset/README.md (new file, 37 lines)
@@ -0,0 +1,37 @@
# imagesToDataset

Gets all the images from the directories 'object' and 'noobject', and puts them into a dataset file.
The dataset is a dataset.npy file that contains 2 columns:
- image pixel arrays
- 0 or 1, depending on whether the image comes from the 'noobject' or the 'object' directory


First, install the libraries.

### install scikit-learn
http://scikit-learn.org/stable/install.html
pip install -U scikit-learn

### install scikit-image
http://scikit-image.org/download
pip install -U scikit-image

### install numpy
https://www.scipy.org/install.html
python -m pip install --upgrade pip
pip install --user numpy scipy matplotlib ipython jupyter pandas sympy nose

### install Pillow
http://pillow.readthedocs.io/en/3.0.x/installation.html
(sudo) pip install Pillow

### install matplotlib
https://matplotlib.org/users/installing.html
python -m pip install -U pip
python -m pip install -U matplotlib

You may also need to install python-tk:
sudo apt-get install python-tk


## to run
python readDataset.py
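A minimal sketch of loading the generated dataset and inspecting one row, assuming the two-column layout described above (flattened 100x100 RGB pixels plus a 0/1 label):

    import numpy as np

    # newer numpy versions may need np.load('dataset.npy', allow_pickle=True)
    dataset = np.load('dataset.npy')
    pixels, label = dataset[0]
    print("images: " + str(len(dataset)))
    print("pixels per image: " + str(len(pixels)))  # 100*100*3 = 30000
    print("label: " + str(label))  # 1 = 'object', 0 = 'noobject'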
imagesToDataset/dataset.npy (new binary file, not shown)
imagesToDataset/main.py (new file, 47 lines)
@@ -0,0 +1,47 @@
from os import walk
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image, ImageOps


# width and height in pixels of the resized output images
size = 100, 100

def imgFileToData(path):
    image = Image.open(path)
    # resize the image
    thumb = ImageOps.fit(image, size, Image.ANTIALIAS)
    image_data = np.asarray(thumb).flatten()
    '''
    plt.plot(111)
    plt.imshow(thumb)
    plt.show()
    '''
    # a 100x100 RGB image flattens to 100*100*3 = 30000 values
    if len(image_data) != 30000:
        print("possible future ERROR!")
        print("len: " + str(len(image_data)))
        print("please, delete: " + path)
    return image_data


def getDirectoryFiles(path, imgClass):
    # read every image in the directory and pair it with its class label
    images = []
    for (dirpath, dirnames, filenames) in walk(path):
        for filename in filenames:
            #print(filename)
            image_data = imgFileToData(path + "/" + filename)
            images.append([image_data, imgClass])
    return images


def asdf():
    # leftover plotting snippet, unused (images_and_predictions is not defined here)
    for index, (image, prediction) in enumerate(images_and_predictions[:4]):
        plt.subplot(2, 4, index + 5)
        plt.axis('off')
        plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
        plt.title('Prediction: %i' % prediction)


objects = getDirectoryFiles("object", 1)
noobjects = getDirectoryFiles("noobject", 0)

dataset = np.concatenate((objects, noobjects), axis=0)

np.save('dataset.npy', dataset)
imagesToDataset/openDataset.py (new file, 16 lines)
@@ -0,0 +1,16 @@
import matplotlib.pyplot as plt
import numpy as np
from random import randint


dataset = np.load('dataset.npy')

# pick a random image from the dataset (randint is inclusive on both ends)
n = randint(0, len(dataset) - 1)

plt.subplot(111)
plt.axis('off')
# the stored pixels are flattened, so reshape them back to 100x100 RGB
plt.imshow(dataset[n][0].reshape(100, 100, 3))
plt.title('class: ' + str(dataset[n][1]))

plt.show()
nnTrain/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
trainBACKUP.py
nnTrain/README.md (new file, 24 lines)
@@ -0,0 +1,24 @@
# serverImgPredictor

Needs the dataset file (dataset.npy) generated with imagesToDataset.

### install Flask
http://flask.pocoo.org/docs/0.12/quickstart/#a-minimal-application
(sudo) pip install Flask

pip install flask_restful
pip install flask-jsonpify

### install scikit-neuralnetwork
https://scikit-neuralnetwork.readthedocs.io/en/latest/guide_installation.html
pip install scikit-neuralnetwork

You also need to upgrade the Lasagne library:
(sudo) pip install --upgrade https://github.com/Lasagne/Lasagne/archive/master.zip


## Run
python train.py

This will generate nn.pkl.

Copy nn.pkl to the serverPredictor directory.
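A minimal sketch of loading the generated nn.pkl and making a single prediction, mirroring serverPredictor/main.py; the image path is a placeholder and the 100x100 resize matches imagesToDataset:

    import pickle
    import numpy as np
    from PIL import Image, ImageOps

    # load the trained network and classify one image
    nn = pickle.load(open('nn.pkl', 'rb'))
    image = Image.open("test1.png")  # placeholder path
    thumb = ImageOps.fit(image, (100, 100), Image.ANTIALIAS)
    sample = np.array([np.asarray(thumb).flatten()])
    prediction = nn.predict(sample)
    print("object" if prediction[0][0] == 1 else "noobject")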
nnTrain/dataset.npy (new binary file, not shown)
nnTrain/nn.pkl (new file, 265 lines; diff suppressed because one or more lines are too long)
nnTrain/predict.py (new file, 32 lines)
@@ -0,0 +1,32 @@
from sklearn.neural_network import MLPClassifier
from skimage import io


img1 = io.imread("imgs/25.png")
img2 = io.imread("imgs/24.png")
img3 = io.imread("imgs/104.png")

img4 = io.imread("otherimgs/image_0008.jpg")


# MLPClassifier expects 2-D input (n_samples, n_features), so flatten every
# image; this assumes all the images have the same dimensions
data_train = [img.flatten() for img in (img1, img2, img3, img4)]
data_labels = [1, 1, 1, 0]
data_test = [img.flatten() for img in (img4, img3)]

clf = MLPClassifier(solver='lbfgs', alpha=1e-5,
                    hidden_layer_sizes=(5, 2), random_state=1)
clf.fit(data_train, data_labels)

print(clf.predict(data_test))

print("MLPClassifier coefficient shapes:")
print([coef.shape for coef in clf.coefs_])


'''
images_and_predictions = list(zip(digits.images[n_samples // 2:], predicted))
for index, (image, prediction) in enumerate(images_and_predictions[:4]):
    plt.subplot(2, 4, index + 5)
    plt.axis('off')
    plt.imshow(image, cmap=plt.cm.gray_r, interpolation='nearest')
    plt.title('Prediction: %i' % prediction)
'''
nnTrain/train.py (new file, 62 lines)
@@ -0,0 +1,62 @@
import matplotlib.pyplot as plt
import numpy as np
from random import randint
import pickle
from sknn.mlp import Classifier, Layer, Convolution

def datasetToTrainAndTestData(dataset, numtest):
    np.random.shuffle(dataset)
    print("length total data: " + str(len(dataset)))

    traindata = np.copy(dataset)
    testdata = []
    for i in range(numtest):
        # get a random index between 0 and the current amount of images in traindata
        n = randint(0, len(traindata) - 1)
        testdata.append(traindata[n])

        # delete the n-th image (traindata[n]) from the traindata
        traindata = np.delete(traindata, n, axis=0)
    testdataNP = np.array(testdata)
    return traindata, testdataNP


# read the dataset made with the 'imagesToDataset' repository
dataset = np.load('dataset.npy')

traindata, testdata = datasetToTrainAndTestData(dataset, 10)
print("length traindata: " + str(len(traindata)))
print("length testdata: " + str(len(testdata)))

# traindataAttributes contains all the pixels of each image
traindataAttributes = traindata[:,0]
traindataAttributes = np.array([[row] for row in traindataAttributes])

# traindataLabels contains each label of each image
traindataLabels = traindata[:,1]
traindataLabels = traindataLabels.astype('int')

# testdataAttributes contains the pixels of the test images
testdataAttributes = testdata[:,0]
testdataAttributes = np.array([[row] for row in testdataAttributes])

# testdataLabels contains each label of each image
testdataLabels = testdata[:,1]
testdataLabels = testdataLabels.astype('int')

# default: units=100, learning_rate=0.001, n_iter=25
nn = Classifier(
    layers=[
        Layer("Sigmoid", units=10),
        Layer("Softmax")],
    learning_rate=0.001,
    n_iter=20,
    verbose=True)

nn.fit(traindataAttributes, traindataLabels)

print('\nTRAIN SCORE', nn.score(traindataAttributes, traindataLabels))
print('TEST SCORE', nn.score(testdataAttributes, testdataLabels))

# save the neural network configuration
pickle.dump(nn, open('nn.pkl', 'wb'))
serverPredictor/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
currentimage.png
serverPredictor/loadNN.py (new file, empty)
serverPredictor/main.py (new file, 63 lines)
@@ -0,0 +1,63 @@
from flask import Flask
from flask_restful import Resource, Api, request

import matplotlib.pyplot as plt
import numpy as np
import cv2
import io
from PIL import Image, ImageOps

import pickle

app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024  # 16 MB
api = Api(app)

# width and height of the resized input images, matching imagesToDataset
size = 100, 100


# load the neural network generated with nnTrain
nn = pickle.load(open('nn.pkl', 'rb'))

class Predict(Resource):
    def get(self):
        message = {'message': 'got route1'}
        return message
    def post(self):
        filer = request.files['file']
        # save the uploaded image and transform it into a numpy array
        filer.save("currentimage.png")
        image = Image.open("currentimage.png")
        thumb = ImageOps.fit(image, size, Image.ANTIALIAS)
        image_data = np.asarray(thumb).flatten()
        imagetopredict = np.array([image_data])

        # predict the class of the image with the neural network
        prediction = nn.predict(imagetopredict)
        print("prediction")
        print(prediction[0][0])
        if prediction[0][0] == 0:
            result = "noobject"
        else:
            result = "object"
        message = {'class': result}
        return message


class Route2(Resource):
    def get(self):
        return {'message': 'got route2'}


class Route3(Resource):
    def get(self):
        return {'message': 'got route3'}


api.add_resource(Predict, '/predict')
api.add_resource(Route2, '/route2')
api.add_resource(Route3, '/route3')


if __name__ == '__main__':
    app.run(port=3045)
serverPredictor/nn.pkl (new file, 265 lines; diff suppressed because one or more lines are too long)
serverPredictor/test.sh (new file, 10 lines)
@@ -0,0 +1,10 @@
echo "sending img1 to server"
echo "server response:"
curl -F file=@./test1.png http://127.0.0.1:3045/predict
echo ""


echo "sending img2 to server"
echo "server response:"
curl -F file=@./test2.png http://127.0.0.1:3045/predict
echo ""
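The same test can be done from Python instead of curl; a sketch assuming the requests library is installed and the server from serverPredictor/main.py is running on port 3045:

    import requests

    # post an image to the /predict route and print the returned class
    with open("test1.png", "rb") as f:
        response = requests.post("http://127.0.0.1:3045/predict", files={"file": f})
    print(response.json())  # e.g. {"class": "object"} or {"class": "noobject"}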
serverPredictor/test1.png (new binary file, 71 KiB, not shown)
serverPredictor/test2.png (new binary file, 152 KiB, not shown)