# app.py
from flask import Flask, request, jsonify
from flask_cors import CORS
import numpy as np
from PIL import Image
import io
import tensorflow as tf
import os

app = Flask(__name__)
CORS(app)

# Confidence threshold below which predictions are reported as 'Uncertain'
CONFIDENCE_THRESHOLD = 0.70  # 70% confidence threshold

print("Starting server...")
print("Current working directory:", os.getcwd())
print("Files in directory:", os.listdir())

# Load the TFLite model with more error checking
try:
    print("\nAttempting to load model...")
    model_path = "recycling_model.tflite"

    # Check if file exists
    if os.path.exists(model_path):
        print(f"Model file found! Size: {os.path.getsize(model_path) / (1024*1024):.2f} MB")
        interpreter = tf.lite.Interpreter(model_path=model_path)
        interpreter.allocate_tensors()
        input_details = interpreter.get_input_details()
        output_details = interpreter.get_output_details()
        print("Model loaded successfully!")
        print("Input details:", input_details)
        print("Output details:", output_details)
    else:
        print("Model file not found!")
        print("Current directory contents:", os.listdir())
        interpreter = None
except Exception as e:
    print(f"Error loading model: {str(e)}")
    import traceback
    traceback.print_exc()
    interpreter = None
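
# The interpreter plus input_details / output_details above are module-level
# globals that /predict reads later to feed the image tensor and fetch the
# class probabilities.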

# Category labels; the order must match the model's training labels exactly
CATEGORIES = [
    'Cardboard', 'Food_Waste', 'Glass', 'Metal', 'Paper', 'Plastic', 'Other'
]

# Instructions for each category
INSTRUCTIONS = {
    'Cardboard': {
        'instructions': [
            'Flatten all boxes',
            'Remove tape and staples',
            'Keep dry and clean',
            'Bundle large boxes together'
        ],
        'examples': 'Boxes, packaging, shipping containers'
    },
    'Food_Waste': {
        'instructions': [
            'Remove any packaging',
            'Collect in compost bin',
            'Keep sealed to prevent odors',
            'Avoid meat and dairy if home composting'
        ],
        'examples': 'Fruit/vegetable scraps, coffee grounds, eggshells'
    },
    'Glass': {
        'instructions': [
            'Rinse thoroughly',
            'Remove caps and lids',
            'Sort by color if required',
            'Handle with care - do not break'
        ],
        'examples': 'Bottles, jars, containers'
    },
    'Metal': {
        'instructions': [
            'Clean thoroughly',
            'Remove labels if possible',
            'Crush cans to save space',
            'Separate aluminum and steel'
        ],
        'examples': 'Cans, foil, bottle caps'
    },
    'Paper': {
        'instructions': [
            'Keep clean and dry',
            'Remove plastic wrapping',
            'Stack neatly',
            'Avoid greasy or food-stained paper'
        ],
        'examples': 'Newspapers, magazines, office paper'
    },
    'Plastic': {
        'instructions': [
            'Rinse clean',
            'Check recycling number',
            'Remove caps and labels',
            'Crush to save space'
        ],
        'examples': 'Bottles, containers, packaging'
    },
    'Other': {
        'instructions': [
            'Check local guidelines',
            'Separate if multiple materials',
            'Consider reuse options',
            'When in doubt, ask recycling center'
        ],
        'examples': 'Mixed materials, uncommon items'
    },
    'Uncertain': {
        'instructions': [
            'Try taking another photo with:',
            '- Better lighting',
            '- Different angle',
            '- Less background clutter',
            '- Closer to the item',
            'Or consult your local recycling guidelines'
        ],
        'examples': 'Item needs clearer image for classification'
    }
}
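
# Note: 'Uncertain' is not one of the model's output classes in CATEGORIES; it is
# only used by /predict as a fallback label when the top prediction falls below
# CONFIDENCE_THRESHOLD or the top two scores are close together.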


@app.route('/', methods=['GET'])
def home():
    return jsonify({
        "status": "Recycling Classification Server is Running!",
        "model_loaded": interpreter is not None,
        "categories": CATEGORIES
    })


@app.route('/predict', methods=['POST'])
def predict():
    print("\nReceived prediction request")
    try:
        if interpreter is None:
            raise Exception("Model not loaded properly")

        if 'image' not in request.files:
            return jsonify({
                'success': False,
                'error': 'No image file provided'
            })

        file = request.files['image']
        print(f"Processing file: {file.filename}")
        # Process image
        image = Image.open(io.BytesIO(file.read())).convert('RGB')
        image = image.resize((299, 299))
        image_array = np.array(image, dtype=np.float32)
        image_array = image_array / 255.0
        image_array = np.expand_dims(image_array, axis=0)

        print(f"Processed image shape: {image_array.shape}")
        print(f"Image value range: {image_array.min()} to {image_array.max()}")
print("Setting tensor data...")
interpreter.set_tensor(input_details[0]['index'], image_array)
print("Running inference...")
interpreter.invoke()
print("Getting predictions...")
predictions = interpreter.get_tensor(output_details[0]['index'])
# Print all predictions
print("\nAll predictions:")
for i, conf in enumerate(predictions[0]):
print(f"{CATEGORIES[i]}: {conf * 100:.2f}%")
# Get top 2 predictions
top_2_indices = np.argsort(predictions[0])[-2:][::-1]
top_2_confidences = predictions[0][top_2_indices]
# Get highest confidence prediction
predicted_class = top_2_indices[0]
confidence = float(top_2_confidences[0])
category = CATEGORIES[predicted_class]
print(f"\nFinal prediction: {category} with confidence: {confidence * 100:.2f}%")
# If confidence is below threshold or top 2 predictions are close
if confidence < CONFIDENCE_THRESHOLD or (top_2_confidences[0] - top_2_confidences[1]) < 0.15:
return jsonify({
'success': True,
'category': 'Uncertain',
'confidence': confidence,
'instructions': [
f'This item could be either:',
f'1. {CATEGORIES[top_2_indices[0]]} ({(top_2_confidences[0] * 100):.1f}%)',
f'2. {CATEGORIES[top_2_indices[1]]} ({(top_2_confidences[1] * 100):.1f}%)',
'',
'Tips for better classification:',
'- Try better lighting',
'- Different angle',
'- Less background clutter',
'- Get closer to the item'
],
'examples': 'Multiple possible categories detected'
})

        return jsonify({
            'success': True,
            'category': category,
            'confidence': confidence,
            'instructions': INSTRUCTIONS[category]['instructions'],
            'examples': INSTRUCTIONS[category]['examples']
        })

    except Exception as e:
        print(f"Error during prediction: {str(e)}")
        import traceback
        traceback.print_exc()
        return jsonify({
            'success': False,
            'error': str(e)
        })


if __name__ == '__main__':
    app.run(debug=True)
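
# Example request for local testing (a sketch; assumes the server is running on
# Flask's default port 5000 and that 'sample.jpg' exists in the working directory):
#
#   import requests
#   with open("sample.jpg", "rb") as f:
#       response = requests.post("http://127.0.0.1:5000/predict", files={"image": f})
#   print(response.json())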