Commit 8da69b8: "Single Agent Working"

1 parent 2577594 · commit 8da69b8

13 files changed: +149 additions, -46 deletions

server/camera/computer_vision.py

Lines changed: 2 additions & 0 deletions
@@ -29,9 +29,11 @@ def __init__(self, manager, config_path, width=700, height=470):
        # Camera object
        self.cam = None
        self.manager = manager
+        self.camera_ip = "http://192.168.137.119:8080/video"

    def init_camera(self):
        """Initialize the camera settings."""
+
        self.cam = cv2.VideoCapture(1, cv2.CAP_DSHOW)
        self.cam.set(cv2.CAP_PROP_FRAME_WIDTH, self.width)
        self.cam.set(cv2.CAP_PROP_FRAME_HEIGHT, self.height)
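
Note that the new camera_ip field is not yet consumed by init_camera, which still opens local device 1 via CAP_DSHOW. If the IP stream were used instead, a minimal sketch could look like the following (an illustration under that assumption, not part of the commit; the URL is the one stored above):

```python
import cv2

def open_ip_camera(camera_ip: str, width: int = 700, height: int = 470) -> cv2.VideoCapture:
    """Open an HTTP/MJPEG stream instead of a local device (illustrative sketch)."""
    cam = cv2.VideoCapture(camera_ip)           # e.g. "http://192.168.137.119:8080/video"
    cam.set(cv2.CAP_PROP_FRAME_WIDTH, width)    # note: many IP streams ignore size hints
    cam.set(cv2.CAP_PROP_FRAME_HEIGHT, height)
    return cam
```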

server/camera/config/config2.json

Lines changed: 1 addition & 1 deletion
@@ -23,7 +23,7 @@
    ],
    "player": [
        {
-            "aruco_id": 3,
+            "aruco_id": 4,
            "tags": "bot",
            "id": 3
        }
(5 image files also changed in this commit; previews not rendered: 7.6 KB, 7.59 KB, 7.67 KB, 7.62 KB, 7.62 KB.)

server/camera/utils.py

Lines changed: 27 additions & 6 deletions
@@ -47,21 +47,41 @@ def detect_shapes_and_colors(image):



+import cv2
+import numpy as np
+
def detect_aruco_markers(image):
+    # Convert to grayscale
+    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)

+    # Apply mild Gaussian blur (reduces noise but keeps details)
+    gray = cv2.GaussianBlur(gray, (3, 3), 0)
+
+    # Define the ArUco dictionary and detector parameters
    markerDictionary = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_5X5_50)
-    detectorParam = cv2.aruco.DetectorParameters()
-    detector = cv2.aruco.ArucoDetector(markerDictionary, detectorParam)
-    corners, ids, rejected = detector.detectMarkers(image)
-
+    detectorParams = cv2.aruco.DetectorParameters()
+
+    # Improve robustness to shadows
+    detectorParams.adaptiveThreshWinSizeMin = 5
+    detectorParams.adaptiveThreshWinSizeMax = 23
+    detectorParams.adaptiveThreshWinSizeStep = 10
+    detectorParams.minMarkerPerimeterRate = 0.03
+    detectorParams.cornerRefinementMethod = cv2.aruco.CORNER_REFINE_CONTOUR
+
+    detector = cv2.aruco.ArucoDetector(markerDictionary, detectorParams)
+
+    # Detect ArUco markers
+    corners, ids, rejected = detector.detectMarkers(gray)

    list_of_aruco = []
-    if ids is not None :
+    if ids is not None:
        for i in range(len(ids)):
-            list_of_aruco.append((ids[i][0],corners[i][0]))
+            list_of_aruco.append((ids[i][0], corners[i][0]))
+

    return list_of_aruco

+
from collections import defaultdict


@@ -119,6 +139,7 @@ def assign_positions_to_aruco_entities(entities, list_of_aruco):
            positions = positions_dict[id].pop()
            entity = grouped_entities[id].pop()
            result.append((entity,positions))
+

    return result
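
A minimal usage sketch for the retuned detector (the test image name and the import path are assumptions, not part of the commit):

```python
import cv2

from server.camera.utils import detect_aruco_markers  # import path assumed from the repo layout

frame = cv2.imread("test_frame.png")  # hypothetical frame containing DICT_5X5_50 markers
for marker_id, corners in detect_aruco_markers(frame):
    # corners is a 4x2 array of pixel coordinates for one detected marker
    cx, cy = corners.mean(axis=0)
    print(f"marker {marker_id} centred at ({cx:.1f}, {cy:.1f})")
```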

server/manager.py

Lines changed: 3 additions & 3 deletions
@@ -126,7 +126,7 @@ def __init__(self):
        self.camera_interface = ComputerVisionManager(self, camera_config,width=700,height=470)
        self.aget_interface = AgentInterface(self,model_path)
        self.webscoket_interface = WebSocketServer(self)
-
+        self.frame_const = 5
        self.frame_rate = 1

    def validate_response(self, response: dict):
@@ -198,7 +198,7 @@ def process_frame(self, response, image):
        # self.webscoket_interface.send_frame(image,"cvframe")
        # self.frame_rate=20
        # self.frame_rate-=1
-        self.frame_rate = 10
+        self.frame_rate = self.frame_const
        self.webscoket_interface.send_frame(image,"cvframe1")

        required_fields = ["bot_pos", "bot_dir", "ball_coords", "goal_coords", "wall_coords", "bot_id" ]
@@ -227,7 +227,7 @@ def process_frame(self, response, image):
            self.aget_interface.step(cv_frame_data,image)
        else:
            pass
-            # print("Invalid frame: Missing required fields ->", set(required_fields) - cv_frame_data.keys())
+            print("Invalid frame: Missing required fields ->", set(required_fields) - cv_frame_data.keys())


        return
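
The frame_const / frame_rate pair reads like a send-throttling counter, but the decrement is still commented out in this hunk, so the intended cadence is not fully visible. A minimal sketch of the general pattern, under that assumption:

```python
class FrameThrottler:
    """Send only every Nth frame; pattern assumed from frame_const/frame_rate above."""

    def __init__(self, every_n: int = 5):
        self.frame_const = every_n
        self.frame_rate = 1  # send the first frame immediately

    def should_send(self) -> bool:
        self.frame_rate -= 1
        if self.frame_rate <= 0:
            self.frame_rate = self.frame_const  # reset, as process_frame does after sending
            return True
        return False
```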

server/ml_agent/agent_interface.py

Lines changed: 6 additions & 5 deletions
@@ -74,10 +74,10 @@ def action_to_vel(self,action):
            v = 1
        elif action == 2: # Move backward
            v = -1
-        elif action == 3: # Rotate left
-            w = 1
-        elif action == 4: # Rotate right
+        elif action == 3: # Rotate right
            w = -1
+        elif action == 4: # Rotate left
+            w = 1
        elif action == 5: # Strafe left (approximate with slight left rotation)
            w = 0.5
        elif action == 6: # Strafe right (approximate with slight right rotation)
@@ -98,8 +98,7 @@ def step(self, cv_frame_data,image):
        #print(cv_frame_data)
        processed_frame_data,rays = self.agent_observation.addObservation(cv_frame_data)
        # print("added observation")
-        processed_frame_img= visualize_frame(cv_frame_data,processed_frame_data,rays,image)
-        self.manager.webscoket_interface.send_frame(processed_frame_img,"cvframe2")
+
        # print("ray Data",ray_data)
        # print(ray_data)

@@ -122,6 +121,8 @@ def step(self, cv_frame_data,image):

        print("actions",action,target_velocity)

+        processed_frame_img= visualize_frame(cv_frame_data,processed_frame_data,rays,image,action)
+        self.manager.webscoket_interface.send_frame(processed_frame_img,"cvframe2")
        #return action

# Example usage:
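
For reference, the post-fix action mapping restated as a stand-alone function (the v = w = 0 defaults and the action-6 magnitude are not visible in the hunk and are assumptions):

```python
def action_to_velocity(action: int) -> tuple[float, float]:
    """Sketch of the corrected discrete-action to (linear v, angular w) mapping."""
    v, w = 0.0, 0.0              # assumed default: no movement
    if action == 1:              # move forward
        v = 1.0
    elif action == 2:            # move backward
        v = -1.0
    elif action == 3:            # rotate right (swapped in this commit)
        w = -1.0
    elif action == 4:            # rotate left (swapped in this commit)
        w = 1.0
    elif action == 5:            # strafe left, approximated as slight left rotation
        w = 0.5
    elif action == 6:            # strafe right, assumed symmetric to action 5
        w = -0.5
    return v, w
```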

server/ml_agent/agent_observation.py

Lines changed: 21 additions & 13 deletions
@@ -7,7 +7,7 @@
class Observation:
    def __init__(self):
        self.data = np.zeros((3, 7, 5), dtype=float)
-        self.ray_length = 400.0
+        self.ray_length = 350.0
        self.num_rays = 7

        self.new_frame = np.tile([0, 0, 0, 1, 1], (7, 1)).astype(float)
@@ -21,18 +21,25 @@ def __init__(self):
        self.hit_threshold = 50

    def generate_rays(self, origin, angle):
-        """Generate rays from the given origin with a spread of angles."""
-        rays = []
-        angles = np.linspace(angle-90, angle , self.num_rays)
-
-        for theta in angles:
-            theta_rad = np.radians(theta)
-            end_x = origin[0] + self.ray_length * np.cos(theta_rad)
-            end_y = origin[1] + self.ray_length * np.sin(theta_rad)
-
-            ray = geom.LineString([origin, (end_x, end_y)])
-            rays.append(ray)
-
+        """Generate rays in an alternating order around the central angle."""
+
+        delta_angles = np.linspace(45,-135,self.num_rays) # Define symmetric spread
+
+        # Generate alternating order (0, -1, +1, -2, +2, ..., -n, +n)
+        mid_idx = len(delta_angles) // 2
+        sorted_indices = np.argsort(np.abs(np.arange(len(delta_angles)) - mid_idx))
+        alt_angles = delta_angles[sorted_indices] # Reorder angles
+        print(angle,alt_angles-angle)
+        # Convert to radians and apply to base angle
+        angles = np.radians(angle + alt_angles)
+        print(angles)
+        # Vectorized endpoint computation
+        end_x = origin[0] + self.ray_length * np.cos(angles)
+        end_y = origin[1] + self.ray_length * np.sin(angles)
+
+        # Create LineString rays
+        rays = [geom.LineString([origin, (ex, ey)]) for ex, ey in zip(end_x, end_y)]
+
        return rays

    def wall_hit(self, frame_width, frame_height, origin):
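
As an aside on the generate_rays change above: with num_rays = 7 the reordering emits the middle of the 45° to -135° spread first and then fans outward in pairs. A stand-alone sketch of just that reordering (kind="stable" is added here only to make the tie order deterministic; the committed code uses the default argsort):

```python
import numpy as np

num_rays = 7
delta_angles = np.linspace(45, -135, num_rays)   # [45, 15, -15, -45, -75, -105, -135]

# Distance of each index from the middle index gives the 0, ±1, ±2, ... ordering
mid_idx = num_rays // 2
order = np.argsort(np.abs(np.arange(num_rays) - mid_idx), kind="stable")
print(delta_angles[order])                       # [-45. -15. -75.  15. -105.  45. -135.]
```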
@@ -78,6 +85,7 @@ def object_hit(self, target, tag_index, origin):
                hit_distance = origin_point.distance(intersection)
                hitFraction = hit_distance / self.ray_length
                self.update_new_frame(idx, True, tag_index, hitFraction)
+                print("Ray Hit Object",idx)

    def polygon_hit(self, polygon_coords, tag_index, origin):
        """Check if any rays intersect a polygon boundary and update hit distances."""

server/ml_agent/visualize.py

Lines changed: 54 additions & 18 deletions
@@ -1,7 +1,7 @@
import numpy as np
import cv2

-def visualize_frame(cv_frame_data, processed_frame, rays, image):
+def visualize_frame(cv_frame_data, processed_frame, rays, image, agent_action):
    img = image.copy() # Work on the provided image

    # Draw walls (polygon)
@@ -12,12 +12,12 @@ def visualize_frame(cv_frame_data, processed_frame, rays, image):
    # Draw goal area (filled polygon)
    if 'goal_coords' in cv_frame_data and cv_frame_data['goal_coords'].size > 0:
        goal = np.array(cv_frame_data['goal_coords'], np.int32)
-        cv2.fillPoly(img, [goal], color=(0, 255, 0)) # Red
+        cv2.fillPoly(img, [goal], color=(0, 255, 0)) # Green

    # Draw ball (circle)
    if 'ball_coords' in cv_frame_data and len(cv_frame_data['ball_coords']) == 2:
        bx, by = cv_frame_data['ball_coords']
-        cv2.circle(img, (int(bx), int(by)), 5, (0, 0, 255), -1) # Blue
+        cv2.circle(img, (int(bx), int(by)), 5, (0, 0, 255), -1) # Red

    # Draw bot (rotated triangle)
    if 'bot_pos' in cv_frame_data and 'bot_dir' in cv_frame_data:
@@ -27,33 +27,71 @@ def visualize_frame(cv_frame_data, processed_frame, rays, image):

        # Calculate triangle vertices
        front = (bot_x + size * np.cos(bot_dir), bot_y + size * np.sin(bot_dir))
-        left = (bot_x + size/2 * np.cos(bot_dir + np.pi/2), bot_y + size/2 * np.sin(bot_dir + np.pi/2))
-        right = (bot_x + size/2 * np.cos(bot_dir - np.pi/2), bot_y + size/2 * np.sin(bot_dir - np.pi/2))
+        left = (bot_x + size / 2 * np.cos(bot_dir + np.pi / 2), bot_y + size / 2 * np.sin(bot_dir + np.pi / 2))
+        right = (bot_x + size / 2 * np.cos(bot_dir - np.pi / 2), bot_y + size / 2 * np.sin(bot_dir - np.pi / 2))

        triangle = np.array([(int(front[0]), int(front[1])),
                             (int(left[0]), int(left[1])),
                             (int(right[0]), int(right[1]))], np.int32)
-        cv2.fillPoly(img, [triangle], color=(0, 255, 0)) # Green
+        cv2.fillPoly(img, [triangle], color=(0, 255, 0)) # Green bot

-
+        # Draw action arrow
+        arrow_length = 30
+        arrow_thickness = 2
+        arrow_color = (0, 165, 255) # Orange
+
+        # Action movement dictionary
+        action_labels = {
+            1: "Move Forward",
+            2: "Move Backward",
+            3: "Rotate Right",
+            4: "Rotate Left",
+            5: "Dir Left",
+            6: "Dir Right"
+        }

+        action_text = action_labels.get(agent_action, "No Movement") # Default if unknown action
+
+        if agent_action == 1: # Move forward
+            arrow_end = (bot_x + arrow_length * np.cos(bot_dir), bot_y + arrow_length * np.sin(bot_dir))
+        elif agent_action == 2: # Move backward
+            arrow_end = (bot_x - arrow_length * np.cos(bot_dir), bot_y - arrow_length * np.sin(bot_dir))
+        elif agent_action == 3: # Rotate left
+            arrow_color = (255, 255, 0) # Yellow
+            arrow_end = (bot_x + arrow_length * np.cos(bot_dir + np.pi / 4), bot_y + arrow_length * np.sin(bot_dir + np.pi / 4))
+        elif agent_action == 4: # Rotate right
+            arrow_color = (255, 255, 0) # Yellow
+            arrow_end = (bot_x + arrow_length * np.cos(bot_dir - np.pi / 4), bot_y + arrow_length * np.sin(bot_dir - np.pi / 4))
+        elif agent_action == 5: # Strafe left
+            arrow_color = (255, 0, 255) # Purple
+            arrow_end = (bot_x - arrow_length * np.cos(bot_dir + np.pi / 2), bot_y - arrow_length * np.sin(bot_dir + np.pi / 2))
+        elif agent_action == 6: # Strafe right
+            arrow_color = (255, 0, 255) # Purple
+            arrow_end = (bot_x + arrow_length * np.cos(bot_dir - np.pi / 2), bot_y + arrow_length * np.sin(bot_dir - np.pi / 2))
+        else:
+            arrow_end = (bot_x, bot_y) # No movement
+
+        cv2.arrowedLine(img, (int(bot_x), int(bot_y)), (int(arrow_end[0]), int(arrow_end[1])), arrow_color, arrow_thickness, tipLength=0.3)
+
+        # Draw action text in the top-right corner
+        text_position = (img.shape[1] - 200, 30) # Top-right corner
+        cv2.putText(img, f"Action: {action_text}", text_position, cv2.FONT_HERSHEY_SIMPLEX,
+                    0.6, (0, 0, 200), 2, cv2.LINE_AA) # Red action label
+
+    # Draw Rays
    for idx, ray in enumerate(rays):
        start = ray.coords[0]
        end = ray.coords[1]
-
        hit_info = processed_frame[idx]
        hit_tags = hit_info[:3]
        hit_fraction = hit_info[3]

-        # Only draw if there's a hit
-
        tag_hit = np.argmax(hit_tags)
        colors = [
            (0, 0, 255), # Ball (red)
-            (0, 255, 0), # Goal (blue)
-            (0, 0, 255) # Wall (green)
+            (0, 255, 0), # Goal (green)
+            (255, 0, 0) # Wall (blue)
        ]
-

        # Compute hit point
        dx = end[0] - start[0]
@@ -62,15 +100,13 @@ def visualize_frame(cv_frame_data, processed_frame, rays, image):
        hit_y = start[1] + dy * hit_fraction

        # Draw the ray up to the hit point
-        cv2.line(img, (int(start[0]), int(start[1])), (int(hit_x), int(hit_y)), (0, 255, 0) , 1)
+        cv2.line(img, (int(start[0]), int(start[1])), (int(hit_x), int(hit_y)), (0, 255, 0), 1)

        # Draw cross at hit point
-        if(not hit_info[4]):
-            # print(hit_info)
-            # print(start,end, hit_x,hit_y)
+        if not hit_info[4]: # If an object was hit
            cv2.drawMarker(img, (int(hit_x), int(hit_y)), colors[tag_hit], markerType=cv2.MARKER_CROSS, thickness=2)

    # Show the image
    cv2.imshow('Observation Frame', img)
    cv2.waitKey(1)
-    return img
+    return img
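
For reference, the per-ray row of processed_frame consumed above appears to be the 5-vector initialised by Observation.new_frame; the layout below is inferred from the indexing in visualize_frame and the initialiser np.tile([0, 0, 0, 1, 1], (7, 1)):

```python
# Inferred per-ray observation row (one row of processed_frame):
#   row[0:3]  one-hot hit tags: ball, goal, wall (same order as the colors list)
#   row[3]    hit fraction: hit distance / ray_length, defaults to 1.0
#   row[4]    miss flag: 1.0 until a hit is recorded; the cross marker is drawn when it is 0
default_row = [0.0, 0.0, 0.0, 1.0, 1.0]
```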

tcp_server.py

Lines changed: 35 additions & 0 deletions
@@ -0,0 +1,35 @@
+import socket
+import threading
+
+# Server setup
+HOST = "0.0.0.0"  # Listen on all interfaces
+PORT = 5000
+
+server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+server.bind((HOST, PORT))
+server.listen(5)
+
+print("✅ Server is listening for ESP32 connections...")
+
+# Handle client connection
+def handle_client(client, addr):
+    print(f"✅ ESP32 Connected from {addr}")
+    try:
+        while True:
+            data = client.recv(1024)
+            if not data:
+                print(f"❌ ESP32 {addr} Disconnected")
+                # break
+            print(f"📩 Received from {addr}: {data.decode('utf-8')}")
+            client.sendall(b"ACK")  # Optional acknowledgment
+    except Exception as e:
+        print(f"❌ Connection Error with {addr}: {e}")
+    finally:
+        client.close()  # Close connection properly
+        print(f"🔌 Connection closed for {addr}")
+
+while True:
+    client, addr = server.accept()
+    client_thread = threading.Thread(target=handle_client, args=(client, addr))
+    client_thread.start()
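
For quick local testing of tcp_server.py without an ESP32 on the network, a minimal client sketch (the loopback host, timeout, and payload are made up; the port mirrors the PORT constant above):

```python
import socket

# Connect to the TCP server from tcp_server.py and exchange one message
with socket.create_connection(("127.0.0.1", 5000), timeout=5) as sock:
    sock.sendall(b"hello from test client")   # placeholder payload
    print("server replied:", sock.recv(1024)) # server answers with b"ACK"
```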
