@@ -1,3 +1,5 @@
+using System;
+using System.Collections.Generic;
 using System.Diagnostics;
 using System.Text;
 
@@ -163,29 +165,24 @@ public void HandleUnknownAction(int lineNum, ReadOnlySpan<byte> action, ReadOnly
     {
     }
 
-    private void InitUserAgentsAndPath(List<string> userAgents, byte[] path)
+    private void InitUserAgentsAndPath(List<byte[]> userAgents, byte[] path)
     {
-        _userAgents = new List<byte[]>(userAgents.Count);
-        foreach (var ua in userAgents)
-        {
-            _userAgents.Add(Encoding.UTF8.GetBytes(ua));
-        }
-
+        _userAgents = userAgents;
         Debug.Assert(path.Length > 0 && path[0] == '/');
         _path = path;
     }
 
     private bool SeenAnyAgent => _seenGlobalAgent || _seenSpecificAgent;
 
-    public bool AllowedByRobots(byte[] robotsBody, List<string> userAgents, string url)
+    public bool AllowedByRobots(byte[] robotsBody, List<byte[]> userAgents, string url)
     {
         // The url is not normalized (escaped, percent encoded) here because the user
         // is asked to provide it in escaped form already.
         var path = GetPathParamsQuery(url);
         return PathAllowedByRobots(robotsBody, userAgents, new UTF8Encoding().GetBytes(path));
     }
 
-    public bool PathAllowedByRobots(byte[] robotsBody, List<string> userAgents, byte[] path)
+    public bool PathAllowedByRobots(byte[] robotsBody, List<byte[]> userAgents, byte[] path)
     {
         InitUserAgentsAndPath(userAgents, path);
         ParseRobotsTxt(robotsBody, this);
@@ -313,9 +310,9 @@ public void Clear()
     byte[]? _path;
     private List<byte[]>? _userAgents; // Set by InitUserAgentsAndPath.
 
-    public bool OneAgentAllowedByRobots(byte[] robotsContent, string userAgent, string url)
+    public bool OneAgentAllowedByRobots(byte[] robotsContent, byte[] userAgent, string url)
     {
-        var userAgents = new List<string> { userAgent };
+        var userAgents = new List<byte[]> { userAgent, };
         return AllowedByRobots(robotsContent, userAgents, url);
     }
 
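The diff above moves the matcher's public API from string user agents to pre-encoded UTF-8 byte[] user agents, so callers encode once up front instead of the matcher re-encoding the list on every query. A minimal usage sketch of the new signatures; the enclosing class name RobotsMatcher and the robots.txt content here are assumptions for illustration, not taken from this diff:

using System;
using System.Text;

class Demo
{
    static void Main()
    {
        // Hypothetical robots.txt body, supplied as raw bytes.
        byte[] robotsBody = Encoding.UTF8.GetBytes(
            "User-agent: FooBot\n" +
            "Disallow: /private/\n");

        // After this change the caller passes the user agent as UTF-8 bytes.
        byte[] userAgent = Encoding.UTF8.GetBytes("FooBot");

        var matcher = new RobotsMatcher(); // assumed name of the enclosing class
        bool allowed = matcher.OneAgentAllowedByRobots(
            robotsBody, userAgent, "https://example.com/private/page.html");

        Console.WriteLine(allowed); // expected: False, /private/ is disallowed for FooBot
    }
}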