@@ -83,30 +83,34 @@ impl<SV> HttpProxy<SV> {
             match session.cache.cache_lookup().await {
                 Ok(res) => {
                     if let Some((mut meta, handler)) = res {
-                        // vary logic
-                        // because this branch can be called multiple times in a loop, and we only
+                        // Vary logic
+                        // Because this branch can be called multiple times in a loop, and we only
                         // need to update the vary once, check if variance is already set to
-                        // prevent unnecessary vary lookups
+                        // prevent unnecessary vary lookups.
                         let cache_key = session.cache.cache_key();
                         if let Some(variance) = cache_key.variance_bin() {
-                            // adhoc double check the variance found is the variance we want
+                            // We've looked up a secondary slot.
+                            // Adhoc double check that the variance found is the variance we want.
                             if Some(variance) != meta.variance() {
                                 warn!("Cache variance mismatch, {variance:?}, {cache_key:?}");
                                 session.cache.disable(NoCacheReason::InternalError);
                                 break None;
                             }
                         } else {
+                            // Basic cache key; either variance is off, or this is the primary slot.
                             let req_header = session.req_header();
                             let variance = self.inner.cache_vary_filter(&meta, ctx, req_header);
                             if let Some(variance) = variance {
+                                // Variance is on. This is the primary slot.
                                 if !session.cache.cache_vary_lookup(variance, &meta) {
-                                    // cache key variance updated, need to lookup again
+                                    // This wasn't the desired variant. Updated cache key variance, cause another
+                                    // lookup to get the desired variant, which would be in a secondary slot.
                                     continue;
                                 }
-                            } //else: vary is not in use
+                            } // else: vary is not in use
                         }

-                        // either no variance or the current handler is the variance
+                        // Either no variance, or the current handler targets the correct variant.

                         // hit
                         // TODO: maybe round and/or cache now()
@@ -206,6 +210,7 @@ impl<SV> HttpProxy<SV> {
                 } else {
                     // cache miss
                     if session.cache.is_cache_locked() {
+                        // Another request is filling the cache; try waiting til that's done and retry.
                         let lock_status = session.cache.cache_lock_wait().await;
                         if self.handle_lock_status(session, ctx, lock_status) {
                             continue;
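
For readers following the vary comments in the first hunk above, here is a minimal, self-contained sketch of the lookup loop they describe: a primary-slot lookup that re-keys to a secondary slot when the asset varies, and a secondary-slot sanity check on the variance. All names here (`Slot`, `Meta`, `lookup`, `request_variance`) are hypothetical stand-ins for illustration only, not Pingora's actual API.

```rust
// Hypothetical model of the vary lookup loop; none of these types exist in Pingora.

/// Metadata returned by a (simulated) cache lookup.
#[derive(Debug)]
struct Meta {
    /// Variance hash stored with the cached asset, if the asset varies.
    variance: Option<u64>,
}

/// Which slot the next lookup targets.
#[derive(Debug, Clone, Copy)]
enum Slot {
    /// Basic cache key: either variance is off, or this is the primary slot.
    Primary,
    /// Keyed by a specific variance hash.
    Secondary(u64),
}

/// Simulated cache: the primary slot records that the asset varies,
/// and each secondary slot holds one variant.
fn lookup(slot: Slot) -> Option<Meta> {
    match slot {
        Slot::Primary => Some(Meta { variance: Some(7) }),
        Slot::Secondary(v) => Some(Meta { variance: Some(v) }),
    }
}

/// Variance this request hashes to (e.g. derived from Accept-Encoding).
fn request_variance() -> Option<u64> {
    Some(42)
}

fn main() {
    let mut slot = Slot::Primary;
    let hit = loop {
        // A cache miss ends the loop with no hit.
        let Some(meta) = lookup(slot) else { break None };
        match slot {
            Slot::Secondary(v) => {
                // We've looked up a secondary slot: double check that the
                // variance found is the variance we asked for.
                if meta.variance != Some(v) {
                    println!("variance mismatch, giving up on cache");
                    break None;
                }
            }
            Slot::Primary => {
                if let Some(v) = request_variance() {
                    // Variance is on. If the primary slot doesn't already hold
                    // this request's variant, re-key and look up the secondary slot.
                    if meta.variance != Some(v) {
                        slot = Slot::Secondary(v);
                        continue;
                    }
                } // else: vary is not in use
            }
        }
        // Either no variance, or this lookup targets the correct variant: a hit.
        break Some(meta);
    };
    println!("lookup result: {hit:?}");
}
```

Running this sketch performs one re-keyed lookup and ends in a hit on the `42` variant; the real code additionally disables the cache on a variance mismatch rather than only logging it.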