@@ -22,16 +22,18 @@ use self::states::{Rawtext, Rcdata, ScriptData, ScriptDataEscaped};
 use self::char_ref::{CharRef, CharRefTokenizer};
 
 use crate::util::str::lower_ascii_letter;
-
 use log::{debug, trace};
 use mac::format_if;
-use markup5ever::{namespace_url, ns, small_char_set, TokenizerResult};
+use markup5ever::{
+    buffer_queue::BufferQueue, namespace_url, ns, small_char_set, InputSink, InputSinkResult,
+    TokenizerResult,
+};
 use std::borrow::Cow::{self, Borrowed};
 use std::cell::{Cell, RefCell, RefMut};
 use std::collections::BTreeMap;
-use std::mem;
+use std::{iter, mem};
 
-pub use crate::buffer_queue::{BufferQueue, FromSet, NotFromSet, SetResult};
+pub use crate::buffer_queue::{FromSet, NotFromSet, SetResult};
 use crate::tendril::StrTendril;
 use crate::{Attribute, LocalName, QualName, SmallCharSet};
 
@@ -43,6 +45,8 @@ pub enum ProcessResult<Handle> {
     Continue,
     Suspend,
     Script(Handle),
+    #[cfg(feature = "encoding")]
+    MaybeChangeEncodingAndStartOver(&'static encoding_rs::Encoding),
 }
 
 fn option_push(opt_str: &mut Option<StrTendril>, c: char) {
@@ -357,6 +361,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     ProcessResult::Continue => (),
                     ProcessResult::Suspend => break,
                     ProcessResult::Script(node) => return TokenizerResult::Script(node),
+                    #[cfg(feature = "encoding")]
+                    ProcessResult::MaybeChangeEncodingAndStartOver(encoding) => {
+                        return TokenizerResult::MaybeChangeEncodingAndStartOver(encoding)
+                    },
                 }
             }
         } else {
@@ -365,6 +373,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     ProcessResult::Continue => (),
                     ProcessResult::Suspend => break,
                     ProcessResult::Script(node) => return TokenizerResult::Script(node),
+                    #[cfg(feature = "encoding")]
+                    ProcessResult::MaybeChangeEncodingAndStartOver(encoding) => {
+                        return TokenizerResult::MaybeChangeEncodingAndStartOver(encoding)
+                    },
                 }
             }
         }
@@ -445,6 +457,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 self.state.set(states::RawData(kind));
                 ProcessResult::Continue
             },
+            #[cfg(feature = "encoding")]
+            TokenSinkResult::MaybeChangeEncodingAndStartOver(encoding) => {
+                ProcessResult::MaybeChangeEncodingAndStartOver(encoding)
+            },
         }
     }
 
@@ -1448,6 +1464,8 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 ProcessResult::Continue => (),
                 ProcessResult::Suspend => break,
                 ProcessResult::Script(_) => unreachable!(),
+                #[cfg(feature = "encoding")]
+                ProcessResult::MaybeChangeEncodingAndStartOver(_) => unreachable!(),
             }
         }
 
@@ -1575,13 +1593,24 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     }
 }
 
+impl<Sink> InputSink for Tokenizer<Sink>
+where
+    Sink: TokenSink,
+{
+    type Handle = Sink::Handle;
+
+    fn feed(&self, input: &BufferQueue) -> impl Iterator<Item = InputSinkResult<Self::Handle>> {
+        iter::from_fn(|| self.feed(input).into())
+    }
+}
+
 #[cfg(test)]
 #[allow(non_snake_case)]
 mod test {
     use super::option_push; // private items
-    use crate::tendril::{SliceExt, StrTendril};
-
     use super::{TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
+    use crate::tendril::{SliceExt, StrTendril};
+    use crate::LocalName;
 
     use super::interface::{CharacterTokens, EOFToken, NullCharacterToken, ParseError};
     use super::interface::{EndTag, StartTag, Tag, TagKind};
@@ -1590,8 +1619,6 @@ mod test {
     use markup5ever::buffer_queue::BufferQueue;
     use std::cell::RefCell;
 
-    use crate::LocalName;
-
     // LinesMatch implements the TokenSink trait. It is used for testing to see
     // if current_line is being updated when process_token is called. The lines
     // vector is a collection of the line numbers that each token is on.