@@ -22,16 +22,18 @@ use self::states::{Rawtext, Rcdata, ScriptData, ScriptDataEscaped};
 use self::char_ref::{CharRef, CharRefTokenizer};
 
 use crate::util::str::lower_ascii_letter;
-
 use log::{debug, trace};
 use mac::format_if;
-use markup5ever::{ns, small_char_set, TokenizerResult};
+use markup5ever::{
+    buffer_queue::BufferQueue, namespace_url, ns, small_char_set, InputSink, InputSinkResult,
+    TokenizerResult,
+};
 use std::borrow::Cow::{self, Borrowed};
 use std::cell::{Cell, RefCell, RefMut};
 use std::collections::BTreeMap;
-use std::mem;
+use std::{iter, mem};
 
-pub use crate::buffer_queue::{BufferQueue, FromSet, NotFromSet, SetResult};
+pub use crate::buffer_queue::{FromSet, NotFromSet, SetResult};
 use crate::tendril::StrTendril;
 use crate::{Attribute, LocalName, QualName, SmallCharSet};
 
@@ -43,6 +45,8 @@ pub enum ProcessResult<Handle> {
     Continue,
     Suspend,
     Script(Handle),
+    #[cfg(feature = "encoding")]
+    MaybeChangeEncodingAndStartOver(&'static encoding_rs::Encoding),
 }
 
 fn option_push(opt_str: &mut Option<StrTendril>, c: char) {
@@ -357,6 +361,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     ProcessResult::Continue => (),
                     ProcessResult::Suspend => break,
                     ProcessResult::Script(node) => return TokenizerResult::Script(node),
+                    #[cfg(feature = "encoding")]
+                    ProcessResult::MaybeChangeEncodingAndStartOver(encoding) => {
+                        return TokenizerResult::MaybeChangeEncodingAndStartOver(encoding)
+                    },
                 }
             }
         } else {
@@ -365,6 +373,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                     ProcessResult::Continue => (),
                     ProcessResult::Suspend => break,
                     ProcessResult::Script(node) => return TokenizerResult::Script(node),
+                    #[cfg(feature = "encoding")]
+                    ProcessResult::MaybeChangeEncodingAndStartOver(encoding) => {
+                        return TokenizerResult::MaybeChangeEncodingAndStartOver(encoding)
+                    },
                 }
             }
         }
@@ -456,6 +468,10 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 self.state.set(states::RawData(kind));
                 ProcessResult::Continue
             },
+            #[cfg(feature = "encoding")]
+            TokenSinkResult::MaybeChangeEncodingAndStartOver(encoding) => {
+                ProcessResult::MaybeChangeEncodingAndStartOver(encoding)
+            },
         }
     }
 
@@ -1725,6 +1741,8 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
                 ProcessResult::Continue => (),
                 ProcessResult::Suspend => break,
                 ProcessResult::Script(_) => unreachable!(),
+                #[cfg(feature = "encoding")]
+                ProcessResult::MaybeChangeEncodingAndStartOver(_) => unreachable!(),
             }
         }
 
@@ -2001,13 +2019,27 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
     }
 }
 
+impl<Sink> InputSink for Tokenizer<Sink>
+where
+    Sink: TokenSink,
+{
+    type Handle = Sink::Handle;
+
+    fn feed<'a>(
+        &'a self,
+        input: &'a BufferQueue,
+    ) -> impl Iterator<Item = InputSinkResult<Self::Handle>> + 'a {
+        iter::from_fn(|| self.feed(input).into())
+    }
+}
+
 #[cfg(test)]
 #[allow(non_snake_case)]
 mod test {
     use super::option_push; // private items
-    use crate::tendril::{SliceExt, StrTendril};
-
     use super::{TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
+    use crate::tendril::{SliceExt, StrTendril};
+    use crate::LocalName;
 
     use super::interface::{CharacterTokens, EOFToken, NullCharacterToken, ParseError};
     use super::interface::{EndTag, StartTag, Tag, TagKind};
@@ -2016,8 +2048,6 @@ mod test {
     use markup5ever::buffer_queue::BufferQueue;
     use std::cell::RefCell;
 
-    use crate::LocalName;
-
     // LinesMatch implements the TokenSink trait. It is used for testing to see
     // if current_line is being updated when process_token is called. The lines
     // vector is a collection of the line numbers that each token is on.
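
For orientation (not part of the change): a minimal caller-side sketch of how the feature-gated result added above might be consumed. The `drive` helper and its restart handling are hypothetical; `Tokenizer::feed`, `BufferQueue`, and the `TokenizerResult` variants are the items this diff touches, and the `#[cfg(feature = "encoding")]` gate mirrors the library's own feature flag.

use html5ever::tokenizer::{TokenSink, Tokenizer};
use markup5ever::buffer_queue::BufferQueue;
use markup5ever::TokenizerResult;

// Hypothetical driver loop: feed buffered input until the tokenizer is done.
fn drive<Sink: TokenSink>(tokenizer: &Tokenizer<Sink>, input: &BufferQueue) {
    loop {
        match tokenizer.feed(input) {
            // All buffered input has been consumed.
            TokenizerResult::Done => break,
            // A script was emitted; a real caller would run it and then feed again.
            TokenizerResult::Script(_handle) => continue,
            // New in this change: the sink asked for a different character encoding,
            // so the caller would re-decode the original bytes and restart parsing.
            #[cfg(feature = "encoding")]
            TokenizerResult::MaybeChangeEncodingAndStartOver(encoding) => {
                let _ = encoding; // restart logic is out of scope for this sketch
                break;
            },
        }
    }
}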