
TokenEx.d

/*******************************************************************************

        @file TokenEx.d
        
        Copyright (c) 2004 Kris Bell
        
        This software is provided 'as-is', without any express or implied
        warranty. In no event will the authors be held liable for damages
        of any kind arising from the use of this software.
        
        Permission is hereby granted to anyone to use this software for any 
        purpose, including commercial applications, and to alter it and/or 
        redistribute it freely, subject to the following restrictions:
        
        1. The origin of this software must not be misrepresented; you must 
           not claim that you wrote the original software. If you use this 
           software in a product, an acknowledgment within documentation of 
           said product would be appreciated but is not required.

        2. Altered source versions must be plainly marked as such, and must 
           not be misrepresented as being the original software.

        3. This notice may not be removed or altered from any distribution
           of the source.

        4. Derivative works are permitted, but they must carry this notice
           in full and credit the original source.


                        ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


        @version        Initial version, Oct 2004
        @author         Kris


*******************************************************************************/

module mango.io.TokenEx;

private import  mango.io.Token,
                mango.io.Tokenizer;

private import  mango.io.model.IReader,
                mango.io.model.IConduit;

/*******************************************************************************

        A style of Token that's bound to a Tokenizer. This can be a handy
        means of cleaning up client code, and of limiting the scope of how
        a token is used by receiving methods.

        Contrast this example with that shown in the Token class:

        @code
        // open a file for reading
        FileConduit fc = new FileConduit ("test.txt");

        // create a buffer for reading the file
        IBuffer buffer = fc.createBuffer();

        // bind a line-tokenizer to our input token
        BoundToken line = new BoundToken (Tokenizers.line);

        // read the file a line at a time. Method next() returns false when no more
        // delimiters are found. Note there may be an unterminated line at eof
        while (line.next(buffer) || line.getLength())
               Stdout.put(line).cr();
        @endcode

        One might also consider a CompositeToken or HybridToken.

*******************************************************************************/

class BoundToken : Token
{
        private ITokenizer tk;

        /***********************************************************************

                Construct a BoundToken using the provided Tokenizer.

        ***********************************************************************/

        this (ITokenizer tk)
        {
                this.tk = tk;
        }

        /***********************************************************************

                Return the associated tokenizer

        ***********************************************************************/

        ITokenizer getTokenizer ()
        {
                return tk;
        }

        /***********************************************************************

                Extract the next token from the provided buffer.

                Returns true if a token was isolated, false if no more
                tokens were found. Note that one last token may still
                be present when this returns false; this may happen if
                (for example) the last delimiter is missing before an
                EOF condition is seen. Check token.getLength() when
                this method returns false.

                For example:

                @code
                        while (token.next() || token.getLength())
                               // do something

                @endcode

        ***********************************************************************/

        bool next (IBuffer buf)
        {
                return tk.next (buf, this);
        }
}

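/*******************************************************************************

        Usage sketch (illustrative only; not part of the original module).
        Because a BoundToken is tied to a Tokenizer rather than to any one
        buffer, a single instance can be handed different buffers on each
        call to next(). This assumes Tokenizers.comma, plus two hypothetical
        pre-populated buffers bufferA and bufferB, and the same loop pattern
        shown in the BoundToken example above.

        @code
        // one token, bound to a comma tokenizer
        BoundToken field = new BoundToken (Tokenizers.comma);

        // drain the first buffer, then the second, with the same token
        while (field.next(bufferA) || field.getLength())
               Stdout.put(field).cr();

        while (field.next(bufferB) || field.getLength())
               Stdout.put(field).cr();
        @endcode

*******************************************************************************/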

/*******************************************************************************

        ReaderToken adapts a BoundToken such that it can be used directly
        with any IReader implementation. We just add the IReadable methods
        to the basic BoundToken.

        Here's a contrived example of how to use ReaderToken:

        @code
        // create a small buffer on the heap
        Buffer buf = new Buffer (256);

        // write items with a comma between each
        TextWriter tw = new TextWriter (buf, ",");

        // write some stuff to the buffer
        tw << "now is the time for all good men" << 3.14159;

        // bind a couple of tokens to a comma tokenizer
        ReaderToken text = new ReaderToken (Tokenizers.comma);
        ReaderToken number = new ReaderToken (Tokenizers.comma);

        // create any old reader since we only use it for handling tokens
        Reader r = new Reader (buf);

        // populate both tokens via reader
        r >> text >> number;

        // print them to the console
        Stdout << text << ':' << number << Stdout.newline;
        @endcode

*******************************************************************************/

class ReaderToken : BoundToken, IReadable
{
        /***********************************************************************

                Construct a ReaderToken using the provided Tokenizer.

        ***********************************************************************/

        this (ITokenizer tk)
        {
                super (tk);
        }

        /***********************************************************************

                Read the next delimited element into this token.

        ***********************************************************************/

        void read (IReader r)
        {
                tk.next (r.getBuffer(), this);
        }
}

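/*******************************************************************************

        Usage sketch (illustrative only; not part of the original module).
        Since ReaderToken implements IReadable, it can be pulled from a
        Reader in a loop like any other readable element. This assumes the
        comma-delimited buf from the example above, and that an exhausted
        buffer yields a zero-length token.

        @code
        // one token, bound to a comma tokenizer
        ReaderToken field = new ReaderToken (Tokenizers.comma);

        // any reader over the same buffer will do
        Reader r = new Reader (buf);

        // keep extracting comma-delimited fields until an empty one appears
        r >> field;
        while (field.getLength())
              {
              Stdout.put(field).cr();
              r >> field;
              }
        @endcode

*******************************************************************************/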
/*******************************************************************************

        Another subclass of BoundToken that combines both a Tokenizer and
        an input buffer. This is simply a convenience wrapper that takes
        care of details which would otherwise clutter the client code.

        Compare this to usage of a basic Token:

        @code
        // open a file for reading
        FileConduit fc = new FileConduit ("test.txt");

        // create a Token and bind it to both the file and a line-tokenizer
        CompositeToken line = new CompositeToken (Tokenizers.line, fc);

        // read the file a line at a time. Method get() returns false when no more
        // tokens are found.
        while (line.get)
               Stdout.put(line).cr();
        @endcode

        You might also consider a HybridToken for further processing of
        token content.

*******************************************************************************/

class CompositeToken : BoundToken
{
        private IBuffer buffer;

        /***********************************************************************

                Set this token to use the provided Tokenizer, and bind it
                to the given buffer.

        ***********************************************************************/

        this (ITokenizer tk, IBuffer buffer)
        {
                super (tk);
                this.buffer = buffer;
        }

        /***********************************************************************

                Set this token to use the provided Tokenizer, and bind it
                to the buffer associated with the given conduit.

        ***********************************************************************/

        this (ITokenizer tk, IConduit conduit)
        {
                this (tk, conduit.createBuffer());
        }

        /***********************************************************************

                Return the associated buffer

        ***********************************************************************/

        IBuffer getBuffer ()
        {
                return buffer;
        }

        /***********************************************************************

                Extract the next token.

                Returns true if a token was isolated, false if no more
                tokens were found. Note that one last token may still
                be present when this returns false; this may happen if
                (for example) the last delimiter is missing before an
                Eof condition is seen. Check token.getLength() when
                this method returns false.

                For example:

                @code
                        while (token.next || token.getLength)
                               // do something

                @endcode

        ***********************************************************************/

        bool next ()
        {
                return tk.next (buffer, this);
        }

        /***********************************************************************

                Extract the next token, taking Eof into consideration.
                If next() returns false, then this function will still
                return true as long as there's some content available.

                For example:

                @code
                        while (token.get)
                               // do something

                @endcode

        ***********************************************************************/

        bool get ()
        {
                return next() || getLength();
        }
}

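/*******************************************************************************

        Usage sketch (illustrative only; not part of the original module).
        A CompositeToken can just as easily split on commas as on lines;
        get() folds the trailing-field case (missing final delimiter before
        eof) into the loop condition. This assumes Tokenizers.comma and a
        hypothetical file name "test.csv".

        @code
        // open a file for reading
        FileConduit fc = new FileConduit ("test.csv");

        // bind a token to both the file and a comma-tokenizer
        CompositeToken field = new CompositeToken (Tokenizers.comma, fc);

        // emit one comma-delimited field per output line
        while (field.get)
               Stdout.put(field).cr();
        @endcode

*******************************************************************************/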
/*******************************************************************************

        A subclass of CompositeToken that combines a Tokenizer, an input buffer,
        and the means to bind its content to a subordinate Reader or Token.
        This is another convenience wrapper that takes care of details which
        would otherwise complicate client code.

        Compare this to usage of a CompositeToken:

        @code
        // open a file for reading
        FileConduit fc = new FileConduit ("test.txt");

        // create a Token and bind it to both the file and a line-tokenizer
        HybridToken line = new HybridToken (Tokenizers.line, fc);

        // now create a reader upon the token
        Reader reader = new Reader (line.getHost);

        // read the file a line at a time. Method get() returns false when no more
        // tokens are found.
        while (line.get)
              {
              int x, y;

              // reader is now bound to the content of the current line
              reader.get(x).get(y);

              Stdout.put(x).put(y).cr();
              }
        @endcode

        You can use the same mechanism to bind subordinate Tokens:

        @code
        // open a file for reading
        FileConduit fc = new FileConduit ("test.txt");

        // create a Token and bind it to both the file and a line-tokenizer
        HybridToken line = new HybridToken (Tokenizers.line, fc);

        // now create a subordinate Token that splits on whitespace
        CompositeToken word = new CompositeToken (Tokenizers.space, line.getHost);

        // read the file a line at a time. Method get() returns false when no more
        // tokens are found.
        while (line.get)
               // extract space-delimited tokens from each line
               while (word.get)
                      Stdout.put(word).cr();
        @endcode


*******************************************************************************/

class HybridToken : CompositeToken
{
        private IBuffer host;

        /***********************************************************************

                Set this token to use the provided Tokenizer, and bind it
                to the given buffer.

        ***********************************************************************/

        this (ITokenizer tk, IBuffer buffer)
        {
                super (tk, buffer);

                // create the hosting IBuffer
                host = buffer.create();
        }

        /***********************************************************************

                Set this token to use the provided Tokenizer, and bind it
                to the buffer associated with the given conduit.

        ***********************************************************************/

        this (ITokenizer tk, IConduit conduit)
        {
                this (tk, conduit.createBuffer());
        }

        /***********************************************************************

                Return the associated host buffer. The host should be used
                for purposes of binding a subordinate Token or Reader onto
                the content of this token. Each call to next() will update
                this content appropriately, which is also reflected within
                said host buffer.

                That is, token.toString() == token.getHost.toString().

        ***********************************************************************/

        IBuffer getHost ()
        {
                return host;
        }

        /***********************************************************************

                Extract the next token.

                Returns true if a token was isolated, false if no more
                tokens were found. Note that one last token may still
                be present when this returns false; this may happen if
                (for example) the last delimiter is missing before an
                Eof condition is seen. Check token.getLength() when
                this method returns false.

                For example:

                @code
                        while (token.next || token.getLength)
                               // do something

                @endcode

        ***********************************************************************/

        bool next ()
        {
                // get the next token
                bool ret = super.next ();

                // mirror the new token content into the host buffer
                host.setValidContent (toString());

                return ret;
        }
}
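
/*******************************************************************************

        Usage sketch (illustrative only; not part of the original module).
        The host buffer returned by getHost() mirrors the current token
        content after each next(), so anything bound to that host sees
        exactly the text of the most recently extracted token (per the
        getHost() notes above). Here a subordinate CompositeToken splits
        each line on commas instead of whitespace; this assumes
        Tokenizers.comma, as used in earlier examples.

        @code
        // open a file for reading
        FileConduit fc = new FileConduit ("test.txt");

        // bind a token to both the file and a line-tokenizer
        HybridToken line = new HybridToken (Tokenizers.line, fc);

        // a subordinate token that splits each line on commas
        CompositeToken field = new CompositeToken (Tokenizers.comma, line.getHost);

        // for each line, emit its comma-delimited fields
        while (line.get)
               while (field.get)
                      Stdout.put(field).cr();
        @endcode

*******************************************************************************/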
