This is the mail archive of the
gcc-help@gcc.gnu.org
mailing list for the GCC project.
Re: string.h - strsep
- From: Gokhan Kisacikoglu <kisa at centropolisfx dot com>
- To: Ömer Rauf Atay <rauf at ug dot bilkent dot edu dot tr>
- Cc: gcc-help at gcc dot gnu dot org
- Date: Tue, 05 Nov 2002 15:23:52 -0800
- Subject: Re: string.h - strsep
- Organization: Centropolis Effects, LLC
- References: <001901c28510$79b967c0$daceb38b@raucu>
- Reply-to: kisa at centropolisfx dot com
> Any comments?
>
Use the <string> class from the C++ standard library instead; you
will have to do some work. You can find more info at:
http://www.sgi.com/tech/stl/basic_string.html
For example, to tokenize a string:
#include <string>
#include <vector>
// headers necessary only for testing:
//
#include <iostream>
#include <iterator>
#include <algorithm>
using namespace std;

typedef vector <string> string_array;

// Split _str into the substrings separated by _delim and store them in
// _tokens (any previous contents of _tokens are discarded, but its
// capacity is reused).
//
// Adjacent delimiters yield empty tokens; the text after the last
// delimiter (or the whole string when no delimiter occurs) becomes the
// final token.  A trailing delimiter does NOT produce a trailing empty
// token.
//
// Returns the number of tokens stored.  Returns 0 for an empty input
// string, and also for an empty delimiter — searching for "" matches at
// every position without advancing, which would otherwise loop forever.
unsigned int
tokenize(const string &_str,
         const string &_delim,
         string_array &_tokens )
{
    _tokens.clear();                  // cheaper than assigning a fresh vector
    if ( _str.empty() || _delim.empty() ) return 0;

    for ( string::size_type spos = 0; spos < _str.size(); )
    {
        const string::size_type epos = _str.find( _delim, spos );
        if ( epos == string::npos )
        {
            // No further delimiter: the remainder is the last token.
            _tokens.push_back( _str.substr( spos ) );
            break;
        }
        _tokens.push_back( _str.substr( spos, epos - spos ) );
        spos = epos + _delim.size();  // step past the delimiter just found
    }
    return static_cast<unsigned int>( _tokens.size() );
}
int main(void)
{
string_array tokens;
// test: 1
string str = "my string to be tokenized";
string delimiter = " ";
cout << "token count: "
<< tokenize( str, delimiter, tokens ) << endl;
copy( tokens.begin(), tokens.end(),
ostream_iterator <string> ( cout, "_" ) );
cout << endl;
// test: 2
cout << "token count: "
<< tokenize( "another string to-!be-!tokenized-!", "-!", tokens )
<< endl;
copy( tokens.begin(), tokens.end(),
ostream_iterator <string> ( cout, "_" ) );
cout << endl;
// test: 3
cout << "token count: "
<< tokenize( "", " ", tokens ) << endl;
copy( tokens.begin(), tokens.end(),
ostream_iterator <string> ( cout, "_" ) );
cout << endl;
}
Regards,
Gokhan