# | Line 1 | Line 1
---|---|---
1 | < | /* |
1 | > | /* |
2 | * Copyright (c) 2005 The University of Notre Dame. All Rights Reserved. | |
3 | * | |
4 | * The University of Notre Dame grants you ("Licensee") a | |
# | Line 47 | Line 47
47 | namespace oopse { | |
48 | ||
49 | ||
50 | < | StringTokenizer::StringTokenizer(const std::string & str, const std::string & delim) |
51 | < | : tokenString_(str), delim_(delim), returnTokens_(false), |
52 | < | currentPos_(tokenString_.begin()), end_(tokenString_.end()){ |
50 | > | StringTokenizer::StringTokenizer(const std::string & str, const std::string & delim) |
51 | > | : tokenString_(str), delim_(delim), returnTokens_(false), |
52 | > | currentPos_(tokenString_.begin()), end_(tokenString_.end()){ |
53 | ||
54 | < | } |
54 | > | } |
55 | ||
56 | < | StringTokenizer::StringTokenizer(std::string::const_iterator& first, std::string::const_iterator& last, |
57 | < | const std::string & delim) |
58 | < | : tokenString_(first, last) , delim_(delim), returnTokens_(false), |
59 | < | currentPos_(tokenString_.begin()), end_(tokenString_.end()) { |
56 | > | StringTokenizer::StringTokenizer(std::string::const_iterator& first, std::string::const_iterator& last, |
57 | > | const std::string & delim) |
58 | > | : tokenString_(first, last) , delim_(delim), returnTokens_(false), |
59 | > | currentPos_(tokenString_.begin()), end_(tokenString_.end()) { |
60 | ||
61 | < | } |
61 | > | } |
62 | ||
63 | < | StringTokenizer::StringTokenizer(const std::string&str, const std::string&delim, |
64 | < | bool returnTokens) |
65 | < | : tokenString_(str), delim_(delim), returnTokens_(returnTokens), |
66 | < | currentPos_(tokenString_.begin()), end_(tokenString_.end()) { |
63 | > | StringTokenizer::StringTokenizer(const std::string&str, const std::string&delim, |
64 | > | bool returnTokens) |
65 | > | : tokenString_(str), delim_(delim), returnTokens_(returnTokens), |
66 | > | currentPos_(tokenString_.begin()), end_(tokenString_.end()) { |
67 | ||
68 | < | } |
68 | > | } |
69 | ||
70 | < | bool StringTokenizer::isDelimiter(const char c) { |
70 | > | bool StringTokenizer::isDelimiter(const char c) { |
71 | return delim_.find(c) == std::string::npos ? false : true; | |
72 | < | } |
72 | > | } |
73 | ||
74 | < | int StringTokenizer::countTokens() { |
74 | > | int StringTokenizer::countTokens() { |
75 | ||
76 | std::string::const_iterator tmpIter = currentPos_; | |
77 | int numToken = 0; | |
78 | ||
79 | while (true) { | |
80 | ||
81 | < | //skip leading delimiters first
82 | < | while( tmpIter != end_ && isDelimiter(*tmpIter)) { |
83 | < | ++tmpIter; |
81 | > | //skip leading delimiters first
82 | > | while( tmpIter != end_ && isDelimiter(*tmpIter)) { |
83 | > | ++tmpIter; |
84 | ||
85 | < | if (returnTokens_) { |
86 | < | //if delimiters are considered as tokens, count this one
87 | < | ++numToken; |
88 | < | } |
89 | < | } |
85 | > | if (returnTokens_) { |
86 | > | //if delimiters are considered as tokens, count this one
87 | > | ++numToken; |
88 | > | } |
89 | > | } |
90 | ||
91 | < | if (tmpIter == end_) { |
92 | < | break; |
93 | < | } |
91 | > | if (tmpIter == end_) { |
92 | > | break; |
93 | > | } |
94 | ||
95 | < | //encountered a token here
96 | < | while ( tmpIter != end_ && !isDelimiter(*tmpIter) ) { |
97 | < | ++tmpIter; |
98 | < | } |
95 | > | //encountered a token here
96 | > | while ( tmpIter != end_ && !isDelimiter(*tmpIter) ) { |
97 | > | ++tmpIter; |
98 | > | } |
99 | ||
100 | < | ++numToken; |
100 | > | ++numToken; |
101 | ||
102 | } | |
103 | ||
104 | return numToken; | |
105 | < | } |
105 | > | } |
106 | ||
107 | < | bool StringTokenizer::hasMoreTokens() { |
107 | > | bool StringTokenizer::hasMoreTokens() { |
108 | ||
109 | if (currentPos_ == end_) { | |
110 | < | return false; |
110 | > | return false; |
111 | } else if (returnTokens_) { | |
112 | < | return true; |
112 | > | return true; |
113 | } else { | |
114 | < | std::string::const_iterator i = currentPos_; |
114 | > | std::string::const_iterator i = currentPos_; |
115 | ||
116 | < | //walk through the remaining string to check whether it contains a non-delimiter character
117 | < | while(i != end_ && isDelimiter(*i)) { |
118 | < | ++i; |
119 | < | } |
116 | > | //walk through the remaining string to check whether it contains a non-delimiter character
117 | > | while(i != end_ && isDelimiter(*i)) { |
118 | > | ++i; |
119 | > | } |
120 | ||
121 | < | return i != end_ ? true : false; |
121 | > | return i != end_ ? true : false; |
122 | } | |
123 | < | } |
123 | > | } |
124 | ||
125 | < | std::string StringTokenizer::nextToken() { |
125 | > | std::string StringTokenizer::nextToken() { |
126 | std::string result; | |
127 | ||
128 | if(currentPos_ != end_) { | |
129 | < | std::insert_iterator<std::string> insertIter(result, result.begin()); |
129 | > | std::insert_iterator<std::string> insertIter(result, result.begin()); |
130 | ||
131 | < | while( currentPos_ != end_ && isDelimiter(*currentPos_)) { |
131 | > | while( currentPos_ != end_ && isDelimiter(*currentPos_)) { |
132 | ||
133 | < | if (returnTokens_) { |
134 | < | *insertIter++ = *currentPos_++; |
135 | < | return result; |
136 | < | } |
133 | > | if (returnTokens_) { |
134 | > | *insertIter++ = *currentPos_++; |
135 | > | return result; |
136 | > | } |
137 | ||
138 | < | ++currentPos_; |
139 | < | } |
138 | > | ++currentPos_; |
139 | > | } |
140 | ||
141 | < | while (currentPos_ != end_ && !isDelimiter(*currentPos_)) { |
142 | < | *insertIter++ = *currentPos_++; |
143 | < | } |
141 | > | while (currentPos_ != end_ && !isDelimiter(*currentPos_)) { |
142 | > | *insertIter++ = *currentPos_++; |
143 | > | } |
144 | ||
145 | } | |
146 | ||
147 | return result; | |
148 | < | } |
148 | > | } |
149 | ||
150 | < | bool StringTokenizer::nextTokenAsBool() { |
150 | > | bool StringTokenizer::nextTokenAsBool() { |
151 | std::string token = nextToken(); | |
152 | std::istringstream iss(token); | |
153 | bool result; | |
154 | ||
155 | if (iss >> result) { | |
156 | < | return result; |
156 | > | return result; |
157 | } else { | |
158 | < | std::cerr << "unable to convert " << token << " to a bool" << std::endl; |
159 | < | return false; |
158 | > | std::cerr << "unable to convert " << token << " to a bool" << std::endl; |
159 | > | return false; |
160 | } | |
161 | < | } |
162 | < | |
163 | < | int StringTokenizer::nextTokenAsInt() { |
161 | > | } |
162 | > | |
163 | > | //Since libstdc++ (GCC 3.2) has an i/ostream::operator>>/<<(streambuf*) bug (Bug 9318),
164 | > | //we use the C library instead of the iostream facility.
165 | > | int StringTokenizer::nextTokenAsInt() { |
166 | std::string token = nextToken(); | |
167 | < | std::istringstream iss(token); |
168 | < | int result; |
169 | < | |
168 | < | if (iss >> result) { |
169 | < | return result; |
170 | < | } else { |
171 | < | std::cerr << "unable to convert " << token << " to an integer" << std::endl; |
172 | < | return 0; |
173 | < | } |
174 | < | } |
167 | > | |
168 | > | return atoi(token.c_str()); |
169 | > | } |
170 | ||
171 | < | float StringTokenizer::nextTokenAsFloat() { |
171 | > | float StringTokenizer::nextTokenAsFloat() { |
172 | std::string token = nextToken(); | |
173 | < | std::istringstream iss(token); |
174 | < | float result; |
175 | < | |
181 | < | if (iss >> result) { |
182 | < | return result; |
183 | < | } else { |
184 | < | std::cerr << "unable to convert " << token << " to a float" << std::endl; |
185 | < | return 0.0; |
186 | < | } |
187 | < | } |
173 | > | convertFortranNumber(token); |
174 | > | return (float) (atof(token.c_str())); |
175 | > | } |
176 | ||
177 | < | double StringTokenizer::nextTokenAsDouble() { |
177 | > | double StringTokenizer::nextTokenAsDouble() { |
178 | std::string token = nextToken(); | |
179 | < | std::istringstream iss(token); |
180 | < | double result; |
181 | < | |
194 | < | if (iss >> result) { |
195 | < | return result; |
196 | < | } else { |
197 | < | std::cerr << "unable to convert " << token << " to a double" << std::endl; |
198 | < | return 0.0; |
199 | < | } |
200 | < | } |
179 | > | convertFortranNumber(token); |
180 | > | return atof(token.c_str()); |
181 | > | } |
182 | ||
183 | < | std::string StringTokenizer::peekNextToken() { |
183 | > | std::string StringTokenizer::peekNextToken() { |
184 | std::string result; | |
185 | std::string::const_iterator tmpIter = currentPos_; | |
186 | ||
187 | if(tmpIter != end_) { | |
188 | < | std::insert_iterator<std::string> insertIter(result, result.begin()); |
188 | > | std::insert_iterator<std::string> insertIter(result, result.begin()); |
189 | ||
190 | < | while(tmpIter != end_ && isDelimiter(*tmpIter)) { |
190 | > | while(tmpIter != end_ && isDelimiter(*tmpIter)) { |
191 | ||
192 | < | if (returnTokens_) { |
193 | < | *insertIter++ = *tmpIter++; |
194 | < | return result; |
195 | < | } |
192 | > | if (returnTokens_) { |
193 | > | *insertIter++ = *tmpIter++; |
194 | > | return result; |
195 | > | } |
196 | ||
197 | < | ++tmpIter; |
198 | < | } |
197 | > | ++tmpIter; |
198 | > | } |
199 | ||
200 | < | while (tmpIter != end_ && !isDelimiter(*tmpIter)) { |
201 | < | *insertIter++ = *tmpIter++; |
202 | < | } |
200 | > | while (tmpIter != end_ && !isDelimiter(*tmpIter)) { |
201 | > | *insertIter++ = *tmpIter++; |
202 | > | } |
203 | } | |
204 | ||
205 | return result; | |
206 | < | } |
206 | > | } |
207 | ||
208 | + | std::vector<std::string> StringTokenizer::getAllTokens() { |
209 | + | std::vector<std::string> tokens; |
210 | + | while (hasMoreTokens()) { |
211 | + | tokens.push_back(nextToken()); |
212 | + | } |
213 | + | return tokens; |
214 | + | } |
215 | + | void StringTokenizer::convertFortranNumber(std::string& fortranNumber) { |
216 | + | std::string::iterator i; |
217 | + | for(i = fortranNumber.begin(); i != fortranNumber.end(); ++i) { |
218 | + | if (*i == 'd' || *i == 'D') { |
219 | + | *i = 'E'; |
220 | + | } |
221 | + | } |
222 | + | } |
223 | + | |
224 | }//end namespace oopse | |
225 |
– | Removed lines |
+ | Added lines |
< | Changed lines (previous revision)
> | Changed lines (new revision)
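
For readers unfamiliar with the class being patched, here is a minimal usage sketch of the interface exercised by this diff: the two- and three-argument constructors, countTokens(), hasMoreTokens(), nextToken(), and the newly added getAllTokens(). The header name "StringTokenizer.hpp", the include path, and the sample strings are assumptions for illustration only; the member signatures are taken from the definitions shown above.

```cpp
// Hypothetical usage sketch; assumes a header "StringTokenizer.hpp" declaring the
// oopse::StringTokenizer class with the member functions defined in this diff.
#include <iostream>
#include <string>
#include <vector>

#include "StringTokenizer.hpp"

int main() {
    // Whitespace-delimited input; consecutive delimiters are skipped, so the
    // run of spaces between "12" and "3.5" does not yield an empty token.
    oopse::StringTokenizer st("ATOM 12   3.5", " \t");
    std::cout << st.countTokens() << std::endl;        // prints 3

    while (st.hasMoreTokens()) {
        std::cout << "[" << st.nextToken() << "]";     // prints [ATOM][12][3.5]
    }
    std::cout << std::endl;

    // With returnTokens = true, each delimiter character is itself returned as a
    // single-character token (see the returnTokens_ branch of nextToken()).
    oopse::StringTokenizer st2("a,b", ",", true);
    std::vector<std::string> all = st2.getAllTokens(); // {"a", ",", "b"}
    std::cout << all.size() << std::endl;              // prints 3

    return 0;
}
```

Note that countTokens() works on a copy of the current position, so calling it before the loop does not consume any tokens.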
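
The most substantive change in this revision is the switch of nextTokenAsInt(), nextTokenAsFloat(), and nextTokenAsDouble() from istringstream extraction to the C library (atoi()/atof(), declared in &lt;cstdlib&gt;, which is presumably included elsewhere in the file and not visible in this hunk), motivated by the libstdc++ 3.2 streambuf bug noted in the comment. The float and double variants additionally pass the token through convertFortranNumber(), because Fortran-formatted output writes double-precision exponents with 'd'/'D' (for example "1.5d-3") and atof() stops parsing at that letter. The standalone sketch below replicates the conversion as a free function purely for illustration; it is not part of the class.

```cpp
// Standalone illustration of the Fortran-exponent fix-up performed by
// convertFortranNumber() before atof() is called. The free function below is a
// local copy for demonstration; the real conversion is a StringTokenizer member.
#include <cstdio>
#include <cstdlib>
#include <string>

static void fortranToC(std::string& s) {
    // Rewrite Fortran double-precision exponent markers ('d'/'D') as 'E' so that
    // atof() can parse the full value instead of stopping at the letter.
    for (std::string::iterator i = s.begin(); i != s.end(); ++i) {
        if (*i == 'd' || *i == 'D') {
            *i = 'E';
        }
    }
}

int main() {
    std::string token = "1.5d-3";

    std::printf("raw atof:       %g\n", std::atof(token.c_str())); // 1.5 -- exponent lost
    fortranToC(token);                                             // token is now "1.5E-3"
    std::printf("converted atof: %g\n", std::atof(token.c_str())); // 0.0015

    return 0;
}
```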