Skip to content

Commit eac8ada

Browse files
shaargtz authored and MongoDB Bot committed
SERVER-89322 Change eraseDuplicatePoints() logic to avoid underflows [7.0] (#32646)
GitOrigin-RevId: dd6d19d1cc5d97a73ce32d47ca91f8acf8680b4f
1 parent 8ff1410 commit eac8ada

File tree

2 files changed

+45
-6
lines changed

2 files changed

+45
-6
lines changed

src/mongo/db/geo/geoparser.cpp

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -141,7 +141,7 @@ static Status parseArrayOfCoordinates(const BSONElement& elem, vector<S2Point>*
141141
<< typeName(elem.type()));
142142
}
143143
BSONObjIterator it(elem.Obj());
144-
// Iterate all coordinates in array
144+
// Iterate all coordinates in array.
145145
while (it.more()) {
146146
S2Point p;
147147
Status status = parseGeoJSONCoordinate(it.next(), &p);
@@ -153,11 +153,20 @@ static Status parseArrayOfCoordinates(const BSONElement& elem, vector<S2Point>*
153153
}
154154

155155
static void eraseDuplicatePoints(vector<S2Point>* vertices) {
156-
for (size_t i = 1; i < vertices->size(); ++i) {
157-
if ((*vertices)[i - 1] == (*vertices)[i]) {
158-
vertices->erase(vertices->begin() + i);
159-
// We could have > 2 adjacent identical vertices, and must examine i again.
160-
--i;
156+
// Duplicates can't exist in a vector of 0 or 1 elements, and we want to be careful about
157+
// possible underflow of size - 1 in the next block.
158+
if (vertices->size() < 2) {
159+
return;
160+
}
161+
162+
size_t i = 0;
163+
while (i < vertices->size() - 1) {
164+
if ((*vertices)[i] == (*vertices)[i + 1]) {
165+
vertices->erase(vertices->begin() + i + 1);
166+
// We could have > 2 adjacent identical vertices, and must examine i again, so we don't
167+
// increment the iterator.
168+
} else {
169+
++i;
161170
}
162171
}
163172
}

src/mongo/db/geo/geoparser_test.cpp

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -125,6 +125,8 @@ TEST(GeoParser, parseGeoJSONLine) {
125125
fromjson("{'type':'LineString', 'coordinates':[[1,2, 3], [3,4, 5], [5,6]]}"),
126126
false,
127127
&polyline));
128+
ASSERT_NOT_OK(GeoParser::parseGeoJSONLine(
129+
fromjson("{'type':'LineString', 'coordinates':[[1,2], [1,2]]}"), false, &polyline));
128130
}
129131

130132
TEST(GeoParser, parseGeoJSONPolygon) {
@@ -220,6 +222,34 @@ TEST(GeoParser, parseGeoJSONPolygon) {
220222
&polygonBad));
221223
}
222224

225+
TEST(GeoParser, parseGeoJSONPolygonStrictSphere) {
226+
string crs = "crs:{ type: 'name', properties:{name:'" + CRS_STRICT_WINDING + "'}}";
227+
PolygonWithCRS polygon;
228+
BSONObj bigSimplePolygon = fromjson(
229+
"{'type':'Polygon', 'coordinates':[ "
230+
"[[0,0],[5,0],[5,5],[0,5],[0,0]]], " +
231+
crs + "}");
232+
ASSERT_OK(GeoParser::parseGeoJSONPolygon(bigSimplePolygon, false, &polygon));
233+
234+
BSONObj bigSimplePolygonWithValidDuplicates = fromjson(
235+
"{'type':'Polygon', 'coordinates':[ "
236+
"[[0,0],[5,0],[5,5],[5,5],[0,5],[0,0]]], " +
237+
crs + "}");
238+
ASSERT_OK(GeoParser::parseGeoJSONPolygon(bigSimplePolygonWithValidDuplicates, false, &polygon));
239+
240+
BSONObj bigSimplePolygonWithInvalidDuplicates = fromjson(
241+
"{'type':'Polygon', 'coordinates':[ "
242+
"[[0,0],[5,0],[5,0],[0,0],[0,0]]], " +
243+
crs + "}");
244+
ASSERT_NOT_OK(
245+
GeoParser::parseGeoJSONPolygon(bigSimplePolygonWithInvalidDuplicates, false, &polygon));
246+
247+
BSONObj bigSimplePolygonWithFewPoints = fromjson(
248+
"{'type':'Polygon', 'coordinates':[ "
249+
"[[0,0]]], " +
250+
crs + "}");
251+
ASSERT_NOT_OK(GeoParser::parseGeoJSONPolygon(bigSimplePolygonWithFewPoints, false, &polygon));
252+
}
223253

224254
TEST(GeoParser, parseGeoJSONCRS) {
225255
string goodCRS1 = "crs:{ type: 'name', properties:{name:'EPSG:4326'}}";

0 commit comments

Comments (0)