Add Polygon Type To Schema / polygonContains to Query (#3944)

* Added type polygon to schema

* refactoring and more tests

* fix tests

* update test and transform

* add support for polygonContains

* fix transform.mongoObjectToParseObject test

* add indexes for polygon

* index test

* postgres test fix

* remove invalid loop test

* add invalid loop test

* nit
This commit is contained in:
Diamond Lewis
2017-07-11 22:33:45 -05:00
committed by Florent Vilmart
parent 0571c6f95e
commit e6cc8204b3
9 changed files with 470 additions and 5 deletions

View File

@@ -25,6 +25,7 @@ function mongoFieldToParseSchemaField(type) {
case 'geopoint': return {type: 'GeoPoint'};
case 'file': return {type: 'File'};
case 'bytes': return {type: 'Bytes'};
case 'polygon': return {type: 'Polygon'};
}
}
@@ -98,6 +99,7 @@ function parseFieldTypeToMongoFieldType({ type, targetClass }) {
case 'GeoPoint': return 'geopoint';
case 'File': return 'file';
case 'Bytes': return 'bytes';
case 'Polygon': return 'polygon';
}
}

View File

@@ -182,7 +182,8 @@ export class MongoStorageAdapter {
addFieldIfNotExists(className, fieldName, type) {
return this._schemaCollection()
.then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type));
.then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type))
.then(() => this.createIndexesIfNeeded(className, fieldName, type));
}
// Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.)
@@ -429,6 +430,21 @@ export class MongoStorageAdapter {
return this._adaptiveCollection(className)
.then(collection => collection._mongoCollection.createIndex(index));
}
createIndexesIfNeeded(className, fieldName, type) {
if (type && type.type === 'Polygon') {
const index = {
[fieldName]: '2dsphere'
};
return this.createIndex(className, index);
}
return Promise.resolve();
}
getIndexes(className) {
return this._adaptiveCollection(className)
.then(collection => collection._mongoCollection.indexes());
}
}
export default MongoStorageAdapter;

View File

@@ -495,6 +495,9 @@ function transformTopLevelAtom(atom) {
if (GeoPointCoder.isValidJSON(atom)) {
return GeoPointCoder.JSONToDatabase(atom);
}
if (PolygonCoder.isValidJSON(atom)) {
return PolygonCoder.JSONToDatabase(atom);
}
if (FileCoder.isValidJSON(atom)) {
return FileCoder.JSONToDatabase(atom);
}
@@ -692,6 +695,24 @@ function transformConstraint(constraint, inArray) {
};
break;
}
case '$geoIntersects': {
const point = constraint[key]['$point'];
if (!GeoPointCoder.isValidJSON(point)) {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
'bad $geoIntersect value; $point should be GeoPoint'
);
} else {
Parse.GeoPoint._validate(point.latitude, point.longitude);
}
answer[key] = {
$geometry: {
type: 'Point',
coordinates: [point.longitude, point.latitude]
}
};
break;
}
default:
if (key.match(/^\$+/)) {
throw new Parse.Error(
@@ -940,6 +961,10 @@ const mongoObjectToParseObject = (className, mongoObject, schema) => {
restObject[key] = GeoPointCoder.databaseToJSON(value);
break;
}
if (schema.fields[key] && schema.fields[key].type === 'Polygon' && PolygonCoder.isValidDatabaseObject(value)) {
restObject[key] = PolygonCoder.databaseToJSON(value);
break;
}
if (schema.fields[key] && schema.fields[key].type === 'Bytes' && BytesCoder.isValidDatabaseObject(value)) {
restObject[key] = BytesCoder.databaseToJSON(value);
break;
@@ -1043,6 +1068,64 @@ var GeoPointCoder = {
}
};
// Converts Parse Polygon values ({ __type: 'Polygon', coordinates: [...] })
// to and from the GeoJSON-style shape stored in MongoDB
// ({ type: 'Polygon', coordinates: [ring] }). Only a single outer ring is
// supported; holes (additional rings) are ignored.
// NOTE(review): point[1] is passed to Parse.GeoPoint._validate as latitude
// and point[0] as longitude — confirm the intended pair order with callers.
var PolygonCoder = {
  // Stored form -> Parse REST form. Unwraps the outer ring (coordinates[0]).
  databaseToJSON(object) {
    return {
      __type: 'Polygon',
      coordinates: object['coordinates'][0]
    };
  },

  // True when `object` looks like a stored GeoJSON Polygon whose outer ring
  // is an array of valid points. Guards against null/non-polygon values and
  // a missing or malformed `coordinates` array instead of throwing (the
  // original indexed object.coordinates[0] before any type check).
  isValidDatabaseObject(object) {
    if (!object || object.type !== 'Polygon' || !Array.isArray(object.coordinates)) {
      return false;
    }
    const coords = object.coordinates[0];
    if (!Array.isArray(coords)) {
      return false;
    }
    for (let i = 0; i < coords.length; i++) {
      const point = coords[i];
      if (!GeoPointCoder.isValidDatabaseObject(point)) {
        return false;
      }
      // Throws if the coordinate values are out of GeoPoint range.
      Parse.GeoPoint._validate(parseFloat(point[1]), parseFloat(point[0]));
    }
    return true;
  },

  // Parse REST form -> stored form. Closes the linear ring when the caller
  // did not (GeoJSON requires first vertex === last vertex) and requires at
  // least 3 distinct vertices. Works on a copy so the caller's
  // `json.coordinates` array is never mutated (the original pushed onto it).
  JSONToDatabase(json) {
    const coords = json.coordinates.slice();
    const first = coords[0];
    const last = coords[coords.length - 1];
    if (first[0] !== last[0] || first[1] !== last[1]) {
      coords.push(first);
    }
    // Keep only the first occurrence of each vertex to count distinct points.
    const unique = coords.filter((item, index, ar) =>
      ar.findIndex((pt) => pt[0] === item[0] && pt[1] === item[1]) === index
    );
    if (unique.length < 3) {
      throw new Parse.Error(
        Parse.Error.INTERNAL_SERVER_ERROR,
        'GeoJSON: Loop must have at least 3 different vertices'
      );
    }
    return { type: 'Polygon', coordinates: [coords] };
  },

  // A JSON value is a Polygon when it is a non-null object tagged
  // __type: 'Polygon'. (Presence/shape of `coordinates` is not checked here.)
  isValidJSON(value) {
    return (typeof value === 'object' &&
      value !== null &&
      value.__type === 'Polygon'
    );
  }
};
var FileCoder = {
databaseToJSON(object) {
return {

View File

@@ -29,6 +29,7 @@ const parseTypeToPostgresType = type => {
case 'Number': return 'double precision';
case 'GeoPoint': return 'point';
case 'Bytes': return 'jsonb';
case 'Polygon': return 'polygon';
case 'Array':
if (type.contents && type.contents.type === 'String') {
return 'text[]';
@@ -435,6 +436,20 @@ const buildWhereClause = ({ schema, query, index }) => {
values.push(fieldName, `(${points})`);
index += 2;
}
if (fieldValue.$geoIntersects && fieldValue.$geoIntersects.$point) {
const point = fieldValue.$geoIntersects.$point;
if (typeof point !== 'object' || point.__type !== 'GeoPoint') {
throw new Parse.Error(
Parse.Error.INVALID_JSON,
'bad $geoIntersect value; $point should be GeoPoint'
);
} else {
Parse.GeoPoint._validate(point.latitude, point.longitude);
}
patterns.push(`$${index}:name::polygon @> $${index + 1}::point`);
values.push(fieldName, `(${point.longitude}, ${point.latitude})`);
index += 2;
}
if (fieldValue.$regex) {
let regex = fieldValue.$regex;
@@ -480,6 +495,13 @@ const buildWhereClause = ({ schema, query, index }) => {
index += 3;
}
if (fieldValue.__type === 'Polygon') {
const value = convertPolygonToSQL(fieldValue.coordinates);
patterns.push(`$${index}:name ~= $${index + 1}::polygon`);
values.push(fieldName, value);
index += 2;
}
Object.keys(ParseToPosgresComparator).forEach(cmp => {
if (fieldValue[cmp]) {
const pgComparator = ParseToPosgresComparator[cmp];
@@ -844,6 +866,11 @@ export class PostgresStorageAdapter {
case 'File':
valuesArray.push(object[fieldName].name);
break;
case 'Polygon': {
const value = convertPolygonToSQL(object[fieldName].coordinates);
valuesArray.push(value);
break;
}
case 'GeoPoint':
// pop the point and process later
geoPoints[fieldName] = object[fieldName];
@@ -1024,6 +1051,11 @@ export class PostgresStorageAdapter {
updatePatterns.push(`$${index}:name = POINT($${index + 1}, $${index + 2})`);
values.push(fieldName, fieldValue.longitude, fieldValue.latitude);
index += 3;
} else if (fieldValue.__type === 'Polygon') {
const value = convertPolygonToSQL(fieldValue.coordinates);
updatePatterns.push(`$${index}:name = $${index + 1}::polygon`);
values.push(fieldName, value);
index += 2;
} else if (fieldValue.__type === 'Relation') {
// noop
} else if (typeof fieldValue === 'number') {
@@ -1186,6 +1218,20 @@ export class PostgresStorageAdapter {
longitude: object[fieldName].x
}
}
if (object[fieldName] && schema.fields[fieldName].type === 'Polygon') {
let coords = object[fieldName];
coords = coords.substr(2, coords.length - 4).split('),(');
coords = coords.map((point) => {
return [
parseFloat(point.split(',')[1]),
parseFloat(point.split(',')[0])
];
});
object[fieldName] = {
__type: "Polygon",
coordinates: coords
}
}
if (object[fieldName] && schema.fields[fieldName].type === 'File') {
object[fieldName] = {
__type: 'File',
@@ -1303,6 +1349,42 @@ export class PostgresStorageAdapter {
}
}
// Converts a Parse Polygon coordinate array into the textual literal the
// Postgres `polygon` type accepts: "((x1, y1), (x2, y2), ...)". Each vertex
// is emitted as (point[1], point[0]); point[1] is range-checked as latitude
// and point[0] as longitude via Parse.GeoPoint._validate.
//
// The ring is closed (first vertex appended) when the caller did not close
// it, and at least 3 distinct vertices are required. Unlike the original,
// the caller's array is copied — never mutated by the closing push.
//
// Throws Parse.Error(INVALID_JSON) for fewer than 3 vertices and
// Parse.Error(INTERNAL_SERVER_ERROR) for a degenerate loop.
function convertPolygonToSQL(polygon) {
  if (polygon.length < 3) {
    throw new Parse.Error(
      Parse.Error.INVALID_JSON,
      `Polygon must have at least 3 values`
    );
  }
  const ring = polygon.slice();
  const first = ring[0];
  const last = ring[ring.length - 1];
  if (first[0] !== last[0] || first[1] !== last[1]) {
    ring.push(first);
  }
  // Keep only the first occurrence of each vertex to count distinct points.
  const unique = ring.filter((item, index, ar) =>
    ar.findIndex((pt) => pt[0] === item[0] && pt[1] === item[1]) === index
  );
  if (unique.length < 3) {
    throw new Parse.Error(
      Parse.Error.INTERNAL_SERVER_ERROR,
      'GeoJSON: Loop must have at least 3 different vertices'
    );
  }
  const points = ring.map((point) => {
    Parse.GeoPoint._validate(parseFloat(point[1]), parseFloat(point[0]));
    return `(${point[1]}, ${point[0]})`;
  }).join(', ');
  return `(${points})`;
}
function removeWhiteSpace(regex) {
if (!regex.endsWith('\n')){
regex += '\n';

View File

@@ -231,7 +231,8 @@ const validNonRelationOrPointerTypes = [
'Array',
'GeoPoint',
'File',
'Bytes'
'Bytes',
'Polygon'
];
// Returns an error suitable for throwing if the type is invalid
const fieldTypeIsInvalid = ({ type, targetClass }) => {
@@ -995,6 +996,11 @@ function getObjectType(obj) {
return 'Bytes';
}
break;
case 'Polygon' :
if(obj.coordinates) {
return 'Polygon';
}
break;
}
throw new Parse.Error(Parse.Error.INCORRECT_TYPE, "This is not a valid " + obj.__type);
}