Merge remote-tracking branch 'upstream/master'
This commit is contained in:
95
APNS.js
Normal file
95
APNS.js
Normal file
@@ -0,0 +1,95 @@
|
||||
var Parse = require('parse/node').Parse;
|
||||
// TODO: apn does not support the new HTTP/2 protocol. It is fine to use it in V1,
|
||||
// but probably we will replace it in the future.
|
||||
var apn = require('apn');
|
||||
|
||||
/**
 * Create a new connection to the APN service.
 * @constructor
 * @param {Object} args Arguments to config APNS connection
 * @param {String} args.cert The filename of the connection certificate to load from disk, default is cert.pem
 * @param {String} args.key The filename of the connection key to load from disk, default is key.pem
 * @param {String} args.passphrase The passphrase for the connection key, if required
 * @param {Boolean} args.production Specifies which environment to connect to: Production (if true) or Sandbox
 */
function APNS(args) {
  // args is forwarded verbatim to the apn module's connection constructor.
  this.sender = new apn.connection(args);

  // Lifecycle logging only; none of these handlers alter delivery behavior.
  this.sender.on('connected', function() {
    console.log('APNS Connected');
  });

  this.sender.on('transmissionError', function(errCode, notification, device) {
    console.error('APNS Notification caused error: ' + errCode + ' for device ', device, notification);
    // TODO: For error caused by invalid deviceToken, we should mark those installations.
  });

  this.sender.on("timeout", function () {
    console.log("APNS Connection Timeout");
  });

  this.sender.on("disconnected", function() {
    console.log("APNS Disconnected");
  });

  // Low-level socket errors are logged directly.
  this.sender.on("socketError", console.error);
}
|
||||
|
||||
/**
 * Send apns request.
 * @param {Object} data The data we need to send, the format is the same with api request body
 * @param {Array} deviceTokens A array of device tokens
 * @returns {Object} A promise which is resolved immediately
 */
APNS.prototype.send = function(data, deviceTokens) {
  var expiration = data['expiration_time'];
  var notification = generateNotification(data.data, expiration);
  this.sender.pushNotification(notification, deviceTokens);
  // TODO: pushNotification only places the notification on apn's internal
  // queue. We do not handle errors in V1; we rely on apn to auto retry and
  // send the notifications.
  return Parse.Promise.as();
}
|
||||
|
||||
/**
 * Generate the apns notification from the data we get from api request.
 * @param {Object} coreData The data field under api request body
 * @param {Number} expirationTime Unix epoch milliseconds at which the notification expires
 * @returns {Object} A apns notification
 */
var generateNotification = function(coreData, expirationTime) {
  var notification = new apn.notification();
  var payload = {};
  // BUG FIX: `key` was an implicit global (no var), which could be clobbered
  // across concurrent calls; declare it function-scoped.
  for (var key in coreData) {
    switch (key) {
      case 'alert':
        notification.setAlertText(coreData.alert);
        break;
      case 'badge':
        notification.badge = coreData.badge;
        break;
      case 'sound':
        notification.sound = coreData.sound;
        break;
      case 'content-available':
        notification.setNewsstandAvailable(true);
        var isAvailable = coreData['content-available'] === 1;
        notification.setContentAvailable(isAvailable);
        break;
      case 'category':
        notification.category = coreData.category;
        break;
      default:
        // Any unrecognized key rides along in the custom payload.
        payload[key] = coreData[key];
        break;
    }
  }
  notification.payload = payload;
  notification.expiry = expirationTime;
  return notification;
}
|
||||
|
||||
if (typeof process !== 'undefined' && process.env.NODE_ENV === 'test') {
|
||||
APNS.generateNotification = generateNotification;
|
||||
}
|
||||
module.exports = APNS;
|
||||
@@ -6,8 +6,9 @@ We really want Parse to be yours, to see it grow and thrive in the open source c
|
||||
|
||||
##### Please Do's
|
||||
|
||||
* Please write tests to cover new methods.
|
||||
* Please run the tests and make sure you didn't break anything.
|
||||
* Take testing seriously! Aim to increase the test coverage with every pull request.
|
||||
* Run the tests for the file you are working on with `TESTING=1 (repo-root)/node_modules/jasmine/bin/jasmine.js spec/MyFile.spec.js`
|
||||
* Run the tests for the whole project and look at the coverage report to make sure your tests are exhaustive by running `npm test` and looking at (project-root)/lcov-report/parse-server/FileUnderTest.js.html
|
||||
|
||||
##### Code of Conduct
|
||||
|
||||
|
||||
@@ -60,13 +60,7 @@ ExportAdapter.prototype.connect = function() {
|
||||
var joinRegex = /^_Join:[A-Za-z0-9_]+:[A-Za-z0-9_]+/;
|
||||
var otherRegex = /^[A-Za-z][A-Za-z0-9_]*$/;
|
||||
ExportAdapter.prototype.collection = function(className) {
|
||||
if (className !== '_User' &&
|
||||
className !== '_Installation' &&
|
||||
className !== '_Session' &&
|
||||
className !== '_SCHEMA' &&
|
||||
className !== '_Role' &&
|
||||
!joinRegex.test(className) &&
|
||||
!otherRegex.test(className)) {
|
||||
if (!Schema.classNameIsValid(className)) {
|
||||
throw new Parse.Error(Parse.Error.INVALID_CLASS_NAME,
|
||||
'invalid className: ' + className);
|
||||
}
|
||||
@@ -500,6 +494,7 @@ ExportAdapter.prototype.smartFind = function(coll, where, options) {
|
||||
|
||||
var index = {};
|
||||
index[key] = '2d';
|
||||
//TODO: consider moving index creation logic into Schema.js
|
||||
return coll.createIndex(index).then(() => {
|
||||
// Retry, but just once.
|
||||
return coll.find(where, options).toArray();
|
||||
|
||||
82
GCM.js
Normal file
82
GCM.js
Normal file
@@ -0,0 +1,82 @@
|
||||
var Parse = require('parse/node').Parse;
|
||||
var gcm = require('node-gcm');
|
||||
var randomstring = require('randomstring');
|
||||
|
||||
var GCMTimeToLiveMax = 4 * 7 * 24 * 60 * 60; // GCM allows a max of 4 weeks
|
||||
var GCMRegistrationTokensMax = 1000;
|
||||
|
||||
/**
 * Create a GCM sender backed by the node-gcm module.
 * @constructor
 * @param {String} apiKey The GCM server API key
 */
function GCM(apiKey) {
  this.sender = new gcm.Sender(apiKey);
}
|
||||
|
||||
/**
 * Send gcm request.
 * @param {Object} data The data we need to send, the format is the same with api request body
 * @param {Array} registrationTokens A array of registration tokens
 * @returns {Object} A promise which is resolved after we get results from gcm
 * @throws {Parse.Error} PUSH_MISCONFIGURED when more than GCMRegistrationTokensMax tokens are given
 */
GCM.prototype.send = function (data, registrationTokens) {
  // GCM accepts at most GCMRegistrationTokensMax (1000) recipients per request.
  // BUG FIX: the old check used `>=`, rejecting a request with exactly 1000
  // tokens even though gcm permits it.
  if (registrationTokens.length > GCMRegistrationTokensMax) {
    throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
                          'Too many registration tokens for a GCM request.');
  }
  // Short random id so receivers can correlate this push.
  var pushId = randomstring.generate({
    length: 10,
    charset: 'alphanumeric'
  });
  var timeStamp = Date.now();
  var expirationTime;
  // We handle the expiration_time conversion in push.js, so expiration_time is
  // a valid date in Unix epoch time in milliseconds here.
  if (data['expiration_time']) {
    expirationTime = data['expiration_time'];
  }
  // Generate gcm payload
  var gcmPayload = generateGCMPayload(data.data, pushId, timeStamp, expirationTime);
  // Make and send gcm request (5 is the number of retries).
  var message = new gcm.Message(gcmPayload);
  var promise = new Parse.Promise();
  this.sender.send(message, { registrationTokens: registrationTokens }, 5, function (error, response) {
    // TODO: Use the response from gcm to generate and save push report
    // TODO: If gcm returns some deviceTokens are invalid, set tombstone for the installation
    // Deliberately resolve regardless of error in V1; gcm auto-retries.
    promise.resolve();
  });
  return promise;
}
|
||||
|
||||
/**
 * Generate the gcm payload from the data we get from api request.
 * @param {Object} coreData The data field under api request body
 * @param {String} pushId A random string identifying this push
 * @param {Number} timeStamp A number whose format is the Unix Epoch (milliseconds)
 * @param {Number|undefined} expirationTime A number whose format is the Unix Epoch, or undefined
 * @returns {Object} The gcm payload object
 */
var generateGCMPayload = function(coreData, pushId, timeStamp, expirationTime) {
  var payload = {
    priority: 'normal',
    data: {
      'time': new Date(timeStamp).toISOString(),
      'push_id': pushId,
      'data': JSON.stringify(coreData)
    }
  };
  if (expirationTime) {
    // gcm wants time-to-live in seconds; our timestamps are milliseconds.
    var secondsLeft = Math.floor((expirationTime - timeStamp) / 1000);
    // Clamp into [0, GCMTimeToLiveMax].
    payload.timeToLive = Math.min(Math.max(secondsLeft, 0), GCMTimeToLiveMax);
  }
  return payload;
}
|
||||
|
||||
if (typeof process !== 'undefined' && process.env.NODE_ENV === 'test') {
|
||||
GCM.generateGCMPayload = generateGCMPayload;
|
||||
}
|
||||
module.exports = GCM;
|
||||
47
README.md
47
README.md
@@ -12,6 +12,8 @@ Read the migration guide here: https://parse.com/docs/server/guide#migrating
|
||||
|
||||
There is a development wiki here on GitHub: https://github.com/ParsePlatform/parse-server/wiki
|
||||
|
||||
We also have an [example project](https://github.com/ParsePlatform/parse-server-example) using the parse-server module on Express.
|
||||
|
||||
---
|
||||
|
||||
#### Basic options:
|
||||
@@ -22,6 +24,7 @@ There is a development wiki here on GitHub: https://github.com/ParsePlatform/par
|
||||
* cloud - The absolute path to your cloud code main.js file
|
||||
* fileKey - For migrated apps, this is necessary to provide access to files already hosted on Parse.
|
||||
* facebookAppIds - An array of valid Facebook application IDs.
|
||||
* serverURL - URL which will be used by Cloud Code functions to make requests against.
|
||||
|
||||
#### Client key options:
|
||||
|
||||
@@ -49,14 +52,17 @@ var ParseServer = require('parse-server').ParseServer;
|
||||
|
||||
var app = express();
|
||||
|
||||
var port = process.env.PORT || 1337;
|
||||
|
||||
// Specify the connection string for your mongodb database
|
||||
// and the location to your Parse cloud code
|
||||
var api = new ParseServer({
|
||||
databaseURI: 'mongodb://localhost:27017/dev',
|
||||
cloud: '/home/myApp/cloud/main.js', // Provide an absolute path
|
||||
appId: 'myAppId',
|
||||
masterKey: 'mySecretMasterKey',
|
||||
fileKey: 'optionalFileKey'
|
||||
masterKey: '', //Add your master key here. Keep it secret!
|
||||
fileKey: 'optionalFileKey',
|
||||
serverURL: 'http://localhost:' + port + '/parse' // Don't forget to change to https if needed
|
||||
});
|
||||
|
||||
// Serve the Parse API on the /parse URL prefix
|
||||
@@ -67,13 +73,48 @@ app.get('/', function(req, res) {
|
||||
res.status(200).send('Express is running here.');
|
||||
});
|
||||
|
||||
var port = process.env.PORT || 1337;
|
||||
app.listen(port, function() {
|
||||
console.log('parse-server-example running on port ' + port + '.');
|
||||
});
|
||||
|
||||
```
|
||||
|
||||
|
||||
#### Standalone usage
|
||||
|
||||
You can configure the Parse Server with environment variables:
|
||||
|
||||
```js
|
||||
PARSE_SERVER_DATABASE_URI
|
||||
PARSE_SERVER_CLOUD_CODE_MAIN
|
||||
PARSE_SERVER_COLLECTION_PREFIX
|
||||
PARSE_SERVER_APPLICATION_ID // required
|
||||
PARSE_SERVER_CLIENT_KEY
|
||||
PARSE_SERVER_REST_API_KEY
|
||||
PARSE_SERVER_DOTNET_KEY
|
||||
PARSE_SERVER_JAVASCRIPT_KEY
|
||||
PARSE_SERVER_DOTNET_KEY
|
||||
PARSE_SERVER_MASTER_KEY // required
|
||||
PARSE_SERVER_FILE_KEY
|
||||
PARSE_SERVER_FACEBOOK_APP_IDS // string of comma separated list
|
||||
|
||||
```
|
||||
|
||||
|
||||
|
||||
Alternatively, you can use the `PARSE_SERVER_OPTIONS` environment variable set to the JSON of your configuration (see Usage).
|
||||
|
||||
To start the server, just run `npm start`.
|
||||
|
||||
##### Global installation
|
||||
|
||||
You can install parse-server globally
|
||||
|
||||
`$ npm install -g parse-server`
|
||||
|
||||
Now you can just run `$ parse-server` from your command line.
|
||||
|
||||
|
||||
### Supported
|
||||
|
||||
* CRUD operations
|
||||
|
||||
218
Schema.js
218
Schema.js
@@ -17,6 +17,135 @@
|
||||
var Parse = require('parse/node').Parse;
|
||||
var transform = require('./transform');
|
||||
|
||||
// Contain the default columns for every parse object type (except _Join collection).
// BUG FIX: `defaultColumns` was an implicit global (no var); declare it.
var defaultColumns = {
  // The default columns shared by every class.
  _Default: {
    "objectId": {type:'String'},
    "createdAt": {type:'Date'},
    "updatedAt": {type:'Date'},
    "ACL": {type:'ACL'},
  },
  // The additional default columns for the _User collection (in addition to _Default)
  _User: {
    "username": {type:'String'},
    "password": {type:'String'},
    "authData": {type:'Object'},
    "email": {type:'String'},
    "emailVerified": {type:'Boolean'},
  },
  // The additional default columns for the _Installation collection (in addition to _Default)
  _Installation: {
    "installationId": {type:'String'},
    "deviceToken": {type:'String'},
    "channels": {type:'Array'},
    "deviceType": {type:'String'},
    "pushType": {type:'String'},
    "GCMSenderId": {type:'String'},
    "timeZone": {type:'String'},
    "localeIdentifier": {type:'String'},
    "badge": {type:'Number'},
  },
  // The additional default columns for the _Role collection (in addition to _Default)
  _Role: {
    "name": {type:'String'},
    "users": {type:'Relation', className:'_User'},
    "roles": {type:'Relation', className:'_Role'},
  },
  // The additional default columns for the _Session collection (in addition to _Default)
  _Session: {
    "restricted": {type:'Boolean'},
    "user": {type:'Pointer', className:'_User'},
    "installationId": {type:'String'},
    "sessionToken": {type:'String'},
    "expiresAt": {type:'Date'},
    "createdWith": {type:'Object'},
  },
}
|
||||
|
||||
// Valid classes must:
// Be one of _User, _Installation, _Role, _Session OR
// Be a join table OR
// Include only alpha-numeric and underscores, and not start with an underscore or number
var joinClassRegex = /^_Join:[A-Za-z0-9_]+:[A-Za-z0-9_]+/;
var classAndFieldRegex = /^[A-Za-z][A-Za-z0-9_]*$/;
function classNameIsValid(className) {
  //TODO: remove _SCHEMA, as it is not a valid class name for storing Parse Objects.
  var systemClasses = ['_User', '_Installation', '_Session', '_SCHEMA', '_Role'];
  if (systemClasses.indexOf(className) > -1) {
    return true;
  }
  if (joinClassRegex.test(className)) {
    return true;
  }
  // Class names have the same constraints as field names, but also allow the
  // system/join names above.
  return fieldNameIsValid(className);
}

// Valid fields must be alpha-numeric, and not start with an underscore or number
function fieldNameIsValid(fieldName) {
  return classAndFieldRegex.test(fieldName);
}
|
||||
|
||||
// Checks that it's not trying to clobber one of the default fields of the class.
function fieldNameIsValidForClass(fieldName, className) {
  // Reject syntactically invalid names outright.
  if (!fieldNameIsValid(fieldName)) {
    return false;
  }
  // Reject names reserved by every class, or by this particular class.
  var reservedEverywhere = !!defaultColumns._Default[fieldName];
  var classColumns = defaultColumns[className];
  var reservedForClass = !!(classColumns && classColumns[fieldName]);
  return !reservedEverywhere && !reservedForClass;
}
|
||||
|
||||
// Builds the standard error message for an invalid class name.
function invalidClassNameMessage(className) {
  return `Invalid classname: ${className}, classnames can only have alphanumeric characters and _, and must start with an alpha character `;
}
|
||||
|
||||
// Returns { error: "message", code: ### } if the type could not be
// converted, otherwise returns { result: "mongotype" }
// where mongotype is suitable for inserting into mongo _SCHEMA collection
function schemaAPITypeToMongoFieldType(type) {
  var invalidJsonError = { error: "invalid JSON", code: Parse.Error.INVALID_JSON };
  // Pointers are encoded in the mongo schema as '*<targetClass>'.
  if (type.type == 'Pointer') {
    if (!type.targetClass) {
      return { error: 'type Pointer needs a class name', code: 135 };
    } else if (typeof type.targetClass !== 'string') {
      return invalidJsonError;
    } else if (!classNameIsValid(type.targetClass)) {
      return { error: invalidClassNameMessage(type.targetClass), code: Parse.Error.INVALID_CLASS_NAME };
    } else {
      return { result: '*' + type.targetClass };
    }
  }
  // Relations are encoded as 'relation<targetClass>'.
  if (type.type == 'Relation') {
    if (!type.targetClass) {
      return { error: 'type Relation needs a class name', code: 135 };
    } else if (typeof type.targetClass !== 'string') {
      return invalidJsonError;
    } else if (!classNameIsValid(type.targetClass)) {
      return { error: invalidClassNameMessage(type.targetClass), code: Parse.Error.INVALID_CLASS_NAME };
    } else {
      return { result: 'relation<' + type.targetClass + '>' };
    }
  }
  if (typeof type.type !== 'string') {
    return { error: "invalid JSON", code: Parse.Error.INVALID_JSON };
  }
  // Scalar types map 1:1 to lowercase mongo schema type names.
  switch (type.type) {
    default: return { error: 'invalid field type: ' + type.type, code: Parse.Error.INCORRECT_TYPE };
    case 'Number': return { result: 'number' };
    case 'String': return { result: 'string' };
    case 'Boolean': return { result: 'boolean' };
    case 'Date': return { result: 'date' };
    case 'Object': return { result: 'object' };
    case 'Array': return { result: 'array' };
    case 'GeoPoint': return { result: 'geopoint' };
    case 'File': return { result: 'file' };
  }
}
|
||||
|
||||
// Create a schema from a Mongo collection and the exported schema format.
|
||||
// mongoSchema should be a list of objects, each with:
|
||||
@@ -71,9 +200,93 @@ Schema.prototype.reload = function() {
|
||||
return load(this.collection);
|
||||
};
|
||||
|
||||
// Create a new class that includes the three default fields.
// ACL is an implicit column that does not get an entry in the
// _SCHEMAS database. Returns a promise that resolves with the
// created schema in mongo format on success, and rejects with
// an error on failure. Ensure you have authorization (master
// key, or client class creation enabled) before calling this
// function.
Schema.prototype.addClassIfNotExists = function(className, fields) {
  if (this.data[className]) {
    return Promise.reject({
      code: Parse.Error.INVALID_CLASS_NAME,
      error: 'class ' + className + ' already exists',
    });
  }

  if (!classNameIsValid(className)) {
    return Promise.reject({
      code: Parse.Error.INVALID_CLASS_NAME,
      error: invalidClassNameMessage(className),
    });
  }

  // BUG FIX: `fieldName` and `validatedField` were implicit globals (no var),
  // which concurrent calls could clobber; declare them function-scoped.
  var fieldName;
  var validatedField;
  for (fieldName in fields) {
    if (!fieldNameIsValid(fieldName)) {
      return Promise.reject({
        code: Parse.Error.INVALID_KEY_NAME,
        error: 'invalid field name: ' + fieldName,
      });
    }
    if (!fieldNameIsValidForClass(fieldName, className)) {
      return Promise.reject({
        code: 136,
        error: 'field ' + fieldName + ' cannot be added',
      });
    }
  }

  var mongoObject = {
    _id: className,
    objectId: 'string',
    updatedAt: 'string',
    createdAt: 'string',
  };
  // Built-in columns for this system class (if any) come first...
  for (fieldName in defaultColumns[className]) {
    validatedField = schemaAPITypeToMongoFieldType(defaultColumns[className][fieldName]);
    if (validatedField.code) {
      return Promise.reject(validatedField);
    }
    mongoObject[fieldName] = validatedField.result;
  }

  // ...then the caller-requested fields.
  for (fieldName in fields) {
    validatedField = schemaAPITypeToMongoFieldType(fields[fieldName]);
    if (validatedField.code) {
      return Promise.reject(validatedField);
    }
    mongoObject[fieldName] = validatedField.result;
  }

  // Mongo only supports a single geo index per collection.
  var geoPoints = Object.keys(mongoObject).filter(key => mongoObject[key] === 'geopoint');

  if (geoPoints.length > 1) {
    return Promise.reject({
      code: Parse.Error.INCORRECT_TYPE,
      error: 'currently, only one GeoPoint field may exist in an object. Adding ' + geoPoints[1] + ' when ' + geoPoints[0] + ' already exists.',
    });
  }

  return this.collection.insertOne(mongoObject)
    .then(result => result.ops[0])
    .catch(error => {
      if (error.code === 11000) { //Mongo's duplicate key error
        return Promise.reject({
          code: Parse.Error.INVALID_CLASS_NAME,
          error: 'class ' + className + ' already exists',
        });
      }
      return Promise.reject(error);
    });
}
|
||||
|
||||
// Returns a promise that resolves successfully to the new schema
|
||||
// object.
|
||||
// object or fails with a reason.
|
||||
// If 'freeze' is true, refuse to update the schema.
|
||||
// WARNING: this function has side-effects, and doesn't actually
|
||||
// do any validation of the format of the className. You probably
|
||||
// should use classNameIsValid or addClassIfNotExists or something
|
||||
// like that instead. TODO: rename or remove this function.
|
||||
Schema.prototype.validateClassName = function(className, freeze) {
|
||||
if (this.data[className]) {
|
||||
return Promise.resolve(this);
|
||||
@@ -348,5 +561,6 @@ function getObjectType(obj) {
|
||||
|
||||
|
||||
module.exports = {
|
||||
load: load
|
||||
load: load,
|
||||
classNameIsValid: classNameIsValid,
|
||||
};
|
||||
|
||||
43
bin/parse-server
Executable file
43
bin/parse-server
Executable file
@@ -0,0 +1,43 @@
|
||||
#!/usr/bin/env node
var express = require('express');
var ParseServer = require("../index").ParseServer;

var app = express();

// Configuration comes either from one JSON blob in PARSE_SERVER_OPTIONS,
// or from individual PARSE_SERVER_* environment variables.
var options = {};
if (process.env.PARSE_SERVER_OPTIONS) {

  options = JSON.parse(process.env.PARSE_SERVER_OPTIONS);

} else {

  options.databaseURI = process.env.PARSE_SERVER_DATABASE_URI;
  options.cloud = process.env.PARSE_SERVER_CLOUD_CODE_MAIN;
  options.collectionPrefix = process.env.PARSE_SERVER_COLLECTION_PREFIX;

  // Keys and App ID
  options.appId = process.env.PARSE_SERVER_APPLICATION_ID;
  options.clientKey = process.env.PARSE_SERVER_CLIENT_KEY;
  options.restAPIKey = process.env.PARSE_SERVER_REST_API_KEY;
  // BUG FIX: dotNetKey was assigned twice from the same variable; the
  // duplicate line has been removed.
  options.dotNetKey = process.env.PARSE_SERVER_DOTNET_KEY;
  options.javascriptKey = process.env.PARSE_SERVER_JAVASCRIPT_KEY;
  options.masterKey = process.env.PARSE_SERVER_MASTER_KEY;
  options.fileKey = process.env.PARSE_SERVER_FILE_KEY;
  // Comma separated list of facebook app ids
  var facebookAppIds = process.env.PARSE_SERVER_FACEBOOK_APP_IDS;

  if (facebookAppIds) {
    options.facebookAppIds = facebookAppIds.split(",");
  }
}

var mountPath = process.env.PARSE_SERVER_MOUNT_PATH || "/";
var api = new ParseServer(options);
app.use(mountPath, api);

var port = process.env.PORT || 1337;
app.listen(port, function() {
  // NOTE(review): message says "parse-server-example" — looks copy-pasted
  // from the example repo; confirm before rebranding the log line.
  console.log('parse-server-example running on http://localhost:'+ port + mountPath);
});
|
||||
@@ -35,7 +35,7 @@ function createResponseObject(resolve, reject) {
|
||||
success: function(result) {
|
||||
resolve({
|
||||
response: {
|
||||
result: result
|
||||
result: Parse._encode(result)
|
||||
}
|
||||
});
|
||||
},
|
||||
|
||||
5
index.js
5
index.js
@@ -82,6 +82,9 @@ function ParseServer(args) {
|
||||
|
||||
// Initialize the node client SDK automatically
|
||||
Parse.initialize(args.appId, args.javascriptKey || '', args.masterKey);
|
||||
if(args.serverURL) {
|
||||
Parse.serverURL = args.serverURL;
|
||||
}
|
||||
|
||||
// This app serves the Parse API directly.
|
||||
// It's the equivalent of https://api.parse.com/1 in the hosted Parse API.
|
||||
@@ -108,7 +111,7 @@ function ParseServer(args) {
|
||||
router.merge(require('./sessions'));
|
||||
router.merge(require('./roles'));
|
||||
router.merge(require('./analytics'));
|
||||
router.merge(require('./push'));
|
||||
router.merge(require('./push').router);
|
||||
router.merge(require('./installations'));
|
||||
router.merge(require('./functions'));
|
||||
router.merge(require('./schemas'));
|
||||
|
||||
25
package.json
25
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "parse-server",
|
||||
"version": "2.0.6",
|
||||
"version": "2.0.7",
|
||||
"description": "An express module providing a Parse-compatible API server",
|
||||
"main": "index.js",
|
||||
"repository": {
|
||||
@@ -9,16 +9,19 @@
|
||||
},
|
||||
"license": "BSD-3-Clause",
|
||||
"dependencies": {
|
||||
"apn": "^1.7.5",
|
||||
"aws-sdk": "~2.2.33",
|
||||
"bcrypt-nodejs": "0.0.3",
|
||||
"body-parser": "~1.12.4",
|
||||
"deepcopy": "^0.5.0",
|
||||
"express": "~4.2.x",
|
||||
"body-parser": "^1.14.2",
|
||||
"deepcopy": "^0.6.1",
|
||||
"express": "^4.13.4",
|
||||
"hat": "~0.0.3",
|
||||
"mime": "^1.3.4",
|
||||
"mongodb": "~2.0.33",
|
||||
"multer": "~0.1.8",
|
||||
"parse": "~1.6.12",
|
||||
"mongodb": "~2.1.0",
|
||||
"multer": "^1.1.0",
|
||||
"parse": "^1.7.0",
|
||||
"randomstring": "^1.1.3",
|
||||
"node-gcm": "^0.14.0",
|
||||
"request": "^2.65.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -30,10 +33,14 @@
|
||||
},
|
||||
"scripts": {
|
||||
"pretest": "MONGODB_VERSION=${MONGODB_VERSION:=3.0.8} mongodb-runner start",
|
||||
"test": "TESTING=1 ./node_modules/.bin/istanbul cover --include-all-sources -x **/spec/** ./node_modules/.bin/jasmine",
|
||||
"posttest": "mongodb-runner stop"
|
||||
"test": "NODE_ENV=test TESTING=1 ./node_modules/.bin/istanbul cover --include-all-sources -x **/spec/** ./node_modules/.bin/jasmine",
|
||||
"posttest": "mongodb-runner stop",
|
||||
"start": "./bin/parse-server"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=4.1"
|
||||
},
|
||||
"bin": {
|
||||
"parse-server": "./bin/parse-server"
|
||||
}
|
||||
}
|
||||
|
||||
122
push.js
122
push.js
@@ -4,15 +4,121 @@ var Parse = require('parse/node').Parse,
|
||||
PromiseRouter = require('./PromiseRouter'),
|
||||
rest = require('./rest');
|
||||
|
||||
var router = new PromiseRouter();
|
||||
var validPushTypes = ['ios', 'android'];
|
||||
|
||||
|
||||
|
||||
function notImplementedYet(req) {
|
||||
throw new Parse.Error(Parse.Error.COMMAND_UNAVAILABLE,
|
||||
'This path is not implemented yet.');
|
||||
/**
 * Handle a POST /push request inline (no queue in V1).
 * Validates the master key, derives the installation query from channels or
 * `where`, validates the requested device types, and normalizes
 * expiration_time to Unix epoch milliseconds before looking up installations.
 * @param {Object} req A request object
 * @returns {Object} A promise; currently always rejects with COMMAND_UNAVAILABLE
 *     after the installation lookup, since actual delivery is not implemented yet.
 */
function handlePushWithoutQueue(req) {
  validateMasterKey(req);
  var where = getQueryCondition(req);
  validateDeviceType(where);
  // Replace the expiration_time with a valid Unix epoch milliseconds time
  req.body['expiration_time'] = getExpirationTime(req);
  return rest.find(req.config, req.auth, '_Installation', where).then(function(response) {
    throw new Parse.Error(Parse.Error.COMMAND_UNAVAILABLE,
                          'This path is not implemented yet.');
  });
}
|
||||
|
||||
router.route('POST','/push', notImplementedYet);
|
||||
/**
 * Check whether the deviceType parameter in the query condition is valid or not.
 * @param {Object} where A query condition
 * @throws {Parse.Error} PUSH_MISCONFIGURED if a requested deviceType is not supported
 */
function validateDeviceType(where) {
  var where = where || {};
  var deviceTypeField = where.deviceType || {};
  var deviceTypes = [];
  if (typeof deviceTypeField === 'string') {
    deviceTypes.push(deviceTypeField);
  } else if (Array.isArray(deviceTypeField['$in'])) {
    // BUG FIX: the old code used `typeof x === 'array'` (typeof never returns
    // 'array') and discarded the result of concat, so $in queries were never
    // validated. Use Array.isArray and reassign the concatenated array.
    deviceTypes = deviceTypes.concat(deviceTypeField['$in']);
  }
  for (var i = 0; i < deviceTypes.length; i++) {
    var deviceType = deviceTypes[i];
    if (validPushTypes.indexOf(deviceType) < 0) {
      throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
                            deviceType + ' is not supported push type.');
    }
  }
}
|
||||
|
||||
module.exports = router;
|
||||
/**
 * Get expiration time from the request body.
 * @param {Object} req A request object
 * @returns {Number|undefined} The expiration time as Unix epoch milliseconds, if present
 * @throws {Parse.Error} PUSH_MISCONFIGURED when expiration_time cannot be parsed
 */
function getExpirationTime(req) {
  var body = req.body || {};
  var raw = body['expiration_time'];
  if (!raw) {
    return;
  }
  var expirationDate;
  switch (typeof raw) {
    case 'number':
      // Numeric values are interpreted as Unix epoch seconds.
      expirationDate = new Date(raw * 1000);
      break;
    case 'string':
      expirationDate = new Date(raw);
      break;
    default:
      throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
                            body['expiration_time'] + ' is not valid time.');
  }
  // An unparseable date coerces to NaN, which is not finite.
  if (!isFinite(expirationDate)) {
    throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
                          body['expiration_time'] + ' is not valid time.');
  }
  return expirationDate.valueOf();
}
|
||||
|
||||
/**
 * Get query condition from the request body.
 * @param {Object} req A request object
 * @returns {Object} The query condition, the where field in a query api call
 * @throws {Parse.Error} PUSH_MISCONFIGURED when neither or both of channels/where are given
 */
function getQueryCondition(req) {
  var body = req.body || {};
  var hasWhere = typeof body.where !== 'undefined';
  var hasChannels = typeof body.channels !== 'undefined';

  if (hasWhere && hasChannels) {
    throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
                          'Channels and query can not be set at the same time.');
  }
  if (hasWhere) {
    return body.where;
  }
  if (hasChannels) {
    // Translate the channel list into an equivalent installation query.
    return {
      "channels": {
        "$in": body.channels
      }
    };
  }
  throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
                        'Channels and query should be set at least one.');
}
|
||||
|
||||
/**
 * Check whether the api call has master key or not.
 * @param {Object} req A request object
 * @throws {Parse.Error} PUSH_MISCONFIGURED when the supplied key does not match the configured one
 */
function validateMasterKey(req) {
  var provided = req.info.masterKey;
  var expected = req.config.masterKey;
  if (provided === expected) {
    return;
  }
  throw new Parse.Error(Parse.Error.PUSH_MISCONFIGURED,
                        'Master key is invalid, you should only use master key to send push');
}
|
||||
|
||||
var router = new PromiseRouter();
|
||||
router.route('POST','/push', handlePushWithoutQueue);
|
||||
|
||||
module.exports = {
|
||||
router: router
|
||||
}
|
||||
|
||||
if (typeof process !== 'undefined' && process.env.NODE_ENV === 'test') {
|
||||
module.exports.getQueryCondition = getQueryCondition;
|
||||
module.exports.validateMasterKey = validateMasterKey;
|
||||
module.exports.getExpirationTime = getExpirationTime;
|
||||
module.exports.validateDeviceType = validateDeviceType;
|
||||
}
|
||||
|
||||
30
schemas.js
30
schemas.js
@@ -5,7 +5,7 @@ var express = require('express'),
|
||||
|
||||
var router = new PromiseRouter();
|
||||
|
||||
function mongoFieldTypeToApiResponseType(type) {
|
||||
function mongoFieldTypeToSchemaAPIType(type) {
|
||||
if (type[0] === '*') {
|
||||
return {
|
||||
type: 'Pointer',
|
||||
@@ -32,10 +32,10 @@ function mongoFieldTypeToApiResponseType(type) {
|
||||
|
||||
function mongoSchemaAPIResponseFields(schema) {
|
||||
fieldNames = Object.keys(schema).filter(key => key !== '_id');
|
||||
response = {};
|
||||
fieldNames.forEach(fieldName => {
|
||||
response[fieldName] = mongoFieldTypeToApiResponseType(schema[fieldName]);
|
||||
});
|
||||
response = fieldNames.reduce((obj, fieldName) => {
|
||||
obj[fieldName] = mongoFieldTypeToSchemaAPIType(schema[fieldName])
|
||||
return obj;
|
||||
}, {});
|
||||
response.ACL = {type: 'ACL'};
|
||||
response.createdAt = {type: 'Date'};
|
||||
response.updatedAt = {type: 'Date'};
|
||||
@@ -64,6 +64,26 @@ function getAllSchemas(req) {
|
||||
}}));
|
||||
}
|
||||
|
||||
function getOneSchema(req) {
|
||||
if (!req.auth.isMaster) {
|
||||
return Promise.resolve({
|
||||
status: 401,
|
||||
response: {error: 'unauthorized'},
|
||||
});
|
||||
}
|
||||
return req.config.database.collection('_SCHEMA')
|
||||
.then(coll => coll.findOne({'_id': req.params.className}))
|
||||
.then(schema => ({response: mongoSchemaToSchemaAPIResponse(schema)}))
|
||||
.catch(() => ({
|
||||
status: 400,
|
||||
response: {
|
||||
code: 103,
|
||||
error: 'class ' + req.params.className + ' does not exist',
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
router.route('GET', '/schemas', getAllSchemas);
|
||||
router.route('GET', '/schemas/:className', getOneSchema);
|
||||
|
||||
module.exports = router;
|
||||
|
||||
58
spec/APNS.spec.js
Normal file
58
spec/APNS.spec.js
Normal file
@@ -0,0 +1,58 @@
|
||||
var APNS = require('../APNS');
|
||||
|
||||
describe('APNS', () => {
|
||||
it('can generate APNS notification', (done) => {
|
||||
//Mock request data
|
||||
var data = {
|
||||
'alert': 'alert',
|
||||
'badge': 100,
|
||||
'sound': 'test',
|
||||
'content-available': 1,
|
||||
'category': 'INVITE_CATEGORY',
|
||||
'key': 'value',
|
||||
'keyAgain': 'valueAgain'
|
||||
};
|
||||
var expirationTime = 1454571491354
|
||||
|
||||
var notification = APNS.generateNotification(data, expirationTime);
|
||||
|
||||
expect(notification.alert).toEqual(data.alert);
|
||||
expect(notification.badge).toEqual(data.badge);
|
||||
expect(notification.sound).toEqual(data.sound);
|
||||
expect(notification.contentAvailable).toEqual(1);
|
||||
expect(notification.category).toEqual(data.category);
|
||||
expect(notification.payload).toEqual({
|
||||
'key': 'value',
|
||||
'keyAgain': 'valueAgain'
|
||||
});
|
||||
expect(notification.expiry).toEqual(expirationTime);
|
||||
done();
|
||||
});
|
||||
|
||||
it('can send APNS notification', (done) => {
|
||||
var apns = new APNS();
|
||||
var sender = {
|
||||
pushNotification: jasmine.createSpy('send')
|
||||
};
|
||||
apns.sender = sender;
|
||||
// Mock data
|
||||
var expirationTime = 1454571491354
|
||||
var data = {
|
||||
'expiration_time': expirationTime,
|
||||
'data': {
|
||||
'alert': 'alert'
|
||||
}
|
||||
}
|
||||
// Mock registrationTokens
|
||||
var deviceTokens = ['token'];
|
||||
|
||||
var promise = apns.send(data, deviceTokens);
|
||||
expect(sender.pushNotification).toHaveBeenCalled();
|
||||
var args = sender.pushNotification.calls.first().args;
|
||||
var notification = args[0];
|
||||
expect(notification.alert).toEqual(data.data.alert);
|
||||
expect(notification.expiry).toEqual(data['expiration_time']);
|
||||
expect(args[1]).toEqual(deviceTokens);
|
||||
done();
|
||||
});
|
||||
});
|
||||
137
spec/GCM.spec.js
Normal file
137
spec/GCM.spec.js
Normal file
@@ -0,0 +1,137 @@
|
||||
var GCM = require('../GCM');
|
||||
|
||||
describe('GCM', () => {
|
||||
it('can generate GCM Payload without expiration time', (done) => {
|
||||
//Mock request data
|
||||
var data = {
|
||||
'alert': 'alert'
|
||||
};
|
||||
var pushId = 1;
|
||||
var timeStamp = 1454538822113;
|
||||
var timeStampISOStr = new Date(timeStamp).toISOString();
|
||||
|
||||
var payload = GCM.generateGCMPayload(data, pushId, timeStamp);
|
||||
|
||||
expect(payload.priority).toEqual('normal');
|
||||
expect(payload.timeToLive).toEqual(undefined);
|
||||
var dataFromPayload = payload.data;
|
||||
expect(dataFromPayload.time).toEqual(timeStampISOStr);
|
||||
expect(dataFromPayload['push_id']).toEqual(pushId);
|
||||
var dataFromUser = JSON.parse(dataFromPayload.data);
|
||||
expect(dataFromUser).toEqual(data);
|
||||
done();
|
||||
});
|
||||
|
||||
it('can generate GCM Payload with valid expiration time', (done) => {
|
||||
//Mock request data
|
||||
var data = {
|
||||
'alert': 'alert'
|
||||
};
|
||||
var pushId = 1;
|
||||
var timeStamp = 1454538822113;
|
||||
var timeStampISOStr = new Date(timeStamp).toISOString();
|
||||
var expirationTime = 1454538922113
|
||||
|
||||
var payload = GCM.generateGCMPayload(data, pushId, timeStamp, expirationTime);
|
||||
|
||||
expect(payload.priority).toEqual('normal');
|
||||
expect(payload.timeToLive).toEqual(Math.floor((expirationTime - timeStamp) / 1000));
|
||||
var dataFromPayload = payload.data;
|
||||
expect(dataFromPayload.time).toEqual(timeStampISOStr);
|
||||
expect(dataFromPayload['push_id']).toEqual(pushId);
|
||||
var dataFromUser = JSON.parse(dataFromPayload.data);
|
||||
expect(dataFromUser).toEqual(data);
|
||||
done();
|
||||
});
|
||||
|
||||
it('can generate GCM Payload with too early expiration time', (done) => {
|
||||
//Mock request data
|
||||
var data = {
|
||||
'alert': 'alert'
|
||||
};
|
||||
var pushId = 1;
|
||||
var timeStamp = 1454538822113;
|
||||
var timeStampISOStr = new Date(timeStamp).toISOString();
|
||||
var expirationTime = 1454538822112;
|
||||
|
||||
var payload = GCM.generateGCMPayload(data, pushId, timeStamp, expirationTime);
|
||||
|
||||
expect(payload.priority).toEqual('normal');
|
||||
expect(payload.timeToLive).toEqual(0);
|
||||
var dataFromPayload = payload.data;
|
||||
expect(dataFromPayload.time).toEqual(timeStampISOStr);
|
||||
expect(dataFromPayload['push_id']).toEqual(pushId);
|
||||
var dataFromUser = JSON.parse(dataFromPayload.data);
|
||||
expect(dataFromUser).toEqual(data);
|
||||
done();
|
||||
});
|
||||
|
||||
it('can generate GCM Payload with too late expiration time', (done) => {
|
||||
//Mock request data
|
||||
var data = {
|
||||
'alert': 'alert'
|
||||
};
|
||||
var pushId = 1;
|
||||
var timeStamp = 1454538822113;
|
||||
var timeStampISOStr = new Date(timeStamp).toISOString();
|
||||
var expirationTime = 2454538822113;
|
||||
|
||||
var payload = GCM.generateGCMPayload(data, pushId, timeStamp, expirationTime);
|
||||
|
||||
expect(payload.priority).toEqual('normal');
|
||||
// Four week in second
|
||||
expect(payload.timeToLive).toEqual(4 * 7 * 24 * 60 * 60);
|
||||
var dataFromPayload = payload.data;
|
||||
expect(dataFromPayload.time).toEqual(timeStampISOStr);
|
||||
expect(dataFromPayload['push_id']).toEqual(pushId);
|
||||
var dataFromUser = JSON.parse(dataFromPayload.data);
|
||||
expect(dataFromUser).toEqual(data);
|
||||
done();
|
||||
});
|
||||
|
||||
it('can send GCM request', (done) => {
|
||||
var gcm = new GCM('apiKey');
|
||||
// Mock gcm sender
|
||||
var sender = {
|
||||
send: jasmine.createSpy('send')
|
||||
};
|
||||
gcm.sender = sender;
|
||||
// Mock data
|
||||
var expirationTime = 2454538822113;
|
||||
var data = {
|
||||
'expiration_time': expirationTime,
|
||||
'data': {
|
||||
'alert': 'alert'
|
||||
}
|
||||
}
|
||||
// Mock registrationTokens
|
||||
var registrationTokens = ['token'];
|
||||
|
||||
var promise = gcm.send(data, registrationTokens);
|
||||
expect(sender.send).toHaveBeenCalled();
|
||||
var args = sender.send.calls.first().args;
|
||||
// It is too hard to verify message of gcm library, we just verify tokens and retry times
|
||||
expect(args[1].registrationTokens).toEqual(registrationTokens);
|
||||
expect(args[2]).toEqual(5);
|
||||
done();
|
||||
});
|
||||
|
||||
it('can throw on sending when we have too many registration tokens', (done) => {
|
||||
var gcm = new GCM('apiKey');
|
||||
// Mock gcm sender
|
||||
var sender = {
|
||||
send: jasmine.createSpy('send')
|
||||
};
|
||||
gcm.sender = sender;
|
||||
// Mock registrationTokens
|
||||
var registrationTokens = [];
|
||||
for (var i = 0; i <= 2000; i++) {
|
||||
registrationTokens.push(i.toString());
|
||||
}
|
||||
|
||||
expect(function() {
|
||||
gcm.send({}, registrationTokens);
|
||||
}).toThrow();
|
||||
done();
|
||||
});
|
||||
});
|
||||
@@ -287,4 +287,47 @@ describe('Parse.GeoPoint testing', () => {
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('supports a sub-object with a geo point', done => {
|
||||
var point = new Parse.GeoPoint(44.0, -11.0);
|
||||
var obj = new TestObject();
|
||||
obj.set('subobject', { location: point });
|
||||
obj.save(null, {
|
||||
success: function() {
|
||||
var query = new Parse.Query(TestObject);
|
||||
query.find({
|
||||
success: function(results) {
|
||||
equal(results.length, 1);
|
||||
var pointAgain = results[0].get('subobject')['location'];
|
||||
ok(pointAgain);
|
||||
equal(pointAgain.latitude, 44.0);
|
||||
equal(pointAgain.longitude, -11.0);
|
||||
done();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
it('supports array of geo points', done => {
|
||||
var point1 = new Parse.GeoPoint(44.0, -11.0);
|
||||
var point2 = new Parse.GeoPoint(22.0, -55.0);
|
||||
var obj = new TestObject();
|
||||
obj.set('locations', [ point1, point2 ]);
|
||||
obj.save(null, {
|
||||
success: function() {
|
||||
var query = new Parse.Query(TestObject);
|
||||
query.find({
|
||||
success: function(results) {
|
||||
equal(results.length, 1);
|
||||
var locations = results[0].get('locations');
|
||||
expect(locations.length).toEqual(2);
|
||||
expect(locations[0]).toEqual(point1);
|
||||
expect(locations[1]).toEqual(point2);
|
||||
done();
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
// These tests check that the Schema operates correctly.
|
||||
var Config = require('../Config');
|
||||
var Schema = require('../Schema');
|
||||
var dd = require('deep-diff');
|
||||
|
||||
var config = new Config('test');
|
||||
|
||||
@@ -131,4 +132,278 @@ describe('Schema', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('can add classes without needing an object', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'String'}
|
||||
}))
|
||||
.then(result => {
|
||||
expect(result).toEqual({
|
||||
_id: 'NewClass',
|
||||
objectId: 'string',
|
||||
updatedAt: 'string',
|
||||
createdAt: 'string',
|
||||
foo: 'string',
|
||||
})
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('will fail to create a class if that class was already created by an object', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => {
|
||||
schema.validateObject('NewClass', {foo: 7})
|
||||
.then(() => {
|
||||
schema.reload()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'String'}
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_CLASS_NAME)
|
||||
expect(error.error).toEqual('class NewClass already exists');
|
||||
done();
|
||||
});
|
||||
});
|
||||
})
|
||||
});
|
||||
|
||||
it('will resolve class creation races appropriately', done => {
|
||||
// If two callers race to create the same schema, the response to the
|
||||
// race loser should be the same as if they hadn't been racing.
|
||||
config.database.loadSchema()
|
||||
.then(schema => {
|
||||
var p1 = schema.addClassIfNotExists('NewClass', {foo: {type: 'String'}});
|
||||
var p2 = schema.addClassIfNotExists('NewClass', {foo: {type: 'String'}});
|
||||
Promise.race([p1, p2]) //Use race because we expect the first completed promise to be the successful one
|
||||
.then(response => {
|
||||
expect(response).toEqual({
|
||||
_id: 'NewClass',
|
||||
objectId: 'string',
|
||||
updatedAt: 'string',
|
||||
createdAt: 'string',
|
||||
foo: 'string',
|
||||
});
|
||||
});
|
||||
Promise.all([p1,p2])
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_CLASS_NAME);
|
||||
expect(error.error).toEqual('class NewClass already exists');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to create classes with invalid names', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => {
|
||||
schema.addClassIfNotExists('_InvalidName', {foo: {type: 'String'}})
|
||||
.catch(error => {
|
||||
expect(error.error).toEqual(
|
||||
'Invalid classname: _InvalidName, classnames can only have alphanumeric characters and _, and must start with an alpha character '
|
||||
);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with invalid names', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {'0InvalidName': {type: 'String'}}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_KEY_NAME);
|
||||
expect(error.error).toEqual('invalid field name: 0InvalidName');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to explicitly create the default fields for custom classes', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {objectId: {type: 'String'}}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(136);
|
||||
expect(error.error).toEqual('field objectId cannot be added');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to explicitly create the default fields for non-custom classes', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('_Installation', {localeIdentifier: {type: 'String'}}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(136);
|
||||
expect(error.error).toEqual('field localeIdentifier cannot be added');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with invalid types', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 7}
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_JSON);
|
||||
expect(error.error).toEqual('invalid JSON');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with invalid pointer types', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'Pointer'},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(135);
|
||||
expect(error.error).toEqual('type Pointer needs a class name');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with invalid pointer target', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'Pointer', targetClass: 7},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_JSON);
|
||||
expect(error.error).toEqual('invalid JSON');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with invalid Relation type', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'Relation', uselessKey: 7},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(135);
|
||||
expect(error.error).toEqual('type Relation needs a class name');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with invalid relation target', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'Relation', targetClass: 7},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_JSON);
|
||||
expect(error.error).toEqual('invalid JSON');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with uncreatable pointer target class', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'Pointer', targetClass: 'not a valid class name'},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_CLASS_NAME);
|
||||
expect(error.error).toEqual('Invalid classname: not a valid class name, classnames can only have alphanumeric characters and _, and must start with an alpha character ');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with uncreatable relation target class', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'Relation', targetClass: 'not a valid class name'},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INVALID_CLASS_NAME);
|
||||
expect(error.error).toEqual('Invalid classname: not a valid class name, classnames can only have alphanumeric characters and _, and must start with an alpha character ');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to add fields with unknown types', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
foo: {type: 'Unknown'},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INCORRECT_TYPE);
|
||||
expect(error.error).toEqual('invalid field type: Unknown');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('will create classes', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
aNumber: {type: 'Number'},
|
||||
aString: {type: 'String'},
|
||||
aBool: {type: 'Boolean'},
|
||||
aDate: {type: 'Date'},
|
||||
aObject: {type: 'Object'},
|
||||
aArray: {type: 'Array'},
|
||||
aGeoPoint: {type: 'GeoPoint'},
|
||||
aFile: {type: 'File'},
|
||||
aPointer: {type: 'Pointer', targetClass: 'ThisClassDoesNotExistYet'},
|
||||
aRelation: {type: 'Relation', targetClass: 'NewClass'},
|
||||
}))
|
||||
.then(mongoObj => {
|
||||
expect(mongoObj).toEqual({
|
||||
_id: 'NewClass',
|
||||
objectId: 'string',
|
||||
createdAt: 'string',
|
||||
updatedAt: 'string',
|
||||
aNumber: 'number',
|
||||
aString: 'string',
|
||||
aBool: 'boolean',
|
||||
aDate: 'date',
|
||||
aObject: 'object',
|
||||
aArray: 'array',
|
||||
aGeoPoint: 'geopoint',
|
||||
aFile: 'file',
|
||||
aPointer: '*ThisClassDoesNotExistYet',
|
||||
aRelation: 'relation<NewClass>',
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('creates the default fields for non-custom classes', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('_Installation', {
|
||||
foo: {type: 'Number'},
|
||||
}))
|
||||
.then(mongoObj => {
|
||||
expect(mongoObj).toEqual({
|
||||
_id: '_Installation',
|
||||
createdAt: 'string',
|
||||
updatedAt: 'string',
|
||||
objectId: 'string',
|
||||
foo: 'number',
|
||||
installationId: 'string',
|
||||
deviceToken: 'string',
|
||||
channels: 'array',
|
||||
deviceType: 'string',
|
||||
pushType: 'string',
|
||||
GCMSenderId: 'string',
|
||||
timeZone: 'string',
|
||||
localeIdentifier: 'string',
|
||||
badge: 'number',
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('refuses to create two geopoints', done => {
|
||||
config.database.loadSchema()
|
||||
.then(schema => schema.addClassIfNotExists('NewClass', {
|
||||
geo1: {type: 'GeoPoint'},
|
||||
geo2: {type: 'GeoPoint'},
|
||||
}))
|
||||
.catch(error => {
|
||||
expect(error.code).toEqual(Parse.Error.INCORRECT_TYPE);
|
||||
expect(error.error).toEqual('currently, only one GeoPoint field may exist in an object. Adding geo2 when geo1 already exists.');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
206
spec/push.spec.js
Normal file
206
spec/push.spec.js
Normal file
@@ -0,0 +1,206 @@
|
||||
var push = require('../push');
|
||||
|
||||
describe('push', () => {
|
||||
it('can check valid master key of request', (done) => {
|
||||
// Make mock request
|
||||
var request = {
|
||||
info: {
|
||||
masterKey: 'masterKey'
|
||||
},
|
||||
config: {
|
||||
masterKey: 'masterKey'
|
||||
}
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
push.validateMasterKey(request);
|
||||
}).not.toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can check invalid master key of request', (done) => {
|
||||
// Make mock request
|
||||
var request = {
|
||||
info: {
|
||||
masterKey: 'masterKey'
|
||||
},
|
||||
config: {
|
||||
masterKey: 'masterKeyAgain'
|
||||
}
|
||||
}
|
||||
|
||||
expect(() => {
|
||||
push.validateMasterKey(request);
|
||||
}).toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can get query condition when channels is set', (done) => {
|
||||
// Make mock request
|
||||
var request = {
|
||||
body: {
|
||||
channels: ['Giants', 'Mets']
|
||||
}
|
||||
}
|
||||
|
||||
var where = push.getQueryCondition(request);
|
||||
expect(where).toEqual({
|
||||
'channels': {
|
||||
'$in': ['Giants', 'Mets']
|
||||
}
|
||||
});
|
||||
done();
|
||||
});
|
||||
|
||||
it('can get query condition when where is set', (done) => {
|
||||
// Make mock request
|
||||
var request = {
|
||||
body: {
|
||||
'where': {
|
||||
'injuryReports': true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var where = push.getQueryCondition(request);
|
||||
expect(where).toEqual({
|
||||
'injuryReports': true
|
||||
});
|
||||
done();
|
||||
});
|
||||
|
||||
it('can get query condition when nothing is set', (done) => {
|
||||
// Make mock request
|
||||
var request = {
|
||||
body: {
|
||||
}
|
||||
}
|
||||
|
||||
expect(function() {
|
||||
push.getQueryCondition(request);
|
||||
}).toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can throw on getQueryCondition when channels and where are set', (done) => {
|
||||
// Make mock request
|
||||
var request = {
|
||||
body: {
|
||||
'channels': {
|
||||
'$in': ['Giants', 'Mets']
|
||||
},
|
||||
'where': {
|
||||
'injuryReports': true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
expect(function() {
|
||||
push.getQueryCondition(request);
|
||||
}).toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can validate device type when no device type is set', (done) => {
|
||||
// Make query condition
|
||||
var where = {
|
||||
}
|
||||
|
||||
expect(function(){
|
||||
push.validateDeviceType(where);
|
||||
}).not.toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can validate device type when single valid device type is set', (done) => {
|
||||
// Make query condition
|
||||
var where = {
|
||||
'deviceType': 'ios'
|
||||
}
|
||||
|
||||
expect(function(){
|
||||
push.validateDeviceType(where);
|
||||
}).not.toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can validate device type when multiple valid device types are set', (done) => {
|
||||
// Make query condition
|
||||
var where = {
|
||||
'deviceType': {
|
||||
'$in': ['android', 'ios']
|
||||
}
|
||||
}
|
||||
|
||||
expect(function(){
|
||||
push.validateDeviceType(where);
|
||||
}).not.toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can throw on validateDeviceType when single invalid device type is set', (done) => {
|
||||
// Make query condition
|
||||
var where = {
|
||||
'deviceType': 'osx'
|
||||
}
|
||||
|
||||
expect(function(){
|
||||
push.validateDeviceType(where);
|
||||
}).toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can throw on validateDeviceType when single invalid device type is set', (done) => {
|
||||
// Make query condition
|
||||
var where = {
|
||||
'deviceType': 'osx'
|
||||
}
|
||||
|
||||
expect(function(){
|
||||
push.validateDeviceType(where)
|
||||
}).toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('can get expiration time in string format', (done) => {
|
||||
// Make mock request
|
||||
var timeStr = '2015-03-19T22:05:08Z';
|
||||
var request = {
|
||||
body: {
|
||||
'expiration_time': timeStr
|
||||
}
|
||||
}
|
||||
|
||||
var time = push.getExpirationTime(request);
|
||||
expect(time).toEqual(new Date(timeStr).valueOf());
|
||||
done();
|
||||
});
|
||||
|
||||
it('can get expiration time in number format', (done) => {
|
||||
// Make mock request
|
||||
var timeNumber = 1426802708;
|
||||
var request = {
|
||||
body: {
|
||||
'expiration_time': timeNumber
|
||||
}
|
||||
}
|
||||
|
||||
var time = push.getExpirationTime(request);
|
||||
expect(time).toEqual(timeNumber * 1000);
|
||||
done();
|
||||
});
|
||||
|
||||
it('can throw on getExpirationTime in invalid format', (done) => {
|
||||
// Make mock request
|
||||
var request = {
|
||||
body: {
|
||||
'expiration_time': 'abcd'
|
||||
}
|
||||
}
|
||||
|
||||
expect(function(){
|
||||
push.getExpirationTime(request);
|
||||
}).toThrow();
|
||||
done();
|
||||
});
|
||||
});
|
||||
@@ -1,5 +1,60 @@
|
||||
var request = require('request');
|
||||
var dd = require('deep-diff');
|
||||
var hasAllPODobject = () => {
|
||||
var obj = new Parse.Object('HasAllPOD');
|
||||
obj.set('aNumber', 5);
|
||||
obj.set('aString', 'string');
|
||||
obj.set('aBool', true);
|
||||
obj.set('aDate', new Date());
|
||||
obj.set('aObject', {k1: 'value', k2: true, k3: 5});
|
||||
obj.set('aArray', ['contents', true, 5]);
|
||||
obj.set('aGeoPoint', new Parse.GeoPoint({latitude: 0, longitude: 0}));
|
||||
obj.set('aFile', new Parse.File('f.txt', { base64: 'V29ya2luZyBhdCBQYXJzZSBpcyBncmVhdCE=' }));
|
||||
var objACL = new Parse.ACL();
|
||||
objACL.setPublicWriteAccess(false);
|
||||
obj.setACL(objACL);
|
||||
return obj;
|
||||
}
|
||||
|
||||
var expectedResponseForHasAllPOD = {
|
||||
className: 'HasAllPOD',
|
||||
fields: {
|
||||
//Default fields
|
||||
ACL: {type: 'ACL'},
|
||||
createdAt: {type: 'Date'},
|
||||
updatedAt: {type: 'Date'},
|
||||
objectId: {type: 'String'},
|
||||
//Custom fields
|
||||
aNumber: {type: 'Number'},
|
||||
aString: {type: 'String'},
|
||||
aBool: {type: 'Boolean'},
|
||||
aDate: {type: 'Date'},
|
||||
aObject: {type: 'Object'},
|
||||
aArray: {type: 'Array'},
|
||||
aGeoPoint: {type: 'GeoPoint'},
|
||||
aFile: {type: 'File'}
|
||||
},
|
||||
};
|
||||
|
||||
var expectedResponseforHasPointersAndRelations = {
|
||||
className: 'HasPointersAndRelations',
|
||||
fields: {
|
||||
//Default fields
|
||||
ACL: {type: 'ACL'},
|
||||
createdAt: {type: 'Date'},
|
||||
updatedAt: {type: 'Date'},
|
||||
objectId: {type: 'String'},
|
||||
//Custom fields
|
||||
aPointer: {
|
||||
type: 'Pointer',
|
||||
targetClass: 'HasAllPOD',
|
||||
},
|
||||
aRelation: {
|
||||
type: 'Relation',
|
||||
targetClass: 'HasAllPOD',
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
describe('schemas', () => {
|
||||
it('requires the master key to get all schemas', (done) => {
|
||||
@@ -17,6 +72,21 @@ describe('schemas', () => {
|
||||
});
|
||||
});
|
||||
|
||||
it('requires the master key to get one schema', (done) => {
|
||||
request.get({
|
||||
url: 'http://localhost:8378/1/schemas/SomeSchema',
|
||||
json: true,
|
||||
headers: {
|
||||
'X-Parse-Application-Id': 'test',
|
||||
'X-Parse-REST-API-Key': 'rest',
|
||||
},
|
||||
}, (error, response, body) => {
|
||||
expect(response.statusCode).toEqual(401);
|
||||
expect(body.error).toEqual('unauthorized');
|
||||
done();
|
||||
});
|
||||
});
|
||||
|
||||
it('responds with empty list when there are no schemas', done => {
|
||||
request.get({
|
||||
url: 'http://localhost:8378/1/schemas',
|
||||
@@ -32,79 +102,66 @@ describe('schemas', () => {
|
||||
});
|
||||
|
||||
it('responds with a list of schemas after creating objects', done => {
|
||||
var obj1 = new Parse.Object('HasAllPOD');
|
||||
obj1.set('aNumber', 5);
|
||||
obj1.set('aString', 'string');
|
||||
obj1.set('aBool', true);
|
||||
obj1.set('aDate', new Date());
|
||||
obj1.set('aObject', {k1: 'value', k2: true, k3: 5});
|
||||
obj1.set('aArray', ['contents', true, 5]);
|
||||
obj1.set('aGeoPoint', new Parse.GeoPoint({latitude: 0, longitude: 0}));
|
||||
obj1.set('aFile', new Parse.File('f.txt', { base64: 'V29ya2luZyBhdCBQYXJzZSBpcyBncmVhdCE=' }));
|
||||
var obj1ACL = new Parse.ACL();
|
||||
obj1ACL.setPublicWriteAccess(false);
|
||||
obj1.setACL(obj1ACL);
|
||||
var obj1 = hasAllPODobject();
|
||||
obj1.save().then(savedObj1 => {
|
||||
var obj2 = new Parse.Object('HasPointersAndRelations');
|
||||
obj2.set('aPointer', savedObj1);
|
||||
var relation = obj2.relation('aRelation');
|
||||
relation.add(obj1);
|
||||
return obj2.save();
|
||||
}).then(() => {
|
||||
request.get({
|
||||
url: 'http://localhost:8378/1/schemas',
|
||||
json: true,
|
||||
headers: {
|
||||
'X-Parse-Application-Id': 'test',
|
||||
'X-Parse-Master-Key': 'test',
|
||||
},
|
||||
}, (error, response, body) => {
|
||||
var expected = {
|
||||
results: [expectedResponseForHasAllPOD,expectedResponseforHasPointersAndRelations]
|
||||
};
|
||||
expect(body).toEqual(expected);
|
||||
done();
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
obj1.save().then(savedObj1 => {
|
||||
var obj2 = new Parse.Object('HasPointersAndRelations');
|
||||
obj2.set('aPointer', savedObj1);
|
||||
var relation = obj2.relation('aRelation');
|
||||
relation.add(obj1);
|
||||
return obj2.save();
|
||||
}).then(() => {
|
||||
request.get({
|
||||
url: 'http://localhost:8378/1/schemas',
|
||||
json: true,
|
||||
headers: {
|
||||
'X-Parse-Application-Id': 'test',
|
||||
'X-Parse-Master-Key': 'test',
|
||||
},
|
||||
}, (error, response, body) => {
|
||||
var expected = {
|
||||
results: [
|
||||
{
|
||||
className: 'HasAllPOD',
|
||||
fields: {
|
||||
//Default fields
|
||||
ACL: {type: 'ACL'},
|
||||
createdAt: {type: 'Date'},
|
||||
updatedAt: {type: 'Date'},
|
||||
objectId: {type: 'String'},
|
||||
//Custom fields
|
||||
aNumber: {type: 'Number'},
|
||||
aString: {type: 'String'},
|
||||
aBool: {type: 'Boolean'},
|
||||
aDate: {type: 'Date'},
|
||||
aObject: {type: 'Object'},
|
||||
aArray: {type: 'Array'},
|
||||
aGeoPoint: {type: 'GeoPoint'},
|
||||
aFile: {type: 'File'}
|
||||
},
|
||||
},
|
||||
{
|
||||
className: 'HasPointersAndRelations',
|
||||
fields: {
|
||||
//Default fields
|
||||
ACL: {type: 'ACL'},
|
||||
createdAt: {type: 'Date'},
|
||||
updatedAt: {type: 'Date'},
|
||||
objectId: {type: 'String'},
|
||||
//Custom fields
|
||||
aPointer: {
|
||||
type: 'Pointer',
|
||||
targetClass: 'HasAllPOD',
|
||||
},
|
||||
aRelation: {
|
||||
type: 'Relation',
|
||||
targetClass: 'HasAllPOD',
|
||||
},
|
||||
},
|
||||
}
|
||||
]
|
||||
};
|
||||
expect(body).toEqual(expected);
|
||||
done();
|
||||
})
|
||||
it('responds with a single schema', done => {
|
||||
var obj = hasAllPODobject();
|
||||
obj.save().then(() => {
|
||||
request.get({
|
||||
url: 'http://localhost:8378/1/schemas/HasAllPOD',
|
||||
json: true,
|
||||
headers: {
|
||||
'X-Parse-Application-Id': 'test',
|
||||
'X-Parse-Master-Key': 'test',
|
||||
},
|
||||
}, (error, response, body) => {
|
||||
expect(body).toEqual(expectedResponseForHasAllPOD);
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('treats class names case sensitively', done => {
|
||||
var obj = hasAllPODobject();
|
||||
obj.save().then(() => {
|
||||
request.get({
|
||||
url: 'http://localhost:8378/1/schemas/HASALLPOD',
|
||||
json: true,
|
||||
headers: {
|
||||
'X-Parse-Application-Id': 'test',
|
||||
'X-Parse-Master-Key': 'test',
|
||||
},
|
||||
}, (error, response, body) => {
|
||||
expect(response.statusCode).toEqual(400);
|
||||
expect(body).toEqual({
|
||||
code: 103,
|
||||
error: 'class HASALLPOD does not exist',
|
||||
});
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -61,6 +61,29 @@ describe('transformCreate', () => {
|
||||
// This just checks that it doesn't crash, but it should check format.
|
||||
done();
|
||||
});
|
||||
|
||||
describe('GeoPoints', () => {
|
||||
it('plain', (done) => {
|
||||
var geoPoint = {__type: 'GeoPoint', longitude: 180, latitude: -180};
|
||||
var out = transform.transformCreate(dummySchema, null, {location: geoPoint});
|
||||
expect(out.location).toEqual([180, -180]);
|
||||
done();
|
||||
});
|
||||
|
||||
it('in array', (done) => {
|
||||
var geoPoint = {__type: 'GeoPoint', longitude: 180, latitude: -180};
|
||||
var out = transform.transformCreate(dummySchema, null, {locations: [geoPoint, geoPoint]});
|
||||
expect(out.locations).toEqual([geoPoint, geoPoint]);
|
||||
done();
|
||||
});
|
||||
|
||||
it('in sub-object', (done) => {
|
||||
var geoPoint = {__type: 'GeoPoint', longitude: 180, latitude: -180};
|
||||
var out = transform.transformCreate(dummySchema, null, { locations: { start: geoPoint }});
|
||||
expect(out).toEqual({ locations: { start: geoPoint } });
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('transformWhere', () => {
|
||||
|
||||
132
transform.js
132
transform.js
@@ -363,20 +363,17 @@ function transformAtom(atom, force, options) {
|
||||
objectId: atom.objectId
|
||||
};
|
||||
}
|
||||
if (atom.__type == 'Date') {
|
||||
return new Date(atom.iso);
|
||||
if (DateCoder.isValidJSON(atom)) {
|
||||
return DateCoder.JSONToDatabase(atom);
|
||||
}
|
||||
if (atom.__type == 'GeoPoint') {
|
||||
return [atom.longitude, atom.latitude];
|
||||
if (BytesCoder.isValidJSON(atom)) {
|
||||
return BytesCoder.JSONToDatabase(atom);
|
||||
}
|
||||
if (atom.__type == 'Bytes') {
|
||||
return new mongodb.Binary(new Buffer(atom.base64, 'base64'));
|
||||
if (GeoPointCoder.isValidJSON(atom)) {
|
||||
return (inArray || inObject ? atom : GeoPointCoder.JSONToDatabase(atom));
|
||||
}
|
||||
if (atom.__type == 'File') {
|
||||
if (!inArray && !inObject) {
|
||||
return atom.name;
|
||||
}
|
||||
return atom;
|
||||
if (FileCoder.isValidJSON(atom)) {
|
||||
return (inArray || inObject ? atom : FileCoder.JSONToDatabase(atom));
|
||||
}
|
||||
|
||||
if (force) {
|
||||
@@ -617,11 +614,8 @@ function untransformObject(schema, className, mongoObject) {
|
||||
return Parse._encode(mongoObject);
|
||||
}
|
||||
|
||||
if (mongoObject instanceof mongodb.Binary) {
|
||||
return {
|
||||
__type: 'Bytes',
|
||||
base64: mongoObject.buffer.toString('base64')
|
||||
};
|
||||
if (BytesCoder.isValidDatabaseObject(mongoObject)) {
|
||||
return BytesCoder.databaseToJSON(mongoObject);
|
||||
}
|
||||
|
||||
var restObject = untransformACL(mongoObject);
|
||||
@@ -696,20 +690,14 @@ function untransformObject(schema, className, mongoObject) {
|
||||
//} else if (mongoObject[key] === null) {
|
||||
//break;
|
||||
} else {
|
||||
var expected = schema.getExpectedType(className, key);
|
||||
if (expected == 'file' && mongoObject[key]) {
|
||||
restObject[key] = {
|
||||
__type: 'File',
|
||||
name: mongoObject[key]
|
||||
};
|
||||
var expectedType = schema.getExpectedType(className, key);
|
||||
var value = mongoObject[key];
|
||||
if (expectedType === 'file' && FileCoder.isValidDatabaseObject(value)) {
|
||||
restObject[key] = FileCoder.databaseToJSON(value);
|
||||
break;
|
||||
}
|
||||
if (expected == 'geopoint') {
|
||||
restObject[key] = {
|
||||
__type: 'GeoPoint',
|
||||
latitude: mongoObject[key][1],
|
||||
longitude: mongoObject[key][0]
|
||||
};
|
||||
if (expectedType === 'geopoint' && GeoPointCoder.isValidDatabaseObject(value)) {
|
||||
restObject[key] = GeoPointCoder.databaseToJSON(value);
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -723,6 +711,94 @@ function untransformObject(schema, className, mongoObject) {
|
||||
}
|
||||
}
|
||||
|
||||
var DateCoder = {
|
||||
JSONToDatabase(json) {
|
||||
return new Date(json.iso);
|
||||
},
|
||||
|
||||
isValidJSON(value) {
|
||||
return (typeof value === 'object' &&
|
||||
value !== null &&
|
||||
value.__type === 'Date'
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
var BytesCoder = {
|
||||
databaseToJSON(object) {
|
||||
return {
|
||||
__type: 'Bytes',
|
||||
base64: object.buffer.toString('base64')
|
||||
};
|
||||
},
|
||||
|
||||
isValidDatabaseObject(object) {
|
||||
return (object instanceof mongodb.Binary);
|
||||
},
|
||||
|
||||
JSONToDatabase(json) {
|
||||
return new mongodb.Binary(new Buffer(json.base64, 'base64'));
|
||||
},
|
||||
|
||||
isValidJSON(value) {
|
||||
return (typeof value === 'object' &&
|
||||
value !== null &&
|
||||
value.__type === 'Bytes'
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
var GeoPointCoder = {
|
||||
databaseToJSON(object) {
|
||||
return {
|
||||
__type: 'GeoPoint',
|
||||
latitude: object[1],
|
||||
longitude: object[0]
|
||||
}
|
||||
},
|
||||
|
||||
isValidDatabaseObject(object) {
|
||||
return (object instanceof Array &&
|
||||
object.length == 2
|
||||
);
|
||||
},
|
||||
|
||||
JSONToDatabase(json) {
|
||||
return [ json.longitude, json.latitude ];
|
||||
},
|
||||
|
||||
isValidJSON(value) {
|
||||
return (typeof value === 'object' &&
|
||||
value !== null &&
|
||||
value.__type === 'GeoPoint'
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
var FileCoder = {
|
||||
databaseToJSON(object) {
|
||||
return {
|
||||
__type: 'File',
|
||||
name: object
|
||||
}
|
||||
},
|
||||
|
||||
isValidDatabaseObject(object) {
|
||||
return (typeof object === 'string');
|
||||
},
|
||||
|
||||
JSONToDatabase(json) {
|
||||
return json.name;
|
||||
},
|
||||
|
||||
isValidJSON(value) {
|
||||
return (typeof value === 'object' &&
|
||||
value !== null &&
|
||||
value.__type === 'File'
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
transformKey: transformKey,
|
||||
transformCreate: transformCreate,
|
||||
|
||||
Reference in New Issue
Block a user