Add more postgres support (#2080)

* reload the right data

More passing postgres tests

Handle schema updates, and $in for non array columns

remove authdata from user and implement ensureUniqueness

Make some tests work, detect existing classes

Throw proper error for unique index violation

* fix findOneAndUpdate
This commit is contained in:
Drew
2016-06-16 15:39:05 -07:00
committed by Peter J. Shin
parent 1a75101146
commit 0ff35e18f0
8 changed files with 194 additions and 127 deletions

View File

@@ -8,10 +8,11 @@ var request = require('request');
const rp = require('request-promise');
const Parse = require("parse/node");
let Config = require('../src/Config');
let defaultColumns = require('../src/Controllers/SchemaController').defaultColumns;
const SchemaController = require('../src/Controllers/SchemaController');
var TestUtils = require('../src/index').TestUtils;
const deepcopy = require('deepcopy');
const requiredUserFields = { fields: Object.assign({}, defaultColumns._Default, defaultColumns._User) };
const userSchema = SchemaController.convertSchemaToAdapterSchema({ className: '_User', fields: Object.assign({}, SchemaController.defaultColumns._Default, SchemaController.defaultColumns._User) });
describe('miscellaneous', function() {
it('create a GameScore object', function(done) {
@@ -131,24 +132,27 @@ describe('miscellaneous', function() {
let config = new Config('test');
// Remove existing data to clear out unique index
TestUtils.destroyAllDataPermanently()
.then(() => config.database.adapter.createObject('_User', requiredUserFields, { objectId: 'x', username: 'u' }))
.then(() => config.database.adapter.createObject('_User', requiredUserFields, { objectId: 'y', username: 'u' }))
.then(() => config.database.adapter.createClass('_User', userSchema))
.then(() => config.database.adapter.createObject('_User', userSchema, { objectId: 'x', username: 'u' }).catch(fail))
.then(() => config.database.adapter.createObject('_User', userSchema, { objectId: 'y', username: 'u' }).catch(fail))
// Create a new server to try to recreate the unique indexes
.then(reconfigureServer)
.catch(() => {
let user = new Parse.User();
user.setPassword('asdf');
user.setUsername('zxcv');
// Sign up with new email still works
return user.signUp().catch(fail);
})
.then(() => {
let user = new Parse.User();
user.setPassword('asdf');
user.setUsername('u');
// sign up with duplicate username doesn't
return user.signUp()
})
.then(result => {
fail('should not have been able to sign up');
done();
})
.catch(error => {
expect(error.code).toEqual(Parse.Error.USERNAME_TAKEN);
done();
@@ -159,8 +163,9 @@ describe('miscellaneous', function() {
let config = new Config('test');
// Remove existing data to clear out unique index
TestUtils.destroyAllDataPermanently()
.then(() => config.database.adapter.createObject('_User', requiredUserFields, { objectId: 'x', email: 'a@b.c' }))
.then(() => config.database.adapter.createObject('_User', requiredUserFields, { objectId: 'y', email: 'a@b.c' }))
.then(() => config.database.adapter.createClass('_User', userSchema))
.then(() => config.database.adapter.createObject('_User', userSchema, { objectId: 'x', email: 'a@b.c' }))
.then(() => config.database.adapter.createObject('_User', userSchema, { objectId: 'y', email: 'a@b.c' }))
.then(reconfigureServer)
.catch(() => {
let user = new Parse.User();
@@ -184,7 +189,8 @@ describe('miscellaneous', function() {
it('ensure that if you try to sign up a user with a unique username and email, but duplicates in some other field that has a uniqueness constraint, you get a regular duplicate value error', done => {
let config = new Config('test');
config.database.adapter.ensureUniqueness('_User', requiredUserFields, ['randomField'])
config.database.adapter.addFieldIfNotExists('_User', 'randomField', { type: 'String' })
.then(() => config.database.adapter.ensureUniqueness('_User', userSchema, ['randomField']))
.then(() => {
let user = new Parse.User();
user.setPassword('asdf');
@@ -277,7 +283,7 @@ describe('miscellaneous', function() {
expect(results.length).toEqual(1);
done();
}, (error) => {
fail(error);
fail(JSON.stringify(error));
done();
});
});
@@ -292,8 +298,8 @@ describe('miscellaneous', function() {
}).then((results) => {
expect(results.length).toEqual(100);
done();
}, (error) => {
fail(error);
}, error => {
fail(JSON.stringify(error));
done();
});
});
@@ -335,8 +341,8 @@ describe('miscellaneous', function() {
fail(error);
done();
});
}, function(error) {
fail(error);
}, error => {
fail(JSON.stringify(error));
done();
});
});

View File

@@ -788,11 +788,10 @@ describe('SchemaController', () => {
_id: '_User',
username: { type: 'String' },
password: { type: 'String' },
authData: { type: 'Object' },
email: { type: 'String' },
emailVerified: { type: 'Boolean' },
},{
authData: { type: 'String' },
emailVerified: { type: 'String' },
customField: { type: 'String' },
})).toEqual({
customField: { type: 'String' }

View File

@@ -86,6 +86,21 @@ var pointersAndRelationsSchema = {
classLevelPermissions: defaultClassLevelPermissions
}
const userSchema = {
"className": "_User",
"fields": {
"objectId": {"type": "String"},
"createdAt": {"type": "Date"},
"updatedAt": {"type": "Date"},
"ACL": {"type": "ACL"},
"username": {"type": "String"},
"password": {"type": "String"},
"email": {"type": "String"},
"emailVerified": {"type": "Boolean"}
},
"classLevelPermissions": defaultClassLevelPermissions,
}
var noAuthHeaders = {
'X-Parse-Application-Id': 'test',
};
@@ -139,13 +154,13 @@ describe('schemas', () => {
});
});
it('responds with empty list when there are no schemas', done => {
it('creates _User schema when server starts', done => {
request.get({
url: 'http://localhost:8378/1/schemas',
json: true,
headers: masterKeyHeaders,
}, (error, response, body) => {
expect(body.results).toEqual([]);
expect(dd(body.results, [userSchema])).toEqual();
done();
});
});
@@ -165,9 +180,9 @@ describe('schemas', () => {
headers: masterKeyHeaders,
}, (error, response, body) => {
var expected = {
results: [plainOldDataSchema,pointersAndRelationsSchema]
results: [userSchema,plainOldDataSchema,pointersAndRelationsSchema]
};
expect(body).toEqual(expected);
expect(dd(body, expected)).toEqual(undefined);
done();
})
});
@@ -328,31 +343,43 @@ describe('schemas', () => {
it('responds with all fields when getting incomplete schema', done => {
config.database.loadSchema()
.then(schemaController => schemaController.addClassIfNotExists('_User', {}, defaultClassLevelPermissions))
.then(schemaController => schemaController.addClassIfNotExists('_Installation', {}, defaultClassLevelPermissions))
.then(() => {
request.get({
url: 'http://localhost:8378/1/schemas/_User',
url: 'http://localhost:8378/1/schemas/_Installation',
headers: masterKeyHeaders,
json: true
}, (error, response, body) => {
expect(body).toEqual({
className: '_User',
expect(dd(body,{
className: '_Installation',
fields: {
objectId: {type: 'String'},
updatedAt: {type: 'Date'},
createdAt: {type: 'Date'},
username: {type: 'String'},
password: {type: 'String'},
authData: {type: 'Object'},
email: {type: 'String'},
emailVerified: {type: 'Boolean'},
installationId: {type: 'String'},
deviceToken: {type: 'String'},
channels: {type: 'Array'},
deviceType: {type: 'String'},
pushType: {type: 'String'},
GCMSenderId: {type: 'String'},
timeZone: {type: 'String'},
badge: {type: 'Number'},
appIdentifier: {type: 'String'},
localeIdentifier: {type: 'String'},
appVersion: {type: 'String'},
appName: {type: 'String'},
parseVersion: {type: 'String'},
ACL: {type: 'ACL'}
},
classLevelPermissions: defaultClassLevelPermissions
});
})).toBeUndefined();
done();
});
})
.catch(error => {
fail(JSON.stringify(error))
done();
});
});
it('lets you specify class name in both places', done => {
@@ -634,7 +661,7 @@ describe('schemas', () => {
}
}
}, (error, response, body) => {
expect(body).toEqual({
expect(dd(body,{
className: '_User',
fields: {
objectId: {type: 'String'},
@@ -642,20 +669,19 @@ describe('schemas', () => {
createdAt: {type: 'Date'},
username: {type: 'String'},
password: {type: 'String'},
authData: {type: 'Object'},
email: {type: 'String'},
emailVerified: {type: 'Boolean'},
newField: {type: 'String'},
ACL: {type: 'ACL'}
},
classLevelPermissions: defaultClassLevelPermissions
});
})).toBeUndefined();
request.get({
url: 'http://localhost:8378/1/schemas/_User',
headers: masterKeyHeaders,
json: true
}, (error, response, body) => {
expect(body).toEqual({
expect(dd(body,{
className: '_User',
fields: {
objectId: {type: 'String'},
@@ -663,14 +689,13 @@ describe('schemas', () => {
createdAt: {type: 'Date'},
username: {type: 'String'},
password: {type: 'String'},
authData: {type: 'Object'},
email: {type: 'String'},
emailVerified: {type: 'Boolean'},
newField: {type: 'String'},
ACL: {type: 'ACL'}
},
classLevelPermissions: defaultClassLevelPermissions
});
})).toBeUndefined();
done();
});
});
@@ -1541,14 +1566,13 @@ describe('schemas', () => {
setPermissionsOnClass('_User', {
'create': {'*': true},
'addField': {}
}).then(() => {
}, true).then(() => {
return Parse.User.signUp('foo', 'bar');
}).then((user) => {
expect(user.getUsername()).toBe('foo');
done()
}, (err) => {
console.error(err);
fail('should create user');
}, error => {
fail(JSON.stringify(error));
done();
})
})

View File

@@ -2,6 +2,8 @@ const pgp = require('pg-promise')();
const PostgresRelationDoesNotExistError = '42P01';
const PostgresDuplicateRelationError = '42P07';
const PostgresDuplicateColumnError = '42701';
const PostgresUniqueIndexViolationError = '23505';
const parseTypeToPostgresType = type => {
switch (type.type) {
@@ -21,6 +23,62 @@ const parseTypeToPostgresType = type => {
}
};
// Translates a Parse-format query object into a parameterized Postgres WHERE
// clause for pg-promise. `index` is the first free $-placeholder number; every
// entry pushed onto `values` consumes exactly one placeholder. Returns
// { pattern, values, index } where `index` is the next free placeholder, so
// recursive calls (and callers that append more conditions) can continue
// numbering without collisions.
// Throws Parse.Error(OPERATION_FORBIDDEN) for query operators not yet supported.
const buildWhereClause = ({ schema, query, index }) => {
  let patterns = [];
  let values = [];
  for (let fieldName in query) {
    let fieldValue = query[fieldName];
    if (fieldName === '$or') {
      // Each subquery becomes its own parenthesized clause; the subqueries are
      // OR'd together (joining them with AND, as before, broke $or semantics).
      let orPatterns = [];
      fieldValue.forEach(subQuery => {
        let result = buildWhereClause({ schema, query: subQuery, index });
        orPatterns.push(`(${result.pattern})`);
        values.push(...result.values);
        // Advance past the placeholders the subquery consumed so conditions
        // after the $or don't reuse the same $n numbers.
        index += result.values.length;
      });
      patterns.push(`(${orPatterns.join(' OR ')})`);
    } else if (typeof fieldValue === 'string') {
      patterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (fieldValue.$ne !== undefined) {
      // Check against undefined, not truthiness, so falsy comparands
      // ($ne: 0, '', false, null) are still handled.
      if (fieldValue.$ne === null) {
        // `col <> NULL` is never true in SQL; $ne: null means "is set".
        patterns.push(`$${index}:name IS NOT NULL`);
        values.push(fieldName);
        index += 1;
      } else {
        patterns.push(`$${index}:name <> $${index + 1}`);
        values.push(fieldName, fieldValue.$ne);
        index += 2;
      }
    } else if (Array.isArray(fieldValue.$in) && schema.fields[fieldName].type === 'Array') {
      let inPatterns = [];
      let allowNull = false;
      values.push(fieldName);
      fieldValue.$in.forEach(listElem => {
        if (listElem === null) {
          // null can't be a Postgres array element here; match it with IS NULL.
          allowNull = true;
        } else {
          values.push(listElem);
          // Number placeholders by how many non-null values have been pushed,
          // so multiple embedded nulls can't skew the offsets.
          inPatterns.push(`$${index + inPatterns.length + 1}`);
        }
      });
      if (allowNull) {
        patterns.push(`($${index}:name IS NULL OR $${index}:name && ARRAY[${inPatterns.join(',')}])`);
      } else {
        patterns.push(`$${index}:name && ARRAY[${inPatterns.join(',')}]`);
      }
      index = index + 1 + inPatterns.length;
    } else if (Array.isArray(fieldValue.$in) && schema.fields[fieldName].type === 'String') {
      let inPatterns = [];
      values.push(fieldName);
      fieldValue.$in.forEach((listElem, listIndex) => {
        values.push(listElem);
        inPatterns.push(`$${index + 1 + listIndex}`);
      });
      patterns.push(`$${index}:name IN (${inPatterns.join(',')})`);
      index = index + 1 + inPatterns.length;
    } else if (fieldValue.__type === 'Pointer') {
      patterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue.objectId);
      index += 2;
    } else {
      throw new Parse.Error(Parse.Error.OPERATION_FORBIDDEN, `Postgres doesn't support this query type yet`);
    }
  }
  return { pattern: patterns.join(' AND '), values, index };
}
export class PostgresStorageAdapter {
// Private
_collectionPrefix: string;
@@ -65,7 +123,15 @@ export class PostgresStorageAdapter {
valuesArray.push(parseTypeToPostgresType(parseType));
patternsArray.push(`$${index * 2 + 2}:name $${index * 2 + 3}:raw`);
});
return this._client.query(`CREATE TABLE $1:name (${patternsArray.join(',')})`, [className, ...valuesArray])
return this._ensureSchemaCollectionExists()
.then(() => this._client.query(`CREATE TABLE $1:name (${patternsArray.join(',')})`, [className, ...valuesArray]))
.catch(error => {
if (error.code === PostgresDuplicateRelationError) {
// Table already exists, must have been created by a different request. Ignore error.
} else {
throw error;
}
})
.then(() => this._client.query('INSERT INTO "_SCHEMA" ("className", "schema", "isParseClass") VALUES ($<className>, $<schema>, true)', { className, schema }))
}
@@ -75,11 +141,14 @@ export class PostgresStorageAdapter {
.catch(error => {
if (error.code === PostgresRelationDoesNotExistError) {
return this.createClass(className, { fields: { [fieldName]: type } })
} else if (error.code === PostgresDuplicateColumnError) {
// Column already exists, created by other request. Carry on to
// See if it's the right type.
} else {
throw error;
}
})
.then(() => this._client.query('SELECT "schema" FROM "_SCHEMA"', { className }))
.then(() => this._client.query('SELECT "schema" FROM "_SCHEMA" WHERE "className" = $<className>', { className }))
.then(result => {
if (fieldName in result[0].schema) {
throw "Attempted to add a field that already exists";
@@ -155,7 +224,7 @@ export class PostgresStorageAdapter {
return this._client.query('SELECT * FROM "_SCHEMA" WHERE "className"=$<className>', { className })
.then(result => {
if (result.length === 1) {
return result[0];
return result[0].schema;
} else {
throw undefined;
}
@@ -166,11 +235,6 @@ export class PostgresStorageAdapter {
createObject(className, schema, object) {
let columnsArray = [];
let valuesArray = [];
console.log('creating');
console.log(schema);
console.log(object);
console.log(className);
console.log(new Error().stack);
Object.keys(object).forEach(fieldName => {
columnsArray.push(fieldName);
switch (schema.fields[fieldName].type) {
@@ -186,9 +250,18 @@ export class PostgresStorageAdapter {
}
});
let columnsPattern = columnsArray.map((col, index) => `$${index + 2}:name`).join(',');
let valuesPattern = valuesArray.map((val, index) => `$${index + 2 + columnsArray.length}`).join(',');
return this._client.query(`INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`, [className, ...columnsArray, ...valuesArray])
let valuesPattern = valuesArray.map((val, index) => `$${index + 2 + columnsArray.length}${(['_rperm','_wperm'].includes(columnsArray[index])) ? '::text[]' : ''}`).join(',');
let qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`
let values = [className, ...columnsArray, ...valuesArray]
return this._client.query(qs, values)
.then(() => ({ ops: [object] }))
.catch(error => {
if (error.code === PostgresUniqueIndexViolationError) {
throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'A duplicate value for a field with unique values was provided');
} else {
throw error;
}
})
}
// Remove all objects that match the given Parse Query.
@@ -214,8 +287,7 @@ export class PostgresStorageAdapter {
findOneAndUpdate(className, schema, query, update) {
let conditionPatterns = [];
let updatePatterns = [];
let values = []
values.push(className);
let values = [className]
let index = 2;
for (let fieldName in update) {
@@ -233,26 +305,10 @@ export class PostgresStorageAdapter {
}
}
for (let fieldName in query) {
let fieldValue = query[fieldName];
if (typeof fieldValue === 'string') {
conditionPatterns.push(`$${index}:name = $${index + 1}`);
values.push(fieldName, fieldValue);
index += 2;
} else if (Array.isArray(fieldValue.$in)) {
let inPatterns = [];
values.push(fieldName);
fieldValue.$in.forEach((listElem, listIndex) => {
values.push(listElem);
inPatterns.push(`$${index + 1 + listIndex}`);
});
conditionPatterns.push(`$${index}:name && ARRAY[${inPatterns.join(',')}]`);
index = index + 1 + inPatterns.length;
} else {
return Promise.reject(new Parse.Error(Parse.Error.OPERATION_FORBIDDEN, `Postgres doesn't support this type of request yet`));
}
}
let qs = `UPDATE $1:name SET ${updatePatterns.join(',')} WHERE ${conditionPatterns.join(' AND ')} RETURNING *`;
let where = buildWhereClause({ schema, index, query })
values.push(...where.values);
let qs = `UPDATE $1:name SET ${updatePatterns.join(',')} WHERE ${where.pattern} RETURNING *`;
return this._client.query(qs, values)
.then(val => {
return val[0];
@@ -264,42 +320,16 @@ export class PostgresStorageAdapter {
return Promise.reject('Not implented yet.')
}
// Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }.
find(className, schema, query, { skip, limit, sort }) {
let conditionPatterns = [];
let values = [];
values.push(className);
let index = 2;
let values = [className];
let where = buildWhereClause({ schema, query, index: 2 })
values.push(...where.values);
for (let fieldName in query) {
let fieldValue = query[fieldName];
if (typeof fieldValue === 'string') {
conditionPatterns.push(`$${index}:name = $${index + 1}`);
values.push(fieldName, fieldValue);
index += 2;
} else if (fieldValue.$ne) {
conditionPatterns.push(`$${index}:name <> $${index + 1}`);
values.push(fieldName, fieldValue.$ne)
index += 2;
} else if (Array.isArray(fieldValue.$in)) {
let inPatterns = [];
values.push(fieldName);
fieldValue.$in.forEach((listElem, listIndex) => {
values.push(listElem);
inPatterns.push(`$${index + 1 + listIndex}`);
});
conditionPatterns.push(`$${index}:name IN (${inPatterns.join(',')})`);
index = index + 1 + inPatterns.length;
} else if (fieldValue.__type === 'Pointer') {
conditionPatterns.push(`$${index}:name = $${index + 1}`);
values.push(fieldName, fieldValue.objectId);
index += 2;
} else {
return Promise.reject(new Parse.Error(Parse.Error.OPERATION_FORBIDDEN, "Postgres doesn't support this query type yet"));
}
const qs = `SELECT * FROM $1:name WHERE ${where.pattern} ${limit !== undefined ? `LIMIT $${values.length + 1}` : ''}`;
if (limit !== undefined) {
values.push(limit);
}
return this._client.query(`SELECT * FROM $1:name WHERE ${conditionPatterns.join(' AND ')}`, values)
return this._client.query(qs, values)
.then(results => results.map(object => {
Object.keys(schema.fields).filter(field => schema.fields[field].type === 'Pointer').forEach(fieldName => {
object[fieldName] = { objectId: object[fieldName], __type: 'Pointer', className: schema.fields[fieldName].targetClass };
@@ -331,7 +361,12 @@ export class PostgresStorageAdapter {
// Way of determining if a field is nullable. Undefined doesn't count against uniqueness,
// which is why we use sparse indexes.
ensureUniqueness(className, schema, fieldNames) {
return Promise.resolve('ensureUniqueness not implented yet.')
// Use the same name for every ensureUniqueness attempt, because Postgres
// will happily create the same index with multiple names.
const constraintName = `unique_${fieldNames.sort().join('_')}`;
const constraintPatterns = fieldNames.map((fieldName, index) => `$${index + 3}:name`);
const qs = `ALTER TABLE $1:name ADD CONSTRAINT $2:name UNIQUE (${constraintPatterns.join(',')})`;
return this._client.query(qs,[className, constraintName, ...fieldNames])
}
// Executes a count.

View File

@@ -418,6 +418,7 @@ DatabaseController.prototype.create = function(className, object, { acl } = {})
return (isMaster ? Promise.resolve() : schemaController.validatePermission(className, aclGroup, 'create'))
.then(() => this.handleRelationUpdates(className, null, object))
.then(() => schemaController.enforceClassExists(className))
.then(() => schemaController.reloadData())
.then(() => schemaController.getOneSchema(className, true))
.then(schema => this.adapter.createObject(className, SchemaController.convertSchemaToAdapterSchema(schema), object))
.then(result => sanitizeDatabaseResult(originalObject, result.ops[0]));

View File

@@ -29,7 +29,6 @@ const defaultColumns = Object.freeze({
_User: {
"username": {type:'String'},
"password": {type:'String'},
"authData": {type:'Object'},
"email": {type:'String'},
"emailVerified": {type:'Boolean'},
},
@@ -241,6 +240,7 @@ const convertAdapterSchemaToParseSchema = ({...schema}) => {
schema.fields.ACL = { type: 'ACL' };
if (schema.className === '_User') {
delete schema.fields.authData; //Auth data is implicit
delete schema.fields._hashed_password;
schema.fields.password = { type: 'String' };
}
@@ -248,14 +248,14 @@ const convertAdapterSchemaToParseSchema = ({...schema}) => {
return schema;
}
const injectDefaultSchema = schema => ({
className: schema.className,
const injectDefaultSchema = ({className, fields, classLevelPermissions}) => ({
className,
fields: {
...defaultColumns._Default,
...(defaultColumns[schema.className] || {}),
...schema.fields,
...(defaultColumns[className] || {}),
...fields,
},
classLevelPermissions: schema.classLevelPermissions,
classLevelPermissions,
})
const dbTypeMatchesObjectType = (dbType, objectType) => {
@@ -313,7 +313,7 @@ class SchemaController {
return Promise.resolve(this.data[className]);
}
return this._dbAdapter.getClass(className)
.then(injectDefaultSchema);
.then(injectDefaultSchema)
}
// Create a new class that includes the three default fields.
@@ -524,12 +524,14 @@ class SchemaController {
return this._dbAdapter.addFieldIfNotExists(className, fieldName, type).then(() => {
// The update succeeded. Reload the schema
return this.reloadData();
}, () => {
}, error => {
//TODO: introspect the error and only reload if the error is one for which it makes sense to reload
// The update failed. This can be okay - it might have been a race
// condition where another client updated the schema in the same
// way that we wanted to. So, just reload the schema
return this.reloadData();
}).then(() => {
}).then(error => {
// Ensure that the schema now validates
if (!dbTypeMatchesObjectType(this.getExpectedType(className, fieldName), type)) {
throw new Parse.Error(Parse.Error.INVALID_JSON, `Could not add field ${fieldName}`);
@@ -609,7 +611,8 @@ class SchemaController {
// Every object has ACL implicitly.
continue;
}
promise = thenValidateField(promise, className, fieldName, expected);
promise = promise.then(schema => schema.enforceFieldExists(className, fieldName, expected));
}
promise = thenValidateRequiredColumns(promise, className, object, query);
return promise;
@@ -741,14 +744,6 @@ function buildMergedSchemaObject(existingFields, putRequest) {
return newSchema;
}
// Given a schema promise, construct another schema promise that
// validates this field once the schema loads.
function thenValidateField(schemaPromise, className, key, type) {
return schemaPromise.then((schema) => {
return schema.enforceFieldExists(className, key, type);
});
}
// Given a schema promise, construct another schema promise that
// validates this field once the schema loads.
function thenValidateRequiredColumns(schemaPromise, className, object, query) {

View File

@@ -194,13 +194,20 @@ class ParseServer {
const databaseController = new DatabaseController(databaseAdapter);
const hooksController = new HooksController(appId, databaseController, webhookKey);
let usernameUniqueness = databaseController.adapter.ensureUniqueness('_User', requiredUserFields, ['username'])
let userClassPromise = databaseController.loadSchema()
.then(schema => schema.enforceClassExists('_User'))
let usernameUniqueness = userClassPromise
.then(() => databaseController.adapter.ensureUniqueness('_User', requiredUserFields, ['username']))
.catch(error => {
logger.warn('Unable to ensure uniqueness for usernames: ', error);
return Promise.reject();
});
let emailUniqueness = databaseController.adapter.ensureUniqueness('_User', requiredUserFields, ['email'])
let emailUniqueness = userClassPromise
.then(() => databaseController.adapter.ensureUniqueness('_User', requiredUserFields, ['email']))
.catch(error => {
logger.warn('Unabled to ensure uniqueness for user email addresses: ', error);
return Promise.reject();

View File

@@ -89,7 +89,7 @@ RestWrite.prototype.execute = function() {
return this.cleanUserAuthData();
}).then(() => {
return this.response;
});
})
};
// Uses the Auth object to get the list of roles, adds the user id