/*
 * History (squashed commit log preserved from header residue):
 * Start DB runner from tests * Connect GridstoreAdapter only when needed * removes unused package * better test errors reporting * Adds support for __op.Delete * Better test error reporting * Makes sure all tests can run without crashing * Use xdescribe to skip test suite * Removes unused dependencies * Let volatiles classes be created with PG on start * Do not fail if class dont exist * adds index.spec.js to the pg suite * Use a new config each test to prevent side effects * Enable EmailVerificationToken specs with pg * Makes sure failure output is not cut * Reduces number of ignored tests in ParseObject.spec * Inspect reconfiguration errors * Mark GlobalConfig is incompatible with PG * PG: Nested JSON queries and updates * Enable working specs in ParseUser * Sets default logLevel in tests to undefined * Adds File type support, restores purchaseValidation specs * Adds support for updating jsonb objects * Proper implementation of deleteByQuery and ORs * xit only failing tests * Nit on ParseAPI spec * add sorting operator * properly bound order keys * reverts describe_only_db behavior * Enables passing tests * Adds basic support for relations, upsertOneObject aliased to createObject * progress on queries options * Fix ACL update related problems * Creates relation tables on class creation * Adds Relation tests * remove flaky tests * use promises instead of CB * disable flaky test * nits * Fixes on schema spec * fix failures * Basic GeoPoint support * Adds support for $nearSphere/$maxDistance geopoint queries * enable passing tests * drop tables afterEach for PG, clean up relation tables too * Better initialization/dropTables
 */
'use strict';

// NOTE(review): 'request' does not appear to be referenced in this spec —
// confirm against the full file before removing the dependency.
var request = require('request');

const Parse = require("parse/node");

// Each spec below builds its own `new Config('test')` to avoid cross-test
// side effects on the shared database adapter.
let Config = require('../src/Config');
describe('Uniqueness', function() {
it('fail when create duplicate value in unique field', done => {
  // Save one object, make the field unique, then try to save a duplicate.
  let original = new Parse.Object('UniqueField');
  original.set('unique', 'value');
  original.save()
  .then(() => {
    expect(original.id).not.toBeUndefined();
    // Build the unique index only after the class and field exist.
    let schema = { fields: { unique: { __type: 'String' } } };
    return new Config('test').database.adapter.ensureUniqueness('UniqueField', schema, ['unique']);
  })
  .then(() => {
    // A second object carrying the same value must now violate the index.
    let duplicate = new Parse.Object('UniqueField');
    duplicate.set('unique', 'value');
    return duplicate.save();
  })
  .then(() => {
    fail('Saving duplicate field should have failed');
    done();
  }, error => {
    expect(error.code).toEqual(Parse.Error.DUPLICATE_VALUE);
    done();
  });
});
it('unique indexing works on pointer fields', done => {
  // The uniqueness constraint is placed on a Pointer column, not a scalar.
  let target = new Parse.Object('UniquePointer');
  target.save({ string: 'who cares' })
  .then(() => target.save({ ptr: target }))
  .then(() => {
    let schema = {
      fields: {
        string: { __type: 'String' },
        ptr: { __type: 'Pointer', targetClass: 'UniquePointer' }
      }
    };
    // Unique index over the pointer field only.
    return new Config('test').database.adapter.ensureUniqueness('UniquePointer', schema, ['ptr']);
  })
  .then(() => {
    // A second object pointing at the same target must violate the index.
    let dupe = new Parse.Object('UniquePointer');
    dupe.set('ptr', target);
    return dupe.save();
  })
  .then(() => {
    fail('save should have failed due to duplicate value');
    done();
  })
  .catch(error => {
    expect(error.code).toEqual(Parse.Error.DUPLICATE_VALUE);
    done();
  });
});
it('fails when attempting to ensure uniqueness of fields that are not currently unique', done => {
  // Seed two objects that already collide on 'key', then ask the adapter to
  // build a unique index over that field — index creation must be rejected.
  let o1 = new Parse.Object('UniqueFail');
  o1.set('key', 'val');
  let o2 = new Parse.Object('UniqueFail');
  o2.set('key', 'val');
  Parse.Object.saveAll([o1, o2])
  .then(() => {
    let config = new Config('test');
    return config.database.adapter.ensureUniqueness('UniqueFail', { fields: { key: { __type: 'String' } } }, ['key']);
  })
  .then(() => {
    // Fix: the spec used to time out here when ensureUniqueness resolved
    // unexpectedly — fail fast with an explicit message instead.
    fail('ensureUniqueness should have failed because the field values are not unique');
    done();
  })
  .catch(error => {
    expect(error.code).toEqual(Parse.Error.DUPLICATE_VALUE);
    done();
  });
});
it_exclude_dbs(['postgres'])('can do compound uniqueness', done => {
  let config = new Config('test');
  // Compound unique index over (k1, k2): only the PAIR must be unique;
  // either key may repeat individually.
  config.database.adapter.ensureUniqueness('CompoundUnique', { fields: { k1: { __type: 'String' }, k2: { __type: 'String' } } }, ['k1', 'k2'])
  .then(() => {
    let o1 = new Parse.Object('CompoundUnique');
    o1.set('k1', 'v1');
    o1.set('k2', 'v2');
    return o1.save();
  })
  .then(() => {
    // Same k1, different k2 — allowed.
    let o2 = new Parse.Object('CompoundUnique');
    o2.set('k1', 'v1');
    o2.set('k2', 'not a dupe');
    return o2.save();
  })
  .then(() => {
    // Same k2, different k1 — allowed.
    let o3 = new Parse.Object('CompoundUnique');
    o3.set('k1', 'not a dupe');
    o3.set('k2', 'v2');
    return o3.save();
  })
  .then(() => {
    // Exact duplicate of o1's (k1, k2) pair — must be rejected.
    let o4 = new Parse.Object('CompoundUnique');
    o4.set('k1', 'v1');
    o4.set('k2', 'v2');
    return o4.save();
  })
  .then(() => {
    // Fix: previously a passing save here left the spec to time out with no
    // diagnostic — fail fast with an explicit message instead.
    fail('o4.save() should have failed: (k1, k2) duplicates an existing pair');
    done();
  })
  .catch(error => {
    expect(error.code).toEqual(Parse.Error.DUPLICATE_VALUE);
    done();
  });
});
});