build: Release (#9169)
This commit is contained in:
@@ -24,7 +24,8 @@
|
||||
"prefer-const": "error",
|
||||
"space-infix-ops": "error",
|
||||
"no-useless-escape": "off",
|
||||
"require-atomic-updates": "off"
|
||||
"require-atomic-updates": "off",
|
||||
"object-curly-spacing": ["error", "always"]
|
||||
},
|
||||
"globals": {
|
||||
"Parse": true
|
||||
|
||||
13
.github/ISSUE_TEMPLATE/---1-report-an-issue.md
vendored
13
.github/ISSUE_TEMPLATE/---1-report-an-issue.md
vendored
@@ -8,16 +8,10 @@ assignees: ''
|
||||
---
|
||||
|
||||
### New Issue Checklist
|
||||
<!--
|
||||
Check every following box [x] before submitting your issue.
|
||||
Click the "Preview" tab for better readability.
|
||||
Thanks for contributing to Parse Platform!
|
||||
-->
|
||||
|
||||
- [ ] I am not disclosing a [vulnerability](https://github.com/parse-community/parse-server/blob/master/SECURITY.md).
|
||||
- [ ] I am not just asking a [question](https://github.com/parse-community/.github/blob/master/SUPPORT.md).
|
||||
- [ ] I have searched through [existing issues](https://github.com/parse-community/parse-server/issues?q=is%3Aissue).
|
||||
- [ ] I can reproduce the issue with the [latest version of Parse Server](https://github.com/parse-community/parse-server/releases). <!-- We don't investigate issues for outdated releases. -->
|
||||
- Report security issues [confidentially](https://github.com/parse-community/parse-server/security/policy).
|
||||
- Any contribution is under this [license](https://github.com/parse-community/parse-server/blob/alpha/LICENSE).
|
||||
- Before posting search [existing issues](https://github.com/parse-community/parse-server/issues?q=is%3Aissue).
|
||||
|
||||
### Issue Description
|
||||
<!-- What is the specific issue with Parse Server? -->
|
||||
@@ -30,6 +24,7 @@ assignees: ''
|
||||
|
||||
### Expected Outcome
|
||||
<!-- What outcome, for example query result, did you expect? -->
|
||||
|
||||
### Environment
|
||||
<!-- Be specific with versions, don't use "latest" or semver ranges like "~x.y.z" or "^x.y.z". -->
|
||||
|
||||
|
||||
11
.github/ISSUE_TEMPLATE/---2-feature-request.md
vendored
11
.github/ISSUE_TEMPLATE/---2-feature-request.md
vendored
@@ -8,15 +8,10 @@ assignees: ''
|
||||
---
|
||||
|
||||
### New Feature / Enhancement Checklist
|
||||
<!--
|
||||
Check every following box [x] before submitting your issue.
|
||||
Click the "Preview" tab for better readability.
|
||||
Thanks for contributing to Parse Platform!
|
||||
-->
|
||||
|
||||
- [ ] I am not disclosing a [vulnerability](https://github.com/parse-community/parse-server/blob/master/SECURITY.md).
|
||||
- [ ] I am not just asking a [question](https://github.com/parse-community/.github/blob/master/SUPPORT.md).
|
||||
- [ ] I have searched through [existing issues](https://github.com/parse-community/parse-server/issues?q=is%3Aissue).
|
||||
- Report security issues [confidentially](https://github.com/parse-community/parse-server/security/policy).
|
||||
- Any contribution is under this [license](https://github.com/parse-community/parse-server/blob/alpha/LICENSE).
|
||||
- Before posting search [existing issues](https://github.com/parse-community/parse-server/issues?q=is%3Aissue).
|
||||
|
||||
### Current Limitation
|
||||
<!-- Which current limitation is the feature or enhancement addressing? -->
|
||||
|
||||
@@ -17,14 +17,8 @@ jobs:
|
||||
- name: Setup Node
|
||||
uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: 14
|
||||
- name: Cache Node.js modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.npm
|
||||
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-node-
|
||||
node-version: 20
|
||||
cache: 'npm'
|
||||
- name: Install dependencies
|
||||
run: npm ci
|
||||
- name: CI Environments Check
|
||||
|
||||
59
.github/workflows/ci.yml
vendored
59
.github/workflows/ci.yml
vendored
@@ -8,7 +8,7 @@ on:
|
||||
paths-ignore:
|
||||
- '**/**.md'
|
||||
env:
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
PARSE_SERVER_TEST_TIMEOUT: 20000
|
||||
jobs:
|
||||
check-code-analysis:
|
||||
@@ -146,34 +146,34 @@ jobs:
|
||||
matrix:
|
||||
include:
|
||||
- name: MongoDB 4.2, ReplicaSet
|
||||
MONGODB_VERSION: 4.2.19
|
||||
MONGODB_VERSION: 4.2.25
|
||||
MONGODB_TOPOLOGY: replset
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: MongoDB 4.4, ReplicaSet
|
||||
MONGODB_VERSION: 4.4.13
|
||||
MONGODB_VERSION: 4.4.29
|
||||
MONGODB_TOPOLOGY: replset
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: MongoDB 5, ReplicaSet
|
||||
MONGODB_VERSION: 5.3.2
|
||||
MONGODB_VERSION: 5.0.26
|
||||
MONGODB_TOPOLOGY: replset
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: MongoDB 6, ReplicaSet
|
||||
MONGODB_VERSION: 6.0.2
|
||||
MONGODB_VERSION: 6.0.14
|
||||
MONGODB_TOPOLOGY: replset
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: MongoDB 7, ReplicaSet
|
||||
MONGODB_VERSION: 7.0.1
|
||||
MONGODB_VERSION: 7.0.8
|
||||
MONGODB_TOPOLOGY: replset
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: Redis Cache
|
||||
PARSE_SERVER_TEST_CACHE: redis
|
||||
MONGODB_VERSION: 4.4.13
|
||||
MONGODB_VERSION: 7.0.8
|
||||
MONGODB_TOPOLOGY: standalone
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: Node 18
|
||||
MONGODB_VERSION: 4.4.13
|
||||
MONGODB_VERSION: 7.0.8
|
||||
MONGODB_TOPOLOGY: standalone
|
||||
NODE_VERSION: 18.19.1
|
||||
NODE_VERSION: 18.20.0
|
||||
fail-fast: false
|
||||
name: ${{ matrix.name }}
|
||||
timeout-minutes: 15
|
||||
@@ -210,32 +210,37 @@ jobs:
|
||||
- run: npm run coverage
|
||||
env:
|
||||
CI: true
|
||||
- run: bash <(curl -s https://codecov.io/bash)
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
# Set to `true` once codecov token bug is fixed; https://github.com/parse-community/parse-server/issues/9129
|
||||
fail_ci_if_error: false
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
check-postgres:
|
||||
strategy:
|
||||
matrix:
|
||||
include:
|
||||
- name: PostgreSQL 13, PostGIS 3.1
|
||||
POSTGRES_IMAGE: postgis/postgis:13-3.1
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: PostgreSQL 13, PostGIS 3.2
|
||||
POSTGRES_IMAGE: postgis/postgis:13-3.2
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: PostgreSQL 13, PostGIS 3.3
|
||||
POSTGRES_IMAGE: postgis/postgis:13-3.3
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: PostgreSQL 13, PostGIS 3.4
|
||||
POSTGRES_IMAGE: postgis/postgis:13-3.4
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: PostgreSQL 14, PostGIS 3.4
|
||||
POSTGRES_IMAGE: postgis/postgis:14-3.4
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: PostgreSQL 15, PostGIS 3.4
|
||||
POSTGRES_IMAGE: postgis/postgis:15-3.4
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
- name: PostgreSQL 16, PostGIS 3.4
|
||||
POSTGRES_IMAGE: postgis/postgis:15-3.4
|
||||
NODE_VERSION: 20.11.1
|
||||
NODE_VERSION: 20.12.0
|
||||
fail-fast: false
|
||||
name: ${{ matrix.name }}
|
||||
timeout-minutes: 15
|
||||
@@ -281,7 +286,13 @@ jobs:
|
||||
- run: npm run coverage
|
||||
env:
|
||||
CI: true
|
||||
- run: bash <(curl -s https://codecov.io/bash)
|
||||
- name: Upload code coverage
|
||||
uses: codecov/codecov-action@v4
|
||||
with:
|
||||
fail_ci_if_error: false
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
env:
|
||||
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
4
.github/workflows/release-automated.yml
vendored
4
.github/workflows/release-automated.yml
vendored
@@ -17,7 +17,7 @@ jobs:
|
||||
persist-credentials: false
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18.19.1
|
||||
node-version: 18.20.0
|
||||
registry-url: https://registry.npmjs.org/
|
||||
- name: Cache Node.js modules
|
||||
uses: actions/cache@v4
|
||||
@@ -93,7 +93,7 @@ jobs:
|
||||
- name: Use Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: 18.19.1
|
||||
node-version: 18.20.0
|
||||
- name: Cache Node.js modules
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
|
||||
10
Dockerfile
10
Dockerfile
@@ -1,9 +1,13 @@
|
||||
############################################################
|
||||
# Build stage
|
||||
############################################################
|
||||
FROM node:lts-alpine AS build
|
||||
FROM node:20.14.0-alpine3.20 AS build
|
||||
|
||||
RUN apk --no-cache add \
|
||||
build-base \
|
||||
git \
|
||||
python3
|
||||
|
||||
RUN apk --no-cache add git
|
||||
WORKDIR /tmp
|
||||
|
||||
# Copy package.json first to benefit from layer caching
|
||||
@@ -24,7 +28,7 @@ RUN npm ci --omit=dev --ignore-scripts \
|
||||
############################################################
|
||||
# Release stage
|
||||
############################################################
|
||||
FROM node:lts-alpine AS release
|
||||
FROM node:20.14.0-alpine3.20 AS release
|
||||
|
||||
VOLUME /parse-server/cloud /parse-server/config
|
||||
|
||||
|
||||
21
README.md
21
README.md
@@ -6,11 +6,11 @@
|
||||
[](https://github.com/parse-community/parse-server/actions?query=workflow%3Aci+branch%3Abeta)
|
||||
[](https://github.com/parse-community/parse-server/actions?query=workflow%3Aci+branch%3Arelease)
|
||||
[](https://snyk.io/test/github/parse-community/parse-server)
|
||||
[](https://codecov.io/github/parse-community/parse-server?branch=alpha)
|
||||
[](https://app.codecov.io/github/parse-community/parse-server/tree/alpha)
|
||||
[](https://github.com/parse-community/parse-dashboard/releases)
|
||||
|
||||
[](https://nodejs.org)
|
||||
[](https://www.mongodb.com)
|
||||
[](https://www.mongodb.com)
|
||||
[](https://www.postgresql.org)
|
||||
|
||||
[](https://www.npmjs.com/package/parse-server)
|
||||
@@ -129,21 +129,20 @@ Parse Server is continuously tested with the most recent releases of Node.js to
|
||||
|
||||
| Version | Latest Version | End-of-Life | Compatible |
|
||||
|------------|----------------|-------------|------------|
|
||||
| Node.js 18 | 18.19.1 | April 2025 | ✅ Yes |
|
||||
| Node.js 20 | 20.11.1 | April 2026 | ✅ Yes |
|
||||
| Node.js 18 | 18.20.0 | April 2025 | ✅ Yes |
|
||||
| Node.js 20 | 20.12.0 | April 2026 | ✅ Yes |
|
||||
|
||||
#### MongoDB
|
||||
|
||||
Parse Server is continuously tested with the most recent releases of MongoDB to ensure compatibility. We follow the [MongoDB support schedule](https://www.mongodb.com/support-policy) and [MongoDB lifecycle schedule](https://www.mongodb.com/support-policy/lifecycles) and only test against versions that are officially supported and have not reached their end-of-life date. We consider the end-of-life date of a MongoDB "rapid release" to be the same as its major version release.
|
||||
Parse Server is continuously tested with the most recent releases of MongoDB to ensure compatibility. We follow the [MongoDB support schedule](https://www.mongodb.com/support-policy) and [MongoDB lifecycle schedule](https://www.mongodb.com/support-policy/lifecycles) and only test against versions that are officially supported and have not reached their end-of-life date. MongoDB "rapid releases" are ignored as these are considered pre-releases of the next major version.
|
||||
|
||||
| Version | Latest Version | End-of-Life | Compatible |
|
||||
| ----------- | -------------- | ------------- | ---------- |
|
||||
| MongoDB 4.0 | 4.0.28 | April 2022 | ✅ Yes |
|
||||
| MongoDB 4.2 | 4.2.19 | April 2023 | ✅ Yes |
|
||||
| MongoDB 4.4 | 4.4.13 | February 2024 | ✅ Yes |
|
||||
| MongoDB 5 | 5.3.2 | October 2024 | ✅ Yes |
|
||||
| MongoDB 6 | 6.0.2 | July 2025 | ✅ Yes |
|
||||
| MongoDB 7 | 7.0.1 | TDB | ✅ Yes |
|
||||
| MongoDB 4.2 | 4.2.25 | April 2023 | ✅ Yes |
|
||||
| MongoDB 4.4 | 4.4.29 | February 2024 | ✅ Yes |
|
||||
| MongoDB 5 | 5.0.26 | October 2024 | ✅ Yes |
|
||||
| MongoDB 6 | 6.0.14 | July 2025 | ✅ Yes |
|
||||
| MongoDB 7 | 7.0.8 | TDB | ✅ Yes |
|
||||
|
||||
#### PostgreSQL
|
||||
|
||||
|
||||
@@ -1,3 +1,105 @@
|
||||
# [7.1.0-alpha.12](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.11...7.1.0-alpha.12) (2024-06-30)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* SQL injection when using Parse Server with PostgreSQL; fixes security vulnerability [GHSA-c2hr-cqg6-8j6r](https://github.com/parse-community/parse-server/security/advisories/GHSA-c2hr-cqg6-8j6r) ([#9167](https://github.com/parse-community/parse-server/issues/9167)) ([2edf1e4](https://github.com/parse-community/parse-server/commit/2edf1e4c0363af01e97a7fbc97694f851b7d1ff3))
|
||||
|
||||
# [7.1.0-alpha.11](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.10...7.1.0-alpha.11) (2024-06-29)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Upgrade to Parse JS SDK 5.2.0 ([#9128](https://github.com/parse-community/parse-server/issues/9128)) ([665b8d5](https://github.com/parse-community/parse-server/commit/665b8d52d6cf5275179a5e1fb132c934edb53ecc))
|
||||
|
||||
# [7.1.0-alpha.10](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.9...7.1.0-alpha.10) (2024-06-11)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Live query throws error when constraint `notEqualTo` is set to `null` ([#8835](https://github.com/parse-community/parse-server/issues/8835)) ([11d3e48](https://github.com/parse-community/parse-server/commit/11d3e484df862224c15d20f6171514948981ea90))
|
||||
|
||||
# [7.1.0-alpha.9](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.8...7.1.0-alpha.9) (2024-05-27)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Parse Server option `extendSessionOnUse` not working for session lengths < 24 hours ([#9113](https://github.com/parse-community/parse-server/issues/9113)) ([0a054e6](https://github.com/parse-community/parse-server/commit/0a054e6b541fd5ab470bf025665f5f7d2acedaa0))
|
||||
|
||||
# [7.1.0-alpha.8](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.7...7.1.0-alpha.8) (2024-05-16)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Upgrade to @parse/push-adapter 6.2.0 ([#9127](https://github.com/parse-community/parse-server/issues/9127)) ([ca20496](https://github.com/parse-community/parse-server/commit/ca20496f28e5ec1294a7a23c8559df82b79b2a04))
|
||||
|
||||
# [7.1.0-alpha.7](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.6...7.1.0-alpha.7) (2024-05-16)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Facebook Limited Login not working due to incorrect domain in JWT validation ([#9122](https://github.com/parse-community/parse-server/issues/9122)) ([9d0bd2b](https://github.com/parse-community/parse-server/commit/9d0bd2badd6e5f7429d1af00b118225752e5d86a))
|
||||
|
||||
# [7.1.0-alpha.6](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.5...7.1.0-alpha.6) (2024-04-14)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* `Parse.Cloud.startJob` and `Parse.Push.send` not returning status ID when setting Parse Server option `directAccess: true` ([#8766](https://github.com/parse-community/parse-server/issues/8766)) ([5b0efb2](https://github.com/parse-community/parse-server/commit/5b0efb22efe94c47f243cf8b1e6407ed5c5a67d3))
|
||||
|
||||
# [7.1.0-alpha.5](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.4...7.1.0-alpha.5) (2024-04-07)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Prevent Parse Server start in case of unknown option in server configuration ([#8987](https://github.com/parse-community/parse-server/issues/8987)) ([8758e6a](https://github.com/parse-community/parse-server/commit/8758e6abb9dbb68757bddcbd332ad25100c24a0e))
|
||||
|
||||
# [7.1.0-alpha.4](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.3...7.1.0-alpha.4) (2024-03-31)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Upgrade to @parse/push-adapter 6.0.0 ([#9066](https://github.com/parse-community/parse-server/issues/9066)) ([18bdbf8](https://github.com/parse-community/parse-server/commit/18bdbf89c53a57648891ef582614ba7c2941e587))
|
||||
|
||||
# [7.1.0-alpha.3](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.2...7.1.0-alpha.3) (2024-03-24)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Rate limiting can fail when using Parse Server option `rateLimit.redisUrl` with clusters ([#8632](https://github.com/parse-community/parse-server/issues/8632)) ([c277739](https://github.com/parse-community/parse-server/commit/c27773962399f8e27691e3b8087e7e1d59516efd))
|
||||
|
||||
# [7.1.0-alpha.2](https://github.com/parse-community/parse-server/compare/7.1.0-alpha.1...7.1.0-alpha.2) (2024-03-24)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add server security check status `security.enableCheck` to Features Router ([#8679](https://github.com/parse-community/parse-server/issues/8679)) ([b07ec15](https://github.com/parse-community/parse-server/commit/b07ec153825882e97cc48dc84072c7f549f3238b))
|
||||
|
||||
# [7.1.0-alpha.1](https://github.com/parse-community/parse-server/compare/7.0.0...7.1.0-alpha.1) (2024-03-23)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* `Required` option not handled correctly for special fields (File, GeoPoint, Polygon) on GraphQL API mutations ([#8915](https://github.com/parse-community/parse-server/issues/8915)) ([907ad42](https://github.com/parse-community/parse-server/commit/907ad4267c228d26cfcefe7848b30ce85ba7ff8f))
|
||||
|
||||
### Features
|
||||
|
||||
* Add `silent` log level for Cloud Code ([#8803](https://github.com/parse-community/parse-server/issues/8803)) ([5f81efb](https://github.com/parse-community/parse-server/commit/5f81efb42964c4c2fa8bcafee9446a0122e3ce21))
|
||||
|
||||
# [7.0.0-alpha.31](https://github.com/parse-community/parse-server/compare/7.0.0-alpha.30...7.0.0-alpha.31) (2024-03-21)
|
||||
|
||||
|
||||
### Features
|
||||
|
||||
* Add `silent` log level for Cloud Code ([#8803](https://github.com/parse-community/parse-server/issues/8803)) ([5f81efb](https://github.com/parse-community/parse-server/commit/5f81efb42964c4c2fa8bcafee9446a0122e3ce21))
|
||||
|
||||
# [7.0.0-alpha.30](https://github.com/parse-community/parse-server/compare/7.0.0-alpha.29...7.0.0-alpha.30) (2024-03-20)
|
||||
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* `Required` option not handled correctly for special fields (File, GeoPoint, Polygon) on GraphQL API mutations ([#8915](https://github.com/parse-community/parse-server/issues/8915)) ([907ad42](https://github.com/parse-community/parse-server/commit/907ad4267c228d26cfcefe7848b30ce85ba7ff8f))
|
||||
|
||||
# [7.0.0-alpha.29](https://github.com/parse-community/parse-server/compare/7.0.0-alpha.28...7.0.0-alpha.29) (2024-03-19)
|
||||
|
||||
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
'use strict';
|
||||
|
||||
const CiVersionCheck = require('./CiVersionCheck');
|
||||
const mongoVersionList = require('mongodb-version-list');
|
||||
const allNodeVersions = require('all-node-versions');
|
||||
const { exec } = require('child_process');
|
||||
|
||||
async function check() {
|
||||
// Run checks
|
||||
@@ -14,14 +13,16 @@ async function check() {
|
||||
* Check the MongoDB versions used in test environments.
|
||||
*/
|
||||
async function checkMongoDbVersions() {
|
||||
const releasedVersions = await new Promise((resolve, reject) => {
|
||||
mongoVersionList(function (error, versions) {
|
||||
let latestStableVersions = await new Promise((resolve, reject) => {
|
||||
exec('m ls', (error, stdout) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
resolve(versions);
|
||||
resolve(stdout.trim());
|
||||
});
|
||||
});
|
||||
latestStableVersions = latestStableVersions.split('\n').map(version => version.trim());
|
||||
|
||||
await new CiVersionCheck({
|
||||
packageName: 'MongoDB',
|
||||
@@ -29,13 +30,14 @@ async function checkMongoDbVersions() {
|
||||
yamlFilePath: './.github/workflows/ci.yml',
|
||||
ciEnvironmentsKeyPath: 'jobs.check-mongo.strategy.matrix.include',
|
||||
ciVersionKey: 'MONGODB_VERSION',
|
||||
releasedVersions,
|
||||
latestComponent: CiVersionCheck.versionComponents.minor,
|
||||
releasedVersions: latestStableVersions,
|
||||
latestComponent: CiVersionCheck.versionComponents.patch,
|
||||
ignoreReleasedVersions: [
|
||||
'<4.0.0', // Versions reached their MongoDB end-of-life support date
|
||||
'~4.1.0', // Development release according to MongoDB support
|
||||
'~4.3.0', // Development release according to MongoDB support
|
||||
'~4.7.0', // Development release according to MongoDB support
|
||||
'<4.2.0', // These versions have reached their end-of-life support date
|
||||
'>=4.3.0 <5.0.0', // Unsupported rapid release versions
|
||||
'>=5.1.0 <6.0.0', // Unsupported rapid release versions
|
||||
'>=6.1.0 <7.0.0', // Unsupported rapid release versions
|
||||
'>=7.1.0 <8.0.0', // Unsupported rapid release versions
|
||||
],
|
||||
}).check();
|
||||
}
|
||||
@@ -44,8 +46,9 @@ async function checkMongoDbVersions() {
|
||||
* Check the Nodejs versions used in test environments.
|
||||
*/
|
||||
async function checkNodeVersions() {
|
||||
const allVersions = await allNodeVersions();
|
||||
const releasedVersions = allVersions.versions;
|
||||
const allVersions = (await import('all-node-versions')).default;
|
||||
const { versions } = await allVersions();
|
||||
const nodeVersions = versions.map(version => version.node);
|
||||
|
||||
await new CiVersionCheck({
|
||||
packageName: 'Node.js',
|
||||
@@ -53,13 +56,12 @@ async function checkNodeVersions() {
|
||||
yamlFilePath: './.github/workflows/ci.yml',
|
||||
ciEnvironmentsKeyPath: 'jobs.check-mongo.strategy.matrix.include',
|
||||
ciVersionKey: 'NODE_VERSION',
|
||||
releasedVersions,
|
||||
releasedVersions: nodeVersions,
|
||||
latestComponent: CiVersionCheck.versionComponents.minor,
|
||||
ignoreReleasedVersions: [
|
||||
'<12.0.0', // These versions have reached their end-of-life support date
|
||||
'>=13.0.0 <14.0.0', // These versions have reached their end-of-life support date
|
||||
'>=15.0.0 <16.0.0', // These versions have reached their end-of-life support date
|
||||
'>=19.0.0', // These versions are not officially supported yet
|
||||
'<18.0.0', // These versions have reached their end-of-life support date
|
||||
'>=19.0.0 <20.0.0', // These versions have reached their end-of-life support date
|
||||
'>=21.0.0', // These versions are not officially supported yet
|
||||
],
|
||||
}).check();
|
||||
}
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"template": "./node_modules/clean-jsdoc-theme",
|
||||
"theme_opts": {
|
||||
"default_theme": "dark",
|
||||
"title": "<img src='../.github/parse-server-logo.png' class='logo'/>",
|
||||
"title": "<img src='https://raw.githubusercontent.com/parse-community/parse-server/alpha/.github/parse-server-logo.png' class='logo'/>",
|
||||
"create_style": "header, .sidebar-section-title, .sidebar-title { color: #139cee !important } .logo { margin-left : 40px; margin-right: 40px }"
|
||||
}
|
||||
},
|
||||
|
||||
4821
package-lock.json
generated
4821
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
43
package.json
43
package.json
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "parse-server",
|
||||
"version": "7.0.0",
|
||||
"version": "7.1.0-alpha.12",
|
||||
"description": "An express module providing a Parse-compatible API server",
|
||||
"main": "lib/index.js",
|
||||
"repository": {
|
||||
@@ -21,40 +21,40 @@
|
||||
"dependencies": {
|
||||
"@apollo/server": "4.10.1",
|
||||
"@babel/eslint-parser": "7.21.8",
|
||||
"@graphql-tools/merge": "8.4.1",
|
||||
"@graphql-tools/merge": "9.0.3",
|
||||
"@graphql-tools/schema": "10.0.3",
|
||||
"@graphql-tools/utils": "8.12.0",
|
||||
"@parse/fs-files-adapter": "2.0.1",
|
||||
"@parse/push-adapter": "5.1.1",
|
||||
"@parse/fs-files-adapter": "3.0.0",
|
||||
"@parse/push-adapter": "6.2.0",
|
||||
"bcryptjs": "2.4.3",
|
||||
"body-parser": "1.20.2",
|
||||
"commander": "12.0.0",
|
||||
"cors": "2.8.5",
|
||||
"deepcopy": "2.1.0",
|
||||
"express": "4.18.2",
|
||||
"express": "4.19.2",
|
||||
"express-rate-limit": "6.11.2",
|
||||
"follow-redirects": "1.15.6",
|
||||
"graphql": "16.8.1",
|
||||
"graphql-list-fields": "2.0.4",
|
||||
"graphql-relay": "0.10.0",
|
||||
"graphql-relay": "0.10.1",
|
||||
"graphql-tag": "2.12.6",
|
||||
"graphql-upload": "15.0.2",
|
||||
"intersect": "1.0.1",
|
||||
"jsonwebtoken": "9.0.0",
|
||||
"jsonwebtoken": "9.0.2",
|
||||
"jwks-rsa": "3.1.0",
|
||||
"ldapjs": "3.0.7",
|
||||
"lodash": "4.17.21",
|
||||
"lru-cache": "10.1.0",
|
||||
"lru-cache": "10.2.2",
|
||||
"mime": "3.0.0",
|
||||
"mongodb": "5.9.0",
|
||||
"mustache": "4.2.0",
|
||||
"otpauth": "9.2.2",
|
||||
"parse": "5.0.0",
|
||||
"parse": "5.2.0",
|
||||
"path-to-regexp": "6.2.1",
|
||||
"pg-monitor": "2.0.0",
|
||||
"pg-promise": "11.5.4",
|
||||
"pg-promise": "11.7.8",
|
||||
"pluralize": "8.0.0",
|
||||
"rate-limit-redis": "3.0.2",
|
||||
"rate-limit-redis": "4.2.0",
|
||||
"redis": "4.6.13",
|
||||
"semver": "7.6.0",
|
||||
"subscriptions-transport-ws": "0.11.0",
|
||||
@@ -62,13 +62,13 @@
|
||||
"uuid": "9.0.1",
|
||||
"winston": "3.12.0",
|
||||
"winston-daily-rotate-file": "5.0.0",
|
||||
"ws": "8.16.0"
|
||||
"ws": "8.17.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@actions/core": "1.9.1",
|
||||
"@apollo/client": "3.9.5",
|
||||
"@actions/core": "1.10.1",
|
||||
"@apollo/client": "3.9.11",
|
||||
"@babel/cli": "7.23.9",
|
||||
"@babel/core": "7.24.0",
|
||||
"@babel/core": "7.24.7",
|
||||
"@babel/plugin-proposal-object-rest-spread": "7.10.0",
|
||||
"@babel/plugin-transform-flow-strip-types": "7.23.3",
|
||||
"@babel/preset-env": "7.24.0",
|
||||
@@ -79,9 +79,9 @@
|
||||
"@semantic-release/github": "7.2.3",
|
||||
"@semantic-release/npm": "7.1.3",
|
||||
"@semantic-release/release-notes-generator": "9.0.3",
|
||||
"all-node-versions": "11.3.0",
|
||||
"all-node-versions": "12.1.0",
|
||||
"apollo-upload-client": "17.0.0",
|
||||
"clean-jsdoc-theme": "4.2.7",
|
||||
"clean-jsdoc-theme": "4.2.18",
|
||||
"cross-env": "7.0.2",
|
||||
"deep-diff": "1.0.2",
|
||||
"eslint": "8.26.0",
|
||||
@@ -95,12 +95,12 @@
|
||||
"jsdoc": "4.0.2",
|
||||
"jsdoc-babel": "0.5.0",
|
||||
"lint-staged": "10.2.3",
|
||||
"m": "1.9.0",
|
||||
"madge": "6.1.0",
|
||||
"mock-files-adapter": "file:spec/dependencies/mock-files-adapter",
|
||||
"mock-mail-adapter": "file:spec/dependencies/mock-mail-adapter",
|
||||
"mongodb-runner": "5.4.4",
|
||||
"mongodb-version-list": "1.0.0",
|
||||
"node-abort-controller": "3.0.1",
|
||||
"mongodb-runner": "5.5.4",
|
||||
"node-abort-controller": "3.1.1",
|
||||
"node-fetch": "3.2.10",
|
||||
"nyc": "15.1.0",
|
||||
"prettier": "2.0.5",
|
||||
@@ -125,6 +125,7 @@
|
||||
"test:mongodb:5.3.2": "npm run test:mongodb --dbversion=5.3.2",
|
||||
"test:mongodb:6.0.2": "npm run test:mongodb --dbversion=6.0.2",
|
||||
"test:mongodb:7.0.1": "npm run test:mongodb --dbversion=7.0.1",
|
||||
"test:postgres:testonly": "cross-env PARSE_SERVER_TEST_DB=postgres PARSE_SERVER_TEST_DATABASE_URI=postgres://postgres:password@localhost:5432/parse_server_postgres_adapter_test_database npm run testonly",
|
||||
"pretest": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.3.2} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} mongodb-runner start -t ${MONGODB_TOPOLOGY} --version ${MONGODB_VERSION} -- --port 27017",
|
||||
"testonly": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=5.3.2} MONGODB_TOPOLOGY=${MONGODB_TOPOLOGY:=standalone} TESTING=1 jasmine",
|
||||
"test": "npm run testonly",
|
||||
@@ -143,7 +144,7 @@
|
||||
"parse-server": "bin/parse-server"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@node-rs/bcrypt": "1.1.0"
|
||||
"@node-rs/bcrypt": "1.10.1"
|
||||
},
|
||||
"collective": {
|
||||
"type": "opencollective",
|
||||
|
||||
@@ -1,10 +1,17 @@
|
||||
#!/bin/sh -e
|
||||
set -x
|
||||
# GITHUB_ACTIONS=true SOURCE_TAG=test ./release_docs.sh
|
||||
|
||||
if [ "${GITHUB_ACTIONS}" = "" ];
|
||||
then
|
||||
echo "Cannot release docs without GITHUB_ACTIONS set"
|
||||
exit 0;
|
||||
fi
|
||||
if [ "${SOURCE_TAG}" = "" ];
|
||||
then
|
||||
echo "Cannot release docs without SOURCE_TAG set"
|
||||
exit 0;
|
||||
fi
|
||||
REPO="https://github.com/parse-community/parse-server"
|
||||
|
||||
rm -rf docs
|
||||
@@ -13,20 +20,20 @@ cd docs
|
||||
git pull origin gh-pages
|
||||
cd ..
|
||||
|
||||
DEST="master"
|
||||
RELEASE="release"
|
||||
VERSION="${SOURCE_TAG}"
|
||||
|
||||
if [ "${SOURCE_TAG}" != "" ];
|
||||
then
|
||||
DEST="${SOURCE_TAG}"
|
||||
# change the default page to the latest
|
||||
echo "<meta http-equiv='refresh' content='0; url=/parse-server/api/${DEST}'>" > "docs/api/index.html"
|
||||
fi
|
||||
# change the default page to the latest
|
||||
echo "<meta http-equiv='refresh' content='0; url=/parse-server/api/${VERSION}'>" > "docs/api/index.html"
|
||||
|
||||
npm run definitions
|
||||
npm run docs
|
||||
|
||||
mkdir -p "docs/api/${DEST}"
|
||||
cp -R out/* "docs/api/${DEST}"
|
||||
mkdir -p "docs/api/${RELEASE}"
|
||||
cp -R out/* "docs/api/${RELEASE}"
|
||||
|
||||
mkdir -p "docs/api/${VERSION}"
|
||||
cp -R out/* "docs/api/${VERSION}"
|
||||
|
||||
# Copy other resources
|
||||
RESOURCE_DIR=".github"
|
||||
|
||||
@@ -254,6 +254,23 @@ function inject(t, list) {
|
||||
if (action) {
|
||||
props.push(t.objectProperty(t.stringLiteral('action'), action));
|
||||
}
|
||||
|
||||
if (t.isGenericTypeAnnotation(elt)) {
|
||||
if (elt.typeAnnotation.id.name in nestedOptionEnvPrefix) {
|
||||
props.push(
|
||||
t.objectProperty(t.stringLiteral('type'), t.stringLiteral(elt.typeAnnotation.id.name))
|
||||
);
|
||||
}
|
||||
} else if (t.isArrayTypeAnnotation(elt)) {
|
||||
const elementType = elt.typeAnnotation.elementType;
|
||||
if (t.isGenericTypeAnnotation(elementType)) {
|
||||
if (elementType.id.name in nestedOptionEnvPrefix) {
|
||||
props.push(
|
||||
t.objectProperty(t.stringLiteral('type'), t.stringLiteral(elementType.id.name + '[]'))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (elt.defaultValue) {
|
||||
let parsedValue = parseDefaultValue(elt, elt.defaultValue, t);
|
||||
if (!parsedValue) {
|
||||
|
||||
@@ -142,6 +142,25 @@ describe('AdapterLoader', () => {
|
||||
}).not.toThrow();
|
||||
});
|
||||
|
||||
it('should load custom database adapter from config', done => {
|
||||
const adapterPath = require('path').resolve('./spec/support/MockDatabaseAdapter');
|
||||
const options = {
|
||||
databaseURI: 'oracledb://user:password@localhost:1521/freepdb1',
|
||||
collectionPrefix: '',
|
||||
};
|
||||
const databaseAdapterOptions = {
|
||||
adapter: adapterPath,
|
||||
options,
|
||||
};
|
||||
expect(() => {
|
||||
const databaseAdapter = loadAdapter(databaseAdapterOptions);
|
||||
expect(databaseAdapter).not.toBe(undefined);
|
||||
expect(databaseAdapter.options).toEqual(options);
|
||||
expect(databaseAdapter.getDatabaseURI()).toEqual(options.databaseURI);
|
||||
}).not.toThrow();
|
||||
done();
|
||||
});
|
||||
|
||||
it('should load file adapter from direct passing', done => {
|
||||
spyOn(console, 'warn').and.callFake(() => {});
|
||||
const mockFilesAdapter = new MockFilesAdapter('key', 'secret', 'bucket');
|
||||
|
||||
@@ -260,3 +260,24 @@ describe('Auth', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('extendSessionOnUse', () => {
|
||||
it(`shouldUpdateSessionExpiry()`, async () => {
|
||||
const { shouldUpdateSessionExpiry } = require('../lib/Auth');
|
||||
let update = new Date(Date.now() - 86410 * 1000);
|
||||
|
||||
const res = shouldUpdateSessionExpiry(
|
||||
{ sessionLength: 86460 },
|
||||
{ updatedAt: update }
|
||||
);
|
||||
|
||||
update = new Date(Date.now() - 43210 * 1000);
|
||||
const res2 = shouldUpdateSessionExpiry(
|
||||
{ sessionLength: 86460 },
|
||||
{ updatedAt: update }
|
||||
);
|
||||
|
||||
expect(res).toBe(true);
|
||||
expect(res2).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -2047,7 +2047,7 @@ describe('facebook limited auth adapter', () => {
|
||||
|
||||
it('should use algorithm from key header to verify id_token', async () => {
|
||||
const fakeClaim = {
|
||||
iss: 'https://facebook.com',
|
||||
iss: 'https://www.facebook.com',
|
||||
aud: 'secret',
|
||||
exp: Date.now(),
|
||||
sub: 'the_user_id',
|
||||
@@ -2097,7 +2097,7 @@ describe('facebook limited auth adapter', () => {
|
||||
|
||||
it('(using client id as string) should verify id_token', async () => {
|
||||
const fakeClaim = {
|
||||
iss: 'https://facebook.com',
|
||||
iss: 'https://www.facebook.com',
|
||||
aud: 'secret',
|
||||
exp: Date.now(),
|
||||
sub: 'the_user_id',
|
||||
@@ -2117,7 +2117,7 @@ describe('facebook limited auth adapter', () => {
|
||||
|
||||
it('(using client id as array) should verify id_token', async () => {
|
||||
const fakeClaim = {
|
||||
iss: 'https://facebook.com',
|
||||
iss: 'https://www.facebook.com',
|
||||
aud: 'secret',
|
||||
exp: Date.now(),
|
||||
sub: 'the_user_id',
|
||||
@@ -2137,7 +2137,7 @@ describe('facebook limited auth adapter', () => {
|
||||
|
||||
it('(using client id as array with multiple items) should verify id_token', async () => {
|
||||
const fakeClaim = {
|
||||
iss: 'https://facebook.com',
|
||||
iss: 'https://www.facebook.com',
|
||||
aud: 'secret',
|
||||
exp: Date.now(),
|
||||
sub: 'the_user_id',
|
||||
@@ -2174,7 +2174,7 @@ describe('facebook limited auth adapter', () => {
|
||||
fail();
|
||||
} catch (e) {
|
||||
expect(e.message).toBe(
|
||||
'id token not issued by correct OpenID provider - expected: https://facebook.com | from: https://not.facebook.com'
|
||||
'id token not issued by correct OpenID provider - expected: https://www.facebook.com | from: https://not.facebook.com'
|
||||
);
|
||||
}
|
||||
});
|
||||
@@ -2203,7 +2203,7 @@ describe('facebook limited auth adapter', () => {
|
||||
fail();
|
||||
} catch (e) {
|
||||
expect(e.message).toBe(
|
||||
'id token not issued by correct OpenID provider - expected: https://facebook.com | from: https://not.facebook.com'
|
||||
'id token not issued by correct OpenID provider - expected: https://www.facebook.com | from: https://not.facebook.com'
|
||||
);
|
||||
}
|
||||
});
|
||||
@@ -2230,7 +2230,7 @@ describe('facebook limited auth adapter', () => {
|
||||
fail();
|
||||
} catch (e) {
|
||||
expect(e.message).toBe(
|
||||
'id token not issued by correct OpenID provider - expected: https://facebook.com | from: https://not.facebook.com'
|
||||
'id token not issued by correct OpenID provider - expected: https://www.facebook.com | from: https://not.facebook.com'
|
||||
);
|
||||
}
|
||||
});
|
||||
@@ -2288,7 +2288,7 @@ describe('facebook limited auth adapter', () => {
|
||||
|
||||
it('should throw error with with invalid user id', async () => {
|
||||
const fakeClaim = {
|
||||
iss: 'https://facebook.com',
|
||||
iss: 'https://www.facebook.com',
|
||||
aud: 'invalid_client_id',
|
||||
sub: 'a_different_user_id',
|
||||
};
|
||||
|
||||
@@ -487,6 +487,33 @@ describe('Auth Adapter features', () => {
|
||||
expect(baseAdapter2.validateAuthData).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should not perform authData validation twice when data mutated', async () => {
|
||||
spyOn(baseAdapter, 'validateAuthData').and.resolveTo({});
|
||||
await reconfigureServer({
|
||||
auth: { baseAdapter },
|
||||
allowExpiredAuthDataToken: false,
|
||||
});
|
||||
|
||||
const user = new Parse.User();
|
||||
|
||||
await user.save({
|
||||
authData: {
|
||||
baseAdapter: { id: 'baseAdapter', token: "sometoken1" },
|
||||
},
|
||||
});
|
||||
|
||||
expect(baseAdapter.validateAuthData).toHaveBeenCalledTimes(1);
|
||||
|
||||
const user2 = new Parse.User();
|
||||
await user2.save({
|
||||
authData: {
|
||||
baseAdapter: { id: 'baseAdapter', token: "sometoken2" },
|
||||
},
|
||||
});
|
||||
|
||||
expect(baseAdapter.validateAuthData).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
it('should require additional provider if configured', async () => {
|
||||
await reconfigureServer({
|
||||
auth: { baseAdapter, additionalAdapter },
|
||||
|
||||
@@ -15,6 +15,13 @@ describe('Cloud Code Logger', () => {
|
||||
// useful to flip to false for fine tuning :).
|
||||
silent: true,
|
||||
logLevel: undefined,
|
||||
logLevels: {
|
||||
cloudFunctionError: 'error',
|
||||
cloudFunctionSuccess: 'info',
|
||||
triggerAfter: 'info',
|
||||
triggerBeforeError: 'error',
|
||||
triggerBeforeSuccess: 'info',
|
||||
},
|
||||
})
|
||||
.then(() => {
|
||||
return Parse.User.signUp('tester', 'abc')
|
||||
@@ -334,4 +341,53 @@ describe('Cloud Code Logger', () => {
|
||||
expect(args[0]).toBe('Parse error: ');
|
||||
expect(args[1].message).toBe('Object not found.');
|
||||
});
|
||||
|
||||
it('should log cloud function execution using the silent log level', async () => {
|
||||
await reconfigureServer({
|
||||
logLevels: {
|
||||
cloudFunctionSuccess: 'silent',
|
||||
cloudFunctionError: 'silent',
|
||||
},
|
||||
});
|
||||
Parse.Cloud.define('aFunction', () => {
|
||||
return 'it worked!';
|
||||
});
|
||||
Parse.Cloud.define('bFunction', () => {
|
||||
throw new Error('Failed');
|
||||
});
|
||||
spy = spyOn(Config.get('test').loggerController.adapter, 'log').and.callThrough();
|
||||
|
||||
await Parse.Cloud.run('aFunction', { foo: 'bar' });
|
||||
expect(spy).toHaveBeenCalledTimes(0);
|
||||
|
||||
await expectAsync(Parse.Cloud.run('bFunction', { foo: 'bar' })).toBeRejected();
|
||||
// Not "Failed running cloud function message..."
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should log cloud function triggers using the silent log level', async () => {
|
||||
await reconfigureServer({
|
||||
logLevels: {
|
||||
triggerAfter: 'silent',
|
||||
triggerBeforeSuccess: 'silent',
|
||||
triggerBeforeError: 'silent',
|
||||
},
|
||||
});
|
||||
Parse.Cloud.beforeSave('TestClassError', () => {
|
||||
throw new Error('Failed');
|
||||
});
|
||||
Parse.Cloud.beforeSave('TestClass', () => {});
|
||||
Parse.Cloud.afterSave('TestClass', () => {});
|
||||
|
||||
spy = spyOn(Config.get('test').loggerController.adapter, 'log').and.callThrough();
|
||||
|
||||
const obj = new Parse.Object('TestClass');
|
||||
await obj.save();
|
||||
expect(spy).toHaveBeenCalledTimes(0);
|
||||
|
||||
const objError = new Parse.Object('TestClassError');
|
||||
await expectAsync(objError.save()).toBeRejected();
|
||||
// Not "beforeSave failed for TestClassError for user ..."
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -87,7 +87,7 @@ describe('OAuth', function () {
|
||||
done();
|
||||
}
|
||||
|
||||
it('GET request for a resource that requires OAuth should fail with invalid credentials', done => {
|
||||
xit('GET request for a resource that requires OAuth should fail with invalid credentials', done => {
|
||||
/*
|
||||
This endpoint has been chosen to make a request to an endpoint that requires OAuth which fails due to missing authentication.
|
||||
Any other endpoint from the Twitter API that requires OAuth can be used instead in case the currently used endpoint deprecates.
|
||||
@@ -105,7 +105,7 @@ describe('OAuth', function () {
|
||||
});
|
||||
});
|
||||
|
||||
it('POST request for a resource that requires OAuth should fail with invalid credentials', done => {
|
||||
xit('POST request for a resource that requires OAuth should fail with invalid credentials', done => {
|
||||
/*
|
||||
This endpoint has been chosen to make a request to an endpoint that requires OAuth which fails due to missing authentication.
|
||||
Any other endpoint from the Twitter API that requires OAuth can be used instead in case the currently used endpoint deprecates.
|
||||
|
||||
52
spec/ParseConfigKey.spec.js
Normal file
52
spec/ParseConfigKey.spec.js
Normal file
@@ -0,0 +1,52 @@
|
||||
const Config = require('../lib/Config');
|
||||
const ParseServer = require('../lib/index').ParseServer;
|
||||
|
||||
// Verifies that the server logs an error listing every unknown configuration
// key it encounters, both at the root level and nested inside option objects
// and arrays, and that a fully valid configuration starts without error.
describe('Config Keys', () => {
  const cases = [
    {
      name: 'Invalid Root Keys',
      options: { unknow: 'val', masterKeyIPs: '' },
      error: 'unknow, masterKeyIPs',
    },
    { name: 'Invalid Schema Keys', options: { schema: { Strict: 'val' } }, error: 'schema.Strict' },
    {
      name: 'Invalid Pages Keys',
      options: { pages: { customUrls: { EmailVerificationSendFail: 'val' } } },
      error: 'pages.customUrls.EmailVerificationSendFail',
    },
    {
      name: 'Invalid LiveQueryServerOptions Keys',
      options: { liveQueryServerOptions: { MasterKey: 'value' } },
      error: 'liveQueryServerOptions.MasterKey',
    },
    {
      name: 'Invalid RateLimit Keys - Array Item',
      options: { rateLimit: [{ RequestPath: '' }, { RequestTimeWindow: '' }] },
      error: 'rateLimit[0].RequestPath, rateLimit[1].RequestTimeWindow',
    },
  ];

  for (const testCase of cases) {
    it(testCase.name, async () => {
      const logger = require('../lib/logger').logger;
      // Observe the error log without suppressing it.
      spyOn(logger, 'error').and.callThrough();
      // Skip full option validation; only unknown-key detection is under test.
      spyOn(Config, 'validateOptions').and.callFake(() => {});

      new ParseServer({
        ...defaultConfiguration,
        ...testCase.options,
      });
      expect(logger.error).toHaveBeenCalledWith(`Invalid Option Keys Found: ${testCase.error}`);
    });
  }

  it('should run fine', async () => {
    try {
      await reconfigureServer({
        ...defaultConfiguration,
      });
    } catch (err) {
      fail('Should run without error');
    }
  });
});
|
||||
@@ -9548,6 +9548,71 @@ describe('ParseGraphQLServer', () => {
|
||||
}
|
||||
});
|
||||
|
||||
it('should support files on required file', async () => {
|
||||
try {
|
||||
parseServer = await global.reconfigureServer({
|
||||
publicServerURL: 'http://localhost:13377/parse',
|
||||
});
|
||||
const schemaController = await parseServer.config.databaseController.loadSchema();
|
||||
await schemaController.addClassIfNotExists('SomeClassWithRequiredFile', {
|
||||
someField: { type: 'File', required: true },
|
||||
});
|
||||
await resetGraphQLCache();
|
||||
await parseGraphQLServer.parseGraphQLSchema.schemaCache.clear();
|
||||
|
||||
const body = new FormData();
|
||||
body.append(
|
||||
'operations',
|
||||
JSON.stringify({
|
||||
query: `
|
||||
mutation CreateSomeObject(
|
||||
$fields: CreateSomeClassWithRequiredFileFieldsInput
|
||||
) {
|
||||
createSomeClassWithRequiredFile(
|
||||
input: { fields: $fields }
|
||||
) {
|
||||
someClassWithRequiredFile {
|
||||
id
|
||||
someField {
|
||||
name
|
||||
url
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`,
|
||||
variables: {
|
||||
fields: {
|
||||
someField: { upload: null },
|
||||
},
|
||||
},
|
||||
})
|
||||
);
|
||||
body.append('map', JSON.stringify({ 1: ['variables.fields.someField.upload'] }));
|
||||
body.append('1', 'My File Content', {
|
||||
filename: 'myFileName.txt',
|
||||
contentType: 'text/plain',
|
||||
});
|
||||
|
||||
const res = await fetch('http://localhost:13377/graphql', {
|
||||
method: 'POST',
|
||||
headers,
|
||||
body,
|
||||
});
|
||||
expect(res.status).toEqual(200);
|
||||
const resText = await res.text();
|
||||
const result = JSON.parse(resText);
|
||||
expect(
|
||||
result.data.createSomeClassWithRequiredFile.someClassWithRequiredFile.someField.name
|
||||
).toEqual(jasmine.stringMatching(/_myFileName.txt$/));
|
||||
expect(
|
||||
result.data.createSomeClassWithRequiredFile.someClassWithRequiredFile.someField.url
|
||||
).toEqual(jasmine.stringMatching(/_myFileName.txt$/));
|
||||
} catch (e) {
|
||||
handleError(e);
|
||||
}
|
||||
});
|
||||
|
||||
it('should support file upload for on fly creation through pointer and relation', async () => {
|
||||
parseServer = await global.reconfigureServer({
|
||||
publicServerURL: 'http://localhost:13377/parse',
|
||||
|
||||
@@ -1269,4 +1269,33 @@ describe('ParseLiveQuery', function () {
|
||||
expect(object2.id).toBeDefined();
|
||||
expect(object3.id).toBeDefined();
|
||||
});
|
||||
|
||||
it('triggers query event with constraint not equal to null', async () => {
|
||||
await reconfigureServer({
|
||||
liveQuery: {
|
||||
classNames: ['TestObject'],
|
||||
},
|
||||
startLiveQueryServer: true,
|
||||
verbose: false,
|
||||
silent: true,
|
||||
});
|
||||
|
||||
const spy = {
|
||||
create(obj) {
|
||||
expect(obj.attributes.foo).toEqual('bar');
|
||||
},
|
||||
};
|
||||
const createSpy = spyOn(spy, 'create');
|
||||
const query = new Parse.Query(TestObject);
|
||||
query.notEqualTo('foo', null);
|
||||
const subscription = await query.subscribe();
|
||||
subscription.on('create', spy.create);
|
||||
|
||||
const object1 = new TestObject();
|
||||
object1.set('foo', 'bar');
|
||||
await object1.save();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
expect(createSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -631,4 +631,58 @@ describe('ParseServerRESTController', () => {
|
||||
expect(sessions[0].get('installationId')).toBe(installationId);
|
||||
expect(sessions[0].get('sessionToken')).toBe(loggedUser.sessionToken);
|
||||
});
|
||||
|
||||
it('returns a statusId when running jobs', async () => {
|
||||
Parse.Cloud.job('CloudJob', () => {
|
||||
return 'Cloud job completed';
|
||||
});
|
||||
const res = await RESTController.request(
|
||||
'POST',
|
||||
'/jobs/CloudJob',
|
||||
{},
|
||||
{ useMasterKey: true, returnStatus: true }
|
||||
);
|
||||
const jobStatusId = res._headers['X-Parse-Job-Status-Id'];
|
||||
expect(jobStatusId).toBeDefined();
|
||||
const result = await Parse.Cloud.getJobStatus(jobStatusId);
|
||||
expect(result.id).toBe(jobStatusId);
|
||||
});
|
||||
|
||||
it('returns a statusId when running push notifications', async () => {
|
||||
const payload = {
|
||||
data: { alert: 'We return status!' },
|
||||
where: { deviceType: 'ios' },
|
||||
};
|
||||
const res = await RESTController.request('POST', '/push', payload, {
|
||||
useMasterKey: true,
|
||||
returnStatus: true,
|
||||
});
|
||||
const pushStatusId = res._headers['X-Parse-Push-Status-Id'];
|
||||
expect(pushStatusId).toBeDefined();
|
||||
|
||||
const result = await Parse.Push.getPushStatus(pushStatusId);
|
||||
expect(result.id).toBe(pushStatusId);
|
||||
});
|
||||
|
||||
it('returns a statusId when running batch push notifications', async () => {
|
||||
const payload = {
|
||||
data: { alert: 'We return status!' },
|
||||
where: { deviceType: 'ios' },
|
||||
};
|
||||
const res = await RESTController.request('POST', 'batch', {
|
||||
requests: [{
|
||||
method: 'POST',
|
||||
path: '/push',
|
||||
body: payload,
|
||||
}],
|
||||
}, {
|
||||
useMasterKey: true,
|
||||
returnStatus: true,
|
||||
});
|
||||
const pushStatusId = res[0]._headers['X-Parse-Push-Status-Id'];
|
||||
expect(pushStatusId).toBeDefined();
|
||||
|
||||
const result = await Parse.Push.getPushStatus(pushStatusId);
|
||||
expect(result.id).toBe(pushStatusId);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -337,5 +337,33 @@ describe('Security Check', () => {
|
||||
expect(logSpy.calls.all()[0].args[0]).toContain(title);
|
||||
}
|
||||
});
|
||||
|
||||
it('does update featuresRouter', async () => {
|
||||
let response = await request({
|
||||
url: 'http://localhost:8378/1/serverInfo',
|
||||
json: true,
|
||||
headers: {
|
||||
'X-Parse-Application-Id': 'test',
|
||||
'X-Parse-REST-API-Key': 'rest',
|
||||
'X-Parse-Master-Key': 'test',
|
||||
},
|
||||
});
|
||||
expect(response.data.features.settings.securityCheck).toBeTrue();
|
||||
await reconfigureServer({
|
||||
security: {
|
||||
enableCheck: false,
|
||||
},
|
||||
});
|
||||
response = await request({
|
||||
url: 'http://localhost:8378/1/serverInfo',
|
||||
json: true,
|
||||
headers: {
|
||||
'X-Parse-Application-Id': 'test',
|
||||
'X-Parse-REST-API-Key': 'rest',
|
||||
'X-Parse-Master-Key': 'test',
|
||||
},
|
||||
});
|
||||
expect(response.data.features.settings.securityCheck).toBeFalse();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -35,6 +35,7 @@ process.noDeprecation = true;
|
||||
const cache = require('../lib/cache').default;
|
||||
const defaults = require('../lib/defaults').default;
|
||||
const ParseServer = require('../lib/index').ParseServer;
|
||||
const loadAdapter = require('../lib/Adapters/AdapterLoader').loadAdapter;
|
||||
const path = require('path');
|
||||
const TestUtils = require('../lib/TestUtils');
|
||||
const GridFSBucketAdapter = require('../lib/Adapters/Files/GridFSBucketAdapter')
|
||||
@@ -53,7 +54,10 @@ let databaseAdapter;
|
||||
let databaseURI;
|
||||
// need to bind for mocking mocha
|
||||
|
||||
if (process.env.PARSE_SERVER_TEST_DB === 'postgres') {
|
||||
if (process.env.PARSE_SERVER_DATABASE_ADAPTER) {
|
||||
databaseAdapter = JSON.parse(process.env.PARSE_SERVER_DATABASE_ADAPTER);
|
||||
databaseAdapter = loadAdapter(databaseAdapter);
|
||||
} else if (process.env.PARSE_SERVER_TEST_DB === 'postgres') {
|
||||
databaseURI = process.env.PARSE_SERVER_TEST_DATABASE_URI || postgresURI;
|
||||
databaseAdapter = new PostgresStorageAdapter({
|
||||
uri: databaseURI,
|
||||
@@ -132,6 +136,16 @@ const defaultConfiguration = {
|
||||
allowClientClassCreation: true,
|
||||
};
|
||||
|
||||
if (silent) {
|
||||
defaultConfiguration.logLevels = {
|
||||
cloudFunctionSuccess: 'silent',
|
||||
cloudFunctionError: 'silent',
|
||||
triggerAfter: 'silent',
|
||||
triggerBeforeError: 'silent',
|
||||
triggerBeforeSuccess: 'silent',
|
||||
};
|
||||
}
|
||||
|
||||
if (process.env.PARSE_SERVER_TEST_CACHE === 'redis') {
|
||||
defaultConfiguration.cacheAdapter = new RedisCacheAdapter();
|
||||
}
|
||||
@@ -434,8 +448,8 @@ try {
|
||||
// Fetch test exclusion list
|
||||
testExclusionList = require('./testExclusionList.json');
|
||||
console.log(`Using test exclusion list with ${testExclusionList.length} entries`);
|
||||
} catch(error) {
|
||||
if(error.code !== 'MODULE_NOT_FOUND') {
|
||||
} catch (error) {
|
||||
if (error.code !== 'MODULE_NOT_FOUND') {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -445,10 +459,7 @@ global.it_id = (id, func) => {
|
||||
if (testExclusionList.includes(id)) {
|
||||
return xit;
|
||||
} else {
|
||||
if(func === undefined)
|
||||
return it;
|
||||
else
|
||||
return func;
|
||||
return func || it;
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
9
spec/support/MockDatabaseAdapter.js
Normal file
9
spec/support/MockDatabaseAdapter.js
Normal file
@@ -0,0 +1,9 @@
|
||||
module.exports = function (options) {
|
||||
return {
|
||||
options: options,
|
||||
send: function () {},
|
||||
getDatabaseURI: function () {
|
||||
return options.databaseURI;
|
||||
},
|
||||
};
|
||||
};
|
||||
@@ -6,7 +6,7 @@ const jwt = require('jsonwebtoken');
|
||||
const httpsRequest = require('./httpsRequest');
|
||||
const authUtils = require('./utils');
|
||||
|
||||
const TOKEN_ISSUER = 'https://facebook.com';
|
||||
const TOKEN_ISSUER = 'https://www.facebook.com';
|
||||
|
||||
function getAppSecretPath(authData, options = {}) {
|
||||
const appSecret = options.appSecret;
|
||||
|
||||
@@ -2614,16 +2614,16 @@ function isAnyValueRegexStartsWith(values) {
|
||||
});
|
||||
}
|
||||
|
||||
function createLiteralRegex(remaining) {
|
||||
function createLiteralRegex(remaining: string) {
|
||||
return remaining
|
||||
.split('')
|
||||
.map(c => {
|
||||
const regex = RegExp('[0-9 ]|\\p{L}', 'u'); // Support all unicode letter chars
|
||||
const regex = RegExp('[0-9 ]|\\p{L}', 'u'); // Support all Unicode letter chars
|
||||
if (c.match(regex) !== null) {
|
||||
// don't escape alphanumeric characters
|
||||
// Don't escape alphanumeric characters
|
||||
return c;
|
||||
}
|
||||
// escape everything else (single quotes with single quotes, everything else with a backslash)
|
||||
// Escape everything else (single quotes with single quotes, everything else with a backslash)
|
||||
return c === `'` ? `''` : `\\${c}`;
|
||||
})
|
||||
.join('');
|
||||
@@ -2633,14 +2633,14 @@ function literalizeRegexPart(s: string) {
|
||||
const matcher1 = /\\Q((?!\\E).*)\\E$/;
|
||||
const result1: any = s.match(matcher1);
|
||||
if (result1 && result1.length > 1 && result1.index > -1) {
|
||||
// process regex that has a beginning and an end specified for the literal text
|
||||
// Process Regex that has a beginning and an end specified for the literal text
|
||||
const prefix = s.substring(0, result1.index);
|
||||
const remaining = result1[1];
|
||||
|
||||
return literalizeRegexPart(prefix) + createLiteralRegex(remaining);
|
||||
}
|
||||
|
||||
// process regex that has a beginning specified for the literal text
|
||||
// Process Regex that has a beginning specified for the literal text
|
||||
const matcher2 = /\\Q((?!\\E).*)$/;
|
||||
const result2: any = s.match(matcher2);
|
||||
if (result2 && result2.length > 1 && result2.index > -1) {
|
||||
@@ -2650,14 +2650,18 @@ function literalizeRegexPart(s: string) {
|
||||
return literalizeRegexPart(prefix) + createLiteralRegex(remaining);
|
||||
}
|
||||
|
||||
// remove all instances of \Q and \E from the remaining text & escape single quotes
|
||||
// Remove problematic chars from remaining text
|
||||
return s
|
||||
// Remove all instances of \Q and \E
|
||||
.replace(/([^\\])(\\E)/, '$1')
|
||||
.replace(/([^\\])(\\Q)/, '$1')
|
||||
.replace(/^\\E/, '')
|
||||
.replace(/^\\Q/, '')
|
||||
.replace(/([^'])'/g, `$1''`)
|
||||
.replace(/^'([^'])/, `''$1`);
|
||||
// Ensure even number of single quote sequences by adding an extra single quote if needed;
|
||||
// this ensures that every single quote is escaped
|
||||
.replace(/'+/g, match => {
|
||||
return match.length % 2 === 0 ? match : match + "'";
|
||||
});
|
||||
}
|
||||
|
||||
var GeoPointCoder = {
|
||||
|
||||
17
src/Auth.js
17
src/Auth.js
@@ -67,6 +67,17 @@ function nobody(config) {
|
||||
return new Auth({ config, isMaster: false });
|
||||
}
|
||||
|
||||
/**
 * Decides whether a session's expiry should be refreshed. Returns true once
 * at least half of the configured session length (in seconds) has elapsed
 * since the session was last updated; false otherwise — including when the
 * session or its `updatedAt` value is missing, because an invalid date never
 * compares as older than the threshold.
 */
function shouldUpdateSessionExpiry(config, session) {
  // Half of the session lifetime, expressed in milliseconds.
  const halfSessionMs = (config.sessionLength / 2) * 1000;
  // Sessions last touched on or before this instant are due for a refresh.
  const refreshThreshold = new Date(Date.now() - halfSessionMs);
  const lastUpdatedAt = new Date(session?.updatedAt);
  return lastUpdatedAt <= refreshThreshold;
}
|
||||
|
||||
const throttle = {};
|
||||
const renewSessionIfNeeded = async ({ config, session, sessionToken }) => {
|
||||
if (!config?.extendSessionOnUse) {
|
||||
@@ -88,10 +99,7 @@ const renewSessionIfNeeded = async ({ config, session, sessionToken }) => {
|
||||
const { results } = await query.execute();
|
||||
session = results[0];
|
||||
}
|
||||
const lastUpdated = new Date(session?.updatedAt);
|
||||
const yesterday = new Date();
|
||||
yesterday.setDate(yesterday.getDate() - 1);
|
||||
if (lastUpdated > yesterday || !session) {
|
||||
if (!shouldUpdateSessionExpiry(config, session) || !session) {
|
||||
return;
|
||||
}
|
||||
const expiresAt = config.generateSessionExpiresAt();
|
||||
@@ -579,6 +587,7 @@ module.exports = {
|
||||
maintenance,
|
||||
nobody,
|
||||
readOnly,
|
||||
shouldUpdateSessionExpiry,
|
||||
getAuthForSessionToken,
|
||||
getAuthForLegacySessionToken,
|
||||
findUsersWithAuthData,
|
||||
|
||||
@@ -64,6 +64,7 @@ export class Config {
|
||||
}
|
||||
|
||||
static validateOptions({
|
||||
customPages,
|
||||
publicServerURL,
|
||||
revokeSessionOnPasswordReset,
|
||||
expireInactiveSessions,
|
||||
@@ -133,9 +134,18 @@ export class Config {
|
||||
this.validateRateLimit(rateLimit);
|
||||
this.validateLogLevels(logLevels);
|
||||
this.validateDatabaseOptions(databaseOptions);
|
||||
this.validateCustomPages(customPages);
|
||||
this.validateAllowClientClassCreation(allowClientClassCreation);
|
||||
}
|
||||
|
||||
static validateCustomPages(customPages) {
|
||||
if (!customPages) return;
|
||||
|
||||
if (Object.prototype.toString.call(customPages) !== '[object Object]') {
|
||||
throw Error('Parse Server option customPages must be an object.');
|
||||
}
|
||||
}
|
||||
|
||||
static validateControllers({
|
||||
verifyUserEmails,
|
||||
userController,
|
||||
@@ -569,6 +579,7 @@ export class Config {
|
||||
if (Object.prototype.toString.call(databaseOptions) !== '[object Object]') {
|
||||
throw `databaseOptions must be an object`;
|
||||
}
|
||||
|
||||
if (databaseOptions.enableSchemaHooks === undefined) {
|
||||
databaseOptions.enableSchemaHooks = DatabaseOptions.enableSchemaHooks.default;
|
||||
} else if (typeof databaseOptions.enableSchemaHooks !== 'boolean') {
|
||||
|
||||
@@ -16,7 +16,7 @@ export const LogOrder = {
|
||||
ASCENDING: 'asc',
|
||||
};
|
||||
|
||||
export const logLevels = ['error', 'warn', 'info', 'debug', 'verbose', 'silly'];
|
||||
export const logLevels = ['error', 'warn', 'info', 'debug', 'verbose', 'silly', 'silent'];
|
||||
|
||||
export class LoggerController extends AdaptableController {
|
||||
constructor(adapter, appId, options = { logLevel: 'info' }) {
|
||||
|
||||
@@ -15,6 +15,4 @@
|
||||
*
|
||||
* If there are no deprecations, this must return an empty array.
|
||||
*/
|
||||
module.exports = [
|
||||
{ optionKey: 'encodeParseObjectInCloudFunction', changeNewDefault: 'true' },
|
||||
];
|
||||
module.exports = [{ optionKey: 'encodeParseObjectInCloudFunction', changeNewDefault: 'true' }];
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import Parse from 'parse/node';
|
||||
import { fromGlobalId } from 'graphql-relay';
|
||||
import { handleUpload } from '../loaders/filesMutations';
|
||||
import * as defaultGraphQLTypes from '../loaders/defaultGraphQLTypes';
|
||||
import * as objectsMutations from '../helpers/objectsMutations';
|
||||
|
||||
const transformTypes = async (
|
||||
@@ -28,27 +27,28 @@ const transformTypes = async (
|
||||
inputTypeField = classGraphQLUpdateTypeFields[field];
|
||||
}
|
||||
if (inputTypeField) {
|
||||
switch (true) {
|
||||
case inputTypeField.type === defaultGraphQLTypes.GEO_POINT_INPUT:
|
||||
const parseFieldType = parseClass.fields[field].type;
|
||||
switch (parseFieldType) {
|
||||
case 'GeoPoint':
|
||||
if (fields[field] === null) {
|
||||
fields[field] = { __op: 'Delete' };
|
||||
break;
|
||||
}
|
||||
fields[field] = transformers.geoPoint(fields[field]);
|
||||
break;
|
||||
case inputTypeField.type === defaultGraphQLTypes.POLYGON_INPUT:
|
||||
case 'Polygon':
|
||||
if (fields[field] === null) {
|
||||
fields[field] = { __op: 'Delete' };
|
||||
break;
|
||||
}
|
||||
fields[field] = transformers.polygon(fields[field]);
|
||||
break;
|
||||
case inputTypeField.type === defaultGraphQLTypes.FILE_INPUT:
|
||||
// Use `originalFields` to handle file upload since fields are a deepcopy and do not
|
||||
// keep the file object
|
||||
case 'File':
|
||||
// We need to use the originalFields to handle the file upload
|
||||
// since fields are a deepcopy and do not keep the file object
|
||||
fields[field] = await transformers.file(originalFields[field], req);
|
||||
break;
|
||||
case parseClass.fields[field].type === 'Relation':
|
||||
case 'Relation':
|
||||
fields[field] = await transformers.relation(
|
||||
parseClass.fields[field].targetClass,
|
||||
field,
|
||||
@@ -58,7 +58,7 @@ const transformTypes = async (
|
||||
req
|
||||
);
|
||||
break;
|
||||
case parseClass.fields[field].type === 'Pointer':
|
||||
case 'Pointer':
|
||||
if (fields[field] === null) {
|
||||
fields[field] = { __op: 'Delete' };
|
||||
break;
|
||||
|
||||
@@ -223,7 +223,7 @@ function matchesKeyConstraints(object, key, constraints) {
|
||||
// More complex cases
|
||||
for (var condition in constraints) {
|
||||
compareTo = constraints[condition];
|
||||
if (compareTo.__type) {
|
||||
if (compareTo?.__type) {
|
||||
compareTo = Parse._decode(key, compareTo);
|
||||
}
|
||||
switch (condition) {
|
||||
|
||||
@@ -54,6 +54,7 @@ module.exports.ParseServerOptions = {
|
||||
env: 'PARSE_SERVER_ACCOUNT_LOCKOUT',
|
||||
help: 'The account lockout policy for failed login attempts.',
|
||||
action: parsers.objectParser,
|
||||
type: 'AccountLockoutOptions',
|
||||
},
|
||||
allowClientClassCreation: {
|
||||
env: 'PARSE_SERVER_ALLOW_CLIENT_CLASS_CREATION',
|
||||
@@ -157,6 +158,7 @@ module.exports.ParseServerOptions = {
|
||||
env: 'PARSE_SERVER_CUSTOM_PAGES',
|
||||
help: 'custom pages for password validation and reset',
|
||||
action: parsers.objectParser,
|
||||
type: 'CustomPagesOptions',
|
||||
default: {},
|
||||
},
|
||||
databaseAdapter: {
|
||||
@@ -169,6 +171,7 @@ module.exports.ParseServerOptions = {
|
||||
env: 'PARSE_SERVER_DATABASE_OPTIONS',
|
||||
help: 'Options to pass to the database client',
|
||||
action: parsers.objectParser,
|
||||
type: 'DatabaseOptions',
|
||||
},
|
||||
databaseURI: {
|
||||
env: 'PARSE_SERVER_DATABASE_URI',
|
||||
@@ -256,7 +259,8 @@ module.exports.ParseServerOptions = {
|
||||
},
|
||||
extendSessionOnUse: {
|
||||
env: 'PARSE_SERVER_EXTEND_SESSION_ON_USE',
|
||||
help: 'Whether Parse Server should automatically extend a valid session by the sessionLength',
|
||||
help:
|
||||
"Whether Parse Server should automatically extend a valid session by the sessionLength. In order to reduce the number of session updates in the database, a session will only be extended when a request is received after at least half of the current session's lifetime has passed.",
|
||||
action: parsers.booleanParser,
|
||||
default: false,
|
||||
},
|
||||
@@ -273,6 +277,7 @@ module.exports.ParseServerOptions = {
|
||||
env: 'PARSE_SERVER_FILE_UPLOAD_OPTIONS',
|
||||
help: 'Options for file uploads',
|
||||
action: parsers.objectParser,
|
||||
type: 'FileUploadOptions',
|
||||
default: {},
|
||||
},
|
||||
graphQLPath: {
|
||||
@@ -294,6 +299,7 @@ module.exports.ParseServerOptions = {
|
||||
help:
|
||||
'Options for request idempotency to deduplicate identical requests that may be caused by network issues. Caution, this is an experimental feature that may not be appropriate for production.',
|
||||
action: parsers.objectParser,
|
||||
type: 'IdempotencyOptions',
|
||||
default: {},
|
||||
},
|
||||
javascriptKey: {
|
||||
@@ -309,11 +315,13 @@ module.exports.ParseServerOptions = {
|
||||
env: 'PARSE_SERVER_LIVE_QUERY',
|
||||
help: "parse-server's LiveQuery configuration object",
|
||||
action: parsers.objectParser,
|
||||
type: 'LiveQueryOptions',
|
||||
},
|
||||
liveQueryServerOptions: {
|
||||
env: 'PARSE_SERVER_LIVE_QUERY_SERVER_OPTIONS',
|
||||
help: 'Live query server configuration options (will start the liveQuery server)',
|
||||
action: parsers.objectParser,
|
||||
type: 'LiveQueryServerOptions',
|
||||
},
|
||||
loggerAdapter: {
|
||||
env: 'PARSE_SERVER_LOGGER_ADAPTER',
|
||||
@@ -328,6 +336,7 @@ module.exports.ParseServerOptions = {
|
||||
env: 'PARSE_SERVER_LOG_LEVELS',
|
||||
help: '(Optional) Overrides the log levels used internally by Parse Server to log events.',
|
||||
action: parsers.objectParser,
|
||||
type: 'LogLevels',
|
||||
default: {},
|
||||
},
|
||||
logsFolder: {
|
||||
@@ -408,12 +417,14 @@ module.exports.ParseServerOptions = {
|
||||
help:
|
||||
'The options for pages such as password reset and email verification. Caution, this is an experimental feature that may not be appropriate for production.',
|
||||
action: parsers.objectParser,
|
||||
type: 'PagesOptions',
|
||||
default: {},
|
||||
},
|
||||
passwordPolicy: {
|
||||
env: 'PARSE_SERVER_PASSWORD_POLICY',
|
||||
help: 'The password policy for enforcing password related rules.',
|
||||
action: parsers.objectParser,
|
||||
type: 'PasswordPolicyOptions',
|
||||
},
|
||||
playgroundPath: {
|
||||
env: 'PARSE_SERVER_PLAYGROUND_PATH',
|
||||
@@ -471,6 +482,7 @@ module.exports.ParseServerOptions = {
|
||||
help:
|
||||
"Options to limit repeated requests to Parse Server APIs. This can be used to protect sensitive endpoints such as `/requestPasswordReset` from brute-force attacks or Parse Server as a whole from denial-of-service (DoS) attacks.<br><br>\u2139\uFE0F Mind the following limitations:<br>- rate limits applied per IP address; this limits protection against distributed denial-of-service (DDoS) attacks where many requests are coming from various IP addresses<br>- if multiple Parse Server instances are behind a load balancer or ran in a cluster, each instance will calculate it's own request rates, independent from other instances; this limits the applicability of this feature when using a load balancer and another rate limiting solution that takes requests across all instances into account may be more suitable<br>- this feature provides basic protection against denial-of-service attacks, but a more sophisticated solution works earlier in the request flow and prevents a malicious requests to even reach a server instance; it's therefore recommended to implement a solution according to architecture and user case.",
|
||||
action: parsers.arrayParser,
|
||||
type: 'RateLimitOptions[]',
|
||||
default: [],
|
||||
},
|
||||
readOnlyMasterKey: {
|
||||
@@ -516,11 +528,13 @@ module.exports.ParseServerOptions = {
|
||||
env: 'PARSE_SERVER_SCHEMA',
|
||||
help: 'Defined schema',
|
||||
action: parsers.objectParser,
|
||||
type: 'SchemaOptions',
|
||||
},
|
||||
security: {
|
||||
env: 'PARSE_SERVER_SECURITY',
|
||||
help: 'The security options to identify and report weak security settings.',
|
||||
action: parsers.objectParser,
|
||||
type: 'SecurityOptions',
|
||||
default: {},
|
||||
},
|
||||
sendUserEmailVerification: {
|
||||
@@ -665,12 +679,14 @@ module.exports.PagesOptions = {
|
||||
env: 'PARSE_SERVER_PAGES_CUSTOM_ROUTES',
|
||||
help: 'The custom routes.',
|
||||
action: parsers.arrayParser,
|
||||
type: 'PagesRoute[]',
|
||||
default: [],
|
||||
},
|
||||
customUrls: {
|
||||
env: 'PARSE_SERVER_PAGES_CUSTOM_URLS',
|
||||
help: 'The URLs to the custom pages.',
|
||||
action: parsers.objectParser,
|
||||
type: 'PagesCustomUrlsOptions',
|
||||
default: {},
|
||||
},
|
||||
enableLocalization: {
|
||||
|
||||
@@ -47,7 +47,7 @@
|
||||
* @property {String} encryptionKey Key for encrypting your files
|
||||
* @property {Boolean} enforcePrivateUsers Set to true if new users should be created without public read and write access.
|
||||
* @property {Boolean} expireInactiveSessions Sets whether we should expire the inactive sessions, defaults to true. If false, all new sessions are created with no expiration date.
|
||||
* @property {Boolean} extendSessionOnUse Whether Parse Server should automatically extend a valid session by the sessionLength
|
||||
* @property {Boolean} extendSessionOnUse Whether Parse Server should automatically extend a valid session by the sessionLength. In order to reduce the number of session updates in the database, a session will only be extended when a request is received after at least half of the current session's lifetime has passed.
|
||||
* @property {String} fileKey Key for your files
|
||||
* @property {Adapter<FilesAdapter>} filesAdapter Adapter module for the files sub-system
|
||||
* @property {FileUploadOptions} fileUpload Options for file uploads
|
||||
|
||||
@@ -228,7 +228,7 @@ export interface ParseServerOptions {
|
||||
/* Session duration, in seconds, defaults to 1 year
|
||||
:DEFAULT: 31536000 */
|
||||
sessionLength: ?number;
|
||||
/* Whether Parse Server should automatically extend a valid session by the sessionLength
|
||||
/* Whether Parse Server should automatically extend a valid session by the sessionLength. In order to reduce the number of session updates in the database, a session will only be extended when a request is received after at least half of the current session's lifetime has passed.
|
||||
:DEFAULT: false */
|
||||
extendSessionOnUse: ?boolean;
|
||||
/* Default value for limit option on queries, defaults to `100`.
|
||||
|
||||
@@ -45,6 +45,7 @@ import { SecurityRouter } from './Routers/SecurityRouter';
|
||||
import CheckRunner from './Security/CheckRunner';
|
||||
import Deprecator from './Deprecator/Deprecator';
|
||||
import { DefinedSchemas } from './SchemaMigrations/DefinedSchemas';
|
||||
import OptionsDefinitions from './Options/Definitions';
|
||||
|
||||
// Mutate the Parse object to add the Cloud Code handlers
|
||||
addParseCloud();
|
||||
@@ -59,6 +60,58 @@ class ParseServer {
|
||||
constructor(options: ParseServerOptions) {
|
||||
// Scan for deprecated Parse Server options
|
||||
Deprecator.scanParseServerOptions(options);
|
||||
|
||||
const interfaces = JSON.parse(JSON.stringify(OptionsDefinitions));
|
||||
|
||||
function getValidObject(root) {
|
||||
const result = {};
|
||||
for (const key in root) {
|
||||
if (Object.prototype.hasOwnProperty.call(root[key], 'type')) {
|
||||
if (root[key].type.endsWith('[]')) {
|
||||
result[key] = [getValidObject(interfaces[root[key].type.slice(0, -2)])];
|
||||
} else {
|
||||
result[key] = getValidObject(interfaces[root[key].type]);
|
||||
}
|
||||
} else {
|
||||
result[key] = '';
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
const optionsBlueprint = getValidObject(interfaces['ParseServerOptions']);
|
||||
|
||||
function validateKeyNames(original, ref, name = '') {
|
||||
let result = [];
|
||||
const prefix = name + (name !== '' ? '.' : '');
|
||||
for (const key in original) {
|
||||
if (!Object.prototype.hasOwnProperty.call(ref, key)) {
|
||||
result.push(prefix + key);
|
||||
} else {
|
||||
if (ref[key] === '') continue;
|
||||
let res = [];
|
||||
if (Array.isArray(original[key]) && Array.isArray(ref[key])) {
|
||||
const type = ref[key][0];
|
||||
original[key].forEach((item, idx) => {
|
||||
if (typeof item === 'object' && item !== null) {
|
||||
res = res.concat(validateKeyNames(item, type, prefix + key + `[${idx}]`));
|
||||
}
|
||||
});
|
||||
} else if (typeof original[key] === 'object' && typeof ref[key] === 'object') {
|
||||
res = validateKeyNames(original[key], ref[key], prefix + key);
|
||||
}
|
||||
result = result.concat(res);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
const diff = validateKeyNames(options, optionsBlueprint);
|
||||
if (diff.length > 0) {
|
||||
const logger = logging.logger;
|
||||
logger.error(`Invalid Option Keys Found: ${diff.join(', ')}`);
|
||||
}
|
||||
|
||||
// Set option defaults
|
||||
injectDefaults(options);
|
||||
const {
|
||||
@@ -70,9 +123,9 @@ class ParseServer {
|
||||
// Initialize the node client SDK automatically
|
||||
Parse.initialize(appId, javascriptKey || 'unused', masterKey);
|
||||
Parse.serverURL = serverURL;
|
||||
|
||||
Config.validateOptions(options);
|
||||
const allControllers = controllers.getControllers(options);
|
||||
|
||||
options.state = 'initialized';
|
||||
this.config = Config.put(Object.assign({}, options, allControllers));
|
||||
this.config.masterKeyIpsStore = new Map();
|
||||
|
||||
@@ -58,8 +58,10 @@ function ParseServerRESTController(applicationId, router) {
|
||||
response => {
|
||||
if (options.returnStatus) {
|
||||
const status = response._status;
|
||||
const headers = response._headers;
|
||||
delete response._status;
|
||||
return { success: response, _status: status };
|
||||
delete response._headers;
|
||||
return { success: response, _status: status, _headers: headers };
|
||||
}
|
||||
return { success: response };
|
||||
},
|
||||
@@ -128,9 +130,9 @@ function ParseServerRESTController(applicationId, router) {
|
||||
})
|
||||
.then(
|
||||
resp => {
|
||||
const { response, status } = resp;
|
||||
const { response, status, headers = {} } = resp;
|
||||
if (options.returnStatus) {
|
||||
resolve({ ...response, _status: status });
|
||||
resolve({ ...response, _status: status, _headers: headers });
|
||||
} else {
|
||||
resolve(response);
|
||||
}
|
||||
|
||||
@@ -523,10 +523,14 @@ RestWrite.prototype.handleAuthData = async function (authData) {
|
||||
const r = await Auth.findUsersWithAuthData(this.config, authData);
|
||||
const results = this.filteredObjectsByACL(r);
|
||||
|
||||
if (results.length > 1) {
|
||||
const userId = this.getUserId();
|
||||
const userResult = results[0];
|
||||
const foundUserIsNotCurrentUser = userId && userResult && userId !== userResult.objectId;
|
||||
|
||||
if (results.length > 1 || foundUserIsNotCurrentUser) {
|
||||
// To avoid https://github.com/parse-community/parse-server/security/advisories/GHSA-8w3j-g983-8jh5
|
||||
// Let's run some validation before throwing
|
||||
await Auth.handleAuthDataValidation(authData, this, results[0]);
|
||||
await Auth.handleAuthDataValidation(authData, this, userResult);
|
||||
throw new Parse.Error(Parse.Error.ACCOUNT_ALREADY_LINKED, 'this auth is already used');
|
||||
}
|
||||
|
||||
@@ -544,13 +548,6 @@ RestWrite.prototype.handleAuthData = async function (authData) {
|
||||
|
||||
// User found with provided authData
|
||||
if (results.length === 1) {
|
||||
const userId = this.getUserId();
|
||||
const userResult = results[0];
|
||||
// Prevent duplicate authData id
|
||||
if (userId && userId !== userResult.objectId) {
|
||||
await Auth.handleAuthDataValidation(authData, this, results[0]);
|
||||
throw new Parse.Error(Parse.Error.ACCOUNT_ALREADY_LINKED, 'this auth is already used');
|
||||
}
|
||||
|
||||
this.storage.authProvider = Object.keys(authData).join(',');
|
||||
|
||||
|
||||
@@ -46,6 +46,9 @@ export class FeaturesRouter extends PromiseRouter {
|
||||
editClassLevelPermissions: true,
|
||||
editPointerPermissions: true,
|
||||
},
|
||||
settings: {
|
||||
securityCheck: !!config.security?.enableCheck,
|
||||
},
|
||||
};
|
||||
|
||||
return {
|
||||
|
||||
@@ -141,19 +141,21 @@ export class FunctionsRouter extends PromiseRouter {
|
||||
|
||||
return new Promise(function (resolve, reject) {
|
||||
const userString = req.auth && req.auth.user ? req.auth.user.id : undefined;
|
||||
const cleanInput = logger.truncateLogMessage(JSON.stringify(params));
|
||||
const { success, error } = FunctionsRouter.createResponseObject(
|
||||
result => {
|
||||
try {
|
||||
const cleanResult = logger.truncateLogMessage(JSON.stringify(result.response.result));
|
||||
logger[req.config.logLevels.cloudFunctionSuccess](
|
||||
`Ran cloud function ${functionName} for user ${userString} with:\n Input: ${cleanInput}\n Result: ${cleanResult}`,
|
||||
{
|
||||
functionName,
|
||||
params,
|
||||
user: userString,
|
||||
}
|
||||
);
|
||||
if (req.config.logLevels.cloudFunctionSuccess !== 'silent') {
|
||||
const cleanInput = logger.truncateLogMessage(JSON.stringify(params));
|
||||
const cleanResult = logger.truncateLogMessage(JSON.stringify(result.response.result));
|
||||
logger[req.config.logLevels.cloudFunctionSuccess](
|
||||
`Ran cloud function ${functionName} for user ${userString} with:\n Input: ${cleanInput}\n Result: ${cleanResult}`,
|
||||
{
|
||||
functionName,
|
||||
params,
|
||||
user: userString,
|
||||
}
|
||||
);
|
||||
}
|
||||
resolve(result);
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
@@ -161,16 +163,19 @@ export class FunctionsRouter extends PromiseRouter {
|
||||
},
|
||||
error => {
|
||||
try {
|
||||
logger[req.config.logLevels.cloudFunctionError](
|
||||
`Failed running cloud function ${functionName} for user ${userString} with:\n Input: ${cleanInput}\n Error: ` +
|
||||
JSON.stringify(error),
|
||||
{
|
||||
functionName,
|
||||
error,
|
||||
params,
|
||||
user: userString,
|
||||
}
|
||||
);
|
||||
if (req.config.logLevels.cloudFunctionError !== 'silent') {
|
||||
const cleanInput = logger.truncateLogMessage(JSON.stringify(params));
|
||||
logger[req.config.logLevels.cloudFunctionError](
|
||||
`Failed running cloud function ${functionName} for user ${userString} with:\n Input: ${cleanInput}\n Error: ` +
|
||||
JSON.stringify(error),
|
||||
{
|
||||
functionName,
|
||||
error,
|
||||
params,
|
||||
user: userString,
|
||||
}
|
||||
);
|
||||
}
|
||||
reject(error);
|
||||
} catch (e) {
|
||||
reject(e);
|
||||
|
||||
@@ -6,6 +6,7 @@ function logStartupOptions(options) {
|
||||
}
|
||||
// Keys that may include sensitive information that will be redacted in logs
|
||||
const keysToRedact = [
|
||||
'databaseAdapter',
|
||||
'databaseURI',
|
||||
'masterKey',
|
||||
'maintenanceKey',
|
||||
|
||||
@@ -531,19 +531,17 @@ export const addRateLimit = (route, config, cloud) => {
|
||||
const redisStore = {
|
||||
connectionPromise: Promise.resolve(),
|
||||
store: null,
|
||||
connected: false,
|
||||
};
|
||||
if (route.redisUrl) {
|
||||
const client = createClient({
|
||||
url: route.redisUrl,
|
||||
});
|
||||
redisStore.connectionPromise = async () => {
|
||||
if (redisStore.connected) {
|
||||
if (client.isOpen) {
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await client.connect();
|
||||
redisStore.connected = true;
|
||||
} catch (e) {
|
||||
const log = config?.loggerController || defaultLogger;
|
||||
log.error(`Could not connect to redisURL in rate limit: ${e}`);
|
||||
|
||||
@@ -382,6 +382,9 @@ function userIdForLog(auth) {
|
||||
}
|
||||
|
||||
function logTriggerAfterHook(triggerType, className, input, auth, logLevel) {
|
||||
if (logLevel === 'silent') {
|
||||
return;
|
||||
}
|
||||
const cleanInput = logger.truncateLogMessage(JSON.stringify(input));
|
||||
logger[logLevel](
|
||||
`${triggerType} triggered for ${className} for user ${userIdForLog(
|
||||
@@ -396,6 +399,9 @@ function logTriggerAfterHook(triggerType, className, input, auth, logLevel) {
|
||||
}
|
||||
|
||||
function logTriggerSuccessBeforeHook(triggerType, className, input, result, auth, logLevel) {
|
||||
if (logLevel === 'silent') {
|
||||
return;
|
||||
}
|
||||
const cleanInput = logger.truncateLogMessage(JSON.stringify(input));
|
||||
const cleanResult = logger.truncateLogMessage(JSON.stringify(result));
|
||||
logger[logLevel](
|
||||
@@ -411,6 +417,9 @@ function logTriggerSuccessBeforeHook(triggerType, className, input, result, auth
|
||||
}
|
||||
|
||||
function logTriggerErrorBeforeHook(triggerType, className, input, auth, error, logLevel) {
|
||||
if (logLevel === 'silent') {
|
||||
return;
|
||||
}
|
||||
const cleanInput = logger.truncateLogMessage(JSON.stringify(input));
|
||||
logger[logLevel](
|
||||
`${triggerType} failed for ${className} for user ${userIdForLog(
|
||||
|
||||
Reference in New Issue
Block a user