2017-12-30 20:44:18 -05:00
|
|
|
// @flow
|
2025-11-08 15:48:29 +01:00
|
|
|
import { format as formatUrl, parse as parseUrl } from '../../../vendor/mongodbUrl';
|
|
|
|
|
import type { QueryOptions, QueryType, SchemaType, StorageClass } from '../StorageAdapter';
|
|
|
|
|
import { StorageAdapter } from '../StorageAdapter';
|
2018-09-01 13:58:06 -04:00
|
|
|
import MongoCollection from './MongoCollection';
|
2016-05-24 17:21:20 -07:00
|
|
|
import MongoSchemaCollection from './MongoSchemaCollection';
|
|
|
|
|
import {
|
|
|
|
|
mongoObjectToParseObject,
|
2025-11-08 15:48:29 +01:00
|
|
|
parseObjectToMongoObjectForCreate,
|
2016-05-24 17:21:20 -07:00
|
|
|
transformKey,
|
2017-12-29 21:32:40 -06:00
|
|
|
transformPointerString,
|
2025-11-08 15:48:29 +01:00
|
|
|
transformUpdate,
|
|
|
|
|
transformWhere,
|
2016-05-24 17:21:20 -07:00
|
|
|
} from './MongoTransform';
|
2017-12-30 20:44:18 -05:00
|
|
|
// @flow-disable-next
|
2018-09-01 13:58:06 -04:00
|
|
|
import Parse from 'parse/node';
|
2017-12-30 20:44:18 -05:00
|
|
|
// @flow-disable-next
|
2018-09-01 13:58:06 -04:00
|
|
|
import _ from 'lodash';
|
2025-11-08 18:41:45 +01:00
|
|
|
import defaults, { ParseServerDatabaseOptions } from '../../../defaults';
|
2018-09-01 13:58:06 -04:00
|
|
|
import logger from '../../../logger';
|
2025-11-08 15:48:29 +01:00
|
|
|
import Utils from '../../../Utils';
|
2016-03-01 20:04:15 -08:00
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
// @flow-disable-next
|
2016-12-07 15:17:05 -08:00
|
|
|
const mongodb = require('mongodb');
|
|
|
|
|
const MongoClient = mongodb.MongoClient;
|
2017-06-21 17:18:10 -03:00
|
|
|
const ReadPreference = mongodb.ReadPreference;
|
2016-02-27 02:23:57 -08:00
|
|
|
|
2016-03-09 15:20:59 -08:00
|
|
|
const MongoSchemaCollectionName = '_SCHEMA';
|
2018-07-03 11:13:08 -04:00
|
|
|
|
2026-01-25 00:15:01 +01:00
|
|
|
/**
 * Determines if a MongoDB error is a transient infrastructure error
 * (connection pool, network, server selection) as opposed to a query-level error.
 */
function isTransientError(error) {
  if (!error) {
    return false;
  }

  // Connection pool, network, and server selection errors
  const transientNames = new Set([
    'MongoWaitQueueTimeoutError',
    'MongoServerSelectionError',
    'MongoNetworkTimeoutError',
    'MongoNetworkError',
  ]);
  if (transientNames.has(error.name)) {
    return true;
  }

  // Check for MongoDB's transient transaction error label
  return (
    typeof error.hasErrorLabel === 'function' && error.hasErrorLabel('TransientTransactionError')
  );
}
|
|
|
|
|
|
2020-10-01 23:58:23 +02:00
|
|
|
// Resolves to all collections in this adapter's database, excluding MongoDB
// system collections and collections that don't carry this adapter's prefix.
const storageAdapterAllCollections = async mongoAdapter => {
  await mongoAdapter.connect();
  const collections = await mongoAdapter.database.collections();
  return collections.filter(collection => {
    if (collection.namespace.match(/\.system\./)) {
      return false;
    }
    // TODO: If you have one app with a collection prefix that happens to be a prefix of another
    // apps prefix, this will go very very badly. We should fix that somehow.
    return collection.collectionName.indexOf(mongoAdapter._collectionPrefix) == 0;
  });
};
|
2018-07-03 11:13:08 -04:00
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
// Shallow-copies the Parse schema and strips fields that are not stored in the
// Mongo _SCHEMA document: the ACL permission pseudo-fields, and the hashed
// password for _User.
// NOTE: the object spread is shallow, so the deletions below also mutate the
// caller's `fields` object.
const convertParseSchemaToMongoSchema = ({ ...schema }) => {
  // Guard against schemas without a fields map instead of throwing a TypeError.
  if (schema.fields) {
    delete schema.fields._rperm;
    delete schema.fields._wperm;

    if (schema.className === '_User') {
      // Legacy mongo adapter knows about the difference between password and _hashed_password.
      // Future database adapters will only know about _hashed_password.
      // Note: Parse Server will bring back password with injectDefaultSchema, so we don't need
      // to add _hashed_password back ever.
      delete schema.fields._hashed_password;
    }
  }

  return schema;
};
|
2016-06-13 12:57:20 -07:00
|
|
|
|
2016-06-17 09:59:16 -07:00
|
|
|
// Builds the document stored in the _SCHEMA collection for a class: one entry
// per field (in legacy mongo type notation) plus an optional _metadata bag
// holding per-field options, class level permissions, and indexes.
const mongoSchemaFromFieldsAndClassNameAndCLP = (
  fields,
  className,
  classLevelPermissions,
  indexes
) => {
  const mongoObject = {
    _id: className,
    objectId: 'string',
    updatedAt: 'string',
    createdAt: 'string',
    _metadata: undefined,
  };

  // Lazily create the _metadata bag only when something needs to live in it.
  const ensureMetadata = () => {
    mongoObject._metadata = mongoObject._metadata || {};
    return mongoObject._metadata;
  };

  for (const fieldName in fields) {
    const { type, targetClass, ...fieldOptions } = fields[fieldName];
    mongoObject[fieldName] = MongoSchemaCollection.parseFieldTypeToMongoFieldType({
      type,
      targetClass,
    });
    // Any per-field options beyond type/targetClass are kept under _metadata.
    if (fieldOptions && Object.keys(fieldOptions).length > 0) {
      const metadata = ensureMetadata();
      metadata.fields_options = metadata.fields_options || {};
      metadata.fields_options[fieldName] = fieldOptions;
    }
  }

  if (typeof classLevelPermissions !== 'undefined') {
    const metadata = ensureMetadata();
    if (classLevelPermissions) {
      metadata.class_permissions = classLevelPermissions;
    } else {
      // An explicitly passed falsy value clears any stored permissions.
      delete metadata.class_permissions;
    }
  }

  if (indexes && typeof indexes === 'object' && Object.keys(indexes).length > 0) {
    ensureMetadata().indexes = indexes;
  }

  if (!mongoObject._metadata) {
    // cleanup the unused _metadata
    delete mongoObject._metadata;
  }

  return mongoObject;
};
|
2016-06-17 09:59:16 -07:00
|
|
|
|
2021-09-02 03:46:48 -07:00
|
|
|
/**
 * Validates an `explain` option against the values the MongoDB driver accepts.
 * Falsy values (undefined, false, null) mean "no explain" and are always valid.
 * @throws {Parse.Error} INVALID_QUERY when a truthy value is not allowed.
 */
function validateExplainValue(explain) {
  if (!explain) {
    return;
  }
  // The list of allowed explain values is from node-mongodb-native/lib/explain.js
  const allowed = new Set([
    'queryPlanner',
    'queryPlannerExtended',
    'executionStats',
    'allPlansExecution',
    false,
    true,
  ]);
  if (!allowed.has(explain)) {
    throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Invalid value for explain');
  }
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
export class MongoStorageAdapter implements StorageAdapter {
|
2016-02-27 02:23:57 -08:00
|
|
|
// Private
|
|
|
|
|
_uri: string;
|
2016-04-13 05:21:53 -07:00
|
|
|
_collectionPrefix: string;
|
|
|
|
|
_mongoOptions: Object;
|
2021-03-16 16:05:36 -05:00
|
|
|
_onchange: any;
|
|
|
|
|
_stream: any;
|
2025-11-08 15:48:29 +01:00
|
|
|
_logClientEvents: ?Array<any>;
|
2026-01-24 22:44:38 +01:00
|
|
|
_clientMetadata: ?{ name: string, version: string };
|
2016-02-27 02:23:57 -08:00
|
|
|
// Public
|
2019-10-08 18:39:32 +00:00
|
|
|
connectionPromise: ?Promise<any>;
|
2017-12-30 20:44:18 -05:00
|
|
|
database: any;
|
|
|
|
|
client: MongoClient;
|
|
|
|
|
_maxTimeMS: ?number;
|
|
|
|
|
canSortOnJoinTables: boolean;
|
2021-03-16 16:05:36 -05:00
|
|
|
enableSchemaHooks: boolean;
|
2023-02-27 11:55:47 +11:00
|
|
|
schemaCacheTtl: ?number;
|
2025-10-10 00:03:52 +02:00
|
|
|
disableIndexFieldValidation: boolean;
|
2017-12-30 20:44:18 -05:00
|
|
|
|
2020-12-13 11:19:04 -06:00
|
|
|
/**
 * @param uri - MongoDB connection string (defaults to `defaults.DefaultMongoURI`).
 * @param collectionPrefix - Prefix prepended to every collection name of this app.
 * @param mongoOptions - Mixed bag of MongoDB driver options plus Parse Server-specific
 *   database options; the Parse Server-specific keys are stripped from the copy that
 *   is later handed to the MongoDB client.
 */
constructor({ uri = defaults.DefaultMongoURI, collectionPrefix = '', mongoOptions = {} }: any) {
  this._uri = uri;
  this._collectionPrefix = collectionPrefix;
  // No-op until a caller registers a schema-change callback via watch().
  this._onchange = () => {};

  // MaxTimeMS is not a global MongoDB client option, it is applied per operation.
  this._maxTimeMS = mongoOptions.maxTimeMS;
  this.canSortOnJoinTables = true;
  this.enableSchemaHooks = !!mongoOptions.enableSchemaHooks;
  this.schemaCacheTtl = mongoOptions.schemaCacheTtl;
  this.disableIndexFieldValidation = !!mongoOptions.disableIndexFieldValidation;
  this._logClientEvents = mongoOptions.logClientEvents;
  this._clientMetadata = mongoOptions.clientMetadata;

  // Create a copy of mongoOptions and remove Parse Server-specific options that should not
  // be passed to MongoDB client. Note: We only delete from this._mongoOptions, not from the
  // original mongoOptions object, because other components (like DatabaseController) need
  // access to these options.
  this._mongoOptions = { ...mongoOptions };
  for (const key of ParseServerDatabaseOptions) {
    delete this._mongoOptions[key];
  }
}
|
|
|
|
|
|
2021-03-16 16:05:36 -05:00
|
|
|
/**
 * Registers the callback fired when the _SCHEMA collection changes.
 * The change stream that triggers it is only set up when `enableSchemaHooks`
 * is enabled (see `_schemaCollection`).
 */
watch(callback: () => void): void {
  this._onchange = callback;
}
|
|
|
|
|
|
2016-02-27 02:23:57 -08:00
|
|
|
/**
 * Opens (or reuses) the MongoDB connection for this adapter.
 *
 * The in-flight promise is cached in `this.connectionPromise` so concurrent
 * callers share a single connection attempt; the cache is cleared on failure
 * and on client 'error'/'close' events so a later call reconnects.
 */
connect() {
  if (this.connectionPromise) {
    return this.connectionPromise;
  }

  // parsing and re-formatting causes the auth value (if there) to get URI
  // encoded
  const encodedUri = formatUrl(parseUrl(this._uri));

  // Only use driverInfo if clientMetadata option is set
  const options = { ...this._mongoOptions };
  if (this._clientMetadata) {
    options.driverInfo = {
      name: this._clientMetadata.name,
      version: this._clientMetadata.version
    };
  }

  this.connectionPromise = MongoClient.connect(encodedUri, options)
    .then(client => {
      // Starting mongoDB 3.0, the MongoClient.connect don't return a DB anymore but a client
      // Fortunately, we can get back the options and use them to select the proper DB.
      // https://github.com/mongodb/node-mongodb-native/blob/2c35d76f08574225b8db02d7bef687123e6bb018/lib/mongo_client.js#L885
      const options = client.s.options;
      const database = client.db(options.dbName);
      if (!database) {
        delete this.connectionPromise;
        return;
      }
      // Drop the cached promise when the driver reports trouble so the next
      // adapter operation triggers a fresh connection attempt.
      client.on('error', () => {
        delete this.connectionPromise;
      });
      client.on('close', () => {
        delete this.connectionPromise;
      });

      // Set up client event logging if configured
      if (this._logClientEvents && Array.isArray(this._logClientEvents)) {
        this._logClientEvents.forEach(eventConfig => {
          client.on(eventConfig.name, event => {
            let logData = {};
            // No key filter configured: log the whole event payload.
            if (!eventConfig.keys || eventConfig.keys.length === 0) {
              logData = event;
            } else {
              // Otherwise pick only the configured (possibly nested) key paths.
              eventConfig.keys.forEach(keyPath => {
                logData[keyPath] = _.get(event, keyPath);
              });
            }

            // Validate log level exists, fallback to 'info'
            const logLevel = typeof logger[eventConfig.logLevel] === 'function' ? eventConfig.logLevel : 'info';

            // Safe JSON serialization with Map/Set and circular reference support
            const logMessage = `MongoDB client event ${eventConfig.name}: ${JSON.stringify(logData, Utils.getCircularReplacer())}`;

            logger[logLevel](logMessage);
          });
        });
      }

      this.client = client;
      this.database = database;
    })
    .catch(err => {
      delete this.connectionPromise;
      return Promise.reject(err);
    });

  return this.connectionPromise;
}
|
2016-02-27 03:02:38 -08:00
|
|
|
|
2018-05-01 07:37:38 -04:00
|
|
|
/**
 * Central error hook for adapter operations. Tears down the cached connection
 * on Mongo "unauthorized" errors (code 13), converts transient infrastructure
 * errors into a generic INTERNAL_SERVER_ERROR, and rethrows everything else
 * unchanged. This method always throws; the Promise<T> return type exists so
 * it can be used directly in a `.catch(...)` position.
 */
handleError<T>(error: ?(Error | Parse.Error)): Promise<T> {
  const isUnauthorized = !!error && error.code === 13;
  if (isUnauthorized) {
    // Drop the cached client so the next operation reconnects from scratch.
    delete this.client;
    delete this.database;
    delete this.connectionPromise;
    logger.error('Received unauthorized error', { error: error });
  }

  // Transform infrastructure/transient errors into Parse.Error.INTERNAL_SERVER_ERROR
  if (isTransientError(error)) {
    logger.error('Database transient error', error);
    throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Database error');
  }

  throw error;
}
|
|
|
|
|
|
2023-06-08 19:04:49 +10:00
|
|
|
async handleShutdown() {
|
2017-12-30 00:23:43 -05:00
|
|
|
if (!this.client) {
|
2023-06-08 19:04:49 +10:00
|
|
|
return;
|
2017-05-08 13:06:01 -04:00
|
|
|
}
|
2023-06-08 19:04:49 +10:00
|
|
|
await this.client.close(false);
|
|
|
|
|
delete this.connectionPromise;
|
2017-05-08 13:06:01 -04:00
|
|
|
}
|
|
|
|
|
|
2016-06-12 16:35:13 -07:00
|
|
|
_adaptiveCollection(name: string) {
|
2016-03-01 20:04:15 -08:00
|
|
|
return this.connect()
|
2016-04-13 16:45:07 -07:00
|
|
|
.then(() => this.database.collection(this._collectionPrefix + name))
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(rawCollection => new MongoCollection(rawCollection))
|
|
|
|
|
.catch(err => this.handleError(err));
|
2016-03-01 20:04:15 -08:00
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
_schemaCollection(): Promise<MongoSchemaCollection> {
|
2016-03-09 15:20:59 -08:00
|
|
|
return this.connect()
|
2016-06-12 16:35:13 -07:00
|
|
|
.then(() => this._adaptiveCollection(MongoSchemaCollectionName))
|
2021-03-16 16:05:36 -05:00
|
|
|
.then(collection => {
|
|
|
|
|
if (!this._stream && this.enableSchemaHooks) {
|
|
|
|
|
this._stream = collection._mongoCollection.watch();
|
|
|
|
|
this._stream.on('change', () => this._onchange());
|
|
|
|
|
}
|
|
|
|
|
return new MongoSchemaCollection(collection);
|
|
|
|
|
});
|
2016-03-09 15:20:59 -08:00
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
classExists(name: string) {
|
2018-09-01 13:58:06 -04:00
|
|
|
return this.connect()
|
|
|
|
|
.then(() => {
|
2020-12-13 11:19:04 -06:00
|
|
|
return this.database.listCollections({ name: this._collectionPrefix + name }).toArray();
|
2018-09-01 13:58:06 -04:00
|
|
|
})
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collections => {
|
2018-09-01 13:58:06 -04:00
|
|
|
return collections.length > 0;
|
|
|
|
|
})
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-02-29 17:04:38 -08:00
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
setClassLevelPermissions(className: string, CLPs: any): Promise<void> {
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._schemaCollection()
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(schemaCollection =>
|
2018-09-01 13:58:06 -04:00
|
|
|
schemaCollection.updateSchema(className, {
|
|
|
|
|
$set: { '_metadata.class_permissions': CLPs },
|
|
|
|
|
})
|
|
|
|
|
)
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
/**
 * Applies a schema-format index specification to `className`.
 *
 * `submittedIndexes` maps index name -> key spec, where `{ __op: 'Delete' }`
 * requests dropping that index. Throws INVALID_QUERY when redefining an
 * existing index, deleting a missing one, or (unless
 * `disableIndexFieldValidation` is set) indexing a field absent from `fields`.
 * The merged index map is persisted under `_metadata.indexes` in _SCHEMA.
 * Note: `existingIndexes` is mutated in place while merging.
 */
setIndexesWithSchemaFormat(
  className: string,
  submittedIndexes: any,
  existingIndexes: any = {},
  fields: any
): Promise<void> {
  if (submittedIndexes === undefined) {
    return Promise.resolve();
  }
  if (Object.keys(existingIndexes).length === 0) {
    // Every MongoDB collection implicitly has the default _id index.
    existingIndexes = { _id_: { _id: 1 } };
  }
  const deletePromises = [];
  const insertedIndexes = [];
  Object.keys(submittedIndexes).forEach(name => {
    const field = submittedIndexes[name];
    if (existingIndexes[name] && field.__op !== 'Delete') {
      throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`);
    }
    if (!existingIndexes[name] && field.__op === 'Delete') {
      throw new Parse.Error(
        Parse.Error.INVALID_QUERY,
        `Index ${name} does not exist, cannot delete.`
      );
    }
    if (field.__op === 'Delete') {
      const promise = this.dropIndex(className, name);
      deletePromises.push(promise);
      delete existingIndexes[name];
    } else {
      Object.keys(field).forEach(key => {
        if (
          !this.disableIndexFieldValidation &&
          // Pointer columns are stored as `_p_<name>`; validate the bare name.
          !Object.prototype.hasOwnProperty.call(
            fields,
            key.indexOf('_p_') === 0 ? key.replace('_p_', '') : key
          )
        ) {
          throw new Parse.Error(
            Parse.Error.INVALID_QUERY,
            `Field ${key} does not exist, cannot add index.`
          );
        }
      });
      existingIndexes[name] = field;
      insertedIndexes.push({
        key: field,
        name,
      });
    }
  });
  let insertPromise = Promise.resolve();
  if (insertedIndexes.length > 0) {
    insertPromise = this.createIndexes(className, insertedIndexes);
  }
  // Drops first, then creations, then persist the merged map in _SCHEMA.
  return Promise.all(deletePromises)
    .then(() => insertPromise)
    .then(() => this._schemaCollection())
    .then(schemaCollection =>
      schemaCollection.updateSchema(className, {
        $set: { '_metadata.indexes': existingIndexes },
      })
    )
    .catch(err => this.handleError(err));
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
/**
 * Reads the collection's actual indexes from MongoDB and stores them in the
 * _SCHEMA document under `_metadata.indexes`. Any failure (e.g. the collection
 * does not exist yet) is swallowed and the promise resolves successfully.
 */
setIndexesFromMongo(className: string) {
  return this.getIndexes(className)
    .then(indexes => {
      indexes = indexes.reduce((obj, index) => {
        // Text indexes surface as internal _fts/_ftsx keys; rewrite them back
        // to `field: 'text'` entries using the index weights.
        if (index.key._fts) {
          delete index.key._fts;
          delete index.key._ftsx;
          for (const field in index.weights) {
            index.key[field] = 'text';
          }
        }
        obj[index.name] = index.key;
        return obj;
      }, {});
      return this._schemaCollection().then(schemaCollection =>
        schemaCollection.updateSchema(className, {
          $set: { '_metadata.indexes': indexes },
        })
      );
    })
    .catch(err => this.handleError(err))
    // The trailing catch also swallows whatever handleError rethrows,
    // turning every failure of this method into a successful resolution.
    .catch(() => {
      // Ignore if collection not found
      return Promise.resolve();
    });
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
createClass(className: string, schema: SchemaType): Promise<void> {
|
2016-06-13 12:57:20 -07:00
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
2018-09-01 13:58:06 -04:00
|
|
|
const mongoObject = mongoSchemaFromFieldsAndClassNameAndCLP(
|
|
|
|
|
schema.fields,
|
|
|
|
|
className,
|
|
|
|
|
schema.classLevelPermissions,
|
|
|
|
|
schema.indexes
|
|
|
|
|
);
|
2016-06-17 09:59:16 -07:00
|
|
|
mongoObject._id = className;
|
2020-12-13 11:19:04 -06:00
|
|
|
return this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields)
|
2017-11-25 13:55:34 -06:00
|
|
|
.then(() => this._schemaCollection())
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(schemaCollection => schemaCollection.insertSchema(mongoObject))
|
|
|
|
|
.catch(err => this.handleError(err));
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2021-11-01 09:28:49 -04:00
|
|
|
async updateFieldOptions(className: string, fieldName: string, type: any) {
|
|
|
|
|
const schemaCollection = await this._schemaCollection();
|
|
|
|
|
await schemaCollection.updateFieldOptions(className, fieldName, type);
|
|
|
|
|
}
|
|
|
|
|
|
2020-12-13 11:19:04 -06:00
|
|
|
addFieldIfNotExists(className: string, fieldName: string, type: any): Promise<void> {
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._schemaCollection()
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type))
|
2018-03-02 10:24:58 -05:00
|
|
|
.then(() => this.createIndexesIfNeeded(className, fieldName, type))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.)
|
|
|
|
|
// and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible.
|
2017-12-30 20:44:18 -05:00
|
|
|
deleteClass(className: string) {
|
2018-09-01 13:58:06 -04:00
|
|
|
return (
|
|
|
|
|
this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection => collection.drop())
|
|
|
|
|
.catch(error => {
|
2018-09-01 13:58:06 -04:00
|
|
|
// 'ns not found' means collection was already gone. Ignore deletion attempt.
|
|
|
|
|
if (error.message == 'ns not found') {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
throw error;
|
|
|
|
|
})
|
|
|
|
|
// We've dropped the collection, now remove the _SCHEMA document
|
|
|
|
|
.then(() => this._schemaCollection())
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(schemaCollection => schemaCollection.findAndDeleteSchema(className))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err))
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2016-02-29 17:04:38 -08:00
|
|
|
}
|
2016-04-13 16:45:07 -07:00
|
|
|
|
2018-07-03 11:13:08 -04:00
|
|
|
deleteAllClasses(fast: boolean) {
|
2020-10-01 23:58:23 +02:00
|
|
|
return storageAdapterAllCollections(this).then(collections =>
|
2018-09-01 13:58:06 -04:00
|
|
|
Promise.all(
|
2020-12-13 11:19:04 -06:00
|
|
|
collections.map(collection => (fast ? collection.deleteMany({}) : collection.drop()))
|
2018-09-01 13:58:06 -04:00
|
|
|
)
|
|
|
|
|
);
|
2016-02-27 03:02:38 -08:00
|
|
|
}
|
2016-04-12 19:06:58 -07:00
|
|
|
|
|
|
|
|
// Remove the column and all the data. For Relations, the _Join collection is handled
|
|
|
|
|
// specially, this function does not delete _Join columns. It should, however, indicate
|
|
|
|
|
// that the relation fields does not exist anymore. In mongo, this means removing it from
|
|
|
|
|
// the _SCHEMA collection. There should be no actual data in the collection under the same name
|
|
|
|
|
// as the relation column, so it's fine to attempt to delete it. If the fields listed to be
|
|
|
|
|
// deleted do not exist, this function should return successfully anyways. Checking for
|
|
|
|
|
// attempts to delete non-existent fields is the responsibility of Parse Server.
|
|
|
|
|
|
|
|
|
|
// Pointer field names are passed for legacy reasons: the original mongo
|
|
|
|
|
// format stored pointer field names differently in the database, and therefore
|
|
|
|
|
// needed to know the type of the field before it could delete it. Future database
|
2017-08-29 11:20:51 -07:00
|
|
|
// adapters should ignore the pointerFieldNames argument. All the field names are in
|
2016-04-12 19:06:58 -07:00
|
|
|
// fieldNames, they show up additionally in the pointerFieldNames database for use
|
|
|
|
|
// by the mongo adapter, which deals with the legacy mongo format.
|
|
|
|
|
|
|
|
|
|
// This function is not obligated to delete fields atomically. It is given the field
|
|
|
|
|
// names in a list so that databases that are capable of deleting fields atomically
|
|
|
|
|
// may do so.
|
|
|
|
|
|
|
|
|
|
// Returns a Promise.
|
2017-12-30 20:44:18 -05:00
|
|
|
deleteFields(className: string, schema: SchemaType, fieldNames: string[]) {
|
2020-10-01 23:58:23 +02:00
|
|
|
const mongoFormatNames = fieldNames.map(fieldName => {
|
2016-06-12 16:35:13 -07:00
|
|
|
if (schema.fields[fieldName].type === 'Pointer') {
|
2018-09-01 13:58:06 -04:00
|
|
|
return `_p_${fieldName}`;
|
2016-06-12 16:35:13 -07:00
|
|
|
} else {
|
|
|
|
|
return fieldName;
|
|
|
|
|
}
|
|
|
|
|
});
|
2018-09-01 13:58:06 -04:00
|
|
|
const collectionUpdate = { $unset: {} };
|
2020-10-01 23:58:23 +02:00
|
|
|
mongoFormatNames.forEach(name => {
|
2016-04-12 19:06:58 -07:00
|
|
|
collectionUpdate['$unset'][name] = null;
|
|
|
|
|
});
|
|
|
|
|
|
2020-07-27 02:22:04 +02:00
|
|
|
const collectionFilter = { $or: [] };
|
|
|
|
|
mongoFormatNames.forEach(name => {
|
|
|
|
|
collectionFilter['$or'].push({ [name]: { $exists: true } });
|
|
|
|
|
});
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
const schemaUpdate = { $unset: {} };
|
2020-10-01 23:58:23 +02:00
|
|
|
fieldNames.forEach(name => {
|
2016-04-12 19:06:58 -07:00
|
|
|
schemaUpdate['$unset'][name] = null;
|
2019-07-25 21:13:59 -07:00
|
|
|
schemaUpdate['$unset'][`_metadata.fields_options.${name}`] = null;
|
2016-04-12 19:06:58 -07:00
|
|
|
});
|
|
|
|
|
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._adaptiveCollection(className)
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(collection => collection.updateMany(collectionFilter, collectionUpdate))
|
2017-06-20 09:15:26 -07:00
|
|
|
.then(() => this._schemaCollection())
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(schemaCollection => schemaCollection.updateSchema(className, schemaUpdate))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-04-12 19:06:58 -07:00
|
|
|
}
|
2016-04-14 19:24:56 -04:00
|
|
|
|
2016-04-18 17:06:00 -07:00
|
|
|
// Return a promise for all schemas known to this adapter, in Parse format. In case the
|
|
|
|
|
// schemas cannot be retrieved, returns a promise that rejects. Requirements for the
|
|
|
|
|
// rejection reason are TBD.
|
2017-12-30 20:44:18 -05:00
|
|
|
getAllClasses(): Promise<StorageClass[]> {
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._schemaCollection()
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(schemasCollection => schemasCollection._fetchAllSchemasFrom_SCHEMA())
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-04-18 17:06:00 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Return a promise for the schema with the given name, in Parse format. If
|
|
|
|
|
// this adapter doesn't know about the schema, return a promise that rejects with
|
|
|
|
|
// undefined as the reason.
|
2017-12-30 20:44:18 -05:00
|
|
|
getClass(className: string): Promise<StorageClass> {
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._schemaCollection()
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(schemasCollection => schemasCollection._fetchOneSchemaFrom_SCHEMA(className))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-04-18 17:06:00 -07:00
|
|
|
}
|
|
|
|
|
|
2016-05-24 17:21:20 -07:00
|
|
|
// TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema,
// and should infer from the type. Or maybe does need the schema for validations. Or maybe needs
// the schema only for the legacy mongo format. We'll figure that out later.
//
// Resolves with `{ ops: [mongoObject] }` on success. A MongoDB duplicate-key
// error (code 11000) is converted into Parse.Error.DUPLICATE_VALUE, carrying
// the original error as `underlyingError` and, when it can be parsed from the
// driver's message, the offending field name in `userInfo.duplicated_field`.
createObject(className: string, schema: SchemaType, object: any, transactionalSession: ?any) {
  schema = convertParseSchemaToMongoSchema(schema);
  const mongoObject = parseObjectToMongoObjectForCreate(className, object, schema);
  return this._adaptiveCollection(className)
    .then(collection => collection.insertOne(mongoObject, transactionalSession))
    .then(() => ({ ops: [mongoObject] }))
    .catch(error => {
      // 11000 is MongoDB's duplicate key error code (unique index violation).
      if (error.code === 11000) {
        logger.error('Duplicate key error:', error.message);
        const err = new Parse.Error(
          Parse.Error.DUPLICATE_VALUE,
          'A duplicate value for a field with unique values was provided'
        );
        err.underlyingError = error;
        if (error.message) {
          // Extract the field name from the unique index name ("<field>_1")
          // embedded in the driver's error message.
          const matches = error.message.match(/index:[\sa-zA-Z0-9_\-\.]+\$?([a-zA-Z_-]+)_1/);
          if (matches && Array.isArray(matches)) {
            err.userInfo = { duplicated_field: matches[1] };
          }
        }
        throw err;
      }
      throw error;
    })
    .catch(err => this.handleError(err));
}
|
|
|
|
|
|
2016-05-24 17:21:20 -07:00
|
|
|
// Remove all objects that match the given Parse Query.
// If no objects match, reject with OBJECT_NOT_FOUND. If objects are found and deleted, resolve with undefined.
// If there is some other error, reject with INTERNAL_SERVER_ERROR.
deleteObjectsByQuery(
  className: string,
  schema: SchemaType,
  query: QueryType,
  transactionalSession: ?any
) {
  schema = convertParseSchemaToMongoSchema(schema);
  return this._adaptiveCollection(className)
    .then(collection => {
      const mongoWhere = transformWhere(className, query, schema);
      return collection.deleteMany(mongoWhere, transactionalSession);
    })
    .catch(err => this.handleError(err))
    // NOTE: the two-argument then is deliberate — the rejection handler maps
    // every upstream failure (including what handleError rethrows) to a generic
    // INTERNAL_SERVER_ERROR, but it does NOT intercept the OBJECT_NOT_FOUND
    // thrown by the fulfillment handler below.
    .then(
      ({ deletedCount }) => {
        if (deletedCount === 0) {
          throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.');
        }
        return Promise.resolve();
      },
      () => {
        throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, 'Database adapter error');
      }
    );
}
|
|
|
|
|
|
2016-05-24 17:21:20 -07:00
|
|
|
// Apply the update to all objects that match the given Parse Query.
|
2018-09-01 13:58:06 -04:00
|
|
|
updateObjectsByQuery(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
2019-07-31 02:41:07 -07:00
|
|
|
update: any,
|
|
|
|
|
transactionalSession: ?any
|
2018-09-01 13:58:06 -04:00
|
|
|
) {
|
2016-06-13 12:57:20 -07:00
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
2016-05-24 17:21:20 -07:00
|
|
|
const mongoUpdate = transformUpdate(className, update, schema);
|
|
|
|
|
const mongoWhere = transformWhere(className, query, schema);
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._adaptiveCollection(className)
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(collection => collection.updateMany(mongoWhere, mongoUpdate, transactionalSession))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-05-24 17:21:20 -07:00
|
|
|
}
|
|
|
|
|
|
2016-06-12 13:39:41 -07:00
|
|
|
// Atomically finds and updates an object based on query.
|
2016-06-11 00:43:02 -07:00
|
|
|
// Return value not currently well specified.
|
2018-09-01 13:58:06 -04:00
|
|
|
findOneAndUpdate(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
2019-07-31 02:41:07 -07:00
|
|
|
update: any,
|
|
|
|
|
transactionalSession: ?any
|
2018-09-01 13:58:06 -04:00
|
|
|
) {
|
2016-06-13 12:57:20 -07:00
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
2016-05-24 17:21:20 -07:00
|
|
|
const mongoUpdate = transformUpdate(className, update, schema);
|
|
|
|
|
const mongoWhere = transformWhere(className, query, schema);
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection =>
|
2018-09-04 16:15:09 -04:00
|
|
|
collection._mongoCollection.findOneAndUpdate(mongoWhere, mongoUpdate, {
|
2021-10-17 21:16:24 +02:00
|
|
|
returnDocument: 'after',
|
2019-07-31 02:41:07 -07:00
|
|
|
session: transactionalSession || undefined,
|
2018-09-01 13:58:06 -04:00
|
|
|
})
|
|
|
|
|
)
|
2024-10-23 21:27:42 +02:00
|
|
|
.then(result => mongoObjectToParseObject(className, result, schema))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(error => {
|
2018-02-09 07:39:35 -06:00
|
|
|
if (error.code === 11000) {
|
2025-12-14 21:09:17 +05:30
|
|
|
logger.error('Duplicate key error:', error.message);
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.DUPLICATE_VALUE,
|
|
|
|
|
'A duplicate value for a field with unique values was provided'
|
|
|
|
|
);
|
2018-02-09 07:39:35 -06:00
|
|
|
}
|
|
|
|
|
throw error;
|
2018-03-02 10:24:58 -05:00
|
|
|
})
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-05-24 17:21:20 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Hopefully we can get rid of this. It's only used for config and hooks.
|
2018-09-01 13:58:06 -04:00
|
|
|
upsertOneObject(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
2019-07-31 02:41:07 -07:00
|
|
|
update: any,
|
|
|
|
|
transactionalSession: ?any
|
2018-09-01 13:58:06 -04:00
|
|
|
) {
|
2016-06-13 12:57:20 -07:00
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
2016-05-24 17:21:20 -07:00
|
|
|
const mongoUpdate = transformUpdate(className, update, schema);
|
|
|
|
|
const mongoWhere = transformWhere(className, query, schema);
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._adaptiveCollection(className)
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(collection => collection.upsertOne(mongoWhere, mongoUpdate, transactionalSession))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-05-24 17:21:20 -07:00
|
|
|
}
|
|
|
|
|
|
2016-05-23 16:31:51 -07:00
|
|
|
// Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }.
// Also honors keys (projection), readPreference, hint, caseInsensitive, explain and comment.
// Resolves with Parse-format objects, or with the raw explain output when `explain` is set.
find(
  className: string,
  schema: SchemaType,
  query: QueryType,
  {
    skip,
    limit,
    sort,
    keys,
    readPreference,
    hint,
    caseInsensitive,
    explain,
    comment,
  }: QueryOptions
): Promise<any> {
  validateExplainValue(explain);
  schema = convertParseSchemaToMongoSchema(schema);
  const mongoWhere = transformWhere(className, query, schema);
  // Sort keys are Parse field names; map each onto its Mongo column name.
  const mongoSort = _.mapKeys(sort, (value, fieldName) =>
    transformKey(className, fieldName, schema)
  );
  // Build the Mongo projection. Requesting 'ACL' expands to the two
  // permission columns Parse stores it in (_rperm/_wperm).
  const mongoKeys = _.reduce(
    keys,
    (memo, key) => {
      if (key === 'ACL') {
        memo['_rperm'] = 1;
        memo['_wperm'] = 1;
      } else {
        memo[transformKey(className, key, schema)] = 1;
      }
      return memo;
    },
    {}
  );

  // If we aren't requesting the `_id` field, we need to explicitly opt out
  // of it. Doing so in parse-server is unusual, but it can allow us to
  // optimize some queries with covering indexes.
  if (keys && !mongoKeys._id) {
    mongoKeys._id = 0;
  }

  readPreference = this._parseReadPreference(readPreference);
  // $text queries need a text index; ensure it exists before running the find.
  return this.createTextIndexesIfNeeded(className, query, schema)
    .then(() => this._adaptiveCollection(className))
    .then(collection =>
      collection.find(mongoWhere, {
        skip,
        limit,
        sort: mongoSort,
        keys: mongoKeys,
        maxTimeMS: this._maxTimeMS,
        readPreference,
        hint,
        caseInsensitive,
        explain,
        comment,
      })
    )
    .then(objects => {
      // Explain output is query-plan metadata, not documents — return it raw.
      if (explain) {
        return objects;
      }
      return objects.map(object => mongoObjectToParseObject(className, object, schema));
    })
    .catch(err => this.handleError(err));
}
|
|
|
|
|
|
2020-02-14 09:44:51 -08:00
|
|
|
ensureIndex(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
fieldNames: string[],
|
|
|
|
|
indexName: ?string,
|
2020-03-29 22:15:40 +02:00
|
|
|
caseInsensitive: boolean = false,
|
2020-10-01 23:58:23 +02:00
|
|
|
options?: Object = {}
|
2020-02-14 09:44:51 -08:00
|
|
|
): Promise<any> {
|
|
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
|
|
|
|
const indexCreationRequest = {};
|
2020-12-13 11:19:04 -06:00
|
|
|
const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema));
|
2020-10-01 23:58:23 +02:00
|
|
|
mongoFieldNames.forEach(fieldName => {
|
2020-12-13 11:19:04 -06:00
|
|
|
indexCreationRequest[fieldName] = options.indexType !== undefined ? options.indexType : 1;
|
2020-02-14 09:44:51 -08:00
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const defaultOptions: Object = { background: true, sparse: true };
|
|
|
|
|
const indexNameOptions: Object = indexName ? { name: indexName } : {};
|
2020-12-13 11:19:04 -06:00
|
|
|
const ttlOptions: Object = options.ttl !== undefined ? { expireAfterSeconds: options.ttl } : {};
|
2025-11-01 13:52:23 +01:00
|
|
|
const sparseOptions: Object = options.sparse !== undefined ? { sparse: options.sparse } : {};
|
2020-02-14 09:44:51 -08:00
|
|
|
const caseInsensitiveOptions: Object = caseInsensitive
|
|
|
|
|
? { collation: MongoCollection.caseInsensitiveCollation() }
|
|
|
|
|
: {};
|
|
|
|
|
const indexOptions: Object = {
|
|
|
|
|
...defaultOptions,
|
|
|
|
|
...caseInsensitiveOptions,
|
|
|
|
|
...indexNameOptions,
|
2020-07-15 20:10:33 +02:00
|
|
|
...ttlOptions,
|
2025-11-01 13:52:23 +01:00
|
|
|
...sparseOptions,
|
2020-02-14 09:44:51 -08:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
return this._adaptiveCollection(className)
|
2023-12-10 02:42:40 +01:00
|
|
|
.then(collection =>
|
|
|
|
|
collection._mongoCollection.createIndex(indexCreationRequest, indexOptions)
|
2020-02-14 09:44:51 -08:00
|
|
|
)
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2020-02-14 09:44:51 -08:00
|
|
|
}
|
|
|
|
|
|
2016-06-10 20:27:21 -07:00
|
|
|
// Create a unique index. Unique indexes on nullable fields are not allowed. Since we don't
|
|
|
|
|
// currently know which fields are nullable and which aren't, we ignore that criteria.
|
|
|
|
|
// As such, we shouldn't expose this function to users of parse until we have an out-of-band
|
|
|
|
|
// Way of determining if a field is nullable. Undefined doesn't count against uniqueness,
|
|
|
|
|
// which is why we use sparse indexes.
|
2020-12-13 11:19:04 -06:00
|
|
|
ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) {
|
2016-06-13 12:57:20 -07:00
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
2016-12-07 15:17:05 -08:00
|
|
|
const indexCreationRequest = {};
|
2020-12-13 11:19:04 -06:00
|
|
|
const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema));
|
2020-10-01 23:58:23 +02:00
|
|
|
mongoFieldNames.forEach(fieldName => {
|
2016-06-10 20:27:21 -07:00
|
|
|
indexCreationRequest[fieldName] = 1;
|
|
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._adaptiveCollection(className)
|
2020-12-13 11:19:04 -06:00
|
|
|
.then(collection => collection._ensureSparseUniqueIndexInBackground(indexCreationRequest))
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(error => {
|
2017-06-20 09:15:26 -07:00
|
|
|
if (error.code === 11000) {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.DUPLICATE_VALUE,
|
|
|
|
|
'Tried to ensure field uniqueness for a class that already has duplicates.'
|
|
|
|
|
);
|
2017-06-20 09:15:26 -07:00
|
|
|
}
|
2017-09-05 17:51:11 -04:00
|
|
|
throw error;
|
2018-03-02 10:24:58 -05:00
|
|
|
})
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-06-10 20:27:21 -07:00
|
|
|
}
|
|
|
|
|
|
2016-05-28 09:25:09 -07:00
|
|
|
// Used in tests
|
2017-12-30 20:44:18 -05:00
|
|
|
_rawFind(className: string, query: QueryType) {
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection =>
|
2018-09-01 13:58:06 -04:00
|
|
|
collection.find(query, {
|
|
|
|
|
maxTimeMS: this._maxTimeMS,
|
|
|
|
|
})
|
|
|
|
|
)
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-05-28 09:25:09 -07:00
|
|
|
}
|
|
|
|
|
|
2016-11-11 08:03:35 -08:00
|
|
|
// Executes a count.
|
2018-09-01 13:58:06 -04:00
|
|
|
count(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
2020-01-14 01:14:43 -07:00
|
|
|
readPreference: ?string,
|
2024-03-03 02:27:57 +01:00
|
|
|
_estimate: ?boolean,
|
|
|
|
|
hint: ?mixed,
|
|
|
|
|
comment: ?string
|
2018-09-01 13:58:06 -04:00
|
|
|
) {
|
2016-06-13 12:57:20 -07:00
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
2017-06-21 17:18:10 -03:00
|
|
|
readPreference = this._parseReadPreference(readPreference);
|
2016-06-12 16:35:13 -07:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection =>
|
2019-04-25 03:28:13 +02:00
|
|
|
collection.count(transformWhere(className, query, schema, true), {
|
2018-09-01 13:58:06 -04:00
|
|
|
maxTimeMS: this._maxTimeMS,
|
|
|
|
|
readPreference,
|
2020-01-14 01:14:43 -07:00
|
|
|
hint,
|
2024-03-03 02:27:57 +01:00
|
|
|
comment,
|
2018-09-01 13:58:06 -04:00
|
|
|
})
|
|
|
|
|
)
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2016-04-14 19:24:56 -04:00
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
|
2020-12-13 11:19:04 -06:00
|
|
|
distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) {
|
2017-11-12 13:00:22 -06:00
|
|
|
schema = convertParseSchemaToMongoSchema(schema);
|
2020-12-13 11:19:04 -06:00
|
|
|
const isPointerField = schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer';
|
2018-10-26 10:21:42 -05:00
|
|
|
const transformField = transformKey(className, fieldName, schema);
|
|
|
|
|
|
2017-11-12 13:00:22 -06:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection =>
|
2020-12-13 11:19:04 -06:00
|
|
|
collection.distinct(transformField, transformWhere(className, query, schema))
|
2018-09-01 13:58:06 -04:00
|
|
|
)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(objects => {
|
|
|
|
|
objects = objects.filter(obj => obj != null);
|
|
|
|
|
return objects.map(object => {
|
2018-02-16 09:44:42 -06:00
|
|
|
if (isPointerField) {
|
2018-10-26 10:21:42 -05:00
|
|
|
return transformPointerString(schema, fieldName, object);
|
2018-02-16 09:44:42 -06:00
|
|
|
}
|
|
|
|
|
return mongoObjectToParseObject(className, object, schema);
|
|
|
|
|
});
|
2018-03-02 10:24:58 -05:00
|
|
|
})
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
// Runs an aggregation pipeline. Each stage is rewritten from Parse field
// names/values into their Mongo equivalents before execution, and the
// results' `_id` is mapped back onto `objectId`.
aggregate(
  className: string,
  schema: any,
  pipeline: any,
  readPreference: ?string,
  hint: ?mixed,
  explain?: boolean,
  comment: ?string
) {
  validateExplainValue(explain);
  // Set when a $group groups on a pointer column (`$_p_` prefix); the
  // resulting _id values then carry a "Class$objectId" pointer string that
  // must be split back into a bare objectId below.
  let isPointerField = false;
  pipeline = pipeline.map(stage => {
    if (stage.$group) {
      stage.$group = this._parseAggregateGroupArgs(schema, stage.$group);
      if (
        stage.$group._id &&
        typeof stage.$group._id === 'string' &&
        stage.$group._id.indexOf('$_p_') >= 0
      ) {
        isPointerField = true;
      }
    }
    if (stage.$match) {
      stage.$match = this._parseAggregateArgs(schema, stage.$match);
    }
    if (stage.$project) {
      stage.$project = this._parseAggregateProjectArgs(schema, stage.$project);
    }
    if (stage.$geoNear && stage.$geoNear.query) {
      stage.$geoNear.query = this._parseAggregateArgs(schema, stage.$geoNear.query);
    }
    return stage;
  });
  readPreference = this._parseReadPreference(readPreference);
  return this._adaptiveCollection(className)
    .then(collection =>
      collection.aggregate(pipeline, {
        readPreference,
        maxTimeMS: this._maxTimeMS,
        hint,
        explain,
      comment,
      })
    )
    .then(results => {
      results.forEach(result => {
        if (Object.prototype.hasOwnProperty.call(result, '_id')) {
          // Strip the "ClassName$" prefix off pointer-valued group keys.
          if (isPointerField && result._id) {
            result._id = result._id.split('$')[1];
          }
          // Normalize missing/empty group keys to null so clients see a
          // consistent objectId value.
          if (
            result._id == null ||
            result._id == undefined ||
            (['object', 'string'].includes(typeof result._id) && _.isEmpty(result._id))
          ) {
            result._id = null;
          }
          result.objectId = result._id;
          delete result._id;
        }
      });
      return results;
    })
    .then(objects => objects.map(object => mongoObjectToParseObject(className, object, schema)))
    .catch(err => this.handleError(err));
}
|
|
|
|
|
|
2018-06-26 18:18:04 -04:00
|
|
|
// This function will recursively traverse the pipeline and convert any Pointer or Date columns.
// If we detect a pointer column we will rename the column being queried for to match the column
// in the database. We also modify the value to what we expect the value to be in the database
// as well.
// For dates, the driver expects a Date object, but we have a string coming in. So we'll convert
// the string to a Date so the driver can perform the necessary comparison.
//
// The goal of this method is to look for the "leaves" of the pipeline and determine if it needs
// to be converted. The pipeline can have a few different forms. For more details, see:
// https://docs.mongodb.com/manual/reference/operator/aggregation/
//
// If the pipeline is an array, it means we are probably parsing an '$and' or '$or' operator. In
// that case we need to loop through all of it's children to find the columns being operated on.
// If the pipeline is an object, then we'll loop through the keys checking to see if the key name
// matches one of the schema columns. If it does match a column and the column is a Pointer or
// a Date, then we'll convert the value as described above.
//
// As much as I hate recursion...this seemed like a good fit for it. We're essentially traversing
// down a tree to find a "leaf node" and checking to see if it needs to be converted.
_parseAggregateArgs(schema: any, pipeline: any): any {
  if (pipeline === null) {
    return null;
  } else if (Array.isArray(pipeline)) {
    return pipeline.map(value => this._parseAggregateArgs(schema, value));
  } else if (typeof pipeline === 'object') {
    const returnValue = {};
    for (const field in pipeline) {
      if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
        if (typeof pipeline[field] === 'object') {
          // Pass objects down to MongoDB...this is more than likely an $exists operator.
          returnValue[`_p_${field}`] = pipeline[field];
        } else {
          // Scalar comparison against a pointer: rebuild the stored
          // "TargetClass$objectId" pointer string.
          returnValue[`_p_${field}`] = `${schema.fields[field].targetClass}$${pipeline[field]}`;
        }
      } else if (schema.fields[field] && schema.fields[field].type === 'Date') {
        returnValue[field] = this._convertToDate(pipeline[field]);
      } else {
        // Not a special column at this level — recurse into the value.
        returnValue[field] = this._parseAggregateArgs(schema, pipeline[field]);
      }

      // Map the built-in Parse field names onto their Mongo column names.
      if (field === 'objectId') {
        returnValue['_id'] = returnValue[field];
        delete returnValue[field];
      } else if (field === 'createdAt') {
        returnValue['_created_at'] = returnValue[field];
        delete returnValue[field];
      } else if (field === 'updatedAt') {
        returnValue['_updated_at'] = returnValue[field];
        delete returnValue[field];
      }
    }
    return returnValue;
  }
  // Primitive leaf (string/number/boolean/undefined): nothing to convert.
  return pipeline;
}
|
|
|
|
|
|
|
|
|
|
// This function is slightly different than the one above. Rather than trying to combine these
|
|
|
|
|
// two functions and making the code even harder to understand, I decided to split it up. The
|
|
|
|
|
// difference with this function is we are not transforming the values, only the keys of the
|
|
|
|
|
// pipeline.
|
|
|
|
|
_parseAggregateProjectArgs(schema: any, pipeline: any): any {
|
|
|
|
|
const returnValue = {};
|
|
|
|
|
for (const field in pipeline) {
|
|
|
|
|
if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
|
|
|
|
|
returnValue[`_p_${field}`] = pipeline[field];
|
|
|
|
|
} else {
|
|
|
|
|
returnValue[field] = this._parseAggregateArgs(schema, pipeline[field]);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (field === 'objectId') {
|
|
|
|
|
returnValue['_id'] = returnValue[field];
|
|
|
|
|
delete returnValue[field];
|
|
|
|
|
} else if (field === 'createdAt') {
|
|
|
|
|
returnValue['_created_at'] = returnValue[field];
|
|
|
|
|
delete returnValue[field];
|
|
|
|
|
} else if (field === 'updatedAt') {
|
|
|
|
|
returnValue['_updated_at'] = returnValue[field];
|
|
|
|
|
delete returnValue[field];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return returnValue;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// This function is slightly different than the two above. MongoDB $group aggregate looks like:
|
|
|
|
|
// { $group: { _id: <expression>, <field1>: { <accumulator1> : <expression1> }, ... } }
|
|
|
|
|
// The <expression> could be a column name, prefixed with the '$' character. We'll look for
|
|
|
|
|
// these <expression> and check to see if it is a 'Pointer' or if it's one of createdAt,
|
|
|
|
|
// updatedAt or objectId and change it accordingly.
|
|
|
|
|
_parseAggregateGroupArgs(schema: any, pipeline: any): any {
|
|
|
|
|
if (Array.isArray(pipeline)) {
|
2020-12-13 11:19:04 -06:00
|
|
|
return pipeline.map(value => this._parseAggregateGroupArgs(schema, value));
|
2018-06-26 18:18:04 -04:00
|
|
|
} else if (typeof pipeline === 'object') {
|
|
|
|
|
const returnValue = {};
|
|
|
|
|
for (const field in pipeline) {
|
2020-12-13 11:19:04 -06:00
|
|
|
returnValue[field] = this._parseAggregateGroupArgs(schema, pipeline[field]);
|
2018-06-26 18:18:04 -04:00
|
|
|
}
|
|
|
|
|
return returnValue;
|
|
|
|
|
} else if (typeof pipeline === 'string') {
|
|
|
|
|
const field = pipeline.substring(1);
|
|
|
|
|
if (schema.fields[field] && schema.fields[field].type === 'Pointer') {
|
|
|
|
|
return `$_p_${field}`;
|
|
|
|
|
} else if (field == 'createdAt') {
|
|
|
|
|
return '$_created_at';
|
|
|
|
|
} else if (field == 'updatedAt') {
|
|
|
|
|
return '$_updated_at';
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
return pipeline;
|
|
|
|
|
}
|
|
|
|
|
|
2025-07-13 02:44:08 +02:00
|
|
|
/**
|
|
|
|
|
* Recursively converts values to Date objects. Since the passed object is part of an aggregation
|
|
|
|
|
* pipeline and can contain various logic operators (like $gt, $lt, etc), this function will
|
|
|
|
|
* traverse the object and convert any strings that can be parsed as dates into Date objects.
|
|
|
|
|
* @param {any} value The value to convert.
|
|
|
|
|
* @returns {any} The original value if not convertible to Date, or a Date object if it is.
|
|
|
|
|
*/
|
2018-06-26 18:18:04 -04:00
|
|
|
_convertToDate(value: any): any {
|
2022-09-18 00:19:28 +10:00
|
|
|
if (value instanceof Date) {
|
|
|
|
|
return value;
|
|
|
|
|
}
|
2018-06-26 18:18:04 -04:00
|
|
|
if (typeof value === 'string') {
|
2025-07-13 02:44:08 +02:00
|
|
|
return isNaN(Date.parse(value)) ? value : new Date(value);
|
2018-06-26 18:18:04 -04:00
|
|
|
}
|
2025-07-13 02:44:08 +02:00
|
|
|
if (typeof value === 'object') {
|
|
|
|
|
const returnValue = {};
|
|
|
|
|
for (const field in value) {
|
|
|
|
|
returnValue[field] = this._convertToDate(value[field]);
|
|
|
|
|
}
|
|
|
|
|
return returnValue;
|
2018-06-26 18:18:04 -04:00
|
|
|
}
|
2025-07-13 02:44:08 +02:00
|
|
|
return value;
|
2018-06-26 18:18:04 -04:00
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
_parseReadPreference(readPreference: ?string): ?string {
|
2019-05-14 12:58:02 -07:00
|
|
|
if (readPreference) {
|
|
|
|
|
readPreference = readPreference.toUpperCase();
|
|
|
|
|
}
|
2017-12-30 00:23:43 -05:00
|
|
|
switch (readPreference) {
|
2018-09-01 13:58:06 -04:00
|
|
|
case 'PRIMARY':
|
|
|
|
|
readPreference = ReadPreference.PRIMARY;
|
|
|
|
|
break;
|
|
|
|
|
case 'PRIMARY_PREFERRED':
|
|
|
|
|
readPreference = ReadPreference.PRIMARY_PREFERRED;
|
|
|
|
|
break;
|
|
|
|
|
case 'SECONDARY':
|
|
|
|
|
readPreference = ReadPreference.SECONDARY;
|
|
|
|
|
break;
|
|
|
|
|
case 'SECONDARY_PREFERRED':
|
|
|
|
|
readPreference = ReadPreference.SECONDARY_PREFERRED;
|
|
|
|
|
break;
|
|
|
|
|
case 'NEAREST':
|
|
|
|
|
readPreference = ReadPreference.NEAREST;
|
|
|
|
|
break;
|
|
|
|
|
case undefined:
|
2019-04-08 15:59:15 -07:00
|
|
|
case null:
|
|
|
|
|
case '':
|
2018-09-01 13:58:06 -04:00
|
|
|
break;
|
|
|
|
|
default:
|
2020-12-13 11:19:04 -06:00
|
|
|
throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Not supported read preference.');
|
2017-06-21 17:18:10 -03:00
|
|
|
}
|
|
|
|
|
return readPreference;
|
|
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
// No adapter-level setup is required for Mongo; resolves immediately.
performInitialization(): Promise<void> {
  return Promise.resolve();
}
|
2017-06-13 20:42:59 -05:00
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
createIndex(className: string, index: any) {
|
2017-06-13 20:42:59 -05:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection => collection._mongoCollection.createIndex(index))
|
|
|
|
|
.catch(err => this.handleError(err));
|
2017-06-13 20:42:59 -05:00
|
|
|
}
|
2017-07-11 22:33:45 -05:00
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
createIndexes(className: string, indexes: any) {
|
2017-11-25 13:55:34 -06:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection => collection._mongoCollection.createIndexes(indexes))
|
|
|
|
|
.catch(err => this.handleError(err));
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
createIndexesIfNeeded(className: string, fieldName: string, type: any) {
|
2017-07-11 22:33:45 -05:00
|
|
|
if (type && type.type === 'Polygon') {
|
|
|
|
|
const index = {
|
2018-09-01 13:58:06 -04:00
|
|
|
[fieldName]: '2dsphere',
|
2017-07-11 22:33:45 -05:00
|
|
|
};
|
|
|
|
|
return this.createIndex(className, index);
|
|
|
|
|
}
|
|
|
|
|
return Promise.resolve();
|
|
|
|
|
}
|
|
|
|
|
|
2020-12-13 11:19:04 -06:00
|
|
|
// Ensures a text index exists for any field the query runs $text against.
// If some existing index already covers the field, resolves without changes;
// otherwise registers a `<field>_text` index through the schema format.
createTextIndexesIfNeeded(className: string, query: QueryType, schema: any): Promise<void> {
  for (const fieldName in query) {
    if (!query[fieldName] || !query[fieldName].$text) {
      continue;
    }
    const existingIndexes = schema.indexes;
    for (const key in existingIndexes) {
      const index = existingIndexes[key];
      // Any index that mentions this field is considered sufficient.
      if (Object.prototype.hasOwnProperty.call(index, fieldName)) {
        return Promise.resolve();
      }
    }
    const indexName = `${fieldName}_text`;
    const textIndex = {
      [indexName]: { [fieldName]: 'text' },
    };
    // Only the first $text field found is handled; the returns above/below
    // exit the loop on the first match.
    return this.setIndexesWithSchemaFormat(
      className,
      textIndex,
      existingIndexes,
      schema.fields
    ).catch(error => {
      // Mongo error 85: an index with this name exists with different
      // options — resync our schema's index list from Mongo instead.
      if (error.code === 85) {
        return this.setIndexesFromMongo(className);
      }
      throw error;
    });
  }
  return Promise.resolve();
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
getIndexes(className: string) {
|
2017-07-11 22:33:45 -05:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection => collection._mongoCollection.indexes())
|
|
|
|
|
.catch(err => this.handleError(err));
|
2017-07-11 22:33:45 -05:00
|
|
|
}
|
2017-11-25 13:55:34 -06:00
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
dropIndex(className: string, index: any) {
|
2017-11-25 13:55:34 -06:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection => collection._mongoCollection.dropIndex(index))
|
|
|
|
|
.catch(err => this.handleError(err));
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
dropAllIndexes(className: string) {
|
2017-11-25 13:55:34 -06:00
|
|
|
return this._adaptiveCollection(className)
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(collection => collection._mongoCollection.dropIndexes())
|
|
|
|
|
.catch(err => this.handleError(err));
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
updateSchemaWithIndexes(): Promise<any> {
|
2017-11-25 13:55:34 -06:00
|
|
|
return this.getAllClasses()
|
2020-10-01 23:58:23 +02:00
|
|
|
.then(classes => {
|
|
|
|
|
const promises = classes.map(schema => {
|
2017-11-25 13:55:34 -06:00
|
|
|
return this.setIndexesFromMongo(schema.className);
|
|
|
|
|
});
|
|
|
|
|
return Promise.all(promises);
|
2018-03-02 10:24:58 -05:00
|
|
|
})
|
2020-10-01 23:58:23 +02:00
|
|
|
.catch(err => this.handleError(err));
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
2019-07-31 02:41:07 -07:00
|
|
|
|
|
|
|
|
createTransactionalSession(): Promise<any> {
|
|
|
|
|
const transactionalSection = this.client.startSession();
|
|
|
|
|
transactionalSection.startTransaction();
|
|
|
|
|
return Promise.resolve(transactionalSection);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Commits the transaction on the given session, retrying the commit up to
// 5 additional times when the driver labels the failure as transient,
// then ends the session.
commitTransactionalSession(transactionalSection: any): Promise<void> {
  const commit = retries => {
    return transactionalSection
      .commitTransaction()
      .catch(error => {
        // Driver-labeled transient errors are safe to retry.
        if (error && error.hasErrorLabel('TransientTransactionError') && retries > 0) {
          return commit(retries - 1);
        }
        throw error;
      })
      .then(() => {
        // NOTE(review): on a retried commit this .then runs once per recursion
        // level, so endSession() can be invoked multiple times — presumably
        // harmless on the driver's session object, but worth confirming.
        transactionalSection.endSession();
      });
  };
  return commit(5);
}
|
|
|
|
|
|
|
|
|
|
abortTransactionalSession(transactionalSection: any): Promise<void> {
|
|
|
|
|
return transactionalSection.abortTransaction().then(() => {
|
|
|
|
|
transactionalSection.endSession();
|
|
|
|
|
});
|
|
|
|
|
}
|
2016-02-27 02:23:57 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
export default MongoStorageAdapter;
|