2017-12-30 20:44:18 -05:00
|
|
|
// @flow
|
2016-10-16 02:59:54 +05:30
|
|
|
import { createClient } from './PostgresClient';
|
2017-12-30 20:44:18 -05:00
|
|
|
// @flow-disable-next
|
2018-09-01 13:58:06 -04:00
|
|
|
import Parse from 'parse/node';
|
2017-12-30 20:44:18 -05:00
|
|
|
// @flow-disable-next
|
2018-09-01 13:58:06 -04:00
|
|
|
import _ from 'lodash';
|
|
|
|
|
import sql from './sql';
|
2016-06-12 16:35:13 -07:00
|
|
|
|
|
|
|
|
// Postgres SQLSTATE error codes this adapter special-cases.
// See PostgreSQL docs, Appendix A "PostgreSQL Error Codes".
const PostgresRelationDoesNotExistError = '42P01'; // undefined_table
const PostgresDuplicateRelationError = '42P07'; // duplicate_table
const PostgresDuplicateColumnError = '42701'; // duplicate_column
const PostgresMissingColumnError = '42703'; // undefined_column
const PostgresDuplicateObjectError = '42710'; // duplicate_object
const PostgresUniqueIndexViolationError = '23505'; // unique_violation
const PostgresTransactionAbortedError = '25P02'; // in_failed_sql_transaction
const logger = require('../../../logger');
|
|
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
const debug = function (...args: any) {
|
2017-01-11 12:31:40 -08:00
|
|
|
args = ['PG: ' + arguments[0]].concat(args.slice(1, args.length));
|
2016-12-07 15:17:05 -08:00
|
|
|
const log = logger.getLogger();
|
2016-08-15 16:48:39 -04:00
|
|
|
log.debug.apply(log, args);
|
2018-09-01 13:58:06 -04:00
|
|
|
};
|
2016-06-12 16:35:13 -07:00
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
import { StorageAdapter } from '../StorageAdapter';
|
|
|
|
|
import type { SchemaType, QueryType, QueryOptions } from '../StorageAdapter';
|
2017-12-30 20:44:18 -05:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Translate a Parse schema field type into the Postgres column type used to
// store it. Throws (a string, kept for compatibility) on unknown types.
const parseTypeToPostgresType = (type) => {
  // Scalar Parse types with a fixed Postgres column type.
  const scalarMap = {
    String: 'text',
    Date: 'timestamp with time zone',
    Object: 'jsonb',
    File: 'text', // stored as the file name
    Boolean: 'boolean',
    Pointer: 'text', // stored as the target objectId
    Number: 'double precision',
    GeoPoint: 'point',
    Bytes: 'jsonb',
    Polygon: 'polygon',
  };
  if (Object.prototype.hasOwnProperty.call(scalarMap, type.type)) {
    return scalarMap[type.type];
  }
  if (type.type === 'Array') {
    // Arrays of strings get a native text[] column; anything else is jsonb.
    return type.contents && type.contents.type === 'String' ? 'text[]' : 'jsonb';
  }
  throw `no type for ${JSON.stringify(type)} yet`;
};
|
2016-06-12 16:35:13 -07:00
|
|
|
|
2016-08-15 16:48:39 -04:00
|
|
|
// Maps Parse query comparison operators to their SQL comparison operators.
const ParseToPosgresComparator = {
  $gt: '>',
  $lt: '<',
  $gte: '>=',
  $lte: '<=',
};
|
2016-08-15 16:48:39 -04:00
|
|
|
|
2018-02-16 12:41:02 -06:00
|
|
|
// Maps Mongo-style aggregate date operators to the field names accepted by
// Postgres EXTRACT(<field> FROM ...).
const mongoAggregateToPostgres = {
  $dayOfMonth: 'DAY',
  $dayOfWeek: 'DOW',
  $dayOfYear: 'DOY',
  $isoDayOfWeek: 'ISODOW',
  $isoWeekYear: 'ISOYEAR',
  $hour: 'HOUR',
  $minute: 'MINUTE',
  $second: 'SECOND',
  $millisecond: 'MILLISECONDS',
  $month: 'MONTH',
  $week: 'WEEK',
  $year: 'YEAR',
};
|
|
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Unwrap Parse-encoded objects into the raw value stored in Postgres:
// Date -> its ISO string, File -> its name. Everything else (including null,
// which previously crashed on the property access) passes through unchanged.
const toPostgresValue = (value) => {
  // Guard against null: typeof null === 'object', and reading value.__type
  // on null would throw a TypeError.
  if (value !== null && typeof value === 'object') {
    if (value.__type === 'Date') {
      return value.iso;
    }
    if (value.__type === 'File') {
      return value.name;
    }
  }
  return value;
};
|
2016-08-15 16:48:39 -04:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Unwrap a Parse Pointer into the bare objectId stored in the column; every
// other value passes through unchanged. Guards against null, which is typed
// 'object' and previously crashed on the value.__type access.
const transformValue = (value) => {
  if (value !== null && typeof value === 'object' && value.__type === 'Pointer') {
    return value.objectId;
  }
  return value;
};
|
2016-08-15 16:48:39 -04:00
|
|
|
|
|
|
|
|
// Duplicated from the mongo adapter...
// Class-level permissions with every operation locked down; used as the base
// when merging a class's stored permissions in toParseSchema.
const emptyCLPS = Object.freeze({
  find: {},
  get: {},
  count: {},
  create: {},
  update: {},
  delete: {},
  addField: {},
  protectedFields: {},
});
|
|
|
|
|
|
|
|
|
|
// Default class-level permissions: public ('*') access for every operation
// and no protected fields. Used when a class has no stored permissions.
const defaultCLPS = Object.freeze({
  find: { '*': true },
  get: { '*': true },
  count: { '*': true },
  create: { '*': true },
  update: { '*': true },
  delete: { '*': true },
  addField: { '*': true },
  protectedFields: { '*': [] },
});
|
|
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Convert an internal/stored schema into the shape exposed through the Parse
// API. Mutates the passed schema: internal columns are removed from fields.
const toParseSchema = (schema) => {
  // Strip columns that must never be exposed to clients.
  if (schema.className === '_User') {
    delete schema.fields._hashed_password;
  }
  if (schema.fields) {
    delete schema.fields._wperm;
    delete schema.fields._rperm;
  }
  // Merge stored permissions over the all-empty base, or fall back to the
  // public defaults when the class has none.
  const clps = schema.classLevelPermissions
    ? { ...emptyCLPS, ...schema.classLevelPermissions }
    : defaultCLPS;
  const indexes = schema.indexes ? { ...schema.indexes } : {};
  return {
    className: schema.className,
    fields: schema.fields,
    classLevelPermissions: clps,
    indexes,
  };
};
|
2016-08-15 16:48:39 -04:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Augment a Parse schema with the internal columns every Postgres table
// carries (_wperm/_rperm ACL arrays, plus password columns on _User).
// Mutates and returns the passed schema; falsy input is returned as-is.
const toPostgresSchema = (schema) => {
  if (!schema) {
    return schema;
  }
  const fields = schema.fields || {};
  schema.fields = fields;
  fields._wperm = { type: 'Array', contents: { type: 'String' } };
  fields._rperm = { type: 'Array', contents: { type: 'String' } };
  if (schema.className === '_User') {
    fields._hashed_password = { type: 'String' };
    fields._password_history = { type: 'Array' };
  }
  return schema;
};
|
2016-08-15 16:48:39 -04:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Expand dotted keys ('a.b.c') into nested objects ({a: {b: {c: value}}}),
// mutating the passed object in place. A value of {__op: 'Delete'} is stored
// as undefined at the leaf. Returns the same (mutated) object.
const handleDotFields = (object) => {
  for (const fieldName of Object.keys(object)) {
    if (fieldName.indexOf('.') === -1) {
      continue;
    }
    const components = fieldName.split('.');
    const first = components.shift();
    object[first] = object[first] || {};
    let currentObj = object[first];
    let next;
    let value = object[fieldName];
    if (value && value.__op === 'Delete') {
      value = undefined;
    }
    /* eslint-disable no-cond-assign */
    while ((next = components.shift())) {
      /* eslint-enable no-cond-assign */
      // Create intermediate objects along the path; the last component gets
      // the (possibly undefined) value.
      currentObj[next] = currentObj[next] || {};
      if (components.length === 0) {
        currentObj[next] = value;
      }
      currentObj = currentObj[next];
    }
    delete object[fieldName];
  }
  return object;
};
|
2016-08-20 16:07:48 -04:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Split a dotted field name into SQL-quoted components: the first component
// is a double-quoted column identifier, the rest are single-quoted JSON keys.
const transformDotFieldToComponents = (fieldName) => {
  return fieldName
    .split('.')
    .map((cmpt, index) => (index === 0 ? `"${cmpt}"` : `'${cmpt}'`));
};
|
2017-08-23 10:33:57 -05:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Build the SQL expression for a (possibly dotted) field name. Plain names
// become a quoted column; dotted names traverse jsonb with -> and extract the
// final key as text with ->>.
const transformDotField = (fieldName) => {
  if (!fieldName.includes('.')) {
    return `"${fieldName}"`;
  }
  const components = transformDotFieldToComponents(fieldName);
  const path = components.slice(0, -1).join('->');
  return path + '->>' + components[components.length - 1];
};
|
2017-08-23 10:33:57 -05:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Strip the leading '$' from an aggregate field reference, mapping the
// internal created/updated timestamp names to their Parse field names.
// Non-string input is returned unchanged.
const transformAggregateField = (fieldName) => {
  if (typeof fieldName !== 'string') {
    return fieldName;
  }
  switch (fieldName) {
    case '$_created_at':
      return 'createdAt';
    case '$_updated_at':
      return 'updatedAt';
    default:
      return fieldName.slice(1);
  }
};
|
2017-11-12 13:00:22 -06:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
// Recursively validate that no nested key contains '$' or '.'; throws a
// Parse.Error(INVALID_NESTED_KEY) on the first offending key found
// (depth-first, since children are validated before the key itself).
const validateKeys = (object) => {
  if (typeof object !== 'object') {
    return;
  }
  for (const key in object) {
    if (typeof object[key] === 'object') {
      validateKeys(object[key]);
    }
    if (key.includes('$') || key.includes('.')) {
      throw new Parse.Error(
        Parse.Error.INVALID_NESTED_KEY,
        "Nested keys should not contain the '$' or '.' characters"
      );
    }
  }
};
|
2016-10-31 21:41:21 +05:30
|
|
|
|
2016-08-18 18:05:26 -04:00
|
|
|
// Returns the list of join tables on a schema
|
2020-04-06 22:50:33 +05:30
|
|
|
// Returns the list of join tables on a schema: one "_Join:<field>:<class>"
// entry per Relation-typed field. Falsy schema yields an empty list.
const joinTablesForSchema = (schema) => {
  if (!schema) {
    return [];
  }
  return Object.keys(schema.fields)
    .filter((field) => schema.fields[field].type === 'Relation')
    .map((field) => `_Join:${field}:${schema.className}`);
};
|
2016-08-18 18:05:26 -04:00
|
|
|
|
2018-05-01 07:37:38 -04:00
|
|
|
// Result of building a WHERE clause: the SQL pattern string, the positional
// values it references, and any ORDER BY fragments it contributes.
interface WhereClause {
  pattern: string;
  values: Array<any>;
  sorts: Array<any>;
}
|
|
|
|
|
|
2020-02-14 09:44:51 -08:00
|
|
|
const buildWhereClause = ({
|
|
|
|
|
schema,
|
|
|
|
|
query,
|
|
|
|
|
index,
|
|
|
|
|
caseInsensitive,
|
|
|
|
|
}): WhereClause => {
|
2016-12-07 15:17:05 -08:00
|
|
|
const patterns = [];
|
2016-06-16 15:39:05 -07:00
|
|
|
let values = [];
|
2016-12-07 15:17:05 -08:00
|
|
|
const sorts = [];
|
2016-08-15 16:48:39 -04:00
|
|
|
|
|
|
|
|
schema = toPostgresSchema(schema);
|
2016-12-07 15:17:05 -08:00
|
|
|
for (const fieldName in query) {
|
2018-09-01 13:58:06 -04:00
|
|
|
const isArrayField =
|
|
|
|
|
schema.fields &&
|
|
|
|
|
schema.fields[fieldName] &&
|
|
|
|
|
schema.fields[fieldName].type === 'Array';
|
2016-12-07 15:17:05 -08:00
|
|
|
const initialPatternsLength = patterns.length;
|
|
|
|
|
const fieldValue = query[fieldName];
|
2016-08-20 16:07:48 -04:00
|
|
|
|
2019-08-16 06:55:12 +02:00
|
|
|
// nothing in the schema, it's gonna blow up
|
2016-08-20 16:07:48 -04:00
|
|
|
if (!schema.fields[fieldName]) {
|
|
|
|
|
// as it won't exist
|
2017-06-21 09:23:20 -03:00
|
|
|
if (fieldValue && fieldValue.$exists === false) {
|
2016-08-20 16:07:48 -04:00
|
|
|
continue;
|
|
|
|
|
}
|
2016-12-01 10:24:46 -08:00
|
|
|
}
|
2016-08-20 16:07:48 -04:00
|
|
|
|
2020-02-14 09:44:51 -08:00
|
|
|
const authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/);
|
|
|
|
|
if (authDataMatch) {
|
|
|
|
|
// TODO: Handle querying by _auth_data_provider, authData is stored in authData field
|
|
|
|
|
continue;
|
|
|
|
|
} else if (
|
|
|
|
|
caseInsensitive &&
|
|
|
|
|
(fieldName === 'username' || fieldName === 'email')
|
|
|
|
|
) {
|
|
|
|
|
patterns.push(`LOWER($${index}:name) = LOWER($${index + 1})`);
|
|
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
index += 2;
|
|
|
|
|
} else if (fieldName.indexOf('.') >= 0) {
|
2017-08-23 10:33:57 -05:00
|
|
|
let name = transformDotField(fieldName);
|
2017-06-21 09:23:20 -03:00
|
|
|
if (fieldValue === null) {
|
2020-02-14 09:44:51 -08:00
|
|
|
patterns.push(`$${index}:raw IS NULL`);
|
|
|
|
|
values.push(name);
|
|
|
|
|
index += 1;
|
|
|
|
|
continue;
|
2017-06-21 09:23:20 -03:00
|
|
|
} else {
|
2017-08-23 10:33:57 -05:00
|
|
|
if (fieldValue.$in) {
|
|
|
|
|
name = transformDotFieldToComponents(fieldName).join('->');
|
2019-07-28 23:54:13 -05:00
|
|
|
patterns.push(`($${index}:raw)::jsonb @> $${index + 1}::jsonb`);
|
|
|
|
|
values.push(name, JSON.stringify(fieldValue.$in));
|
|
|
|
|
index += 2;
|
2017-08-23 10:33:57 -05:00
|
|
|
} else if (fieldValue.$regex) {
|
|
|
|
|
// Handle later
|
GraphQL Object constraints (#5715)
* GraphQL Object constraints
Implements the GraphQL Object constraints, which allows us to filter queries results using the `$eq`, `$lt`, `$gt`, `$in`, and other Parse supported constraints.
Example:
```
query objects {
findMyClass(where: {
objField: {
_eq: {
key: 'foo.bar',
value: 'hello'
},
_gt: {
key: 'foo.number',
value: 10
},
_lt: {
key: 'anotherNumber',
value: 5
}
}
}) {
results {
objectId
}
}
}
```
In the example above, we have the `findMyClass` query (automatically generated for the `MyClass` class), and a field named `objField` whose type is Object. The object below represents a valid `objField` value and would satisfy all constraints:
```
{
"foo": {
"bar": "hello",
"number": 11
},
"anotherNumber": 4
}
```
The Object constraint is applied only when using Parse class object type queries. When using "generic" queries such as `get` and `find`, this type of constraint is not available.
* Objects constraints not working on Postgres
Fixes the $eq, $ne, $gt, and $lt constraints when applied on an Object type field.
* Fix object constraint field name
* Fix Postgres constraints indexes
* fix: Object type composed constraints not working
* fix: Rename key and value fields
* refactor: Object constraints for generic queries
* fix: Object constraints not working on Postgres
2019-08-02 16:18:08 -03:00
|
|
|
} else if (typeof fieldValue !== 'object') {
|
2019-07-28 23:54:13 -05:00
|
|
|
patterns.push(`$${index}:raw = $${index + 1}::text`);
|
|
|
|
|
values.push(name, fieldValue);
|
|
|
|
|
index += 2;
|
2017-08-23 10:33:57 -05:00
|
|
|
}
|
2017-06-21 09:23:20 -03:00
|
|
|
}
|
2017-09-19 06:59:48 -05:00
|
|
|
} else if (fieldValue === null || fieldValue === undefined) {
|
2017-06-21 09:23:20 -03:00
|
|
|
patterns.push(`$${index}:name IS NULL`);
|
|
|
|
|
values.push(fieldName);
|
|
|
|
|
index += 1;
|
|
|
|
|
continue;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (typeof fieldValue === 'string') {
|
2016-06-16 15:39:05 -07:00
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
index += 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (typeof fieldValue === 'boolean') {
|
|
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
2018-03-24 17:45:34 -05:00
|
|
|
// Can't cast boolean to double precision
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
schema.fields[fieldName] &&
|
|
|
|
|
schema.fields[fieldName].type === 'Number'
|
|
|
|
|
) {
|
2018-03-24 17:45:34 -05:00
|
|
|
// Should always return zero results
|
|
|
|
|
const MAX_INT_PLUS_ONE = 9223372036854775808;
|
|
|
|
|
values.push(fieldName, MAX_INT_PLUS_ONE);
|
|
|
|
|
} else {
|
|
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
index += 2;
|
|
|
|
|
} else if (typeof fieldValue === 'number') {
|
|
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue);
|
2016-06-16 15:39:05 -07:00
|
|
|
index += 2;
|
2018-05-18 15:26:33 -04:00
|
|
|
} else if (['$or', '$nor', '$and'].includes(fieldName)) {
|
2016-12-07 15:17:05 -08:00
|
|
|
const clauses = [];
|
|
|
|
|
const clauseValues = [];
|
2020-04-06 22:50:33 +05:30
|
|
|
fieldValue.forEach((subQuery) => {
|
2020-02-14 09:44:51 -08:00
|
|
|
const clause = buildWhereClause({
|
|
|
|
|
schema,
|
|
|
|
|
query: subQuery,
|
|
|
|
|
index,
|
|
|
|
|
caseInsensitive,
|
|
|
|
|
});
|
2016-08-20 16:07:48 -04:00
|
|
|
if (clause.pattern.length > 0) {
|
|
|
|
|
clauses.push(clause.pattern);
|
|
|
|
|
clauseValues.push(...clause.values);
|
|
|
|
|
index += clause.values.length;
|
|
|
|
|
}
|
2016-06-16 15:39:05 -07:00
|
|
|
});
|
2018-05-18 15:26:33 -04:00
|
|
|
|
|
|
|
|
const orOrAnd = fieldName === '$and' ? ' AND ' : ' OR ';
|
|
|
|
|
const not = fieldName === '$nor' ? ' NOT ' : '';
|
|
|
|
|
|
|
|
|
|
patterns.push(`${not}(${clauses.join(orOrAnd)})`);
|
2016-08-15 16:48:39 -04:00
|
|
|
values.push(...clauseValues);
|
|
|
|
|
}
|
|
|
|
|
|
2017-06-21 09:23:20 -03:00
|
|
|
if (fieldValue.$ne !== undefined) {
|
2016-08-20 16:07:48 -04:00
|
|
|
if (isArrayField) {
|
|
|
|
|
fieldValue.$ne = JSON.stringify([fieldValue.$ne]);
|
|
|
|
|
patterns.push(`NOT array_contains($${index}:name, $${index + 1})`);
|
2016-08-15 16:48:39 -04:00
|
|
|
} else {
|
2016-08-20 16:07:48 -04:00
|
|
|
if (fieldValue.$ne === null) {
|
2017-06-21 09:23:20 -03:00
|
|
|
patterns.push(`$${index}:name IS NOT NULL`);
|
|
|
|
|
values.push(fieldName);
|
|
|
|
|
index += 1;
|
|
|
|
|
continue;
|
2016-08-20 16:07:48 -04:00
|
|
|
} else {
|
|
|
|
|
// if not null, we need to manually exclude null
|
2019-04-30 23:29:44 -05:00
|
|
|
if (fieldValue.$ne.__type === 'GeoPoint') {
|
|
|
|
|
patterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`($${index}:name <> POINT($${index + 1}, $${
|
|
|
|
|
index + 2
|
|
|
|
|
}) OR $${index}:name IS NULL)`
|
2019-04-30 23:29:44 -05:00
|
|
|
);
|
|
|
|
|
} else {
|
GraphQL Object constraints (#5715)
* GraphQL Object constraints
Implements the GraphQL Object constraints, which allows us to filter queries results using the `$eq`, `$lt`, `$gt`, `$in`, and other Parse supported constraints.
Example:
```
query objects {
findMyClass(where: {
objField: {
_eq: {
key: 'foo.bar',
value: 'hello'
},
_gt: {
key: 'foo.number',
value: 10
},
_lt: {
key: 'anotherNumber',
value: 5
}
}
}) {
results {
objectId
}
}
}
```
In the example above, we have the `findMyClass` query (automatically generated for the `MyClass` class), and a field named `objField` whose type is Object. The object below represents a valid `objField` value and would satisfy all constraints:
```
{
"foo": {
"bar": "hello",
"number": 11
},
"anotherNumber": 4
}
```
The Object constraint is applied only when using Parse class object type queries. When using "generic" queries such as `get` and `find`, this type of constraint is not available.
* Objects constraints not working on Postgres
Fixes the $eq, $ne, $gt, and $lt constraints when applied on an Object type field.
* Fix object constraint field name
* Fix Postgres constraints indexes
* fix: Object type composed constraints not working
* fix: Rename key and value fields
* refactor: Object constraints for generic queries
* fix: Object constraints not working on Postgres
2019-08-02 16:18:08 -03:00
|
|
|
if (fieldName.indexOf('.') >= 0) {
|
|
|
|
|
const constraintFieldName = transformDotField(fieldName);
|
|
|
|
|
patterns.push(
|
|
|
|
|
`(${constraintFieldName} <> $${index} OR ${constraintFieldName} IS NULL)`
|
|
|
|
|
);
|
|
|
|
|
} else {
|
|
|
|
|
patterns.push(
|
|
|
|
|
`($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`
|
|
|
|
|
);
|
|
|
|
|
}
|
2019-04-30 23:29:44 -05:00
|
|
|
}
|
2016-08-20 16:07:48 -04:00
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2019-04-30 23:29:44 -05:00
|
|
|
if (fieldValue.$ne.__type === 'GeoPoint') {
|
|
|
|
|
const point = fieldValue.$ne;
|
|
|
|
|
values.push(fieldName, point.longitude, point.latitude);
|
|
|
|
|
index += 3;
|
|
|
|
|
} else {
|
|
|
|
|
// TODO: support arrays
|
|
|
|
|
values.push(fieldName, fieldValue.$ne);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2018-03-24 17:45:34 -05:00
|
|
|
if (fieldValue.$eq !== undefined) {
|
|
|
|
|
if (fieldValue.$eq === null) {
|
|
|
|
|
patterns.push(`$${index}:name IS NULL`);
|
|
|
|
|
values.push(fieldName);
|
|
|
|
|
index += 1;
|
|
|
|
|
} else {
|
GraphQL Object constraints (#5715)
* GraphQL Object constraints
Implements the GraphQL Object constraints, which allows us to filter queries results using the `$eq`, `$lt`, `$gt`, `$in`, and other Parse supported constraints.
Example:
```
query objects {
findMyClass(where: {
objField: {
_eq: {
key: 'foo.bar',
value: 'hello'
},
_gt: {
key: 'foo.number',
value: 10
},
_lt: {
key: 'anotherNumber',
value: 5
}
}
}) {
results {
objectId
}
}
}
```
In the example above, we have the `findMyClass` query (automatically generated for the `MyClass` class), and a field named `objField` whose type is Object. The object below represents a valid `objField` value and would satisfy all constraints:
```
{
"foo": {
"bar": "hello",
"number": 11
},
"anotherNumber": 4
}
```
The Object constraint is applied only when using Parse class object type queries. When using "generic" queries such as `get` and `find`, this type of constraint is not available.
* Objects constraints not working on Postgres
Fixes the $eq, $ne, $gt, and $lt constraints when applied on an Object type field.
* Fix object constraint field name
* Fix Postgres constraints indexes
* fix: Object type composed constraints not working
* fix: Rename key and value fields
* refactor: Object constraints for generic queries
* fix: Object constraints not working on Postgres
2019-08-02 16:18:08 -03:00
|
|
|
if (fieldName.indexOf('.') >= 0) {
|
|
|
|
|
values.push(fieldValue.$eq);
|
|
|
|
|
patterns.push(`${transformDotField(fieldName)} = $${index++}`);
|
|
|
|
|
} else {
|
|
|
|
|
values.push(fieldName, fieldValue.$eq);
|
|
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2018-03-24 17:45:34 -05:00
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
const isInOrNin =
|
|
|
|
|
Array.isArray(fieldValue.$in) || Array.isArray(fieldValue.$nin);
|
|
|
|
|
if (
|
|
|
|
|
Array.isArray(fieldValue.$in) &&
|
|
|
|
|
isArrayField &&
|
|
|
|
|
schema.fields[fieldName].contents &&
|
|
|
|
|
schema.fields[fieldName].contents.type === 'String'
|
|
|
|
|
) {
|
2016-12-07 15:17:05 -08:00
|
|
|
const inPatterns = [];
|
2016-06-16 15:39:05 -07:00
|
|
|
let allowNull = false;
|
|
|
|
|
values.push(fieldName);
|
|
|
|
|
fieldValue.$in.forEach((listElem, listIndex) => {
|
2016-12-02 16:11:54 -08:00
|
|
|
if (listElem === null) {
|
2016-06-16 15:39:05 -07:00
|
|
|
allowNull = true;
|
|
|
|
|
} else {
|
|
|
|
|
values.push(listElem);
|
|
|
|
|
inPatterns.push(`$${index + 1 + listIndex - (allowNull ? 1 : 0)}`);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
if (allowNull) {
|
2018-09-01 13:58:06 -04:00
|
|
|
patterns.push(
|
|
|
|
|
`($${index}:name IS NULL OR $${index}:name && ARRAY[${inPatterns.join()}])`
|
|
|
|
|
);
|
2016-06-16 15:39:05 -07:00
|
|
|
} else {
|
2017-12-28 01:43:34 +00:00
|
|
|
patterns.push(`$${index}:name && ARRAY[${inPatterns.join()}]`);
|
2016-06-16 15:39:05 -07:00
|
|
|
}
|
|
|
|
|
index = index + 1 + inPatterns.length;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (isInOrNin) {
|
2016-11-24 15:47:41 -05:00
|
|
|
var createConstraint = (baseArray, notIn) => {
|
2018-12-19 17:57:55 -06:00
|
|
|
const not = notIn ? ' NOT ' : '';
|
2016-08-15 16:48:39 -04:00
|
|
|
if (baseArray.length > 0) {
|
2016-08-20 16:07:48 -04:00
|
|
|
if (isArrayField) {
|
2018-09-01 13:58:06 -04:00
|
|
|
patterns.push(
|
|
|
|
|
`${not} array_contains($${index}:name, $${index + 1})`
|
|
|
|
|
);
|
2016-08-20 16:07:48 -04:00
|
|
|
values.push(fieldName, JSON.stringify(baseArray));
|
|
|
|
|
index += 2;
|
|
|
|
|
} else {
|
2017-08-23 10:33:57 -05:00
|
|
|
// Handle Nested Dot Notation Above
|
|
|
|
|
if (fieldName.indexOf('.') >= 0) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
2016-12-07 15:17:05 -08:00
|
|
|
const inPatterns = [];
|
2016-08-20 16:07:48 -04:00
|
|
|
values.push(fieldName);
|
|
|
|
|
baseArray.forEach((listElem, listIndex) => {
|
2019-08-13 23:34:46 -05:00
|
|
|
if (listElem != null) {
|
2017-07-23 11:11:02 -05:00
|
|
|
values.push(listElem);
|
|
|
|
|
inPatterns.push(`$${index + 1 + listIndex}`);
|
|
|
|
|
}
|
2016-08-20 16:07:48 -04:00
|
|
|
});
|
2017-12-28 01:43:34 +00:00
|
|
|
patterns.push(`$${index}:name ${not} IN (${inPatterns.join()})`);
|
2016-08-20 16:07:48 -04:00
|
|
|
index = index + 1 + inPatterns.length;
|
|
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (!notIn) {
|
|
|
|
|
values.push(fieldName);
|
|
|
|
|
patterns.push(`$${index}:name IS NULL`);
|
|
|
|
|
index = index + 1;
|
2018-12-19 17:57:55 -06:00
|
|
|
} else {
|
|
|
|
|
// Handle empty array
|
|
|
|
|
if (notIn) {
|
|
|
|
|
patterns.push('1 = 1'); // Return all values
|
|
|
|
|
} else {
|
|
|
|
|
patterns.push('1 = 2'); // Return no values
|
|
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
};
|
2016-08-15 16:48:39 -04:00
|
|
|
if (fieldValue.$in) {
|
2020-02-14 09:44:51 -08:00
|
|
|
createConstraint(
|
2020-04-06 22:50:33 +05:30
|
|
|
_.flatMap(fieldValue.$in, (elt) => elt),
|
2020-02-14 09:44:51 -08:00
|
|
|
false
|
|
|
|
|
);
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
|
|
|
|
if (fieldValue.$nin) {
|
2020-02-14 09:44:51 -08:00
|
|
|
createConstraint(
|
2020-04-06 22:50:33 +05:30
|
|
|
_.flatMap(fieldValue.$nin, (elt) => elt),
|
2020-02-14 09:44:51 -08:00
|
|
|
true
|
|
|
|
|
);
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
} else if (typeof fieldValue.$in !== 'undefined') {
|
2018-03-24 17:45:34 -05:00
|
|
|
throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad $in value');
|
|
|
|
|
} else if (typeof fieldValue.$nin !== 'undefined') {
|
|
|
|
|
throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad $nin value');
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
|
|
|
|
|
2016-08-20 16:07:48 -04:00
|
|
|
if (Array.isArray(fieldValue.$all) && isArrayField) {
|
2018-05-16 03:42:32 +02:00
|
|
|
if (isAnyValueRegexStartsWith(fieldValue.$all)) {
|
|
|
|
|
if (!isAllValuesRegexOrNone(fieldValue.$all)) {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'All $all values must be of regex type or none: ' + fieldValue.$all
|
|
|
|
|
);
|
2018-05-16 03:42:32 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
for (let i = 0; i < fieldValue.$all.length; i += 1) {
|
|
|
|
|
const value = processRegexPattern(fieldValue.$all[i].$regex);
|
|
|
|
|
fieldValue.$all[i] = value.substring(1) + '%';
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
patterns.push(
|
|
|
|
|
`array_contains_all_regex($${index}:name, $${index + 1}::jsonb)`
|
|
|
|
|
);
|
2018-05-16 03:42:32 +02:00
|
|
|
} else {
|
2018-09-01 13:58:06 -04:00
|
|
|
patterns.push(
|
|
|
|
|
`array_contains_all($${index}:name, $${index + 1}::jsonb)`
|
|
|
|
|
);
|
2018-05-16 03:42:32 +02:00
|
|
|
}
|
2016-08-20 16:07:48 -04:00
|
|
|
values.push(fieldName, JSON.stringify(fieldValue.$all));
|
2017-01-11 12:31:40 -08:00
|
|
|
index += 2;
|
2019-08-16 06:55:12 +02:00
|
|
|
} else if (Array.isArray(fieldValue.$all)) {
|
|
|
|
|
if (fieldValue.$all.length === 1) {
|
|
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue.$all[0].objectId);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2016-08-18 18:05:26 -04:00
|
|
|
}
|
|
|
|
|
|
2016-08-15 16:48:39 -04:00
|
|
|
if (typeof fieldValue.$exists !== 'undefined') {
|
|
|
|
|
if (fieldValue.$exists) {
|
|
|
|
|
patterns.push(`$${index}:name IS NOT NULL`);
|
|
|
|
|
} else {
|
|
|
|
|
patterns.push(`$${index}:name IS NULL`);
|
|
|
|
|
}
|
2016-06-16 15:39:05 -07:00
|
|
|
values.push(fieldName);
|
2016-08-15 16:48:39 -04:00
|
|
|
index += 1;
|
|
|
|
|
}
|
|
|
|
|
|
2018-05-18 09:35:50 -04:00
|
|
|
if (fieldValue.$containedBy) {
|
|
|
|
|
const arr = fieldValue.$containedBy;
|
|
|
|
|
if (!(arr instanceof Array)) {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $containedBy: should be an array`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
patterns.push(`$${index}:name <@ $${index + 1}::jsonb`);
|
|
|
|
|
values.push(fieldName, JSON.stringify(arr));
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
|
|
|
|
|
2017-06-13 20:42:59 -05:00
|
|
|
if (fieldValue.$text) {
|
|
|
|
|
const search = fieldValue.$text.$search;
|
|
|
|
|
let language = 'english';
|
|
|
|
|
if (typeof search !== 'object') {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $text: $search, should be object`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
if (!search.$term || typeof search.$term !== 'string') {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $text: $term, should be string`
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
if (search.$language && typeof search.$language !== 'string') {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $text: $language, should be string`
|
|
|
|
|
);
|
|
|
|
|
} else if (search.$language) {
|
|
|
|
|
language = search.$language;
|
|
|
|
|
}
|
|
|
|
|
if (search.$caseSensitive && typeof search.$caseSensitive !== 'boolean') {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $text: $caseSensitive, should be boolean`
|
|
|
|
|
);
|
|
|
|
|
} else if (search.$caseSensitive) {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $text: $caseSensitive not supported, please use $regex or create a separate lower case column.`
|
|
|
|
|
);
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
search.$diacriticSensitive &&
|
|
|
|
|
typeof search.$diacriticSensitive !== 'boolean'
|
|
|
|
|
) {
|
2017-06-13 20:42:59 -05:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $text: $diacriticSensitive, should be boolean`
|
|
|
|
|
);
|
|
|
|
|
} else if (search.$diacriticSensitive === false) {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
`bad $text: $diacriticSensitive - false not supported, install Postgres Unaccent Extension`
|
|
|
|
|
);
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
patterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`to_tsvector($${index}, $${index + 1}:name) @@ to_tsquery($${
|
|
|
|
|
index + 2
|
|
|
|
|
}, $${index + 3})`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2017-06-13 20:42:59 -05:00
|
|
|
values.push(language, fieldName, language, search.$term);
|
|
|
|
|
index += 4;
|
|
|
|
|
}
|
|
|
|
|
|
2016-08-15 16:48:39 -04:00
|
|
|
if (fieldValue.$nearSphere) {
|
2016-12-07 15:17:05 -08:00
|
|
|
const point = fieldValue.$nearSphere;
|
|
|
|
|
const distance = fieldValue.$maxDistance;
|
2017-01-11 12:31:40 -08:00
|
|
|
const distanceInKM = distance * 6371 * 1000;
|
2018-09-01 13:58:06 -04:00
|
|
|
patterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`ST_DistanceSphere($${index}:name::geometry, POINT($${index + 1}, $${
|
|
|
|
|
index + 2
|
|
|
|
|
})::geometry) <= $${index + 3}`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
|
|
|
|
sorts.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`ST_DistanceSphere($${index}:name::geometry, POINT($${index + 1}, $${
|
|
|
|
|
index + 2
|
|
|
|
|
})::geometry) ASC`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2016-08-20 16:07:48 -04:00
|
|
|
values.push(fieldName, point.longitude, point.latitude, distanceInKM);
|
2016-08-15 16:48:39 -04:00
|
|
|
index += 4;
|
|
|
|
|
}
|
|
|
|
|
|
2016-08-20 16:07:48 -04:00
|
|
|
if (fieldValue.$within && fieldValue.$within.$box) {
|
2016-12-07 15:17:05 -08:00
|
|
|
const box = fieldValue.$within.$box;
|
|
|
|
|
const left = box[0].longitude;
|
|
|
|
|
const bottom = box[0].latitude;
|
|
|
|
|
const right = box[1].longitude;
|
|
|
|
|
const top = box[1].latitude;
|
2016-08-20 16:07:48 -04:00
|
|
|
|
2017-01-11 12:31:40 -08:00
|
|
|
patterns.push(`$${index}:name::point <@ $${index + 1}::box`);
|
2016-08-20 16:07:48 -04:00
|
|
|
values.push(fieldName, `((${left}, ${bottom}), (${right}, ${top}))`);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
|
|
|
|
|
2018-06-12 18:41:02 +02:00
|
|
|
if (fieldValue.$geoWithin && fieldValue.$geoWithin.$centerSphere) {
|
|
|
|
|
const centerSphere = fieldValue.$geoWithin.$centerSphere;
|
|
|
|
|
if (!(centerSphere instanceof Array) || centerSphere.length < 2) {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'bad $geoWithin value; $centerSphere should be an array of Parse.GeoPoint and distance'
|
|
|
|
|
);
|
2018-06-12 18:41:02 +02:00
|
|
|
}
|
|
|
|
|
// Get point, convert to geo point if necessary and validate
|
|
|
|
|
let point = centerSphere[0];
|
|
|
|
|
if (point instanceof Array && point.length === 2) {
|
|
|
|
|
point = new Parse.GeoPoint(point[1], point[0]);
|
|
|
|
|
} else if (!GeoPointCoder.isValidJSON(point)) {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'bad $geoWithin value; $centerSphere geo point invalid'
|
|
|
|
|
);
|
2018-06-12 18:41:02 +02:00
|
|
|
}
|
|
|
|
|
Parse.GeoPoint._validate(point.latitude, point.longitude);
|
|
|
|
|
// Get distance and validate
|
|
|
|
|
const distance = centerSphere[1];
|
2018-09-01 13:58:06 -04:00
|
|
|
if (isNaN(distance) || distance < 0) {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'bad $geoWithin value; $centerSphere distance invalid'
|
|
|
|
|
);
|
2018-06-12 18:41:02 +02:00
|
|
|
}
|
|
|
|
|
const distanceInKM = distance * 6371 * 1000;
|
2018-09-01 13:58:06 -04:00
|
|
|
patterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`ST_DistanceSphere($${index}:name::geometry, POINT($${index + 1}, $${
|
|
|
|
|
index + 2
|
|
|
|
|
})::geometry) <= $${index + 3}`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2018-06-12 18:41:02 +02:00
|
|
|
values.push(fieldName, point.longitude, point.latitude, distanceInKM);
|
|
|
|
|
index += 4;
|
|
|
|
|
}
|
|
|
|
|
|
2017-05-28 11:42:16 -05:00
|
|
|
if (fieldValue.$geoWithin && fieldValue.$geoWithin.$polygon) {
|
|
|
|
|
const polygon = fieldValue.$geoWithin.$polygon;
|
2018-05-22 18:06:43 +02:00
|
|
|
let points;
|
|
|
|
|
if (typeof polygon === 'object' && polygon.__type === 'Polygon') {
|
|
|
|
|
if (!polygon.coordinates || polygon.coordinates.length < 3) {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'bad $geoWithin value; Polygon.coordinates should contain at least 3 lon/lat pairs'
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
points = polygon.coordinates;
|
2018-09-01 13:58:06 -04:00
|
|
|
} else if (polygon instanceof Array) {
|
2018-05-22 18:06:43 +02:00
|
|
|
if (polygon.length < 3) {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'bad $geoWithin value; $polygon should contain at least 3 GeoPoints'
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
points = polygon;
|
|
|
|
|
} else {
|
2017-05-31 17:08:37 -05:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
2018-09-01 13:58:06 -04:00
|
|
|
"bad $geoWithin value; $polygon should be Polygon object or Array of Parse.GeoPoint's"
|
2017-05-31 17:08:37 -05:00
|
|
|
);
|
2017-05-28 11:42:16 -05:00
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
points = points
|
2020-04-06 22:50:33 +05:30
|
|
|
.map((point) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
if (point instanceof Array && point.length === 2) {
|
|
|
|
|
Parse.GeoPoint._validate(point[1], point[0]);
|
|
|
|
|
return `(${point[0]}, ${point[1]})`;
|
|
|
|
|
}
|
|
|
|
|
if (typeof point !== 'object' || point.__type !== 'GeoPoint') {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'bad $geoWithin value'
|
|
|
|
|
);
|
|
|
|
|
} else {
|
|
|
|
|
Parse.GeoPoint._validate(point.latitude, point.longitude);
|
|
|
|
|
}
|
|
|
|
|
return `(${point.longitude}, ${point.latitude})`;
|
|
|
|
|
})
|
|
|
|
|
.join(', ');
|
2017-05-28 11:42:16 -05:00
|
|
|
|
|
|
|
|
patterns.push(`$${index}:name::point <@ $${index + 1}::polygon`);
|
|
|
|
|
values.push(fieldName, `(${points})`);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2017-07-11 22:33:45 -05:00
|
|
|
if (fieldValue.$geoIntersects && fieldValue.$geoIntersects.$point) {
|
|
|
|
|
const point = fieldValue.$geoIntersects.$point;
|
|
|
|
|
if (typeof point !== 'object' || point.__type !== 'GeoPoint') {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_JSON,
|
|
|
|
|
'bad $geoIntersect value; $point should be GeoPoint'
|
|
|
|
|
);
|
|
|
|
|
} else {
|
|
|
|
|
Parse.GeoPoint._validate(point.latitude, point.longitude);
|
|
|
|
|
}
|
|
|
|
|
patterns.push(`$${index}:name::polygon @> $${index + 1}::point`);
|
|
|
|
|
values.push(fieldName, `(${point.longitude}, ${point.latitude})`);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2017-05-28 11:42:16 -05:00
|
|
|
|
2016-08-15 16:48:39 -04:00
|
|
|
if (fieldValue.$regex) {
|
|
|
|
|
let regex = fieldValue.$regex;
|
|
|
|
|
let operator = '~';
|
2016-12-07 15:17:05 -08:00
|
|
|
const opts = fieldValue.$options;
|
2016-08-15 16:48:39 -04:00
|
|
|
if (opts) {
|
|
|
|
|
if (opts.indexOf('i') >= 0) {
|
|
|
|
|
operator = '~*';
|
|
|
|
|
}
|
2016-10-31 21:40:53 +05:30
|
|
|
if (opts.indexOf('x') >= 0) {
|
|
|
|
|
regex = removeWhiteSpace(regex);
|
|
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2016-10-31 21:40:53 +05:30
|
|
|
|
2017-08-23 10:33:57 -05:00
|
|
|
const name = transformDotField(fieldName);
|
2016-10-31 21:40:53 +05:30
|
|
|
regex = processRegexPattern(regex);
|
|
|
|
|
|
2017-08-23 10:33:57 -05:00
|
|
|
patterns.push(`$${index}:raw ${operator} '$${index + 1}:raw'`);
|
|
|
|
|
values.push(name, regex);
|
2016-08-15 16:48:39 -04:00
|
|
|
index += 2;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (fieldValue.__type === 'Pointer') {
|
2016-08-20 16:07:48 -04:00
|
|
|
if (isArrayField) {
|
|
|
|
|
patterns.push(`array_contains($${index}:name, $${index + 1})`);
|
|
|
|
|
values.push(fieldName, JSON.stringify([fieldValue]));
|
|
|
|
|
index += 2;
|
|
|
|
|
} else {
|
|
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue.objectId);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (fieldValue.__type === 'Date') {
|
|
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue.iso);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
|
|
|
|
|
2017-05-28 19:42:51 -05:00
|
|
|
if (fieldValue.__type === 'GeoPoint') {
|
2019-04-30 23:29:44 -05:00
|
|
|
patterns.push(`$${index}:name ~= POINT($${index + 1}, $${index + 2})`);
|
2017-05-28 19:42:51 -05:00
|
|
|
values.push(fieldName, fieldValue.longitude, fieldValue.latitude);
|
|
|
|
|
index += 3;
|
|
|
|
|
}
|
|
|
|
|
|
2017-07-11 22:33:45 -05:00
|
|
|
if (fieldValue.__type === 'Polygon') {
|
|
|
|
|
const value = convertPolygonToSQL(fieldValue.coordinates);
|
|
|
|
|
patterns.push(`$${index}:name ~= $${index + 1}::polygon`);
|
|
|
|
|
values.push(fieldName, value);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
|
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
Object.keys(ParseToPosgresComparator).forEach((cmp) => {
|
2018-03-24 17:45:34 -05:00
|
|
|
if (fieldValue[cmp] || fieldValue[cmp] === 0) {
|
2016-12-07 15:17:05 -08:00
|
|
|
const pgComparator = ParseToPosgresComparator[cmp];
|
GraphQL Object constraints (#5715)
* GraphQL Object constraints
Implements the GraphQL Object constraints, which allows us to filter queries results using the `$eq`, `$lt`, `$gt`, `$in`, and other Parse supported constraints.
Example:
```
query objects {
findMyClass(where: {
objField: {
_eq: {
key: 'foo.bar',
value: 'hello'
},
_gt: {
key: 'foo.number',
value: 10
},
_lt: {
key: 'anotherNumber',
value: 5
}
}
}) {
results {
objectId
}
}
}
```
In the example above, we have the `findMyClass` query (automatically generated for the `MyClass` class), and a field named `objField` whose type is Object. The object below represents a valid `objField` value and would satisfy all constraints:
```
{
"foo": {
"bar": "hello",
"number": 11
},
"anotherNumber": 4
}
```
The Object constraint is applied only when using Parse class object type queries. When using "generic" queries such as `get` and `find`, this type of constraint is not available.
* Objects constraints not working on Postgres
Fixes the $eq, $ne, $gt, and $lt constraints when applied on an Object type field.
* Fix object constraint field name
* Fix Postgres constraints indexes
* fix: Object type composed constraints not working
* fix: Rename key and value fields
* refactor: Object constraints for generic queries
* fix: Object constraints not working on Postgres
2019-08-02 16:18:08 -03:00
|
|
|
const postgresValue = toPostgresValue(fieldValue[cmp]);
|
|
|
|
|
let constraintFieldName;
|
|
|
|
|
if (fieldName.indexOf('.') >= 0) {
|
|
|
|
|
let castType;
|
|
|
|
|
switch (typeof postgresValue) {
|
|
|
|
|
case 'number':
|
|
|
|
|
castType = 'double precision';
|
|
|
|
|
break;
|
|
|
|
|
case 'boolean':
|
|
|
|
|
castType = 'boolean';
|
|
|
|
|
break;
|
|
|
|
|
default:
|
|
|
|
|
castType = undefined;
|
|
|
|
|
}
|
|
|
|
|
constraintFieldName = castType
|
|
|
|
|
? `CAST ((${transformDotField(fieldName)}) AS ${castType})`
|
|
|
|
|
: transformDotField(fieldName);
|
|
|
|
|
} else {
|
|
|
|
|
constraintFieldName = `$${index++}:name`;
|
|
|
|
|
values.push(fieldName);
|
|
|
|
|
}
|
|
|
|
|
values.push(postgresValue);
|
|
|
|
|
patterns.push(`${constraintFieldName} ${pgComparator} $${index++}`);
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (initialPatternsLength === patterns.length) {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.OPERATION_FORBIDDEN,
|
|
|
|
|
`Postgres doesn't support this query type yet ${JSON.stringify(
|
|
|
|
|
fieldValue
|
|
|
|
|
)}`
|
|
|
|
|
);
|
2016-06-16 15:39:05 -07:00
|
|
|
}
|
|
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
values = values.map(transformValue);
|
|
|
|
|
return { pattern: patterns.join(' AND '), values, sorts };
|
2018-09-01 13:58:06 -04:00
|
|
|
};
|
2016-06-16 15:39:05 -07:00
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
export class PostgresStorageAdapter implements StorageAdapter {
|
2018-05-01 07:37:38 -04:00
|
|
|
  // Always false for this adapter (assigned in the constructor); callers use it
  // to decide whether sorting on joined (relation) tables is supported.
  canSortOnJoinTables: boolean;

  // Private
  // Prefix prepended to class names when building table names.
  _collectionPrefix: string;
  // Database handle returned by createClient — presumably a pg-promise
  // connection (handleShutdown ends its `$pool`); verify against PostgresClient.
  _client: any;
  // Library instance returned by createClient alongside the client,
  // kept for query-formatting helpers — TODO confirm exact type.
  _pgp: any;
|
2016-06-12 16:35:13 -07:00
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
constructor({ uri, collectionPrefix = '', databaseOptions }: any) {
|
2016-06-12 16:35:13 -07:00
|
|
|
this._collectionPrefix = collectionPrefix;
|
2017-05-16 12:06:17 -04:00
|
|
|
const { client, pgp } = createClient(uri, databaseOptions);
|
|
|
|
|
this._client = client;
|
|
|
|
|
this._pgp = pgp;
|
2018-05-01 07:37:38 -04:00
|
|
|
this.canSortOnJoinTables = false;
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
//Note that analyze=true will run the query, executing INSERTS, DELETES, etc.
|
2020-04-06 22:50:33 +05:30
|
|
|
createExplainableQuery(query: string, analyze: boolean = false) {
|
|
|
|
|
if (analyze) {
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
return 'EXPLAIN (ANALYZE, FORMAT JSON) ' + query;
|
2020-04-06 22:50:33 +05:30
|
|
|
} else {
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
return 'EXPLAIN (FORMAT JSON) ' + query;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2017-11-19 04:20:19 +08:00
|
|
|
handleShutdown() {
|
|
|
|
|
if (!this._client) {
|
2018-01-01 20:33:41 +00:00
|
|
|
return;
|
2017-11-19 04:20:19 +08:00
|
|
|
}
|
|
|
|
|
this._client.$pool.end();
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async _ensureSchemaCollectionExists(conn: any) {
|
2016-10-29 05:53:37 +05:30
|
|
|
conn = conn || this._client;
|
2019-12-16 18:50:31 +00:00
|
|
|
await conn
|
2018-09-01 13:58:06 -04:00
|
|
|
.none(
|
|
|
|
|
'CREATE TABLE IF NOT EXISTS "_SCHEMA" ( "className" varChar(120), "schema" jsonb, "isParseClass" bool, PRIMARY KEY ("className") )'
|
|
|
|
|
)
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((error) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
error.code === PostgresDuplicateRelationError ||
|
|
|
|
|
error.code === PostgresUniqueIndexViolationError ||
|
|
|
|
|
error.code === PostgresDuplicateObjectError
|
|
|
|
|
) {
|
|
|
|
|
// Table already exists, must have been created by a different request. Ignore error.
|
2017-06-20 09:15:26 -07:00
|
|
|
} else {
|
|
|
|
|
throw error;
|
|
|
|
|
}
|
|
|
|
|
});
|
2016-11-24 15:47:41 -05:00
|
|
|
}
|
2016-06-12 16:35:13 -07:00
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async classExists(name: string) {
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._client.one(
|
|
|
|
|
'SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = $1)',
|
|
|
|
|
[name],
|
2020-04-06 22:50:33 +05:30
|
|
|
(a) => a.exists
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async setClassLevelPermissions(className: string, CLPs: any) {
|
2017-12-27 00:30:50 +00:00
|
|
|
const self = this;
|
2020-04-06 22:50:33 +05:30
|
|
|
await this._client.task('set-class-level-permissions', async (t) => {
|
2019-07-27 19:07:36 +00:00
|
|
|
await self._ensureSchemaCollectionExists(t);
|
2018-09-01 13:58:06 -04:00
|
|
|
const values = [
|
|
|
|
|
className,
|
|
|
|
|
'schema',
|
|
|
|
|
'classLevelPermissions',
|
|
|
|
|
JSON.stringify(CLPs),
|
|
|
|
|
];
|
2019-07-27 19:07:36 +00:00
|
|
|
await t.none(
|
2019-12-16 18:50:31 +00:00
|
|
|
`UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className" = $1`,
|
2018-09-01 13:58:06 -04:00
|
|
|
values
|
|
|
|
|
);
|
2016-08-18 18:05:26 -04:00
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async setIndexesWithSchemaFormat(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
submittedIndexes: any,
|
|
|
|
|
existingIndexes: any = {},
|
|
|
|
|
fields: any,
|
|
|
|
|
conn: ?any
|
|
|
|
|
): Promise<void> {
|
2017-11-25 13:55:34 -06:00
|
|
|
conn = conn || this._client;
|
2017-12-24 19:03:35 +00:00
|
|
|
const self = this;
|
2017-11-25 13:55:34 -06:00
|
|
|
if (submittedIndexes === undefined) {
|
|
|
|
|
return Promise.resolve();
|
|
|
|
|
}
|
|
|
|
|
if (Object.keys(existingIndexes).length === 0) {
|
2018-09-01 13:58:06 -04:00
|
|
|
existingIndexes = { _id_: { _id: 1 } };
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
const deletedIndexes = [];
|
|
|
|
|
const insertedIndexes = [];
|
2020-04-06 22:50:33 +05:30
|
|
|
Object.keys(submittedIndexes).forEach((name) => {
|
2017-11-25 13:55:34 -06:00
|
|
|
const field = submittedIndexes[name];
|
|
|
|
|
if (existingIndexes[name] && field.__op !== 'Delete') {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_QUERY,
|
|
|
|
|
`Index ${name} exists, cannot update.`
|
|
|
|
|
);
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
if (!existingIndexes[name] && field.__op === 'Delete') {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_QUERY,
|
|
|
|
|
`Index ${name} does not exist, cannot delete.`
|
|
|
|
|
);
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
if (field.__op === 'Delete') {
|
|
|
|
|
deletedIndexes.push(name);
|
|
|
|
|
delete existingIndexes[name];
|
|
|
|
|
} else {
|
2020-04-06 22:50:33 +05:30
|
|
|
Object.keys(field).forEach((key) => {
|
2019-08-14 16:57:00 -05:00
|
|
|
if (!Object.prototype.hasOwnProperty.call(fields, key)) {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.INVALID_QUERY,
|
|
|
|
|
`Field ${key} does not exist, cannot add index.`
|
|
|
|
|
);
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
existingIndexes[name] = field;
|
|
|
|
|
insertedIndexes.push({
|
|
|
|
|
key: field,
|
|
|
|
|
name,
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
});
|
2020-04-06 22:50:33 +05:30
|
|
|
await conn.tx('set-indexes-with-schema-format', async (t) => {
|
2017-12-24 19:03:35 +00:00
|
|
|
if (insertedIndexes.length > 0) {
|
2019-07-27 19:07:36 +00:00
|
|
|
await self.createIndexes(className, insertedIndexes, t);
|
2017-12-24 19:03:35 +00:00
|
|
|
}
|
|
|
|
|
if (deletedIndexes.length > 0) {
|
2019-07-27 19:07:36 +00:00
|
|
|
await self.dropIndexes(className, deletedIndexes, t);
|
2017-12-24 19:03:35 +00:00
|
|
|
}
|
2019-07-27 19:07:36 +00:00
|
|
|
await self._ensureSchemaCollectionExists(t);
|
|
|
|
|
await t.none(
|
2019-12-16 18:50:31 +00:00
|
|
|
'UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className" = $1',
|
2018-09-01 13:58:06 -04:00
|
|
|
[className, 'schema', 'indexes', JSON.stringify(existingIndexes)]
|
|
|
|
|
);
|
2017-11-30 08:30:15 +07:00
|
|
|
});
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async createClass(className: string, schema: SchemaType, conn: ?any) {
|
2017-12-27 18:14:15 +00:00
|
|
|
conn = conn || this._client;
|
2018-09-01 13:58:06 -04:00
|
|
|
return conn
|
2020-04-06 22:50:33 +05:30
|
|
|
.tx('create-class', async (t) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
const q1 = this.createTable(className, schema, t);
|
|
|
|
|
const q2 = t.none(
|
|
|
|
|
'INSERT INTO "_SCHEMA" ("className", "schema", "isParseClass") VALUES ($<className>, $<schema>, true)',
|
|
|
|
|
{ className, schema }
|
|
|
|
|
);
|
|
|
|
|
const q3 = this.setIndexesWithSchemaFormat(
|
|
|
|
|
className,
|
|
|
|
|
schema.indexes,
|
|
|
|
|
{},
|
|
|
|
|
schema.fields,
|
|
|
|
|
t
|
|
|
|
|
);
|
2019-12-16 18:50:31 +00:00
|
|
|
// TODO: The test should not verify the returned value, and then
|
|
|
|
|
// the method can be simplified, to avoid returning useless stuff.
|
2018-09-01 13:58:06 -04:00
|
|
|
return t.batch([q1, q2, q3]);
|
|
|
|
|
})
|
2017-06-20 09:15:26 -07:00
|
|
|
.then(() => {
|
2017-12-28 01:43:34 +00:00
|
|
|
return toParseSchema(schema);
|
2017-06-20 09:15:26 -07:00
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((err) => {
|
2017-12-28 01:43:34 +00:00
|
|
|
if (err.data[0].result.code === PostgresTransactionAbortedError) {
|
2017-06-20 09:15:26 -07:00
|
|
|
err = err.data[1].result;
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
err.code === PostgresUniqueIndexViolationError &&
|
|
|
|
|
err.detail.includes(className)
|
|
|
|
|
) {
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.DUPLICATE_VALUE,
|
|
|
|
|
`Class ${className} already exists.`
|
|
|
|
|
);
|
2017-06-20 09:15:26 -07:00
|
|
|
}
|
|
|
|
|
throw err;
|
2018-09-01 13:58:06 -04:00
|
|
|
});
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
  // Just create a table, do not insert in schema
  //
  // Builds a CREATE TABLE IF NOT EXISTS statement from `schema.fields` and runs
  // it inside a pg-promise task. Relation fields get no column here; instead a
  // `_Join:<field>:<class>` table is created for each of them afterwards.
  async createTable(className: string, schema: SchemaType, conn: any) {
    conn = conn || this._client;
    const self = this;
    debug('createTable', className, schema);
    const valuesArray = [];
    const patternsArray = [];
    // Shallow copy so the built-in _User fields added below don't add keys to
    // the caller's schema.fields object.
    const fields = Object.assign({}, schema.fields);
    if (className === '_User') {
      // Internal auth/bookkeeping columns every _User table carries.
      fields._email_verify_token_expires_at = { type: 'Date' };
      fields._email_verify_token = { type: 'String' };
      fields._account_lockout_expires_at = { type: 'Date' };
      fields._failed_login_count = { type: 'Number' };
      fields._perishable_token = { type: 'String' };
      fields._perishable_token_expires_at = { type: 'Date' };
      fields._password_changed_at = { type: 'Date' };
      fields._password_history = { type: 'Array' };
    }
    // $1 is the table name; every non-relation field then consumes two
    // placeholders: $<i>:name (column name) and $<i+1>:raw (column type).
    let index = 2;
    const relations = [];
    Object.keys(fields).forEach((fieldName) => {
      const parseType = fields[fieldName];
      // Skip when it's a relation
      // We'll create the tables later
      if (parseType.type === 'Relation') {
        relations.push(fieldName);
        return;
      }
      if (['_rperm', '_wperm'].indexOf(fieldName) >= 0) {
        // NOTE(review): `parseType` aliases the field descriptor from the
        // shallow copy above, so this mutation is visible to the caller's
        // schema object — presumably intentional; confirm before changing.
        parseType.contents = { type: 'String' };
      }
      valuesArray.push(fieldName);
      valuesArray.push(parseTypeToPostgresType(parseType));
      patternsArray.push(`$${index}:name $${index + 1}:raw`);
      if (fieldName === 'objectId') {
        patternsArray.push(`PRIMARY KEY ($${index}:name)`);
      }
      index = index + 2;
    });
    const qs = `CREATE TABLE IF NOT EXISTS $1:name (${patternsArray.join()})`;
    const values = [className, ...valuesArray];

    debug(qs, values);
    return conn.task('create-table', async (t) => {
      try {
        await self._ensureSchemaCollectionExists(t);
        await t.none(qs, values);
      } catch (error) {
        // 42P07 means a concurrent request already created the table.
        if (error.code !== PostgresDuplicateRelationError) {
          throw error;
        }
        // ELSE: Table already exists, must have been created by a different request. Ignore the error.
      }
      // Create one join table per relation field; IF NOT EXISTS makes this
      // idempotent too.
      await t.tx('create-table-tx', (tx) => {
        return tx.batch(
          relations.map((fieldName) => {
            return tx.none(
              'CREATE TABLE IF NOT EXISTS $<joinTable:name> ("relatedId" varChar(120), "owningId" varChar(120), PRIMARY KEY("relatedId", "owningId") )',
              { joinTable: `_Join:${fieldName}:${className}` }
            );
          })
        );
      });
    });
  }
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async schemaUpgrade(className: string, schema: SchemaType, conn: any) {
|
2018-01-03 00:23:05 -03:00
|
|
|
debug('schemaUpgrade', { className, schema });
|
|
|
|
|
conn = conn || this._client;
|
|
|
|
|
const self = this;
|
2018-01-08 20:42:08 -06:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
await conn.tx('schema-upgrade', async (t) => {
|
2019-07-27 19:07:36 +00:00
|
|
|
const columns = await t.map(
|
2018-09-01 13:58:06 -04:00
|
|
|
'SELECT column_name FROM information_schema.columns WHERE table_name = $<className>',
|
|
|
|
|
{ className },
|
2020-04-06 22:50:33 +05:30
|
|
|
(a) => a.column_name
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2018-01-03 00:23:05 -03:00
|
|
|
const newColumns = Object.keys(schema.fields)
|
2020-04-06 22:50:33 +05:30
|
|
|
.filter((item) => columns.indexOf(item) === -1)
|
|
|
|
|
.map((fieldName) =>
|
2018-09-01 13:58:06 -04:00
|
|
|
self.addFieldIfNotExists(
|
|
|
|
|
className,
|
|
|
|
|
fieldName,
|
|
|
|
|
schema.fields[fieldName],
|
|
|
|
|
t
|
|
|
|
|
)
|
|
|
|
|
);
|
2018-01-08 20:42:08 -06:00
|
|
|
|
2019-07-27 19:07:36 +00:00
|
|
|
await t.batch(newColumns);
|
2018-01-03 00:23:05 -03:00
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
  // Adds a column for `fieldName` to the class table (or a _Join table for
  // Relation fields) and records the field in the "_SCHEMA" document, all in
  // one transaction. Throws a plain string if the field is already present in
  // the stored schema.
  async addFieldIfNotExists(
    className: string,
    fieldName: string,
    type: any,
    conn: any
  ) {
    // TODO: Must be revised for invalid logic...
    debug('addFieldIfNotExists', { className, fieldName, type });
    conn = conn || this._client;
    const self = this;
    await conn.tx('add-field-if-not-exists', async (t) => {
      if (type.type !== 'Relation') {
        try {
          await t.none(
            'ALTER TABLE $<className:name> ADD COLUMN $<fieldName:name> $<postgresType:raw>',
            {
              className,
              fieldName,
              postgresType: parseTypeToPostgresType(type),
            }
          );
        } catch (error) {
          // 42P01: the class table itself doesn't exist yet — create it with
          // just this field and let createClass handle the rest.
          if (error.code === PostgresRelationDoesNotExistError) {
            return self.createClass(
              className,
              { fields: { [fieldName]: type } },
              t
            );
          }
          // Any error other than 42701 (duplicate column) is fatal.
          if (error.code !== PostgresDuplicateColumnError) {
            throw error;
          }
          // Column already exists, created by other request. Carry on to see if it's the right type.
        }
      } else {
        // Relation fields are backed by a separate join table, not a column.
        await t.none(
          'CREATE TABLE IF NOT EXISTS $<joinTable:name> ("relatedId" varChar(120), "owningId" varChar(120), PRIMARY KEY("relatedId", "owningId") )',
          { joinTable: `_Join:${fieldName}:${className}` }
        );
      }

      // Is the field already recorded in the stored schema document?
      const result = await t.any(
        'SELECT "schema" FROM "_SCHEMA" WHERE "className" = $<className> and ("schema"::json->\'fields\'->$<fieldName>) is not null',
        { className, fieldName }
      );

      if (result[0]) {
        // NOTE(review): a plain string rejection, not a Parse.Error —
        // callers apparently depend on this; confirm before changing.
        throw 'Attempted to add a field that already exists';
      } else {
        // jsonb_set path to this field inside the schema document.
        const path = `{fields,${fieldName}}`;
        await t.none(
          'UPDATE "_SCHEMA" SET "schema"=jsonb_set("schema", $<path>, $<type>) WHERE "className"=$<className>',
          { path, type, className }
        );
      }
    });
  }
|
|
|
|
|
|
|
|
|
|
// Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.)
|
|
|
|
|
// and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible.
|
2019-12-16 18:50:31 +00:00
|
|
|
async deleteClass(className: string) {
|
2017-05-28 15:48:32 +01:00
|
|
|
const operations = [
|
2018-09-01 13:58:06 -04:00
|
|
|
{ query: `DROP TABLE IF EXISTS $1:name`, values: [className] },
|
|
|
|
|
{
|
|
|
|
|
query: `DELETE FROM "_SCHEMA" WHERE "className" = $1`,
|
|
|
|
|
values: [className],
|
|
|
|
|
},
|
2017-05-28 15:48:32 +01:00
|
|
|
];
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._client
|
2020-04-06 22:50:33 +05:30
|
|
|
.tx((t) => t.none(this._pgp.helpers.concat(operations)))
|
2017-05-28 15:48:32 +01:00
|
|
|
.then(() => className.indexOf('_Join:') != 0); // resolves with false when _Join table
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2016-06-16 19:34:00 -07:00
|
|
|
// Delete all data known to this adapter. Used for testing.
|
2019-12-16 18:50:31 +00:00
|
|
|
async deleteAllClasses() {
|
2016-12-07 15:17:05 -08:00
|
|
|
const now = new Date().getTime();
|
2017-12-27 22:41:50 +00:00
|
|
|
const helpers = this._pgp.helpers;
|
2016-08-15 16:48:39 -04:00
|
|
|
debug('deleteAllClasses');
|
2017-12-29 11:39:16 -06:00
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
await this._client
|
2020-04-06 22:50:33 +05:30
|
|
|
.task('delete-all-classes', async (t) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
try {
|
2019-07-27 19:07:36 +00:00
|
|
|
const results = await t.any('SELECT * FROM "_SCHEMA"');
|
2018-09-01 13:58:06 -04:00
|
|
|
const joins = results.reduce((list: Array<string>, schema: any) => {
|
|
|
|
|
return list.concat(joinTablesForSchema(schema.schema));
|
|
|
|
|
}, []);
|
|
|
|
|
const classes = [
|
|
|
|
|
'_SCHEMA',
|
|
|
|
|
'_PushStatus',
|
|
|
|
|
'_JobStatus',
|
|
|
|
|
'_JobSchedule',
|
|
|
|
|
'_Hooks',
|
|
|
|
|
'_GlobalConfig',
|
2019-07-25 20:46:25 +01:00
|
|
|
'_GraphQLConfig',
|
2018-09-01 13:58:06 -04:00
|
|
|
'_Audience',
|
2020-04-06 22:50:33 +05:30
|
|
|
...results.map((result) => result.className),
|
2018-09-01 13:58:06 -04:00
|
|
|
...joins,
|
|
|
|
|
];
|
2020-04-06 22:50:33 +05:30
|
|
|
const queries = classes.map((className) => ({
|
2018-09-01 13:58:06 -04:00
|
|
|
query: 'DROP TABLE IF EXISTS $<className:name>',
|
|
|
|
|
values: { className },
|
|
|
|
|
}));
|
2020-04-06 22:50:33 +05:30
|
|
|
await t.tx((tx) => tx.none(helpers.concat(queries)));
|
2018-09-01 13:58:06 -04:00
|
|
|
} catch (error) {
|
|
|
|
|
if (error.code !== PostgresRelationDoesNotExistError) {
|
|
|
|
|
throw error;
|
|
|
|
|
}
|
|
|
|
|
// No _SCHEMA collection. Don't delete anything.
|
2017-06-20 09:15:26 -07:00
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
})
|
2018-01-01 20:33:41 +00:00
|
|
|
.then(() => {
|
|
|
|
|
debug(`deleteAllClasses done in ${new Date().getTime() - now}`);
|
|
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Remove the column and all the data. For Relations, the _Join collection is handled
|
|
|
|
|
// specially, this function does not delete _Join columns. It should, however, indicate
|
|
|
|
|
// that the relation fields does not exist anymore. In mongo, this means removing it from
|
|
|
|
|
// the _SCHEMA collection. There should be no actual data in the collection under the same name
|
|
|
|
|
// as the relation column, so it's fine to attempt to delete it. If the fields listed to be
|
|
|
|
|
// deleted do not exist, this function should return successfully anyways. Checking for
|
|
|
|
|
// attempts to delete non-existent fields is the responsibility of Parse Server.
|
|
|
|
|
|
|
|
|
|
// This function is not obligated to delete fields atomically. It is given the field
|
|
|
|
|
// names in a list so that databases that are capable of deleting fields atomically
|
|
|
|
|
// may do so.
|
|
|
|
|
|
|
|
|
|
// Returns a Promise.
|
2019-12-16 18:50:31 +00:00
|
|
|
async deleteFields(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
fieldNames: string[]
|
|
|
|
|
): Promise<void> {
|
2016-08-18 18:05:26 -04:00
|
|
|
debug('deleteFields', className, fieldNames);
|
2018-05-01 07:37:38 -04:00
|
|
|
fieldNames = fieldNames.reduce((list: Array<string>, fieldName: string) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
const field = schema.fields[fieldName];
|
2017-12-24 15:58:20 +00:00
|
|
|
if (field.type !== 'Relation') {
|
|
|
|
|
list.push(fieldName);
|
|
|
|
|
}
|
|
|
|
|
delete schema.fields[fieldName];
|
|
|
|
|
return list;
|
|
|
|
|
}, []);
|
|
|
|
|
|
|
|
|
|
const values = [className, ...fieldNames];
|
2018-09-01 13:58:06 -04:00
|
|
|
const columns = fieldNames
|
|
|
|
|
.map((name, idx) => {
|
|
|
|
|
return `$${idx + 2}:name`;
|
|
|
|
|
})
|
|
|
|
|
.join(', DROP COLUMN');
|
2017-12-24 15:58:20 +00:00
|
|
|
|
2020-04-06 22:50:33 +05:30
|
|
|
await this._client.tx('delete-fields', async (t) => {
|
2019-07-27 19:07:36 +00:00
|
|
|
await t.none(
|
2019-12-16 18:50:31 +00:00
|
|
|
'UPDATE "_SCHEMA" SET "schema" = $<schema> WHERE "className" = $<className>',
|
2018-09-01 13:58:06 -04:00
|
|
|
{ schema, className }
|
|
|
|
|
);
|
2017-12-24 15:58:20 +00:00
|
|
|
if (values.length > 1) {
|
2019-07-27 19:07:36 +00:00
|
|
|
await t.none(`ALTER TABLE $1:name DROP COLUMN ${columns}`, values);
|
2017-12-24 15:58:20 +00:00
|
|
|
}
|
|
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Return a promise for all schemas known to this adapter, in Parse format. In case the
|
2016-06-16 19:34:00 -07:00
|
|
|
// schemas cannot be retrieved, returns a promise that rejects. Requirements for the
|
2016-06-12 16:35:13 -07:00
|
|
|
// rejection reason are TBD.
|
2019-12-16 18:50:31 +00:00
|
|
|
async getAllClasses() {
|
2017-12-27 20:44:11 +00:00
|
|
|
const self = this;
|
2020-04-06 22:50:33 +05:30
|
|
|
return this._client.task('get-all-classes', async (t) => {
|
2019-07-27 19:07:36 +00:00
|
|
|
await self._ensureSchemaCollectionExists(t);
|
2020-04-06 22:50:33 +05:30
|
|
|
return await t.map('SELECT * FROM "_SCHEMA"', null, (row) =>
|
2018-09-01 13:58:06 -04:00
|
|
|
toParseSchema({ className: row.className, ...row.schema })
|
|
|
|
|
);
|
2017-12-29 11:39:16 -06:00
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Return a promise for the schema with the given name, in Parse format. If
|
|
|
|
|
// this adapter doesn't know about the schema, return a promise that rejects with
|
|
|
|
|
// undefined as the reason.
|
2019-12-16 18:50:31 +00:00
|
|
|
async getClass(className: string) {
|
2016-08-18 18:05:26 -04:00
|
|
|
debug('getClass', className);
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._client
|
2019-12-16 18:50:31 +00:00
|
|
|
.any('SELECT * FROM "_SCHEMA" WHERE "className" = $<className>', {
|
2018-09-01 13:58:06 -04:00
|
|
|
className,
|
|
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.then((result) => {
|
2018-01-01 20:33:41 +00:00
|
|
|
if (result.length !== 1) {
|
2018-01-08 20:42:08 -06:00
|
|
|
throw undefined;
|
2017-06-20 09:15:26 -07:00
|
|
|
}
|
2018-01-01 20:33:41 +00:00
|
|
|
return result[0].schema;
|
|
|
|
|
})
|
|
|
|
|
.then(toParseSchema);
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2016-06-11 00:43:02 -07:00
|
|
|
  // TODO: remove the mongo format dependency in the return value
  //
  // Inserts one object into the class table. Builds a single INSERT whose
  // placeholders are laid out as: $1 = table name, $2..$(1+N) = column names,
  // then N value placeholders (GeoPoints are appended last as POINT(x, y)).
  // Resolves with { ops: [object] } (mongo-shaped, see TODO above).
  async createObject(
    className: string,
    schema: SchemaType,
    object: any,
    transactionalSession: ?any
  ) {
    debug('createObject', className, object);
    let columnsArray = [];
    const valuesArray = [];
    schema = toPostgresSchema(schema);
    const geoPoints = {};

    object = handleDotFields(object);

    validateKeys(object);

    Object.keys(object).forEach((fieldName) => {
      // Skip explicit nulls entirely — no column, no value.
      if (object[fieldName] === null) {
        return;
      }
      // Fold _auth_data_<provider> keys into a single authData object/column.
      var authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/);
      if (authDataMatch) {
        var provider = authDataMatch[1];
        object['authData'] = object['authData'] || {};
        object['authData'][provider] = object[fieldName];
        delete object[fieldName];
        fieldName = 'authData';
      }

      columnsArray.push(fieldName);
      // _User has internal columns that are not part of the declared schema;
      // handle their values specially, then bail out of the typed switch.
      if (!schema.fields[fieldName] && className === '_User') {
        if (
          fieldName === '_email_verify_token' ||
          fieldName === '_failed_login_count' ||
          fieldName === '_perishable_token' ||
          fieldName === '_password_history'
        ) {
          valuesArray.push(object[fieldName]);
        }

        if (fieldName === '_email_verify_token_expires_at') {
          if (object[fieldName]) {
            valuesArray.push(object[fieldName].iso);
          } else {
            valuesArray.push(null);
          }
        }

        if (
          fieldName === '_account_lockout_expires_at' ||
          fieldName === '_perishable_token_expires_at' ||
          fieldName === '_password_changed_at'
        ) {
          if (object[fieldName]) {
            valuesArray.push(object[fieldName].iso);
          } else {
            valuesArray.push(null);
          }
        }
        return;
      }
      // Convert the Parse value into its Postgres representation by type.
      switch (schema.fields[fieldName].type) {
        case 'Date':
          if (object[fieldName]) {
            valuesArray.push(object[fieldName].iso);
          } else {
            valuesArray.push(null);
          }
          break;
        case 'Pointer':
          valuesArray.push(object[fieldName].objectId);
          break;
        case 'Array':
          // _rperm/_wperm are stored as text[]; other arrays as jsonb.
          if (['_rperm', '_wperm'].indexOf(fieldName) >= 0) {
            valuesArray.push(object[fieldName]);
          } else {
            valuesArray.push(JSON.stringify(object[fieldName]));
          }
          break;
        case 'Object':
        case 'Bytes':
        case 'String':
        case 'Number':
        case 'Boolean':
          valuesArray.push(object[fieldName]);
          break;
        case 'File':
          valuesArray.push(object[fieldName].name);
          break;
        case 'Polygon': {
          const value = convertPolygonToSQL(object[fieldName].coordinates);
          valuesArray.push(value);
          break;
        }
        case 'GeoPoint':
          // pop the point and process later
          geoPoints[fieldName] = object[fieldName];
          columnsArray.pop();
          break;
        default:
          throw `Type ${schema.fields[fieldName].type} not supported yet`;
      }
    });

    // GeoPoint columns go last so their POINT(...) literals line up with the
    // longitude/latitude values appended below.
    columnsArray = columnsArray.concat(Object.keys(geoPoints));
    const initialValues = valuesArray.map((val, index) => {
      // Cast placeholder where Postgres needs an explicit type.
      let termination = '';
      const fieldName = columnsArray[index];
      if (['_rperm', '_wperm'].indexOf(fieldName) >= 0) {
        termination = '::text[]';
      } else if (
        schema.fields[fieldName] &&
        schema.fields[fieldName].type === 'Array'
      ) {
        termination = '::jsonb';
      }
      // Value placeholders start after $1 (table) and the column-name slots.
      return `$${index + 2 + columnsArray.length}${termination}`;
    });
    const geoPointsInjects = Object.keys(geoPoints).map((key) => {
      const value = geoPoints[key];
      valuesArray.push(value.longitude, value.latitude);
      const l = valuesArray.length + columnsArray.length;
      return `POINT($${l}, $${l + 1})`;
    });

    const columnsPattern = columnsArray
      .map((col, index) => `$${index + 2}:name`)
      .join();
    const valuesPattern = initialValues.concat(geoPointsInjects).join();

    const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`;
    const values = [className, ...columnsArray, ...valuesArray];
    debug(qs, values);
    // Inside a transaction the statement runs on the session's task `t`;
    // otherwise on the shared client.
    const promise = (transactionalSession
      ? transactionalSession.t
      : this._client
    )
      .none(qs, values)
      .then(() => ({ ops: [object] }))
      .catch((error) => {
        // Translate unique-index violations (23505) into a Parse error with
        // the offending field attached when it can be parsed out.
        if (error.code === PostgresUniqueIndexViolationError) {
          const err = new Parse.Error(
            Parse.Error.DUPLICATE_VALUE,
            'A duplicate value for a field with unique values was provided'
          );
          err.underlyingError = error;
          if (error.constraint) {
            const matches = error.constraint.match(/unique_([a-zA-Z]+)/);
            if (matches && Array.isArray(matches)) {
              err.userInfo = { duplicated_field: matches[1] };
            }
          }
          error = err;
        }
        throw error;
      });
    if (transactionalSession) {
      transactionalSession.batch.push(promise);
    }
    return promise;
  }
|
|
|
|
|
|
|
|
|
|
// Remove all objects that match the given Parse Query.
|
|
|
|
|
// If no objects match, reject with OBJECT_NOT_FOUND. If objects are found and deleted, resolve with undefined.
|
|
|
|
|
// If there is some other error, reject with INTERNAL_SERVER_ERROR.
|
2019-12-16 18:50:31 +00:00
|
|
|
async deleteObjectsByQuery(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
2019-07-31 02:41:07 -07:00
|
|
|
query: QueryType,
|
|
|
|
|
transactionalSession: ?any
|
2018-09-01 13:58:06 -04:00
|
|
|
) {
|
2016-08-15 16:48:39 -04:00
|
|
|
debug('deleteObjectsByQuery', className, query);
|
2016-12-07 15:17:05 -08:00
|
|
|
const values = [className];
|
|
|
|
|
const index = 2;
|
2020-02-14 09:44:51 -08:00
|
|
|
const where = buildWhereClause({
|
|
|
|
|
schema,
|
|
|
|
|
index,
|
|
|
|
|
query,
|
|
|
|
|
caseInsensitive: false,
|
|
|
|
|
});
|
2016-08-15 16:48:39 -04:00
|
|
|
values.push(...where.values);
|
2016-09-02 17:00:47 -07:00
|
|
|
if (Object.keys(query).length === 0) {
|
2016-08-15 16:48:39 -04:00
|
|
|
where.pattern = 'TRUE';
|
|
|
|
|
}
|
2019-06-19 23:30:08 +01:00
|
|
|
const qs = `WITH deleted AS (DELETE FROM $1:name WHERE ${where.pattern} RETURNING *) SELECT count(*) FROM deleted`;
|
2016-08-15 16:48:39 -04:00
|
|
|
debug(qs, values);
|
2019-07-31 02:41:07 -07:00
|
|
|
const promise = (transactionalSession
|
|
|
|
|
? transactionalSession.t
|
|
|
|
|
: this._client
|
|
|
|
|
)
|
2020-04-06 22:50:33 +05:30
|
|
|
.one(qs, values, (a) => +a.count)
|
|
|
|
|
.then((count) => {
|
2017-06-20 09:15:26 -07:00
|
|
|
if (count === 0) {
|
2018-09-01 13:58:06 -04:00
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.OBJECT_NOT_FOUND,
|
|
|
|
|
'Object not found.'
|
|
|
|
|
);
|
2017-06-20 09:15:26 -07:00
|
|
|
} else {
|
|
|
|
|
return count;
|
|
|
|
|
}
|
2018-01-01 20:33:41 +00:00
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((error) => {
|
2018-01-01 20:33:41 +00:00
|
|
|
if (error.code !== PostgresRelationDoesNotExistError) {
|
2017-09-18 18:02:56 -05:00
|
|
|
throw error;
|
|
|
|
|
}
|
2018-01-01 20:33:41 +00:00
|
|
|
// ELSE: Don't delete anything if doesn't exist
|
2017-06-20 09:15:26 -07:00
|
|
|
});
|
2019-07-31 02:41:07 -07:00
|
|
|
if (transactionalSession) {
|
|
|
|
|
transactionalSession.batch.push(promise);
|
|
|
|
|
}
|
|
|
|
|
return promise;
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
2016-08-20 16:07:48 -04:00
|
|
|
// Return value not currently well specified.
|
2019-12-16 18:50:31 +00:00
|
|
|
async findOneAndUpdate(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
2019-07-31 02:41:07 -07:00
|
|
|
update: any,
|
|
|
|
|
transactionalSession: ?any
|
2018-09-01 13:58:06 -04:00
|
|
|
): Promise<any> {
|
2016-08-20 16:07:48 -04:00
|
|
|
debug('findOneAndUpdate', className, query, update);
|
2019-07-31 02:41:07 -07:00
|
|
|
return this.updateObjectsByQuery(
|
|
|
|
|
className,
|
|
|
|
|
schema,
|
|
|
|
|
query,
|
|
|
|
|
update,
|
|
|
|
|
transactionalSession
|
2020-04-06 22:50:33 +05:30
|
|
|
).then((val) => val[0]);
|
2016-08-20 16:07:48 -04:00
|
|
|
}
|
2016-06-12 16:35:13 -07:00
|
|
|
|
|
|
|
|
// Apply the update to all objects that match the given Parse Query.
|
2019-12-16 18:50:31 +00:00
|
|
|
async updateObjectsByQuery(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
2019-07-31 02:41:07 -07:00
|
|
|
update: any,
|
|
|
|
|
transactionalSession: ?any
|
2018-09-01 13:58:06 -04:00
|
|
|
): Promise<[any]> {
|
2016-08-15 16:48:39 -04:00
|
|
|
debug('updateObjectsByQuery', className, query, update);
|
2016-12-07 15:17:05 -08:00
|
|
|
const updatePatterns = [];
|
2018-09-01 13:58:06 -04:00
|
|
|
const values = [className];
|
2016-06-11 00:43:02 -07:00
|
|
|
let index = 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
schema = toPostgresSchema(schema);
|
2016-08-20 16:07:48 -04:00
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
const originalUpdate = { ...update };
|
2018-12-14 17:39:07 -06:00
|
|
|
|
|
|
|
|
// Set flag for dot notation fields
|
|
|
|
|
const dotNotationOptions = {};
|
2020-04-06 22:50:33 +05:30
|
|
|
Object.keys(update).forEach((fieldName) => {
|
2018-12-14 17:39:07 -06:00
|
|
|
if (fieldName.indexOf('.') > -1) {
|
|
|
|
|
const components = fieldName.split('.');
|
|
|
|
|
const first = components.shift();
|
|
|
|
|
dotNotationOptions[first] = true;
|
|
|
|
|
} else {
|
|
|
|
|
dotNotationOptions[fieldName] = false;
|
|
|
|
|
}
|
|
|
|
|
});
|
2016-08-20 16:07:48 -04:00
|
|
|
update = handleDotFields(update);
|
2016-08-18 18:05:26 -04:00
|
|
|
// Resolve authData first,
|
|
|
|
|
// So we don't end up with multiple key updates
|
2016-12-07 15:17:05 -08:00
|
|
|
for (const fieldName in update) {
|
|
|
|
|
const authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/);
|
2016-08-15 16:48:39 -04:00
|
|
|
if (authDataMatch) {
|
|
|
|
|
var provider = authDataMatch[1];
|
2016-12-07 15:17:05 -08:00
|
|
|
const value = update[fieldName];
|
2016-08-15 16:48:39 -04:00
|
|
|
delete update[fieldName];
|
2016-08-18 18:05:26 -04:00
|
|
|
update['authData'] = update['authData'] || {};
|
|
|
|
|
update['authData'][provider] = value;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2016-12-07 15:17:05 -08:00
|
|
|
for (const fieldName in update) {
|
|
|
|
|
const fieldValue = update[fieldName];
|
2018-09-26 21:42:06 +01:00
|
|
|
// Drop any undefined values.
|
|
|
|
|
if (typeof fieldValue === 'undefined') {
|
|
|
|
|
delete update[fieldName];
|
|
|
|
|
} else if (fieldValue === null) {
|
2016-08-30 07:19:21 -04:00
|
|
|
updatePatterns.push(`$${index}:name = NULL`);
|
|
|
|
|
values.push(fieldName);
|
|
|
|
|
index += 1;
|
|
|
|
|
} else if (fieldName == 'authData') {
|
2016-08-18 18:05:26 -04:00
|
|
|
// This recursively sets the json_object
|
|
|
|
|
// Only 1 level deep
|
2018-05-01 07:37:38 -04:00
|
|
|
const generate = (jsonb: string, key: string, value: any) => {
|
2016-12-01 10:24:46 -08:00
|
|
|
return `json_object_set_key(COALESCE(${jsonb}, '{}'::jsonb), ${key}, ${value})::jsonb`;
|
2018-09-01 13:58:06 -04:00
|
|
|
};
|
2016-12-07 15:17:05 -08:00
|
|
|
const lastKey = `$${index}:name`;
|
|
|
|
|
const fieldNameIndex = index;
|
2017-01-11 12:31:40 -08:00
|
|
|
index += 1;
|
2016-08-18 18:05:26 -04:00
|
|
|
values.push(fieldName);
|
2018-09-01 13:58:06 -04:00
|
|
|
const update = Object.keys(fieldValue).reduce(
|
|
|
|
|
(lastKey: string, key: string) => {
|
|
|
|
|
const str = generate(
|
|
|
|
|
lastKey,
|
|
|
|
|
`$${index}::text`,
|
|
|
|
|
`$${index + 1}::jsonb`
|
|
|
|
|
);
|
|
|
|
|
index += 2;
|
|
|
|
|
let value = fieldValue[key];
|
|
|
|
|
if (value) {
|
|
|
|
|
if (value.__op === 'Delete') {
|
|
|
|
|
value = null;
|
|
|
|
|
} else {
|
|
|
|
|
value = JSON.stringify(value);
|
|
|
|
|
}
|
2016-08-20 16:07:48 -04:00
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
values.push(key, value);
|
|
|
|
|
return str;
|
|
|
|
|
},
|
|
|
|
|
lastKey
|
|
|
|
|
);
|
2016-08-18 18:05:26 -04:00
|
|
|
updatePatterns.push(`$${fieldNameIndex}:name = ${update}`);
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (fieldValue.__op === 'Increment') {
|
2018-09-01 13:58:06 -04:00
|
|
|
updatePatterns.push(
|
|
|
|
|
`$${index}:name = COALESCE($${index}:name, 0) + $${index + 1}`
|
|
|
|
|
);
|
2016-06-11 00:43:02 -07:00
|
|
|
values.push(fieldName, fieldValue.amount);
|
|
|
|
|
index += 2;
|
2016-06-17 09:59:16 -07:00
|
|
|
} else if (fieldValue.__op === 'Add') {
|
2018-09-01 13:58:06 -04:00
|
|
|
updatePatterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`$${index}:name = array_add(COALESCE($${index}:name, '[]'::jsonb), $${
|
|
|
|
|
index + 1
|
|
|
|
|
}::jsonb)`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2016-08-18 18:05:26 -04:00
|
|
|
values.push(fieldName, JSON.stringify(fieldValue.objects));
|
2016-06-17 09:59:16 -07:00
|
|
|
index += 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (fieldValue.__op === 'Delete') {
|
2018-09-01 13:58:06 -04:00
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
2016-08-15 16:48:39 -04:00
|
|
|
values.push(fieldName, null);
|
|
|
|
|
index += 2;
|
2016-06-17 09:59:16 -07:00
|
|
|
} else if (fieldValue.__op === 'Remove') {
|
2018-09-01 13:58:06 -04:00
|
|
|
updatePatterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`$${index}:name = array_remove(COALESCE($${index}:name, '[]'::jsonb), $${
|
|
|
|
|
index + 1
|
|
|
|
|
}::jsonb)`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2016-08-18 18:05:26 -04:00
|
|
|
values.push(fieldName, JSON.stringify(fieldValue.objects));
|
|
|
|
|
index += 2;
|
2016-06-17 09:59:16 -07:00
|
|
|
} else if (fieldValue.__op === 'AddUnique') {
|
2018-09-01 13:58:06 -04:00
|
|
|
updatePatterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`$${index}:name = array_add_unique(COALESCE($${index}:name, '[]'::jsonb), $${
|
|
|
|
|
index + 1
|
|
|
|
|
}::jsonb)`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2016-08-18 18:05:26 -04:00
|
|
|
values.push(fieldName, JSON.stringify(fieldValue.objects));
|
|
|
|
|
index += 2;
|
2018-09-01 13:58:06 -04:00
|
|
|
} else if (fieldName === 'updatedAt') {
|
|
|
|
|
//TODO: stop special casing this. It should check for __type === 'Date' and use .iso
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
2016-08-15 16:48:39 -04:00
|
|
|
values.push(fieldName, fieldValue);
|
2016-06-11 00:43:02 -07:00
|
|
|
index += 2;
|
2016-06-17 09:59:16 -07:00
|
|
|
} else if (typeof fieldValue === 'string') {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
index += 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (typeof fieldValue === 'boolean') {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
index += 2;
|
2016-06-17 11:09:42 -07:00
|
|
|
} else if (fieldValue.__type === 'Pointer') {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue.objectId);
|
|
|
|
|
index += 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (fieldValue.__type === 'Date') {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, toPostgresValue(fieldValue));
|
|
|
|
|
index += 2;
|
2016-08-30 07:19:21 -04:00
|
|
|
} else if (fieldValue instanceof Date) {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
index += 2;
|
2016-08-18 18:05:26 -04:00
|
|
|
} else if (fieldValue.__type === 'File') {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, toPostgresValue(fieldValue));
|
|
|
|
|
index += 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (fieldValue.__type === 'GeoPoint') {
|
2018-09-01 13:58:06 -04:00
|
|
|
updatePatterns.push(
|
|
|
|
|
`$${index}:name = POINT($${index + 1}, $${index + 2})`
|
|
|
|
|
);
|
2017-05-28 11:41:09 -05:00
|
|
|
values.push(fieldName, fieldValue.longitude, fieldValue.latitude);
|
2016-08-15 16:48:39 -04:00
|
|
|
index += 3;
|
2017-07-11 22:33:45 -05:00
|
|
|
} else if (fieldValue.__type === 'Polygon') {
|
|
|
|
|
const value = convertPolygonToSQL(fieldValue.coordinates);
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}::polygon`);
|
|
|
|
|
values.push(fieldName, value);
|
|
|
|
|
index += 2;
|
2016-11-21 09:22:16 -05:00
|
|
|
} else if (fieldValue.__type === 'Relation') {
|
|
|
|
|
// noop
|
2016-08-15 16:48:39 -04:00
|
|
|
} else if (typeof fieldValue === 'number') {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
index += 2;
|
2018-09-01 13:58:06 -04:00
|
|
|
} else if (
|
|
|
|
|
typeof fieldValue === 'object' &&
|
|
|
|
|
schema.fields[fieldName] &&
|
|
|
|
|
schema.fields[fieldName].type === 'Object'
|
|
|
|
|
) {
|
2017-01-13 19:34:04 -05:00
|
|
|
// Gather keys to increment
|
2018-09-01 13:58:06 -04:00
|
|
|
const keysToIncrement = Object.keys(originalUpdate)
|
2020-04-06 22:50:33 +05:30
|
|
|
.filter((k) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
// choose top level fields that have a delete operation set
|
|
|
|
|
// Note that Object.keys is iterating over the **original** update object
|
|
|
|
|
// and that some of the keys of the original update could be null or undefined:
|
|
|
|
|
// (See the above check `if (fieldValue === null || typeof fieldValue == "undefined")`)
|
|
|
|
|
const value = originalUpdate[k];
|
|
|
|
|
return (
|
|
|
|
|
value &&
|
|
|
|
|
value.__op === 'Increment' &&
|
|
|
|
|
k.split('.').length === 2 &&
|
|
|
|
|
k.split('.')[0] === fieldName
|
|
|
|
|
);
|
|
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.map((k) => k.split('.')[1]);
|
2017-01-13 19:34:04 -05:00
|
|
|
|
|
|
|
|
let incrementPatterns = '';
|
|
|
|
|
if (keysToIncrement.length > 0) {
|
2018-09-01 13:58:06 -04:00
|
|
|
incrementPatterns =
|
|
|
|
|
' || ' +
|
|
|
|
|
keysToIncrement
|
2020-04-06 22:50:33 +05:30
|
|
|
.map((c) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
const amount = fieldValue[c].amount;
|
|
|
|
|
return `CONCAT('{"${c}":', COALESCE($${index}:name->>'${c}','0')::int + ${amount}, '}')::jsonb`;
|
|
|
|
|
})
|
|
|
|
|
.join(' || ');
|
2017-01-13 19:34:04 -05:00
|
|
|
// Strip the keys
|
2020-04-06 22:50:33 +05:30
|
|
|
keysToIncrement.forEach((key) => {
|
2017-01-13 19:34:04 -05:00
|
|
|
delete fieldValue[key];
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
const keysToDelete: Array<string> = Object.keys(originalUpdate)
|
2020-04-06 22:50:33 +05:30
|
|
|
.filter((k) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
// choose top level fields that have a delete operation set.
|
|
|
|
|
const value = originalUpdate[k];
|
|
|
|
|
return (
|
|
|
|
|
value &&
|
|
|
|
|
value.__op === 'Delete' &&
|
|
|
|
|
k.split('.').length === 2 &&
|
|
|
|
|
k.split('.')[0] === fieldName
|
|
|
|
|
);
|
|
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.map((k) => k.split('.')[1]);
|
2018-09-01 13:58:06 -04:00
|
|
|
|
|
|
|
|
const deletePatterns = keysToDelete.reduce(
|
|
|
|
|
(p: string, c: string, i: number) => {
|
|
|
|
|
return p + ` - '$${index + 1 + i}:value'`;
|
|
|
|
|
},
|
|
|
|
|
''
|
|
|
|
|
);
|
2018-12-14 17:39:07 -06:00
|
|
|
// Override Object
|
|
|
|
|
let updateObject = "'{}'::jsonb";
|
2016-11-02 06:25:53 +05:30
|
|
|
|
2018-12-14 17:39:07 -06:00
|
|
|
if (dotNotationOptions[fieldName]) {
|
|
|
|
|
// Merge Object
|
|
|
|
|
updateObject = `COALESCE($${index}:name, '{}'::jsonb)`;
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
updatePatterns.push(
|
2020-04-06 22:50:33 +05:30
|
|
|
`$${index}:name = (${updateObject} ${deletePatterns} ${incrementPatterns} || $${
|
|
|
|
|
index + 1 + keysToDelete.length
|
|
|
|
|
}::jsonb )`
|
2018-09-01 13:58:06 -04:00
|
|
|
);
|
2016-11-24 15:47:41 -05:00
|
|
|
values.push(fieldName, ...keysToDelete, JSON.stringify(fieldValue));
|
|
|
|
|
index += 2 + keysToDelete.length;
|
2018-09-01 13:58:06 -04:00
|
|
|
} else if (
|
|
|
|
|
Array.isArray(fieldValue) &&
|
|
|
|
|
schema.fields[fieldName] &&
|
|
|
|
|
schema.fields[fieldName].type === 'Array'
|
|
|
|
|
) {
|
2016-12-07 15:17:05 -08:00
|
|
|
const expectedType = parseTypeToPostgresType(schema.fields[fieldName]);
|
2016-08-15 16:48:39 -04:00
|
|
|
if (expectedType === 'text[]') {
|
|
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}::text[]`);
|
2018-12-18 17:38:05 -06:00
|
|
|
values.push(fieldName, fieldValue);
|
|
|
|
|
index += 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
} else {
|
2019-05-02 12:44:17 -05:00
|
|
|
updatePatterns.push(`$${index}:name = $${index + 1}::jsonb`);
|
|
|
|
|
values.push(fieldName, JSON.stringify(fieldValue));
|
|
|
|
|
index += 2;
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2016-06-11 00:43:02 -07:00
|
|
|
} else {
|
2016-08-15 16:48:39 -04:00
|
|
|
debug('Not supported update', fieldName, fieldValue);
|
2018-09-01 13:58:06 -04:00
|
|
|
return Promise.reject(
|
|
|
|
|
new Parse.Error(
|
|
|
|
|
Parse.Error.OPERATION_FORBIDDEN,
|
|
|
|
|
`Postgres doesn't support update ${JSON.stringify(fieldValue)} yet`
|
|
|
|
|
)
|
|
|
|
|
);
|
2016-06-11 00:43:02 -07:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2020-02-14 09:44:51 -08:00
|
|
|
const where = buildWhereClause({
|
|
|
|
|
schema,
|
|
|
|
|
index,
|
|
|
|
|
query,
|
|
|
|
|
caseInsensitive: false,
|
|
|
|
|
});
|
2016-06-16 15:39:05 -07:00
|
|
|
values.push(...where.values);
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
const whereClause =
|
|
|
|
|
where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
|
2017-12-28 01:43:34 +00:00
|
|
|
const qs = `UPDATE $1:name SET ${updatePatterns.join()} ${whereClause} RETURNING *`;
|
2016-08-15 16:48:39 -04:00
|
|
|
debug('update: ', qs, values);
|
2019-07-31 02:41:07 -07:00
|
|
|
const promise = (transactionalSession
|
|
|
|
|
? transactionalSession.t
|
|
|
|
|
: this._client
|
|
|
|
|
).any(qs, values);
|
|
|
|
|
if (transactionalSession) {
|
|
|
|
|
transactionalSession.batch.push(promise);
|
|
|
|
|
}
|
|
|
|
|
return promise;
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2016-06-16 19:34:00 -07:00
|
|
|
// Hopefully, we can get rid of this. It's only used for config and hooks.
|
2018-09-01 13:58:06 -04:00
|
|
|
upsertOneObject(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
2019-07-31 02:41:07 -07:00
|
|
|
update: any,
|
|
|
|
|
transactionalSession: ?any
|
2018-09-01 13:58:06 -04:00
|
|
|
) {
|
|
|
|
|
debug('upsertOneObject', { className, query, update });
|
2016-12-07 15:17:05 -08:00
|
|
|
const createValue = Object.assign({}, query, update);
|
2019-07-31 02:41:07 -07:00
|
|
|
return this.createObject(
|
|
|
|
|
className,
|
|
|
|
|
schema,
|
|
|
|
|
createValue,
|
|
|
|
|
transactionalSession
|
2020-04-06 22:50:33 +05:30
|
|
|
).catch((error) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
// ignore duplicate value errors as it's upsert
|
|
|
|
|
if (error.code !== Parse.Error.DUPLICATE_VALUE) {
|
|
|
|
|
throw error;
|
|
|
|
|
}
|
2019-07-31 02:41:07 -07:00
|
|
|
return this.findOneAndUpdate(
|
|
|
|
|
className,
|
|
|
|
|
schema,
|
|
|
|
|
query,
|
|
|
|
|
update,
|
|
|
|
|
transactionalSession
|
|
|
|
|
);
|
2018-09-01 13:58:06 -04:00
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
find(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backward compatible with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
{ skip, limit, sort, keys, caseInsensitive, explain }: QueryOptions
|
2018-09-01 13:58:06 -04:00
|
|
|
) {
|
2020-02-14 09:44:51 -08:00
|
|
|
debug('find', className, query, {
|
|
|
|
|
skip,
|
|
|
|
|
limit,
|
|
|
|
|
sort,
|
|
|
|
|
keys,
|
|
|
|
|
caseInsensitive,
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
explain,
|
2020-02-14 09:44:51 -08:00
|
|
|
});
|
2016-08-15 16:48:39 -04:00
|
|
|
const hasLimit = limit !== undefined;
|
|
|
|
|
const hasSkip = skip !== undefined;
|
2016-06-16 15:39:05 -07:00
|
|
|
let values = [className];
|
2020-02-14 09:44:51 -08:00
|
|
|
const where = buildWhereClause({
|
|
|
|
|
schema,
|
|
|
|
|
query,
|
|
|
|
|
index: 2,
|
|
|
|
|
caseInsensitive,
|
|
|
|
|
});
|
2016-06-16 15:39:05 -07:00
|
|
|
values.push(...where.values);
|
2016-12-01 10:24:46 -08:00
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
const wherePattern =
|
|
|
|
|
where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
|
2016-08-15 16:48:39 -04:00
|
|
|
const limitPattern = hasLimit ? `LIMIT $${values.length + 1}` : '';
|
|
|
|
|
if (hasLimit) {
|
2016-06-16 15:39:05 -07:00
|
|
|
values.push(limit);
|
2016-06-11 00:43:02 -07:00
|
|
|
}
|
2017-01-11 12:31:40 -08:00
|
|
|
const skipPattern = hasSkip ? `OFFSET $${values.length + 1}` : '';
|
2016-08-15 16:48:39 -04:00
|
|
|
if (hasSkip) {
|
|
|
|
|
values.push(skip);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
let sortPattern = '';
|
|
|
|
|
if (sort) {
|
2017-12-30 20:44:18 -05:00
|
|
|
const sortCopy: any = sort;
|
2018-09-01 13:58:06 -04:00
|
|
|
const sorting = Object.keys(sort)
|
2020-04-06 22:50:33 +05:30
|
|
|
.map((key) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
const transformKey = transformDotFieldToComponents(key).join('->');
|
|
|
|
|
// Using $idx pattern gives: non-integer constant in ORDER BY
|
|
|
|
|
if (sortCopy[key] === 1) {
|
|
|
|
|
return `${transformKey} ASC`;
|
|
|
|
|
}
|
|
|
|
|
return `${transformKey} DESC`;
|
|
|
|
|
})
|
|
|
|
|
.join();
|
|
|
|
|
sortPattern =
|
|
|
|
|
sort !== undefined && Object.keys(sort).length > 0
|
|
|
|
|
? `ORDER BY ${sorting}`
|
|
|
|
|
: '';
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2017-12-30 20:44:18 -05:00
|
|
|
if (where.sorts && Object.keys((where.sorts: any)).length > 0) {
|
2017-12-28 01:43:34 +00:00
|
|
|
sortPattern = `ORDER BY ${where.sorts.join()}`;
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
|
|
|
|
|
2016-09-24 13:43:49 -04:00
|
|
|
let columns = '*';
|
|
|
|
|
if (keys) {
|
|
|
|
|
// Exclude empty keys
|
2018-08-15 09:48:32 -04:00
|
|
|
// Replace ACL by it's keys
|
|
|
|
|
keys = keys.reduce((memo, key) => {
|
|
|
|
|
if (key === 'ACL') {
|
|
|
|
|
memo.push('_rperm');
|
|
|
|
|
memo.push('_wperm');
|
|
|
|
|
} else if (key.length > 0) {
|
|
|
|
|
memo.push(key);
|
|
|
|
|
}
|
|
|
|
|
return memo;
|
|
|
|
|
}, []);
|
2018-09-01 13:58:06 -04:00
|
|
|
columns = keys
|
|
|
|
|
.map((key, index) => {
|
|
|
|
|
if (key === '$score') {
|
|
|
|
|
return `ts_rank_cd(to_tsvector($${2}, $${3}:name), to_tsquery($${4}, $${5}), 32) as score`;
|
|
|
|
|
}
|
|
|
|
|
return `$${index + values.length + 1}:name`;
|
|
|
|
|
})
|
|
|
|
|
.join();
|
2016-09-24 13:43:49 -04:00
|
|
|
values = values.concat(keys);
|
|
|
|
|
}
|
|
|
|
|
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version adds the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plan results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
const originalQuery = `SELECT ${columns} FROM $1:name ${wherePattern} ${sortPattern} ${limitPattern} ${skipPattern}`;
|
2020-04-06 22:50:33 +05:30
|
|
|
const qs = explain
|
|
|
|
|
? this.createExplainableQuery(originalQuery)
|
|
|
|
|
: originalQuery;
|
2016-08-15 16:48:39 -04:00
|
|
|
debug(qs, values);
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._client
|
|
|
|
|
.any(qs, values)
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((error) => {
|
2018-01-01 20:33:41 +00:00
|
|
|
// Query on non existing table, don't crash
|
|
|
|
|
if (error.code !== PostgresRelationDoesNotExistError) {
|
|
|
|
|
throw error;
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2018-01-01 20:33:41 +00:00
|
|
|
return [];
|
2017-06-20 09:15:26 -07:00
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.then((results) => {
|
|
|
|
|
if (explain) {
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
return results;
|
|
|
|
|
}
|
2020-04-06 22:50:33 +05:30
|
|
|
return results.map((object) =>
|
2018-09-01 13:58:06 -04:00
|
|
|
this.postgresObjectToParseObject(className, object, schema)
|
2020-04-06 22:50:33 +05:30
|
|
|
);
|
|
|
|
|
});
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Converts from a postgres-format object to a REST-format object.
|
|
|
|
|
// Does not strip out anything based on a lack of authentication.
|
2017-12-30 20:44:18 -05:00
|
|
|
postgresObjectToParseObject(className: string, object: any, schema: any) {
|
2020-04-06 22:50:33 +05:30
|
|
|
Object.keys(schema.fields).forEach((fieldName) => {
|
2017-11-22 23:07:45 -08:00
|
|
|
if (schema.fields[fieldName].type === 'Pointer' && object[fieldName]) {
|
2018-09-01 13:58:06 -04:00
|
|
|
object[fieldName] = {
|
|
|
|
|
objectId: object[fieldName],
|
|
|
|
|
__type: 'Pointer',
|
|
|
|
|
className: schema.fields[fieldName].targetClass,
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (schema.fields[fieldName].type === 'Relation') {
|
|
|
|
|
object[fieldName] = {
|
2018-09-01 13:58:06 -04:00
|
|
|
__type: 'Relation',
|
|
|
|
|
className: schema.fields[fieldName].targetClass,
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (object[fieldName] && schema.fields[fieldName].type === 'GeoPoint') {
|
|
|
|
|
object[fieldName] = {
|
2018-09-01 13:58:06 -04:00
|
|
|
__type: 'GeoPoint',
|
2017-11-22 23:07:45 -08:00
|
|
|
latitude: object[fieldName].y,
|
2018-09-01 13:58:06 -04:00
|
|
|
longitude: object[fieldName].x,
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (object[fieldName] && schema.fields[fieldName].type === 'Polygon') {
|
|
|
|
|
let coords = object[fieldName];
|
|
|
|
|
coords = coords.substr(2, coords.length - 4).split('),(');
|
2020-04-06 22:50:33 +05:30
|
|
|
coords = coords.map((point) => {
|
2017-11-22 23:07:45 -08:00
|
|
|
return [
|
|
|
|
|
parseFloat(point.split(',')[1]),
|
2018-09-01 13:58:06 -04:00
|
|
|
parseFloat(point.split(',')[0]),
|
2017-11-22 23:07:45 -08:00
|
|
|
];
|
|
|
|
|
});
|
|
|
|
|
object[fieldName] = {
|
2018-09-01 13:58:06 -04:00
|
|
|
__type: 'Polygon',
|
|
|
|
|
coordinates: coords,
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (object[fieldName] && schema.fields[fieldName].type === 'File') {
|
|
|
|
|
object[fieldName] = {
|
|
|
|
|
__type: 'File',
|
2018-09-01 13:58:06 -04:00
|
|
|
name: object[fieldName],
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
//TODO: remove this reliance on the mongo format. DB adapter shouldn't know there is a difference between created at and any other date field.
|
|
|
|
|
if (object.createdAt) {
|
|
|
|
|
object.createdAt = object.createdAt.toISOString();
|
|
|
|
|
}
|
|
|
|
|
if (object.updatedAt) {
|
|
|
|
|
object.updatedAt = object.updatedAt.toISOString();
|
|
|
|
|
}
|
|
|
|
|
if (object.expiresAt) {
|
2018-09-01 13:58:06 -04:00
|
|
|
object.expiresAt = {
|
|
|
|
|
__type: 'Date',
|
|
|
|
|
iso: object.expiresAt.toISOString(),
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (object._email_verify_token_expires_at) {
|
2018-09-01 13:58:06 -04:00
|
|
|
object._email_verify_token_expires_at = {
|
|
|
|
|
__type: 'Date',
|
|
|
|
|
iso: object._email_verify_token_expires_at.toISOString(),
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (object._account_lockout_expires_at) {
|
2018-09-01 13:58:06 -04:00
|
|
|
object._account_lockout_expires_at = {
|
|
|
|
|
__type: 'Date',
|
|
|
|
|
iso: object._account_lockout_expires_at.toISOString(),
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (object._perishable_token_expires_at) {
|
2018-09-01 13:58:06 -04:00
|
|
|
object._perishable_token_expires_at = {
|
|
|
|
|
__type: 'Date',
|
|
|
|
|
iso: object._perishable_token_expires_at.toISOString(),
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
if (object._password_changed_at) {
|
2018-09-01 13:58:06 -04:00
|
|
|
object._password_changed_at = {
|
|
|
|
|
__type: 'Date',
|
|
|
|
|
iso: object._password_changed_at.toISOString(),
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
2016-06-11 00:43:02 -07:00
|
|
|
|
2017-11-22 23:07:45 -08:00
|
|
|
for (const fieldName in object) {
|
|
|
|
|
if (object[fieldName] === null) {
|
|
|
|
|
delete object[fieldName];
|
|
|
|
|
}
|
|
|
|
|
if (object[fieldName] instanceof Date) {
|
2018-09-01 13:58:06 -04:00
|
|
|
object[fieldName] = {
|
|
|
|
|
__type: 'Date',
|
|
|
|
|
iso: object[fieldName].toISOString(),
|
|
|
|
|
};
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
|
|
|
|
}
|
2017-06-20 09:15:26 -07:00
|
|
|
|
2017-11-22 23:07:45 -08:00
|
|
|
return object;
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Create a unique index. Unique indexes on nullable fields are not allowed. Since we don't
|
|
|
|
|
// currently know which fields are nullable and which aren't, we ignore that criteria.
|
|
|
|
|
// As such, we shouldn't expose this function to users of parse until we have an out-of-band
|
|
|
|
|
// Way of determining if a field is nullable. Undefined doesn't count against uniqueness,
|
|
|
|
|
// which is why we use sparse indexes.
|
2019-12-16 18:50:31 +00:00
|
|
|
async ensureUniqueness(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
fieldNames: string[]
|
|
|
|
|
) {
|
2016-06-16 15:39:05 -07:00
|
|
|
// Use the same name for every ensureUniqueness attempt, because postgres
|
|
|
|
|
// Will happily create the same index with multiple names.
|
|
|
|
|
const constraintName = `unique_${fieldNames.sort().join('_')}`;
|
2018-09-01 13:58:06 -04:00
|
|
|
const constraintPatterns = fieldNames.map(
|
|
|
|
|
(fieldName, index) => `$${index + 3}:name`
|
|
|
|
|
);
|
2017-12-28 01:43:34 +00:00
|
|
|
const qs = `ALTER TABLE $1:name ADD CONSTRAINT $2:name UNIQUE (${constraintPatterns.join()})`;
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._client
|
|
|
|
|
.none(qs, [className, constraintName, ...fieldNames])
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((error) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
error.code === PostgresDuplicateRelationError &&
|
|
|
|
|
error.message.includes(constraintName)
|
|
|
|
|
) {
|
|
|
|
|
// Index already exists. Ignore error.
|
|
|
|
|
} else if (
|
|
|
|
|
error.code === PostgresUniqueIndexViolationError &&
|
|
|
|
|
error.message.includes(constraintName)
|
|
|
|
|
) {
|
|
|
|
|
// Cast the error into the proper parse error
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.DUPLICATE_VALUE,
|
|
|
|
|
'A duplicate value for a field with unique values was provided'
|
|
|
|
|
);
|
2017-06-20 09:15:26 -07:00
|
|
|
} else {
|
|
|
|
|
throw error;
|
|
|
|
|
}
|
|
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2016-06-16 19:34:00 -07:00
|
|
|
// Executes a count.
|
2019-12-16 18:50:31 +00:00
|
|
|
async count(
|
2019-04-08 15:59:15 -07:00
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
|
|
|
|
readPreference?: string,
|
|
|
|
|
estimate?: boolean = true
|
|
|
|
|
) {
|
|
|
|
|
debug('count', className, query, readPreference, estimate);
|
2016-12-07 15:17:05 -08:00
|
|
|
const values = [className];
|
2020-02-14 09:44:51 -08:00
|
|
|
const where = buildWhereClause({
|
|
|
|
|
schema,
|
|
|
|
|
query,
|
|
|
|
|
index: 2,
|
|
|
|
|
caseInsensitive: false,
|
|
|
|
|
});
|
2016-06-17 11:09:42 -07:00
|
|
|
values.push(...where.values);
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
const wherePattern =
|
|
|
|
|
where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
|
2019-04-08 15:59:15 -07:00
|
|
|
let qs = '';
|
|
|
|
|
|
|
|
|
|
if (where.pattern.length > 0 || !estimate) {
|
|
|
|
|
qs = `SELECT count(*) FROM $1:name ${wherePattern}`;
|
|
|
|
|
} else {
|
|
|
|
|
qs =
|
|
|
|
|
'SELECT reltuples AS approximate_row_count FROM pg_class WHERE relname = $1';
|
|
|
|
|
}
|
|
|
|
|
|
2019-01-19 13:08:42 -05:00
|
|
|
return this._client
|
2020-04-06 22:50:33 +05:30
|
|
|
.one(qs, values, (a) => {
|
2019-04-08 15:59:15 -07:00
|
|
|
if (a.approximate_row_count != null) {
|
|
|
|
|
return +a.approximate_row_count;
|
|
|
|
|
} else {
|
|
|
|
|
return +a.count;
|
|
|
|
|
}
|
|
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((error) => {
|
2019-01-19 13:08:42 -05:00
|
|
|
if (error.code !== PostgresRelationDoesNotExistError) {
|
|
|
|
|
throw error;
|
|
|
|
|
}
|
|
|
|
|
return 0;
|
|
|
|
|
});
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
2016-08-15 16:48:39 -04:00
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async distinct(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
query: QueryType,
|
|
|
|
|
fieldName: string
|
|
|
|
|
) {
|
2017-11-12 13:00:22 -06:00
|
|
|
debug('distinct', className, query);
|
|
|
|
|
let field = fieldName;
|
|
|
|
|
let column = fieldName;
|
2018-02-16 09:44:42 -06:00
|
|
|
const isNested = fieldName.indexOf('.') >= 0;
|
|
|
|
|
if (isNested) {
|
2017-11-12 13:00:22 -06:00
|
|
|
field = transformDotFieldToComponents(fieldName).join('->');
|
|
|
|
|
column = fieldName.split('.')[0];
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
const isArrayField =
|
|
|
|
|
schema.fields &&
|
|
|
|
|
schema.fields[fieldName] &&
|
|
|
|
|
schema.fields[fieldName].type === 'Array';
|
|
|
|
|
const isPointerField =
|
|
|
|
|
schema.fields &&
|
|
|
|
|
schema.fields[fieldName] &&
|
|
|
|
|
schema.fields[fieldName].type === 'Pointer';
|
2017-11-12 13:00:22 -06:00
|
|
|
const values = [field, column, className];
|
2020-02-14 09:44:51 -08:00
|
|
|
const where = buildWhereClause({
|
|
|
|
|
schema,
|
|
|
|
|
query,
|
|
|
|
|
index: 4,
|
|
|
|
|
caseInsensitive: false,
|
|
|
|
|
});
|
2017-11-12 13:00:22 -06:00
|
|
|
values.push(...where.values);
|
|
|
|
|
|
2018-09-01 13:58:06 -04:00
|
|
|
const wherePattern =
|
|
|
|
|
where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
|
2017-12-29 21:32:40 -06:00
|
|
|
const transformer = isArrayField ? 'jsonb_array_elements' : 'ON';
|
2018-02-16 09:44:42 -06:00
|
|
|
let qs = `SELECT DISTINCT ${transformer}($1:name) $2:name FROM $3:name ${wherePattern}`;
|
|
|
|
|
if (isNested) {
|
|
|
|
|
qs = `SELECT DISTINCT ${transformer}($1:raw) $2:raw FROM $3:name ${wherePattern}`;
|
|
|
|
|
}
|
2017-11-12 13:00:22 -06:00
|
|
|
debug(qs, values);
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._client
|
|
|
|
|
.any(qs, values)
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((error) => {
|
2017-12-29 21:32:40 -06:00
|
|
|
if (error.code === PostgresMissingColumnError) {
|
|
|
|
|
return [];
|
|
|
|
|
}
|
|
|
|
|
throw error;
|
|
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.then((results) => {
|
2018-02-16 09:44:42 -06:00
|
|
|
if (!isNested) {
|
2020-04-06 22:50:33 +05:30
|
|
|
results = results.filter((object) => object[field] !== null);
|
|
|
|
|
return results.map((object) => {
|
2017-12-29 21:32:40 -06:00
|
|
|
if (!isPointerField) {
|
|
|
|
|
return object[field];
|
|
|
|
|
}
|
|
|
|
|
return {
|
|
|
|
|
__type: 'Pointer',
|
2018-09-01 13:58:06 -04:00
|
|
|
className: schema.fields[fieldName].targetClass,
|
|
|
|
|
objectId: object[field],
|
2017-12-29 21:32:40 -06:00
|
|
|
};
|
|
|
|
|
});
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
const child = fieldName.split('.')[1];
|
2020-04-06 22:50:33 +05:30
|
|
|
return results.map((object) => object[column][child]);
|
2018-01-01 20:33:41 +00:00
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.then((results) =>
|
|
|
|
|
results.map((object) =>
|
2018-09-01 13:58:06 -04:00
|
|
|
this.postgresObjectToParseObject(className, object, schema)
|
|
|
|
|
)
|
|
|
|
|
);
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
async aggregate(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: any,
|
|
|
|
|
pipeline: any,
|
|
|
|
|
readPreference: ?string,
|
|
|
|
|
hint: ?mixed,
|
2020-04-06 22:50:33 +05:30
|
|
|
explain?: boolean
|
|
|
|
|
) {
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
debug('aggregate', className, pipeline, readPreference, hint, explain);
|
2017-11-12 13:00:22 -06:00
|
|
|
const values = [className];
|
2018-05-01 07:37:38 -04:00
|
|
|
let index: number = 2;
|
2017-12-30 20:44:18 -05:00
|
|
|
let columns: string[] = [];
|
2017-11-12 13:00:22 -06:00
|
|
|
let countField = null;
|
2018-02-16 12:41:02 -06:00
|
|
|
let groupValues = null;
|
2017-11-12 13:00:22 -06:00
|
|
|
let wherePattern = '';
|
|
|
|
|
let limitPattern = '';
|
|
|
|
|
let skipPattern = '';
|
|
|
|
|
let sortPattern = '';
|
|
|
|
|
let groupPattern = '';
|
|
|
|
|
for (let i = 0; i < pipeline.length; i += 1) {
|
|
|
|
|
const stage = pipeline[i];
|
|
|
|
|
if (stage.$group) {
|
|
|
|
|
for (const field in stage.$group) {
|
|
|
|
|
const value = stage.$group[field];
|
|
|
|
|
if (value === null || value === undefined) {
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
if (field === '_id' && typeof value === 'string' && value !== '') {
|
2018-01-20 08:00:36 -06:00
|
|
|
columns.push(`$${index}:name AS "objectId"`);
|
|
|
|
|
groupPattern = `GROUP BY $${index}:name`;
|
|
|
|
|
values.push(transformAggregateField(value));
|
|
|
|
|
index += 1;
|
2017-11-12 13:00:22 -06:00
|
|
|
continue;
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
field === '_id' &&
|
|
|
|
|
typeof value === 'object' &&
|
|
|
|
|
Object.keys(value).length !== 0
|
|
|
|
|
) {
|
2018-02-16 12:41:02 -06:00
|
|
|
groupValues = value;
|
|
|
|
|
const groupByFields = [];
|
|
|
|
|
for (const alias in value) {
|
2020-03-09 21:48:39 +05:30
|
|
|
if (typeof value[alias] === 'string' && value[alias]) {
|
|
|
|
|
const source = transformAggregateField(value[alias]);
|
2018-02-16 12:41:02 -06:00
|
|
|
if (!groupByFields.includes(`"${source}"`)) {
|
|
|
|
|
groupByFields.push(`"${source}"`);
|
|
|
|
|
}
|
|
|
|
|
values.push(source, alias);
|
2020-03-09 21:48:39 +05:30
|
|
|
columns.push(`$${index}:name AS $${index + 1}:name`);
|
2018-02-16 12:41:02 -06:00
|
|
|
index += 2;
|
2020-03-09 21:48:39 +05:30
|
|
|
} else {
|
|
|
|
|
const operation = Object.keys(value[alias])[0];
|
|
|
|
|
const source = transformAggregateField(value[alias][operation]);
|
|
|
|
|
if (mongoAggregateToPostgres[operation]) {
|
|
|
|
|
if (!groupByFields.includes(`"${source}"`)) {
|
|
|
|
|
groupByFields.push(`"${source}"`);
|
|
|
|
|
}
|
|
|
|
|
columns.push(
|
|
|
|
|
`EXTRACT(${
|
|
|
|
|
mongoAggregateToPostgres[operation]
|
2020-04-06 22:50:33 +05:30
|
|
|
} FROM $${index}:name AT TIME ZONE 'UTC') AS $${
|
|
|
|
|
index + 1
|
|
|
|
|
}:name`
|
2020-03-09 21:48:39 +05:30
|
|
|
);
|
|
|
|
|
values.push(source, alias);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2018-02-16 12:41:02 -06:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
groupPattern = `GROUP BY $${index}:raw`;
|
|
|
|
|
values.push(groupByFields.join());
|
|
|
|
|
index += 1;
|
|
|
|
|
continue;
|
|
|
|
|
}
|
2019-01-19 13:08:42 -05:00
|
|
|
if (typeof value === 'object') {
|
|
|
|
|
if (value.$sum) {
|
|
|
|
|
if (typeof value.$sum === 'string') {
|
|
|
|
|
columns.push(`SUM($${index}:name) AS $${index + 1}:name`);
|
|
|
|
|
values.push(transformAggregateField(value.$sum), field);
|
|
|
|
|
index += 2;
|
|
|
|
|
} else {
|
|
|
|
|
countField = field;
|
|
|
|
|
columns.push(`COUNT(*) AS $${index}:name`);
|
|
|
|
|
values.push(field);
|
|
|
|
|
index += 1;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (value.$max) {
|
|
|
|
|
columns.push(`MAX($${index}:name) AS $${index + 1}:name`);
|
|
|
|
|
values.push(transformAggregateField(value.$max), field);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
|
|
|
|
if (value.$min) {
|
|
|
|
|
columns.push(`MIN($${index}:name) AS $${index + 1}:name`);
|
|
|
|
|
values.push(transformAggregateField(value.$min), field);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
|
|
|
|
if (value.$avg) {
|
|
|
|
|
columns.push(`AVG($${index}:name) AS $${index + 1}:name`);
|
|
|
|
|
values.push(transformAggregateField(value.$avg), field);
|
2018-01-20 08:00:36 -06:00
|
|
|
index += 2;
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
columns.push('*');
|
|
|
|
|
}
|
|
|
|
|
if (stage.$project) {
|
|
|
|
|
if (columns.includes('*')) {
|
|
|
|
|
columns = [];
|
|
|
|
|
}
|
|
|
|
|
for (const field in stage.$project) {
|
|
|
|
|
const value = stage.$project[field];
|
2018-09-01 13:58:06 -04:00
|
|
|
if (value === 1 || value === true) {
|
2018-01-20 08:00:36 -06:00
|
|
|
columns.push(`$${index}:name`);
|
|
|
|
|
values.push(field);
|
|
|
|
|
index += 1;
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
if (stage.$match) {
|
|
|
|
|
const patterns = [];
|
2019-08-14 16:57:00 -05:00
|
|
|
const orOrAnd = Object.prototype.hasOwnProperty.call(
|
|
|
|
|
stage.$match,
|
|
|
|
|
'$or'
|
|
|
|
|
)
|
|
|
|
|
? ' OR '
|
|
|
|
|
: ' AND ';
|
2018-01-20 08:00:36 -06:00
|
|
|
|
|
|
|
|
if (stage.$match.$or) {
|
|
|
|
|
const collapse = {};
|
2020-04-06 22:50:33 +05:30
|
|
|
stage.$match.$or.forEach((element) => {
|
2018-01-20 08:00:36 -06:00
|
|
|
for (const key in element) {
|
|
|
|
|
collapse[key] = element[key];
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
stage.$match = collapse;
|
|
|
|
|
}
|
2017-11-12 13:00:22 -06:00
|
|
|
for (const field in stage.$match) {
|
|
|
|
|
const value = stage.$match[field];
|
2018-01-20 08:00:36 -06:00
|
|
|
const matchPatterns = [];
|
2020-04-06 22:50:33 +05:30
|
|
|
Object.keys(ParseToPosgresComparator).forEach((cmp) => {
|
2017-11-12 13:00:22 -06:00
|
|
|
if (value[cmp]) {
|
|
|
|
|
const pgComparator = ParseToPosgresComparator[cmp];
|
2018-09-01 13:58:06 -04:00
|
|
|
matchPatterns.push(
|
|
|
|
|
`$${index}:name ${pgComparator} $${index + 1}`
|
|
|
|
|
);
|
2018-01-20 08:00:36 -06:00
|
|
|
values.push(field, toPostgresValue(value[cmp]));
|
|
|
|
|
index += 2;
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
});
|
2018-01-20 08:00:36 -06:00
|
|
|
if (matchPatterns.length > 0) {
|
|
|
|
|
patterns.push(`(${matchPatterns.join(' AND ')})`);
|
|
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
schema.fields[field] &&
|
|
|
|
|
schema.fields[field].type &&
|
|
|
|
|
matchPatterns.length === 0
|
|
|
|
|
) {
|
2018-01-20 08:00:36 -06:00
|
|
|
patterns.push(`$${index}:name = $${index + 1}`);
|
|
|
|
|
values.push(field, value);
|
|
|
|
|
index += 2;
|
|
|
|
|
}
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
2018-09-01 13:58:06 -04:00
|
|
|
wherePattern =
|
|
|
|
|
patterns.length > 0 ? `WHERE ${patterns.join(` ${orOrAnd} `)}` : '';
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
if (stage.$limit) {
|
2018-01-20 08:00:36 -06:00
|
|
|
limitPattern = `LIMIT $${index}`;
|
|
|
|
|
values.push(stage.$limit);
|
|
|
|
|
index += 1;
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
if (stage.$skip) {
|
2018-01-20 08:00:36 -06:00
|
|
|
skipPattern = `OFFSET $${index}`;
|
|
|
|
|
values.push(stage.$skip);
|
|
|
|
|
index += 1;
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
if (stage.$sort) {
|
|
|
|
|
const sort = stage.$sort;
|
2018-01-20 08:00:36 -06:00
|
|
|
const keys = Object.keys(sort);
|
2018-09-01 13:58:06 -04:00
|
|
|
const sorting = keys
|
2020-04-06 22:50:33 +05:30
|
|
|
.map((key) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
const transformer = sort[key] === 1 ? 'ASC' : 'DESC';
|
|
|
|
|
const order = `$${index}:name ${transformer}`;
|
|
|
|
|
index += 1;
|
|
|
|
|
return order;
|
|
|
|
|
})
|
|
|
|
|
.join();
|
2018-01-20 08:00:36 -06:00
|
|
|
values.push(...keys);
|
2018-09-01 13:58:06 -04:00
|
|
|
sortPattern =
|
|
|
|
|
sort !== undefined && sorting.length > 0 ? `ORDER BY ${sorting}` : '';
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
}
|
2020-04-06 22:50:33 +05:30
|
|
|
|
|
|
|
|
if (groupPattern) {
|
|
|
|
|
columns.forEach((e, i, a) => {
|
|
|
|
|
if (e && e.trim() === '*') {
|
|
|
|
|
a[i] = '';
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const originalQuery = `SELECT ${columns
|
|
|
|
|
.filter(Boolean)
|
|
|
|
|
.join()} FROM $1:name ${wherePattern} ${skipPattern} ${groupPattern} ${sortPattern} ${limitPattern}`;
|
|
|
|
|
const qs = explain
|
|
|
|
|
? this.createExplainableQuery(originalQuery)
|
|
|
|
|
: originalQuery;
|
2017-11-12 13:00:22 -06:00
|
|
|
debug(qs, values);
|
2020-04-06 22:50:33 +05:30
|
|
|
return this._client.any(qs, values).then((a) => {
|
|
|
|
|
if (explain) {
|
|
|
|
|
return a;
|
|
|
|
|
}
|
|
|
|
|
const results = a.map((object) =>
|
|
|
|
|
this.postgresObjectToParseObject(className, object, schema)
|
|
|
|
|
);
|
|
|
|
|
results.forEach((result) => {
|
|
|
|
|
if (!Object.prototype.hasOwnProperty.call(result, 'objectId')) {
|
|
|
|
|
result.objectId = null;
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
}
|
2020-04-06 22:50:33 +05:30
|
|
|
if (groupValues) {
|
|
|
|
|
result.objectId = {};
|
|
|
|
|
for (const key in groupValues) {
|
|
|
|
|
result.objectId[key] = result[key];
|
|
|
|
|
delete result[key];
|
2017-11-22 23:07:45 -08:00
|
|
|
}
|
2020-04-06 22:50:33 +05:30
|
|
|
}
|
|
|
|
|
if (countField) {
|
|
|
|
|
result[countField] = parseInt(result[countField], 10);
|
|
|
|
|
}
|
2017-11-12 13:00:22 -06:00
|
|
|
});
|
2020-04-06 22:50:33 +05:30
|
|
|
return results;
|
|
|
|
|
});
|
2017-11-12 13:00:22 -06:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async performInitialization({ VolatileClassesSchemas }: any) {
|
2018-01-03 00:23:05 -03:00
|
|
|
// TODO: This method needs to be rewritten to make proper use of connections (@vitaly-t)
|
2016-08-15 16:48:39 -04:00
|
|
|
debug('performInitialization');
|
2020-04-06 22:50:33 +05:30
|
|
|
const promises = VolatileClassesSchemas.map((schema) => {
|
2018-01-03 00:23:05 -03:00
|
|
|
return this.createTable(schema.className, schema)
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((err) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
if (
|
|
|
|
|
err.code === PostgresDuplicateRelationError ||
|
|
|
|
|
err.code === Parse.Error.INVALID_CLASS_NAME
|
|
|
|
|
) {
|
2018-01-03 00:23:05 -03:00
|
|
|
return Promise.resolve();
|
|
|
|
|
}
|
|
|
|
|
throw err;
|
|
|
|
|
})
|
|
|
|
|
.then(() => this.schemaUpgrade(schema.className, schema));
|
2016-08-15 16:48:39 -04:00
|
|
|
});
|
2017-03-04 23:56:53 +00:00
|
|
|
return Promise.all(promises)
|
|
|
|
|
.then(() => {
|
2020-04-06 22:50:33 +05:30
|
|
|
return this._client.tx('perform-initialization', (t) => {
|
2017-03-04 23:56:53 +00:00
|
|
|
return t.batch([
|
|
|
|
|
t.none(sql.misc.jsonObjectSetKeys),
|
|
|
|
|
t.none(sql.array.add),
|
|
|
|
|
t.none(sql.array.addUnique),
|
|
|
|
|
t.none(sql.array.remove),
|
|
|
|
|
t.none(sql.array.containsAll),
|
2018-05-16 03:42:32 +02:00
|
|
|
t.none(sql.array.containsAllRegex),
|
2018-09-01 13:58:06 -04:00
|
|
|
t.none(sql.array.contains),
|
2017-03-04 23:56:53 +00:00
|
|
|
]);
|
|
|
|
|
});
|
|
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.then((data) => {
|
2017-03-04 23:56:53 +00:00
|
|
|
debug(`initializationDone in ${data.duration}`);
|
2016-11-24 15:47:41 -05:00
|
|
|
})
|
2020-04-06 22:50:33 +05:30
|
|
|
.catch((error) => {
|
2017-03-04 23:56:53 +00:00
|
|
|
/* eslint-disable no-console */
|
|
|
|
|
console.error(error);
|
|
|
|
|
});
|
2016-08-15 16:48:39 -04:00
|
|
|
}
|
2017-11-25 13:55:34 -06:00
|
|
|
|
2020-02-14 09:44:51 -08:00
|
|
|
async createIndexes(
|
|
|
|
|
className: string,
|
|
|
|
|
indexes: any,
|
|
|
|
|
conn: ?any
|
|
|
|
|
): Promise<void> {
|
2020-04-06 22:50:33 +05:30
|
|
|
return (conn || this._client).tx((t) =>
|
2018-09-01 13:58:06 -04:00
|
|
|
t.batch(
|
2020-04-06 22:50:33 +05:30
|
|
|
indexes.map((i) => {
|
2018-09-01 13:58:06 -04:00
|
|
|
return t.none('CREATE INDEX $1:name ON $2:name ($3:name)', [
|
|
|
|
|
i.name,
|
|
|
|
|
className,
|
|
|
|
|
i.key,
|
|
|
|
|
]);
|
|
|
|
|
})
|
|
|
|
|
)
|
|
|
|
|
);
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async createIndexesIfNeeded(
|
2018-09-01 13:58:06 -04:00
|
|
|
className: string,
|
|
|
|
|
fieldName: string,
|
|
|
|
|
type: any,
|
|
|
|
|
conn: ?any
|
|
|
|
|
): Promise<void> {
|
2020-02-14 09:44:51 -08:00
|
|
|
await (
|
|
|
|
|
conn || this._client
|
|
|
|
|
).none('CREATE INDEX $1:name ON $2:name ($3:name)', [
|
|
|
|
|
fieldName,
|
|
|
|
|
className,
|
|
|
|
|
type,
|
|
|
|
|
]);
|
2017-12-30 20:44:18 -05:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async dropIndexes(className: string, indexes: any, conn: any): Promise<void> {
|
2020-04-06 22:50:33 +05:30
|
|
|
const queries = indexes.map((i) => ({
|
2018-09-01 13:58:06 -04:00
|
|
|
query: 'DROP INDEX $1:name',
|
|
|
|
|
values: i,
|
|
|
|
|
}));
|
2020-04-06 22:50:33 +05:30
|
|
|
await (conn || this._client).tx((t) =>
|
2018-09-01 13:58:06 -04:00
|
|
|
t.none(this._pgp.helpers.concat(queries))
|
|
|
|
|
);
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async getIndexes(className: string) {
|
2017-11-25 13:55:34 -06:00
|
|
|
const qs = 'SELECT * FROM pg_indexes WHERE tablename = ${className}';
|
2018-09-01 13:58:06 -04:00
|
|
|
return this._client.any(qs, { className });
|
2017-11-25 13:55:34 -06:00
|
|
|
}
|
|
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async updateSchemaWithIndexes(): Promise<void> {
|
2017-11-25 13:55:34 -06:00
|
|
|
return Promise.resolve();
|
|
|
|
|
}
|
2019-04-08 15:59:15 -07:00
|
|
|
|
|
|
|
|
// Used for testing purposes
|
2019-12-16 18:50:31 +00:00
|
|
|
async updateEstimatedCount(className: string) {
|
2019-04-08 15:59:15 -07:00
|
|
|
return this._client.none('ANALYZE $1:name', [className]);
|
|
|
|
|
}
|
2019-07-31 02:41:07 -07:00
|
|
|
|
2019-12-16 18:50:31 +00:00
|
|
|
async createTransactionalSession(): Promise<any> {
  // Opens a pg-promise transaction and resolves with a "session" object as
  // soon as the transaction callback starts — before the transaction itself
  // finishes. The transaction is held open by `transactionalSession.promise`,
  // a deferred that commit/abortTransactionalSession later settles via
  // `transactionalSession.resolve`.
  return new Promise((resolve) => {
    const transactionalSession = {};
    // `result` settles when the whole transaction commits or rolls back.
    transactionalSession.result = this._client.tx((t) => {
      transactionalSession.t = t;
      // Deferred promise that keeps the tx callback pending until
      // commit/abort provides the batched work.
      transactionalSession.promise = new Promise((resolve) => {
        transactionalSession.resolve = resolve;
      });
      // Statements queued by later operations on this session.
      transactionalSession.batch = [];
      // Hand the session back to the caller while the tx stays open.
      resolve(transactionalSession);
      return transactionalSession.promise;
    });
  });
}
|
|
|
|
|
|
|
|
|
|
commitTransactionalSession(transactionalSession: any): Promise<void> {
|
|
|
|
|
transactionalSession.resolve(
|
|
|
|
|
transactionalSession.t.batch(transactionalSession.batch)
|
|
|
|
|
);
|
|
|
|
|
return transactionalSession.result;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
abortTransactionalSession(transactionalSession: any): Promise<void> {
  // Attach a reaction to `result` before forcing the transaction to fail, so
  // the rejection is observed. NOTE(review): `.catch()` with no handler does
  // not swallow the rejection — the returned promise still rejects; confirm
  // callers expect a rejected promise from an aborted session.
  const result = transactionalSession.result.catch();
  // Poison the batch so t.batch(...) rejects and the transaction rolls back.
  transactionalSession.batch.push(Promise.reject());
  transactionalSession.resolve(
    transactionalSession.t.batch(transactionalSession.batch)
  );
  return result;
}
|
2020-02-14 09:44:51 -08:00
|
|
|
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
async ensureIndex(
|
|
|
|
|
className: string,
|
|
|
|
|
schema: SchemaType,
|
|
|
|
|
fieldNames: string[],
|
|
|
|
|
indexName: ?string,
|
|
|
|
|
caseInsensitive: boolean = false,
|
|
|
|
|
conn: ?any = null
|
|
|
|
|
): Promise<any> {
|
|
|
|
|
conn = conn != null ? conn : this._client;
|
|
|
|
|
const defaultIndexName = `parse_default_${fieldNames.sort().join('_')}`;
|
2020-04-06 22:50:33 +05:30
|
|
|
const indexNameOptions: Object =
|
|
|
|
|
indexName != null ? { name: indexName } : { name: defaultIndexName };
|
|
|
|
|
const constraintPatterns = caseInsensitive
|
|
|
|
|
? fieldNames.map(
|
|
|
|
|
(fieldName, index) => `lower($${index + 3}:name) varchar_pattern_ops`
|
|
|
|
|
)
|
|
|
|
|
: fieldNames.map((fieldName, index) => `$${index + 3}:name`);
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
const qs = `CREATE INDEX $1:name ON $2:name (${constraintPatterns.join()})`;
|
2020-04-06 22:50:33 +05:30
|
|
|
await conn
|
|
|
|
|
.none(qs, [indexNameOptions.name, className, ...fieldNames])
|
|
|
|
|
.catch((error) => {
|
Case insensitive username and email indexing and query planning for Postgres (#6506)
* Update .travis.yml
testing error to see what happens...
* Update .travis.yml
Attempting to resolve postgres in CL by installing postgis via sudo instead of through apt/packages
* Update .travis.yml
* Update .travis.yml
* Update .travis.yml
Removed extra lines of postgres that were under "services" and "addons". I believe the "postgresql" line under "services" was installing the default of 9.6 and "addons" was installing postgres 11. My guess is the fail was occurring due to 9.6 being called sometimes and it never had postgis installed. If this is true, the solution is to only install one version of postgres, which is version 11 with postgis 2.5.
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adding test case for caseInsensitive
Adding test case for verifying indexing for caseInsensitive
* Implementing ensureIndex
* Updated PostgresStorageAdapter calls to ST_DistanceSphere. Note this has a minimum requirement of postgis 2.2. Documented the change in the readme. This is address #6441
* updated postgres sections of contributions with newer postgres info. Also switched postgis image it points to as the other one hasn't been updated in over a year.
* more info about postgres
* added necessary password for postgres docker
* updated wording in contributions
* removed reference to MacJr environment var when starting postgres in contributions. The official image automatically creates a user named 'postgres', but it does require a password, which the command sets to 'postgres'
* added more time to docker sleep/wait to enter postgis commands. This will always take a few seconds because the db is installing from scratch everytime. If postgres/postgis images aren't already downloaded locally, it will take even longer. Worst case, if the command times out on first run. Stop and remove the parse-postgres container and run the command again, 20 seconds should be enough wait time then
* latest changes
* initial fix, need to test
* fixed lint
* Adds caseInsensitive constraints to database, but doesn't pass regular tests. I believe this is because ensureIndex in the Postgres adapter is returning wrong. Also, some issues with the caseInsensitive test case
* this version addes the indexes, but something still wrong with the ensureIndex method in adapter
* removed code from suggestions
* fixed lint
* fixed PostgresAdapter test case
* small bug in test case
* reverted back to main branch package.json and lock file
* fixed docker command in Contribute file
* added ability to explain the find method
* triggering another build
* added ability to choose to 'analyze' a query which actually executes (this can be bad when looking at a query plan for Insert, Delete, etc.) the query or to just setup the query plan (default, previous versions defaulted to 'analyze'). Alse added some comparsons on sequential vs index searches for postgres
* made sure to check that search actually returns 1 result. Removed prep time comparison between searches as this seemed to be variable
* added test cases using find and case insensitivity on fields other than username and password. Also added explain to aggregate method
* fixing issue where query in aggregate replaced the map method incorrectly
* reverted back to mapping for aggregate method to make sure it's the issue
* switched back to caseInsensitive check for email and username as it was causing issues
* fixed aggregate method using explain
* made query plain results more flexible/reusable. Got rid of droptables as 'beforeEach' already handles this
* updated CONTRIBUTING doc to use netrecon as default username for postgres (similar to old style). Note that the official postgres docker image for postgres requires POSTGRES_PASSWORD to be set in order to use the image
* left postgis at 2.5 in the contributing document as this is the last version to be backwards compatibile with older versions of parse server
* updating docker command for postgres
Co-authored-by: Arthur Cinader <700572+acinader@users.noreply.github.com>
2020-04-03 10:24:56 -04:00
|
|
|
if (
|
|
|
|
|
error.code === PostgresDuplicateRelationError &&
|
|
|
|
|
error.message.includes(indexNameOptions.name)
|
|
|
|
|
) {
|
|
|
|
|
// Index already exists. Ignore error.
|
|
|
|
|
} else if (
|
|
|
|
|
error.code === PostgresUniqueIndexViolationError &&
|
|
|
|
|
error.message.includes(indexNameOptions.name)
|
|
|
|
|
) {
|
|
|
|
|
// Cast the error into the proper parse error
|
|
|
|
|
throw new Parse.Error(
|
|
|
|
|
Parse.Error.DUPLICATE_VALUE,
|
|
|
|
|
'A duplicate value for a field with unique values was provided'
|
|
|
|
|
);
|
|
|
|
|
} else {
|
|
|
|
|
throw error;
|
|
|
|
|
}
|
|
|
|
|
});
|
2020-02-14 09:44:51 -08:00
|
|
|
}
|
2016-06-12 16:35:13 -07:00
|
|
|
}
|
|
|
|
|
|
2017-07-11 22:33:45 -05:00
|
|
|
// Converts a Parse polygon (an array of [lat, lng] pairs) into the
// parenthesized point-list literal Postgres expects for its polygon type.
// Throws Parse.Error.INVALID_JSON for fewer than 3 vertices and
// INTERNAL_SERVER_ERROR when the ring has fewer than 3 distinct vertices.
// Note: mutates `polygon` by appending the first vertex to close the ring.
function convertPolygonToSQL(polygon) {
  if (polygon.length < 3) {
    throw new Parse.Error(
      Parse.Error.INVALID_JSON,
      `Polygon must have at least 3 values`
    );
  }
  // Close the ring: the last vertex must equal the first.
  const first = polygon[0];
  const last = polygon[polygon.length - 1];
  if (first[0] !== last[0] || first[1] !== last[1]) {
    polygon.push(first);
  }
  // An element survives the filter only if it is the first occurrence of
  // that coordinate pair, which yields the set of distinct vertices.
  const unique = polygon.filter(
    (vertex, idx, all) =>
      all.findIndex((pt) => pt[0] === vertex[0] && pt[1] === vertex[1]) === idx
  );
  if (unique.length < 3) {
    throw new Parse.Error(
      Parse.Error.INTERNAL_SERVER_ERROR,
      'GeoJSON: Loop must have at least 3 different vertices'
    );
  }
  const points = polygon
    .map((point) => {
      // Validates as (latitude, longitude); throws on out-of-range values.
      Parse.GeoPoint._validate(parseFloat(point[1]), parseFloat(point[0]));
      return `(${point[1]}, ${point[0]})`;
    })
    .join(', ');
  return `(${points})`;
}
|
|
|
|
|
|
2016-10-31 21:40:53 +05:30
|
|
|
// Strips comments and non-escaped whitespace from an extended-style
// regex source string so it can be used as a plain pattern.
function removeWhiteSpace(regex) {
  // Guarantee a trailing newline so the comment-stripping patterns can
  // match a comment sitting on the final line.
  const source = regex.endsWith('\n') ? regex : regex + '\n';
  // remove non escaped comments
  let stripped = source.replace(/([^\\])#.*\n/gim, '$1');
  // remove lines starting with a comment
  stripped = stripped.replace(/^#.*\n/gim, '');
  // remove non escaped whitespace
  stripped = stripped.replace(/([^\\])\s+/gim, '$1');
  // remove whitespace at the beginning of a line
  return stripped.replace(/^\s+/, '').trim();
}
|
|
|
|
|
|
|
|
|
|
// Translates a Parse $regex value into the literalized pattern used in
// Postgres queries, preserving a leading '^' (startsWith) or trailing
// '$' (endsWith) anchor around the literalized body.
function processRegexPattern(s) {
  if (s) {
    if (s.startsWith('^')) {
      // regex for startsWith
      return '^' + literalizeRegexPart(s.slice(1));
    }
    if (s.endsWith('$')) {
      // regex for endsWith
      return literalizeRegexPart(s.slice(0, s.length - 1)) + '$';
    }
  }
  // regex for contains
  return literalizeRegexPart(s);
}
|
|
|
|
|
|
2018-05-16 03:42:32 +02:00
|
|
|
// True when `value` is a "starts with" style regex: a string beginning
// with '^' whose literal text is wrapped in \Q ... \E quoting.
function isStartsWithRegex(value) {
  if (typeof value !== 'string' || !value || !value.startsWith('^')) {
    return false;
  }
  return /\^\\Q.*\\E/.test(value);
}
|
|
|
|
|
|
|
|
|
|
// Returns true when the elements of `values` all agree with the first
// element on whether they carry a startsWith-style $regex (either all
// do, or none do). Empty or non-array input counts as true.
function isAllValuesRegexOrNone(values) {
  if (!values || !Array.isArray(values) || values.length === 0) {
    return true;
  }
  const expected = isStartsWithRegex(values[0].$regex);
  if (values.length === 1) {
    return expected;
  }
  // Every remaining element must match the first element's verdict.
  return values
    .slice(1)
    .every((value) => isStartsWithRegex(value.$regex) === expected);
}
|
|
|
|
|
|
|
|
|
|
// True when at least one element of `values` carries a startsWith-style
// $regex.
function isAnyValueRegexStartsWith(values) {
  return values.some((value) => isStartsWithRegex(value.$regex));
}
|
|
|
|
|
|
2016-10-31 21:40:53 +05:30
|
|
|
// Escapes a literal string for safe embedding in a Postgres regex:
// unicode letters, digits and spaces pass through unchanged, single
// quotes are doubled (SQL string quoting), and every other character is
// backslash-escaped.
function createLiteralRegex(remaining) {
  // Hoisted out of the per-character callback — the pattern is loop
  // invariant, so there is no need to rebuild the RegExp for each char.
  // \p{L} supports all unicode letter chars.
  const unescapedChar = RegExp('[0-9 ]|\\p{L}', 'u');
  return remaining
    .split('')
    .map((c) => {
      if (c.match(unescapedChar) !== null) {
        // don't escape alphanumeric characters
        return c;
      }
      // escape everything else (single quotes with single quotes, everything else with a backslash)
      return c === `'` ? `''` : `\\${c}`;
    })
    .join('');
}
|
|
|
|
|
|
2017-12-30 20:44:18 -05:00
|
|
|
// Converts the \Q ... \E quoted sections of a regex string into escaped
// literal text (via createLiteralRegex), recursing on any prefix that
// precedes the quoted section. Text outside quoting has stray \Q / \E
// markers removed and single quotes doubled for SQL embedding.
function literalizeRegexPart(s: string) {
  // Case 1: a trailing quoted section with both delimiters, e.g. "...\Qtext\E".
  // (?!\\E) keeps the capture from swallowing an embedded \E.
  const matcher1 = /\\Q((?!\\E).*)\\E$/;
  const result1: any = s.match(matcher1);
  if (result1 && result1.length > 1 && result1.index > -1) {
    // process regex that has a beginning and an end specified for the literal text
    const prefix = s.substr(0, result1.index);
    const remaining = result1[1];

    // Literalize the prefix recursively; escape the quoted text verbatim.
    return literalizeRegexPart(prefix) + createLiteralRegex(remaining);
  }

  // process regex that has a beginning specified for the literal text
  // Case 2: an unterminated quoted section, e.g. "...\Qtext" (no closing \E).
  const matcher2 = /\\Q((?!\\E).*)$/;
  const result2: any = s.match(matcher2);
  if (result2 && result2.length > 1 && result2.index > -1) {
    const prefix = s.substr(0, result2.index);
    const remaining = result2[1];

    return literalizeRegexPart(prefix) + createLiteralRegex(remaining);
  }

  // remove all instances of \Q and \E from the remaining text & escape single quotes
  // NOTE: each replace is intentionally non-global and order matters:
  // mid-string markers (preceded by a non-backslash char) are dropped
  // first, then markers at the very start, then quotes are doubled.
  return s
    .replace(/([^\\])(\\E)/, '$1')
    .replace(/([^\\])(\\Q)/, '$1')
    .replace(/^\\E/, '')
    .replace(/^\\Q/, '')
    .replace(/([^'])'/, `$1''`)
    .replace(/^'([^'])/, `''$1`);
}
|
|
|
|
|
|
2018-06-12 18:41:02 +02:00
|
|
|
// Recognizes the Parse JSON encoding of a GeoPoint value.
var GeoPointCoder = {
  isValidJSON(value) {
    const isObject = typeof value === 'object' && value !== null;
    return isObject && value.__type === 'GeoPoint';
  },
};
|
|
|
|
|
|
2016-06-12 16:35:13 -07:00
|
|
|
// The Postgres storage adapter class (defined above) is this module's sole export.
export default PostgresStorageAdapter;
|