2016-10-16 02:59:54 +05:30
import { createClient } from './PostgresClient' ;
2016-11-24 15:47:41 -05:00
import Parse from 'parse/node' ;
2016-12-07 20:03:40 -05:00
import _ from 'lodash' ;
2017-02-18 19:26:25 +00:00
import sql from './sql' ;
2016-06-12 16:35:13 -07:00
// PostgreSQL SQLSTATE error codes this adapter special-cases.
const PostgresRelationDoesNotExistError = '42P01'; // undefined_table
const PostgresDuplicateRelationError = '42P07'; // duplicate_table
const PostgresDuplicateColumnError = '42701'; // duplicate_column
const PostgresDuplicateObjectError = '42710'; // duplicate_object
const PostgresUniqueIndexViolationError = '23505'; // unique_violation
const PostgresTransactionAbortedError = '25P02'; // in_failed_sql_transaction
2016-08-15 16:48:39 -04:00
const logger = require('../../../logger');
// Logs through the shared logger with a 'PG: ' prefix on the message.
// Uses rest parameters instead of the deprecated `arguments` object.
const debug = function(...args) {
  args = ['PG: ' + args[0]].concat(args.slice(1, args.length));
  const log = logger.getLogger();
  log.debug.apply(log, args);
};
2016-06-12 16:35:13 -07:00
2016-06-06 13:47:11 -07:00
// Map a Parse schema field descriptor ({ type, contents }) to the Postgres
// column type used to store it. Throws (a plain string, matching the file's
// convention) for types with no Postgres mapping yet.
const parseTypeToPostgresType = type => {
  // Scalar Parse types and their backing column types.
  const simpleTypes = {
    String: 'text',
    Date: 'timestamp with time zone',
    Object: 'jsonb',
    File: 'text',
    Boolean: 'boolean',
    Pointer: 'char(10)',
    Number: 'double precision',
    GeoPoint: 'point',
    Bytes: 'jsonb',
    Polygon: 'polygon',
  };
  if (type.type === 'Array') {
    // Arrays of strings get a native text[] column; other arrays are jsonb.
    return (type.contents && type.contents.type === 'String') ? 'text[]' : 'jsonb';
  }
  if (Object.prototype.hasOwnProperty.call(simpleTypes, type.type)) {
    return simpleTypes[type.type];
  }
  throw `no type for ${JSON.stringify(type)} yet`;
};
2016-06-12 16:35:13 -07:00
2016-08-15 16:48:39 -04:00
const ParseToPosgresComparator = {
'$gt' : '>' ,
'$lt' : '<' ,
'$gte' : '>=' ,
'$lte' : '<='
}
2016-11-24 15:47:41 -05:00
// Unwrap Parse's { __type: 'Date' } / { __type: 'File' } envelopes into the
// raw value stored in Postgres; all other values pass through unchanged.
// Guards against null: typeof null === 'object', and the original crashed
// reading .__type off a null value.
const toPostgresValue = value => {
  if (value !== null && typeof value === 'object') {
    if (value.__type === 'Date') {
      return value.iso;
    }
    if (value.__type === 'File') {
      return value.name;
    }
  }
  return value;
}
2016-11-24 15:47:41 -05:00
// Replace a Pointer envelope with its objectId (that's what the column
// stores); everything else passes through. Guards against null, since
// typeof null === 'object' and the original crashed reading .__type.
const transformValue = value => {
  if (value !== null && typeof value === 'object' &&
      value.__type === 'Pointer') {
    return value.objectId;
  }
  return value;
}
// Duplicate from then mongo adapter...
const emptyCLPS = Object . freeze ( {
find : { } ,
get : { } ,
create : { } ,
update : { } ,
delete : { } ,
addField : { } ,
} ) ;
const defaultCLPS = Object . freeze ( {
find : { '*' : true } ,
get : { '*' : true } ,
create : { '*' : true } ,
update : { '*' : true } ,
delete : { '*' : true } ,
addField : { '*' : true } ,
} ) ;
2016-11-24 15:47:41 -05:00
// Convert a stored schema row into the shape the Parse schema API expects:
// strips internal columns (_hashed_password for _User, _wperm/_rperm) and
// normalizes classLevelPermissions and indexes. Mutates `schema.fields`.
const toParseSchema = (schema) => {
  if (schema.className === '_User') {
    delete schema.fields._hashed_password;
  }
  if (schema.fields) {
    delete schema.fields._wperm;
    delete schema.fields._rperm;
  }
  // Stored permissions are merged over the empty template so every action
  // key is present; absent permissions fall back to allow-all defaults.
  const clps = schema.classLevelPermissions
    ? { ...emptyCLPS, ...schema.classLevelPermissions }
    : defaultCLPS;
  const indexes = schema.indexes ? { ...schema.indexes } : {};
  return {
    className: schema.className,
    fields: schema.fields,
    classLevelPermissions: clps,
    indexes,
  };
}
2016-11-24 15:47:41 -05:00
// Add the internal columns every Postgres table carries but the Parse
// schema omits: the ACL arrays, plus password bookkeeping for _User.
// Mutates and returns `schema`; a falsy schema is returned untouched.
const toPostgresSchema = (schema) => {
  if (!schema) {
    return schema;
  }
  const fields = schema.fields || {};
  schema.fields = fields;
  fields._wperm = { type: 'Array', contents: { type: 'String' } };
  fields._rperm = { type: 'Array', contents: { type: 'String' } };
  if (schema.className === '_User') {
    fields._hashed_password = { type: 'String' };
    fields._password_history = { type: 'Array' };
  }
  return schema;
}
2016-11-24 15:47:41 -05:00
// Expand dotted keys in place: { 'a.b.c': v } becomes { a: { b: { c: v } } },
// with the dotted key removed. A { __op: 'Delete' } value stores undefined
// at the leaf. Mutates and returns `object`.
const handleDotFields = (object) => {
  for (const fieldName of Object.keys(object)) {
    if (fieldName.indexOf('.') === -1) {
      continue;
    }
    const components = fieldName.split('.');
    const first = components.shift();
    object[first] = object[first] || {};
    let node = object[first];
    let value = object[fieldName];
    if (value && value.__op === 'Delete') {
      value = undefined;
    }
    // Walk/create the intermediate objects; the last component gets `value`.
    while (components.length > 0) {
      const key = components.shift();
      node[key] = node[key] || {};
      if (components.length === 0) {
        node[key] = value;
      }
      node = node[key];
    }
    delete object[fieldName];
  }
  return object;
}
2017-08-23 10:33:57 -05:00
// 'a.b.c' -> ['"a"', "'b'", "'c'"]: the first component is a double-quoted
// column identifier, the rest are single-quoted jsonb key literals.
const transformDotFieldToComponents = (fieldName) => {
  return fieldName.split('.').map((cmpt, index) => (
    index === 0 ? `"${cmpt}"` : `'${cmpt}'`
  ));
}
// Build a jsonb accessor expression like "a"->'b'->>'c'. The final hop uses
// ->> so the value is returned as text. A dotless name is simply quoted.
const transformDotField = (fieldName) => {
  if (fieldName.indexOf('.') === -1) {
    return `"${fieldName}"`;
  }
  const components = transformDotFieldToComponents(fieldName);
  const last = components.pop();
  return components.join('->') + '->>' + last;
}
2017-11-12 13:00:22 -06:00
// Aggregate field references arrive prefixed with '$'; strip it.
const transformAggregateField = (fieldName) => fieldName.slice(1);
2016-10-31 21:41:21 +05:30
// Recursively reject nested object keys containing '$' or '.', which are
// reserved by the query syntax and unsafe to store. Throws Parse.Error
// INVALID_NESTED_KEY on the first offending key. Uses strict equality
// (the original used loose ==).
const validateKeys = (object) => {
  if (typeof object === 'object') {
    for (const key in object) {
      if (typeof object[key] === 'object') {
        validateKeys(object[key]);
      }
      if (key.includes('$') || key.includes('.')) {
        throw new Parse.Error(Parse.Error.INVALID_NESTED_KEY, "Nested keys should not contain the '$' or '.' characters");
      }
    }
  }
}
2016-08-18 18:05:26 -04:00
// Returns the list of join tables on a schema: one '_Join:<field>:<class>'
// entry per Relation-typed field. A falsy schema yields an empty list.
const joinTablesForSchema = (schema) => {
  if (!schema) {
    return [];
  }
  return Object.keys(schema.fields)
    .filter((field) => schema.fields[field].type === 'Relation')
    .map((field) => `_Join:${field}:${schema.className}`);
}
2016-08-18 18:05:26 -04:00
2016-06-16 15:39:05 -07:00
// Translate a Parse (Mongo-style) query object into a pg-promise WHERE
// fragment. Returns { pattern, values, sorts }:
//   pattern - ' AND '-joined SQL using $N / $N:name / $N:raw placeholders
//   values  - positional values, numbered starting at the caller's `index`
//   sorts   - ORDER BY fragments (produced only by $nearSphere)
// Recurses for $or/$and sub-queries. Throws Parse.Error OPERATION_FORBIDDEN
// for any constraint it does not recognize.
const buildWhereClause = ({ schema, query, index }) => {
  const patterns = [];
  let values = [];
  const sorts = [];

  schema = toPostgresSchema(schema);
  for (const fieldName in query) {
    const isArrayField = schema.fields
          && schema.fields[fieldName]
          && schema.fields[fieldName].type === 'Array';
    const initialPatternsLength = patterns.length;
    const fieldValue = query[fieldName];

    // nothing in the schema, it's gonna blow up
    if (!schema.fields[fieldName]) {
      // as it won't exist
      if (fieldValue && fieldValue.$exists === false) {
        continue;
      }
    }

    if (fieldName.indexOf('.') >= 0) {
      // Dotted field: query into a jsonb column via the -> / ->> accessors
      // built by transformDotField.
      let name = transformDotField(fieldName);
      if (fieldValue === null) {
        patterns.push(`${name} IS NULL`);
      } else {
        if (fieldValue.$in) {
          const inPatterns = [];
          name = transformDotFieldToComponents(fieldName).join('->');
          fieldValue.$in.forEach((listElem) => {
            // Strings must be JSON-quoted inside the jsonb array literal.
            if (typeof listElem === 'string') {
              inPatterns.push(`"${listElem}"`);
            } else {
              inPatterns.push(`${listElem}`);
            }
          });
          patterns.push(`(${name})::jsonb @> '[${inPatterns.join(',')}]'::jsonb`);
        } else if (fieldValue.$regex) {
          // Handle later
        } else {
          // NOTE(review): value is interpolated directly into the SQL
          // string here rather than parameterized — confirm upstream
          // validation covers this path.
          patterns.push(`${name} = '${fieldValue}'`);
        }
      }
    } else if (fieldValue === null || fieldValue === undefined) {
      patterns.push(`$${index}:name IS NULL`);
      values.push(fieldName);
      index += 1;
      continue;
    } else if (typeof fieldValue === 'string') {
      patterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (typeof fieldValue === 'boolean') {
      patterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (typeof fieldValue === 'number') {
      patterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (fieldName === '$or' || fieldName === '$and') {
      // Recurse for each sub-query; empty sub-clauses are skipped.
      const clauses = [];
      const clauseValues = [];
      fieldValue.forEach((subQuery) => {
        const clause = buildWhereClause({ schema, query: subQuery, index });
        if (clause.pattern.length > 0) {
          clauses.push(clause.pattern);
          clauseValues.push(...clause.values);
          index += clause.values.length;
        }
      });
      const orOrAnd = fieldName === '$or' ? ' OR ' : ' AND ';
      patterns.push(`(${clauses.join(orOrAnd)})`);
      values.push(...clauseValues);
    }

    if (fieldValue.$ne !== undefined) {
      if (isArrayField) {
        fieldValue.$ne = JSON.stringify([fieldValue.$ne]);
        patterns.push(`NOT array_contains($${index}:name, $${index + 1})`);
      } else {
        if (fieldValue.$ne === null) {
          patterns.push(`$${index}:name IS NOT NULL`);
          values.push(fieldName);
          index += 1;
          continue;
        } else {
          // if not null, we need to manually exclude null
          patterns.push(`($${index}:name <> $${index + 1} OR $${index}:name IS NULL)`);
        }
      }
      // TODO: support arrays
      values.push(fieldName, fieldValue.$ne);
      index += 2;
    }
    if (fieldValue.$eq) {
      patterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue.$eq);
      index += 2;
    }
    const isInOrNin = Array.isArray(fieldValue.$in) || Array.isArray(fieldValue.$nin);
    if (Array.isArray(fieldValue.$in) &&
        isArrayField &&
        schema.fields[fieldName].contents &&
        schema.fields[fieldName].contents.type === 'String') {
      // text[] column: use the array-overlap operator (&&). A null element
      // in $in additionally matches rows where the column is NULL; null
      // elements consume no placeholder, hence the (allowNull ? 1 : 0)
      // offset correction.
      const inPatterns = [];
      let allowNull = false;
      values.push(fieldName);
      fieldValue.$in.forEach((listElem, listIndex) => {
        if (listElem === null) {
          allowNull = true;
        } else {
          values.push(listElem);
          inPatterns.push(`$${index + 1 + listIndex - (allowNull ? 1 : 0)}`);
        }
      });
      if (allowNull) {
        patterns.push(`($${index}:name IS NULL OR $${index}:name && ARRAY[${inPatterns.join(',')}])`);
      } else {
        patterns.push(`$${index}:name && ARRAY[${inPatterns.join(',')}]`);
      }
      index = index + 1 + inPatterns.length;
    } else if (isInOrNin) {
      // Shared builder for $in (notIn=false) and $nin (notIn=true).
      var createConstraint = (baseArray, notIn) => {
        if (baseArray.length > 0) {
          const not = notIn ? ' NOT ' : '';
          if (isArrayField) {
            patterns.push(`${not} array_contains($${index}:name, $${index + 1})`);
            values.push(fieldName, JSON.stringify(baseArray));
            index += 2;
          } else {
            // Handle Nested Dot Notation Above
            if (fieldName.indexOf('.') >= 0) {
              return;
            }
            const inPatterns = [];
            values.push(fieldName);
            baseArray.forEach((listElem, listIndex) => {
              if (listElem !== null) {
                values.push(listElem);
                inPatterns.push(`$${index + 1 + listIndex}`);
              }
            });
            patterns.push(`$${index}:name ${not} IN (${inPatterns.join(',')})`);
            index = index + 1 + inPatterns.length;
          }
        } else if (!notIn) {
          // An empty $in can only ever match NULL.
          values.push(fieldName);
          patterns.push(`$${index}:name IS NULL`);
          index = index + 1;
        }
      }
      if (fieldValue.$in) {
        createConstraint(_.flatMap(fieldValue.$in, elt => elt), false);
      }
      if (fieldValue.$nin) {
        createConstraint(_.flatMap(fieldValue.$nin, elt => elt), true);
      }
    }
    if (Array.isArray(fieldValue.$all) && isArrayField) {
      patterns.push(`array_contains_all($${index}:name, $${index + 1}::jsonb)`);
      values.push(fieldName, JSON.stringify(fieldValue.$all));
      index += 2;
    }
    if (typeof fieldValue.$exists !== 'undefined') {
      if (fieldValue.$exists) {
        patterns.push(`$${index}:name IS NOT NULL`);
      } else {
        patterns.push(`$${index}:name IS NULL`);
      }
      values.push(fieldName);
      index += 1;
    }
    if (fieldValue.$text) {
      // Full-text search via to_tsvector/to_tsquery. $caseSensitive and
      // $diacriticSensitive:false are rejected as unsupported here.
      const search = fieldValue.$text.$search;
      let language = 'english';
      if (typeof search !== 'object') {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          `bad $text: $search, should be object`
        );
      }
      if (!search.$term || typeof search.$term !== 'string') {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          `bad $text: $term, should be string`
        );
      }
      if (search.$language && typeof search.$language !== 'string') {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          `bad $text: $language, should be string`
        );
      } else if (search.$language) {
        language = search.$language;
      }
      if (search.$caseSensitive && typeof search.$caseSensitive !== 'boolean') {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          `bad $text: $caseSensitive, should be boolean`
        );
      } else if (search.$caseSensitive) {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          `bad $text: $caseSensitive not supported, please use $regex or create a separate lower case column.`
        );
      }
      if (search.$diacriticSensitive && typeof search.$diacriticSensitive !== 'boolean') {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          `bad $text: $diacriticSensitive, should be boolean`
        );
      } else if (search.$diacriticSensitive === false) {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          `bad $text: $diacriticSensitive - false not supported, install Postgres Unaccent Extension`
        );
      }
      patterns.push(`to_tsvector($${index}, $${index + 1}:name) @@ to_tsquery($${index + 2}, $${index + 3})`);
      values.push(language, fieldName, language, search.$term);
      index += 4;
    }
    if (fieldValue.$nearSphere) {
      const point = fieldValue.$nearSphere;
      const distance = fieldValue.$maxDistance;
      // Multiplies by earth radius in meters (6371 km) — presumably
      // $maxDistance arrives in radians; variable name says KM but the
      // value is meters. TODO confirm units.
      const distanceInKM = distance * 6371 * 1000;
      patterns.push(`ST_distance_sphere($${index}:name::geometry, POINT($${index + 1}, $${index + 2})::geometry) <= $${index + 3}`);
      // Also sort results nearest-first.
      sorts.push(`ST_distance_sphere($${index}:name::geometry, POINT($${index + 1}, $${index + 2})::geometry) ASC`)
      values.push(fieldName, point.longitude, point.latitude, distanceInKM);
      index += 4;
    }
    if (fieldValue.$within && fieldValue.$within.$box) {
      const box = fieldValue.$within.$box;
      const left = box[0].longitude;
      const bottom = box[0].latitude;
      const right = box[1].longitude;
      const top = box[1].latitude;

      patterns.push(`$${index}:name::point <@ $${index + 1}::box`);
      values.push(fieldName, `((${left}, ${bottom}), (${right}, ${top}))`);
      index += 2;
    }
    if (fieldValue.$geoWithin && fieldValue.$geoWithin.$polygon) {
      const polygon = fieldValue.$geoWithin.$polygon;
      if (!(polygon instanceof Array)) {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          'bad $geoWithin value; $polygon should contain at least 3 GeoPoints'
        );
      }
      if (polygon.length < 3) {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          'bad $geoWithin value; $polygon should contain at least 3 GeoPoints'
        );
      }
      // Validate each vertex and render as Postgres polygon syntax (lon, lat).
      const points = polygon.map((point) => {
        if (typeof point !== 'object' || point.__type !== 'GeoPoint') {
          throw new Parse.Error(Parse.Error.INVALID_JSON, 'bad $geoWithin value');
        } else {
          Parse.GeoPoint._validate(point.latitude, point.longitude);
        }
        return `(${point.longitude}, ${point.latitude})`;
      }).join(', ');
      patterns.push(`$${index}:name::point <@ $${index + 1}::polygon`);
      values.push(fieldName, `(${points})`);
      index += 2;
    }
    if (fieldValue.$geoIntersects && fieldValue.$geoIntersects.$point) {
      const point = fieldValue.$geoIntersects.$point;
      if (typeof point !== 'object' || point.__type !== 'GeoPoint') {
        throw new Parse.Error(
          Parse.Error.INVALID_JSON,
          'bad $geoIntersect value; $point should be GeoPoint'
        );
      } else {
        Parse.GeoPoint._validate(point.latitude, point.longitude);
      }
      patterns.push(`$${index}:name::polygon @> $${index + 1}::point`);
      values.push(fieldName, `(${point.longitude}, ${point.latitude})`);
      index += 2;
    }

    if (fieldValue.$regex) {
      let regex = fieldValue.$regex;
      let operator = '~';
      const opts = fieldValue.$options;
      if (opts) {
        if (opts.indexOf('i') >= 0) {
          // case-insensitive regex operator
          operator = '~*';
        }
        if (opts.indexOf('x') >= 0) {
          // extended mode: strip whitespace from the pattern
          regex = removeWhiteSpace(regex);
        }
      }

      const name = transformDotField(fieldName);
      regex = processRegexPattern(regex);

      patterns.push(`$${index}:raw ${operator} '$${index + 1}:raw'`);
      values.push(name, regex);
      index += 2;
    }
    if (fieldValue.__type === 'Pointer') {
      if (isArrayField) {
        patterns.push(`array_contains($${index}:name, $${index + 1})`);
        values.push(fieldName, JSON.stringify([fieldValue]));
        index += 2;
      } else {
        patterns.push(`$${index}:name = $${index + 1}`);
        values.push(fieldName, fieldValue.objectId);
        index += 2;
      }
    }
    if (fieldValue.__type === 'Date') {
      patterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue.iso);
      index += 2;
    }
    if (fieldValue.__type === 'GeoPoint') {
      // ~= is Postgres point equality ("same as").
      patterns.push('$' + index + ':name ~= POINT($' + (index + 1) + ', $' + (index + 2) + ')');
      values.push(fieldName, fieldValue.longitude, fieldValue.latitude);
      index += 3;
    }
    if (fieldValue.__type === 'Polygon') {
      const value = convertPolygonToSQL(fieldValue.coordinates);
      patterns.push(`$${index}:name ~= $${index + 1}::polygon`);
      values.push(fieldName, value);
      index += 2;
    }
    // Range comparators ($gt/$lt/$gte/$lte).
    Object.keys(ParseToPosgresComparator).forEach(cmp => {
      if (fieldValue[cmp]) {
        const pgComparator = ParseToPosgresComparator[cmp];
        patterns.push(`$${index}:name ${pgComparator} $${index + 1}`);
        values.push(fieldName, toPostgresValue(fieldValue[cmp]));
        index += 2;
      }
    });
    // If none of the handlers above produced a pattern, the constraint is
    // unsupported.
    if (initialPatternsLength === patterns.length) {
      throw new Parse.Error(Parse.Error.OPERATION_FORBIDDEN, `Postgres doesn't support this query type yet ${JSON.stringify(fieldValue)}`);
    }
  }
  values = values.map(transformValue);
  return { pattern: patterns.join(' AND '), values, sorts };
}
2016-06-12 16:35:13 -07:00
export class PostgresStorageAdapter {
// Private
_collectionPrefix : string ;
_client ;
2017-05-16 12:06:17 -04:00
_pgp ;
2016-06-12 16:35:13 -07:00
  constructor({
    uri,
    collectionPrefix = '',
    databaseOptions
  }) {
    this._collectionPrefix = collectionPrefix;
    // createClient returns the pg-promise database handle plus the pg-promise
    // library instance; the latter is kept for its query helpers
    // (e.g. helpers.concat in deleteClass).
    const { client, pgp } = createClient(uri, databaseOptions);
    this._client = client;
    this._pgp = pgp;
  }
2017-11-19 04:20:19 +08:00
handleShutdown ( ) {
if ( ! this . _client ) {
return
}
this . _client . $pool . end ( ) ;
}
2016-10-29 05:53:37 +05:30
  // Create the "_SCHEMA" bookkeeping table if it is missing. Safe to race:
  // "already exists"-style errors are swallowed, anything else is rethrown.
  _ensureSchemaCollectionExists(conn) {
    conn = conn || this._client;
    return conn.none('CREATE TABLE IF NOT EXISTS "_SCHEMA" ( "className" varChar(120), "schema" jsonb, "isParseClass" bool, PRIMARY KEY ("className") )')
      .catch(error => {
        if (error.code === PostgresDuplicateRelationError
          || error.code === PostgresUniqueIndexViolationError
          || error.code === PostgresDuplicateObjectError) {
          // Table already exists, must have been created by a different request. Ignore error.
        } else {
          throw error;
        }
      });
  }
2016-06-12 16:35:13 -07:00
  // Resolves to a boolean: does a table with this name exist?
  classExists(name) {
    return this._client.one('SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = $1)', [name], a => a.exists);
  }
  // Persist the CLP object under schema.classLevelPermissions in the class's
  // _SCHEMA row, creating the _SCHEMA table first if needed.
  setClassLevelPermissions(className, CLPs) {
    const self = this;
    return this._client.task('set-class-level-permissions', function * (t) {
      yield self._ensureSchemaCollectionExists(t);
      const values = [className, 'schema', 'classLevelPermissions', JSON.stringify(CLPs)];
      yield t.none(`UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className"=$1`, values);
    });
  }
2017-11-25 13:55:34 -06:00
  // Apply index additions/deletions submitted in the Parse schema format
  // (a field object per index, or { __op: 'Delete' } to drop one), then
  // persist the resulting index map into the class's _SCHEMA row.
  // Validation errors throw Parse.Error INVALID_QUERY before any DDL runs.
  setIndexesWithSchemaFormat(className, submittedIndexes, existingIndexes = {}, fields, conn) {
    conn = conn || this._client;
    const self = this;
    if (submittedIndexes === undefined) {
      return Promise.resolve();
    }
    if (Object.keys(existingIndexes).length === 0) {
      // Seed with the implicit primary-key index.
      existingIndexes = { _id_: { _id: 1 } };
    }
    const deletedIndexes = [];
    const insertedIndexes = [];
    Object.keys(submittedIndexes).forEach(name => {
      const field = submittedIndexes[name];
      if (existingIndexes[name] && field.__op !== 'Delete') {
        throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} exists, cannot update.`);
      }
      if (!existingIndexes[name] && field.__op === 'Delete') {
        throw new Parse.Error(Parse.Error.INVALID_QUERY, `Index ${name} does not exist, cannot delete.`);
      }
      if (field.__op === 'Delete') {
        deletedIndexes.push(name);
        delete existingIndexes[name];
      } else {
        // Every key the index covers must be a declared field.
        Object.keys(field).forEach(key => {
          if (!fields.hasOwnProperty(key)) {
            throw new Parse.Error(Parse.Error.INVALID_QUERY, `Field ${key} does not exist, cannot add index.`);
          }
        });
        existingIndexes[name] = field;
        insertedIndexes.push({
          key: field,
          name,
        });
      }
    });
    // Run the index DDL and the _SCHEMA bookkeeping update in one transaction.
    return conn.tx('set-indexes-with-schema-format', function * (t) {
      if (insertedIndexes.length > 0) {
        yield self.createIndexes(className, insertedIndexes, t);
      }
      if (deletedIndexes.length > 0) {
        yield self.dropIndexes(className, deletedIndexes, t);
      }
      yield self._ensureSchemaCollectionExists(t);
      yield t.none('UPDATE "_SCHEMA" SET $2:name = json_object_set_key($2:name, $3::text, $4::jsonb) WHERE "className"=$1', [className, 'schema', 'indexes', JSON.stringify(existingIndexes)]);
    });
  }
2017-12-27 18:14:15 +00:00
  // Create the class's table, register it in _SCHEMA, and set up any
  // requested indexes — all inside one transaction. Resolves with the
  // Parse-format schema; throws Parse.Error DUPLICATE_VALUE if the class
  // already exists.
  createClass(className, schema, conn) {
    conn = conn || this._client;
    return conn.tx('create-class', t => {
      const q1 = this.createTable(className, schema, t);
      const q2 = t.none('INSERT INTO "_SCHEMA" ("className", "schema", "isParseClass") VALUES ($<className>, $<schema>, true)', { className, schema });
      const q3 = this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields, t);

      return t.batch([q1, q2, q3]);
    })
      .then(() => {
        return toParseSchema(schema)
      })
      .catch((err) => {
        // t.batch reports an aggregate error; when the transaction aborted,
        // the interesting error is the one from the statement that failed.
        if (Array.isArray(err.data) && err.data.length > 1 && err.data[0].result.code === PostgresTransactionAbortedError) {
          err = err.data[1].result;
        }

        if (err.code === PostgresUniqueIndexViolationError && err.detail.includes(className)) {
          throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, `Class ${className} already exists.`)
        }
        throw err;
      })
  }
  // Just create a table, do not insert in schema.
  // Builds a CREATE TABLE from the schema's fields (Relation fields are
  // deferred to separate _Join tables), with objectId as the primary key.
  createTable(className, schema, conn) {
    conn = conn || this._client;
    const self = this;
    debug('createTable', className, schema);
    const valuesArray = [];
    const patternsArray = [];
    const fields = Object.assign({}, schema.fields);
    if (className === '_User') {
      // _User always carries the auth/bookkeeping columns, whether or not
      // the submitted schema mentions them.
      fields._email_verify_token_expires_at = { type: 'Date' };
      fields._email_verify_token = { type: 'String' };
      fields._account_lockout_expires_at = { type: 'Date' };
      fields._failed_login_count = { type: 'Number' };
      fields._perishable_token = { type: 'String' };
      fields._perishable_token_expires_at = { type: 'Date' };
      fields._password_changed_at = { type: 'Date' };
      fields._password_history = { type: 'Array' };
    }
    // $1 is the table name; column name/type pairs start at $2.
    let index = 2;
    const relations = [];
    Object.keys(fields).forEach((fieldName) => {
      const parseType = fields[fieldName];
      // Skip when it's a relation
      // We'll create the tables later
      if (parseType.type === 'Relation') {
        relations.push(fieldName)
        return;
      }
      if (['_rperm', '_wperm'].indexOf(fieldName) >= 0) {
        // ACL columns are string arrays (text[]).
        parseType.contents = { type: 'String' };
      }
      valuesArray.push(fieldName);
      valuesArray.push(parseTypeToPostgresType(parseType));
      patternsArray.push(`$${index}:name $${index + 1}:raw`);
      if (fieldName === 'objectId') {
        patternsArray.push(`PRIMARY KEY ($${index}:name)`)
      }
      index = index + 2;
    });
    const qs = `CREATE TABLE IF NOT EXISTS $1:name (${patternsArray.join(',')})`;
    const values = [className, ...valuesArray];
    return conn.task('create-table', function * (t) {
      try {
        yield self._ensureSchemaCollectionExists(t);
        yield t.none(qs, values);
      } catch (error) {
        if (error.code !== PostgresDuplicateRelationError) {
          throw error;
        }
        // ELSE: Table already exists, must have been created by a different request. Ignore the error.
      }
      // Create the join table for every Relation field.
      yield t.tx('create-table-tx', tx => {
        return tx.batch(relations.map(fieldName => {
          return tx.none('CREATE TABLE IF NOT EXISTS $<joinTable:name> ("relatedId" varChar(120), "owningId" varChar(120), PRIMARY KEY("relatedId", "owningId") )', { joinTable: `_Join:${fieldName}:${className}` });
        }));
      });
    });
  }
  // Add a column (or _Join table, for Relations) for a new field, then
  // record the field in the class's _SCHEMA row. Throws (a plain string,
  // matching the file's convention) if _SCHEMA already lists the field.
  addFieldIfNotExists(className, fieldName, type) {
    // TODO: Must be revised for invalid logic...
    debug('addFieldIfNotExists', { className, fieldName, type });
    const self = this;
    return this._client.tx('add-field-if-not-exists', function * (t) {
      if (type.type !== 'Relation') {
        try {
          yield t.none('ALTER TABLE $<className:name> ADD COLUMN $<fieldName:name> $<postgresType:raw>', {
            className,
            fieldName,
            postgresType: parseTypeToPostgresType(type)
          });
        } catch (error) {
          if (error.code === PostgresRelationDoesNotExistError) {
            // The class table itself is missing; create it with this field.
            return yield self.createClass(className, { fields: { [fieldName]: type } }, t);
          }
          if (error.code !== PostgresDuplicateColumnError) {
            throw error;
          }
          // Column already exists, created by other request. Carry on to see if it's the right type.
        };
      } else {
        // Relations live in a separate join table, not a column.
        yield t.none('CREATE TABLE IF NOT EXISTS $<joinTable:name> ("relatedId" varChar(120), "owningId" varChar(120), PRIMARY KEY("relatedId", "owningId") )', { joinTable: `_Join:${fieldName}:${className}` });
      }
      const result = yield t.any('SELECT "schema" FROM "_SCHEMA" WHERE "className" = $<className> and ("schema"::json->\'fields\'->$<fieldName>) is not null', { className, fieldName });
      if (result[0]) {
        throw 'Attempted to add a field that already exists';
      } else {
        const path = `{fields,${fieldName}}`;
        yield t.none('UPDATE "_SCHEMA" SET "schema"=jsonb_set("schema", $<path>, $<type>) WHERE "className"=$<className>', { path, type, className });
      }
    });
  }
// Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.)
// and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible.
deleteClass ( className ) {
2017-05-28 15:48:32 +01:00
const operations = [
{ query : ` DROP TABLE IF EXISTS $ 1:name ` , values : [ className ] } ,
{ query : ` DELETE FROM "_SCHEMA" WHERE "className" = $ 1 ` , values : [ className ] }
] ;
return this . _client . tx ( t => t . none ( this . _pgp . helpers . concat ( operations ) ) )
. then ( ( ) => className . indexOf ( '_Join:' ) != 0 ) ; // resolves with false when _Join table
2016-06-12 16:35:13 -07:00
}
2016-06-16 19:34:00 -07:00
// Delete all data known to this adapter. Used for testing.
2016-06-12 16:35:13 -07:00
deleteAllClasses ( ) {
2016-12-07 15:17:05 -08:00
const now = new Date ( ) . getTime ( ) ;
2016-08-15 16:48:39 -04:00
debug ( 'deleteAllClasses' ) ;
return this . _client . any ( 'SELECT * FROM "_SCHEMA"' )
2017-06-20 09:15:26 -07:00
. then ( results => {
const joins = results . reduce ( ( list , schema ) => {
return list . concat ( joinTablesForSchema ( schema . schema ) ) ;
} , [ ] ) ;
2017-06-21 02:54:13 -03:00
const classes = [ '_SCHEMA' , '_PushStatus' , '_JobStatus' , '_JobSchedule' , '_Hooks' , '_GlobalConfig' , '_Audience' , ... results . map ( result => result . className ) , ... joins ] ;
2017-12-25 21:08:04 +00:00
const queries = classes . map ( className => ( { query : 'DROP TABLE IF EXISTS $<className:name>' , values : { className } } ) ) ;
return this . _client . tx ( t => t . none ( this . _pgp . helpers . concat ( queries ) ) ) ;
2017-06-20 09:15:26 -07:00
} , error => {
if ( error . code === PostgresRelationDoesNotExistError ) {
2017-06-21 02:54:13 -03:00
// No _SCHEMA collection. Don't delete anything.
2017-06-20 09:15:26 -07:00
return ;
} else {
throw error ;
}
} ) . then ( ( ) => {
debug ( ` deleteAllClasses done in ${ new Date ( ) . getTime ( ) - now } ` ) ;
} ) ;
2016-06-12 16:35:13 -07:00
}
// Remove the column and all the data. For Relations, the _Join collection is handled
// specially, this function does not delete _Join columns. It should, however, indicate
// that the relation fields does not exist anymore. In mongo, this means removing it from
// the _SCHEMA collection. There should be no actual data in the collection under the same name
// as the relation column, so it's fine to attempt to delete it. If the fields listed to be
// deleted do not exist, this function should return successfully anyways. Checking for
// attempts to delete non-existent fields is the responsibility of Parse Server.
// This function is not obligated to delete fields atomically. It is given the field
// names in a list so that databases that are capable of deleting fields atomically
// may do so.
// Returns a Promise.
deleteFields(className, schema, fieldNames) {
  debug('deleteFields', className, fieldNames);
  // Keep only non-Relation fields for ALTER TABLE (Relations have no column
  // on the class table), but strip every listed field from the in-memory
  // schema so the stored "_SCHEMA" row loses all of them.
  fieldNames = fieldNames.reduce((list, fieldName) => {
    const field = schema.fields[fieldName]
    if (field.type !== 'Relation') {
      list.push(fieldName);
    }
    delete schema.fields[fieldName];
    return list;
  }, []);
  // $1 is the table name; $2..$N are the columns to drop.
  const values = [className, ...fieldNames];
  const columns = fieldNames.map((name, idx) => {
    return `$${idx + 2}:name`;
  }).join(', DROP COLUMN');
  return this._client.tx('delete-fields', function * (t) {
    yield t.none('UPDATE "_SCHEMA" SET "schema"=$<schema> WHERE "className"=$<className>', { schema, className });
    if (values.length > 1) {
      // Only issue ALTER TABLE when at least one droppable column remains.
      yield t.none(`ALTER TABLE $1:name DROP COLUMN ${columns}`, values);
    }
  });
}
// Return a promise for all schemas known to this adapter, in Parse format. In case the
2016-06-16 19:34:00 -07:00
// schemas cannot be retrieved, returns a promise that rejects. Requirements for the
2016-06-12 16:35:13 -07:00
// rejection reason are TBD.
getAllClasses ( ) {
return this . _ensureSchemaCollectionExists ( )
2017-12-24 16:34:01 +00:00
. then ( ( ) => this . _client . map ( 'SELECT * FROM "_SCHEMA"' , null , row => toParseSchema ( { className : row . className , ... row . schema } ) ) ) ;
2016-06-12 16:35:13 -07:00
}
// Return a promise for the schema with the given name, in Parse format. If
// this adapter doesn't know about the schema, return a promise that rejects with
// undefined as the reason.
getClass ( className ) {
2016-08-18 18:05:26 -04:00
debug ( 'getClass' , className ) ;
2016-08-06 18:24:42 +01:00
return this . _client . any ( 'SELECT * FROM "_SCHEMA" WHERE "className"=$<className>' , { className } )
2017-06-20 09:15:26 -07:00
. then ( result => {
if ( result . length === 1 ) {
return result [ 0 ] . schema ;
} else {
throw undefined ;
}
} ) . then ( toParseSchema ) ;
2016-06-12 16:35:13 -07:00
}
2016-06-11 00:43:02 -07:00
// TODO: remove the mongo format dependency in the return value
2016-06-12 16:35:13 -07:00
// Inserts one REST-format object as a row of `className`. Returns a
// mongo-shaped result ({ ops: [object] }). Placeholder layout for the final
// INSERT: $1 = table name, $2..$(1+cols) = column names, the rest = values.
createObject(className, schema, object) {
  debug('createObject', className, object);
  let columnsArray = [];
  const valuesArray = [];
  schema = toPostgresSchema(schema);
  // GeoPoints are collected separately; they are injected as POINT(x, y)
  // expressions instead of plain value placeholders.
  const geoPoints = {};

  object = handleDotFields(object);

  validateKeys(object);

  Object.keys(object).forEach(fieldName => {
    if (object[fieldName] === null) {
      return;
    }
    // Fold "_auth_data_<provider>" keys into a single authData object.
    var authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/);
    if (authDataMatch) {
      var provider = authDataMatch[1];
      object['authData'] = object['authData'] || {};
      object['authData'][provider] = object[fieldName];
      delete object[fieldName];
      fieldName = 'authData';
    }

    columnsArray.push(fieldName);
    // Special _User columns that aren't part of the declared schema.
    if (!schema.fields[fieldName] && className === '_User') {
      if (fieldName === '_email_verify_token' ||
          fieldName === '_failed_login_count' ||
          fieldName === '_perishable_token' ||
          fieldName === '_password_history') {
        valuesArray.push(object[fieldName]);
      }

      if (fieldName === '_email_verify_token_expires_at') {
        if (object[fieldName]) {
          valuesArray.push(object[fieldName].iso);
        } else {
          valuesArray.push(null);
        }
      }

      if (fieldName === '_account_lockout_expires_at' ||
          fieldName === '_perishable_token_expires_at' ||
          fieldName === '_password_changed_at') {
        if (object[fieldName]) {
          valuesArray.push(object[fieldName].iso);
        } else {
          valuesArray.push(null);
        }
      }
      return;
    }
    // Convert each value to its Postgres representation by schema type.
    switch (schema.fields[fieldName].type) {
    case 'Date':
      if (object[fieldName]) {
        valuesArray.push(object[fieldName].iso);
      } else {
        valuesArray.push(null);
      }
      break;
    case 'Pointer':
      valuesArray.push(object[fieldName].objectId);
      break;
    case 'Array':
      // _rperm/_wperm are stored as native text[]; other arrays as jsonb.
      if (['_rperm', '_wperm'].indexOf(fieldName) >= 0) {
        valuesArray.push(object[fieldName]);
      } else {
        valuesArray.push(JSON.stringify(object[fieldName]));
      }
      break;
    case 'Object':
    case 'Bytes':
    case 'String':
    case 'Number':
    case 'Boolean':
      valuesArray.push(object[fieldName]);
      break;
    case 'File':
      valuesArray.push(object[fieldName].name);
      break;
    case 'Polygon': {
      const value = convertPolygonToSQL(object[fieldName].coordinates);
      valuesArray.push(value);
      break;
    }
    case 'GeoPoint':
      // pop the point and process later
      geoPoints[fieldName] = object[fieldName];
      columnsArray.pop();
      break;
    default:
      throw `Type ${schema.fields[fieldName].type} not supported yet`;
    }
  });

  // GeoPoint columns go at the end of the column list.
  columnsArray = columnsArray.concat(Object.keys(geoPoints));
  // Value placeholder for valuesArray[i] is $(i + 2 + number of columns),
  // because $1 is the table name and $2.. are the column names.
  const initialValues = valuesArray.map((val, index) => {
    let termination = '';
    const fieldName = columnsArray[index];
    if (['_rperm','_wperm'].indexOf(fieldName) >= 0) {
      termination = '::text[]';
    } else if (schema.fields[fieldName] && schema.fields[fieldName].type === 'Array') {
      termination = '::jsonb';
    }
    return `$${index + 2 + columnsArray.length}${termination}`;
  });
  const geoPointsInjects = Object.keys(geoPoints).map((key) => {
    const value = geoPoints[key];
    valuesArray.push(value.longitude, value.latitude);
    // After the two pushes, the longitude placeholder index works out to
    // valuesArray.length + columnsArray.length.
    const l = valuesArray.length + columnsArray.length;
    return `POINT($${l}, $${l + 1})`;
  });

  const columnsPattern = columnsArray.map((col, index) => `$${index + 2}:name`).join(',');
  const valuesPattern = initialValues.concat(geoPointsInjects).join(',')

  const qs = `INSERT INTO $1:name (${columnsPattern}) VALUES (${valuesPattern})`
  const values = [className, ...columnsArray, ...valuesArray]
  debug(qs, values);
  return this._client.none(qs, values)
    .then(() => ({ ops: [object] }))
    .catch(error => {
      if (error.code === PostgresUniqueIndexViolationError) {
        // Translate the unique-index violation into a Parse DUPLICATE_VALUE
        // error, extracting the offending field from the constraint name.
        const err = new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'A duplicate value for a field with unique values was provided');
        err.underlyingError = error;
        if (error.constraint) {
          const matches = error.constraint.match(/unique_([a-zA-Z]+)/);
          if (matches && Array.isArray(matches)) {
            err.userInfo = { duplicated_field: matches[1] };
          }
        }
        throw err;
      } else {
        throw error;
      }
    })
}
// Remove all objects that match the given Parse Query.
// If no objects match, reject with OBJECT_NOT_FOUND. If objects are found and deleted, resolve with undefined.
// If there is some other error, reject with INTERNAL_SERVER_ERROR.
deleteObjectsByQuery ( className , schema , query ) {
2016-08-15 16:48:39 -04:00
debug ( 'deleteObjectsByQuery' , className , query ) ;
2016-12-07 15:17:05 -08:00
const values = [ className ] ;
const index = 2 ;
const where = buildWhereClause ( { schema , index , query } )
2016-08-15 16:48:39 -04:00
values . push ( ... where . values ) ;
2016-09-02 17:00:47 -07:00
if ( Object . keys ( query ) . length === 0 ) {
2016-08-15 16:48:39 -04:00
where . pattern = 'TRUE' ;
}
2016-12-07 15:17:05 -08:00
const qs = ` WITH deleted AS (DELETE FROM $ 1:name WHERE ${ where . pattern } RETURNING *) SELECT count(*) FROM deleted ` ;
2016-08-15 16:48:39 -04:00
debug ( qs , values ) ;
return this . _client . one ( qs , values , a => + a . count )
2017-06-20 09:15:26 -07:00
. then ( count => {
if ( count === 0 ) {
throw new Parse . Error ( Parse . Error . OBJECT _NOT _FOUND , 'Object not found.' ) ;
} else {
return count ;
}
2017-09-18 18:02:56 -05:00
} ) . catch ( ( error ) => {
if ( error . code === PostgresRelationDoesNotExistError ) {
// Don't delete anything if doesn't exist
} else {
throw error ;
}
2017-06-20 09:15:26 -07:00
} ) ;
2016-06-12 16:35:13 -07:00
}
2016-08-20 16:07:48 -04:00
// Return value not currently well specified.
findOneAndUpdate ( className , schema , query , update ) {
debug ( 'findOneAndUpdate' , className , query , update ) ;
2016-11-24 15:47:41 -05:00
return this . updateObjectsByQuery ( className , schema , query , update ) . then ( ( val ) => val [ 0 ] ) ;
2016-08-20 16:07:48 -04:00
}
2016-06-12 16:35:13 -07:00
// Apply the update to all objects that match the given Parse Query.
updateObjectsByQuery(className, schema, query, update) {
  debug('updateObjectsByQuery', className, query, update);
  // Each entry of updatePatterns is one "column = expression" fragment of the
  // SET clause; `index` tracks the next free $-placeholder (the $1 slot holds
  // the table name).
  const updatePatterns = [];
  const values = [className]
  let index = 2;
  schema = toPostgresSchema(schema);

  // Keep the pre-dot-field update around: the Object branch below inspects it
  // to find "field.key" Increment/Delete operations.
  const originalUpdate = {...update};
  update = handleDotFields(update);
  // Resolve authData first,
  // So we don't end up with multiple key updates
  for (const fieldName in update) {
    const authDataMatch = fieldName.match(/^_auth_data_([a-zA-Z0-9_]+)$/);
    if (authDataMatch) {
      var provider = authDataMatch[1];
      const value = update[fieldName];
      delete update[fieldName];
      update['authData'] = update['authData'] || {};
      update['authData'][provider] = value;
    }
  }

  for (const fieldName in update) {
    const fieldValue = update[fieldName];
    if (fieldValue === null) {
      updatePatterns.push(`$${index}:name = NULL`);
      values.push(fieldName);
      index += 1;
    } else if (fieldName == 'authData') {
      // This recursively sets the json_object
      // Only 1 level deep
      const generate = (jsonb, key, value) => {
        return `json_object_set_key(COALESCE(${jsonb}, '{}'::jsonb), ${key}, ${value})::jsonb`;
      }
      const lastKey = `$${index}:name`;
      const fieldNameIndex = index;
      index += 1;
      values.push(fieldName);
      // Nest one json_object_set_key call per provider key.
      const update = Object.keys(fieldValue).reduce((lastKey, key) => {
        const str = generate(lastKey, `$${index}::text`, `$${index + 1}::jsonb`)
        index += 2;
        let value = fieldValue[key];
        if (value) {
          if (value.__op === 'Delete') {
            value = null;
          } else {
            value = JSON.stringify(value)
          }
        }
        values.push(key, value);
        return str;
      }, lastKey);
      updatePatterns.push(`$${fieldNameIndex}:name = ${update}`);
    } else if (fieldValue.__op === 'Increment') {
      updatePatterns.push(`$${index}:name = COALESCE($${index}:name, 0) + $${index + 1}`);
      values.push(fieldName, fieldValue.amount);
      index += 2;
    } else if (fieldValue.__op === 'Add') {
      updatePatterns.push(`$${index}:name = array_add(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)`);
      values.push(fieldName, JSON.stringify(fieldValue.objects));
      index += 2;
    } else if (fieldValue.__op === 'Delete') {
      updatePatterns.push(`$${index}:name = $${index + 1}`)
      values.push(fieldName, null);
      index += 2;
    } else if (fieldValue.__op === 'Remove') {
      updatePatterns.push(`$${index}:name = array_remove(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)`)
      values.push(fieldName, JSON.stringify(fieldValue.objects));
      index += 2;
    } else if (fieldValue.__op === 'AddUnique') {
      updatePatterns.push(`$${index}:name = array_add_unique(COALESCE($${index}:name, '[]'::jsonb), $${index + 1}::jsonb)`);
      values.push(fieldName, JSON.stringify(fieldValue.objects));
      index += 2;
    } else if (fieldName === 'updatedAt') { //TODO: stop special casing this. It should check for __type === 'Date' and use .iso
      updatePatterns.push(`$${index}:name = $${index + 1}`)
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (typeof fieldValue === 'string') {
      updatePatterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (typeof fieldValue === 'boolean') {
      updatePatterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (fieldValue.__type === 'Pointer') {
      updatePatterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue.objectId);
      index += 2;
    } else if (fieldValue.__type === 'Date') {
      updatePatterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, toPostgresValue(fieldValue));
      index += 2;
    } else if (fieldValue instanceof Date) {
      updatePatterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (fieldValue.__type === 'File') {
      updatePatterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, toPostgresValue(fieldValue));
      index += 2;
    } else if (fieldValue.__type === 'GeoPoint') {
      updatePatterns.push(`$${index}:name = POINT($${index + 1}, $${index + 2})`);
      values.push(fieldName, fieldValue.longitude, fieldValue.latitude);
      index += 3;
    } else if (fieldValue.__type === 'Polygon') {
      const value = convertPolygonToSQL(fieldValue.coordinates);
      updatePatterns.push(`$${index}:name = $${index + 1}::polygon`);
      values.push(fieldName, value);
      index += 2;
    } else if (fieldValue.__type === 'Relation') {
      // noop
    } else if (typeof fieldValue === 'number') {
      updatePatterns.push(`$${index}:name = $${index + 1}`);
      values.push(fieldName, fieldValue);
      index += 2;
    } else if (typeof fieldValue === 'object'
                && schema.fields[fieldName]
                && schema.fields[fieldName].type === 'Object') {
      // Gather keys to increment
      const keysToIncrement = Object.keys(originalUpdate).filter(k => {
        // choose top level fields that have an increment operation set
        return originalUpdate[k].__op === 'Increment' && k.split('.').length === 2 && k.split(".")[0] === fieldName;
      }).map(k => k.split('.')[1]);
      let incrementPatterns = '';
      if (keysToIncrement.length > 0) {
        // Build "|| '{"key": old + amount}'::jsonb" fragments per key.
        incrementPatterns = ' || ' + keysToIncrement.map((c) => {
          const amount = fieldValue[c].amount;
          return `CONCAT('{"${c}":', COALESCE($${index}:name->>'${c}','0')::int + ${amount}, '}')::jsonb`;
        }).join(' || ');
        // Strip the keys
        keysToIncrement.forEach((key) => {
          delete fieldValue[key];
        });
      }
      const keysToDelete = Object.keys(originalUpdate).filter(k => {
        // choose top level fields that have a delete operation set
        return originalUpdate[k].__op === 'Delete' && k.split('.').length === 2 && k.split(".")[0] === fieldName;
      }).map(k => k.split('.')[1]);
      // jsonb "- 'key'" removals, one placeholder per deleted key.
      const deletePatterns = keysToDelete.reduce((p, c, i) => {
        return p + ` - '$${index + 1 + i}:value'`;
      }, '');
      updatePatterns.push(`$${index}:name = ( COALESCE($${index}:name, '{}'::jsonb) ${deletePatterns} ${incrementPatterns} || $${index + 1 + keysToDelete.length}::jsonb )`);

      values.push(fieldName, ...keysToDelete, JSON.stringify(fieldValue));
      index += 2 + keysToDelete.length;
    } else if (Array.isArray(fieldValue)
                && schema.fields[fieldName]
                && schema.fields[fieldName].type === 'Array') {
      const expectedType = parseTypeToPostgresType(schema.fields[fieldName]);
      if (expectedType === 'text[]') {
        updatePatterns.push(`$${index}:name = $${index + 1}::text[]`);
      } else {
        // Array of objects needs a json[] cast; plain scalars use text[].
        let type = 'text';
        for (const elt of fieldValue) {
          if (typeof elt == 'object') {
            type = 'json';
            break;
          }
        }
        updatePatterns.push(`$${index}:name = array_to_json($${index + 1}::${type}[])::jsonb`);
      }
      values.push(fieldName, fieldValue);
      index += 2;
    } else {
      debug('Not supported update', fieldName, fieldValue);
      return Promise.reject(new Parse.Error(Parse.Error.OPERATION_FORBIDDEN, `Postgres doesn't support update ${JSON.stringify(fieldValue)} yet`));
    }
  }

  const where = buildWhereClause({ schema, index, query })
  values.push(...where.values);

  const whereClause = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
  const qs = `UPDATE $1:name SET ${updatePatterns.join(',')} ${whereClause} RETURNING *`;
  debug('update: ', qs, values);
  return this._client.any(qs, values);
}
2016-06-16 19:34:00 -07:00
// Hopefully, we can get rid of this. It's only used for config and hooks.
2016-06-12 16:35:13 -07:00
upsertOneObject ( className , schema , query , update ) {
2016-08-15 16:48:39 -04:00
debug ( 'upsertOneObject' , { className , query , update } ) ;
2016-12-07 15:17:05 -08:00
const createValue = Object . assign ( { } , query , update ) ;
2016-11-24 15:47:41 -05:00
return this . createObject ( className , schema , createValue ) . catch ( ( err ) => {
2016-08-15 16:48:39 -04:00
// ignore duplicate value errors as it's upsert
2016-09-02 17:00:47 -07:00
if ( err . code === Parse . Error . DUPLICATE _VALUE ) {
2016-08-20 16:07:48 -04:00
return this . findOneAndUpdate ( className , schema , query , update ) ;
2016-08-15 16:48:39 -04:00
}
throw err ;
} ) ;
2016-06-12 16:35:13 -07:00
}
2016-09-24 13:43:49 -04:00
// Runs a SELECT for the given Parse query with optional skip/limit/sort/keys,
// and converts each row back to a REST-format object. Resolves with [] when
// the class table does not exist.
find(className, schema, query, { skip, limit, sort, keys }) {
  debug('find', className, query, { skip, limit, sort, keys });
  const hasLimit = limit !== undefined;
  const hasSkip = skip !== undefined;
  let values = [className];
  const where = buildWhereClause({ schema, query, index: 2 })
  values.push(...where.values);

  const wherePattern = where.pattern.length > 0 ? `WHERE ${where.pattern}` : '';
  const limitPattern = hasLimit ? `LIMIT $${values.length + 1}` : '';
  if (hasLimit) {
    values.push(limit);
  }
  const skipPattern = hasSkip ? `OFFSET $${values.length + 1}` : '';
  if (hasSkip) {
    values.push(skip);
  }

  let sortPattern = '';
  if (sort) {
    const sorting = Object.keys(sort).map((key) => {
      // Using $idx pattern gives: non-integer constant in ORDER BY
      if (sort[key] === 1) {
        return `"${key}" ASC`;
      }
      return `"${key}" DESC`;
    }).join(',');
    sortPattern = sort !== undefined && Object.keys(sort).length > 0 ? `ORDER BY ${sorting}` : '';
  }
  // A sort imposed by the where clause (e.g. text-search scores) wins.
  if (where.sorts && Object.keys(where.sorts).length > 0) {
    sortPattern = `ORDER BY ${where.sorts.join(',')}`;
  }

  let columns = '*';
  if (keys) {
    // Exclude empty keys
    keys = keys.filter((key) => {
      return key.length > 0;
    });
    columns = keys.map((key, index) => {
      if (key === '$score') {
        // NOTE(review): '$score' hard-codes placeholders $2..$5, which assumes
        // a full-text-search where clause placed them there — verify against
        // buildWhereClause before relying on this.
        return `ts_rank_cd(to_tsvector($${2}, $${3}:name), to_tsquery($${4}, $${5}), 32) as score`;
      }
      return `$${index + values.length + 1}:name`;
    }).join(',');
    values = values.concat(keys);
  }

  const qs = `SELECT ${columns} FROM $1:name ${wherePattern} ${sortPattern} ${limitPattern} ${skipPattern}`;
  debug(qs, values);
  return this._client.any(qs, values)
    .catch((err) => {
      // Query on non existing table, don't crash
      if (err.code === PostgresRelationDoesNotExistError) {
        return [];
      }
      return Promise.reject(err);
    })
    .then(results => results.map(object => this.postgresObjectToParseObject(className, object, schema)));
}
// Converts from a postgres-format object to a REST-format object.
// Does not strip out anything based on a lack of authentication.
postgresObjectToParseObject(className, object, schema) {
  // Rebuild typed Parse values (Pointer, Relation, GeoPoint, Polygon, File)
  // from their flat Postgres representations, mutating `object` in place.
  Object.keys(schema.fields).forEach(fieldName => {
    if (schema.fields[fieldName].type === 'Pointer' && object[fieldName]) {
      object[fieldName] = { objectId: object[fieldName], __type: 'Pointer', className: schema.fields[fieldName].targetClass };
    }
    if (schema.fields[fieldName].type === 'Relation') {
      object[fieldName] = {
        __type: "Relation",
        className: schema.fields[fieldName].targetClass
      }
    }
    if (object[fieldName] && schema.fields[fieldName].type === 'GeoPoint') {
      object[fieldName] = {
        __type: "GeoPoint",
        latitude: object[fieldName].y,
        longitude: object[fieldName].x
      }
    }
    if (object[fieldName] && schema.fields[fieldName].type === 'Polygon') {
      // Postgres returns "((x1,y1),(x2,y2),...)"; strip the outer parens,
      // split into points, and swap each (x, y) into [lat, lng] order.
      let coords = object[fieldName];
      coords = coords.substr(2, coords.length - 4).split('),(');
      coords = coords.map((point) => {
        return [
          parseFloat(point.split(',')[1]),
          parseFloat(point.split(',')[0])
        ];
      });
      object[fieldName] = {
        __type: "Polygon",
        coordinates: coords
      }
    }
    if (object[fieldName] && schema.fields[fieldName].type === 'File') {
      object[fieldName] = {
        __type: 'File',
        name: object[fieldName]
      }
    }
  });
  //TODO: remove this reliance on the mongo format. DB adapter shouldn't know there is a difference between created at and any other date field.
  if (object.createdAt) {
    object.createdAt = object.createdAt.toISOString();
  }
  if (object.updatedAt) {
    object.updatedAt = object.updatedAt.toISOString();
  }
  if (object.expiresAt) {
    object.expiresAt = { __type: 'Date', iso: object.expiresAt.toISOString() };
  }
  if (object._email_verify_token_expires_at) {
    object._email_verify_token_expires_at = { __type: 'Date', iso: object._email_verify_token_expires_at.toISOString() };
  }
  if (object._account_lockout_expires_at) {
    object._account_lockout_expires_at = { __type: 'Date', iso: object._account_lockout_expires_at.toISOString() };
  }
  if (object._perishable_token_expires_at) {
    object._perishable_token_expires_at = { __type: 'Date', iso: object._perishable_token_expires_at.toISOString() };
  }
  if (object._password_changed_at) {
    object._password_changed_at = { __type: 'Date', iso: object._password_changed_at.toISOString() };
  }

  // Drop nulls and convert any remaining Date instances to Parse Dates.
  for (const fieldName in object) {
    if (object[fieldName] === null) {
      delete object[fieldName];
    }
    if (object[fieldName] instanceof Date) {
      object[fieldName] = { __type: 'Date', iso: object[fieldName].toISOString() };
    }
  }

  return object;
}
// Create a unique index. Unique indexes on nullable fields are not allowed. Since we don't
// currently know which fields are nullable and which aren't, we ignore that criteria.
// As such, we shouldn't expose this function to users of parse until we have an out-of-band
// Way of determining if a field is nullable. Undefined doesn't count against uniqueness,
// which is why we use sparse indexes.
ensureUniqueness ( className , schema , fieldNames ) {
2016-06-16 15:39:05 -07:00
// Use the same name for every ensureUniqueness attempt, because postgres
// Will happily create the same index with multiple names.
const constraintName = ` unique_ ${ fieldNames . sort ( ) . join ( '_' ) } ` ;
const constraintPatterns = fieldNames . map ( ( fieldName , index ) => ` $ ${ index + 3 } :name ` ) ;
const qs = ` ALTER TABLE $ 1:name ADD CONSTRAINT $ 2:name UNIQUE ( ${ constraintPatterns . join ( ',' ) } ) ` ;
2017-12-24 16:34:01 +00:00
return this . _client . none ( qs , [ className , constraintName , ... fieldNames ] )
2017-06-20 09:15:26 -07:00
. catch ( error => {
if ( error . code === PostgresDuplicateRelationError && error . message . includes ( constraintName ) ) {
2016-06-17 09:59:16 -07:00
// Index already exists. Ignore error.
2017-06-20 09:15:26 -07:00
} else if ( error . code === PostgresUniqueIndexViolationError && error . message . includes ( constraintName ) ) {
2016-08-15 16:48:39 -04:00
// Cast the error into the proper parse error
2017-06-20 09:15:26 -07:00
throw new Parse . Error ( Parse . Error . DUPLICATE _VALUE , 'A duplicate value for a field with unique values was provided' ) ;
} else {
throw error ;
}
} ) ;
2016-06-12 16:35:13 -07:00
}
2016-06-16 19:34:00 -07:00
// Executes a count.
2016-06-12 16:35:13 -07:00
count ( className , schema , query ) {
2016-08-18 18:05:26 -04:00
debug ( 'count' , className , query ) ;
2016-12-07 15:17:05 -08:00
const values = [ className ] ;
const where = buildWhereClause ( { schema , query , index : 2 } ) ;
2016-06-17 11:09:42 -07:00
values . push ( ... where . values ) ;
const wherePattern = where . pattern . length > 0 ? ` WHERE ${ where . pattern } ` : '' ;
2016-08-06 18:24:42 +01:00
const qs = ` SELECT count(*) FROM $ 1:name ${ wherePattern } ` ;
2016-11-24 15:47:41 -05:00
return this . _client . one ( qs , values , a => + a . count ) . catch ( ( err ) => {
2016-08-18 18:05:26 -04:00
if ( err . code === PostgresRelationDoesNotExistError ) {
return 0 ;
}
throw err ;
} ) ;
2016-06-12 16:35:13 -07:00
}
2016-08-15 16:48:39 -04:00
2017-11-12 13:00:22 -06:00
distinct ( className , schema , query , fieldName ) {
debug ( 'distinct' , className , query ) ;
let field = fieldName ;
let column = fieldName ;
if ( fieldName . indexOf ( '.' ) >= 0 ) {
field = transformDotFieldToComponents ( fieldName ) . join ( '->' ) ;
column = fieldName . split ( '.' ) [ 0 ] ;
}
const isArrayField = schema . fields
&& schema . fields [ fieldName ]
&& schema . fields [ fieldName ] . type === 'Array' ;
const values = [ field , column , className ] ;
const where = buildWhereClause ( { schema , query , index : 4 } ) ;
values . push ( ... where . values ) ;
const wherePattern = where . pattern . length > 0 ? ` WHERE ${ where . pattern } ` : '' ;
let qs = ` SELECT DISTINCT ON ( $ 1:raw) $ 2:raw FROM $ 3:name ${ wherePattern } ` ;
if ( isArrayField ) {
qs = ` SELECT distinct jsonb_array_elements( $ 1:raw) as $ 2:raw FROM $ 3:name ${ wherePattern } ` ;
}
debug ( qs , values ) ;
return this . _client . any ( qs , values )
. catch ( ( ) => [ ] )
. then ( ( results ) => {
if ( fieldName . indexOf ( '.' ) === - 1 ) {
return results . map ( object => object [ field ] ) ;
}
const child = fieldName . split ( '.' ) [ 1 ] ;
return results . map ( object => object [ column ] [ child ] ) ;
2017-11-22 23:07:45 -08:00
} ) . then ( results => results . map ( object => this . postgresObjectToParseObject ( className , object , schema ) ) ) ;
2017-11-12 13:00:22 -06:00
}
2017-11-22 23:07:45 -08:00
aggregate ( className , schema , pipeline ) {
2017-11-12 13:00:22 -06:00
debug ( 'aggregate' , className , pipeline ) ;
const values = [ className ] ;
let columns = [ ] ;
let countField = null ;
let wherePattern = '' ;
let limitPattern = '' ;
let skipPattern = '' ;
let sortPattern = '' ;
let groupPattern = '' ;
for ( let i = 0 ; i < pipeline . length ; i += 1 ) {
const stage = pipeline [ i ] ;
if ( stage . $group ) {
for ( const field in stage . $group ) {
const value = stage . $group [ field ] ;
if ( value === null || value === undefined ) {
continue ;
}
if ( field === '_id' ) {
columns . push ( ` ${ transformAggregateField ( value ) } AS "objectId" ` ) ;
groupPattern = ` GROUP BY ${ transformAggregateField ( value ) } ` ;
continue ;
}
if ( value . $sum ) {
if ( typeof value . $sum === 'string' ) {
columns . push ( ` SUM( ${ transformAggregateField ( value . $sum ) } ) AS " ${ field } " ` ) ;
} else {
countField = field ;
columns . push ( ` COUNT(*) AS " ${ field } " ` ) ;
}
}
if ( value . $max ) {
columns . push ( ` MAX( ${ transformAggregateField ( value . $max ) } ) AS " ${ field } " ` ) ;
}
if ( value . $min ) {
columns . push ( ` MIN( ${ transformAggregateField ( value . $min ) } ) AS " ${ field } " ` ) ;
}
if ( value . $avg ) {
columns . push ( ` AVG( ${ transformAggregateField ( value . $avg ) } ) AS " ${ field } " ` ) ;
}
}
columns . join ( ',' ) ;
} else {
columns . push ( '*' ) ;
}
if ( stage . $project ) {
if ( columns . includes ( '*' ) ) {
columns = [ ] ;
}
for ( const field in stage . $project ) {
const value = stage . $project [ field ] ;
if ( ( value === 1 || value === true ) ) {
columns . push ( field ) ;
}
}
}
if ( stage . $match ) {
const patterns = [ ] ;
for ( const field in stage . $match ) {
const value = stage . $match [ field ] ;
Object . keys ( ParseToPosgresComparator ) . forEach ( cmp => {
if ( value [ cmp ] ) {
const pgComparator = ParseToPosgresComparator [ cmp ] ;
patterns . push ( ` ${ field } ${ pgComparator } ${ value [ cmp ] } ` ) ;
}
} ) ;
}
wherePattern = patterns . length > 0 ? ` WHERE ${ patterns . join ( ' ' ) } ` : '' ;
}
if ( stage . $limit ) {
limitPattern = ` LIMIT ${ stage . $limit } ` ;
}
if ( stage . $skip ) {
skipPattern = ` OFFSET ${ stage . $skip } ` ;
}
if ( stage . $sort ) {
const sort = stage . $sort ;
const sorting = Object . keys ( sort ) . map ( ( key ) => {
if ( sort [ key ] === 1 ) {
return ` " ${ key } " ASC ` ;
}
return ` " ${ key } " DESC ` ;
} ) . join ( ',' ) ;
sortPattern = sort !== undefined && Object . keys ( sort ) . length > 0 ? ` ORDER BY ${ sorting } ` : '' ;
}
}
const qs = ` SELECT ${ columns } FROM $ 1:name ${ wherePattern } ${ sortPattern } ${ limitPattern } ${ skipPattern } ${ groupPattern } ` ;
debug ( qs , values ) ;
2017-12-24 16:34:01 +00:00
return this . _client . map ( qs , values , a => this . postgresObjectToParseObject ( className , a , schema ) )
2017-11-22 23:07:45 -08:00
. then ( results => {
if ( countField ) {
results [ 0 ] [ countField ] = parseInt ( results [ 0 ] [ countField ] , 10 ) ;
2017-11-12 13:00:22 -06:00
}
2017-11-22 23:07:45 -08:00
results . forEach ( result => {
if ( ! result . hasOwnProperty ( 'objectId' ) ) {
result . objectId = null ;
}
} ) ;
return results ;
2017-11-12 13:00:22 -06:00
} ) ;
}
2016-08-15 16:48:39 -04:00
performInitialization ( { VolatileClassesSchemas } ) {
debug ( 'performInitialization' ) ;
2017-03-04 23:56:53 +00:00
const promises = VolatileClassesSchemas . map ( ( schema ) => {
return this . createTable ( schema . className , schema ) . catch ( ( err ) => {
2016-09-02 17:00:47 -07:00
if ( err . code === PostgresDuplicateRelationError || err . code === Parse . Error . INVALID _CLASS _NAME ) {
2016-08-18 18:05:26 -04:00
return Promise . resolve ( ) ;
}
throw err ;
} ) ;
2016-08-15 16:48:39 -04:00
} ) ;
2017-03-04 23:56:53 +00:00
return Promise . all ( promises )
. then ( ( ) => {
2017-12-24 16:34:01 +00:00
return this . _client . tx ( 'perform-initialization' , t => {
2017-03-04 23:56:53 +00:00
return t . batch ( [
t . none ( sql . misc . jsonObjectSetKeys ) ,
t . none ( sql . array . add ) ,
t . none ( sql . array . addUnique ) ,
t . none ( sql . array . remove ) ,
t . none ( sql . array . containsAll ) ,
t . none ( sql . array . contains )
] ) ;
} ) ;
} )
. then ( data => {
debug ( ` initializationDone in ${ data . duration } ` ) ;
2016-11-24 15:47:41 -05:00
} )
2017-03-04 23:56:53 +00:00
. catch ( error => {
/* eslint-disable no-console */
console . error ( error ) ;
} ) ;
2016-08-15 16:48:39 -04:00
}
2017-11-25 13:55:34 -06:00
createIndexes ( className , indexes , conn ) {
return ( conn || this . _client ) . tx ( t => t . batch ( indexes . map ( i => {
return t . none ( 'CREATE INDEX $1:name ON $2:name ($3:name)' , [ i . name , className , i . key ] ) ;
} ) ) ) ;
}
dropIndexes ( className , indexes , conn ) {
2017-12-25 21:08:04 +00:00
const queries = indexes . map ( i => ( { query : 'DROP INDEX $1:name' , values : i } ) ) ;
return ( conn || this . _client ) . tx ( t => t . none ( this . _pgp . helpers . concat ( queries ) ) ) ;
2017-11-25 13:55:34 -06:00
}
getIndexes ( className ) {
const qs = 'SELECT * FROM pg_indexes WHERE tablename = ${className}' ;
return this . _client . any ( qs , { className } ) ;
}
// No-op for the Postgres adapter — presumably index reconciliation with the
// schema happens elsewhere for this backend; TODO confirm against the other
// storage adapters' implementations.
updateSchemaWithIndexes() {
  return Promise.resolve();
}
2016-06-12 16:35:13 -07:00
}
2017-07-11 22:33:45 -05:00
// Converts a Parse polygon (array of [lat, lng] pairs) into the textual
// representation of a PostgreSQL `polygon` value: "((y1, x1), (y2, x2), …)".
// Closes the ring if the first and last vertices differ, and validates that
// the polygon has at least 3 distinct vertices.
// Throws Parse.Error on invalid input.
function convertPolygonToSQL(polygon) {
  if (polygon.length < 3) {
    throw new Parse.Error(
      Parse.Error.INVALID_JSON,
      `Polygon must have at least 3 values`
    );
  }
  // Fix: operate on a copy so the caller's array is not mutated when the
  // ring is closed (the old code pushed onto the argument in place).
  const ring = polygon.slice();
  if (ring[0][0] !== ring[ring.length - 1][0] ||
      ring[0][1] !== ring[ring.length - 1][1]) {
    ring.push(ring[0]);
  }
  // Collect the distinct vertices (the duplicated closing vertex is excluded).
  const unique = ring.filter((item, index, ar) => {
    let foundIndex = -1;
    for (let i = 0; i < ar.length; i += 1) {
      const pt = ar[i];
      if (pt[0] === item[0] && pt[1] === item[1]) {
        foundIndex = i;
        break;
      }
    }
    return foundIndex === index;
  });
  if (unique.length < 3) {
    throw new Parse.Error(
      Parse.Error.INTERNAL_SERVER_ERROR,
      'GeoJSON: Loop must have at least 3 different vertices'
    );
  }
  // Validate each vertex as a GeoPoint and emit it as "(lng, lat)".
  const points = ring.map((point) => {
    Parse.GeoPoint._validate(parseFloat(point[1]), parseFloat(point[0]));
    return `(${point[1]}, ${point[0]})`;
  }).join(', ');
  return `(${points})`;
}
2016-10-31 21:40:53 +05:30
// Strips comments and non-escaped whitespace from an extended ("x"-flag
// style) regex source string, returning the compacted pattern.
function removeWhiteSpace(regex) {
  // The comment-stripping pass below needs a trailing newline to anchor on.
  let pattern = regex.endsWith('\n') ? regex : regex + '\n';
  // remove non escaped comments
  pattern = pattern.replace(/([^\\])#.*\n/gmi, '$1');
  // remove lines starting with a comment
  pattern = pattern.replace(/^#.*\n/gmi, '');
  // remove non escaped whitespace
  pattern = pattern.replace(/([^\\])\s+/gmi, '$1');
  // remove whitespace at the beginning of a line
  pattern = pattern.replace(/^\s+/, '');
  return pattern.trim();
}
// Translates a Parse regex query string into a literalized pattern,
// preserving a leading '^' (startsWith) or trailing '$' (endsWith) anchor.
function processRegexPattern(s) {
  if (s && s.startsWith('^')) {
    // anchored at the start: startsWith semantics
    return '^' + literalizeRegexPart(s.slice(1));
  }
  if (s && s.endsWith('$')) {
    // anchored at the end: endsWith semantics
    return literalizeRegexPart(s.slice(0, s.length - 1)) + '$';
  }
  // unanchored: contains semantics
  return literalizeRegexPart(s);
}
// Escapes a string so it matches itself literally inside a SQL regex:
// alphanumerics pass through, single quotes are doubled for the SQL
// literal, and every other character is backslash-escaped.
function createLiteralRegex(remaining) {
  const escaped = [];
  for (const ch of remaining.split('')) {
    if (/[0-9a-zA-Z]/.test(ch)) {
      // don't escape alphanumeric characters
      escaped.push(ch);
    } else if (ch === `'`) {
      escaped.push(`''`);
    } else {
      escaped.push(`\\${ch}`);
    }
  }
  return escaped.join('');
}
// Recursively rewrites \Q…\E literal sections of a regex into explicitly
// escaped text, then strips stray \Q / \E markers and doubles single
// quotes so the result can be embedded in a SQL string literal.
function literalizeRegexPart(s) {
  // Case 1: a \Q…\E section with an explicit terminator at the end.
  const boundedMatcher = /\\Q((?!\\E).*)\\E$/;
  const bounded = s.match(boundedMatcher);
  if (bounded && bounded.length > 1 && bounded.index > -1) {
    const prefix = s.substr(0, bounded.index);
    return literalizeRegexPart(prefix) + createLiteralRegex(bounded[1]);
  }
  // Case 2: a \Q section that runs to the end of the string.
  const openMatcher = /\\Q((?!\\E).*)$/;
  const open = s.match(openMatcher);
  if (open && open.length > 1 && open.index > -1) {
    const prefix = s.substr(0, open.index);
    return literalizeRegexPart(prefix) + createLiteralRegex(open[1]);
  }
  // Base case: drop any leftover \Q/\E markers and escape single quotes.
  return s
    .replace(/([^\\])(\\E)/, '$1')
    .replace(/([^\\])(\\Q)/, '$1')
    .replace(/^\\E/, '')
    .replace(/^\\Q/, '')
    .replace(/([^'])'/, `$1''`)
    .replace(/^'([^'])/, `''$1`);
}
2016-06-12 16:35:13 -07:00
export default PostgresStorageAdapter ;
module . exports = PostgresStorageAdapter ; // Required for tests