diff --git a/.flowconfig b/.flowconfig
index c13f93b6a4..e36f404424 100644
--- a/.flowconfig
+++ b/.flowconfig
@@ -7,3 +7,4 @@
 [libs]
 
 [options]
+suppress_comment= \\(.\\|\n\\)*\\@flow-disable-next
diff --git a/package.json b/package.json
index 67f89795b0..6fe18777bc 100644
--- a/package.json
+++ b/package.json
@@ -56,6 +56,7 @@
     "deep-diff": "0.3.8",
     "eslint": "^4.9.0",
     "eslint-plugin-flowtype": "^2.39.1",
+    "flow-bin": "^0.59.0",
     "gaze": "1.1.2",
     "jasmine": "2.8.0",
     "jasmine-spec-reporter": "^4.1.0",
@@ -66,7 +67,7 @@
   },
   "scripts": {
     "dev": "npm run build && node bin/dev",
-    "lint": "eslint --cache ./",
+    "lint": "flow && eslint --cache ./",
     "build": "babel src/ -d lib/ --copy-files",
     "pretest": "npm run lint",
     "test": "cross-env MONGODB_VERSION=${MONGODB_VERSION:=3.2.6} MONGODB_STORAGE_ENGINE=mmapv1 TESTING=1 jasmine",
diff --git a/spec/MongoStorageAdapter.spec.js b/spec/MongoStorageAdapter.spec.js
index e6e65e7b40..19e8ce313a 100644
--- a/spec/MongoStorageAdapter.spec.js
+++ b/spec/MongoStorageAdapter.spec.js
@@ -1,7 +1,7 @@
 'use strict';
 
-const MongoStorageAdapter = require('../src/Adapters/Storage/Mongo/MongoStorageAdapter');
-const MongoClient = require('mongodb').MongoClient;
+import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
+const { MongoClient } = require('mongodb');
 const databaseURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
 
 // These tests are specific to the mongo storage adapter + mongo storage format
diff --git a/spec/ParsePolygon.spec.js b/spec/ParsePolygon.spec.js
index f4ca455ae7..ca38ad44a6 100644
--- a/spec/ParsePolygon.spec.js
+++ b/spec/ParsePolygon.spec.js
@@ -1,5 +1,5 @@
 const TestObject = Parse.Object.extend('TestObject');
-const MongoStorageAdapter = require('../src/Adapters/Storage/Mongo/MongoStorageAdapter');
+import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
 const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
 const rp = require('request-promise');
 const defaultHeaders = {
diff --git a/spec/ParseQuery.FullTextSearch.spec.js b/spec/ParseQuery.FullTextSearch.spec.js
index 2563781d6e..9116410742 100644
--- a/spec/ParseQuery.FullTextSearch.spec.js
+++ b/spec/ParseQuery.FullTextSearch.spec.js
@@ -1,8 +1,8 @@
 'use strict';
 
-const MongoStorageAdapter = require('../src/Adapters/Storage/Mongo/MongoStorageAdapter');
+import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
 const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
-const PostgresStorageAdapter = require('../src/Adapters/Storage/Postgres/PostgresStorageAdapter');
+import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
 const postgresURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
 const Parse = require('parse/node');
 const rp = require('request-promise');
diff --git a/spec/ParseServer.spec.js b/spec/ParseServer.spec.js
index 841950d076..c5f945a238 100644
--- a/spec/ParseServer.spec.js
+++ b/spec/ParseServer.spec.js
@@ -1,7 +1,8 @@
 'use strict';
 /* Tests for ParseServer.js */
 const express = require('express');
-
+import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
+import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
 import ParseServer from '../src/ParseServer';
 
 describe('Server Url Checks', () => {
@@ -35,8 +36,6 @@ describe('Server Url Checks', () => {
   });
 
   it('handleShutdown, close connection', (done) => {
-    var MongoStorageAdapter = require('../src/Adapters/Storage/Mongo/MongoStorageAdapter');
-    const PostgresStorageAdapter = require('../src/Adapters/Storage/Postgres/PostgresStorageAdapter');
     const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
     const postgresURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
     let databaseAdapter;
diff --git a/spec/PostgresInitOptions.spec.js b/spec/PostgresInitOptions.spec.js
index 7feb30c970..f191adb48b 100644
--- a/spec/PostgresInitOptions.spec.js
+++ b/spec/PostgresInitOptions.spec.js
@@ -1,5 +1,5 @@
 const Parse = require('parse/node').Parse;
-const PostgresStorageAdapter = require('../src/Adapters/Storage/Postgres/PostgresStorageAdapter');
+import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
 const postgresURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
 const ParseServer = require("../src/index");
 const express = require('express');
diff --git a/spec/PostgresStorageAdapter.spec.js b/spec/PostgresStorageAdapter.spec.js
index 18e7c83ad7..f4571208f8 100644
--- a/spec/PostgresStorageAdapter.spec.js
+++ b/spec/PostgresStorageAdapter.spec.js
@@ -1,4 +1,4 @@
-const PostgresStorageAdapter = require('../src/Adapters/Storage/Postgres/PostgresStorageAdapter');
+import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
 const databaseURI = 'postgres://localhost:5432/parse_server_postgres_adapter_test_database';
 
 describe_only_db('postgres')('PostgresStorageAdapter', () => {
diff --git a/spec/helper.js b/spec/helper.js
index 95d52aa732..965d71651e 100644
--- a/spec/helper.js
+++ b/spec/helper.js
@@ -27,10 +27,10 @@ var cache = require('../src/cache').default;
 var ParseServer = require('../src/index').ParseServer;
 var path = require('path');
 var TestUtils = require('../src/TestUtils');
-var MongoStorageAdapter = require('../src/Adapters/Storage/Mongo/MongoStorageAdapter');
 const GridStoreAdapter = require('../src/Adapters/Files/GridStoreAdapter').GridStoreAdapter;
 const FSAdapter = require('@parse/fs-files-adapter');
-const PostgresStorageAdapter = require('../src/Adapters/Storage/Postgres/PostgresStorageAdapter');
+import PostgresStorageAdapter from '../src/Adapters/Storage/Postgres/PostgresStorageAdapter';
+import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
 const RedisCacheAdapter = require('../src/Adapters/Cache/RedisCacheAdapter').default;
 
 const mongoURI = 'mongodb://localhost:27017/parseServerMongoAdapterTestDatabase';
diff --git a/spec/index.spec.js b/spec/index.spec.js
index b1f5b5f432..ed9e9a9120 100644
--- a/spec/index.spec.js
+++ b/spec/index.spec.js
@@ -6,7 +6,7 @@ var ParseServer = require("../src/index");
 var Config = require('../src/Config');
 var express = require('express');
 
-const MongoStorageAdapter = require('../src/Adapters/Storage/Mongo/MongoStorageAdapter');
+import MongoStorageAdapter from '../src/Adapters/Storage/Mongo/MongoStorageAdapter';
 
 describe('server', () => {
   it('requires a master key and app id', done => {
diff --git a/src/Adapters/Files/GridStoreAdapter.js b/src/Adapters/Files/GridStoreAdapter.js
index 744dff4b2d..2a33b1af08 100644
--- a/src/Adapters/Files/GridStoreAdapter.js
+++ b/src/Adapters/Files/GridStoreAdapter.js
@@ -6,6 +6,7 @@
  @flow weak
  */
 
+// @flow-disable-next
 import { MongoClient, GridStore, Db} from 'mongodb';
 import { FilesAdapter } from './FilesAdapter';
 import defaults from '../../defaults';
diff --git
a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js index 7de184250d..db52fd4479 100644 --- a/src/Adapters/Storage/Mongo/MongoSchemaCollection.js +++ b/src/Adapters/Storage/Mongo/MongoSchemaCollection.js @@ -137,6 +137,18 @@ class MongoSchemaCollection { return this._collection._mongoCollection.findAndRemove(_mongoSchemaQueryFromNameQuery(name), []); } + insertSchema(schema: any) { + return this._collection.insertOne(schema) + .then(result => mongoSchemaToParseSchema(result.ops[0])) + .catch(error => { + if (error.code === 11000) { //Mongo's duplicate key error + throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'Class already exists.'); + } else { + throw error; + } + }) + } + updateSchema(name: string, update) { return this._collection.updateOne(_mongoSchemaQueryFromNameQuery(name), update); } diff --git a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js index 05725a7db9..43a994daa9 100644 --- a/src/Adapters/Storage/Mongo/MongoStorageAdapter.js +++ b/src/Adapters/Storage/Mongo/MongoStorageAdapter.js @@ -1,5 +1,11 @@ +// @flow import MongoCollection from './MongoCollection'; import MongoSchemaCollection from './MongoSchemaCollection'; +import { StorageAdapter } from '../StorageAdapter'; +import type { SchemaType, + QueryType, + StorageClass, + QueryOptions } from '../StorageAdapter'; import { parse as parseUrl, format as formatUrl, @@ -12,10 +18,13 @@ import { transformUpdate, transformPointerString, } from './MongoTransform'; +// @flow-disable-next import Parse from 'parse/node'; +// @flow-disable-next import _ from 'lodash'; import defaults from '../../../defaults'; +// @flow-disable-next const mongodb = require('mongodb'); const MongoClient = mongodb.MongoClient; const ReadPreference = mongodb.ReadPreference; @@ -59,7 +68,8 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = (fields, className, classLevelPe _id: className, objectId: 'string', updatedAt: 'string', - createdAt: 'string' + createdAt: 'string', + _metadata: undefined, }; for (const fieldName in fields) { @@ -80,24 +90,31 @@ const mongoSchemaFromFieldsAndClassNameAndCLP = (fields, className, classLevelPe mongoObject._metadata.indexes = indexes; } + if (!mongoObject._metadata) { // cleanup the unused _metadata + delete mongoObject._metadata; + } + return mongoObject; } -export class MongoStorageAdapter { +export class MongoStorageAdapter implements StorageAdapter { // Private _uri: string; _collectionPrefix: string; _mongoOptions: Object; // Public - connectionPromise; - database; - canSortOnJoinTables; + connectionPromise: Promise; + database: any; + client: MongoClient; + _maxTimeMS: ?number; + canSortOnJoinTables: boolean; + constructor({ uri = defaults.DefaultMongoURI, collectionPrefix = '', mongoOptions = {}, - }) { + }: any) { this._uri = uri; this._collectionPrefix = collectionPrefix; this._mongoOptions = mongoOptions; @@ -156,13 +173,13 @@ export class MongoStorageAdapter { .then(rawCollection => new MongoCollection(rawCollection)); } - _schemaCollection() { + _schemaCollection(): Promise { return this.connect() .then(() => this._adaptiveCollection(MongoSchemaCollectionName)) .then(collection => new MongoSchemaCollection(collection)); } - classExists(name) { + classExists(name: string) { return this.connect().then(() => { return this.database.listCollections({ name: this._collectionPrefix + name }).toArray(); }).then(collections => { @@ -170,14 +187,14 @@ export class MongoStorageAdapter { }); } - 
setClassLevelPermissions(className, CLPs) { + setClassLevelPermissions(className: string, CLPs: any): Promise { return this._schemaCollection() .then(schemaCollection => schemaCollection.updateSchema(className, { $set: { '_metadata.class_permissions': CLPs } })); } - setIndexesWithSchemaFormat(className, submittedIndexes, existingIndexes = {}, fields) { + setIndexesWithSchemaFormat(className: string, submittedIndexes: any, existingIndexes: any = {}, fields: any): Promise { if (submittedIndexes === undefined) { return Promise.resolve(); } @@ -223,7 +240,7 @@ export class MongoStorageAdapter { })); } - setIndexesFromMongo(className) { + setIndexesFromMongo(className: string) { return this.getIndexes(className).then((indexes) => { indexes = indexes.reduce((obj, index) => { if (index.key._fts) { @@ -246,24 +263,16 @@ export class MongoStorageAdapter { }); } - createClass(className, schema) { + createClass(className: string, schema: SchemaType): Promise { schema = convertParseSchemaToMongoSchema(schema); const mongoObject = mongoSchemaFromFieldsAndClassNameAndCLP(schema.fields, className, schema.classLevelPermissions, schema.indexes); mongoObject._id = className; return this.setIndexesWithSchemaFormat(className, schema.indexes, {}, schema.fields) .then(() => this._schemaCollection()) - .then(schemaCollection => schemaCollection._collection.insertOne(mongoObject)) - .then(result => MongoSchemaCollection._TESTmongoSchemaToParseSchema(result.ops[0])) - .catch(error => { - if (error.code === 11000) { //Mongo's duplicate key error - throw new Parse.Error(Parse.Error.DUPLICATE_VALUE, 'Class already exists.'); - } else { - throw error; - } - }) + .then(schemaCollection => schemaCollection.insertSchema(mongoObject)); } - addFieldIfNotExists(className, fieldName, type) { + addFieldIfNotExists(className: string, fieldName: string, type: any): Promise { return this._schemaCollection() .then(schemaCollection => schemaCollection.addFieldIfNotExists(className, fieldName, type)) .then(() => this.createIndexesIfNeeded(className, fieldName, type)); @@ -271,7 +280,7 @@ export class MongoStorageAdapter { // Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.) // and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible. - deleteClass(className) { + deleteClass(className: string) { return this._adaptiveCollection(className) .then(collection => collection.drop()) .catch(error => { @@ -312,7 +321,7 @@ export class MongoStorageAdapter { // may do so. // Returns a Promise. - deleteFields(className, schema, fieldNames) { + deleteFields(className: string, schema: SchemaType, fieldNames: string[]) { const mongoFormatNames = fieldNames.map(fieldName => { if (schema.fields[fieldName].type === 'Pointer') { return `_p_${fieldName}` @@ -339,14 +348,14 @@ export class MongoStorageAdapter { // Return a promise for all schemas known to this adapter, in Parse format. In case the // schemas cannot be retrieved, returns a promise that rejects. Requirements for the // rejection reason are TBD. - getAllClasses() { + getAllClasses(): Promise { return this._schemaCollection().then(schemasCollection => schemasCollection._fetchAllSchemasFrom_SCHEMA()); } // Return a promise for the schema with the given name, in Parse format. If // this adapter doesn't know about the schema, return a promise that rejects with // undefined as the reason. 
- getClass(className) { + getClass(className: string): Promise { return this._schemaCollection() .then(schemasCollection => schemasCollection._fetchOneSchemaFrom_SCHEMA(className)) } @@ -354,7 +363,7 @@ export class MongoStorageAdapter { // TODO: As yet not particularly well specified. Creates an object. Maybe shouldn't even need the schema, // and should infer from the type. Or maybe does need the schema for validations. Or maybe needs // the schema only for the legacy mongo format. We'll figure that out later. - createObject(className, schema, object) { + createObject(className: string, schema: SchemaType, object: any) { schema = convertParseSchemaToMongoSchema(schema); const mongoObject = parseObjectToMongoObjectForCreate(className, object, schema); return this._adaptiveCollection(className) @@ -378,7 +387,7 @@ export class MongoStorageAdapter { // Remove all objects that match the given Parse Query. // If no objects match, reject with OBJECT_NOT_FOUND. If objects are found and deleted, resolve with undefined. // If there is some other error, reject with INTERNAL_SERVER_ERROR. - deleteObjectsByQuery(className, schema, query) { + deleteObjectsByQuery(className: string, schema: SchemaType, query: QueryType) { schema = convertParseSchemaToMongoSchema(schema); return this._adaptiveCollection(className) .then(collection => { @@ -396,7 +405,7 @@ export class MongoStorageAdapter { } // Apply the update to all objects that match the given Parse Query. - updateObjectsByQuery(className, schema, query, update) { + updateObjectsByQuery(className: string, schema: SchemaType, query: QueryType, update: any) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); @@ -406,7 +415,7 @@ export class MongoStorageAdapter { // Atomically finds and updates an object based on query. // Return value not currently well specified. - findOneAndUpdate(className, schema, query, update) { + findOneAndUpdate(className: string, schema: SchemaType, query: QueryType, update: any) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); @@ -416,7 +425,7 @@ export class MongoStorageAdapter { } // Hopefully we can get rid of this. It's only used for config and hooks. - upsertOneObject(className, schema, query, update) { + upsertOneObject(className: string, schema: SchemaType, query: QueryType, update: any) { schema = convertParseSchemaToMongoSchema(schema); const mongoUpdate = transformUpdate(className, update, schema); const mongoWhere = transformWhere(className, query, schema); @@ -425,7 +434,7 @@ export class MongoStorageAdapter { } // Executes a find. Accepts: className, query in Parse format, and { skip, limit, sort }. - find(className, schema, query, { skip, limit, sort, keys, readPreference }) { + find(className: string, schema: SchemaType, query: QueryType, { skip, limit, sort, keys, readPreference }: QueryOptions): Promise { schema = convertParseSchemaToMongoSchema(schema); const mongoWhere = transformWhere(className, query, schema); const mongoSort = _.mapKeys(sort, (value, fieldName) => transformKey(className, fieldName, schema)); @@ -453,7 +462,7 @@ export class MongoStorageAdapter { // As such, we shouldn't expose this function to users of parse until we have an out-of-band // Way of determining if a field is nullable. 
Undefined doesn't count against uniqueness, // which is why we use sparse indexes. - ensureUniqueness(className, schema, fieldNames) { + ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) { schema = convertParseSchemaToMongoSchema(schema); const indexCreationRequest = {}; const mongoFieldNames = fieldNames.map(fieldName => transformKey(className, fieldName, schema)); @@ -471,14 +480,14 @@ export class MongoStorageAdapter { } // Used in tests - _rawFind(className, query) { + _rawFind(className: string, query: QueryType) { return this._adaptiveCollection(className).then(collection => collection.find(query, { maxTimeMS: this._maxTimeMS, })); } // Executes a count. - count(className, schema, query, readPreference) { + count(className: string, schema: SchemaType, query: QueryType, readPreference: ?string) { schema = convertParseSchemaToMongoSchema(schema); readPreference = this._parseReadPreference(readPreference); return this._adaptiveCollection(className) @@ -488,7 +497,7 @@ export class MongoStorageAdapter { })); } - distinct(className, schema, query, fieldName) { + distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) { schema = convertParseSchemaToMongoSchema(schema); const isPointerField = schema.fields[fieldName] && schema.fields[fieldName].type === 'Pointer'; if (isPointerField) { @@ -505,7 +514,7 @@ export class MongoStorageAdapter { })); } - aggregate(className, schema, pipeline, readPreference) { + aggregate(className: string, schema: any, pipeline: any, readPreference: ?string) { readPreference = this._parseReadPreference(readPreference); return this._adaptiveCollection(className) .then(collection => collection.aggregate(pipeline, { readPreference, maxTimeMS: this._maxTimeMS })) @@ -521,7 +530,7 @@ export class MongoStorageAdapter { .then(objects => objects.map(object => mongoObjectToParseObject(className, object, schema))); } - _parseReadPreference(readPreference) { + _parseReadPreference(readPreference: ?string): ?string { switch (readPreference) { case 'PRIMARY': readPreference = ReadPreference.PRIMARY; @@ -548,21 +557,21 @@ export class MongoStorageAdapter { return readPreference; } - performInitialization() { + performInitialization(): Promise { return Promise.resolve(); } - createIndex(className, index) { + createIndex(className: string, index: any) { return this._adaptiveCollection(className) .then(collection => collection._mongoCollection.createIndex(index)); } - createIndexes(className, indexes) { + createIndexes(className: string, indexes: any) { return this._adaptiveCollection(className) .then(collection => collection._mongoCollection.createIndexes(indexes)); } - createIndexesIfNeeded(className, fieldName, type) { + createIndexesIfNeeded(className: string, fieldName: string, type: any) { if (type && type.type === 'Polygon') { const index = { [fieldName]: '2dsphere' @@ -572,7 +581,7 @@ export class MongoStorageAdapter { return Promise.resolve(); } - createTextIndexesIfNeeded(className, query, schema) { + createTextIndexesIfNeeded(className: string, query: QueryType, schema: any): Promise { for(const fieldName in query) { if (!query[fieldName] || !query[fieldName].$text) { continue; @@ -599,22 +608,22 @@ export class MongoStorageAdapter { return Promise.resolve(); } - getIndexes(className) { + getIndexes(className: string) { return this._adaptiveCollection(className) .then(collection => collection._mongoCollection.indexes()); } - dropIndex(className, index) { + dropIndex(className: string, index: any) { return 
this._adaptiveCollection(className) .then(collection => collection._mongoCollection.dropIndex(index)); } - dropAllIndexes(className) { + dropAllIndexes(className: string) { return this._adaptiveCollection(className) .then(collection => collection._mongoCollection.dropIndexes()); } - updateSchemaWithIndexes() { + updateSchemaWithIndexes(): Promise { return this.getAllClasses() .then((classes) => { const promises = classes.map((schema) => { @@ -626,4 +635,3 @@ export class MongoStorageAdapter { } export default MongoStorageAdapter; -module.exports = MongoStorageAdapter; // Required for tests diff --git a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js index 8d830c6bd4..2e06307d34 100644 --- a/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js +++ b/src/Adapters/Storage/Postgres/PostgresStorageAdapter.js @@ -1,5 +1,8 @@ +// @flow import { createClient } from './PostgresClient'; +// @flow-disable-next import Parse from 'parse/node'; +// @flow-disable-next import _ from 'lodash'; import sql from './sql'; @@ -12,13 +15,17 @@ const PostgresUniqueIndexViolationError = '23505'; const PostgresTransactionAbortedError = '25P02'; const logger = require('../../../logger'); -const debug = function(){ - let args = [...arguments]; +const debug = function(...args: any) { args = ['PG: ' + arguments[0]].concat(args.slice(1, args.length)); const log = logger.getLogger(); log.debug.apply(log, args); } +import { StorageAdapter } from '../StorageAdapter'; +import type { SchemaType, + QueryType, + QueryOptions } from '../StorageAdapter'; + const parseTypeToPostgresType = type => { switch (type.type) { case 'String': return 'text'; @@ -563,17 +570,17 @@ const buildWhereClause = ({ schema, query, index }) => { return { pattern: patterns.join(' AND '), values, sorts }; } -export class PostgresStorageAdapter { +export class PostgresStorageAdapter implements StorageAdapter { // Private _collectionPrefix: string; - _client; - _pgp; + _client: any; + _pgp: any; constructor({ uri, collectionPrefix = '', databaseOptions - }) { + }: any) { this._collectionPrefix = collectionPrefix; const { client, pgp } = createClient(uri, databaseOptions); this._client = client; @@ -587,7 +594,7 @@ export class PostgresStorageAdapter { this._client.$pool.end(); } - _ensureSchemaCollectionExists(conn) { + _ensureSchemaCollectionExists(conn: any) { conn = conn || this._client; return conn.none('CREATE TABLE IF NOT EXISTS "_SCHEMA" ( "className" varChar(120), "schema" jsonb, "isParseClass" bool, PRIMARY KEY ("className") )') .catch(error => { @@ -601,11 +608,11 @@ export class PostgresStorageAdapter { }); } - classExists(name) { + classExists(name: string) { return this._client.one('SELECT EXISTS (SELECT 1 FROM information_schema.tables WHERE table_name = $1)', [name], a => a.exists); } - setClassLevelPermissions(className, CLPs) { + setClassLevelPermissions(className: string, CLPs: any) { const self = this; return this._client.task('set-class-level-permissions', function * (t) { yield self._ensureSchemaCollectionExists(t); @@ -614,7 +621,7 @@ export class PostgresStorageAdapter { }); } - setIndexesWithSchemaFormat(className, submittedIndexes, existingIndexes = {}, fields, conn) { + setIndexesWithSchemaFormat(className: string, submittedIndexes: any, existingIndexes: any = {}, fields: any, conn: ?any): Promise { conn = conn || this._client; const self = this; if (submittedIndexes === undefined) { @@ -661,7 +668,7 @@ export class PostgresStorageAdapter { }); } - 
createClass(className, schema, conn) { + createClass(className: string, schema: SchemaType, conn: ?any) { conn = conn || this._client; return conn.tx('create-class', t => { const q1 = this.createTable(className, schema, t); @@ -684,7 +691,7 @@ export class PostgresStorageAdapter { } // Just create a table, do not insert in schema - createTable(className, schema, conn) { + createTable(className: string, schema: SchemaType, conn: any) { conn = conn || this._client; const self = this; debug('createTable', className, schema); @@ -743,7 +750,7 @@ export class PostgresStorageAdapter { }); } - addFieldIfNotExists(className, fieldName, type) { + addFieldIfNotExists(className: string, fieldName: string, type: any) { // TODO: Must be revised for invalid logic... debug('addFieldIfNotExists', {className, fieldName, type}); const self = this; @@ -781,7 +788,7 @@ export class PostgresStorageAdapter { // Drops a collection. Resolves with true if it was a Parse Schema (eg. _User, Custom, etc.) // and resolves with false if it wasn't (eg. a join table). Rejects if deletion was impossible. - deleteClass(className) { + deleteClass(className: string) { const operations = [ {query: `DROP TABLE IF EXISTS $1:name`, values: [className]}, {query: `DELETE FROM "_SCHEMA" WHERE "className" = $1`, values: [className]} @@ -829,7 +836,7 @@ export class PostgresStorageAdapter { // may do so. // Returns a Promise. - deleteFields(className, schema, fieldNames) { + deleteFields(className: string, schema: SchemaType, fieldNames: string[]): Promise { debug('deleteFields', className, fieldNames); fieldNames = fieldNames.reduce((list, fieldName) => { const field = schema.fields[fieldName] @@ -867,7 +874,7 @@ export class PostgresStorageAdapter { // Return a promise for the schema with the given name, in Parse format. If // this adapter doesn't know about the schema, return a promise that rejects with // undefined as the reason. - getClass(className) { + getClass(className: string) { debug('getClass', className); return this._client.any('SELECT * FROM "_SCHEMA" WHERE "className"=$', { className }) .then(result => { @@ -880,7 +887,7 @@ export class PostgresStorageAdapter { } // TODO: remove the mongo format dependency in the return value - createObject(className, schema, object) { + createObject(className: string, schema: SchemaType, object: any) { debug('createObject', className, object); let columnsArray = []; const valuesArray = []; @@ -1021,7 +1028,7 @@ export class PostgresStorageAdapter { // Remove all objects that match the given Parse Query. // If no objects match, reject with OBJECT_NOT_FOUND. If objects are found and deleted, resolve with undefined. // If there is some other error, reject with INTERNAL_SERVER_ERROR. - deleteObjectsByQuery(className, schema, query) { + deleteObjectsByQuery(className: string, schema: SchemaType, query: QueryType) { debug('deleteObjectsByQuery', className, query); const values = [className]; const index = 2; @@ -1048,13 +1055,13 @@ export class PostgresStorageAdapter { }); } // Return value not currently well specified. - findOneAndUpdate(className, schema, query, update) { + findOneAndUpdate(className: string, schema: SchemaType, query: QueryType, update: any): Promise { debug('findOneAndUpdate', className, query, update); return this.updateObjectsByQuery(className, schema, query, update).then((val) => val[0]); } // Apply the update to all objects that match the given Parse Query. 
- updateObjectsByQuery(className, schema, query, update) { + updateObjectsByQuery(className: string, schema: SchemaType, query: QueryType, update: any): Promise<[any]> { debug('updateObjectsByQuery', className, query, update); const updatePatterns = []; const values = [className] @@ -1238,7 +1245,7 @@ export class PostgresStorageAdapter { } // Hopefully, we can get rid of this. It's only used for config and hooks. - upsertOneObject(className, schema, query, update) { + upsertOneObject(className: string, schema: SchemaType, query: QueryType, update: any) { debug('upsertOneObject', {className, query, update}); const createValue = Object.assign({}, query, update); return this.createObject(className, schema, createValue).catch((err) => { @@ -1250,7 +1257,7 @@ export class PostgresStorageAdapter { }); } - find(className, schema, query, { skip, limit, sort, keys }) { + find(className: string, schema: SchemaType, query: QueryType, { skip, limit, sort, keys }: QueryOptions) { debug('find', className, query, {skip, limit, sort, keys }); const hasLimit = limit !== undefined; const hasSkip = skip !== undefined; @@ -1270,16 +1277,17 @@ export class PostgresStorageAdapter { let sortPattern = ''; if (sort) { + const sortCopy: any = sort; const sorting = Object.keys(sort).map((key) => { // Using $idx pattern gives: non-integer constant in ORDER BY - if (sort[key] === 1) { + if (sortCopy[key] === 1) { return `"${key}" ASC`; } return `"${key}" DESC`; }).join(); sortPattern = sort !== undefined && Object.keys(sort).length > 0 ? `ORDER BY ${sorting}` : ''; } - if (where.sorts && Object.keys(where.sorts).length > 0) { + if (where.sorts && Object.keys((where.sorts: any)).length > 0) { sortPattern = `ORDER BY ${where.sorts.join()}`; } @@ -1313,7 +1321,7 @@ export class PostgresStorageAdapter { // Converts from a postgres-format object to a REST-format object. // Does not strip out anything based on a lack of authentication. - postgresObjectToParseObject(className, object, schema) { + postgresObjectToParseObject(className: string, object: any, schema: any) { Object.keys(schema.fields).forEach(fieldName => { if (schema.fields[fieldName].type === 'Pointer' && object[fieldName]) { object[fieldName] = { objectId: object[fieldName], __type: 'Pointer', className: schema.fields[fieldName].targetClass }; @@ -1392,7 +1400,7 @@ export class PostgresStorageAdapter { // As such, we shouldn't expose this function to users of parse until we have an out-of-band // Way of determining if a field is nullable. Undefined doesn't count against uniqueness, // which is why we use sparse indexes. - ensureUniqueness(className, schema, fieldNames) { + ensureUniqueness(className: string, schema: SchemaType, fieldNames: string[]) { // Use the same name for every ensureUniqueness attempt, because postgres // Will happily create the same index with multiple names. const constraintName = `unique_${fieldNames.sort().join('_')}`; @@ -1412,7 +1420,7 @@ export class PostgresStorageAdapter { } // Executes a count. 
- count(className, schema, query) { + count(className: string, schema: SchemaType, query: QueryType) { debug('count', className, query); const values = [className]; const where = buildWhereClause({ schema, query, index: 2 }); @@ -1428,7 +1436,7 @@ export class PostgresStorageAdapter { }); } - distinct(className, schema, query, fieldName) { + distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string) { debug('distinct', className, query); let field = fieldName; let column = fieldName; @@ -1476,10 +1484,10 @@ export class PostgresStorageAdapter { }).then(results => results.map(object => this.postgresObjectToParseObject(className, object, schema))); } - aggregate(className, schema, pipeline) { + aggregate(className: string, schema: any, pipeline: any) { debug('aggregate', className, pipeline); const values = [className]; - let columns = []; + let columns: string[] = []; let countField = null; let wherePattern = ''; let limitPattern = ''; @@ -1578,7 +1586,7 @@ export class PostgresStorageAdapter { }); } - performInitialization({ VolatileClassesSchemas }) { + performInitialization({ VolatileClassesSchemas }: any) { debug('performInitialization'); const promises = VolatileClassesSchemas.map((schema) => { return this.createTable(schema.className, schema).catch((err) => { @@ -1610,23 +1618,27 @@ export class PostgresStorageAdapter { }); } - createIndexes(className, indexes, conn) { + createIndexes(className: string, indexes: any, conn: ?any): Promise { return (conn || this._client).tx(t => t.batch(indexes.map(i => { return t.none('CREATE INDEX $1:name ON $2:name ($3:name)', [i.name, className, i.key]); }))); } - dropIndexes(className, indexes, conn) { + createIndexesIfNeeded(className: string, fieldName: string, type: any, conn: ?any): Promise { + return (conn || this._client).none('CREATE INDEX $1:name ON $2:name ($3:name)', [fieldName, className, type]); + } + + dropIndexes(className: string, indexes: any, conn: any): Promise { const queries = indexes.map(i => ({query: 'DROP INDEX $1:name', values: i})); return (conn || this._client).tx(t => t.none(this._pgp.helpers.concat(queries))); } - getIndexes(className) { + getIndexes(className: string) { const qs = 'SELECT * FROM pg_indexes WHERE tablename = ${className}'; return this._client.any(qs, {className}); } - updateSchemaWithIndexes() { + updateSchemaWithIndexes(): Promise { return Promise.resolve(); } } @@ -1708,10 +1720,10 @@ function createLiteralRegex(remaining) { }).join(''); } -function literalizeRegexPart(s) { +function literalizeRegexPart(s: string) { const matcher1 = /\\Q((?!\\E).*)\\E$/ - const result1 = s.match(matcher1); - if(result1 && result1.length > 1 && result1.index > -1){ + const result1: any = s.match(matcher1); + if(result1 && result1.length > 1 && result1.index > -1) { // process regex that has a beginning and an end specified for the literal text const prefix = s.substr(0, result1.index); const remaining = result1[1]; @@ -1721,7 +1733,7 @@ function literalizeRegexPart(s) { // process regex that has a beginning specified for the literal text const matcher2 = /\\Q((?!\\E).*)$/ - const result2 = s.match(matcher2); + const result2: any = s.match(matcher2); if(result2 && result2.length > 1 && result2.index > -1){ const prefix = s.substr(0, result2.index); const remaining = result2[1]; @@ -1741,4 +1753,3 @@ function literalizeRegexPart(s) { } export default PostgresStorageAdapter; -module.exports = PostgresStorageAdapter; // Required for tests diff --git a/src/Adapters/Storage/StorageAdapter.js 
b/src/Adapters/Storage/StorageAdapter.js
new file mode 100644
index 0000000000..4123117d8e
--- /dev/null
+++ b/src/Adapters/Storage/StorageAdapter.js
@@ -0,0 +1,53 @@
+// @flow
+export type SchemaType = any;
+export type StorageClass = any;
+export type QueryType = any;
+
+export type QueryOptions = {
+  skip?: number,
+  limit?: number,
+  acl?: string[],
+  sort?: {[string]: number},
+  count?: boolean | number,
+  keys?: string[],
+  op?: string,
+  distinct?: boolean,
+  pipeline?: any,
+  readPreference?: ?string,
+};
+
+export type UpdateQueryOptions = {
+  many?: boolean,
+  upsert?: boolean
+}
+
+export type FullQueryOptions = QueryOptions & UpdateQueryOptions;
+
+export interface StorageAdapter {
+  classExists(className: string): Promise;
+  setClassLevelPermissions(className: string, clps: any): Promise;
+  createClass(className: string, schema: SchemaType): Promise;
+  addFieldIfNotExists(className: string, fieldName: string, type: any): Promise;
+  deleteClass(className: string): Promise;
+  deleteAllClasses(): Promise;
+  deleteFields(className: string, schema: SchemaType, fieldNames: Array): Promise;
+  getAllClasses(): Promise;
+  getClass(className: string): Promise;
+  createObject(className: string, schema: SchemaType, object: any): Promise;
+  deleteObjectsByQuery(className: string, schema: SchemaType, query: QueryType): Promise;
+  updateObjectsByQuery(className: string, schema: SchemaType, query: QueryType, update: any): Promise<[any]>;
+  findOneAndUpdate(className: string, schema: SchemaType, query: QueryType, update: any): Promise;
+  upsertOneObject(className: string, schema: SchemaType, query: QueryType, update: any): Promise;
+  find(className: string, schema: SchemaType, query: QueryType, options: QueryOptions): Promise<[any]>;
+  ensureUniqueness(className: string, schema: SchemaType, fieldNames: Array): Promise;
+  count(className: string, schema: SchemaType, query: QueryType, readPreference: ?string): Promise;
+  distinct(className: string, schema: SchemaType, query: QueryType, fieldName: string): Promise;
+  aggregate(className: string, schema: any, pipeline: any, readPreference: ?string): Promise;
+  performInitialization(options: ?any): Promise;
+
+  // Indexing
+  createIndexes(className: string, indexes: any, conn: ?any): Promise;
+  getIndexes(className: string, connection: ?any): Promise;
+  updateSchemaWithIndexes(): Promise;
+  setIndexesWithSchemaFormat(className: string, submittedIndexes: any, existingIndexes: any, fields: any, conn: ?any): Promise;
+}
diff --git a/src/Controllers/DatabaseController.js b/src/Controllers/DatabaseController.js
index b3cd130ac4..19d0179fb6 100644
--- a/src/Controllers/DatabaseController.js
+++ b/src/Controllers/DatabaseController.js
@@ -1,12 +1,20 @@
-// A database adapter that works with data exported from the hosted
+// @flow
+// A database adapter that works with data exported from the hosted
 // Parse database.
+// @flow-disable-next import { Parse } from 'parse/node'; +// @flow-disable-next import _ from 'lodash'; +// @flow-disable-next import intersect from 'intersect'; +// @flow-disable-next import deepcopy from 'deepcopy'; import logger from '../logger'; -import * as SchemaController from './SchemaController'; +import * as SchemaController from './SchemaController'; +import { StorageAdapter } from '../Adapters/Storage/StorageAdapter'; +import type { QueryOptions, + FullQueryOptions } from '../Adapters/Storage/StorageAdapter'; function addWriteACL(query, acl) { const newQuery = _.cloneDeep(query); @@ -48,7 +56,7 @@ const isSpecialQueryKey = key => { return specialQuerykeys.indexOf(key) >= 0; } -const validateQuery = query => { +const validateQuery = (query: any): void => { if (query.ACL) { throw new Parse.Error(Parse.Error.INVALID_QUERY, 'Cannot query on ACL.'); } @@ -117,75 +125,6 @@ const validateQuery = query => { }); } -function DatabaseController(adapter, schemaCache) { - this.adapter = adapter; - this.schemaCache = schemaCache; - // We don't want a mutable this.schema, because then you could have - // one request that uses different schemas for different parts of - // it. Instead, use loadSchema to get a schema. - this.schemaPromise = null; -} - -DatabaseController.prototype.collectionExists = function(className) { - return this.adapter.classExists(className); -}; - -DatabaseController.prototype.purgeCollection = function(className) { - return this.loadSchema() - .then(schemaController => schemaController.getOneSchema(className)) - .then(schema => this.adapter.deleteObjectsByQuery(className, schema, {})); -}; - -DatabaseController.prototype.validateClassName = function(className) { - if (!SchemaController.classNameIsValid(className)) { - return Promise.reject(new Parse.Error(Parse.Error.INVALID_CLASS_NAME, 'invalid className: ' + className)); - } - return Promise.resolve(); -}; - -// Returns a promise for a schemaController. -DatabaseController.prototype.loadSchema = function(options = {clearCache: false}) { - if (!this.schemaPromise) { - this.schemaPromise = SchemaController.load(this.adapter, this.schemaCache, options); - this.schemaPromise.then(() => delete this.schemaPromise, - () => delete this.schemaPromise); - } - return this.schemaPromise; -}; - -// Returns a promise for the classname that is related to the given -// classname through the key. -// TODO: make this not in the DatabaseController interface -DatabaseController.prototype.redirectClassNameForKey = function(className, key) { - return this.loadSchema().then((schema) => { - var t = schema.getExpectedType(className, key); - if (t && t.type == 'Relation') { - return t.targetClass; - } else { - return className; - } - }); -}; - -// Uses the schema to validate the object (REST API format). -// Returns a promise that resolves to the new schema. -// This does not update this.schema, because in a situation like a -// batch request, that could confuse other users of the schema. -DatabaseController.prototype.validateObject = function(className, object, query, { acl }) { - let schema; - const isMaster = acl === undefined; - var aclGroup = acl || []; - return this.loadSchema().then(s => { - schema = s; - if (isMaster) { - return Promise.resolve(); - } - return this.canAddField(schema, className, object, aclGroup); - }).then(() => { - return schema.validateObject(className, object, query); - }); -}; - // Filters out any data that shouldn't be on this REST-formatted object. 
const filterSensitiveData = (isMaster, aclGroup, className, object) => { if (className !== '_User') { @@ -216,6 +155,8 @@ const filterSensitiveData = (isMaster, aclGroup, className, object) => { return object; }; +import type { LoadSchemaOptions } from './types'; + // Runs an update on the database. // Returns a promise for an object with the new values for field // modifications that don't know their results ahead of time, like @@ -230,84 +171,6 @@ const isSpecialUpdateKey = key => { return specialKeysForUpdate.indexOf(key) >= 0; } -DatabaseController.prototype.update = function(className, query, update, { - acl, - many, - upsert, -} = {}, skipSanitization = false) { - const originalQuery = query; - const originalUpdate = update; - // Make a copy of the object, so we don't mutate the incoming data. - update = deepcopy(update); - var relationUpdates = []; - var isMaster = acl === undefined; - var aclGroup = acl || []; - return this.loadSchema() - .then(schemaController => { - return (isMaster ? Promise.resolve() : schemaController.validatePermission(className, aclGroup, 'update')) - .then(() => { - relationUpdates = this.collectRelationUpdates(className, originalQuery.objectId, update); - if (!isMaster) { - query = this.addPointerPermissions(schemaController, className, 'update', query, aclGroup); - } - if (!query) { - return Promise.resolve(); - } - if (acl) { - query = addWriteACL(query, acl); - } - validateQuery(query); - return schemaController.getOneSchema(className, true) - .catch(error => { - // If the schema doesn't exist, pretend it exists with no fields. This behavior - // will likely need revisiting. - if (error === undefined) { - return { fields: {} }; - } - throw error; - }) - .then(schema => { - Object.keys(update).forEach(fieldName => { - if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) { - throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Invalid field name for update: ${fieldName}`); - } - fieldName = fieldName.split('.')[0]; - if (!SchemaController.fieldNameIsValid(fieldName) && !isSpecialUpdateKey(fieldName)) { - throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Invalid field name for update: ${fieldName}`); - } - }); - for (const updateOperation in update) { - if (Object.keys(updateOperation).some(innerKey => innerKey.includes('$') || innerKey.includes('.'))) { - throw new Parse.Error(Parse.Error.INVALID_NESTED_KEY, "Nested keys should not contain the '$' or '.' 
characters"); - } - } - update = transformObjectACL(update); - transformAuthData(className, update, schema); - if (many) { - return this.adapter.updateObjectsByQuery(className, schema, query, update); - } else if (upsert) { - return this.adapter.upsertOneObject(className, schema, query, update); - } else { - return this.adapter.findOneAndUpdate(className, schema, query, update) - } - }); - }) - .then(result => { - if (!result) { - return Promise.reject(new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.')); - } - return this.handleRelationUpdates(className, originalQuery.objectId, update, relationUpdates).then(() => { - return result; - }); - }).then((result) => { - if (skipSanitization) { - return Promise.resolve(result); - } - return sanitizeDatabaseResult(originalUpdate, result); - }); - }); -}; - function expandResultOnKeyPath(object, key, value) { if (key.indexOf('.') < 0) { object[key] = value[key]; @@ -321,7 +184,7 @@ function expandResultOnKeyPath(object, key, value) { return object; } -function sanitizeDatabaseResult(originalObject, result) { +function sanitizeDatabaseResult(originalObject, result): Promise { const response = {}; if (!result) { return Promise.resolve(response); @@ -339,149 +202,10 @@ function sanitizeDatabaseResult(originalObject, result) { return Promise.resolve(response); } -// Collect all relation-updating operations from a REST-format update. -// Returns a list of all relation updates to perform -// This mutates update. -DatabaseController.prototype.collectRelationUpdates = function(className, objectId, update) { - var ops = []; - var deleteMe = []; - objectId = update.objectId || objectId; - - var process = (op, key) => { - if (!op) { - return; - } - if (op.__op == 'AddRelation') { - ops.push({key, op}); - deleteMe.push(key); - } - - if (op.__op == 'RemoveRelation') { - ops.push({key, op}); - deleteMe.push(key); - } - - if (op.__op == 'Batch') { - for (var x of op.ops) { - process(x, key); - } - } - }; - - for (const key in update) { - process(update[key], key); - } - for (const key of deleteMe) { - delete update[key]; - } - return ops; +function joinTableName(className, key) { + return `_Join:${key}:${className}`; } -// Processes relation-updating operations from a REST-format update. -// Returns a promise that resolves when all updates have been performed -DatabaseController.prototype.handleRelationUpdates = function(className, objectId, update, ops) { - var pending = []; - objectId = update.objectId || objectId; - ops.forEach(({key, op}) => { - if (!op) { - return; - } - if (op.__op == 'AddRelation') { - for (const object of op.objects) { - pending.push(this.addRelation(key, className, - objectId, - object.objectId)); - } - } - - if (op.__op == 'RemoveRelation') { - for (const object of op.objects) { - pending.push(this.removeRelation(key, className, - objectId, - object.objectId)); - } - } - }); - - return Promise.all(pending); -}; - -// Adds a relation. -// Returns a promise that resolves successfully iff the add was successful. -const relationSchema = { fields: { relatedId: { type: 'String' }, owningId: { type: 'String' } } }; -DatabaseController.prototype.addRelation = function(key, fromClassName, fromId, toId) { - const doc = { - relatedId: toId, - owningId : fromId - }; - return this.adapter.upsertOneObject(`_Join:${key}:${fromClassName}`, relationSchema, doc, doc); -}; - -// Removes a relation. -// Returns a promise that resolves successfully iff the remove was -// successful. 
-DatabaseController.prototype.removeRelation = function(key, fromClassName, fromId, toId) { - var doc = { - relatedId: toId, - owningId: fromId - }; - return this.adapter.deleteObjectsByQuery(`_Join:${key}:${fromClassName}`, relationSchema, doc) - .catch(error => { - // We don't care if they try to delete a non-existent relation. - if (error.code == Parse.Error.OBJECT_NOT_FOUND) { - return; - } - throw error; - }); -}; - -// Removes objects matches this query from the database. -// Returns a promise that resolves successfully iff the object was -// deleted. -// Options: -// acl: a list of strings. If the object to be updated has an ACL, -// one of the provided strings must provide the caller with -// write permissions. -DatabaseController.prototype.destroy = function(className, query, { acl } = {}) { - const isMaster = acl === undefined; - const aclGroup = acl || []; - - return this.loadSchema() - .then(schemaController => { - return (isMaster ? Promise.resolve() : schemaController.validatePermission(className, aclGroup, 'delete')) - .then(() => { - if (!isMaster) { - query = this.addPointerPermissions(schemaController, className, 'delete', query, aclGroup); - if (!query) { - throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.'); - } - } - // delete by query - if (acl) { - query = addWriteACL(query, acl); - } - validateQuery(query); - return schemaController.getOneSchema(className) - .catch(error => { - // If the schema doesn't exist, pretend it exists with no fields. This behavior - // will likely need revisiting. - if (error === undefined) { - return { fields: {} }; - } - throw error; - }) - .then(parseFormatSchema => this.adapter.deleteObjectsByQuery(className, parseFormatSchema, query)) - .catch(error => { - // When deleting sessions while changing passwords, don't throw an error if they don't have any sessions. - if (className === "_Session" && error.code === Parse.Error.OBJECT_NOT_FOUND) { - return Promise.resolve({}); - } - throw error; - }); - }); - }); -}; - const flattenUpdateOperatorsForCreate = object => { for (const key in object) { if (object[key] && object[key].__op) { @@ -537,349 +261,672 @@ const transformAuthData = (className, object, schema) => { delete object.authData; } } +// Transforms a Database format ACL to a REST API format ACL +const untransformObjectACL = ({_rperm, _wperm, ...output}) => { + if (_rperm || _wperm) { + output.ACL = {}; -// Inserts an object into the database. -// Returns a promise that resolves successfully iff the object saved. -DatabaseController.prototype.create = function(className, object, { acl } = {}) { - // Make a copy of the object, so we don't mutate the incoming data. - const originalObject = object; - object = transformObjectACL(object); - - object.createdAt = { iso: object.createdAt, __type: 'Date' }; - object.updatedAt = { iso: object.updatedAt, __type: 'Date' }; - - var isMaster = acl === undefined; - var aclGroup = acl || []; - const relationUpdates = this.collectRelationUpdates(className, null, object); - return this.validateClassName(className) - .then(() => this.loadSchema()) - .then(schemaController => { - return (isMaster ? 
Promise.resolve() : schemaController.validatePermission(className, aclGroup, 'create')) - .then(() => schemaController.enforceClassExists(className)) - .then(() => schemaController.reloadData()) - .then(() => schemaController.getOneSchema(className, true)) - .then(schema => { - transformAuthData(className, object, schema); - flattenUpdateOperatorsForCreate(object); - return this.adapter.createObject(className, SchemaController.convertSchemaToAdapterSchema(schema), object); - }) - .then(result => { - return this.handleRelationUpdates(className, null, object, relationUpdates).then(() => { - return sanitizeDatabaseResult(originalObject, result.ops[0]) - }); - }); - }) -}; + (_rperm || []).forEach(entry => { + if (!output.ACL[entry]) { + output.ACL[entry] = { read: true }; + } else { + output.ACL[entry]['read'] = true; + } + }); -DatabaseController.prototype.canAddField = function(schema, className, object, aclGroup) { - const classSchema = schema.data[className]; - if (!classSchema) { - return Promise.resolve(); - } - const fields = Object.keys(object); - const schemaFields = Object.keys(classSchema); - const newKeys = fields.filter((field) => { - return schemaFields.indexOf(field) < 0; - }) - if (newKeys.length > 0) { - return schema.validatePermission(className, aclGroup, 'addField'); + (_wperm || []).forEach(entry => { + if (!output.ACL[entry]) { + output.ACL[entry] = { write: true }; + } else { + output.ACL[entry]['write'] = true; + } + }); } - return Promise.resolve(); + return output; } -// Won't delete collections in the system namespace -// Returns a promise. -DatabaseController.prototype.deleteEverything = function() { - this.schemaPromise = null; - return Promise.all([ - this.adapter.deleteAllClasses(), - this.schemaCache.clear() - ]); -}; +const relationSchema = { fields: { relatedId: { type: 'String' }, owningId: { type: 'String' } } }; -// Returns a promise for a list of related ids given an owning id. -// className here is the owning className. -DatabaseController.prototype.relatedIds = function(className, key, owningId, queryOptions) { - const { skip, limit, sort } = queryOptions; - const findOptions = {}; - if (sort && sort.createdAt && this.adapter.canSortOnJoinTables) { - findOptions.sort = { '_id' : sort.createdAt }; - findOptions.limit = limit; - findOptions.skip = skip; - queryOptions.skip = 0; +class DatabaseController { + adapter: StorageAdapter; + schemaCache: any; + schemaPromise: ?Promise; + + constructor(adapter: StorageAdapter, schemaCache: any) { + this.adapter = adapter; + this.schemaCache = schemaCache; + // We don't want a mutable this.schema, because then you could have + // one request that uses different schemas for different parts of + // it. Instead, use loadSchema to get a schema. + this.schemaPromise = null; } - return this.adapter.find(joinTableName(className, key), relationSchema, { owningId }, findOptions) - .then(results => results.map(result => result.relatedId)); -}; -// Returns a promise for a list of owning ids given some related ids. -// className here is the owning className. 
-DatabaseController.prototype.owningIds = function(className, key, relatedIds) { - return this.adapter.find(joinTableName(className, key), relationSchema, { relatedId: { '$in': relatedIds } }, {}) - .then(results => results.map(result => result.owningId)); -}; + collectionExists(className: string): Promise { + return this.adapter.classExists(className); + } -// Modifies query so that it no longer has $in on relation fields, or -// equal-to-pointer constraints on relation fields. -// Returns a promise that resolves when query is mutated -DatabaseController.prototype.reduceInRelation = function(className, query, schema) { + purgeCollection(className: string): Promise { + return this.loadSchema() + .then(schemaController => schemaController.getOneSchema(className)) + .then(schema => this.adapter.deleteObjectsByQuery(className, schema, {})); + } - // Search for an in-relation or equal-to-relation - // Make it sequential for now, not sure of paralleization side effects - if (query['$or']) { - const ors = query['$or']; - return Promise.all(ors.map((aQuery, index) => { - return this.reduceInRelation(className, aQuery, schema).then((aQuery) => { - query['$or'][index] = aQuery; - }); - })).then(() => { - return Promise.resolve(query); - }); + validateClassName(className: string): Promise { + if (!SchemaController.classNameIsValid(className)) { + return Promise.reject(new Parse.Error(Parse.Error.INVALID_CLASS_NAME, 'invalid className: ' + className)); + } + return Promise.resolve(); } - const promises = Object.keys(query).map((key) => { - const t = schema.getExpectedType(className, key); - if (!t || t.type !== 'Relation') { - return Promise.resolve(query); + // Returns a promise for a schemaController. + loadSchema(options: LoadSchemaOptions = {clearCache: false}): Promise { + if (this.schemaPromise != null) { + return this.schemaPromise; } - let queries = null; - if (query[key] && (query[key]['$in'] || query[key]['$ne'] || query[key]['$nin'] || query[key].__type == 'Pointer')) { - // Build the list of queries - queries = Object.keys(query[key]).map((constraintKey) => { - let relatedIds; - let isNegation = false; - if (constraintKey === 'objectId') { - relatedIds = [query[key].objectId]; - } else if (constraintKey == '$in') { - relatedIds = query[key]['$in'].map(r => r.objectId); - } else if (constraintKey == '$nin') { - isNegation = true; - relatedIds = query[key]['$nin'].map(r => r.objectId); - } else if (constraintKey == '$ne') { - isNegation = true; - relatedIds = [query[key]['$ne'].objectId]; - } else { - return; - } - return { - isNegation, - relatedIds - } + this.schemaPromise = SchemaController.load(this.adapter, this.schemaCache, options); + this.schemaPromise.then(() => delete this.schemaPromise, + () => delete this.schemaPromise); + return this.loadSchema(options); + } + + // Returns a promise for the classname that is related to the given + // classname through the key. + // TODO: make this not in the DatabaseController interface + redirectClassNameForKey(className: string, key: string): Promise { + return this.loadSchema().then((schema) => { + var t = schema.getExpectedType(className, key); + if (t != null && typeof t !== 'string' && t.type === 'Relation') { + return t.targetClass; + } + return className; + }); + } + + // Uses the schema to validate the object (REST API format). + // Returns a promise that resolves to the new schema. + // This does not update this.schema, because in a situation like a + // batch request, that could confuse other users of the schema. 
+ validateObject(className: string, object: any, query: any, { acl }: QueryOptions): Promise { + let schema; + const isMaster = acl === undefined; + var aclGroup: string[] = acl || []; + return this.loadSchema().then(s => { + schema = s; + if (isMaster) { + return Promise.resolve(); + } + return this.canAddField(schema, className, object, aclGroup); + }).then(() => { + return schema.validateObject(className, object, query); + }); + } + + update(className: string, query: any, update: any, { + acl, + many, + upsert, + }: FullQueryOptions = {}, skipSanitization: boolean = false): Promise { + const originalQuery = query; + const originalUpdate = update; + // Make a copy of the object, so we don't mutate the incoming data. + update = deepcopy(update); + var relationUpdates = []; + var isMaster = acl === undefined; + var aclGroup = acl || []; + return this.loadSchema() + .then(schemaController => { + return (isMaster ? Promise.resolve() : schemaController.validatePermission(className, aclGroup, 'update')) + .then(() => { + relationUpdates = this.collectRelationUpdates(className, originalQuery.objectId, update); + if (!isMaster) { + query = this.addPointerPermissions(schemaController, className, 'update', query, aclGroup); + } + if (!query) { + return Promise.resolve(); + } + if (acl) { + query = addWriteACL(query, acl); + } + validateQuery(query); + return schemaController.getOneSchema(className, true) + .catch(error => { + // If the schema doesn't exist, pretend it exists with no fields. This behavior + // will likely need revisiting. + if (error === undefined) { + return { fields: {} }; + } + throw error; + }) + .then(schema => { + Object.keys(update).forEach(fieldName => { + if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) { + throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Invalid field name for update: ${fieldName}`); + } + fieldName = fieldName.split('.')[0]; + if (!SchemaController.fieldNameIsValid(fieldName) && !isSpecialUpdateKey(fieldName)) { + throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Invalid field name for update: ${fieldName}`); + } + }); + for (const updateOperation: any in update) { + if (Object.keys(updateOperation).some(innerKey => innerKey.includes('$') || innerKey.includes('.'))) { + throw new Parse.Error(Parse.Error.INVALID_NESTED_KEY, "Nested keys should not contain the '$' or '.' characters"); + } + } + update = transformObjectACL(update); + transformAuthData(className, update, schema); + if (many) { + return this.adapter.updateObjectsByQuery(className, schema, query, update); + } else if (upsert) { + return this.adapter.upsertOneObject(className, schema, query, update); + } else { + return this.adapter.findOneAndUpdate(className, schema, query, update) + } + }); + }) + .then((result: any) => { + if (!result) { + throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.'); + } + return this.handleRelationUpdates(className, originalQuery.objectId, update, relationUpdates).then(() => { + return result; + }); + }).then((result) => { + if (skipSanitization) { + return Promise.resolve(result); + } + return sanitizeDatabaseResult(originalUpdate, result); + }); }); - } else { - queries = [{isNegation: false, relatedIds: []}]; + } + + // Collect all relation-updating operations from a REST-format update. + // Returns a list of all relation updates to perform + // This mutates update. 
+ collectRelationUpdates(className: string, objectId: ?string, update: any) { + var ops = []; + var deleteMe = []; + objectId = update.objectId || objectId; + + var process = (op, key) => { + if (!op) { + return; + } + if (op.__op == 'AddRelation') { + ops.push({key, op}); + deleteMe.push(key); + } + + if (op.__op == 'RemoveRelation') { + ops.push({key, op}); + deleteMe.push(key); + } + + if (op.__op == 'Batch') { + for (var x of op.ops) { + process(x, key); + } + } + }; + + for (const key in update) { + process(update[key], key); + } + for (const key of deleteMe) { + delete update[key]; } + return ops; + } - // remove the current queryKey as we don,t need it anymore - delete query[key]; - // execute each query independently to build the list of - // $in / $nin - const promises = queries.map((q) => { - if (!q) { - return Promise.resolve(); + // Processes relation-updating operations from a REST-format update. + // Returns a promise that resolves when all updates have been performed + handleRelationUpdates(className: string, objectId: string, update: any, ops: any) { + var pending = []; + objectId = update.objectId || objectId; + ops.forEach(({key, op}) => { + if (!op) { + return; } - return this.owningIds(className, key, q.relatedIds).then((ids) => { - if (q.isNegation) { - this.addNotInObjectIdsIds(ids, query); - } else { - this.addInObjectIdsIds(ids, query); + if (op.__op == 'AddRelation') { + for (const object of op.objects) { + pending.push(this.addRelation(key, className, + objectId, + object.objectId)); } - return Promise.resolve(); - }); + } + + if (op.__op == 'RemoveRelation') { + for (const object of op.objects) { + pending.push(this.removeRelation(key, className, + objectId, + object.objectId)); + } + } }); - return Promise.all(promises).then(() => { + return Promise.all(pending); + } + + // Adds a relation. + // Returns a promise that resolves successfully iff the add was successful. + addRelation(key: string, fromClassName: string, fromId: string, toId: string) { + const doc = { + relatedId: toId, + owningId : fromId + }; + return this.adapter.upsertOneObject(`_Join:${key}:${fromClassName}`, relationSchema, doc, doc); + } + + // Removes a relation. + // Returns a promise that resolves successfully iff the remove was + // successful. + removeRelation(key: string, fromClassName: string, fromId: string, toId: string) { + var doc = { + relatedId: toId, + owningId: fromId + }; + return this.adapter.deleteObjectsByQuery(`_Join:${key}:${fromClassName}`, relationSchema, doc) + .catch(error => { + // We don't care if they try to delete a non-existent relation. + if (error.code == Parse.Error.OBJECT_NOT_FOUND) { + return; + } + throw error; + }); + } + + // Removes objects matches this query from the database. + // Returns a promise that resolves successfully iff the object was + // deleted. + // Options: + // acl: a list of strings. If the object to be updated has an ACL, + // one of the provided strings must provide the caller with + // write permissions. + destroy(className: string, query: any, { acl }: QueryOptions = {}): Promise { + const isMaster = acl === undefined; + const aclGroup = acl || []; + + return this.loadSchema() + .then(schemaController => { + return (isMaster ? 
Promise.resolve() : schemaController.validatePermission(className, aclGroup, 'delete')) + .then(() => { + if (!isMaster) { + query = this.addPointerPermissions(schemaController, className, 'delete', query, aclGroup); + if (!query) { + throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.'); + } + } + // delete by query + if (acl) { + query = addWriteACL(query, acl); + } + validateQuery(query); + return schemaController.getOneSchema(className) + .catch(error => { + // If the schema doesn't exist, pretend it exists with no fields. This behavior + // will likely need revisiting. + if (error === undefined) { + return { fields: {} }; + } + throw error; + }) + .then(parseFormatSchema => this.adapter.deleteObjectsByQuery(className, parseFormatSchema, query)) + .catch(error => { + // When deleting sessions while changing passwords, don't throw an error if they don't have any sessions. + if (className === "_Session" && error.code === Parse.Error.OBJECT_NOT_FOUND) { + return Promise.resolve({}); + } + throw error; + }); + }); + }); + } + + // Inserts an object into the database. + // Returns a promise that resolves successfully iff the object saved. + create(className: string, object: any, { acl }: QueryOptions = {}): Promise { + // Make a copy of the object, so we don't mutate the incoming data. + const originalObject = object; + object = transformObjectACL(object); + + object.createdAt = { iso: object.createdAt, __type: 'Date' }; + object.updatedAt = { iso: object.updatedAt, __type: 'Date' }; + + var isMaster = acl === undefined; + var aclGroup = acl || []; + const relationUpdates = this.collectRelationUpdates(className, null, object); + return this.validateClassName(className) + .then(() => this.loadSchema()) + .then(schemaController => { + return (isMaster ? Promise.resolve() : schemaController.validatePermission(className, aclGroup, 'create')) + .then(() => schemaController.enforceClassExists(className)) + .then(() => schemaController.reloadData()) + .then(() => schemaController.getOneSchema(className, true)) + .then(schema => { + transformAuthData(className, object, schema); + flattenUpdateOperatorsForCreate(object); + return this.adapter.createObject(className, SchemaController.convertSchemaToAdapterSchema(schema), object); + }) + .then(result => { + return this.handleRelationUpdates(className, object.objectId, object, relationUpdates).then(() => { + return sanitizeDatabaseResult(originalObject, result.ops[0]) + }); + }); + }) + } + + canAddField(schema: SchemaController.SchemaController, className: string, object: any, aclGroup: string[]): Promise { + const classSchema = schema.data[className]; + if (!classSchema) { return Promise.resolve(); + } + const fields = Object.keys(object); + const schemaFields = Object.keys(classSchema); + const newKeys = fields.filter((field) => { + return schemaFields.indexOf(field) < 0; }) + if (newKeys.length > 0) { + return schema.validatePermission(className, aclGroup, 'addField'); + } + return Promise.resolve(); + } - }) + // Won't delete collections in the system namespace + // Returns a promise. 
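The relation plumbing above (addRelation / removeRelation, and the relatedIds / owningIds lookups that follow) stores each Relation field as rows of { relatedId, owningId } in a per-field join class named _Join:<key>:<className>. A minimal in-memory sketch of that layout, with a plain array standing in for the storage adapter:

// Sketch: how a Relation field maps onto its _Join:<key>:<className> table.
function joinTableName(className, key) {
  return `_Join:${key}:${className}`;
}

const tables = {}; // pretend storage: joinTableName -> [{ owningId, relatedId }]

function addRelation(key, fromClassName, fromId, toId) {
  const name = joinTableName(fromClassName, key);
  const table = tables[name] = tables[name] || [];
  // upsert semantics: insert only if this exact edge is not present yet
  if (!table.some(doc => doc.owningId === fromId && doc.relatedId === toId)) {
    table.push({ owningId: fromId, relatedId: toId });
  }
}

function relatedIds(className, key, owningId) {
  return (tables[joinTableName(className, key)] || [])
    .filter(doc => doc.owningId === owningId)
    .map(doc => doc.relatedId);
}

function owningIds(className, key, relatedIdList) {
  return (tables[joinTableName(className, key)] || [])
    .filter(doc => relatedIdList.includes(doc.relatedId))
    .map(doc => doc.owningId);
}

// A Post with a "likes" Relation<_User> stores its edges like this:
addRelation('likes', 'Post', 'post1', 'userA');
addRelation('likes', 'Post', 'post1', 'userB');
console.log(relatedIds('Post', 'likes', 'post1'));  // ['userA', 'userB']
console.log(owningIds('Post', 'likes', ['userB'])); // ['post1']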
+ deleteEverything() { + this.schemaPromise = null; + return Promise.all([ + this.adapter.deleteAllClasses(), + this.schemaCache.clear() + ]); + } - return Promise.all(promises).then(() => { - return Promise.resolve(query); - }) -}; -// Modifies query so that it no longer has $relatedTo -// Returns a promise that resolves when query is mutated -DatabaseController.prototype.reduceRelationKeys = function(className, query, queryOptions) { + // Returns a promise for a list of related ids given an owning id. + // className here is the owning className. + relatedIds(className: string, key: string, owningId: string, queryOptions: QueryOptions): Promise> { + const { skip, limit, sort } = queryOptions; + const findOptions = {}; + if (sort && sort.createdAt && this.adapter.canSortOnJoinTables) { + findOptions.sort = { '_id' : sort.createdAt }; + findOptions.limit = limit; + findOptions.skip = skip; + queryOptions.skip = 0; + } + return this.adapter.find(joinTableName(className, key), relationSchema, { owningId }, findOptions) + .then(results => results.map(result => result.relatedId)); + } - if (query['$or']) { - return Promise.all(query['$or'].map((aQuery) => { - return this.reduceRelationKeys(className, aQuery, queryOptions); - })); + // Returns a promise for a list of owning ids given some related ids. + // className here is the owning className. + owningIds(className: string, key: string, relatedIds: string): Promise { + return this.adapter.find(joinTableName(className, key), relationSchema, { relatedId: { '$in': relatedIds } }, {}) + .then(results => results.map(result => result.owningId)); } - var relatedTo = query['$relatedTo']; - if (relatedTo) { - return this.relatedIds( - relatedTo.object.className, - relatedTo.key, - relatedTo.object.objectId, - queryOptions) - .then((ids) => { - delete query['$relatedTo']; - this.addInObjectIdsIds(ids, query); - return this.reduceRelationKeys(className, query, queryOptions); + // Modifies query so that it no longer has $in on relation fields, or + // equal-to-pointer constraints on relation fields. + // Returns a promise that resolves when query is mutated + reduceInRelation(className: string, query: any, schema: any): Promise { + // Search for an in-relation or equal-to-relation + // Make it sequential for now, not sure of paralleization side effects + if (query['$or']) { + const ors = query['$or']; + return Promise.all(ors.map((aQuery, index) => { + return this.reduceInRelation(className, aQuery, schema).then((aQuery) => { + query['$or'][index] = aQuery; + }); + })).then(() => { + return Promise.resolve(query); }); - } -}; + } -DatabaseController.prototype.addInObjectIdsIds = function(ids = null, query) { - const idsFromString = typeof query.objectId === 'string' ? [query.objectId] : null; - const idsFromEq = query.objectId && query.objectId['$eq'] ? [query.objectId['$eq']] : null; - const idsFromIn = query.objectId && query.objectId['$in'] ? 
query.objectId['$in'] : null; + const promises = Object.keys(query).map((key) => { + const t = schema.getExpectedType(className, key); + if (!t || t.type !== 'Relation') { + return Promise.resolve(query); + } + let queries: ?any[] = null; + if (query[key] && (query[key]['$in'] || query[key]['$ne'] || query[key]['$nin'] || query[key].__type == 'Pointer')) { + // Build the list of queries + queries = Object.keys(query[key]).map((constraintKey) => { + let relatedIds; + let isNegation = false; + if (constraintKey === 'objectId') { + relatedIds = [query[key].objectId]; + } else if (constraintKey == '$in') { + relatedIds = query[key]['$in'].map(r => r.objectId); + } else if (constraintKey == '$nin') { + isNegation = true; + relatedIds = query[key]['$nin'].map(r => r.objectId); + } else if (constraintKey == '$ne') { + isNegation = true; + relatedIds = [query[key]['$ne'].objectId]; + } else { + return; + } + return { + isNegation, + relatedIds + } + }); + } else { + queries = [{isNegation: false, relatedIds: []}]; + } - const allIds = [idsFromString, idsFromEq, idsFromIn, ids].filter(list => list !== null); - const totalLength = allIds.reduce((memo, list) => memo + list.length, 0); + // remove the current queryKey as we don,t need it anymore + delete query[key]; + // execute each query independently to build the list of + // $in / $nin + const promises = queries.map((q) => { + if (!q) { + return Promise.resolve(); + } + return this.owningIds(className, key, q.relatedIds).then((ids) => { + if (q.isNegation) { + this.addNotInObjectIdsIds(ids, query); + } else { + this.addInObjectIdsIds(ids, query); + } + return Promise.resolve(); + }); + }); - let idsIntersection = []; - if (totalLength > 125) { - idsIntersection = intersect.big(allIds); - } else { - idsIntersection = intersect(allIds); + return Promise.all(promises).then(() => { + return Promise.resolve(); + }) + + }) + + return Promise.all(promises).then(() => { + return Promise.resolve(query); + }) } - // Need to make sure we don't clobber existing shorthand $eq constraints on objectId. - if (!('objectId' in query)) { - query.objectId = {}; - } else if (typeof query.objectId === 'string') { - query.objectId = { - $eq: query.objectId - }; + // Modifies query so that it no longer has $relatedTo + // Returns a promise that resolves when query is mutated + reduceRelationKeys(className: string, query: any, queryOptions: any): ?Promise { + + if (query['$or']) { + return Promise.all(query['$or'].map((aQuery) => { + return this.reduceRelationKeys(className, aQuery, queryOptions); + })); + } + + var relatedTo = query['$relatedTo']; + if (relatedTo) { + return this.relatedIds( + relatedTo.object.className, + relatedTo.key, + relatedTo.object.objectId, + queryOptions) + .then((ids) => { + delete query['$relatedTo']; + this.addInObjectIdsIds(ids, query); + return this.reduceRelationKeys(className, query, queryOptions); + }).then(() => {}); + } } - query.objectId['$in'] = idsIntersection; - return query; -} + addInObjectIdsIds(ids: ?Array = null, query: any) { + const idsFromString: ?Array = typeof query.objectId === 'string' ? [query.objectId] : null; + const idsFromEq: ?Array = query.objectId && query.objectId['$eq'] ? [query.objectId['$eq']] : null; + const idsFromIn: ?Array = query.objectId && query.objectId['$in'] ? query.objectId['$in'] : null; -DatabaseController.prototype.addNotInObjectIdsIds = function(ids = [], query) { - const idsFromNin = query.objectId && query.objectId['$nin'] ? 
query.objectId['$nin'] : []; - let allIds = [...idsFromNin,...ids].filter(list => list !== null); + // @flow-disable-next + const allIds: Array> = [idsFromString, idsFromEq, idsFromIn, ids].filter(list => list !== null); + const totalLength = allIds.reduce((memo, list) => memo + list.length, 0); - // make a set and spread to remove duplicates - allIds = [...new Set(allIds)]; + let idsIntersection = []; + if (totalLength > 125) { + idsIntersection = intersect.big(allIds); + } else { + idsIntersection = intersect(allIds); + } - // Need to make sure we don't clobber existing shorthand $eq constraints on objectId. - if (!('objectId' in query)) { - query.objectId = {}; - } else if (typeof query.objectId === 'string') { - query.objectId = { - $eq: query.objectId - }; + // Need to make sure we don't clobber existing shorthand $eq constraints on objectId. + if (!('objectId' in query)) { + query.objectId = { + $in: undefined, + }; + } else if (typeof query.objectId === 'string') { + query.objectId = { + $in: undefined, + $eq: query.objectId + }; + } + query.objectId['$in'] = idsIntersection; + + return query; } - query.objectId['$nin'] = allIds; - return query; -} + addNotInObjectIdsIds(ids: string[] = [], query: any) { + const idsFromNin = query.objectId && query.objectId['$nin'] ? query.objectId['$nin'] : []; + let allIds = [...idsFromNin,...ids].filter(list => list !== null); -// Runs a query on the database. -// Returns a promise that resolves to a list of items. -// Options: -// skip number of results to skip. -// limit limit to this number of results. -// sort an object where keys are the fields to sort by. -// the value is +1 for ascending, -1 for descending. -// count run a count instead of returning results. -// acl restrict this operation with an ACL for the provided array -// of user objectIds and roles. acl: null means no user. -// when this field is not present, don't do anything regarding ACLs. -// TODO: make userIds not needed here. The db adapter shouldn't know -// anything about users, ideally. Then, improve the format of the ACL -// arg to work like the others. -DatabaseController.prototype.find = function(className, query, { - skip, - limit, - acl, - sort = {}, - count, - keys, - op, - distinct, - pipeline, - readPreference -} = {}) { - const isMaster = acl === undefined; - const aclGroup = acl || []; - op = op || (typeof query.objectId == 'string' && Object.keys(query).length === 1 ? 'get' : 'find'); - // Count operation if counting - op = (count === true ? 'count' : op); - - let classExists = true; - return this.loadSchema() - .then(schemaController => { - //Allow volatile classes if querying with Master (for _PushStatus) - //TODO: Move volatile classes concept into mongo adapter, postgres adapter shouldn't care - //that api.parse.com breaks when _PushStatus exists in mongo. - return schemaController.getOneSchema(className, isMaster) - .catch(error => { + // make a set and spread to remove duplicates + allIds = [...new Set(allIds)]; + + // Need to make sure we don't clobber existing shorthand $eq constraints on objectId. + if (!('objectId' in query)) { + query.objectId = { + $nin: undefined, + }; + } else if (typeof query.objectId === 'string') { + query.objectId = { + $nin: undefined, + $eq: query.objectId + }; + } + + query.objectId['$nin'] = allIds; + return query; + } + + // Runs a query on the database. + // Returns a promise that resolves to a list of items. + // Options: + // skip number of results to skip. + // limit limit to this number of results. 
+ // sort an object where keys are the fields to sort by. + // the value is +1 for ascending, -1 for descending. + // count run a count instead of returning results. + // acl restrict this operation with an ACL for the provided array + // of user objectIds and roles. acl: null means no user. + // when this field is not present, don't do anything regarding ACLs. + // TODO: make userIds not needed here. The db adapter shouldn't know + // anything about users, ideally. Then, improve the format of the ACL + // arg to work like the others. + find(className: string, query: any, { + skip, + limit, + acl, + sort = {}, + count, + keys, + op, + distinct, + pipeline, + readPreference + }: any = {}): Promise { + const isMaster = acl === undefined; + const aclGroup = acl || []; + op = op || (typeof query.objectId == 'string' && Object.keys(query).length === 1 ? 'get' : 'find'); + // Count operation if counting + op = (count === true ? 'count' : op); + + let classExists = true; + return this.loadSchema() + .then(schemaController => { + //Allow volatile classes if querying with Master (for _PushStatus) + //TODO: Move volatile classes concept into mongo adapter, postgres adapter shouldn't care + //that api.parse.com breaks when _PushStatus exists in mongo. + return schemaController.getOneSchema(className, isMaster) + .catch(error => { // Behavior for non-existent classes is kinda weird on Parse.com. Probably doesn't matter too much. // For now, pretend the class exists but has no objects, - if (error === undefined) { - classExists = false; - return { fields: {} }; - } - throw error; - }) - .then(schema => { + if (error === undefined) { + classExists = false; + return { fields: {} }; + } + throw error; + }) + .then(schema => { // Parse.com treats queries on _created_at and _updated_at as if they were queries on createdAt and updatedAt, // so duplicate that behavior here. If both are specified, the correct behavior to match Parse.com is to // use the one that appears first in the sort list. - if (sort._created_at) { - sort.createdAt = sort._created_at; - delete sort._created_at; - } - if (sort._updated_at) { - sort.updatedAt = sort._updated_at; - delete sort._updated_at; - } - Object.keys(sort).forEach(fieldName => { - if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) { - throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Cannot sort by ${fieldName}`); + if (sort._created_at) { + sort.createdAt = sort._created_at; + delete sort._created_at; } - if (!SchemaController.fieldNameIsValid(fieldName)) { - throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Invalid field name: ${fieldName}.`); + if (sort._updated_at) { + sort.updatedAt = sort._updated_at; + delete sort._updated_at; } - }); - const queryOptions = { skip, limit, sort, keys, readPreference }; - return (isMaster ? 
Promise.resolve() : schemaController.validatePermission(className, aclGroup, op)) - .then(() => this.reduceRelationKeys(className, query, queryOptions)) - .then(() => this.reduceInRelation(className, query, schemaController)) - .then(() => { - if (!isMaster) { - query = this.addPointerPermissions(schemaController, className, op, query, aclGroup); + const queryOptions = { skip, limit, sort, keys, readPreference }; + Object.keys(sort).forEach(fieldName => { + if (fieldName.match(/^authData\.([a-zA-Z0-9_]+)\.id$/)) { + throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Cannot sort by ${fieldName}`); } - if (!query) { - if (op == 'get') { - throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.'); - } else { - return []; - } + if (!SchemaController.fieldNameIsValid(fieldName)) { + throw new Parse.Error(Parse.Error.INVALID_KEY_NAME, `Invalid field name: ${fieldName}.`); } - if (!isMaster) { - query = addReadACL(query, aclGroup); - } - validateQuery(query); - if (count) { - if (!classExists) { - return 0; - } else { - return this.adapter.count(className, schema, query, readPreference); + }); + return (isMaster ? Promise.resolve() : schemaController.validatePermission(className, aclGroup, op)) + .then(() => this.reduceRelationKeys(className, query, queryOptions)) + .then(() => this.reduceInRelation(className, query, schemaController)) + .then(() => { + if (!isMaster) { + query = this.addPointerPermissions(schemaController, className, op, query, aclGroup); } - } else if (distinct) { - if (!classExists) { - return []; - } else { - return this.adapter.distinct(className, schema, query, distinct); + if (!query) { + if (op == 'get') { + throw new Parse.Error(Parse.Error.OBJECT_NOT_FOUND, 'Object not found.'); + } else { + return []; + } } - } else if (pipeline) { - if (!classExists) { - return []; - } else { - return this.adapter.aggregate(className, schema, pipeline, readPreference); + if (!isMaster) { + query = addReadACL(query, aclGroup); } - } else { - if (!classExists) { - return []; + validateQuery(query); + if (count) { + if (!classExists) { + return 0; + } else { + return this.adapter.count(className, schema, query, readPreference); + } + } else if (distinct) { + if (!classExists) { + return []; + } else { + return this.adapter.distinct(className, schema, query, distinct); + } + } else if (pipeline) { + if (!classExists) { + return []; + } else { + return this.adapter.aggregate(className, schema, pipeline, readPreference); + } } else { return this.adapter.find(className, schema, query, queryOptions) .then(objects => objects.map(object => { @@ -889,157 +936,133 @@ DatabaseController.prototype.find = function(className, query, { throw new Parse.Error(Parse.Error.INTERNAL_SERVER_ERROR, error); }); } - } - }); - }); - }); -}; - -// Transforms a Database format ACL to a REST API format ACL -const untransformObjectACL = ({_rperm, _wperm, ...output}) => { - if (_rperm || _wperm) { - output.ACL = {}; - - (_rperm || []).forEach(entry => { - if (!output.ACL[entry]) { - output.ACL[entry] = { read: true }; - } else { - output.ACL[entry]['read'] = true; - } - }); - - (_wperm || []).forEach(entry => { - if (!output.ACL[entry]) { - output.ACL[entry] = { write: true }; - } else { - output.ACL[entry]['write'] = true; - } - }); + }); + }); + }); } - return output; -} -DatabaseController.prototype.deleteSchema = function(className) { - return this.loadSchema(true) - .then(schemaController => schemaController.getOneSchema(className, true)) - .catch(error => { - if (error === undefined) { - 
return { fields: {} }; - } else { - throw error; - } - }) - .then(schema => { - return this.collectionExists(className) - .then(() => this.adapter.count(className, { fields: {} })) - .then(count => { - if (count > 0) { - throw new Parse.Error(255, `Class ${className} is not empty, contains ${count} objects, cannot drop schema.`); - } - return this.adapter.deleteClass(className); - }) - .then(wasParseCollection => { - if (wasParseCollection) { - const relationFieldNames = Object.keys(schema.fields).filter(fieldName => schema.fields[fieldName].type === 'Relation'); - return Promise.all(relationFieldNames.map(name => this.adapter.deleteClass(joinTableName(className, name)))); - } else { - return Promise.resolve(); - } - }); - }) -} + deleteSchema(className: string): Promise { + return this.loadSchema({ clearCache: true }) + .then(schemaController => schemaController.getOneSchema(className, true)) + .catch(error => { + if (error === undefined) { + return { fields: {} }; + } else { + throw error; + } + }) + .then((schema: any) => { + return this.collectionExists(className) + .then(() => this.adapter.count(className, { fields: {} })) + .then(count => { + if (count > 0) { + throw new Parse.Error(255, `Class ${className} is not empty, contains ${count} objects, cannot drop schema.`); + } + return this.adapter.deleteClass(className); + }) + .then(wasParseCollection => { + if (wasParseCollection) { + const relationFieldNames = Object.keys(schema.fields).filter(fieldName => schema.fields[fieldName].type === 'Relation'); + return Promise.all(relationFieldNames.map(name => this.adapter.deleteClass(joinTableName(className, name)))).then(() => { + return; + }); + } else { + return Promise.resolve(); + } + }); + }) + } -DatabaseController.prototype.addPointerPermissions = function(schema, className, operation, query, aclGroup = []) { + addPointerPermissions(schema: any, className: string, operation: string, query: any, aclGroup: any[] = []) { // Check if class has public permission for operation // If the BaseCLP pass, let go through - if (schema.testBaseCLP(className, aclGroup, operation)) { - return query; - } - const perms = schema.perms[className]; - const field = ['get', 'find'].indexOf(operation) > -1 ? 'readUserFields' : 'writeUserFields'; - const userACL = aclGroup.filter((acl) => { - return acl.indexOf('role:') != 0 && acl != '*'; - }); - // the ACL should have exactly 1 user - if (perms && perms[field] && perms[field].length > 0) { + if (schema.testBaseCLP(className, aclGroup, operation)) { + return query; + } + const perms = schema.perms[className]; + const field = ['get', 'find'].indexOf(operation) > -1 ? 
'readUserFields' : 'writeUserFields'; + const userACL = aclGroup.filter((acl) => { + return acl.indexOf('role:') != 0 && acl != '*'; + }); + // the ACL should have exactly 1 user + if (perms && perms[field] && perms[field].length > 0) { // No user set return undefined // If the length is > 1, that means we didn't de-dupe users correctly - if (userACL.length != 1) { - return; - } - const userId = userACL[0]; - const userPointer = { - "__type": "Pointer", - "className": "_User", - "objectId": userId - }; - - const permFields = perms[field]; - const ors = permFields.map((key) => { - const q = { - [key]: userPointer + if (userACL.length != 1) { + return; + } + const userId = userACL[0]; + const userPointer = { + "__type": "Pointer", + "className": "_User", + "objectId": userId }; - // if we already have a constraint on the key, use the $and - if (query.hasOwnProperty(key)) { - return {'$and': [q, query]}; + + const permFields = perms[field]; + const ors = permFields.map((key) => { + const q = { + [key]: userPointer + }; + // if we already have a constraint on the key, use the $and + if (query.hasOwnProperty(key)) { + return {'$and': [q, query]}; + } + // otherwise just add the constaint + return Object.assign({}, query, { + [`${key}`]: userPointer, + }) + }); + if (ors.length > 1) { + return {'$or': ors}; } - // otherwise just add the constaint - return Object.assign({}, query, { - [`${key}`]: userPointer, - }) - }); - if (ors.length > 1) { - return {'$or': ors}; + return ors[0]; + } else { + return query; } - return ors[0]; - } else { - return query; } -} -// TODO: create indexes on first creation of a _User object. Otherwise it's impossible to -// have a Parse app without it having a _User collection. -DatabaseController.prototype.performInitialization = function() { - const requiredUserFields = { fields: { ...SchemaController.defaultColumns._Default, ...SchemaController.defaultColumns._User } }; - const requiredRoleFields = { fields: { ...SchemaController.defaultColumns._Default, ...SchemaController.defaultColumns._Role } }; - - const userClassPromise = this.loadSchema() - .then(schema => schema.enforceClassExists('_User')) - const roleClassPromise = this.loadSchema() - .then(schema => schema.enforceClassExists('_Role')) - - const usernameUniqueness = userClassPromise - .then(() => this.adapter.ensureUniqueness('_User', requiredUserFields, ['username'])) - .catch(error => { - logger.warn('Unable to ensure uniqueness for usernames: ', error); - throw error; - }); + // TODO: create indexes on first creation of a _User object. Otherwise it's impossible to + // have a Parse app without it having a _User collection. 
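addPointerPermissions above only kicks in when the CLP falls back to readUserFields / writeUserFields: it takes the single non-role, non-'*' entry from the ACL group, builds a _User pointer, and constrains each permitted field to that pointer, OR-ing the alternatives together (or AND-ing with an existing constraint on the same key). A condensed sketch of just the query-rewriting step, assuming the caller has already resolved permFields and userId the way the method does:

// Sketch of the readUserFields / writeUserFields query rewrite above.
function rewriteForPointerPermissions(permFields, userId, query) {
  // Assumes permFields.length > 0, as guarded in the method above.
  const userPointer = { __type: 'Pointer', className: '_User', objectId: userId };
  const ors = permFields.map(key => {
    // An existing constraint on the key must be AND-ed, not overwritten.
    if (Object.prototype.hasOwnProperty.call(query, key)) {
      return { '$and': [{ [key]: userPointer }, query] };
    }
    return Object.assign({}, query, { [key]: userPointer });
  });
  return ors.length > 1 ? { '$or': ors } : ors[0];
}

// With writeUserFields = ['owner', 'editors'], an update on objectId 'xyz'
// becomes an $or of the two pointer-constrained variants:
console.log(JSON.stringify(
  rewriteForPointerPermissions(['owner', 'editors'], 'u1', { objectId: 'xyz' })
));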
+ performInitialization() { + const requiredUserFields = { fields: { ...SchemaController.defaultColumns._Default, ...SchemaController.defaultColumns._User } }; + const requiredRoleFields = { fields: { ...SchemaController.defaultColumns._Default, ...SchemaController.defaultColumns._Role } }; + + const userClassPromise = this.loadSchema() + .then(schema => schema.enforceClassExists('_User')) + const roleClassPromise = this.loadSchema() + .then(schema => schema.enforceClassExists('_Role')) + + const usernameUniqueness = userClassPromise + .then(() => this.adapter.ensureUniqueness('_User', requiredUserFields, ['username'])) + .catch(error => { + logger.warn('Unable to ensure uniqueness for usernames: ', error); + throw error; + }); - const emailUniqueness = userClassPromise - .then(() => this.adapter.ensureUniqueness('_User', requiredUserFields, ['email'])) - .catch(error => { - logger.warn('Unable to ensure uniqueness for user email addresses: ', error); - throw error; - }); + const emailUniqueness = userClassPromise + .then(() => this.adapter.ensureUniqueness('_User', requiredUserFields, ['email'])) + .catch(error => { + logger.warn('Unable to ensure uniqueness for user email addresses: ', error); + throw error; + }); - const roleUniqueness = roleClassPromise - .then(() => this.adapter.ensureUniqueness('_Role', requiredRoleFields, ['name'])) - .catch(error => { - logger.warn('Unable to ensure uniqueness for role name: ', error); - throw error; - }); + const roleUniqueness = roleClassPromise + .then(() => this.adapter.ensureUniqueness('_Role', requiredRoleFields, ['name'])) + .catch(error => { + logger.warn('Unable to ensure uniqueness for role name: ', error); + throw error; + }); - const indexPromise = this.adapter.updateSchemaWithIndexes(); + const indexPromise = this.adapter.updateSchemaWithIndexes(); - // Create tables for volatile classes - const adapterInit = this.adapter.performInitialization({ VolatileClassesSchemas: SchemaController.VolatileClassesSchemas }); - return Promise.all([usernameUniqueness, emailUniqueness, roleUniqueness, adapterInit, indexPromise]); -} + // Create tables for volatile classes + const adapterInit = this.adapter.performInitialization({ VolatileClassesSchemas: SchemaController.VolatileClassesSchemas }); + return Promise.all([usernameUniqueness, emailUniqueness, roleUniqueness, adapterInit, indexPromise]); + } -function joinTableName(className, key) { - return `_Join:${key}:${className}`; + static _validateQuery: ((any) => void) } -// Expose validateQuery for tests -DatabaseController._validateQuery = validateQuery; module.exports = DatabaseController; +// Expose validateQuery for tests +module.exports._validateQuery = validateQuery; diff --git a/src/Controllers/HooksController.js b/src/Controllers/HooksController.js index 6fcdfc3c91..70598bb125 100644 --- a/src/Controllers/HooksController.js +++ b/src/Controllers/HooksController.js @@ -1,7 +1,9 @@ /** @flow weak */ import * as triggers from "../triggers"; +// @flow-disable-next import * as Parse from "parse/node"; +// @flow-disable-next import * as request from "request"; import { logger } from '../logger'; @@ -28,7 +30,7 @@ export class HooksController { } getFunction(functionName) { - return this._getHooks({ functionName: functionName }, 1).then(results => results[0]); + return this._getHooks({ functionName: functionName }).then(results => results[0]); } getFunctions() { @@ -36,7 +38,7 @@ export class HooksController { } getTrigger(className, triggerName) { - return this._getHooks({ className: className, 
triggerName: triggerName }, 1).then(results => results[0]); + return this._getHooks({ className: className, triggerName: triggerName }).then(results => results[0]); } getTriggers() { diff --git a/src/Controllers/SchemaController.js b/src/Controllers/SchemaController.js index ab579b6aa2..2233886ef6 100644 --- a/src/Controllers/SchemaController.js +++ b/src/Controllers/SchemaController.js @@ -1,3 +1,4 @@ +// @flow // This class handles schema validation, persistence, and modification. // // Each individual Schema object should be immutable. The helpers to @@ -13,9 +14,19 @@ // DatabaseController. This will let us replace the schema logic for // different databases. // TODO: hide all schema logic inside the database adapter. +// @flow-disable-next const Parse = require('parse/node').Parse; - -const defaultColumns = Object.freeze({ +import { StorageAdapter } from '../Adapters/Storage/StorageAdapter'; +import DatabaseController from './DatabaseController'; +import type { + Schema, + SchemaFields, + ClassLevelPermissions, + SchemaField, + LoadSchemaOptions, +} from './types'; + +const defaultColumns: {[string]: SchemaFields} = Object.freeze({ // Contain the default columns for every parse object type (except _Join collection) _Default: { "objectId": {type:'String'}, @@ -158,7 +169,7 @@ function verifyPermissionKey(key) { } const CLPValidKeys = Object.freeze(['find', 'count', 'get', 'create', 'update', 'delete', 'addField', 'readUserFields', 'writeUserFields']); -function validateCLP(perms, fields) { +function validateCLP(perms: ClassLevelPermissions, fields: SchemaFields) { if (!perms) { return; } @@ -166,9 +177,13 @@ function validateCLP(perms, fields) { if (CLPValidKeys.indexOf(operation) == -1) { throw new Parse.Error(Parse.Error.INVALID_JSON, `${operation} is not a valid operation for class level permissions`); } + if (!perms[operation]) { + return; + } if (operation === 'readUserFields' || operation === 'writeUserFields') { if (!Array.isArray(perms[operation])) { + // @flow-disable-next throw new Parse.Error(Parse.Error.INVALID_JSON, `'${perms[operation]}' is not a valid value for class level permissions ${operation}`); } else { perms[operation].forEach((key) => { @@ -180,10 +195,13 @@ function validateCLP(perms, fields) { return; } + // @flow-disable-next Object.keys(perms[operation]).forEach((key) => { verifyPermissionKey(key); + // @flow-disable-next const perm = perms[operation][key]; if (perm !== true) { + // @flow-disable-next throw new Parse.Error(Parse.Error.INVALID_JSON, `'${perm}' is not a valid value for class level permissions ${operation}:${key}:${perm}`); } }); @@ -191,7 +209,7 @@ function validateCLP(perms, fields) { } const joinClassRegex = /^_Join:[A-Za-z0-9_]+:[A-Za-z0-9_]+/; const classAndFieldRegex = /^[A-Za-z][A-Za-z0-9_]*$/; -function classNameIsValid(className) { +function classNameIsValid(className: string): boolean { // Valid classes must: return ( // Be one of _User, _Installation, _Role, _Session OR @@ -204,12 +222,12 @@ function classNameIsValid(className) { } // Valid fields must be alpha-numeric, and not start with an underscore or number -function fieldNameIsValid(fieldName) { +function fieldNameIsValid(fieldName: string): boolean { return classAndFieldRegex.test(fieldName); } // Checks that it's not trying to clobber one of the default fields of the class. 
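The // @flow-disable-next comments that show up throughout these files suppress the single Flow error reported on the following line (untyped requires such as parse/node, or spots where a value has been checked in a way Flow cannot follow). This relies on the project's Flow configuration registering that marker as a suppress comment, which is assumed here. An illustrative usage, not taken from the codebase:

// @flow
// Illustrative only: each marker silences one error on the line after it.

// @flow-disable-next
import * as Parse from 'parse/node'; // untyped module; Flow would otherwise flag it

function targetClassOf(field: { type: string, targetClass?: ?string }): string {
  // @flow-disable-next
  return field.targetClass; // possibly-undefined value returned as string; deliberately suppressed
}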
-function fieldNameIsValidForClass(fieldName, className) { +function fieldNameIsValidForClass(fieldName: string, className: string): boolean { if (!fieldNameIsValid(fieldName)) { return false; } @@ -222,7 +240,7 @@ function fieldNameIsValidForClass(fieldName, className) { return true; } -function invalidClassNameMessage(className) { +function invalidClassNameMessage(className: string): string { return 'Invalid classname: ' + className + ', classnames can only have alphanumeric characters and _, and must start with an alpha character '; } @@ -261,7 +279,7 @@ const fieldTypeIsInvalid = ({ type, targetClass }) => { return undefined; } -const convertSchemaToAdapterSchema = schema => { +const convertSchemaToAdapterSchema = (schema: any) => { schema = injectDefaultSchema(schema); delete schema.fields.ACL; schema.fields._rperm = { type: 'Array' }; @@ -294,8 +312,8 @@ const convertAdapterSchemaToParseSchema = ({...schema}) => { return schema; } -const injectDefaultSchema = ({className, fields, classLevelPermissions, indexes}) => { - const defaultSchema = { +const injectDefaultSchema = ({className, fields, classLevelPermissions, indexes}: Schema) => { + const defaultSchema: Schema = { className, fields: { ...defaultColumns._Default, @@ -329,11 +347,12 @@ const _JobScheduleSchema = convertSchemaToAdapterSchema(injectDefaultSchema({ })); const _AudienceSchema = convertSchemaToAdapterSchema(injectDefaultSchema({ className: "_Audience", - fields: defaultColumns._Audience + fields: defaultColumns._Audience, + classLevelPermissions: {} })); const VolatileClassesSchemas = [_HooksSchema, _JobStatusSchema, _JobScheduleSchema, _PushStatusSchema, _GlobalConfigSchema, _AudienceSchema]; -const dbTypeMatchesObjectType = (dbType, objectType) => { +const dbTypeMatchesObjectType = (dbType: SchemaField | string, objectType: SchemaField) => { if (dbType.type !== objectType.type) return false; if (dbType.targetClass !== objectType.targetClass) return false; if (dbType === objectType.type) return true; @@ -341,22 +360,27 @@ const dbTypeMatchesObjectType = (dbType, objectType) => { return false; } -const typeToString = (type) => { +const typeToString = (type: SchemaField | string): string => { + if (typeof type === 'string') { + return type; + } if (type.targetClass) { return `${type.type}<${type.targetClass}>`; } - return `${type.type || type}`; + return `${type.type}`; } // Stores the entire schema of the app in a weird hybrid format somewhere between // the mongo format and the Parse format. Soon, this will all be Parse format. 
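typeToString above now accepts either a bare string or a SchemaField object and only appends the angle-bracket target class when one is present, so Pointer and Relation fields print with their target. A quick worked run of that logic with illustrative inputs:

// Sketch of how schema field types render with typeToString-style logic.
function typeToString(type) {
  if (typeof type === 'string') {
    return type;                                // legacy bare-string form
  }
  if (type.targetClass) {
    return `${type.type}<${type.targetClass}>`; // e.g. Pointer<_User>
  }
  return `${type.type}`;
}

console.log(typeToString('String'));                                  // String
console.log(typeToString({ type: 'Number' }));                        // Number
console.log(typeToString({ type: 'Pointer', targetClass: '_User' })); // Pointer<_User>
console.log(typeToString({ type: 'Relation', targetClass: 'Post' })); // Relation<Post>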
export default class SchemaController { - _dbAdapter; - data; - perms; - indexes; - - constructor(databaseAdapter, schemaCache) { + _dbAdapter: StorageAdapter; + data: any; + perms: any; + indexes: any; + _cache: any; + reloadDataPromise: Promise; + + constructor(databaseAdapter: StorageAdapter, schemaCache: any) { this._dbAdapter = databaseAdapter; this._cache = schemaCache; // this.data[className][fieldName] tells you the type of that field, in mongo format @@ -367,7 +391,7 @@ export default class SchemaController { this.indexes = {}; } - reloadData(options = {clearCache: false}) { + reloadData(options: LoadSchemaOptions = {clearCache: false}): Promise { let promise = Promise.resolve(); if (options.clearCache) { promise = promise.then(() => { @@ -378,9 +402,7 @@ export default class SchemaController { return this.reloadDataPromise; } this.reloadDataPromise = promise.then(() => { - return this.getAllClasses(options); - }) - .then(allSchemas => { + return this.getAllClasses(options).then((allSchemas) => { const data = {}; const perms = {}; const indexes = {}; @@ -392,7 +414,7 @@ export default class SchemaController { // Inject the in-memory classes volatileClasses.forEach(className => { - const schema = injectDefaultSchema({ className }); + const schema = injectDefaultSchema({ className, fields: {}, classLevelPermissions: {} }); data[className] = schema.fields; perms[className] = schema.classLevelPermissions; indexes[className] = schema.indexes; @@ -407,11 +429,12 @@ export default class SchemaController { this.indexes = {}; delete this.reloadDataPromise; throw err; - }); + }) + }).then(() => {}); return this.reloadDataPromise; } - getAllClasses(options = {clearCache: false}) { + getAllClasses(options: LoadSchemaOptions = {clearCache: false}): Promise> { let promise = Promise.resolve(); if (options.clearCache) { promise = this._cache.clear(); @@ -432,7 +455,7 @@ export default class SchemaController { }); } - getOneSchema(className, allowVolatileClasses = false, options = {clearCache: false}) { + getOneSchema(className: string, allowVolatileClasses: boolean = false, options: LoadSchemaOptions = {clearCache: false}): Promise { let promise = Promise.resolve(); if (options.clearCache) { promise = this._cache.clear(); @@ -468,7 +491,7 @@ export default class SchemaController { // on success, and rejects with an error on fail. Ensure you // have authorization (master key, or client class creation // enabled) before calling this function. - addClassIfNotExists(className, fields = {}, classLevelPermissions, indexes = {}) { + addClassIfNotExists(className: string, fields: SchemaFields = {}, classLevelPermissions: any, indexes: any = {}): Promise { var validationError = this.validateNewClass(className, fields, classLevelPermissions); if (validationError) { return Promise.reject(validationError); @@ -490,7 +513,7 @@ export default class SchemaController { }); } - updateClass(className, submittedFields, classLevelPermissions, indexes, database) { + updateClass(className: string, submittedFields: SchemaFields, classLevelPermissions: any, indexes: any, database: DatabaseController) { return this.getOneSchema(className) .then(schema => { const existingFields = schema.fields; @@ -514,7 +537,7 @@ export default class SchemaController { // Finally we have checked to make sure the request is valid and we can start deleting fields. // Do all deletions first, then a single save to _SCHEMA collection to handle all additions. 
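For reference while reading the class above: the Schema, SchemaFields and ClassLevelPermissions types it consumes are declared in the new src/Controllers/types.js later in this diff. A hypothetical value that satisfies those types, for a made-up GameScore class (the class name, fields and relative import path are all illustrative assumptions):

// @flow
import type { Schema } from './types'; // assumes a sibling of src/Controllers/types.js

const gameScoreSchema: Schema = {
  className: 'GameScore',
  fields: {
    objectId:  { type: 'String' },
    createdAt: { type: 'Date' },
    updatedAt: { type: 'Date' },
    ACL:       { type: 'ACL' },
    score:     { type: 'Number' },
    player:    { type: 'Pointer', targetClass: '_User' },
  },
  classLevelPermissions: {
    find:   { '*': true },
    get:    { '*': true },
    create: { 'role:admin': true },
    update: { 'role:admin': true },
    delete: { 'role:admin': true },
    addField: {},
    readUserFields: [],
    writeUserFields: ['player'],
  },
};

export default gameScoreSchema;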
- const deletedFields = []; + const deletedFields: string[] = []; const insertedFields = []; Object.keys(submittedFields).forEach(fieldName => { if (submittedFields[fieldName].__op === 'Delete') { @@ -542,7 +565,7 @@ export default class SchemaController { .then(() => this.reloadData({ clearCache: true })) //TODO: Move this logic into the database adapter .then(() => { - const reloadedSchema = { + const reloadedSchema: Schema = { className: className, fields: this.data[className], classLevelPermissions: this.perms[className], @@ -564,7 +587,7 @@ export default class SchemaController { // Returns a promise that resolves successfully to the new schema // object or fails with a reason. - enforceClassExists(className) { + enforceClassExists(className: string): Promise { if (this.data[className]) { return Promise.resolve(this); } @@ -593,7 +616,7 @@ export default class SchemaController { }); } - validateNewClass(className, fields = {}, classLevelPermissions) { + validateNewClass(className: string, fields: SchemaFields = {}, classLevelPermissions: any): any { if (this.data[className]) { throw new Parse.Error(Parse.Error.INVALID_CLASS_NAME, `Class ${className} already exists.`); } @@ -606,7 +629,7 @@ export default class SchemaController { return this.validateSchemaData(className, fields, classLevelPermissions, []); } - validateSchemaData(className, fields, classLevelPermissions, existingFieldNames) { + validateSchemaData(className: string, fields: SchemaFields, classLevelPermissions: ClassLevelPermissions, existingFieldNames: Array) { for (const fieldName in fields) { if (existingFieldNames.indexOf(fieldName) < 0) { if (!fieldNameIsValid(fieldName)) { @@ -641,7 +664,7 @@ export default class SchemaController { } // Sets the Class-level permissions for a given className, which must exist. - setPermissions(className, perms, newSchema) { + setPermissions(className: string, perms: any, newSchema: SchemaFields) { if (typeof perms === 'undefined') { return Promise.resolve(); } @@ -653,7 +676,7 @@ export default class SchemaController { // object if the provided className-fieldName-type tuple is valid. // The className must already be validated. // If 'freeze' is true, refuse to update the schema for this field. - enforceFieldExists(className, fieldName, type) { + enforceFieldExists(className: string, fieldName: string, type: string | SchemaField) { if (fieldName.indexOf(".") > 0) { // subdocument key (x.y) => ok if x is of type 'object' fieldName = fieldName.split(".")[ 0 ]; @@ -698,7 +721,11 @@ export default class SchemaController { return this.reloadData({ clearCache: true }); }).then(() => { // Ensure that the schema now validates - if (!dbTypeMatchesObjectType(this.getExpectedType(className, fieldName), type)) { + const expectedType = this.getExpectedType(className, fieldName); + if (typeof type === 'string') { + type = { type }; + } + if (!expectedType || !dbTypeMatchesObjectType(expectedType, type)) { throw new Parse.Error(Parse.Error.INVALID_JSON, `Could not add field ${fieldName}`); } // Remove the cached schema @@ -709,7 +736,7 @@ export default class SchemaController { } // maintain compatibility - deleteField(fieldName, className, database) { + deleteField(fieldName: string, className: string, database: DatabaseController) { return this.deleteFields([fieldName], className, database); } @@ -720,7 +747,7 @@ export default class SchemaController { // Passing the database and prefix is necessary in order to drop relation collections // and remove fields from objects. 
Ideally the database would belong to // a database adapter and this function would close over it or access it via member. - deleteFields(fieldNames, className, database) { + deleteFields(fieldNames: Array, className: string, database: DatabaseController) { if (!classNameIsValid(className)) { throw new Parse.Error(Parse.Error.INVALID_CLASS_NAME, invalidClassNameMessage(className)); } @@ -770,7 +797,7 @@ export default class SchemaController { // Validates an object provided in REST format. // Returns a promise that resolves to the new schema if this object is // valid. - validateObject(className, object, query) { + validateObject(className: string, object: any, query: any) { let geocount = 0; let promise = this.enforceClassExists(className); for (const fieldName in object) { @@ -804,7 +831,7 @@ export default class SchemaController { } // Validates that all the properties are set for the object - validateRequiredColumns(className, object, query) { + validateRequiredColumns(className: string, object: any, query: any) { const columns = requiredColumns[className]; if (!columns || columns.length == 0) { return Promise.resolve(this); @@ -831,7 +858,7 @@ export default class SchemaController { } // Validates the base CLP for an operation - testBaseCLP(className, aclGroup, operation) { + testBaseCLP(className: string, aclGroup: string[], operation: string) { if (!this.perms[className] || !this.perms[className][operation]) { return true; } @@ -849,7 +876,7 @@ export default class SchemaController { } // Validates an operation passes class-level-permissions set in the schema - validatePermission(className, aclGroup, operation) { + validatePermission(className: string, aclGroup: string[], operation: string) { if (this.testBaseCLP(className, aclGroup, operation)) { return Promise.resolve(); @@ -897,7 +924,7 @@ export default class SchemaController { // Returns the expected type for a className+key combination // or undefined if the schema is not set - getExpectedType(className, fieldName) { + getExpectedType(className: string, fieldName: string): ?(SchemaField | string) { if (this.data && this.data[className]) { const expectedType = this.data[className][fieldName] return expectedType === 'map' ? 'Object' : expectedType; @@ -906,13 +933,13 @@ export default class SchemaController { } // Checks if a given class is in the schema. - hasClass(className) { + hasClass(className: string) { return this.reloadData().then(() => !!(this.data[className])); } } // Returns a promise for a new Schema. -const load = (dbAdapter, schemaCache, options) => { +const load = (dbAdapter: StorageAdapter, schemaCache: any, options: any): Promise => { const schema = new SchemaController(dbAdapter, schemaCache); return schema.reloadData(options).then(() => schema); } @@ -922,8 +949,9 @@ const load = (dbAdapter, schemaCache, options) => { // does not include the default fields, as it is intended to be passed // to mongoSchemaFromFieldsAndClassName. No validation is done here, it // is done in mongoSchemaFromFieldsAndClassName. -function buildMergedSchemaObject(existingFields, putRequest) { +function buildMergedSchemaObject(existingFields: SchemaFields, putRequest: any): SchemaFields { const newSchema = {}; + // @flow-disable-next const sysSchemaField = Object.keys(defaultColumns).indexOf(existingFields._id) === -1 ? 
[] : Object.keys(defaultColumns[existingFields._id]); for (const oldField in existingFields) { if (oldField !== '_id' && oldField !== 'ACL' && oldField !== 'updatedAt' && oldField !== 'createdAt' && oldField !== 'objectId') { @@ -960,7 +988,7 @@ function thenValidateRequiredColumns(schemaPromise, className, object, query) { // type system. // The output should be a valid schema value. // TODO: ensure that this is compatible with the format used in Open DB -function getType(obj) { +function getType(obj: any): ?(SchemaField | string) { const type = typeof obj; switch(type) { case 'boolean': @@ -986,7 +1014,7 @@ function getType(obj) { // This gets the type for non-JSON types like pointers and files, but // also gets the appropriate type for $ operators. // Returns null if the type is unknown. -function getObjectType(obj) { +function getObjectType(obj): ?(SchemaField | string) { if (obj instanceof Array) { return 'Array'; } @@ -1074,4 +1102,5 @@ export { defaultColumns, convertSchemaToAdapterSchema, VolatileClassesSchemas, + SchemaController, }; diff --git a/src/Controllers/types.js b/src/Controllers/types.js new file mode 100644 index 0000000000..1e0c484ae0 --- /dev/null +++ b/src/Controllers/types.js @@ -0,0 +1,29 @@ +export type LoadSchemaOptions = { + clearCache: boolean +}; + +export type SchemaField = { + type: string; + targetClass?: ?string; +} + +export type SchemaFields = { [string]: SchemaField } + +export type Schema = { + className: string, + fields: SchemaFields, + classLevelPermissions: ClassLevelPermissions, + indexes?: ?any +}; + +export type ClassLevelPermissions = { + find?: {[string]: boolean}; + count?: {[string]: boolean}; + get?: {[string]: boolean}; + create?: {[string]: boolean}; + update?: {[string]: boolean}; + delete?: {[string]: boolean}; + addField?: {[string]: boolean}; + readUserFields?: string[]; + writeUserFields?: string[]; +}; diff --git a/src/Push/PushWorker.js b/src/Push/PushWorker.js index afd4416dc9..92729ec45a 100644 --- a/src/Push/PushWorker.js +++ b/src/Push/PushWorker.js @@ -1,4 +1,5 @@ // @flow +// @flow-disable-next import deepcopy from 'deepcopy'; import AdaptableController from '../Controllers/AdaptableController'; import { master } from '../Auth';