Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions spec/GridFSBucketStorageAdapter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ describe_only_db('mongo')('GridFSBucket', () => {
enableSchemaHooks: true,
schemaCacheTtl: 5000,
maxTimeMS: 30000,
batchSize: 500,
disableIndexFieldValidation: true,
logClientEvents: [{ name: 'commandStarted' }],
createIndexUserUsername: true,
Expand All @@ -46,6 +47,13 @@ describe_only_db('mongo')('GridFSBucket', () => {
expect(db.options?.retryWrites).toEqual(true);
});

it('should store batchSize and filter it from MongoClient options', async () => {
const gfsAdapter = new GridFSBucketAdapter(databaseURI, { batchSize: 500 });
expect(gfsAdapter._batchSize).toEqual(500);
// Verify batchSize is filtered from MongoClient options
expect(gfsAdapter._mongoOptions.batchSize).toBeUndefined();
});

it('should save an encrypted file that can only be decrypted by a GridFS adapter with the encryptionKey', async () => {
const unencryptedAdapter = new GridFSBucketAdapter(databaseURI);
const encryptedAdapter = new GridFSBucketAdapter(
Expand Down
52 changes: 52 additions & 0 deletions spec/MongoStorageAdapter.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,58 @@ describe_only_db('mongo')('MongoStorageAdapter', () => {
);
});

it('passes batchSize to the MongoDB driver find() call', async () => {
const batchSize = 50;
const adapter = new MongoStorageAdapter({
uri: databaseURI,
mongoOptions: { batchSize },
});
await adapter.createObject('BatchTest', { fields: {} }, { objectId: 'obj1' });

// Spy on the MongoDB driver's Collection.prototype.find to verify batchSize is forwarded
const originalFind = Collection.prototype.find;
let capturedOptions;
spyOn(Collection.prototype, 'find').and.callFake(function (query, options) {
capturedOptions = options;
return originalFind.call(this, query, options);
});

await adapter.find('BatchTest', { fields: {} }, {}, {});
expect(capturedOptions).toBeDefined();
expect(capturedOptions.batchSize).toEqual(50);
});

it('passes batchSize to the MongoDB driver aggregate() call', async () => {
const batchSize = 50;
const adapter = new MongoStorageAdapter({
uri: databaseURI,
mongoOptions: { batchSize },
});
await adapter.createObject('AggBatchTest', { fields: { count: { type: 'Number' } } }, { objectId: 'obj1', count: 1 });

// Spy on the MongoDB driver's Collection.prototype.aggregate to verify batchSize is forwarded
const originalAggregate = Collection.prototype.aggregate;
let capturedOptions;
spyOn(Collection.prototype, 'aggregate').and.callFake(function (pipeline, options) {
capturedOptions = options;
return originalAggregate.call(this, pipeline, options);
});

await adapter.aggregate('AggBatchTest', { fields: { count: { type: 'Number' } } }, [{ $match: {} }]);
expect(capturedOptions).toBeDefined();
expect(capturedOptions.batchSize).toEqual(50);
});

it('defaults batchSize to 1000', async () => {
await reconfigureServer({
databaseURI: databaseURI,
collectionPrefix: 'test_',
databaseAdapter: undefined,
});
const adapter = Config.get(Parse.applicationId).database.adapter;
expect(adapter._batchSize).toEqual(1000);
});

it('stores pointers with a _p_ prefix', done => {
const obj = {
objectId: 'bar',
Expand Down
9 changes: 5 additions & 4 deletions src/Adapters/Files/GridFSBucketAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
const defaultMongoOptions = {};
const _mongoOptions = Object.assign(defaultMongoOptions, mongoOptions);
this._clientMetadata = mongoOptions.clientMetadata;
this._batchSize = mongoOptions.batchSize;
// Remove Parse Server-specific options that should not be passed to MongoDB client
for (const key of ParseServerDatabaseOptions) {
delete _mongoOptions[key];
Expand Down Expand Up @@ -135,7 +136,7 @@ export class GridFSBucketAdapter extends FilesAdapter {

async deleteFile(filename: string) {
const bucket = await this._getBucket();
const documents = await bucket.find({ filename }).toArray();
const documents = await bucket.find({ filename }, { batchSize: this._batchSize }).toArray();
if (documents.length === 0) {
throw new Error('FileNotFound');
}
Expand Down Expand Up @@ -196,7 +197,7 @@ export class GridFSBucketAdapter extends FilesAdapter {
if (options.fileNames !== undefined) {
fileNames = options.fileNames;
} else {
const fileNamesIterator = await bucket.find().toArray();
const fileNamesIterator = await bucket.find({}, { batchSize: this._batchSize }).toArray();
fileNamesIterator.forEach(file => {
fileNames.push(file.filename);
});
Expand Down Expand Up @@ -226,7 +227,7 @@ export class GridFSBucketAdapter extends FilesAdapter {

async getMetadata(filename) {
const bucket = await this._getBucket();
const files = await bucket.find({ filename }).toArray();
const files = await bucket.find({ filename }, { batchSize: this._batchSize }).toArray();
if (files.length === 0) {
return {};
}
Expand All @@ -236,7 +237,7 @@ export class GridFSBucketAdapter extends FilesAdapter {

async handleFileStream(filename: string, req, res, contentType) {
const bucket = await this._getBucket();
const files = await bucket.find({ filename }).toArray();
const files = await bucket.find({ filename }, { batchSize: this._batchSize }).toArray();
if (files.length === 0) {
throw new Error('FileNotFound');
}
Expand Down
9 changes: 7 additions & 2 deletions src/Adapters/Storage/Mongo/MongoCollection.js
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ export default class MongoCollection {
sort,
keys,
maxTimeMS,
batchSize,
readPreference,
hint,
caseInsensitive,
Expand All @@ -39,6 +40,7 @@ export default class MongoCollection {
sort,
keys,
maxTimeMS,
batchSize,
readPreference,
hint,
caseInsensitive,
Expand Down Expand Up @@ -68,6 +70,7 @@ export default class MongoCollection {
sort,
keys,
maxTimeMS,
batchSize,
readPreference,
hint,
caseInsensitive,
Expand All @@ -94,6 +97,7 @@ export default class MongoCollection {
sort,
keys,
maxTimeMS,
batchSize,
readPreference,
hint,
caseInsensitive,
Expand All @@ -108,6 +112,7 @@ export default class MongoCollection {
readPreference,
hint,
comment,
batchSize,
});

if (keys) {
Expand Down Expand Up @@ -153,9 +158,9 @@ export default class MongoCollection {
return this._mongoCollection.distinct(field, query);
}

aggregate(pipeline, { maxTimeMS, readPreference, hint, explain, comment } = {}) {
aggregate(pipeline, { maxTimeMS, batchSize, readPreference, hint, explain, comment } = {}) {
return this._mongoCollection
.aggregate(pipeline, { maxTimeMS, readPreference, hint, explain, comment })
.aggregate(pipeline, { maxTimeMS, batchSize, readPreference, hint, explain, comment })
.toArray();
}

Expand Down
6 changes: 6 additions & 0 deletions src/Adapters/Storage/Mongo/MongoStorageAdapter.js
Original file line number Diff line number Diff line change
Expand Up @@ -170,6 +170,7 @@ export class MongoStorageAdapter implements StorageAdapter {
database: any;
client: MongoClient;
_maxTimeMS: ?number;
_batchSize: ?number;
canSortOnJoinTables: boolean;
enableSchemaHooks: boolean;
schemaCacheTtl: ?number;
Expand All @@ -182,6 +183,8 @@ export class MongoStorageAdapter implements StorageAdapter {

// MaxTimeMS is not a global MongoDB client option, it is applied per operation.
this._maxTimeMS = mongoOptions.maxTimeMS;
// BatchSize is not a global MongoDB client option, it is applied per cursor operation.
this._batchSize = mongoOptions.batchSize;
this.canSortOnJoinTables = true;
this.enableSchemaHooks = !!mongoOptions.enableSchemaHooks;
this.schemaCacheTtl = mongoOptions.schemaCacheTtl;
Expand Down Expand Up @@ -735,6 +738,7 @@ export class MongoStorageAdapter implements StorageAdapter {
sort: mongoSort,
keys: mongoKeys,
maxTimeMS: this._maxTimeMS,
batchSize: this._batchSize,
readPreference,
hint,
caseInsensitive,
Expand Down Expand Up @@ -820,6 +824,7 @@ export class MongoStorageAdapter implements StorageAdapter {
.then(collection =>
collection.find(query, {
maxTimeMS: this._maxTimeMS,
batchSize: this._batchSize,
})
)
.catch(err => this.handleError(err));
Expand Down Expand Up @@ -909,6 +914,7 @@ export class MongoStorageAdapter implements StorageAdapter {
collection.aggregate(pipeline, {
readPreference,
maxTimeMS: this._maxTimeMS,
batchSize: this._batchSize,
hint,
explain,
comment,
Expand Down
6 changes: 6 additions & 0 deletions src/Options/Definitions.js
Original file line number Diff line number Diff line change
Expand Up @@ -1097,6 +1097,12 @@ module.exports.DatabaseOptions = {
help: 'The MongoDB driver option to specify the amount of time in milliseconds to wait for a connection attempt to finish before trying the next address when using the autoSelectFamily option. If set to a positive integer less than 10, the value 10 is used instead.',
action: parsers.numberParser('autoSelectFamilyAttemptTimeout'),
},
batchSize: {
env: 'PARSE_SERVER_DATABASE_BATCH_SIZE',
help: 'The number of documents per batch for MongoDB cursor `getMore` operations. A lower value reduces memory usage per batch; a higher value reduces the number of network round-trips.',
action: parsers.numberParser('batchSize'),
default: 1000,
},
clientMetadata: {
env: 'PARSE_SERVER_DATABASE_CLIENT_METADATA',
help: "Custom metadata to append to database client connections for identifying Parse Server instances in database logs. If set, this metadata will be visible in database logs during connection handshakes. This can help with debugging and monitoring in deployments with multiple database clients. Set `name` to identify your application (e.g., 'MyApp') and `version` to your application's version. Leave undefined (default) to disable this feature and avoid the additional data transfer overhead.",
Expand Down
1 change: 1 addition & 0 deletions src/Options/docs.js

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions src/Options/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -669,6 +669,9 @@ export interface DatabaseOptions {
schemaCacheTtl: ?number;
/* The MongoDB driver option to set whether to retry failed writes. */
retryWrites: ?boolean;
/* The number of documents per batch for MongoDB cursor `getMore` operations. A lower value reduces memory usage per batch; a higher value reduces the number of network round-trips.
:DEFAULT: 1000 */
batchSize: ?number;
/* The MongoDB driver option to set a cumulative time limit in milliseconds for processing operations on a cursor. */
maxTimeMS: ?number;
/* The MongoDB driver option to set the maximum replication lag for reads from secondary nodes.*/
Expand Down
18 changes: 17 additions & 1 deletion src/ParseServer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ var batch = require('./batch'),
fs = require('fs');

import { ParseServerOptions, LiveQueryServerOptions } from './Options';
import defaults from './defaults';
import defaults, { DatabaseOptionDefaults } from './defaults';
import * as logging from './logger';
import Config from './Config';
import PromiseRouter from './PromiseRouter';
Expand Down Expand Up @@ -593,6 +593,22 @@ function injectDefaults(options: ParseServerOptions) {
}
});

// Inject defaults for database options; only when no explicit database adapter is set,
// because an explicit adapter manages its own options and passing databaseOptions alongside
// it would cause a conflict error in getDatabaseController.
if (!options.databaseAdapter) {
if (options.databaseOptions == null) {
options.databaseOptions = {};
}
if (typeof options.databaseOptions === 'object' && !Array.isArray(options.databaseOptions)) {
Object.keys(DatabaseOptionDefaults).forEach(key => {
if (!Object.prototype.hasOwnProperty.call(options.databaseOptions, key)) {
options.databaseOptions[key] = DatabaseOptionDefaults[key];
}
});
}
}

if (!Object.prototype.hasOwnProperty.call(options, 'serverURL')) {
options.serverURL = `http://localhost:${options.port}${options.mountPath}`;
}
Expand Down
11 changes: 10 additions & 1 deletion src/defaults.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { nullParser } from './Options/parsers';
const { ParseServerOptions } = require('./Options/Definitions');
const { ParseServerOptions, DatabaseOptions } = require('./Options/Definitions');
const logsFolder = (() => {
let folder = './logs/';
if (typeof process !== 'undefined' && process.env.TESTING === '1') {
Expand Down Expand Up @@ -34,10 +34,19 @@ const computedDefaults = {
export default Object.assign({}, DefinitionDefaults, computedDefaults);
export const DefaultMongoURI = DefinitionDefaults.databaseURI;

// Default values for DatabaseOptions, derived from their definitions in
// src/Options/Definitions.js. Only options that declare a `default` field
// contribute an entry; all other options are omitted.
export const DatabaseOptionDefaults = (() => {
  const defaults = {};
  for (const [key, def] of Object.entries(DatabaseOptions)) {
    if (Object.prototype.hasOwnProperty.call(def, 'default')) {
      defaults[key] = def.default;
    }
  }
  return defaults;
})();

// Parse Server-specific database options that should be filtered out
// before passing to MongoDB client
export const ParseServerDatabaseOptions = [
'allowPublicExplain',
'batchSize',
'clientMetadata',
'createIndexRoleName',
'createIndexUserEmail',
Expand Down
1 change: 1 addition & 0 deletions types/Options/index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -244,6 +244,7 @@ export interface FileUploadOptions {
export interface DatabaseOptions {
// Parse Server custom options
allowPublicExplain?: boolean;
batchSize?: number;
createIndexRoleName?: boolean;
createIndexUserEmail?: boolean;
createIndexUserEmailCaseInsensitive?: boolean;
Expand Down
Loading