diff --git a/packages/store/node-server-sdk-mongodb/CHANGELOG.md b/packages/store/node-server-sdk-mongodb/CHANGELOG.md new file mode 100644 index 000000000..1bd065b02 --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/CHANGELOG.md @@ -0,0 +1,40 @@ +# Changelog + +All notable changes to the LaunchDarkly Server-Side SDK for Node.js MongoDB store will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [1.0.0] - 2025-09-27 + +### Added +- Initial release of the MongoDB big segment store for the LaunchDarkly Server-Side SDK for Node.js +- Support for MongoDB big segment data storage and retrieval +- Configurable connection options including URI, database name, and collection prefix +- Automatic connection retry logic with configurable retry attempts and delays +- Connection pooling and efficient resource management +- Comprehensive error handling and logging +- Full TypeScript support with type definitions +- Unit and integration tests +- Documentation and usage examples + +### Features +- **MongoDBBigSegmentStore**: Core implementation of the BigSegmentStore interface +- **MongoDBBigSegmentStoreFactory**: Factory function for creating store instances +- **MongoDBClientState**: Connection and state management for MongoDB operations +- **LDMongoDBOptions**: Comprehensive configuration options interface + +### Configuration Options +- `uri`: MongoDB connection string (default: 'mongodb://localhost:27017') +- `database`: Database name (default: 'launchdarkly') +- `prefix`: Collection name prefix (optional) +- `connectTimeoutMS`: Connection timeout in milliseconds (default: 10000) +- `maxRetries`: Maximum connection retry attempts (default: 3) +- `retryDelayMS`: Retry delay in milliseconds (default: 1000) +- `clientOptions`: Additional MongoDB client options + +### Collections +- `big_segments_metadata`: Stores synchronization metadata +- `big_segments_user`: Stores user membership data + +[1.0.0]: https://github.com/launchdarkly/js-core/releases/tag/node-server-sdk-mongodb-v1.0.0 diff --git a/packages/store/node-server-sdk-mongodb/LICENSE b/packages/store/node-server-sdk-mongodb/LICENSE new file mode 100644 index 000000000..d8d98dcbd --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/LICENSE @@ -0,0 +1,175 @@ +Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. 
+ + "Source" shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source files, and configuration files. + + "Object" shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (which shall not include communications that are marked or + otherwise designated in writing by the copyright owner as + "Not a Work"). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based upon (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and derivative works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control + systems, and issue tracking systems that are managed by, or on behalf + of, the Licensor for the purpose of discussing and improving the Work, + but excluding communication that is marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to use, reproduce, modify, distribute, and prepare + Derivative Works of the Work, and to publicly perform and display the + Work and such Derivative Works in any medium or format. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, trademark, patent, + attribution and other notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright notice to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Miscellaneous. If any provision of this License is held to be + unenforceable, such provision shall be reformed only to the extent + necessary to make it enforceable. + + Unless required by applicable law or agreed to in writing, Licensor + provides the Work (and each Contributor provides its Contributions) + on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, + either express or implied, including, without limitation, any + warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, + or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for
+      determining the appropriateness of using or redistributing the Work
+      and assume any risks associated with Your exercise of permissions
+      under this License.
+
+   7. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   8. Accepting Warranty or Support. When redistributing the Work or
+      Derivative Works thereof, You may choose to offer, and charge a fee
+      for, acceptance of support, warranty, indemnity, or other liability
+      obligations and/or rights consistent with this License. However, in
+      accepting such obligations, You may act only on Your own behalf and
+      on Your sole responsibility, not on behalf of any other Contributor,
+      and only if You agree to indemnify, defend, and hold each Contributor
+      harmless for any liability incurred by, or claims asserted against,
+      such Contributor by reason of your accepting any such warranty or support.
+
+   END OF TERMS AND CONDITIONS
diff --git a/packages/store/node-server-sdk-mongodb/README.md b/packages/store/node-server-sdk-mongodb/README.md
new file mode 100644
index 000000000..72e50d012
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/README.md
@@ -0,0 +1,278 @@
+# LaunchDarkly Server-Side SDK for Node.js - MongoDB Support
+
+[![NPM][npm-badge]][npm-url]
+[![Actions Status][ci-badge]][ci-url]
+[![Documentation][docs-badge]][docs-url]
+
+This library provides a MongoDB-backed persistence mechanism (data store) for the [LaunchDarkly Server-Side SDK for Node.js](https://github.com/launchdarkly/js-core/tree/main/packages/sdk/server-node), replacing the default in-memory data store. It includes both a **feature store** for storing feature flags and segments, and a **big segment store** for managing large user segments in MongoDB collections.
+
+## LaunchDarkly overview
+
+[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/home/getting-started) using LaunchDarkly today!
+
+[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly)
+
+## Supported Node versions
+
+This package is compatible with Node.js versions 14 and above.
+
+## Installation
+
+```bash
+npm install @launchdarkly/node-server-sdk-mongodb
+```
+
+## Quick setup
+
+This assumes that you have already installed the LaunchDarkly Server-Side SDK for Node.js.
+
+```javascript
+const ld = require('@launchdarkly/node-server-sdk');
+const { MongoDBFeatureStore, MongoDBBigSegmentStore } = require('@launchdarkly/node-server-sdk-mongodb');
+
+const options = {
+  // Use MongoDB for feature flags and segments
+  featureStore: MongoDBFeatureStore({
+    uri: 'mongodb://localhost:27017',
+    database: 'launchdarkly',
+    prefix: 'ld_'
+  }),
+  // Use MongoDB for big segments
+  bigSegments: {
+    store: MongoDBBigSegmentStore({
+      uri: 'mongodb://localhost:27017',
+      database: 'launchdarkly',
+      prefix: 'ld_'
+    })
+  }
+};
+
+const client = ld.init('YOUR_SDK_KEY', options);
+```
+
+## TypeScript
+
+```typescript
+import { init } from '@launchdarkly/node-server-sdk';
+import { MongoDBFeatureStore, MongoDBBigSegmentStore } from '@launchdarkly/node-server-sdk-mongodb';
+
+const client = init('YOUR_SDK_KEY', {
+  // Use MongoDB for feature flags and segments
+  featureStore: MongoDBFeatureStore({
+    uri: 'mongodb://localhost:27017',
+    database: 'launchdarkly',
+    prefix: 'ld_'
+  }),
+  // Use MongoDB for big segments
+  bigSegments: {
+    store: MongoDBBigSegmentStore({
+      uri: 'mongodb://localhost:27017',
+      database: 'launchdarkly',
+      prefix: 'ld_'
+    })
+  }
+});
+```
+
+## Configuration options
+
+Both the feature store and the big segment store accept the following configuration options:
+
+- `uri` (string): The MongoDB connection URI. Defaults to `'mongodb://localhost:27017'`.
+- `database` (string): The MongoDB database name. Defaults to `'launchdarkly'`.
+- `prefix` (string): A prefix prepended to all collection names. No default; if not provided, collections use their default names.
+- `connectTimeoutMS` (number): Maximum time to wait for connection establishment, in milliseconds. Defaults to `10000` (10 seconds).
+- `maxRetries` (number): Maximum number of connection retry attempts. Defaults to `3`.
+- `retryDelayMS` (number): Time to wait between retries, in milliseconds. Defaults to `1000` (1 second).
+- `cacheTTL` (number): Cache time-to-live in seconds, used by the feature store. Defaults to `30`; set to `0` to disable caching.
+- `clientOptions` (MongoClientOptions): Additional MongoDB client options that will be merged with the defaults.
+
+Example with all connection options:
+
+```javascript
+const store = MongoDBBigSegmentStore({
+  uri: 'mongodb://user:password@localhost:27017/mydb?authSource=admin',
+  database: 'feature_flags',
+  prefix: 'app1_',
+  connectTimeoutMS: 5000,
+  maxRetries: 5,
+  retryDelayMS: 2000,
+  clientOptions: {
+    maxPoolSize: 10,
+    serverSelectionTimeoutMS: 5000,
+    ssl: true
+  }
+});
+```
+
+## MongoDB setup
+
+Before using the MongoDB stores, you'll need to set up your MongoDB database. The stores automatically create the necessary collections when they're first accessed.
+
+### Collections
+
+The MongoDB big segment store uses two collections:
+
+1. **Metadata Collection** (`big_segments_metadata` or `{prefix}big_segments_metadata`): Stores metadata about when the big segments were last synchronized.
+2. **User Collection** (`big_segments_user` or `{prefix}big_segments_user`): Stores user membership data (which segments include/exclude specific users).
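+
+For example, with `prefix: 'ld_'` the store reads and writes `ld_big_segments_metadata` and `ld_big_segments_user`. A minimal sketch of the naming rule, mirroring the package's internal `prefixedCollection` helper:
+
+```javascript
+// How collection names are resolved from the optional prefix.
+function prefixedCollection(prefix, name) {
+  return prefix ? `${prefix}${name}` : name;
+}
+
+prefixedCollection('ld_', 'big_segments_user'); // => 'ld_big_segments_user'
+prefixedCollection(undefined, 'big_segments_metadata'); // => 'big_segments_metadata'
+```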
+ +### Indexes + +For optimal performance, consider adding indexes to your collections: + +```javascript +// Connect to your MongoDB instance +const { MongoClient } = require('mongodb'); +const client = new MongoClient('mongodb://localhost:27017'); +await client.connect(); +const db = client.db('launchdarkly'); + +// Create index on user hash for fast lookups +await db.collection('big_segments_user').createIndex({ userHash: 1 }); + +// If using a prefix, adjust collection names accordingly +await db.collection('ld_big_segments_user').createIndex({ userHash: 1 }); +``` + +## Data structure + +### Metadata Document + +```javascript +{ + _id: "big_segments_metadata", + lastUpToDate: 1640995200000 // Unix timestamp in milliseconds +} +``` + +### User Membership Document + +```javascript +{ + userHash: "is_hashed:user123", + included: ["segment1", "segment2"], // Segments this user is explicitly included in + excluded: ["segment3"] // Segments this user is explicitly excluded from +} +``` + +## Big Segments + +Big Segments are a specific type of user segments. For more information, read the LaunchDarkly documentation: [https://docs.launchdarkly.com/home/users/big-segments](https://docs.launchdarkly.com/home/users/big-segments) + +## Caching behavior + +To reduce database traffic, the SDK has an internal cache for Big Segments. The cache duration is set to 5 minutes by default. You can configure this and other caching parameters in the SDK's `bigSegments` configuration. + +```javascript +const client = ld.init('YOUR_SDK_KEY', { + bigSegments: { + store: MongoDBBigSegmentStore({ + uri: 'mongodb://localhost:27017' + }), + userCacheSize: 1000, // Maximum number of users to cache + userCacheTimeSeconds: 300, // Cache duration (5 minutes) + staleAfterSeconds: 120, // Consider data stale after 2 minutes + statusPollIntervalSeconds: 5 // How often to poll store status + } +}); +``` + +## Error handling + +The MongoDB big segment store includes built-in error handling and retry logic: + +- **Connection errors**: Automatic retry with exponential backoff +- **Network timeouts**: Configurable timeout and retry settings +- **Transient errors**: Automatic retry for temporary MongoDB issues +- **Connection pooling**: Efficient connection reuse + +Errors are logged using the SDK's logger if one is configured. + +## Security considerations + +When using MongoDB in production: + +1. **Authentication**: Use strong credentials and enable authentication +2. **SSL/TLS**: Enable encryption for data in transit +3. **Network security**: Restrict network access to MongoDB instances +4. **Authorization**: Use role-based access control (RBAC) +5. **Monitoring**: Monitor for unusual access patterns + +Example secure configuration: + +```javascript +const store = MongoDBBigSegmentStore({ + uri: 'mongodb://username:password@cluster.mongodb.net/database?retryWrites=true&w=majority', + clientOptions: { + ssl: true, + authSource: 'admin', + readPreference: 'secondaryPreferred' + } +}); +``` + +## Performance optimization + +For better performance in production environments: + +1. **Indexes**: Create appropriate indexes on the `userHash` field +2. **Connection pooling**: Configure `maxPoolSize` based on your application needs +3. **Read preferences**: Use `secondaryPreferred` for read replicas if available +4. **Write concerns**: Configure appropriate write concerns for your consistency requirements + +## Testing + +The package includes comprehensive unit and integration tests. 
To run tests locally:
+
+```bash
+# Make sure MongoDB is running locally
+npm test
+```
+
+For integration tests, ensure you have a MongoDB instance running at `mongodb://localhost:27017`.
+
+## Development
+
+To work on this package:
+
+```bash
+# Install dependencies
+npm install
+
+# Run tests
+npm test
+
+# Build the package
+npm run build
+
+# Run linting
+npm run lint
+```
+
+## Learn more
+
+Check out our [documentation](https://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for the Node.js SDK](https://docs.launchdarkly.com/sdk/server-side/node-js) or the [API reference](https://launchdarkly.github.io/js-core/packages/sdk/server-node/docs/).
+
+## Contributing
+
+We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](../../CONTRIBUTING.md) for instructions on how to contribute to this SDK.
+
+## About LaunchDarkly
+
+* LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can:
+  * Roll out a new feature to a subset of your users (like a group of users who opt in to a beta program), gathering feedback and bug reports from real-world use cases.
+  * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?).
+  * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy or even restart the application with a changed configuration file.
+  * Grant access to certain features based on user attributes, like payment plan (e.g., users on the 'gold' plan get access to more features than users on the 'silver' plan). Disable parts of your application to facilitate maintenance, without taking everything offline.
+* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Read [our documentation](https://docs.launchdarkly.com/sdk) for a complete list.
+* Explore LaunchDarkly
+  * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information
+  * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides
+  * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation
+  * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates
+
+[npm-badge]: https://img.shields.io/npm/v/@launchdarkly/node-server-sdk-mongodb.svg?style=flat-square
+[npm-url]: https://www.npmjs.com/package/@launchdarkly/node-server-sdk-mongodb
+[ci-badge]: https://github.com/launchdarkly/js-core/actions/workflows/ci.yml/badge.svg
+[ci-url]: https://github.com/launchdarkly/js-core/actions/workflows/ci.yml
+[docs-badge]: https://img.shields.io/static/v1?label=GitHub+Pages&message=API+reference&color=00add8
+[docs-url]: https://launchdarkly.github.io/js-core/packages/store/node-server-sdk-mongodb/docs/
diff --git a/packages/store/node-server-sdk-mongodb/__tests__/MongoDBBigSegmentStore.test.ts b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBBigSegmentStore.test.ts
new file mode 100644
index 000000000..ac4232122
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBBigSegmentStore.test.ts
@@ -0,0 +1,248 @@
+import { MongoClient, Db, Collection } from 'mongodb';
+
+import { interfaces } from '@launchdarkly/node-server-sdk';
+
+import MongoDBBigSegmentStore, {
+  COLLECTION_BIG_SEGMENTS_METADATA,
+  COLLECTION_BIG_SEGMENTS_USER,
+  METADATA_KEY,
+  FIELD_LAST_UP_TO_DATE,
+  FIELD_USER_HASH,
+  FIELD_INCLUDED,
+  FIELD_EXCLUDED,
+} from '../src/MongoDBBigSegmentStore';
+
+const FAKE_HASH = 'userhash';
+const TEST_DATABASE = 'test_launchdarkly';
+
+// Helper function to clear all test data
+async function clearTestData(prefix?: string): Promise<void> {
+  const client = new MongoClient('mongodb://localhost:27017');
+  await client.connect();
+  const db = client.db(TEST_DATABASE);
+
+  const metadataCollectionName = prefix ? `${prefix}${COLLECTION_BIG_SEGMENTS_METADATA}` : COLLECTION_BIG_SEGMENTS_METADATA;
+  const userCollectionName = prefix ? `${prefix}${COLLECTION_BIG_SEGMENTS_USER}` : COLLECTION_BIG_SEGMENTS_USER;
+
+  await db.collection(metadataCollectionName).deleteMany({});
+  await db.collection(userCollectionName).deleteMany({});
+  await client.close();
+}
+
+// Helper function to set metadata in the database
+async function setMetadata(
+  prefix: string,
+  metadata: interfaces.BigSegmentStoreMetadata,
+): Promise<void> {
+  const client = new MongoClient('mongodb://localhost:27017');
+  await client.connect();
+  const db = client.db(TEST_DATABASE);
+
+  const metadataCollectionName = prefix ? `${prefix}${COLLECTION_BIG_SEGMENTS_METADATA}` : COLLECTION_BIG_SEGMENTS_METADATA;
+  const metadataCollection = db.collection(metadataCollectionName);
+
+  if (metadata.lastUpToDate) {
+    await metadataCollection.replaceOne(
+      { _id: METADATA_KEY },
+      { _id: METADATA_KEY, [FIELD_LAST_UP_TO_DATE]: metadata.lastUpToDate },
+      { upsert: true }
+    );
+  }
+
+  await client.close();
+}
+
+// Helper function to set user segment membership in the database
+async function setSegments(
+  prefix: string,
+  userHashKey: string,
+  included: string[],
+  excluded: string[],
+): Promise<void> {
+  const client = new MongoClient('mongodb://localhost:27017');
+  await client.connect();
+  const db = client.db(TEST_DATABASE);
+
+  const userCollectionName = prefix ?
`${prefix}${COLLECTION_BIG_SEGMENTS_USER}` : COLLECTION_BIG_SEGMENTS_USER; + const userCollection = db.collection(userCollectionName); + + const userData: any = { [FIELD_USER_HASH]: userHashKey }; + + if (included.length > 0) { + userData[FIELD_INCLUDED] = included; + } + + if (excluded.length > 0) { + userData[FIELD_EXCLUDED] = excluded; + } + + await userCollection.replaceOne( + { [FIELD_USER_HASH]: userHashKey }, + userData, + { upsert: true } + ); + + await client.close(); +} + +describe.each([undefined, 'app1_'])('MongoDB big segment store', (prefixParam) => { + let store: MongoDBBigSegmentStore; + const prefix = prefixParam || ''; + + beforeEach(async () => { + await clearTestData(prefixParam); + store = new MongoDBBigSegmentStore({ + uri: 'mongodb://localhost:27017', + database: TEST_DATABASE, + prefix: prefixParam, + }); + }); + + afterEach(async () => { + store.close(); + }); + + describe('metadata operations', () => { + it('can get populated metadata', async () => { + const expected = { lastUpToDate: 1234567890 }; + await setMetadata(prefix, expected); + const meta = await store.getMetadata(); + expect(meta).toEqual(expected); + }); + + it('can get metadata when not populated', async () => { + const meta = await store.getMetadata(); + expect(meta?.lastUpToDate).toBeUndefined(); + }); + + it('returns empty object when metadata collection is empty', async () => { + const meta = await store.getMetadata(); + expect(meta).toEqual({}); + }); + }); + + describe('user membership operations', () => { + it('can get user membership for a user which has no membership', async () => { + const membership = await store.getUserMembership(FAKE_HASH); + expect(membership).toBeUndefined(); + }); + + it('can get membership for a user that is only included', async () => { + await setSegments(prefix, FAKE_HASH, ['key1', 'key2'], []); + + const membership = await store.getUserMembership(FAKE_HASH); + expect(membership).toEqual({ key1: true, key2: true }); + }); + + it('can get membership for a user that is only excluded', async () => { + await setSegments(prefix, FAKE_HASH, [], ['key1', 'key2']); + + const membership = await store.getUserMembership(FAKE_HASH); + expect(membership).toEqual({ key1: false, key2: false }); + }); + + it('can get membership for a user that is included and excluded', async () => { + await setSegments(prefix, FAKE_HASH, ['key1', 'key2'], ['key2', 'key3']); + + const membership = await store.getUserMembership(FAKE_HASH); + expect(membership).toEqual({ key1: true, key2: true, key3: false }); // include of key2 overrides exclude + }); + + it('returns undefined when user exists but has no segments', async () => { + // Create a user document without included/excluded fields + const client = new MongoClient('mongodb://localhost:27017'); + await client.connect(); + const db = client.db(TEST_DATABASE); + + const userCollectionName = prefix ? 
`${prefix}${COLLECTION_BIG_SEGMENTS_USER}` : COLLECTION_BIG_SEGMENTS_USER; + const userCollection = db.collection(userCollectionName); + + await userCollection.insertOne({ [FIELD_USER_HASH]: FAKE_HASH }); + await client.close(); + + const membership = await store.getUserMembership(FAKE_HASH); + expect(membership).toBeUndefined(); + }); + + it('handles empty arrays in included and excluded fields', async () => { + await setSegments(prefix, FAKE_HASH, [], []); + + const membership = await store.getUserMembership(FAKE_HASH); + expect(membership).toBeUndefined(); + }); + }); + + describe('error handling', () => { + it('throws error when MongoDB is unavailable for getMetadata', async () => { + const storeWithBadUri = new MongoDBBigSegmentStore({ + uri: 'mongodb://nonexistent:27017', + database: TEST_DATABASE, + connectTimeoutMS: 100, + maxRetries: 0, + }); + + await expect(storeWithBadUri.getMetadata()).rejects.toThrow(); + storeWithBadUri.close(); + }); + + it('throws error when MongoDB is unavailable for getUserMembership', async () => { + const storeWithBadUri = new MongoDBBigSegmentStore({ + uri: 'mongodb://nonexistent:27017', + database: TEST_DATABASE, + connectTimeoutMS: 100, + maxRetries: 0, + }); + + await expect(storeWithBadUri.getUserMembership(FAKE_HASH)).rejects.toThrow(); + storeWithBadUri.close(); + }); + }); + + describe('connection management', () => { + it('can be closed safely multiple times', () => { + expect(() => { + store.close(); + store.close(); + }).not.toThrow(); + }); + + it('reconnects automatically after connection loss', async () => { + // First, verify the store works + const meta1 = await store.getMetadata(); + expect(meta1).toEqual({}); + + // Close the connection + store.close(); + + // Should reconnect on next operation + const meta2 = await store.getMetadata(); + expect(meta2).toEqual({}); + }); + }); + + describe('configuration options', () => { + it('uses default URI when none provided', async () => { + const defaultStore = new MongoDBBigSegmentStore({ + database: TEST_DATABASE, + }); + + // Should work with default localhost URI + const meta = await defaultStore.getMetadata(); + expect(meta).toEqual({}); + + defaultStore.close(); + }); + + it('uses default database when none provided', async () => { + const defaultDbStore = new MongoDBBigSegmentStore({ + uri: 'mongodb://localhost:27017', + }); + + // This will use the default 'launchdarkly' database + const meta = await defaultDbStore.getMetadata(); + expect(meta).toEqual({}); + + defaultDbStore.close(); + }); + }); +}); diff --git a/packages/store/node-server-sdk-mongodb/__tests__/MongoDBBigSegmentStoreFactory.test.ts b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBBigSegmentStoreFactory.test.ts new file mode 100644 index 000000000..d187d8ad5 --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBBigSegmentStoreFactory.test.ts @@ -0,0 +1,65 @@ +import { LDClientContext } from '@launchdarkly/node-server-sdk'; + +import MongoDBBigSegmentStoreFactory from '../src/MongoDBBigSegmentStoreFactory'; +import MongoDBBigSegmentStore from '../src/MongoDBBigSegmentStore'; + +describe('MongoDBBigSegmentStoreFactory', () => { + it('creates a store with provided options', () => { + const options = { + uri: 'mongodb://localhost:27017', + database: 'test_db', + prefix: 'test_', + }; + + const factory = MongoDBBigSegmentStoreFactory(options); + expect(typeof factory).toBe('function'); + + const mockContext: LDClientContext = { + basicConfiguration: { + logger: undefined, + }, + } as any; + + 
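    // The factory reads the optional logger from the context's basicConfiguration and passes it to the store.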
const store = factory(mockContext); + expect(store).toBeInstanceOf(MongoDBBigSegmentStore); + + store.close(); + }); + + it('creates a store without options', () => { + const factory = MongoDBBigSegmentStoreFactory(); + expect(typeof factory).toBe('function'); + + const mockContext: LDClientContext = { + basicConfiguration: { + logger: undefined, + }, + } as any; + + const store = factory(mockContext); + expect(store).toBeInstanceOf(MongoDBBigSegmentStore); + + store.close(); + }); + + it('passes logger from context to store', () => { + const mockLogger = { + error: jest.fn(), + warn: jest.fn(), + info: jest.fn(), + debug: jest.fn(), + }; + + const factory = MongoDBBigSegmentStoreFactory(); + const mockContext: LDClientContext = { + basicConfiguration: { + logger: mockLogger, + }, + } as any; + + const store = factory(mockContext); + expect(store).toBeInstanceOf(MongoDBBigSegmentStore); + + store.close(); + }); +}); diff --git a/packages/store/node-server-sdk-mongodb/__tests__/MongoDBClientState.test.ts b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBClientState.test.ts new file mode 100644 index 000000000..ae5bf633e --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBClientState.test.ts @@ -0,0 +1,129 @@ +import MongoDBClientState from '../src/MongoDBClientState'; + +describe('MongoDBClientState', () => { + let clientState: MongoDBClientState; + + afterEach(() => { + if (clientState) { + clientState.close(); + } + }); + + describe('collection naming', () => { + it('returns collection name without prefix when no prefix is set', () => { + clientState = new MongoDBClientState(); + expect(clientState.prefixedCollection('test')).toBe('test'); + }); + + it('returns prefixed collection name when prefix is set', () => { + clientState = new MongoDBClientState({ prefix: 'myapp_' }); + expect(clientState.prefixedCollection('test')).toBe('myapp_test'); + }); + + it('handles empty prefix', () => { + clientState = new MongoDBClientState({ prefix: '' }); + expect(clientState.prefixedCollection('test')).toBe('test'); + }); + }); + + describe('connection management', () => { + it('can be closed safely when not connected', () => { + clientState = new MongoDBClientState(); + expect(() => clientState.close()).not.toThrow(); + }); + + it('connects to MongoDB with default settings', async () => { + clientState = new MongoDBClientState(); + const db = await clientState.getDatabase(); + expect(db).toBeDefined(); + expect(db.databaseName).toBe('launchdarkly'); + }); + + it('connects to MongoDB with custom settings', async () => { + clientState = new MongoDBClientState({ + uri: 'mongodb://localhost:27017', + database: 'test_custom_db', + }); + const db = await clientState.getDatabase(); + expect(db).toBeDefined(); + expect(db.databaseName).toBe('test_custom_db'); + }); + + it('reuses existing connection', async () => { + clientState = new MongoDBClientState(); + const db1 = await clientState.getDatabase(); + const db2 = await clientState.getDatabase(); + expect(db1).toBe(db2); + }); + + it('gets collection from database', async () => { + clientState = new MongoDBClientState({ + database: 'test_collection_db', + }); + const collection = await clientState.getCollection('test_collection'); + expect(collection).toBeDefined(); + expect(collection.collectionName).toBe('test_collection'); + }); + + it('gets prefixed collection from database', async () => { + clientState = new MongoDBClientState({ + database: 'test_collection_db', + prefix: 'prefix_', + }); + const collection = await 
clientState.getCollection('test_collection'); + expect(collection).toBeDefined(); + expect(collection.collectionName).toBe('prefix_test_collection'); + }); + }); + + describe('error handling', () => { + it('throws error when connection fails', async () => { + clientState = new MongoDBClientState({ + uri: 'mongodb://nonexistent:27017', + connectTimeoutMS: 100, + maxRetries: 0, + }); + + await expect(clientState.getDatabase()).rejects.toThrow(); + }); + + it('retries connection on failure', async () => { + clientState = new MongoDBClientState({ + uri: 'mongodb://nonexistent:27017', + connectTimeoutMS: 100, + maxRetries: 2, + retryDelayMS: 50, + }); + + const startTime = Date.now(); + await expect(clientState.getDatabase()).rejects.toThrow(); + const endTime = Date.now(); + + // Should have retried at least twice with delays + expect(endTime - startTime).toBeGreaterThan(100); + }, 10000); + }); + + describe('configuration options', () => { + it('uses default values when options not provided', () => { + clientState = new MongoDBClientState(); + expect(clientState.prefixedCollection('test')).toBe('test'); + }); + + it('applies custom retry settings', async () => { + clientState = new MongoDBClientState({ + uri: 'mongodb://nonexistent:27017', + connectTimeoutMS: 50, + maxRetries: 1, + retryDelayMS: 25, + }); + + const startTime = Date.now(); + await expect(clientState.getDatabase()).rejects.toThrow(); + const endTime = Date.now(); + + // Should fail faster with custom settings + expect(endTime - startTime).toBeLessThan(500); + }, 5000); + }); +}); diff --git a/packages/store/node-server-sdk-mongodb/__tests__/MongoDBCore.test.ts b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBCore.test.ts new file mode 100644 index 000000000..241e2ca3c --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBCore.test.ts @@ -0,0 +1,204 @@ +import { interfaces } from '@launchdarkly/node-server-sdk'; + +import MongoDBCore, { + COLLECTION_FEATURES, + COLLECTION_SEGMENTS, + COLLECTION_INITIALIZED, + INITIALIZED_TOKEN, +} from '../src/MongoDBCore'; +import MongoDBClientState from '../src/MongoDBClientState'; + +const mockDataKind: interfaces.PersistentStoreDataKind = { + namespace: 'test_namespace', + deserialize: (str: string) => JSON.parse(str), +}; + +describe('MongoDBCore', () => { + let core: MongoDBCore; + let clientState: MongoDBClientState; + + beforeEach(() => { + clientState = new MongoDBClientState({ + uri: 'mongodb://localhost:27017', + database: 'test_mongodb_core', + }); + core = new MongoDBCore(clientState); + }); + + afterEach(() => { + core.close(); + }); + + describe('initialization', () => { + it('sets initialized flag after init', (done) => { + core.init([], () => { + core.initialized((isInit) => { + expect(isInit).toBe(true); + done(); + }); + }); + }); + + it('returns false for initialized before init', (done) => { + core.initialized((isInit) => { + expect(isInit).toBe(false); + done(); + }); + }); + }); + + describe('data operations', () => { + const testItem: interfaces.SerializedItemDescriptor = { + version: 1, + serializedItem: '{"key":"test","enabled":true}', + }; + + beforeEach((done) => { + // Initialize empty store + core.init([], done); + }); + + it('stores and retrieves items', (done) => { + core.upsert(mockDataKind, 'test_key', testItem, (err, result) => { + expect(err).toBeUndefined(); + expect(result).toEqual(testItem); + + core.get(mockDataKind, 'test_key', (retrieved) => { + expect(retrieved).toEqual(testItem); + done(); + }); + }); + }); + + 
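    // Reads for keys that were never written should complete with undefined rather than an error.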
it('returns undefined for non-existent items', (done) => { + core.get(mockDataKind, 'nonexistent', (result) => { + expect(result).toBeUndefined(); + done(); + }); + }); + + it('handles deleted items', (done) => { + const deletedItem: interfaces.SerializedItemDescriptor = { + version: 2, + deleted: true, + }; + + core.upsert(mockDataKind, 'test_key', deletedItem, (err, result) => { + expect(err).toBeUndefined(); + expect(result).toEqual(deletedItem); + + core.get(mockDataKind, 'test_key', (retrieved) => { + expect(retrieved?.deleted).toBe(true); + expect(retrieved?.version).toBe(2); + done(); + }); + }); + }); + + it('respects version ordering for upserts', (done) => { + // First, insert item with version 2 + const newerItem: interfaces.SerializedItemDescriptor = { + version: 2, + serializedItem: '{"key":"test","version":2}', + }; + + core.upsert(mockDataKind, 'test_key', newerItem, () => { + // Try to upsert with older version 1 + const olderItem: interfaces.SerializedItemDescriptor = { + version: 1, + serializedItem: '{"key":"test","version":1}', + }; + + core.upsert(mockDataKind, 'test_key', olderItem, () => { + // Should still have the newer version + core.get(mockDataKind, 'test_key', (retrieved) => { + expect(retrieved?.version).toBe(2); + expect(retrieved?.serializedItem).toBe('{"key":"test","version":2}'); + done(); + }); + }); + }); + }); + + it('retrieves all items of a kind', (done) => { + const item1: interfaces.SerializedItemDescriptor = { + version: 1, + serializedItem: '{"key":"item1"}', + }; + const item2: interfaces.SerializedItemDescriptor = { + version: 1, + serializedItem: '{"key":"item2"}', + }; + + core.upsert(mockDataKind, 'key1', item1, () => { + core.upsert(mockDataKind, 'key2', item2, () => { + core.getAll(mockDataKind, (results) => { + expect(results).toBeDefined(); + expect(results!.length).toBe(2); + + const resultMap = new Map(results!.map(r => [r.key, r.item])); + expect(resultMap.get('key1')).toEqual(item1); + expect(resultMap.get('key2')).toEqual(item2); + done(); + }); + }); + }); + }); + + it('excludes deleted items from getAll', (done) => { + const normalItem: interfaces.SerializedItemDescriptor = { + version: 1, + serializedItem: '{"key":"normal"}', + }; + const deletedItem: interfaces.SerializedItemDescriptor = { + version: 1, + deleted: true, + }; + + core.upsert(mockDataKind, 'normal', normalItem, () => { + core.upsert(mockDataKind, 'deleted', deletedItem, () => { + core.getAll(mockDataKind, (results) => { + expect(results).toBeDefined(); + expect(results!.length).toBe(1); + expect(results![0].key).toBe('normal'); + expect(results![0].item).toEqual(normalItem); + done(); + }); + }); + }); + }); + }); + + describe('error handling', () => { + it('handles database connection errors gracefully', (done) => { + const badCore = new MongoDBCore( + new MongoDBClientState({ + uri: 'mongodb://nonexistent:27017', + connectTimeoutMS: 100, + maxRetries: 0, + }) + ); + + badCore.get(mockDataKind, 'test', (result) => { + expect(result).toBeUndefined(); + badCore.close(); + done(); + }); + }); + }); + + describe('constants', () => { + it('exports expected collection names', () => { + expect(COLLECTION_FEATURES).toBe('features'); + expect(COLLECTION_SEGMENTS).toBe('segments'); + expect(COLLECTION_INITIALIZED).toBe('initialized'); + expect(INITIALIZED_TOKEN).toBe('$inited'); + }); + }); + + describe('description', () => { + it('returns MongoDB as description', () => { + expect(core.getDescription()).toBe('MongoDB'); + }); + }); +}); diff --git 
a/packages/store/node-server-sdk-mongodb/__tests__/MongoDBFeatureStore.test.ts b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBFeatureStore.test.ts
new file mode 100644
index 000000000..9e404b529
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBFeatureStore.test.ts
@@ -0,0 +1,317 @@
+import { AsyncStoreFacade } from '@launchdarkly/node-server-sdk';
+import { MongoClient } from 'mongodb';
+
+import MongoDBFeatureStore from '../src/MongoDBFeatureStore';
+
+const dataKind = {
+  features: { namespace: 'features' },
+  segments: { namespace: 'segments' },
+};
+
+const TEST_DATABASE = 'test_launchdarkly_feature_store';
+
+// Helper function to clear all test data
+async function clearTestData(prefix?: string): Promise<void> {
+  const client = new MongoClient('mongodb://localhost:27017');
+  await client.connect();
+  const db = client.db(TEST_DATABASE);
+
+  const collections = ['features', 'segments', 'initialized'];
+
+  for (const collectionName of collections) {
+    const actualCollectionName = prefix ? `${prefix}${collectionName}` : collectionName;
+    await db.collection(actualCollectionName).deleteMany({});
+  }
+
+  await client.close();
+}
+
+describe.each([undefined, 'testing_'])('MongoDB Feature Store', (prefixParam) => {
+  const prefix = prefixParam || '';
+
+  describe('given an empty store', () => {
+    let store: MongoDBFeatureStore;
+    let facade: AsyncStoreFacade;
+
+    beforeEach(async () => {
+      await clearTestData(prefixParam);
+      store = new MongoDBFeatureStore({
+        uri: 'mongodb://localhost:27017',
+        database: TEST_DATABASE,
+        prefix: prefixParam,
+      });
+      facade = new AsyncStoreFacade(store);
+    });
+
+    afterEach(() => {
+      store.close();
+    });
+
+    it('is initialized after calling init()', async () => {
+      await facade.init({});
+      const initialized = await facade.initialized();
+      expect(initialized).toBeTruthy();
+    });
+
+    it('is not initialized before calling init()', async () => {
+      const initialized = await facade.initialized();
+      expect(initialized).toBeFalsy();
+    });
+
+    it('completely replaces previous data when calling init()', async () => {
+      const flags = {
+        first: { key: 'first', version: 1 },
+        second: { key: 'second', version: 1 },
+      };
+      const segments = { first: { key: 'first', version: 2 } };
+      const initData1 = {
+        features: flags,
+        segments,
+      };
+
+      await facade.init(initData1);
+      const items1 = await facade.all(dataKind.features);
+      expect(items1).toEqual(flags);
+      const items2 = await facade.all(dataKind.segments);
+      expect(items2).toEqual(segments);
+
+      const newFlags = { first: { key: 'first', version: 3 } };
+      const newSegments = { first: { key: 'first', version: 4 } };
+      const initData2 = {
+        features: newFlags,
+        segments: newSegments,
+      };
+
+      await facade.init(initData2);
+      const items3 = await facade.all(dataKind.features);
+      expect(items3).toEqual(newFlags);
+      const items4 = await facade.all(dataKind.segments);
+      expect(items4).toEqual(newSegments);
+    });
+
+    it('removes previous data that is not in new init data', async () => {
+      // Initialize with some data
+      const initialData = {
+        features: {
+          flag1: { key: 'flag1', version: 1 },
+          flag2: { key: 'flag2', version: 1 },
+        },
+        segments: {},
+      };
+      await facade.init(initialData);
+
+      // Verify initial data is there
+      const result1 = await facade.get(dataKind.features, 'flag1');
+      expect(result1).toEqual({ key: 'flag1', version: 1 });
+
+      // Re-initialize with different data
+      const newData = {
+        features: {
+          flag3: { key: 'flag3', version: 1 },
+        },
+        segments: {},
+      };
+      await facade.init(newData);
+
+      // Old data should be gone
+      const result2 = await facade.get(dataKind.features, 'flag1');
+      expect(result2).toBeNull();
+      const result3 = await facade.get(dataKind.features, 'flag2');
+      expect(result3).toBeNull();
+
+      // New data should be there
+      const result4 = await facade.get(dataKind.features, 'flag3');
+      expect(result4).toEqual({ key: 'flag3', version: 1 });
+    });
+  });
+
+  describe('given a store with basic data', () => {
+    let store: MongoDBFeatureStore;
+    let facade: AsyncStoreFacade;
+
+    const feature1 = { key: 'foo', version: 10 };
+    const feature2 = { key: 'bar', version: 10 };
+
+    beforeEach(async () => {
+      await clearTestData(prefixParam);
+      store = new MongoDBFeatureStore({
+        uri: 'mongodb://localhost:27017',
+        database: TEST_DATABASE,
+        prefix: prefixParam,
+      });
+      facade = new AsyncStoreFacade(store);
+      await facade.init({
+        features: {
+          foo: feature1,
+          bar: feature2,
+        },
+        segments: {},
+      });
+    });
+
+    afterEach(() => {
+      store.close();
+    });
+
+    it('gets a feature that exists', async () => {
+      const result = await facade.get(dataKind.features, feature1.key);
+      expect(result).toEqual(feature1);
+    });
+
+    it('does not get nonexisting feature', async () => {
+      const result = await facade.get(dataKind.features, 'biz');
+      expect(result).toBeNull();
+    });
+
+    it('gets all features', async () => {
+      const result = await facade.all(dataKind.features);
+      expect(result).toEqual({
+        foo: feature1,
+        bar: feature2,
+      });
+    });
+
+    it('gets empty collection when no segments exist', async () => {
+      const result = await facade.all(dataKind.segments);
+      expect(result).toEqual({});
+    });
+
+    it('upserts with newer version', async () => {
+      const newVer = { key: feature1.key, version: feature1.version + 1 };
+
+      await facade.upsert(dataKind.features, newVer);
+      const result = await facade.get(dataKind.features, feature1.key);
+      expect(result).toEqual(newVer);
+    });
+
+    it('does not upsert with older version', async () => {
+      const oldVer = { key: feature1.key, version: feature1.version - 1 };
+      await facade.upsert(dataKind.features, oldVer);
+      const result = await facade.get(dataKind.features, feature1.key);
+      expect(result).toEqual(feature1);
+    });
+
+    it('upserts new feature', async () => {
+      const newFeature = { key: 'biz', version: 99 };
+      await facade.upsert(dataKind.features, newFeature);
+      const result = await facade.get(dataKind.features, newFeature.key);
+      expect(result).toEqual(newFeature);
+    });
+
+    it('handles upsert race condition within same client correctly', async () => {
+      const ver1 = { key: feature1.key, version: feature1.version + 1 };
+      const ver2 = { key: feature1.key, version: feature1.version + 2 };
+      const promises: Promise<void>[] = [];
+
+      // Deliberately do not wait for the first upsert to complete before starting the second,
+      // so their operations will be interleaved unless we're correctly handling version conflicts
+      promises.push(facade.upsert(dataKind.features, ver2));
+      promises.push(facade.upsert(dataKind.features, ver1));
+
+      // Now wait until both have completed
+      await Promise.all(promises);
+      const result = await facade.get(dataKind.features, feature1.key);
+      expect(result).toEqual(ver2);
+    });
+
+    it('deletes with newer version', async () => {
+      await facade.delete(dataKind.features, feature1.key, feature1.version + 1);
+      const result = await facade.get(dataKind.features, feature1.key);
+      expect(result).toBe(null);
+    });
+
+    it('does not delete with older version', async () => {
+      await facade.delete(dataKind.features,
feature1.key, feature1.version - 1); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).not.toBe(null); + }); + + it('allows deleting unknown feature', async () => { + await facade.delete(dataKind.features, 'biz', 99); + const result = await facade.get(dataKind.features, 'biz'); + expect(result).toBe(null); + }); + + it('does not upsert older version after delete', async () => { + await facade.delete(dataKind.features, feature1.key, feature1.version + 1); + await facade.upsert(dataKind.features, feature1); + const result = await facade.get(dataKind.features, feature1.key); + expect(result).toBe(null); + }); + + it('handles concurrent upserts to different keys', async () => { + const newFeature1 = { key: 'concurrent1', version: 1 }; + const newFeature2 = { key: 'concurrent2', version: 1 }; + + const promises = [ + facade.upsert(dataKind.features, newFeature1), + facade.upsert(dataKind.features, newFeature2), + ]; + + await Promise.all(promises); + + const result1 = await facade.get(dataKind.features, 'concurrent1'); + const result2 = await facade.get(dataKind.features, 'concurrent2'); + + expect(result1).toEqual(newFeature1); + expect(result2).toEqual(newFeature2); + }); + }); + + describe('error handling', () => { + it('handles connection errors gracefully', async () => { + const store = new MongoDBFeatureStore({ + uri: 'mongodb://nonexistent:27017', + database: TEST_DATABASE, + connectTimeoutMS: 100, + maxRetries: 0, + }); + const facade = new AsyncStoreFacade(store); + + const result = await facade.get(dataKind.features, 'nonexistent'); + expect(result).toBeNull(); + + store.close(); + }); + }); + + describe('cache behavior', () => { + let store: MongoDBFeatureStore; + let facade: AsyncStoreFacade; + + beforeEach(async () => { + await clearTestData(prefixParam); + store = new MongoDBFeatureStore({ + uri: 'mongodb://localhost:27017', + database: TEST_DATABASE, + prefix: prefixParam, + cacheTTL: 1, // Very short cache for testing + }); + facade = new AsyncStoreFacade(store); + }); + + afterEach(() => { + store.close(); + }); + + it('respects cache TTL setting', async () => { + const feature = { key: 'cached_feature', version: 1 }; + + await facade.init({ + features: { cached_feature: feature }, + segments: {}, + }); + + // First get should populate cache + const result1 = await facade.get(dataKind.features, 'cached_feature'); + expect(result1).toEqual(feature); + + // Wait for cache to expire (1 second + small buffer) + await new Promise(resolve => setTimeout(resolve, 1100)); + + // Should still work after cache expiry + const result2 = await facade.get(dataKind.features, 'cached_feature'); + expect(result2).toEqual(feature); + }); + }); +}); diff --git a/packages/store/node-server-sdk-mongodb/__tests__/MongoDBFeatureStoreFactory.test.ts b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBFeatureStoreFactory.test.ts new file mode 100644 index 000000000..299f83d7d --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/__tests__/MongoDBFeatureStoreFactory.test.ts @@ -0,0 +1,84 @@ +import { LDClientContext } from '@launchdarkly/node-server-sdk'; + +import MongoDBFeatureStoreFactory from '../src/MongoDBFeatureStoreFactory'; +import MongoDBFeatureStore from '../src/MongoDBFeatureStore'; + +describe('MongoDBFeatureStoreFactory', () => { + it('creates a feature store with provided options', () => { + const options = { + uri: 'mongodb://localhost:27017', + database: 'test_db', + prefix: 'test_', + cacheTTL: 60, + }; + + const factory = 
MongoDBFeatureStoreFactory(options); + expect(typeof factory).toBe('function'); + + const mockContext: LDClientContext = { + basicConfiguration: { + logger: undefined, + }, + } as any; + + const store = factory(mockContext); + expect(store).toBeInstanceOf(MongoDBFeatureStore); + + store.close(); + }); + + it('creates a feature store without options', () => { + const factory = MongoDBFeatureStoreFactory(); + expect(typeof factory).toBe('function'); + + const mockContext: LDClientContext = { + basicConfiguration: { + logger: undefined, + }, + } as any; + + const store = factory(mockContext); + expect(store).toBeInstanceOf(MongoDBFeatureStore); + + store.close(); + }); + + it('passes logger from context to store', () => { + const mockLogger = { + error: jest.fn(), + warn: jest.fn(), + info: jest.fn(), + debug: jest.fn(), + }; + + const factory = MongoDBFeatureStoreFactory(); + const mockContext: LDClientContext = { + basicConfiguration: { + logger: mockLogger, + }, + } as any; + + const store = factory(mockContext); + expect(store).toBeInstanceOf(MongoDBFeatureStore); + + store.close(); + }); + + it('uses custom cache TTL when provided', () => { + const options = { + cacheTTL: 120, + }; + + const factory = MongoDBFeatureStoreFactory(options); + const mockContext: LDClientContext = { + basicConfiguration: { + logger: undefined, + }, + } as any; + + const store = factory(mockContext); + expect(store).toBeInstanceOf(MongoDBFeatureStore); + + store.close(); + }); +}); diff --git a/packages/store/node-server-sdk-mongodb/jest.config.js b/packages/store/node-server-sdk-mongodb/jest.config.js new file mode 100644 index 000000000..f106eb3bc --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/jest.config.js @@ -0,0 +1,7 @@ +module.exports = { + transform: { '^.+\\.ts?$': 'ts-jest' }, + testMatch: ['**/__tests__/**/*test.ts?(x)'], + testEnvironment: 'node', + moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'], + collectCoverageFrom: ['src/**/*.ts'], +}; diff --git a/packages/store/node-server-sdk-mongodb/package.json b/packages/store/node-server-sdk-mongodb/package.json new file mode 100644 index 000000000..a9454ff0a --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/package.json @@ -0,0 +1,55 @@ +{ + "name": "@launchdarkly/node-server-sdk-mongodb", + "version": "1.0.0", + "description": "MongoDB-backed feature store for the LaunchDarkly Server-Side SDK for Node.js", + "homepage": "https://github.com/launchdarkly/js-core/tree/main/packages/store/node-server-sdk-mongodb", + "repository": { + "type": "git", + "url": "https://github.com/launchdarkly/js-core.git" + }, + "type": "commonjs", + "main": "./dist/src/index.js", + "types": "./dist/src/index.d.ts", + "files": [ + "dist" + ], + "keywords": [ + "launchdarkly", + "analytics", + "client", + "mongodb" + ], + "license": "Apache-2.0", + "scripts": { + "clean": "npx tsc --build --clean", + "test": "npx jest --ci --runInBand", + "build": "npx tsc", + "lint": "npx eslint . 
--ext .ts", + "lint:fix": "yarn run lint --fix" + }, + "dependencies": { + "mongodb": "^6.0.0" + }, + "peerDependencies": { + "@launchdarkly/node-server-sdk": ">=9.4.3" + }, + "devDependencies": { + "@launchdarkly/node-server-sdk": "9.10.2", + "@trivago/prettier-plugin-sort-imports": "^4.1.1", + "@types/jest": "^29.4.0", + "@typescript-eslint/eslint-plugin": "^6.20.0", + "@typescript-eslint/parser": "^6.20.0", + "eslint": "^8.45.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-airbnb-typescript": "^17.1.0", + "eslint-config-prettier": "^8.8.0", + "eslint-plugin-import": "^2.27.5", + "eslint-plugin-prettier": "^5.0.0", + "jest": "^29.5.0", + "launchdarkly-js-test-helpers": "^2.2.0", + "prettier": "^3.0.0", + "ts-jest": "^29.0.5", + "typedoc": "0.25.0", + "typescript": "5.1.6" + } +} diff --git a/packages/store/node-server-sdk-mongodb/src/LDMongoDBOptions.ts b/packages/store/node-server-sdk-mongodb/src/LDMongoDBOptions.ts new file mode 100644 index 000000000..ab06423c0 --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/src/LDMongoDBOptions.ts @@ -0,0 +1,51 @@ +import { MongoClientOptions } from 'mongodb'; + +/** + * Configuration options for the MongoDB big segment store. + */ +export default interface LDMongoDBOptions { + /** + * The MongoDB connection URI. If not provided, defaults to 'mongodb://localhost:27017'. + */ + uri?: string; + + /** + * The MongoDB database name. If not provided, defaults to 'launchdarkly'. + */ + database?: string; + + /** + * A prefix string to prepend to all MongoDB collection names. If not provided, + * collections will use their default names without a prefix. + */ + prefix?: string; + + /** + * The maximum time to wait for a connection to be established, in milliseconds. + * If not provided, defaults to 10000 (10 seconds). + */ + connectTimeoutMS?: number; + + /** + * Additional MongoDB client options. These will be merged with the default options. + */ + clientOptions?: MongoClientOptions; + + /** + * The number of connection retries to attempt before giving up. + * If not provided, defaults to 3. + */ + maxRetries?: number; + + /** + * The time to wait between connection retries, in milliseconds. + * If not provided, defaults to 1000 (1 second). + */ + retryDelayMS?: number; + + /** + * The cache time-to-live (TTL) in seconds. If not provided, defaults to 30 seconds. + * Set to 0 to disable caching. 
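+   * This TTL applies to the feature store's cache; Big Segment caching is configured through the SDK's `bigSegments` options.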
+   */
+  cacheTTL?: number;
+}
diff --git a/packages/store/node-server-sdk-mongodb/src/MongoDBBigSegmentStore.ts b/packages/store/node-server-sdk-mongodb/src/MongoDBBigSegmentStore.ts
new file mode 100644
index 000000000..109bce642
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/src/MongoDBBigSegmentStore.ts
@@ -0,0 +1,153 @@
+import { interfaces, LDLogger } from '@launchdarkly/node-server-sdk';
+
+import LDMongoDBOptions from './LDMongoDBOptions';
+import MongoDBClientState from './MongoDBClientState';
+
+/**
+ * @internal
+ */
+export const COLLECTION_BIG_SEGMENTS_METADATA = 'big_segments_metadata';
+
+/**
+ * @internal
+ */
+export const COLLECTION_BIG_SEGMENTS_USER = 'big_segments_user';
+
+/**
+ * @internal
+ */
+export const METADATA_KEY = 'big_segments_metadata';
+
+/**
+ * @internal
+ */
+export const FIELD_LAST_UP_TO_DATE = 'lastUpToDate';
+
+/**
+ * @internal
+ */
+export const FIELD_USER_HASH = 'userHash';
+
+/**
+ * @internal
+ */
+export const FIELD_INCLUDED = 'included';
+
+/**
+ * @internal
+ */
+export const FIELD_EXCLUDED = 'excluded';
+
+// The field-name constants are declared above the document interfaces so that the
+// computed property names below can reference them.
+/**
+ * @internal
+ */
+interface MetadataDocument {
+  _id: string;
+  [FIELD_LAST_UP_TO_DATE]?: number;
+}
+
+/**
+ * @internal
+ */
+interface UserDocument {
+  _id?: string;
+  [FIELD_USER_HASH]: string;
+  [FIELD_INCLUDED]?: string[];
+  [FIELD_EXCLUDED]?: string[];
+}
+
+/**
+ * A MongoDB implementation of the LaunchDarkly BigSegmentStore interface.
+ *
+ * This store manages big segment data in MongoDB collections. It uses two collections:
+ * - One for metadata about when the big segments were last synchronized
+ * - One for user membership data (which segments include/exclude specific users)
+ */
+export default class MongoDBBigSegmentStore implements interfaces.BigSegmentStore {
+  private _state: MongoDBClientState;
+
+  /**
+   * Creates a new MongoDB big segment store.
+   *
+   * @param options Optional MongoDB configuration options
+   * @param _logger Optional logger instance, used to report store errors
+   */
+  constructor(options?: LDMongoDBOptions, private readonly _logger?: LDLogger) {
+    this._state = new MongoDBClientState(options);
+  }
+
+  /**
+   * Retrieves metadata about the big segments store, specifically the last update timestamp.
+   *
+   * @returns Promise resolving to a metadata object containing the lastUpToDate timestamp,
+   * or an empty object if no metadata exists
+   */
+  async getMetadata(): Promise<interfaces.BigSegmentStoreMetadata | undefined> {
+    try {
+      const metadataCollection = await this._state.getCollection(COLLECTION_BIG_SEGMENTS_METADATA);
+
+      const metadata = await metadataCollection.findOne({ _id: METADATA_KEY });
+
+      if (metadata && metadata[FIELD_LAST_UP_TO_DATE]) {
+        return { lastUpToDate: metadata[FIELD_LAST_UP_TO_DATE] };
+      }
+
+      return {};
+    } catch (error) {
+      this._logger?.error(`MongoDB big segment store getMetadata error: ${error}`);
+      throw error;
+    }
+  }
+
+  /**
+   * Retrieves the big segment membership information for a specific user.
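+   * For example, a stored user document of `{ userHash: 'h1', included: ['segA'], excluded: ['segB'] }`
+   * yields the membership `{ segA: true, segB: false }`.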
+   *
+   * @param userHash The hashed user key to look up
+   * @returns Promise resolving to a membership object (segment refs mapped to boolean
+   * inclusion status), or undefined if the user is not found
+   */
+  async getUserMembership(
+    userHash: string,
+  ): Promise<interfaces.BigSegmentStoreMembership | undefined> {
+    try {
+      const userCollection = await this._state.getCollection(COLLECTION_BIG_SEGMENTS_USER);
+
+      const userData = await userCollection.findOne({ [FIELD_USER_HASH]: userHash });
+
+      if (!userData) {
+        return undefined;
+      }
+
+      const membership: interfaces.BigSegmentStoreMembership = {};
+
+      // Process excluded segment references first...
+      if (userData[FIELD_EXCLUDED] && Array.isArray(userData[FIELD_EXCLUDED])) {
+        userData[FIELD_EXCLUDED].forEach((segmentRef: string) => {
+          membership[segmentRef] = false;
+        });
+      }
+
+      // ...then included references, so that inclusion overrides exclusion
+      if (userData[FIELD_INCLUDED] && Array.isArray(userData[FIELD_INCLUDED])) {
+        userData[FIELD_INCLUDED].forEach((segmentRef: string) => {
+          membership[segmentRef] = true;
+        });
+      }
+
+      // Return undefined if no membership data was found
+      if (Object.keys(membership).length === 0) {
+        return undefined;
+      }
+
+      return membership;
+    } catch (error) {
+      this._logger?.error(`MongoDB big segment store getUserMembership error: ${error}`);
+      throw error;
+    }
+  }
+
+  /**
+   * Closes the connection to MongoDB.
+   */
+  close(): void {
+    this._state.close();
+  }
+}
diff --git a/packages/store/node-server-sdk-mongodb/src/MongoDBBigSegmentStoreFactory.ts b/packages/store/node-server-sdk-mongodb/src/MongoDBBigSegmentStoreFactory.ts
new file mode 100644
index 000000000..36ca25e43
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/src/MongoDBBigSegmentStoreFactory.ts
@@ -0,0 +1,38 @@
+import { interfaces, LDClientContext } from '@launchdarkly/node-server-sdk';
+
+import LDMongoDBOptions from './LDMongoDBOptions';
+import MongoDBBigSegmentStore from './MongoDBBigSegmentStore';
+
+/**
+ * Configures a big segment store factory backed by a MongoDB instance.
+ *
+ * "Big segments" are a specific type of user segment. For more information, read the
+ * LaunchDarkly documentation about user segments: https://docs.launchdarkly.com/home/users/segments
+ *
+ * @param options Optional MongoDB configuration options including connection URI, database name,
+ * collection prefix, and other MongoDB-specific settings.
+ *
+ * @returns A function which creates big segment stores based on the provided config.
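+ *
+ * The SDK calls the returned function during client initialization; application code
+ * does not normally invoke it directly.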
+ *
+ * @example
+ * ```typescript
+ * import { init } from '@launchdarkly/node-server-sdk';
+ * import { MongoDBBigSegmentStore } from '@launchdarkly/node-server-sdk-mongodb';
+ *
+ * const client = init('your-sdk-key', {
+ *   bigSegments: {
+ *     store: MongoDBBigSegmentStore({
+ *       uri: 'mongodb://localhost:27017',
+ *       database: 'launchdarkly',
+ *       prefix: 'ld_'
+ *     })
+ *   }
+ * });
+ * ```
+ */
+export default function MongoDBBigSegmentStoreFactory(
+  options?: LDMongoDBOptions,
+): (config: LDClientContext) => interfaces.BigSegmentStore {
+  return (config: LDClientContext) =>
+    new MongoDBBigSegmentStore(options, config?.basicConfiguration.logger);
+}
diff --git a/packages/store/node-server-sdk-mongodb/src/MongoDBClientState.ts b/packages/store/node-server-sdk-mongodb/src/MongoDBClientState.ts
new file mode 100644
index 000000000..27885f45a
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/src/MongoDBClientState.ts
@@ -0,0 +1,129 @@
+import { Collection, Db, Document, MongoClient, MongoServerError } from 'mongodb';
+
+import LDMongoDBOptions from './LDMongoDBOptions';
+
+/**
+ * Manages the MongoDB client state and connections.
+ * @internal
+ */
+export default class MongoDBClientState {
+  private _client: MongoClient | undefined;
+
+  private _db: Db | undefined;
+
+  private _isConnected: boolean = false;
+
+  private _prefix: string;
+
+  private readonly _maxRetries: number;
+
+  private readonly _retryDelayMS: number;
+
+  constructor(private readonly _options?: LDMongoDBOptions) {
+    this._prefix = _options?.prefix ?? '';
+    this._maxRetries = _options?.maxRetries ?? 3;
+    this._retryDelayMS = _options?.retryDelayMS ?? 1000;
+  }
+
+  /**
+   * Gets the prefixed collection name.
+   */
+  public prefixedCollection(name: string): string {
+    return this._prefix ? `${this._prefix}${name}` : name;
+  }
+
+  /**
+   * Gets the MongoDB database instance, connecting if necessary.
+   */
+  public async getDatabase(): Promise<Db> {
+    if (!this._isConnected) {
+      await this._connect();
+    }
+    return this._db!;
+  }
+
+  /**
+   * Gets a MongoDB collection by name.
+   */
+  public async getCollection(name: string): Promise<Collection<Document>> {
+    const db = await this.getDatabase();
+    return db.collection(this.prefixedCollection(name));
+  }
+
+  /**
+   * Closes the MongoDB connection.
+   */
+  public close(): void {
+    if (this._client) {
+      this._client.close();
+      this._client = undefined;
+      this._db = undefined;
+      this._isConnected = false;
+    }
+  }
+
+  /**
+   * Connects to MongoDB with retry logic.
+   */
+  private async _connect(): Promise<void> {
+    const uri = this._options?.uri ?? 'mongodb://localhost:27017';
+    const databaseName = this._options?.database ?? 'launchdarkly';
+    const connectTimeoutMS = this._options?.connectTimeoutMS ?? 10000;
+
+    const clientOptions = {
+      connectTimeoutMS,
+      serverSelectionTimeoutMS: connectTimeoutMS,
+      ...this._options?.clientOptions,
+    };
+
+    let lastError: Error | undefined;
+
+    for (let attempt = 0; attempt <= this._maxRetries; attempt++) {
+      try {
+        this._client = new MongoClient(uri, clientOptions);
+        await this._client.connect();
+        this._db = this._client.db(databaseName);
+        this._isConnected = true;
+        return;
+      } catch (error) {
+        lastError = error as Error;
+        this.close();
+
+        if (attempt < this._maxRetries) {
+          await this._delay(this._retryDelayMS);
+        }
+      }
+    }
+
+    throw new Error(
+      `Failed to connect to MongoDB after ${this._maxRetries + 1} attempts: ${lastError?.message}`,
+    );
+  }
+
+  /**
+   * Delays execution for the specified number of milliseconds.
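+   * Used to pause between connection retry attempts.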
+   */
+  private async _delay(ms: number): Promise<void> {
+    return new Promise((resolve) => {
+      setTimeout(resolve, ms);
+    });
+  }
+
+  /**
+   * Checks if the error is a transient error that should be retried.
+   * (Currently informational only; `_connect` retries on any connection error.)
+   */
+  private _isTransientError(error: any): boolean {
+    if (error instanceof MongoServerError) {
+      // Network errors, timeouts, and certain server errors are retryable
+      return (
+        error.code === 11000 || // Duplicate key error (could be temporary in some cases)
+        error.code === 50 || // ExceededTimeLimit
+        error.code === 89 || // NetworkTimeout
+        error.message.includes('network') ||
+        error.message.includes('timeout') ||
+        error.message.includes('connection')
+      );
+    }
+    return false;
+  }
+}
diff --git a/packages/store/node-server-sdk-mongodb/src/MongoDBCore.ts b/packages/store/node-server-sdk-mongodb/src/MongoDBCore.ts
new file mode 100644
index 000000000..b3be45f8e
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/src/MongoDBCore.ts
@@ -0,0 +1,295 @@
+import { interfaces, LDLogger } from '@launchdarkly/node-server-sdk';
+
+import MongoDBClientState from './MongoDBClientState';
+
+/**
+ * @internal
+ */
+interface FeatureDocument {
+  _id: string;
+  namespace: string;
+  version: number;
+  item?: string;
+  deleted?: boolean;
+}
+
+/**
+ * @internal
+ */
+interface InitializedDocument {
+  _id: string;
+  initialized: boolean;
+  timestamp: Date;
+}
+
+/**
+ * @internal
+ */
+export const COLLECTION_FEATURES = 'features';
+
+/**
+ * @internal
+ */
+export const COLLECTION_SEGMENTS = 'segments';
+
+/**
+ * @internal
+ */
+export const COLLECTION_INITIALIZED = 'initialized';
+
+/**
+ * @internal
+ */
+export const INITIALIZED_TOKEN = '$inited';
+
+/**
+ * Internal implementation of the MongoDB feature store.
+ *
+ * Implementation notes:
+ *
+ * Feature flags, segments, and any other kind of entity the LaunchDarkly client may wish
+ * to store are kept in separate collections based on their namespace (e.g., "features",
+ * "segments"). Each document contains:
+ * - `_id`: The key of the item
+ * - `namespace`: The namespace (for consistency and queries)
+ * - `version`: The version number (for optimistic updates)
+ * - `item`: The serialized JSON data (when not deleted)
+ * - `deleted`: Boolean flag indicating if the item is deleted
+ *
+ * The initialization state is tracked using a special document in the "initialized" collection.
+ *
+ * MongoDB's document-based storage allows us to store the entire serialized item as a single
+ * field, similar to DynamoDB but with more flexible querying capabilities.
+ *
+ * For upsert operations, we use MongoDB's conditional updates with version checking to ensure
+ * consistency without requiring transactions.
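+ *
+ * For example, a flag stored under the key "my-flag" in the "features" collection looks
+ * like this (version number illustrative):
+ *
+ *     { "_id": "my-flag", "namespace": "features", "version": 3, "item": "<serialized JSON>" }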
+ *
+ * @internal
+ */
+export default class MongoDBCore implements interfaces.PersistentDataStore {
+  private readonly _initedKey: string;
+
+  constructor(
+    private readonly _state: MongoDBClientState,
+    private readonly _logger?: LDLogger,
+  ) {
+    this._initedKey = INITIALIZED_TOKEN;
+  }
+
+  async init(
+    allData: interfaces.KindKeyedStore<interfaces.PersistentStoreDataKind>,
+    callback: () => void,
+  ): Promise<void> {
+    try {
+      // Read existing items for all namespaces to determine what to delete
      const existingItems = new Set<string>();
+
+      for (const collection of allData) {
+        const { namespace } = collection.key;
+        const mongoCollection = await this._state.getCollection(namespace);
+        const existingDocs = await mongoCollection.find({}, { projection: { _id: 1 } }).toArray();
+
+        for (const doc of existingDocs) {
+          existingItems.add(`${namespace}:${doc._id}`);
+        }
+      }
+
+      // Process new data and mark items that should remain
+      const itemsToKeep = new Set<string>();
+
+      for (const collection of allData) {
+        const { namespace } = collection.key;
+        const items = collection.item;
+        const mongoCollection = await this._state.getCollection(namespace);
+
+        // Prepare bulk operations for this namespace
+        const bulkOps: any[] = [];
+
+        for (const keyedItem of items) {
+          const itemKey = `${namespace}:${keyedItem.key}`;
+          itemsToKeep.add(itemKey);
+
+          const doc: FeatureDocument = {
+            _id: keyedItem.key,
+            namespace,
+            version: keyedItem.item.version,
+          };
+
+          if (keyedItem.item.deleted) {
+            doc.deleted = true;
+          } else if (keyedItem.item.serializedItem) {
+            doc.item = keyedItem.item.serializedItem;
+          }
+
+          bulkOps.push({
+            replaceOne: {
+              filter: { _id: keyedItem.key },
+              replacement: doc,
+              upsert: true,
+            },
+          });
+        }
+
+        // Execute bulk operations for this namespace
+        if (bulkOps.length > 0) {
+          await mongoCollection.bulkWrite(bulkOps, { ordered: false });
+        }
+      }
+
+      // Delete items that are no longer present in the new data
+      for (const collection of allData) {
+        const { namespace } = collection.key;
+        const mongoCollection = await this._state.getCollection(namespace);
+
+        const itemsToDelete: string[] = [];
+        for (const existingItem of existingItems) {
+          if (existingItem.startsWith(`${namespace}:`) && !itemsToKeep.has(existingItem)) {
+            itemsToDelete.push(existingItem.substring(namespace.length + 1));
+          }
+        }
+
+        if (itemsToDelete.length > 0) {
+          await mongoCollection.deleteMany({ _id: { $in: itemsToDelete } });
+        }
+      }
+
+      // Set the initialized flag
+      const initCollection = await this._state.getCollection(COLLECTION_INITIALIZED);
+      await initCollection.replaceOne(
+        { _id: this._initedKey },
+        { initialized: true, timestamp: new Date() } as any,
+        { upsert: true },
+      );
+    } catch (error) {
+      this._logger?.error(`Error initializing MongoDB store: ${error}`);
+    }
+
+    callback();
+  }
+
+  async get(
+    kind: interfaces.PersistentStoreDataKind,
+    key: string,
+    callback: (descriptor: interfaces.SerializedItemDescriptor | undefined) => void,
+  ): Promise<void> {
+    try {
+      const collection = await this._state.getCollection(kind.namespace);
+      const doc = await collection.findOne({ _id: key });
+
+      if (doc) {
+        const descriptor: interfaces.SerializedItemDescriptor = {
+          version: doc.version || 0,
+          deleted: !!doc.deleted,
+          serializedItem: doc.item,
+        };
+        callback(descriptor);
+      } else {
+        callback(undefined);
+      }
+    } catch (error) {
+      this._logger?.error(`Error reading ${kind.namespace}:${key}: ${error}`);
+      callback(undefined);
+    }
+  }
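+
+  // getAll always queries MongoDB directly; any in-memory caching is provided by the
+  // PersistentDataStoreWrapper that wraps this core store.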
+  async getAll(
+    kind: interfaces.PersistentStoreDataKind,
+    callback: (
+      descriptors: interfaces.KeyedItem<string, interfaces.SerializedItemDescriptor>[] | undefined,
+    ) => void,
+  ): Promise<void> {
+    try {
+      const collection = await this._state.getCollection(kind.namespace);
+      const docs = await collection.find({ deleted: { $ne: true } }).toArray();
+
+      const results: interfaces.KeyedItem<string, interfaces.SerializedItemDescriptor>[] = [];
+
+      for (const doc of docs) {
+        results.push({
+          key: doc._id,
+          item: {
+            version: doc.version || 0,
+            deleted: false,
+            serializedItem: doc.item,
+          },
+        });
+      }
+
+      callback(results);
+    } catch (error) {
+      this._logger?.error(`Error reading all from ${kind.namespace}: ${error}`);
+      callback(undefined);
+    }
+  }
+
+  async upsert(
+    kind: interfaces.PersistentStoreDataKind,
+    key: string,
+    descriptor: interfaces.SerializedItemDescriptor,
+    callback: (
+      err?: Error | undefined,
+      updatedDescriptor?: interfaces.SerializedItemDescriptor | undefined,
+    ) => void,
+  ): Promise<void> {
+    try {
+      const collection = await this._state.getCollection(kind.namespace);
+
+      const doc: FeatureDocument = {
+        _id: key,
+        namespace: kind.namespace,
+        version: descriptor.version,
+      };
+
+      if (descriptor.deleted) {
+        doc.deleted = true;
+      } else if (descriptor.serializedItem) {
+        doc.item = descriptor.serializedItem;
+      }
+
+      // Use optimistic concurrency control - only update if the version is higher
+      const result = await collection.replaceOne(
+        {
+          _id: key,
+          $or: [
+            { version: { $exists: false } },
+            { version: { $lt: descriptor.version } },
+          ],
+        },
+        doc,
+        { upsert: true },
+      );
+
+      if (result.matchedCount > 0 || result.upsertedCount > 0) {
+        // Successfully updated or inserted
+        callback(undefined, descriptor);
+      } else {
+        // Version conflict - read the current version
+        this.get(kind, key, (currentDescriptor) => {
+          callback(undefined, currentDescriptor);
+        });
+      }
+    } catch (error) {
+      // When the version check fails for an existing document, the upsert attempts an
+      // insert with a duplicate _id, which MongoDB rejects with error code 11000.
+      // Treat that as a version conflict rather than a hard failure.
+      if ((error as { code?: number }).code === 11000) {
+        this.get(kind, key, (currentDescriptor) => {
+          callback(undefined, currentDescriptor);
+        });
+        return;
+      }
+      callback(error as Error, undefined);
+    }
+  }
+
+  async initialized(callback: (isInitialized: boolean) => void): Promise<void> {
+    try {
+      const collection = await this._state.getCollection(COLLECTION_INITIALIZED);
+      const doc = await collection.findOne({ _id: this._initedKey });
+      callback(!!doc?.initialized);
+    } catch (error) {
+      this._logger?.error(`Error checking initialization status: ${error}`);
+      callback(false);
+    }
+  }
+
+  close(): void {
+    this._state.close();
+  }
+
+  getDescription(): string {
+    return 'MongoDB';
+  }
+}
diff --git a/packages/store/node-server-sdk-mongodb/src/MongoDBFeatureStore.ts b/packages/store/node-server-sdk-mongodb/src/MongoDBFeatureStore.ts
new file mode 100644
index 000000000..2d0bf1cc3
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/src/MongoDBFeatureStore.ts
@@ -0,0 +1,81 @@
+import {
+  interfaces,
+  LDFeatureStore,
+  LDFeatureStoreDataStorage,
+  LDFeatureStoreItem,
+  LDFeatureStoreKindData,
+  LDKeyedFeatureStoreItem,
+  LDLogger,
+  PersistentDataStoreWrapper,
+} from '@launchdarkly/node-server-sdk';
+
+import LDMongoDBOptions from './LDMongoDBOptions';
+import MongoDBClientState from './MongoDBClientState';
+import MongoDBCore from './MongoDBCore';
+import TtlFromOptions from './TtlFromOptions';
+
+/**
+ * Integration between the LaunchDarkly SDK and MongoDB.
+ *
+ * This feature store implementation stores LaunchDarkly feature flags and segments
+ * in MongoDB collections, providing persistent storage for your feature flag data.
+ *
+ * Features:
+ * - Automatic collection management based on data kinds (features, segments, etc.)
+ * - Optimistic concurrency control using version numbers + * - Configurable caching with TTL support + * - Connection pooling and retry logic + * - Proper cleanup of deleted items during initialization + */ +export default class MongoDBFeatureStore implements LDFeatureStore { + private _wrapper: PersistentDataStoreWrapper; + + /** + * Creates a new MongoDB feature store. + * + * @param options MongoDB configuration options + * @param logger Optional logger instance + */ + constructor(options?: LDMongoDBOptions, logger?: LDLogger) { + this._wrapper = new PersistentDataStoreWrapper( + new MongoDBCore(new MongoDBClientState(options), logger), + TtlFromOptions(options), + ); + } + + get( + kind: interfaces.DataKind, + key: string, + callback: (res: LDFeatureStoreItem | null) => void, + ): void { + this._wrapper.get(kind, key, callback); + } + + all(kind: interfaces.DataKind, callback: (res: LDFeatureStoreKindData) => void): void { + this._wrapper.all(kind, callback); + } + + init(allData: LDFeatureStoreDataStorage, callback: () => void): void { + this._wrapper.init(allData, callback); + } + + delete(kind: interfaces.DataKind, key: string, version: number, callback: () => void): void { + this._wrapper.delete(kind, key, version, callback); + } + + upsert(kind: interfaces.DataKind, data: LDKeyedFeatureStoreItem, callback: () => void): void { + this._wrapper.upsert(kind, data, callback); + } + + initialized(callback: (isInitialized: boolean) => void): void { + this._wrapper.initialized(callback); + } + + close(): void { + this._wrapper.close(); + } + + getDescription?(): string { + return this._wrapper.getDescription(); + } +} diff --git a/packages/store/node-server-sdk-mongodb/src/MongoDBFeatureStoreFactory.ts b/packages/store/node-server-sdk-mongodb/src/MongoDBFeatureStoreFactory.ts new file mode 100644 index 000000000..ccf3d7b31 --- /dev/null +++ b/packages/store/node-server-sdk-mongodb/src/MongoDBFeatureStoreFactory.ts @@ -0,0 +1,35 @@ +import { LDClientContext } from '@launchdarkly/node-server-sdk'; + +import LDMongoDBOptions from './LDMongoDBOptions'; +import MongoDBFeatureStore from './MongoDBFeatureStore'; + +/** + * Configures a feature store backed by a MongoDB instance. + * + * For more details about how and why you can use a persistent feature store, see + * the [Using MongoDB as a persistent feature store](https://docs.launchdarkly.com/sdk/features/storing-data) documentation. + * + * @example + * ```typescript + * import { init } from '@launchdarkly/node-server-sdk'; + * import { MongoDBFeatureStore } from '@launchdarkly/node-server-sdk-mongodb'; + * + * const client = init('your-sdk-key', { + * featureStore: MongoDBFeatureStore({ + * uri: 'mongodb://localhost:27017', + * database: 'launchdarkly', + * prefix: 'ld_', + * cacheTTL: 30 + * }) + * }); + * ``` + * + * @param options Optional MongoDB configuration options including connection URI, database name, + * collection prefix, cache TTL, and other MongoDB-specific settings. + * + * @returns A factory function suitable for use in the SDK configuration (LDOptions). 
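+ *
+ * If `cacheTTL` is not specified, flag data is cached in memory for 30 seconds;
+ * a `cacheTTL` of 0 disables caching.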
+ */
+export default function MongoDBFeatureStoreFactory(options?: LDMongoDBOptions) {
+  return (config: LDClientContext) =>
+    new MongoDBFeatureStore(options, config.basicConfiguration.logger);
+}
diff --git a/packages/store/node-server-sdk-mongodb/src/TtlFromOptions.ts b/packages/store/node-server-sdk-mongodb/src/TtlFromOptions.ts
new file mode 100644
index 000000000..bf7b71b25
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/src/TtlFromOptions.ts
@@ -0,0 +1,21 @@
+import LDMongoDBOptions from './LDMongoDBOptions';
+
+/**
+ * The default TTL cache time in seconds.
+ */
+const DEFAULT_CACHE_TTL_S = 30;
+
+/**
+ * Get a cache TTL based on LDMongoDBOptions. If the TTL is not specified, then
+ * the default of 30 seconds will be used.
+ * @param options The options to get a TTL for.
+ * @returns The TTL, in seconds.
+ * @internal
+ */
+export default function TtlFromOptions(options?: LDMongoDBOptions): number {
+  // 0 is a valid option. So we need a null/undefined check.
+  if (options?.cacheTTL === undefined || options.cacheTTL === null) {
+    return DEFAULT_CACHE_TTL_S;
+  }
+  return options.cacheTTL;
+}
diff --git a/packages/store/node-server-sdk-mongodb/src/index.ts b/packages/store/node-server-sdk-mongodb/src/index.ts
new file mode 100644
index 000000000..d1e3b255f
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/src/index.ts
@@ -0,0 +1,7 @@
+// Exporting the factories without the 'Factory' suffix. This keeps them in line with
+// previous store versions. The differentiation between the factory and the store
+// is not critical for consuming the SDK.
+export { default as MongoDBFeatureStore } from './MongoDBFeatureStoreFactory';
+export { default as MongoDBBigSegmentStore } from './MongoDBBigSegmentStoreFactory';
+
+export { default as LDMongoDBOptions } from './LDMongoDBOptions';
diff --git a/packages/store/node-server-sdk-mongodb/tsconfig.eslint.json b/packages/store/node-server-sdk-mongodb/tsconfig.eslint.json
new file mode 100644
index 000000000..afc791b02
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/tsconfig.eslint.json
@@ -0,0 +1,8 @@
+{
+  "extends": "../../../tsconfig.eslint.json",
+  "include": ["src/**/*", "__tests__/**/*"],
+  "exclude": ["dist/**/*"]
+}
diff --git a/packages/store/node-server-sdk-mongodb/tsconfig.json b/packages/store/node-server-sdk-mongodb/tsconfig.json
new file mode 100644
index 000000000..31f99222e
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/tsconfig.json
@@ -0,0 +1,22 @@
+{
+  "compilerOptions": {
+    // Uses "." so it can load package.json.
+    "rootDir": ".",
+    "outDir": "dist",
+    "target": "es2017",
+    "lib": ["es2017"],
+    "module": "commonjs",
+    "strict": true,
+    "noImplicitOverride": true,
+    // Allows default imports from CommonJS dependencies.
+    "allowSyntheticDefaultImports": true,
+    "sourceMap": true,
+    "declaration": true,
+    "declarationMap": true, // enables importers to jump to source
+    "resolveJsonModule": true,
+    "stripInternal": true,
+    "moduleResolution": "node",
+    "skipLibCheck": true
+  },
+  "exclude": ["**/*.test.ts", "dist", "node_modules", "__tests__"]
+}
diff --git a/packages/store/node-server-sdk-mongodb/tsconfig.ref.json b/packages/store/node-server-sdk-mongodb/tsconfig.ref.json
new file mode 100644
index 000000000..0c86b2c55
--- /dev/null
+++ b/packages/store/node-server-sdk-mongodb/tsconfig.ref.json
@@ -0,0 +1,7 @@
+{
+  "extends": "./tsconfig.json",
+  "include": ["src/**/*"],
+  "compilerOptions": {
+    "composite": true
+  }
+}