From 2e24e3206902105b013085ae2b9556ff1fb7ea6f Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sun, 8 Jun 2025 14:35:17 -0400 Subject: [PATCH 1/6] feat: memories update --- .../changelog/content/config_v1.2.7.mdx | 12 + pages/changelog/config_v1.2.7.mdx | 13 ++ .../librechat_yaml/object_structure/_meta.ts | 1 + .../object_structure/config.mdx | 25 +- .../object_structure/memory.mdx | 216 ++++++++++++++++++ pages/docs/features/_meta.ts | 1 + pages/docs/features/index.mdx | 8 + pages/docs/features/memory.mdx | 165 +++++++++++++ 8 files changed, 440 insertions(+), 1 deletion(-) create mode 100644 components/changelog/content/config_v1.2.7.mdx create mode 100644 pages/changelog/config_v1.2.7.mdx create mode 100644 pages/docs/configuration/librechat_yaml/object_structure/memory.mdx create mode 100644 pages/docs/features/memory.mdx diff --git a/components/changelog/content/config_v1.2.7.mdx b/components/changelog/content/config_v1.2.7.mdx new file mode 100644 index 000000000..b64b1acb8 --- /dev/null +++ b/components/changelog/content/config_v1.2.7.mdx @@ -0,0 +1,12 @@ +- Added `memory` configuration to control memory functionality for conversations + - Configure memory persistence and personalization settings + - Set token limits and message window sizes for memory context + - Configure agents for memory processing with provider-specific settings + - Supports both predefined agents (by ID) and custom agent configurations + - See [Memory Configuration](/docs/configuration/librechat_yaml/object_structure/memory) for details + +- Added memory-related capabilities to conversation processing + - Enables conversation memory and personalization features + - Configurable token limits and context window management + - Integration with agent-based memory processing + - Defaults to personalization enabled with a 5-message window size \ No newline at end of file diff --git a/pages/changelog/config_v1.2.7.mdx b/pages/changelog/config_v1.2.7.mdx new file mode 100644 index 000000000..20f0a54de --- /dev/null +++ b/pages/changelog/config_v1.2.7.mdx @@ -0,0 +1,13 @@ +--- +date: 2025/1/11 +title: ⚙️ Config v1.2.7 +--- + +import { ChangelogHeader } from '@/components/changelog/ChangelogHeader' +import Content from '@/components/changelog/content/config_v1.2.7.mdx' + + + +--- + + \ No newline at end of file diff --git a/pages/docs/configuration/librechat_yaml/object_structure/_meta.ts b/pages/docs/configuration/librechat_yaml/object_structure/_meta.ts index 84d6fe146..8801a20d2 100644 --- a/pages/docs/configuration/librechat_yaml/object_structure/_meta.ts +++ b/pages/docs/configuration/librechat_yaml/object_structure/_meta.ts @@ -6,6 +6,7 @@ export default { model_specs: 'Model Specs', registration: 'Registration', balance: 'Balance', + memory: 'Memory', agents: 'Agents', mcp_servers: 'MCP Servers', aws_bedrock: 'AWS Bedrock', diff --git a/pages/docs/configuration/librechat_yaml/object_structure/config.mdx b/pages/docs/configuration/librechat_yaml/object_structure/config.mdx index aa5274ebc..ba332f507 100644 --- a/pages/docs/configuration/librechat_yaml/object_structure/config.mdx +++ b/pages/docs/configuration/librechat_yaml/object_structure/config.mdx @@ -7,7 +7,7 @@ @@ -258,6 +258,29 @@ see also: - [alloweddomains](/docs/configuration/librechat_yaml/object_structure/registration#alloweddomains), - [Registration Object Structure](/docs/configuration/librechat_yaml/object_structure/registration) +## memory + +**Key:** + + +**Subkeys:** + + +see: [Memory Object 
Structure](/docs/configuration/librechat_yaml/object_structure/memory) + ## actions **Key:** diff --git a/pages/docs/configuration/librechat_yaml/object_structure/memory.mdx b/pages/docs/configuration/librechat_yaml/object_structure/memory.mdx new file mode 100644 index 000000000..5208b0725 --- /dev/null +++ b/pages/docs/configuration/librechat_yaml/object_structure/memory.mdx @@ -0,0 +1,216 @@ +# Memory Configuration + +## Overview + +The `memory` object allows you to configure conversation memory and personalization features for the application. This configuration controls how the system remembers and personalizes conversations, including token limits, message context windows, and agent-based memory processing. + +## Example + +```yaml filename="memory" +memory: + disabled: false + validKeys: ["user_preferences", "conversation_context", "personal_info"] + tokenLimit: 2000 + personalize: true + messageWindowSize: 5 + agent: + provider: "openai" + model: "gpt-4" + instructions: "You are a helpful assistant that remembers user preferences and context." + model_parameters: + temperature: 0.7 + max_tokens: 1000 +``` + +## disabled + + + +**Default:** `false` + +```yaml filename="memory / disabled" +memory: + disabled: true +``` + +## validKeys + + + +**Default:** No restriction (all keys are valid) + +```yaml filename="memory / validKeys" +memory: + validKeys: + - "user_preferences" + - "conversation_context" + - "personal_information" + - "learned_facts" +``` + +## tokenLimit + + + +**Default:** No limit + +```yaml filename="memory / tokenLimit" +memory: + tokenLimit: 2000 +``` + +## personalize + + + +**Default:** `true` + +```yaml filename="memory / personalize" +memory: + personalize: false +``` + +## messageWindowSize + + + +**Default:** `5` + +```yaml filename="memory / messageWindowSize" +memory: + messageWindowSize: 10 +``` + +## agent + + + +The `agent` field supports two different configuration formats: + +### Agent by ID + +When you have a pre-configured agent, you can reference it by its ID: + +```yaml filename="memory / agent (by ID)" +memory: + agent: + id: "memory-agent-001" +``` + +### Custom Agent Configuration + +For more control, you can define a complete agent configuration: + +```yaml filename="memory / agent (custom)" +memory: + agent: + provider: "openai" + model: "gpt-4" + instructions: "You are a memory assistant that helps maintain conversation context and user preferences." + model_parameters: + temperature: 0.3 + max_tokens: 1500 + top_p: 0.9 +``` + +#### Agent Configuration Fields + +When using custom agent configuration, the following fields are available: + +**provider** (required) + + +**model** (required) + + +**instructions** (optional) + + +**model_parameters** (optional) + + +## Complete Configuration Example + +Here's a comprehensive example showing all memory configuration options: + +```yaml filename="librechat.yaml" +version: 1.2.7 +cache: true + +memory: + disabled: false + validKeys: + - "user_preferences" + - "conversation_context" + - "learned_facts" + - "personal_information" + tokenLimit: 3000 + personalize: true + messageWindowSize: 8 + agent: + provider: "openai" + model: "gpt-4" + instructions: | + Store memory using only the specified validKeys. For user_preferences: save + explicitly stated preferences about communication style, topics of interest, + or workflow preferences. For conversation_context: save important facts or + ongoing projects mentioned. For learned_facts: save objective information + about the user. 
For personal_information: save only what the user explicitly + shares about themselves. Delete outdated or incorrect information promptly. + model_parameters: + temperature: 0.2 + max_tokens: 2000 + top_p: 0.8 + frequency_penalty: 0.1 +``` + +## Notes + +- Memory functionality enhances conversation continuity and personalization +- When `personalize` is true, users get a toggle in their chat interface to control memory usage +- Token limits help control memory usage and processing costs +- Valid keys provide granular control over what information can be stored +- Custom `instructions` replace default memory handling instructions and should be used with `validKeys` +- Agent configuration allows customization of memory processing behavior +- When disabled, all memory features are turned off regardless of other settings +- The message window size affects how much recent context is considered for memory updates \ No newline at end of file diff --git a/pages/docs/features/_meta.ts b/pages/docs/features/_meta.ts index 2e42abc06..13b0b6af4 100644 --- a/pages/docs/features/_meta.ts +++ b/pages/docs/features/_meta.ts @@ -4,6 +4,7 @@ export default { code_interpreter: 'Code Interpreter API', artifacts: 'Artifacts - Generative UI', web_search: 'Web Search', + memory: 'Memory', image_gen: 'Image Generation', // local_setup: 'Local Setup', // custom_endpoints: 'Custom Endpoints', diff --git a/pages/docs/features/index.mdx b/pages/docs/features/index.mdx index bd1e6c8a1..16cc2c22d 100644 --- a/pages/docs/features/index.mdx +++ b/pages/docs/features/index.mdx @@ -37,6 +37,14 @@ import Image from 'next/image' - **Flexible Configuration**: Choose from multiple services for each component - **[Learn More →](/docs/features/web_search)** +### 🧠 [Memory](/docs/features/memory) + +- **Persistent Context**: Remember information across conversations for a personalized experience +- **User Control**: Users can toggle memory on/off for individual chats when enabled, as well as create, edit and delete memories manually +- **Customizable Storage**: Control what types of information can be stored with valid keys and token limits +- **Configuration Required**: Must be explicitly configured in `librechat.yaml` to work +- **[Learn More →](/docs/features/memory)** + ### 🪄 **[Artifacts](/docs/features/artifacts)** - **Generative UI:** React, HTML, Mermaid diagrams diff --git a/pages/docs/features/memory.mdx b/pages/docs/features/memory.mdx new file mode 100644 index 000000000..a9b11ada5 --- /dev/null +++ b/pages/docs/features/memory.mdx @@ -0,0 +1,165 @@ +--- +title: Memory +description: Enable conversation memory and personalization features in LibreChat +--- + +# Memory + +## Overview + +Memory in LibreChat allows the system to remember information across conversations, providing a more personalized and context-aware experience. When enabled, the AI can recall user preferences, important facts, and conversation context to enhance future interactions. + + +Memory functionality must be explicitly configured in your `librechat.yaml` file to work. It is not enabled by default. 
+ + +## Key Features + +- **Persistent Context**: Information learned in one conversation can be recalled in future conversations +- **User Control**: When enabled, users can toggle memory on/off for their individual chats +- **Customizable Storage**: Control what types of information can be stored using valid keys +- **Token Management**: Set limits on memory usage to control costs +- **Agent Integration**: Use AI agents to intelligently manage what gets remembered + +## Configuration + +To enable memory features, you need to add the `memory` configuration to your `librechat.yaml` file: + +```yaml filename="librechat.yaml" +version: 1.2.7 +cache: true + +memory: + disabled: false # Set to true to completely disable memory + personalize: true # Gives users the ability to toggle memory on/off + tokenLimit: 2000 # Maximum tokens for memory storage + messageWindowSize: 5 # Number of recent messages to consider + agent: + provider: "openai" + model: "gpt-4" +``` + +See the [Memory Configuration Guide](/docs/configuration/librechat_yaml/object_structure/memory) for detailed configuration options. + +## How It Works + +### 1. Information Storage +When memory is enabled, the system can store: +- User preferences (communication style, topics of interest) +- Important facts and context from conversations +- Personal information explicitly shared by users +- Ongoing projects or tasks mentioned + +### 2. Context Window +The `messageWindowSize` parameter determines how many recent messages are analyzed for memory updates. This helps the system decide what information is worth remembering. + +### 3. User Control +When `personalize` is set to `true`: +- Users see a memory toggle in their chat interface +- They can enable/disable memory for individual conversations +- Memory settings persist across sessions + +### 4. Valid Keys +You can restrict what types of information are stored by specifying `validKeys`: + +```yaml filename="memory / validKeys" +memory: + validKeys: + - "user_preferences" + - "conversation_context" + - "learned_facts" + - "personal_information" +``` + +## Best Practices + +### 1. Token Limits +Set appropriate token limits to balance functionality with cost: +- Higher limits allow more comprehensive memory +- Lower limits reduce processing costs +- Consider your usage patterns and budget + +### 2. Custom Instructions +When using `validKeys`, provide custom instructions to the memory agent: + +```yaml filename="memory / agent with instructions" +memory: + agent: + provider: "openai" + model: "gpt-4" + instructions: | + Store information only in the specified validKeys categories. + Focus on explicitly stated preferences and important facts. + Delete outdated or corrected information promptly. +``` + +### 3. 
Privacy Considerations +- Memory stores user information across conversations +- Ensure users understand what information is being stored +- Consider implementing data retention policies +- Provide clear documentation about memory usage + +## Examples + +### Basic Configuration +Enable memory with default settings: + +```yaml filename="Basic memory config" +memory: + disabled: false + personalize: true +``` + +### Advanced Configuration +Full configuration with all options: + +```yaml filename="Advanced memory config" +memory: + disabled: false + validKeys: ["preferences", "context", "facts"] + tokenLimit: 3000 + personalize: true + messageWindowSize: 10 + agent: + provider: "anthropic" + model: "claude-3-opus-20240229" + instructions: "Remember only explicitly stated preferences and key facts." + model_parameters: + temperature: 0.3 + max_tokens: 1500 +``` + +### Using Predefined Agents +Reference an existing agent by ID: + +```yaml filename="Memory with agent ID" +memory: + agent: + id: "memory-specialist-001" +``` + +## Troubleshooting + +### Memory Not Working +1. Verify memory is configured in `librechat.yaml` +2. Check that `disabled` is set to `false` +3. Ensure the configured agent/model is available +4. Verify users have enabled memory in their chat interface + +### High Token Usage +1. Reduce `tokenLimit` to control costs +2. Decrease `messageWindowSize` to analyze fewer messages +3. Use `validKeys` to restrict what gets stored +4. Review and optimize agent instructions + +### Inconsistent Memory +1. Check if users are toggling memory on/off +2. Verify token limits aren't being exceeded +3. Ensure consistent agent configuration +4. Review stored memory for conflicts + +## Related Features + +- [Agents](/docs/features/agents) - Build custom AI assistants +- [Presets](/docs/user_guides/presets) - Save conversation settings +- [Fork Messages](/docs/features/fork) - Branch conversations while maintaining context \ No newline at end of file From 8e7b2f607380d1a2bcaa4b87499011facaf3dd3a Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sun, 8 Jun 2025 14:35:25 -0400 Subject: [PATCH 2/6] docs: Add LibreChat documentation rules and configuration update guidelines --- .cursor/rules/librechat-documentation.mdc | 146 ++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 .cursor/rules/librechat-documentation.mdc diff --git a/.cursor/rules/librechat-documentation.mdc b/.cursor/rules/librechat-documentation.mdc new file mode 100644 index 000000000..45a58ba2d --- /dev/null +++ b/.cursor/rules/librechat-documentation.mdc @@ -0,0 +1,146 @@ +--- +description: +globs: +alwaysApply: true +--- +# LibreChat Documentation Rules + +## Config Version Updates + +When updating the LibreChat config version (e.g., from v1.2.6 to v1.2.7), follow these steps: + +### 1. 
Create Changelog Files + +#### Main Changelog File +Create: `pages/changelog/config_v{VERSION}.mdx` + +Template: +```mdx +--- +date: YYYY/MM/DD +title: ⚙️ Config v{VERSION} +--- + +import { ChangelogHeader } from '@/components/changelog/ChangelogHeader' +import Content from '@/components/changelog/content/config_v{VERSION}.mdx' + + + +--- + + +``` + +#### Content File +Create: `components/changelog/content/config_v{VERSION}.mdx` + +Format: +- Use bullet points starting with `-` +- Group related changes together +- Include links to detailed documentation using `[Feature Name](/docs/configuration/librechat_yaml/object_structure/{feature})` +- Describe what was added/changed and its purpose +- Keep descriptions concise but informative + +Example: +```mdx +- Added `memory` configuration to control memory functionality for conversations + - Configure memory persistence and personalization settings + - Set token limits and message window sizes for memory context + - Configure agents for memory processing with provider-specific settings + - Supports both predefined agents (by ID) and custom agent configurations + - See [Memory Configuration](/docs/configuration/librechat_yaml/object_structure/memory) for details +``` + +### 2. Create Object Structure Documentation + +For new root-level configurations, create: `pages/docs/configuration/librechat_yaml/object_structure/{feature}.mdx` + +Structure: +1. **Title**: `# {Feature} Configuration` +2. **Overview**: Brief description of the feature +3. **Example**: Complete YAML example showing all options +4. **Field Documentation**: Use `` components for each field +5. **Subsections**: For complex nested objects +6. **Notes**: Important considerations at the end + +### 3. Update Navigation + +Add the new feature to: `pages/docs/configuration/librechat_yaml/object_structure/_meta.ts` + +Insert alphabetically or logically within the structure: +```ts +export default { + config: 'Root Settings', + file_config: 'File Config', + interface: 'Interface (UI)', + // ... other entries + memory: 'Memory', // Add new entry + // ... remaining entries +} +``` + +### 4. Update Main Config Documentation + +In `pages/docs/configuration/librechat_yaml/object_structure/config.mdx`: + +1. Update the version example: + ```yaml + ['version', 'String', 'Specifies the version of the configuration file.', 'version: 1.2.7' ], + ``` + +2. 
Add the new configuration section (insert alphabetically or logically): + ```mdx + ## memory + + **Key:** + + + **Subkeys:** + + + see: [Memory Object Structure](/docs/configuration/librechat_yaml/object_structure/memory) + ``` + +## Documentation Standards + +### OptionTable Usage +```mdx + +``` + +### YAML Examples +- Use `filename` attribute for code blocks: ` ```yaml filename="memory" ` +- Show realistic, working examples +- Include comments only when necessary for clarity + +### Field Descriptions +- Be precise about default values +- Explain the impact of different settings +- Note any relationships between fields +- Mention when fields are required vs optional + +### Special Considerations +- For boolean fields that give users control, clarify WHO gets the control (admin vs end-user) +- For fields that replace default behavior, explicitly state this +- For union types, show examples of each variant +- For nested objects, create subsections with their own OptionTables + +## Version Numbering +- Config versions follow semantic versioning: v{MAJOR}.{MINOR}.{PATCH} +- Adding new root-level configurations typically warrants a minor version bump +- Breaking changes require a major version bump +- Bug fixes or minor adjustments use patch versions \ No newline at end of file From 4caf00214c10c3bca5a890f3ad28a8a9446cf963 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sun, 8 Jun 2025 14:47:43 -0400 Subject: [PATCH 3/6] docs: Update Node.js version requirements across documentation - Revised Node.js prerequisites to specify v20.19.0+ (or ^22.12.0 or >= 23.0.0) in README.md, get_started.mdx, npm.mdx, docker_linux.mdx, and nginx.mdx for clarity and compatibility with openid-client v6. --- README.md | 2 +- pages/docs/development/get_started.mdx | 2 +- pages/docs/local/npm.mdx | 3 ++- pages/docs/remote/docker_linux.mdx | 2 +- pages/docs/remote/nginx.mdx | 2 +- 5 files changed, 6 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 652ee0aec..b349fa9b1 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Based on [Nextra](https://nextra.site/) ## Local Development -Pre-requisites: Node.js 18+, pnpm 9+ +Pre-requisites: Node.js v20.19.0+ (or ^22.12.0 or >= 23.0.0), pnpm 9+ 1. Optional: Create env based on [.env.template](./.env.template) 2. Run `pnpm i` to install the dependencies. 
diff --git a/pages/docs/development/get_started.mdx b/pages/docs/development/get_started.mdx index a9db14170..97b8b2b27 100644 --- a/pages/docs/development/get_started.mdx +++ b/pages/docs/development/get_started.mdx @@ -8,7 +8,7 @@ description: Learn how to contribute using GitHub Desktop, VS Code extensions, a ## Requirements - [Git](https://git-scm.com/downloads) (Essential) -- [Node.js](https://nodejs.org/en/download) (Essential, use the LTS version) +- [Node.js](https://nodejs.org/en/download) (Essential, use v20.19.0+ or ^22.12.0 or >= 23.0.0) - [MongoDB](https://www.mongodb.com/try/download/community) (Essential, for the database) - [Git LFS](https://git-lfs.com/) (Useful for larger files) - [GitHub Desktop](https://desktop.github.com/) (Optional) diff --git a/pages/docs/local/npm.mdx b/pages/docs/local/npm.mdx index d84e45453..504c1620b 100644 --- a/pages/docs/local/npm.mdx +++ b/pages/docs/local/npm.mdx @@ -9,7 +9,8 @@ For most scenarios, Docker Compose is the recommended installation method due to ## Prerequisites -- Node.js 18+: [https://nodejs.org/en/download](https://nodejs.org/en/download) +- Node.js v20.19.0+ (or ^22.12.0 or >= 23.0.0): [https://nodejs.org/en/download](https://nodejs.org/en/download) + - LibreChat uses CommonJS (CJS) and requires these specific Node.js versions for compatibility with openid-client v6 - Git: https://git-scm.com/download/ - MongoDB (Atlas or Community Server) - [MongoDB Atlas](/docs/configuration/mongodb/mongodb_atlas) diff --git a/pages/docs/remote/docker_linux.mdx b/pages/docs/remote/docker_linux.mdx index d9f52f988..c7ffb6539 100644 --- a/pages/docs/remote/docker_linux.mdx +++ b/pages/docs/remote/docker_linux.mdx @@ -166,7 +166,7 @@ npm -v ![image](https://github.com/danny-avila/LibreChat/assets/110412045/fbba1a38-95cd-4e8e-b813-04001bb82b25) -> Note: this will install some pretty old versions, for npm in particular. For the purposes of this guide, however, this is fine, but this is just a heads up in case you try other things with node in the droplet. Do look up a guide for getting the latest versions of the above as necessary. +> Note: this will install some pretty old versions, for npm in particular. LibreChat requires Node.js v20.19.0+ (or ^22.12.0 or >= 23.0.0) for compatibility with openid-client v6 when using CommonJS. If you need to run LibreChat directly on the host (not using Docker), you'll need to install a compatible Node.js version. However, for this Docker-based guide, the Node.js version on the host doesn't matter as the application runs inside containers. **Ok, now that you have set up the Droplet, you will now setup the app itself** diff --git a/pages/docs/remote/nginx.mdx b/pages/docs/remote/nginx.mdx index b212ccd37..113b75711 100644 --- a/pages/docs/remote/nginx.mdx +++ b/pages/docs/remote/nginx.mdx @@ -12,7 +12,7 @@ This guide covers the essential steps for securing your LibreChat deployment wit 1. A cloud server (e.g., AWS, Google Cloud, Azure, Digital Ocean). 2. A registered domain name. 3. Terminal access to your cloud server. -4. Node.js and NPM installed on your server. +4. Node.js v20.19.0+ (or ^22.12.0 or >= 23.0.0) and NPM installed on your server. 
## Initial Setup From 0239fabf53c55af3de6e95e6529c00aa43aa0f8b Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Sun, 8 Jun 2025 15:00:02 -0400 Subject: [PATCH 4/6] docs: Add DEBUG_OPENID_REQUESTS environment variable for enhanced OpenID debugging - Introduced a new environment variable, DEBUG_OPENID_REQUESTS, to enable detailed logging of OpenID request headers for better troubleshooting. - Updated documentation to include troubleshooting steps for OpenID Connect, emphasizing the use of the new logging feature. --- .../authentication/OAuth2-OIDC/index.mdx | 12 +++++++++++- .../authentication/OAuth2-OIDC/token-reuse.mdx | 3 ++- pages/docs/configuration/dotenv.mdx | 1 + 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/pages/docs/configuration/authentication/OAuth2-OIDC/index.mdx b/pages/docs/configuration/authentication/OAuth2-OIDC/index.mdx index fefa2bfe1..68991832e 100644 --- a/pages/docs/configuration/authentication/OAuth2-OIDC/index.mdx +++ b/pages/docs/configuration/authentication/OAuth2-OIDC/index.mdx @@ -27,4 +27,14 @@ This section will cover how to configure OAuth2 and OpenID Connect with LibreCha - [AWS Cognito](/docs/configuration/authentication/OAuth2-OIDC/aws) - [Azure Entra/AD](/docs/configuration/authentication/OAuth2-OIDC/azure) - [Keycloak](/docs/configuration/authentication/OAuth2-OIDC/keycloak) - - [Re-use OpenID Tokens for Login Session](/docs/configuration/authentication/OAuth2-OIDC/token-reuse) \ No newline at end of file + - [Re-use OpenID Tokens for Login Session](/docs/configuration/authentication/OAuth2-OIDC/token-reuse) + +## Troubleshooting OpenID Connect + +If you encounter issues with OpenID Connect authentication: + +1. **Enable Header Debug Logging**: Set `DEBUG_OPENID_REQUESTS=true` in your environment variables to log request headers in addition to URLs (with sensitive data masked). Note: Request URLs are always logged at debug level +2. **Check Redirect URIs**: Ensure your callback URL matches exactly between your provider and LibreChat configuration +3. **Verify Scopes**: Make sure all required scopes are properly configured +4. **Review Provider Logs**: Check your identity provider's logs for authentication errors +5. **Validate Tokens**: Ensure your provider is issuing valid tokens with the expected claims \ No newline at end of file diff --git a/pages/docs/configuration/authentication/OAuth2-OIDC/token-reuse.mdx b/pages/docs/configuration/authentication/OAuth2-OIDC/token-reuse.mdx index d11a0531c..44ed13e3c 100644 --- a/pages/docs/configuration/authentication/OAuth2-OIDC/token-reuse.mdx +++ b/pages/docs/configuration/authentication/OAuth2-OIDC/token-reuse.mdx @@ -100,4 +100,5 @@ If you encounter issues with token reuse: 2. Check that admin consent has been granted 3. Ensure the API permissions are correctly set up 4. Verify the token cache is working as expected -5. Check the application logs for any authentication errors \ No newline at end of file +5. Check the application logs for any authentication errors +6. 
Enable detailed OpenID request header logging by setting `DEBUG_OPENID_REQUESTS=true` in your environment variables to see request headers in addition to URLs (with sensitive data masked) \ No newline at end of file diff --git a/pages/docs/configuration/dotenv.mdx b/pages/docs/configuration/dotenv.mdx index 5e82a3641..9e571b79e 100644 --- a/pages/docs/configuration/dotenv.mdx +++ b/pages/docs/configuration/dotenv.mdx @@ -1012,6 +1012,7 @@ For more information: ['OPENID_IMAGE_URL', 'string', 'The URL of the OpenID login button image.','OPENID_IMAGE_URL='], ['OPENID_USE_END_SESSION_ENDPOINT', 'string', 'Whether to use the Issuer End Session Endpoint as a Logout Redirect','OPENID_USE_END_SESSION_ENDPOINT=TRUE'], ['OPENID_AUTO_REDIRECT', 'boolean', 'Whether to automatically redirect to the OpenID provider.','OPENID_AUTO_REDIRECT=true'], + ['DEBUG_OPENID_REQUESTS', 'boolean', 'Enable detailed logging of OpenID request headers. When disabled (default), only request URLs are logged at debug level. When enabled, request headers are also logged (with sensitive data masked) for deeper debugging of authentication issues.','DEBUG_OPENID_REQUESTS=false'], ]} /> From e78b466aa96d0d5b776bb71552e7b05339f41a46 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 9 Jun 2025 11:11:22 -0400 Subject: [PATCH 5/6] docs: Update changelog dates for versions v1.2.6 and v1.2.7 --- pages/changelog/config_v1.2.6.mdx | 2 +- pages/changelog/config_v1.2.7.mdx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pages/changelog/config_v1.2.6.mdx b/pages/changelog/config_v1.2.6.mdx index d35affae4..cd5516472 100644 --- a/pages/changelog/config_v1.2.6.mdx +++ b/pages/changelog/config_v1.2.6.mdx @@ -1,5 +1,5 @@ --- -date: 2025/5/7 +date: 2025/5/8 title: ⚙️ Config v1.2.6 --- diff --git a/pages/changelog/config_v1.2.7.mdx b/pages/changelog/config_v1.2.7.mdx index 20f0a54de..42551b2ec 100644 --- a/pages/changelog/config_v1.2.7.mdx +++ b/pages/changelog/config_v1.2.7.mdx @@ -1,5 +1,5 @@ --- -date: 2025/1/11 +date: 2025/6/9 title: ⚙️ Config v1.2.7 --- From 2691a857f5e8e45644b14490f10f54574ed12f51 Mon Sep 17 00:00:00 2001 From: Danny Avila Date: Mon, 9 Jun 2025 11:16:33 -0400 Subject: [PATCH 6/6] docs: Enhance memory configuration documentation - Updated the memory configuration section to clarify the default value for the personalize option. - Changed the provider name from "openai" to "openAI" for consistency. - Added notes regarding the provider field and valid model parameters. - Improved examples for memory agent configuration. --- pages/docs/features/memory.mdx | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/pages/docs/features/memory.mdx b/pages/docs/features/memory.mdx index a9b11ada5..11bfe53de 100644 --- a/pages/docs/features/memory.mdx +++ b/pages/docs/features/memory.mdx @@ -9,7 +9,7 @@ description: Enable conversation memory and personalization features in LibreCha Memory in LibreChat allows the system to remember information across conversations, providing a more personalized and context-aware experience. When enabled, the AI can recall user preferences, important facts, and conversation context to enhance future interactions. - + Memory functionality must be explicitly configured in your `librechat.yaml` file to work. It is not enabled by default. 
@@ -31,14 +31,18 @@ cache: true memory: disabled: false # Set to true to completely disable memory - personalize: true # Gives users the ability to toggle memory on/off + personalize: true # Gives users the ability to toggle memory on/off, true by default tokenLimit: 2000 # Maximum tokens for memory storage messageWindowSize: 5 # Number of recent messages to consider agent: - provider: "openai" + provider: "openAI" model: "gpt-4" ``` +The provider field should match the accepted values as defined in the [Model Spec Guide](/docs/configuration/librechat_yaml/object_structure/model_specs#endpoint). + +**Note:** If you are using a custom endpoint, the endpoint value must match the defined custom endpoint name exactly. + See the [Memory Configuration Guide](/docs/configuration/librechat_yaml/object_structure/memory) for detailed configuration options. ## How It Works @@ -85,7 +89,7 @@ When using `validKeys`, provide custom instructions to the memory agent: ```yaml filename="memory / agent with instructions" memory: agent: - provider: "openai" + provider: "openAI" model: "gpt-4" instructions: | Store information only in the specified validKeys categories. @@ -106,8 +110,10 @@ Enable memory with default settings: ```yaml filename="Basic memory config" memory: - disabled: false - personalize: true + tokenLimit: 2000 + agent: + provider: "openAI" + model: "gpt-4.1-mini" ``` ### Advanced Configuration @@ -126,9 +132,10 @@ memory: instructions: "Remember only explicitly stated preferences and key facts." model_parameters: temperature: 0.3 - max_tokens: 1500 ``` +For valid model parameters per provider, see the [Model Spec Preset Fields](/docs/configuration/librechat_yaml/object_structure/model_specs#preset-fields). + ### Using Predefined Agents Reference an existing agent by ID: