From 81dbbc1a83c8079194c5f2b42aa0b7a1d76c7a1a Mon Sep 17 00:00:00 2001 From: Luis Fernando De Pombo <2381303+depombo@users.noreply.github.com> Date: Sun, 28 Sep 2025 12:15:04 -0700 Subject: [PATCH] protect all APIs --- docs/Tutorials/firebase+gemini.md | 10 +++++----- docs/Tutorials/supabase+openai.md | 10 +++++----- docs/debugging.md | 12 ++++++------ docs/intro.md | 18 ++++++++++-------- docs/security.md | 12 +++++++----- docusaurus.config.ts | 2 +- src/pages/index.tsx | 17 ++++++++--------- src/pages/pricing.mdx | 4 ++-- 8 files changed, 44 insertions(+), 41 deletions(-) diff --git a/docs/Tutorials/firebase+gemini.md b/docs/Tutorials/firebase+gemini.md index de64671..50c05ab 100644 --- a/docs/Tutorials/firebase+gemini.md +++ b/docs/Tutorials/firebase+gemini.md @@ -4,7 +4,7 @@ slug: /firebase # Gemini, Firebase Auth and Flutter -This tutorial will guide you through how to call the Google Gemini API from a Flutter app that uses Firebase Authentication without a backend or cloud function using Backmesh. This will entail creating an LLM API Gatekeeper in the Backmesh Dashboard and then using it in your app. +This tutorial will guide you through how to call the Google Gemini API from a Flutter app that uses Firebase Authentication without a backend or cloud function using Backmesh. This will entail creating an API Gatekeeper in the Backmesh Dashboard and then using it in your app. 
The Backmesh dashboard won't have any API proxies so click `New` to get started @@ -28,17 +28,17 @@ Go to the Firebase dashboard to grab the project ID and public project key ![Firebase Screenshot](/firebase+gemini/firebase.png) -And put them into the Backmesh LLM API Gatekeeper configuration +And put them into the Backmesh API Gatekeeper configuration ![Step 2 Screenshot](/firebase+gemini/step2.png) -## Step 3: Safely use the Backmesh LLM API Gatekeeper in your app using the Firebase Auth JWT +## Step 3: Safely use the Backmesh API Gatekeeper in your app using the Firebase Auth JWT -Grab the Backmesh LLM API Gatekeeper URL provided: +Grab the Backmesh API Gatekeeper URL provided: ![Step 3 Screenshot](/firebase+gemini/step3.png) -Now you are ready to make API calls in Flutter through the Backmesh LLM API Gatekeeper URL by passing in the user’s JWT token from your app’s authentication provider instead of the API private key to the API client. +Now you are ready to make API calls in Flutter through the Backmesh API Gatekeeper URL by passing in the user’s JWT token from your app’s authentication provider instead of the API private key to the API client. ```dart title="gemini.dart" import 'dart:io'; diff --git a/docs/Tutorials/supabase+openai.md b/docs/Tutorials/supabase+openai.md index 5fee03c..57b44fd 100644 --- a/docs/Tutorials/supabase+openai.md +++ b/docs/Tutorials/supabase+openai.md @@ -4,7 +4,7 @@ slug: /supabase # OpenAI, Supabase Auth and Javascript -This tutorial will guide you through how to call the OpenAI API from a Javascript web app that uses Supabase Authentication without a backend or cloud function using Backmesh. This will entail creating an LLM API Gatekeeper in the Backmesh Dashboard and then using it in your app. +This tutorial will guide you through how to call the OpenAI API from a Javascript web app that uses Supabase Authentication without a backend or cloud function using Backmesh. 
This will entail creating an API Gatekeeper in the Backmesh Dashboard and then using it in your app. The Backmesh dashboard won't have any API proxies so click `New` to get started @@ -28,17 +28,17 @@ Go to the Supabase dashboard to grab the project ID and public project key ![Supabase Screenshot](/supabase+openai/supabase.png) -And put them into the Backmesh LLM API Gatekeeper configuration +And put them into the Backmesh API Gatekeeper configuration ![Step 2 Screenshot](/supabase+openai/step2.png) -## Step 3: Safely use the Backmesh LLM API Gatekeeper in your app using the Supabase Auth JWT +## Step 3: Safely use the Backmesh API Gatekeeper in your app using the Supabase Auth JWT -Grab the Backmesh LLM API Gatekeeper URL provided: +Grab the Backmesh API Gatekeeper URL provided: ![Step 3 Screenshot](/supabase+openai/step3.png) -Now you are ready to make API calls in Javascript through the Backmesh LLM API Gatekeeper URL by passing in the user’s JWT token from your app’s authentication provider instead of the API private key to the API client. +Now you are ready to make API calls in Javascript through the Backmesh API Gatekeeper URL by passing in the user’s JWT token from your app’s authentication provider instead of the API private key to the API client. ```js title="openai.js" import OpenAI from "openai"; diff --git a/docs/debugging.md b/docs/debugging.md index 4bc9ed8..ebcd458 100644 --- a/docs/debugging.md +++ b/docs/debugging.md @@ -5,23 +5,23 @@ slug: /debugging # Debugging -You have created a Backmesh LLM API Gatekeeper and are ready to use it use one of the LLM APIs. But when you start sending requests you get an error. What do you do? +You have created a Backmesh API Gatekeeper and are ready to use it with one of the APIs. But when you start sending requests you get an error. What do you do? -## 1. Make sure your LLM API key is properly configured +## 1. 
Make sure your API key is properly configured -Call the API directly to make sure your private LLM key is properly configured using `curl` or a tool like Postman or Bruno. For the latter we have a predefined collection of requests defined for OpenAI, Anthropic and Gemini in our [GitHub repo](https://github.com/backmesh/backmesh/tree/main/collections). +Call the API directly to make sure your private key is properly configured using `curl` or a tool like Postman or Bruno. For the latter we have a predefined collection of requests defined for OpenAI, Anthropic and Gemini in our [GitHub repo](https://github.com/backmesh/backmesh/tree/main/collections). ![bruno llm api call](/debugging/bruno_llm_api.png) ## 2. Make sure your JWT is valid and properly generated -Create a test user with your authentication provider using email and password authentication. Then use the test user credentials to generate a fresh JWT using our generator tool for [Supabase](/supabase-jwt) or [Firebase](/firebase-jwt). Now try to call the LLM API via the JWT proxy using the generated token instead of the private LLM key, and the JWT proxy URL instead of the LLM API URL. +Create a test user with your authentication provider using email and password authentication. Then use the test user credentials to generate a fresh JWT using our generator tool for [Supabase](/supabase-jwt) or [Firebase](/firebase-jwt). Now try to call the API via the JWT proxy using the generated token instead of the private key, and the JWT proxy URL instead of the API URL. ![bruno backmesh proxy call](/debugging/bruno_proxy.png) -## 3. Make sure your LLM SDK of choice is properly configured +## 3. Make sure your SDK of choice is properly configured -Backmesh LLM API Gatekeepers are compatible with any LLM SDKs and app frameworks. Make sure your LLM SDK is properly configured to let you override the base URL of the LLM API with that of the Backmesh Gatekeeper. 
Check out our tutorials for examples on how to do this for different frameworks: +Backmesh API Gatekeepers are compatible with any SDKs and app frameworks. Make sure your SDK is properly configured to let you override the base URL of the API with that of the Backmesh Gatekeeper. Check out our tutorials for examples on how to do this for different frameworks: - [Flutter app using Firebase Authentication and Google Gemini API](/docs/firebase) - [Javascript web app using Supabase Authentication and OpenAI API](/docs/supabase) diff --git a/docs/intro.md b/docs/intro.md index 435fd9a..63e6287 100644 --- a/docs/intro.md +++ b/docs/intro.md @@ -5,9 +5,9 @@ slug: / # Introduction -Securely call LLM APIs directly from your mobile or web app using **any LLM SDK** without exposing private API keys. Only 2 changes needed in your app: -1. Replace the LLM API URL with the Backmesh Gatekeeper URL. -2. Replace the LLM private key with the authenticated user's JWT. +Securely call APIs directly from your mobile or web app using **any SDK** without exposing private API keys. Only 2 changes needed in your app: +1. Replace the API URL with the Backmesh Gatekeeper URL. +2. Replace the private key with the authenticated user's JWT. ```js title="openai.ts" import OpenAI from "openai"; @@ -28,7 +28,7 @@ const client = new OpenAI({ Backmesh is an open-source, thoroughly tested backend that uses military grade encryption to protect your LLM API key and offer an API Gatekeeper to let your app safely call the API -## How is the LLM API protected +## How is the API protected - **🛡️ JWT Authentication:** Requests are verified with [JWTs](https://firebase.google.com/docs/auth/admin/verify-id-tokens) from the app's authentication provider so only your users have access to the LLM API via Backmesh Gatekeeper. - **🚧 Rate limits per user:** Configurable per-user rate limits to prevent abuse (e.g. no more than 5 OpenAI API calls per user per hour). 
@@ -38,6 +38,8 @@ For more details, see the [security documentation](/docs/security). **LLM Private Key APIs Supported:** +Backmesh implements resource-level access control for sensitive LLM API resources, such as [Files](https://platform.openai.com/docs/api-reference/files) and [Threads](https://platform.openai.com/docs/api-reference/threads). This ensures that only the users who create these types of resources can continue to access them. + - [x] OpenAI - [x] Gemini - [x] Anthropic @@ -48,15 +50,15 @@ For more details, see the [security documentation](/docs/security). - [x] Supabase - [x] Firebase -Leave a comment on [Discord](https://discord.backmesh.com) if your provider or LLM API is not supported. +Leave a comment on [Discord](https://discord.backmesh.com) if your authentication provider is not supported or your LLM API needs specific access controls. -## LLM Analytics without SDKs +## API Usage per user without SDKs -Backmesh will automatically instrument LLM requests to let you understand LLM API usage across your users e.g. error rates, costs, response times across models, etc. Please leave a comment on [Discord](https://discord.backmesh.com) with more information about what LLM API endpoints you are using and what analytics you would like to see. +Backmesh will automatically instrument all requests to let you understand API usage across your users e.g. error rates, costs, response times, etc. Please leave a comment on [Discord](https://discord.backmesh.com) with more information about what API endpoints you are using and what analytics you would like to see. ## Hosting Options -Backmesh is open source and can be [self hosted](/docs/selfhost) in your own Cloudflare account which includes a generous free tier. We also offer a hosted [SaaS](https://app.backmesh.com) with different [pricing plans](/pricing). LLM API analytics are displayed in the SaaS dashboard only though. 
+Backmesh is open source and can be [self hosted](/docs/selfhost) in your own Cloudflare account which includes a generous free tier. We also offer a hosted [SaaS](https://app.backmesh.com) with different [pricing plans](/pricing). API analytics are displayed in the SaaS dashboard only though. ## Tutorials diff --git a/docs/security.md b/docs/security.md index 41d630e..702c4b2 100644 --- a/docs/security.md +++ b/docs/security.md @@ -18,16 +18,18 @@ Let's quickly go over authorization and the two different types of authenticatio For example, Firebase provides user authentication, but only properly configured Firestore security rules provide authorization to your database. Furthermore, only adding Firebase AppCheck can provide client app authentication. More about that [here](https://firebase.google.com/docs/firestore/security/overview). -## How does Backmesh protect your LLM API +## How does Backmesh protect your API -Backmesh protects your LLM API in four different ways: +Backmesh protects your API in four different ways: -1. JWT user authentication to ensure that requests to the LLM API come from one of your users. However, this by itself does not provide authorization about which specific users are allowed to make which requests to the LLM API, or how many requests a specific user can make. +1. JWT user authentication to ensure that requests to the API come from one of your users. However, this by itself does not provide authorization about which specific users are allowed to make which requests to the API, or how many requests a specific user can make. 2. Backmesh lets you set rate limits for each of your users e.g. no user should be calling a given API more than X times per hour. A screenshot from the dashboard: ![ratelimit](/ratelimit.png) -3. Backmesh implements resource-level access control for sensitive API resources, such as [Files](https://platform.openai.com/docs/api-reference/files) and [Threads](https://platform.openai.com/docs/api-reference/threads). 
This ensures that only the users who create these types of resources can continue to access them. +3. Backmesh has a whitelist of endpoints that it will allow for each API. For example, endpoints such as [`/administration`](https://platform.openai.com/docs/api-reference/administration) in OpenAI can modify your OpenAI account and are not included in the whitelist as your app shouldn't need access. If you find that you have a use case for an endpoint that is not whitelisted in an LLM API, or you would like to be able to configure this whitelist in the Backmesh dashboard, please email us or message us on Discord so we can better understand your use case and build a feature that serves your needs. -4. Backmesh has a whitelist of endpoints that it will allow for each LLM API. For example, endpoints such as [`/administration`](https://platform.openai.com/docs/api-reference/administration) in OpenAI can modify your OpenAI account and are not included in the whitelist as your app shouldn't need access. If you find that you have a use case for an endpoint that is not whitelisted in an LLM API, or you would like to be able to configure this whitelist in the Backmesh dashboard please email us, message us on Discord so we can better understand your use case and build a feature that serves your needs. +## Additional Access Control for LLM APIs + +Backmesh implements resource-level access control for sensitive LLM API resources, such as [Files](https://platform.openai.com/docs/api-reference/files) and [Threads](https://platform.openai.com/docs/api-reference/threads). This ensures that only the users who create these types of resources can continue to access them. 
\ No newline at end of file diff --git a/docusaurus.config.ts b/docusaurus.config.ts index fa08b2a..aea8698 100644 --- a/docusaurus.config.ts +++ b/docusaurus.config.ts @@ -12,7 +12,7 @@ const config: Config = { // JWT Proxy for LLM APIs // open source Backend for LLM APIs // open source BaaS for AI apps - tagline: "Don't ship LLM API keys in your app", + tagline: "Don't ship private API keys in your app", favicon: "img/favicon.ico", // Set the production url of your site here diff --git a/src/pages/index.tsx b/src/pages/index.tsx index 9bf1763..b9a45e4 100644 --- a/src/pages/index.tsx +++ b/src/pages/index.tsx @@ -26,8 +26,7 @@ export default function Home() {

{siteConfig.tagline}

-

Hide LLM secret keys in your app and avoid leaks that lead to thousands of dollars in LLM API costs 💸🚨

- {/*

No SDK needed and LLM user analytics included.

*/} +

Hide API keys in your app and avoid leaks that lead to thousands of dollars in API bills 💸🚨

@@ -58,7 +57,7 @@ export default function Home() {
-

Use an open-source, battle-tested backend to protect your LLM API key +

Use an open-source, battle-tested backend to protect your API key

@@ -70,7 +69,7 @@ export default function Home() {

- Requests are verified with JWTs from the app's authentication provider so only your users have access to the LLM API via Backmesh. + Requests are verified with JWTs from the app's authentication provider so only your users have access to the API via Backmesh.

@@ -92,11 +91,11 @@ export default function Home() {

🔐

-

API resource access control

+

LLM API resource access control

- Sensitive API resources like Files and Threads are protected so only the users that create them can continue to access them. + Sensitive LLM API resources like Files and Threads are protected so only the users that create them can continue to access them.

@@ -129,7 +128,7 @@ export default function Home() {

What is Backmesh

-

Backmesh is an open-source, thoroughly tested backend that uses military grade encryption to protect your LLM API key and offer an API Gatekeeper to let your app safely call the API

+

Backmesh is an open-source, thoroughly tested backend that uses military grade encryption to protect your API key and offer an API Gatekeeper to let your app safely call the API

-

LLM User Analytics without packages +

API usage per user without packages

-

All LLM API calls are instrumented so you can identify usage patterns, reduce costs and improve user satisfaction within your AI applications.

+

All API calls are instrumented so you can identify usage patterns, reduce costs and improve user satisfaction within your app.

diff --git a/src/pages/pricing.mdx b/src/pages/pricing.mdx index 8ede2b5..8230c62 100644 --- a/src/pages/pricing.mdx +++ b/src/pages/pricing.mdx @@ -39,13 +39,13 @@ export const Tooltip = ({ children, summary, details }) => { {/* Pricing Data */} export const pricingData = { plans: [ - { name: 'Starter', price: $25 /mo }, + { name: 'Starter', price: $20 /mo }, { name: 'Pro', price: Chat with us }, { name: 'Enterprise', price: Chat with us } ], features: [ { - name: 'Unlimited LLM API Gatekeepers', + name: 'Unlimited API Gatekeepers', values: ['✓', '✓', '✓'] }, {