
Add AI wrapping for openai #397


Merged: 27 commits, Jun 11, 2025
Changes from 17 commits
Commits (27)
ce26856 Add new "ai_op" (bitterpanda63, Jun 9, 2025)
4d715a7 Update README.md to include section on AI SDKs (bitterpanda63, Jun 9, 2025)
ca8c9c2 Add empty openai module (bitterpanda63, Jun 9, 2025)
dac9412 Import openai as a sink (bitterpanda63, Jun 9, 2025)
797dd65 Create new AIStatistics object with test cases (bitterpanda63, Jun 9, 2025)
2b5592b Move ai_statistics to simple storage (bitterpanda63, Jun 9, 2025)
83ef070 Add AIStatistics to thread_cache (bitterpanda63, Jun 9, 2025)
1ab7651 is_empty -> empty (bitterpanda63, Jun 9, 2025)
4133f23 add ai stats to heartbeatr (bitterpanda63, Jun 9, 2025)
10befa2 Add a global on_ai_call to ai_statisitcs.py (bitterpanda63, Jun 9, 2025)
4120471 Sync AI stats and create a new merging function to facilitate (bitterpanda63, Jun 9, 2025)
a683354 Add openai sink code (bitterpanda63, Jun 9, 2025)
dc8816d move on_ai_call global to helper function file (bitterpanda63, Jun 9, 2025)
f10d86b Add openai as a dev dependency (bitterpanda63, Jun 9, 2025)
11859f8 Create new flask-openai sample app (bitterpanda63, Jun 10, 2025)
d8a6383 Install openai and re-lock (bitterpanda63, Jun 10, 2025)
07536c8 Add openai to flask-openai (bitterpanda63, Jun 10, 2025)
1344621 Fix .get not existing on Response - openai wrapper (bitterpanda63, Jun 10, 2025)
18aa06f Update test cases for thread_cache_test and sink tests (bitterpanda63, Jun 10, 2025)
177960b Fix bug with import_list and add unit tests to test (bitterpanda63, Jun 10, 2025)
b64e717 Delete unused sink test case (bitterpanda63, Jun 10, 2025)
d3e8d17 Remove mentions of dogs in flask-openai readme (bitterpanda63, Jun 10, 2025)
7b0877d Add ^1.0 limit for openai (bitterpanda63, Jun 10, 2025)
3e59acf Add support for chat completions (bitterpanda63, Jun 10, 2025)
fa9dcbb Add extra wrapping to openai.py (bitterpanda63, Jun 10, 2025)
0f5aa2f Update sample app to include more example routes for openai (bitterpanda63, Jun 10, 2025)
8eac181 Cleanup flask-openai app (bitterpanda63, Jun 10, 2025)
3 changes: 3 additions & 0 deletions README.md
@@ -54,6 +54,9 @@ Zen for Python 3 is compatible with:
* ✅ [`motor`](https://pypi.org/project/motor/) (See `pymongo` version)
* ✅ [`clickhouse-driver`](https://pypi.org/project/clickhouse-driver)

### AI SDKs
* ✅ [`openai`](https://pypi.org/project/openai)

## Reporting to your Aikido Security dashboard

> Aikido is your no nonsense application security platform. One central system that scans your source code & cloud, shows you what vulnerabilities matter, and how to fix them - fast. So you can get back to building.
3 changes: 3 additions & 0 deletions aikido_zen/__init__.py
@@ -64,4 +64,7 @@ def protect(mode="daemon"):
    import aikido_zen.sinks.os_system
    import aikido_zen.sinks.subprocess

    # Import AI sinks
    import aikido_zen.sinks.openai

    logger.info("Zen by Aikido v%s starting.", PKG_VERSION)
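For context: judging from the hunk's enclosing context, this sink import runs inside `protect()`, so it only takes effect once an application enables Zen. A minimal, hypothetical usage sketch (the application code and mode value are illustrative):

```python
# Hypothetical app entry point. Calling protect() runs the sink imports
# above, including aikido_zen.sinks.openai, which registers the patch
# for openai's Responses.create.
import aikido_zen

aikido_zen.protect(mode="daemon")

# ... application code that uses the openai package goes here ...
```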
@@ -10,6 +10,7 @@
from aikido_zen.storage.users import Users
from aikido_zen.storage.hostnames import Hostnames
from ..realtime.start_polling_for_changes import start_polling_for_changes
from ...storage.ai_statistics import AIStatistics
from ...storage.statistics import Statistics

# Import functions :
@@ -46,6 +47,7 @@ def __init__(self, block, api, token, serverless):
        self.users = Users(1000)
        self.packages = {}
        self.statistics = Statistics()
        self.ai_stats = AIStatistics()
        self.middleware_installed = False

        if isinstance(serverless, str) and len(serverless) == 0:
@@ -69,7 +71,11 @@ def report_initial_stats(self):
        This is run 1m after startup, and checks if we should send out
        a preliminary heartbeat with some stats.
        """
        data_present = not self.statistics.empty() or len(self.routes.routes) > 0
        data_present = (
            not self.statistics.empty()
            or len(self.routes.routes) > 0
            or not self.ai_stats.empty()
        )
        should_report_initial_stats = data_present and not self.conf.received_any_stats
        if should_report_initial_stats:
            self.send_heartbeat()
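As an aside, the widened `data_present` check means AI usage alone can now trigger the preliminary heartbeat; a small sketch using the `AIStatistics` class added in this PR (provider, model and token counts are made-up values):

```python
from aikido_zen.storage.ai_statistics import AIStatistics

ai_stats = AIStatistics()
assert ai_stats.empty()  # nothing recorded yet, so no reason to report

ai_stats.on_ai_call("openai", "gpt-4o", input_tokens=10, output_tokens=20)
assert not ai_stats.empty()  # AI usage alone now makes data_present True
```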
@@ -15,18 +15,21 @@ def send_heartbeat(connection_manager):
    users = connection_manager.users.as_array()
    routes = list(connection_manager.routes)
    outgoing_domains = connection_manager.hostnames.as_array()
    ai_stats = connection_manager.ai_stats.get_stats()

    connection_manager.statistics.clear()
    connection_manager.users.clear()
    connection_manager.routes.clear()
    connection_manager.hostnames.clear()
    connection_manager.ai_stats.clear()
    res = connection_manager.api.report(
        connection_manager.token,
        {
            "type": "heartbeat",
            "time": get_unixtime_ms(),
            "agent": connection_manager.get_manager_info(),
            "stats": stats,
            "ai": ai_stats,
            "hostnames": outgoing_domains,
            "routes": routes,
            "users": users,
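For reference, `ai_stats` above is whatever `AIStatistics.get_stats()` returns, so the new `"ai"` field of the heartbeat body should look roughly like this (provider, model and counts are example values):

```python
# Example shape of the heartbeat's "ai" field, following the entry layout
# built by AIStatistics.ensure_provider_stats() in this PR.
ai_example = [
    {
        "provider": "openai",
        "model": "gpt-4o",
        "calls": 3,
        "tokens": {"input": 120, "output": 450, "total": 570},
    },
]
```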
3 changes: 3 additions & 0 deletions aikido_zen/background_process/commands/sync_data.py
@@ -46,6 +46,9 @@ def process_sync_data(connection_manager, data, conn, queue=None):
    # Sync stats
    connection_manager.statistics.import_from_record(data.get("stats", {}))

    # Sync ai stats
    connection_manager.ai_stats.import_list(data.get("ai_stats", []))

    if connection_manager.conf.last_updated_at > 0:
        # Only report data if the config has been fetched.
        return {
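The `"ai_stats"` key is expected to carry the list a worker produced with `get_stats()`; a hedged sketch of the round trip (the worker-side payload assembly is an assumption, only `import_list` comes from this PR):

```python
from aikido_zen.storage.ai_statistics import AIStatistics

# Stats accumulated in one worker thread (illustrative values).
worker_stats = AIStatistics()
worker_stats.on_ai_call("openai", "gpt-4o", input_tokens=50, output_tokens=100)

# Assumed shape of the sync payload sent to the background process.
data = {"ai_stats": worker_stats.get_stats()}

# The connection manager merges it into its aggregate, keyed by provider:model.
aggregate = AIStatistics()
aggregate.import_list(data.get("ai_stats", []))
assert aggregate.get_stats()[0]["calls"] == 1
```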
7 changes: 7 additions & 0 deletions aikido_zen/helpers/on_ai_call.py
@@ -0,0 +1,7 @@
from aikido_zen.thread.thread_cache import get_cache


def on_ai_call(provider, model, input_tokens, output_tokens):
    cache = get_cache()
    if cache:
        cache.ai_stats.on_ai_call(provider, model, input_tokens, output_tokens)
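A sink is expected to call this helper right after a provider response is available; if no thread cache is active, `get_cache()` presumably returns a falsy value and the call becomes a no-op. A minimal sketch (token counts are illustrative):

```python
from aikido_zen.helpers.on_ai_call import on_ai_call

# Report one LLM call; with no active thread cache this silently does nothing.
on_ai_call(provider="openai", model="gpt-4o", input_tokens=12, output_tokens=34)
```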
25 changes: 25 additions & 0 deletions aikido_zen/sinks/openai.py
@@ -0,0 +1,25 @@
from aikido_zen.helpers.on_ai_call import on_ai_call
from aikido_zen.helpers.register_call import register_call
from aikido_zen.sinks import on_import, patch_function, after


@after
def _create(func, instance, args, kwargs, return_value):
    op = "openai.resources.responses.responses.Responses.create"
    register_call(op, "ai_op")

    on_ai_call(
        provider="openai",
        model=getattr(return_value, "model", ""),
        input_tokens=return_value.usage.input_tokens,
        output_tokens=return_value.usage.output_tokens,
    )


@on_import("openai.resources.responses.responses")
def patch(m):
    """
    patching module openai
    - patches function create(...) on Responses class, to inspect response
    """
    patch_function(m, "Responses.create", _create)
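From the application's side nothing changes; with Zen active, a call through the openai Responses API should be counted automatically. A minimal sketch (model name and prompt are examples, and an `OPENAI_API_KEY` in the environment is assumed):

```python
import aikido_zen
from openai import OpenAI

aikido_zen.protect()  # imports aikido_zen.sinks.openai, patching Responses.create

client = OpenAI()
response = client.responses.create(model="gpt-4o-mini", input="Say hello")

# The @after hook above has now recorded provider="openai", the response's
# model, and its usage token counts into the thread cache's AIStatistics.
```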
56 changes: 56 additions & 0 deletions aikido_zen/storage/ai_statistics.py
@@ -0,0 +1,56 @@
import copy


class AIStatistics:
    def __init__(self):
        self.calls = {}

    def ensure_provider_stats(self, provider, model):
        key = get_provider_key(provider, model)

        if key not in self.calls:
            self.calls[key] = {
                "provider": provider,
                "model": model,
                "calls": 0,
                "tokens": {
                    "input": 0,
                    "output": 0,
                    "total": 0,
                },
            }

        return self.calls[key]

    def on_ai_call(self, provider, model, input_tokens, output_tokens):
        if not provider or not model:
            return

        provider_stats = self.ensure_provider_stats(provider, model)
        provider_stats["calls"] += 1
        provider_stats["tokens"]["input"] += input_tokens
        provider_stats["tokens"]["output"] += output_tokens
        provider_stats["tokens"]["total"] += input_tokens + output_tokens

    def get_stats(self):
        return [copy.deepcopy(stats) for stats in self.calls.values()]

    def import_list(self, ai_stats_list):
        for new_entry in ai_stats_list:
            existing_entry = self.ensure_provider_stats(
                new_entry["provider"], new_entry["model"]
            )
            existing_entry["calls"] += new_entry["calls"]
            existing_entry["tokens"]["input"] += new_entry["tokens"]["input"]
            existing_entry["tokens"]["output"] += new_entry["tokens"]["output"]
            existing_entry["tokens"]["total"] += new_entry["tokens"]["total"]

    def clear(self):
        self.calls.clear()

    def empty(self):
        return len(self.calls) == 0


def get_provider_key(provider, model):
    return f"{provider}:{model}"
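A short standalone example of how the class accumulates and merges usage (all numbers are illustrative):

```python
from aikido_zen.storage.ai_statistics import AIStatistics

stats = AIStatistics()
stats.on_ai_call("openai", "gpt-4o", input_tokens=10, output_tokens=30)
stats.on_ai_call("openai", "gpt-4o", input_tokens=5, output_tokens=15)

print(stats.get_stats())
# [{'provider': 'openai', 'model': 'gpt-4o', 'calls': 2,
#   'tokens': {'input': 15, 'output': 45, 'total': 60}}]

# Entries are keyed by "provider:model", so merging stats from another
# process adds onto the same entry instead of duplicating it.
other = AIStatistics()
other.import_list(stats.get_stats())
assert not other.empty()

stats.clear()
assert stats.empty()
```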