Overview
Create a comprehensive logging system to record all AI requests and responses to help diagnose issues, improve service quality, and track usage patterns.
Requirements
- Store both user requests to AI and AI responses
- Associate logs with user accounts
- Implement proper retention and privacy policies
- Add admin interface for reviewing logs
Database Changes
New Migration File
File: /backend/migrations/YYYYMMDDHHMMSS_create_ai_logs_table.js
exports.up = function(knex) {
return knex.schema.createTable('ai_logs', table => {
table.increments('id').primary();
table.string('user_address', 42).notNullable().index();
table.text('request_prompt').notNullable();
table.text('response_text');
table.jsonb('request_metadata').defaultTo('{}');
table.jsonb('response_metadata').defaultTo('{}');
table.string('status', 20).defaultTo('pending').index();
table.integer('tokens_used').defaultTo(0);
table.integer('processing_time_ms').defaultTo(0);
table.string('model', 50);
table.text('error_message'); // text rather than the default varchar(255): error messages can be long
table.timestamp('created_at').defaultTo(knex.fn.now());
table.timestamp('completed_at');
// Add indexes for common queries
table.index('created_at');
table.index(['user_address', 'created_at']);
table.index(['status', 'created_at']);
});
};
exports.down = function(knex) {
return knex.schema.dropTable('ai_logs');
};
Backend Implementation
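Before the model and routes below can do anything, the migration has to be applied. A minimal sketch of running it programmatically, assuming the project's existing knexfile/connection setup (the standard `npx knex migrate:latest` CLI call works just as well):
// run-migrations.js — illustrative only; adjust the knex import to the project's setup
const knex = require('./db/knex');

knex.migrate
  .latest()
  .then(([batchNo, log]) => {
    console.log(`Batch ${batchNo} applied ${log.length} migration(s)`);
    return knex.destroy();
  })
  .catch((err) => {
    console.error('Migration failed:', err);
    process.exit(1);
  });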
1. AI Logs Model
File: /backend/models/aiLogs.js
const knex = require('../db/knex');
class AILogs {
/**
* Create a new AI log entry when a request is initiated
* @param {string} userAddress - Blockchain address of the user
* @param {string} requestPrompt - The prompt sent to the AI
* @param {Object} metadata - Additional request metadata
* @returns {Promise<number>} - The ID of the created log entry
*/
static async createLog(userAddress, requestPrompt, metadata = {}) {
// Depending on the knex version and database, `.returning('id')` yields either
// `[1]` or `[{ id: 1 }]`; normalize so callers always receive a plain number.
const [inserted] = await knex('ai_logs').insert({
user_address: userAddress,
request_prompt: requestPrompt,
request_metadata: JSON.stringify(metadata),
status: 'pending',
created_at: knex.fn.now()
}).returning('id');
return typeof inserted === 'object' ? inserted.id : inserted;
}
/**
* Update a log entry with the AI response
* @param {number} logId - The ID of the log entry
* @param {string} responseText - The AI-generated response
* @param {Object} responseMetadata - Additional response metadata
* @param {number} tokensUsed - Number of tokens used
* @param {number} processingTime - Processing time in milliseconds
* @param {string} model - AI model used
* @returns {Promise<void>}
*/
static async updateWithResponse(
logId,
responseText,
responseMetadata = {},
tokensUsed = 0,
processingTime = 0,
model = 'unknown'
) {
await knex('ai_logs')
.where({ id: logId })
.update({
response_text: responseText,
response_metadata: JSON.stringify(responseMetadata),
status: 'completed',
tokens_used: tokensUsed,
processing_time_ms: processingTime,
model: model,
completed_at: knex.fn.now()
});
}
/**
* Log an error that occurred during AI processing
* @param {number} logId - The ID of the log entry
* @param {string} errorMessage - The error message
* @returns {Promise<void>}
*/
static async logError(logId, errorMessage) {
await knex('ai_logs')
.where({ id: logId })
.update({
status: 'error',
error_message: errorMessage,
completed_at: knex.fn.now()
});
}
/**
* Get logs for a specific user
* @param {string} userAddress - The blockchain address of the user
* @param {number} limit - Maximum number of logs to return
* @param {number} offset - Offset for pagination
* @returns {Promise<Array>} - Array of log entries
*/
static async getLogsForUser(userAddress, limit = 50, offset = 0) {
return knex('ai_logs')
.where({ user_address: userAddress })
.orderBy('created_at', 'desc')
.limit(limit)
.offset(offset);
}
/**
* Get all logs for admin review
* @param {Object} filters - Optional filters
* @param {number} limit - Maximum number of logs to return
* @param {number} offset - Offset for pagination
* @returns {Promise<Array>} - Array of log entries
*/
static async getAllLogs(filters = {}, limit = 100, offset = 0) {
const query = knex('ai_logs').orderBy('created_at', 'desc');
// Apply filters if provided
if (filters.userAddress) {
query.where('user_address', filters.userAddress);
}
if (filters.status) {
query.where('status', filters.status);
}
if (filters.startDate && filters.endDate) {
query.whereBetween('created_at', [filters.startDate, filters.endDate]);
}
return query.limit(limit).offset(offset);
}
/**
* Get statistics about AI usage
* @returns {Promise<Object>} - Statistics object
*/
static async getStatistics() {
const totalRequests = await knex('ai_logs').count('id as count').first();
const statusCounts = await knex('ai_logs')
.select('status')
.count('id as count')
.groupBy('status');
const tokensUsed = await knex('ai_logs')
.sum('tokens_used as total')
.first();
const avgProcessingTime = await knex('ai_logs')
.avg('processing_time_ms as average')
.where('status', 'completed')
.first();
return {
totalRequests: totalRequests.count,
statusCounts: statusCounts.reduce((acc, curr) => {
acc[curr.status] = curr.count;
return acc;
}, {}),
tokensUsed: tokensUsed.total || 0,
avgProcessingTime: avgProcessingTime.average || 0
};
}
}
module.exports = AILogs;
2. Update AI Processing Route
File: /backend/routes/ai.js
// Add to top of the file:
const AILogs = require('../models/aiLogs');
// Update the /process endpoint:
router.post('/process', auth, async (req, res) => {
let logId = null;
try {
const { challengeId, signature, aiPrompt } = req.body;
const userAddress = req.user.address;
// Create log entry at the beginning
logId = await AILogs.createLog(userAddress, aiPrompt, {
requestTime: new Date().toISOString(),
ipAddress: req.ip // Optionally log IP address (consider privacy implications)
});
// Verify signature and track usage
await AIUsage.verifyAndTrackUsage(challengeId, signature, userAddress);
// Capture start time for performance measurement
const startTime = Date.now();
// Process AI request with existing AI service
const aiResult = await processAIRequest(aiPrompt);
// Calculate processing time
const processingTime = Date.now() - startTime;
// Update log with response
await AILogs.updateWithResponse(
logId,
aiResult.generatedContent,
{
responseTime: new Date().toISOString(),
promptTokens: aiResult.usage?.promptTokens || 0,
completionTokens: aiResult.usage?.completionTokens || 0
},
aiResult.usage?.totalTokens || 0,
processingTime,
aiResult.model || 'unknown'
);
res.json({
success: true,
generatedContent: aiResult.generatedContent,
// Include any other response data
});
} catch (error) {
// Log the error if we have a log ID
if (logId) {
await AILogs.logError(logId, error.message || 'Unknown error');
}
res.status(error.message === 'Daily AI usage limit reached' ? 429 : 400).json({
success: false,
error: error.message
});
}
});
3. Admin Routes for Log Access
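These routes rely on an adminAuth middleware from /backend/middleware/auth, which is not shown in this issue. A rough sketch of what it might check (purely an assumption about that module — the real gate could be a role flag, an allowlist, or a dedicated admin token):
// /backend/middleware/auth.js (excerpt, illustrative)
const ADMIN_ADDRESSES = (process.env.ADMIN_ADDRESSES || '').toLowerCase().split(',');

function adminAuth(req, res, next) {
  // Assumes an earlier auth step (or this middleware itself) has resolved req.user from the request
  const address = req.user && req.user.address ? req.user.address.toLowerCase() : null;
  if (address && ADMIN_ADDRESSES.includes(address)) {
    return next();
  }
  return res.status(403).json({ success: false, error: 'Admin access required' });
}

module.exports = { adminAuth };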
File: /backend/routes/admin.js
const express = require('express');
const router = express.Router();
const AILogs = require('../models/aiLogs');
const { adminAuth } = require('../middleware/auth');
/**
* Get all AI logs (admin only)
*/
router.get('/ai-logs', adminAuth, async (req, res) => {
try {
const {
user_address,
status,
start_date,
end_date,
limit = 100,
offset = 0
} = req.query;
const filters = {};
if (user_address) filters.userAddress = user_address;
if (status) filters.status = status;
if (start_date && end_date) {
filters.startDate = start_date;
filters.endDate = end_date;
}
const logs = await AILogs.getAllLogs(
filters,
parseInt(limit),
parseInt(offset)
);
res.json({
success: true,
logs
});
} catch (error) {
res.status(500).json({
success: false,
error: error.message
});
}
});
/**
* Get AI usage statistics (admin only)
*/
router.get('/ai-stats', adminAuth, async (req, res) => {
try {
const stats = await AILogs.getStatistics();
res.json({
success: true,
statistics: stats
});
} catch (error) {
res.status(500).json({
success: false,
error: error.message
});
}
});
module.exports = router;
Frontend Implementation (Admin Dashboard)
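The components below assume two things not defined in this issue: the admin router is mounted under /api/admin on the backend, and /app/services/api exports a preconfigured HTTP client. A minimal sketch of both assumptions:
// /backend/app.js (excerpt) — assumed mounting point
const adminRoutes = require('./routes/admin');
app.use('/api/admin', adminRoutes);

// /app/services/api.ts — assumed axios-style client; base URL and auth handling are placeholders
import axios from 'axios';

const api = axios.create({
  baseURL: process.env.REACT_APP_API_URL || '',
  withCredentials: true // assumption: cookie/session auth; adjust to the project's auth scheme
});

export default api;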
1. AI Logs Component
File: /app/components/admin/AILogsTable.tsx
import React, { useState, useEffect } from 'react';
import { Table, Button, Form, Row, Col, Badge, Pagination } from 'react-bootstrap';
import { format } from 'date-fns';
import api from '../../services/api';
interface AILog {
id: number;
user_address: string;
request_prompt: string;
response_text: string;
status: 'pending' | 'completed' | 'error';
tokens_used: number;
processing_time_ms: number;
model: string;
error_message?: string;
created_at: string;
completed_at?: string;
}
const AILogsTable: React.FC = () => {
const [logs, setLogs] = useState<AILog[]>([]);
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
const [page, setPage] = useState(1);
const [totalPages, setTotalPages] = useState(1);
const [filters, setFilters] = useState({
userAddress: '',
status: '',
startDate: '',
endDate: ''
});
const fetchLogs = async () => {
try {
setLoading(true);
setError(null);
const params = new URLSearchParams();
if (filters.userAddress) params.append('user_address', filters.userAddress);
if (filters.status) params.append('status', filters.status);
if (filters.startDate) params.append('start_date', filters.startDate);
if (filters.endDate) params.append('end_date', filters.endDate);
params.append('limit', '20');
params.append('offset', ((page - 1) * 20).toString());
const response = await api.get(`/api/admin/ai-logs?${params.toString()}`);
setLogs(response.data.logs);
// The admin route above does not currently return a total count; guard so
// pagination still renders if `total` is missing from the response.
setTotalPages(response.data.total ? Math.ceil(response.data.total / 20) : 1);
} catch (error) {
setError('Failed to fetch AI logs');
console.error('Error fetching logs:', error);
} finally {
setLoading(false);
}
};
useEffect(() => {
fetchLogs();
}, [page, filters]);
const handleFilterChange = (field: string, value: string) => {
setFilters(prev => ({ ...prev, [field]: value }));
setPage(1); // Reset to first page when filters change
};
const getStatusBadge = (status: string) => {
switch (status) {
case 'pending':
return <Badge bg="warning">Pending</Badge>;
case 'completed':
return <Badge bg="success">Completed</Badge>;
case 'error':
return <Badge bg="danger">Error</Badge>;
default:
return <Badge bg="secondary">{status}</Badge>;
}
};
const truncateText = (text: string, maxLength: number = 50) => {
if (!text) return '';
return text.length > maxLength ? `${text.substring(0, maxLength)}...` : text;
};
return (
<div className="ai-logs-container">
<h2>AI Interaction Logs</h2>
{/* Filters */}
<div className="filters-container mb-4">
<Row>
<Col md={3}>
<Form.Group>
<Form.Label>User Address</Form.Label>
<Form.Control
type="text"
placeholder="0x..."
value={filters.userAddress}
onChange={(e) => handleFilterChange('userAddress', e.target.value)}
/>
</Form.Group>
</Col>
<Col md={2}>
<Form.Group>
<Form.Label>Status</Form.Label>
<Form.Select
value={filters.status}
onChange={(e) => handleFilterChange('status', e.target.value)}
>
<option value="">All</option>
<option value="pending">Pending</option>
<option value="completed">Completed</option>
<option value="error">Error</option>
</Form.Select>
</Form.Group>
</Col>
<Col md={3}>
<Form.Group>
<Form.Label>Start Date</Form.Label>
<Form.Control
type="date"
value={filters.startDate}
onChange={(e) => handleFilterChange('startDate', e.target.value)}
/>
</Form.Group>
</Col>
<Col md={3}>
<Form.Group>
<Form.Label>End Date</Form.Label>
<Form.Control
type="date"
value={filters.endDate}
onChange={(e) => handleFilterChange('endDate', e.target.value)}
/>
</Form.Group>
</Col>
<Col md={1} className="d-flex align-items-end">
<Button
variant="outline-secondary"
onClick={() => setFilters({
userAddress: '',
status: '',
startDate: '',
endDate: ''
})}
className="mb-3"
>
Clear
</Button>
</Col>
</Row>
</div>
{error && <div className="alert alert-danger">{error}</div>}
{loading ? (
<div className="text-center my-5">
<div className="spinner-border" role="status">
<span className="visually-hidden">Loading...</span>
</div>
</div>
) : (
<>
<Table responsive striped hover>
<thead>
<tr>
<th>ID</th>
<th>User</th>
<th>Prompt</th>
<th>Response</th>
<th>Status</th>
<th>Tokens</th>
<th>Time (ms)</th>
<th>Created At</th>
<th>Actions</th>
</tr>
</thead>
<tbody>
{logs.length === 0 ? (
<tr>
<td colSpan={9} className="text-center">No logs found</td>
</tr>
) : (
logs.map(log => (
<tr key={log.id}>
<td>{log.id}</td>
<td>
{truncateText(log.user_address, 10)}
</td>
<td>{truncateText(log.request_prompt)}</td>
<td>{truncateText(log.response_text)}</td>
<td>{getStatusBadge(log.status)}</td>
<td>{log.tokens_used}</td>
<td>{log.processing_time_ms}</td>
<td>
{log.created_at ? format(new Date(log.created_at), 'MM/dd/yyyy HH:mm') : '-'}
</td>
<td>
<Button
size="sm"
variant="outline-primary"
onClick={() => {/* Implement view details modal */}}
>
Details
</Button>
</td>
</tr>
))
)}
</tbody>
</Table>
{/* Pagination */}
<div className="d-flex justify-content-center">
<Pagination>
<Pagination.First
disabled={page === 1}
onClick={() => setPage(1)}
/>
<Pagination.Prev
disabled={page === 1}
onClick={() => setPage(p => Math.max(1, p - 1))}
/>
{Array.from({ length: Math.min(5, totalPages) }, (_, i) => {
const pageNum = page <= 3
? i + 1
: page >= totalPages - 2
? totalPages - 4 + i
: page - 2 + i;
if (pageNum <= 0 || pageNum > totalPages) return null;
return (
<Pagination.Item
key={pageNum}
active={pageNum === page}
onClick={() => setPage(pageNum)}
>
{pageNum}
</Pagination.Item>
);
})}
<Pagination.Next
disabled={page === totalPages}
onClick={() => setPage(p => Math.min(totalPages, p + 1))}
/>
<Pagination.Last
disabled={page === totalPages}
onClick={() => setPage(totalPages)}
/>
</Pagination>
</div>
</>
)}
</div>
);
};
export default AILogsTable;
2. Log Detail Modal
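The Details button in the table above is left as a placeholder. Once the modal defined below exists, wiring the two together could look roughly like this (a sketch; selectedLog and showDetail are hypothetical additions to AILogsTable, and the list rows may need the metadata fields of AILogDetail or a separate detail fetch):
// Illustrative additions to AILogsTable.tsx
import AILogDetailModal from './AILogDetailModal';

// Extra state alongside the existing hooks:
const [selectedLog, setSelectedLog] = useState<AILog | null>(null);
const [showDetail, setShowDetail] = useState(false);

// In the Actions cell, replace the placeholder onClick:
<Button
  size="sm"
  variant="outline-primary"
  onClick={() => { setSelectedLog(log); setShowDetail(true); }}
>
  Details
</Button>

// After the pagination block, render the modal once:
<AILogDetailModal
  show={showDetail}
  onHide={() => setShowDetail(false)}
  log={selectedLog as any /* AILog lacks the metadata fields of AILogDetail; align the types or fetch details separately */}
/>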
File: /app/components/admin/AILogDetailModal.tsx
import React from 'react';
import { Modal, Button, Table, Badge } from 'react-bootstrap';
import { format } from 'date-fns';
interface AILogDetail {
id: number;
user_address: string;
request_prompt: string;
response_text: string;
request_metadata: any;
response_metadata: any;
status: string;
tokens_used: number;
processing_time_ms: number;
model: string;
error_message?: string;
created_at: string;
completed_at?: string;
}
interface AILogDetailModalProps {
show: boolean;
onHide: () => void;
log: AILogDetail | null;
}
const AILogDetailModal: React.FC<AILogDetailModalProps> = ({ show, onHide, log }) => {
if (!log) return null;
const formatDate = (dateStr?: string) => {
if (!dateStr) return '-';
return format(new Date(dateStr), 'MM/dd/yyyy HH:mm:ss');
};
const formatJson = (json: any) => {
try {
return JSON.stringify(json, null, 2);
} catch (e) {
return JSON.stringify(json);
}
};
return (
<Modal show={show} onHide={onHide} size="lg">
<Modal.Header closeButton>
<Modal.Title>AI Log Details #{log.id}</Modal.Title>
</Modal.Header>
<Modal.Body>
<Table bordered>
<tbody>
<tr>
<th>Status</th>
<td>
<Badge bg={
log.status === 'completed' ? 'success' :
log.status === 'error' ? 'danger' : 'warning'
}>
{log.status.toUpperCase()}
</Badge>
</td>
</tr>
<tr>
<th>User Address</th>
<td>{log.user_address}</td>
</tr>
<tr>
<th>Created At</th>
<td>{formatDate(log.created_at)}</td>
</tr>
<tr>
<th>Completed At</th>
<td>{formatDate(log.completed_at)}</td>
</tr>
<tr>
<th>Processing Time</th>
<td>{log.processing_time_ms}ms</td>
</tr>
<tr>
<th>Tokens Used</th>
<td>{log.tokens_used}</td>
</tr>
<tr>
<th>AI Model</th>
<td>{log.model || 'Unknown'}</td>
</tr>
{log.error_message && (
<tr>
<th>Error</th>
<td className="text-danger">{log.error_message}</td>
</tr>
)}
</tbody>
</Table>
<h5>User Prompt</h5>
<div className="border p-3 mb-3 bg-light">
<pre className="mb-0" style={{ whiteSpace: 'pre-wrap' }}>{log.request_prompt}</pre>
</div>
<h5>AI Response</h5>
<div className="border p-3 mb-3 bg-light">
<pre className="mb-0" style={{ whiteSpace: 'pre-wrap' }}>{log.response_text || '(No response)'}</pre>
</div>
<h5>Request Metadata</h5>
<div className="border p-3 mb-3 bg-light">
<pre className="mb-0" style={{ whiteSpace: 'pre-wrap' }}>{formatJson(log.request_metadata)}</pre>
</div>
<h5>Response Metadata</h5>
<div className="border p-3 mb-3 bg-light">
<pre className="mb-0" style={{ whiteSpace: 'pre-wrap' }}>{formatJson(log.response_metadata)}</pre>
</div>
</Modal.Body>
<Modal.Footer>
<Button variant="secondary" onClick={onHide}>
Close
</Button>
</Modal.Footer>
</Modal>
);
};
export default AILogDetailModal;
Privacy and Data Retention
Data Retention Policy
Add a scheduled job to purge old logs based on your data retention policy:
File: /backend/jobs/pruneAILogs.js
const knex = require('../db/knex');
const { subDays } = require('date-fns');
/**
* Prune AI logs older than the retention period
* @param {number} retentionDays - Number of days to keep logs
*/
async function pruneAILogs(retentionDays = 90) {
try {
const cutoffDate = subDays(new Date(), retentionDays);
console.log(`Pruning AI logs older than ${cutoffDate.toISOString()}`);
const deletedCount = await knex('ai_logs')
.where('created_at', '<', cutoffDate)
.del();
console.log(`Deleted ${deletedCount} old AI logs`);
} catch (error) {
console.error('Error pruning AI logs:', error);
}
}
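// How this job gets invoked is deployment-specific. One option (an assumption —
// requires the `node-cron` package; a system cron entry or any job runner works too):
//
//   const cron = require('node-cron');
//   const pruneAILogs = require('./jobs/pruneAILogs');
//   cron.schedule('0 3 * * *', () => pruneAILogs(90)); // nightly at 03:00, 90-day retention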
module.exports = pruneAILogs;
Implementation Tasks
- Create database migration for ai_logs table
- Implement AILogs model
- Update AI processing route to log requests and responses
- Implement admin routes for accessing logs
- Create admin UI components for viewing logs
- Implement data retention job
- Add documentation about the logging system
- Update privacy policy to reflect data collection
Testing Requirements
- Test log creation at the start of AI requests (see the sketch after this list)
- Test log updates with AI responses
- Test error logging
- Test admin log retrieval with different filters
- Test log detail viewing
- Test data retention job
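A minimal sketch of the first two checks, assuming Jest and a knex connection pointed at a test database (file path, helper names, and the test address are placeholders):
// /backend/tests/aiLogs.test.js (illustrative)
const knex = require('../db/knex');
const AILogs = require('../models/aiLogs');

describe('AILogs model', () => {
  afterAll(() => knex.destroy());

  test('creates a pending log and records the response', async () => {
    const logId = await AILogs.createLog('0x0000000000000000000000000000000000000001', 'Hello AI', { source: 'test' });

    let row = await knex('ai_logs').where({ id: logId }).first();
    expect(row.status).toBe('pending');

    await AILogs.updateWithResponse(logId, 'Hi there', {}, 12, 150, 'test-model');

    row = await knex('ai_logs').where({ id: logId }).first();
    expect(row.status).toBe('completed');
    expect(row.tokens_used).toBe(12);
    expect(row.response_text).toBe('Hi there');
  });
});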
Success Criteria
- All AI requests and responses are properly logged
- Logs include necessary metadata for debugging
- Admins can view and filter logs
- Old logs are automatically purged
- Privacy is maintained with proper access controls
Timeline
- Database migration: 0.5 day
- Backend implementation: 1 day
- Admin UI: 1 day
- Testing and refinement: 0.5 day
Notes
- Consider anonymizing sensitive data in prompts/responses
- Implement proper access controls to restrict log access to administrators only
- Consider adding export functionality for logs
- Implement monitoring alerts for high error rates
- Add rate limiting to admin API endpoints to prevent abuse (see the sketch below)
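For the last point, a sketch using the express-rate-limit package (an assumption; any limiter middleware works), applied to the admin router from earlier:
// In /backend/routes/admin.js (illustrative addition)
const rateLimit = require('express-rate-limit');

const adminLimiter = rateLimit({
  windowMs: 15 * 60 * 1000, // 15-minute window
  max: 100,                 // at most 100 admin requests per window per IP
  standardHeaders: true,
  legacyHeaders: false
});

router.use(adminLimiter);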