feat: storing activity logs in mongodb (#129)

* feat: storing logs in mongodb

* chore: push api logs to mongo

* chore: key changed

* chore: api log middleware
This commit is contained in:
Bavisetti Narayan
2024-05-08 15:44:30 +05:30
committed by GitHub
parent f5f0fbf8c5
commit df96f19364
9 changed files with 125 additions and 8 deletions

View File

@@ -31,3 +31,5 @@ USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
MONGO_DB_URL="mongodb://plane-mongodb:27017/"

View File

@@ -2,14 +2,69 @@ from django.utils import timezone
from datetime import timedelta
from plane.db.models import APIActivityLog
from celery import shared_task
from django.conf import settings
from pymongo import MongoClient
from pymongo.errors import BulkWriteError
from plane.utils.exception_logger import log_exception
BATCH_SIZE = 3000


def _bulk_insert(collection, docs):
    """Insert *docs* into *collection*, logging (not raising) bulk-write errors."""
    try:
        collection.insert_many(docs)
    except BulkWriteError as bwe:
        # Best-effort archival: record the failure and keep going.
        log_exception(bwe)


@shared_task
def delete_api_logs():
    """Archive API activity logs older than 30 days to MongoDB (when
    configured), then purge them from the relational database.

    Runs as a periodic Celery task. The queryset is built once and the
    delete happens only *after* archival, so configured deployments never
    lose unarchived rows.
    """
    # Logs past the 30-day retention window.
    logs_to_delete = APIActivityLog.objects.filter(
        created_at__lte=timezone.now() - timedelta(days=30)
    )

    if settings.MONGO_DB_URL:
        # Copy the expiring logs into MongoDB *before* deleting them.
        client = MongoClient(settings.MONGO_DB_URL)
        try:
            collection = client["plane"]["api_activity_logs"]

            def serialize(log):
                # Flatten a Django model row into a Mongo document.
                return {
                    "token_identifier": log.token_identifier,
                    "path": log.path,
                    "method": log.method,
                    "query_params": log.query_params,
                    "headers": log.headers,
                    "body": log.body,
                    "response_body": log.response_body,
                    "response_code": log.response_code,
                    "ip_address": log.ip_address,
                    "user_agent": log.user_agent,
                    "created_at": log.created_at,
                    "updated_at": log.updated_at,
                    "created_by": str(log.created_by_id) if log.created_by_id else None,
                    "updated_by": str(log.updated_by_id) if log.updated_by_id else None,
                }

            batch = []
            # iterator() streams rows instead of materializing the queryset.
            for log in logs_to_delete.iterator():
                batch.append(serialize(log))
                if len(batch) == BATCH_SIZE:
                    _bulk_insert(collection, batch)
                    batch = []
            # Flush the final partial batch.
            if batch:
                _bulk_insert(collection, batch)
        finally:
            # Release the MongoDB connection even if archiving failed.
            client.close()

    # Purge the expired (and now archived) logs in a single raw delete.
    logs_to_delete._raw_delete(logs_to_delete.db)

View File

@@ -0,0 +1,23 @@
import functools

from pymongo import MongoClient
def singleton(cls):
    """Class decorator that caches the first instance ever created.

    Every later call returns that cached instance; constructor arguments
    passed on subsequent calls are silently ignored. ``functools.wraps``
    preserves the decorated class's name, docstring, and attributes on
    the returned factory.
    """
    instances = {}

    @functools.wraps(cls)
    def wrapper(*args, **kwargs):
        # Lazily construct the one-and-only instance on first use.
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
        return instances[cls]

    return wrapper
@singleton
class Database:
    """Process-wide MongoDB connection holder.

    The ``singleton`` decorator guarantees a single shared instance, so
    one ``MongoClient`` (and its connection pool) serves the whole process.
    """

    # Shared connection handles, populated on first construction.
    db = None
    client = None

    def __init__(self, mongo_uri, database_name):
        connection = MongoClient(mongo_uri)
        self.client = connection
        self.db = connection[database_name]

    def get_db(self):
        """Return the handle to the configured MongoDB database."""
        return self.db

View File

@@ -38,6 +38,5 @@ class APITokenLogMiddleware:
except Exception as e:
print(e)
# If the token does not exist, you can decide whether to log this as an invalid attempt
return None

View File

@@ -350,3 +350,7 @@ INSTANCE_KEY = os.environ.get(
# Skip environment-variable validation unless explicitly set to "0".
SKIP_ENV_VAR = os.environ.get("SKIP_ENV_VAR", "1") == "1"
# Django request-body cap; reuses the upload limit (default 5242880 = 5 MiB).
DATA_UPLOAD_MAX_MEMORY_SIZE = int(os.environ.get("FILE_SIZE_LIMIT", 5242880))
# MongoDB Settings
# Deliberately falsy (False) when unset so callers can gate Mongo usage
# with a plain `if settings.MONGO_DB_URL:` check.
MONGO_DB_URL = os.environ.get("MONGO_DB_URL", False)

View File

@@ -34,4 +34,4 @@ posthog==3.0.2
cryptography==42.0.4
lxml==4.9.3
boto3==1.28.40
pymongo==4.6.1

View File

@@ -33,6 +33,9 @@ x-app-env: &app-env
- MINIO_ROOT_PASSWORD=${MINIO_ROOT_PASSWORD:-"secret-key"}
- BUCKET_NAME=${BUCKET_NAME:-uploads}
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
- MONGO_DB_URL=${MONGO_DB_URL:-mongodb://plane-mongodb:27017/}
- MONGO_INITDB_ROOT_USERNAME=${MONGO_INITDB_ROOT_USERNAME:-plane}
- MONGO_INITDB_ROOT_PASSWORD=${MONGO_INITDB_ROOT_PASSWORD:-plane}
services:
web:
@@ -122,6 +125,15 @@ services:
command: postgres -c 'max_connections=1000'
volumes:
- pgdata:/var/lib/postgresql/data
plane-mongodb:
<<: *app-env
image: mongo:7.0.5
pull_policy: if_not_present
restart: unless-stopped
volumes:
- mongodbdata:/data/db
plane-redis:
<<: *app-env
image: redis:7.2.4-alpine
@@ -155,6 +167,7 @@ volumes:
pgdata:
redisdata:
uploads:
mongodbdata:
logs_api:
logs_worker:
logs_beat-worker:

View File

@@ -6,6 +6,7 @@ volumes:
redisdata:
uploads:
pgdata:
mongodbdata:
services:
@@ -43,6 +44,16 @@ services:
- .env
environment:
PGDATA: /var/lib/postgresql/data
plane-mongodb:
image: mongo:7.0.5
restart: unless-stopped
networks:
- dev_env
volumes:
- mongodbdata:/data/db
env_file:
- .env
web:
build:

View File

@@ -40,6 +40,15 @@ services:
depends_on:
- plane-db
- plane-redis
mongodb:
image: "mongo"
restart: unless-stopped
volumes:
- "mongodb_data:/data/db"
environment:
- MONGO_INITDB_ROOT_USERNAME=${MONGO_INITDB_ROOT_USERNAME:-plane}
- MONGO_INITDB_ROOT_PASSWORD=${MONGO_INITDB_ROOT_PASSWORD:-plane}
worker:
container_name: bgworker
@@ -127,3 +136,4 @@ volumes:
pgdata:
redisdata:
uploads:
mongodb_data: