dev: move storage metadata collection to background job (#5818)

* fix: move storage metadata collection to background job

* fix: docker compose and env

* fix: archive endpoint
This commit is contained in:
Nikhil
2024-10-16 13:55:49 +05:30
committed by GitHub
parent cc613e57c9
commit 9b85306359
9 changed files with 71 additions and 32 deletions

View File

@@ -22,6 +22,7 @@ from plane.db.models import (
from plane.settings.storage import S3Storage
from plane.app.permissions import allow_permission, ROLE
from plane.utils.cache import invalidate_cache_directly
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
class UserAssetsV2Endpoint(BaseAPIView):
@@ -193,14 +194,11 @@ class UserAssetsV2Endpoint(BaseAPIView):
def patch(self, request, asset_id):
# get the asset id
asset = FileAsset.objects.get(id=asset_id, user_id=request.user.id)
storage = S3Storage(request=request)
# get the storage metadata
asset.is_uploaded = True
# get the storage metadata
if asset.storage_metadata is None:
asset.storage_metadata = storage.get_object_metadata(
object_name=asset.asset.name
)
if not asset.storage_metadata:
get_asset_object_metadata.delay(asset_id=str(asset_id))
# get the entity and save the asset id for the request field
self.entity_asset_save(
asset_id=asset_id,
@@ -446,14 +444,11 @@ class WorkspaceFileAssetEndpoint(BaseAPIView):
def patch(self, request, slug, asset_id):
# get the asset id
asset = FileAsset.objects.get(id=asset_id, workspace__slug=slug)
storage = S3Storage(request=request)
# get the storage metadata
asset.is_uploaded = True
# get the storage metadata
if asset.storage_metadata is None:
asset.storage_metadata = storage.get_object_metadata(
object_name=asset.asset.name
)
if not asset.storage_metadata:
get_asset_object_metadata.delay(asset_id=str(asset_id))
# get the entity and save the asset id for the request field
self.entity_asset_save(
asset_id=asset_id,
@@ -686,14 +681,11 @@ class ProjectAssetEndpoint(BaseAPIView):
asset = FileAsset.objects.get(
id=pk,
)
storage = S3Storage(request=request)
# get the storage metadata
asset.is_uploaded = True
# get the storage metadata
if asset.storage_metadata is None:
asset.storage_metadata = storage.get_object_metadata(
object_name=asset.asset.name
)
if not asset.storage_metadata:
get_asset_object_metadata.delay(asset_id=str(pk))
# update the attributes
asset.attributes = request.data.get("attributes", asset.attributes)

View File

@@ -520,7 +520,26 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
.annotate(first_name=F("assignees__first_name"))
.annotate(last_name=F("assignees__last_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar_url=F("assignees__avatar_url"))
.annotate(
avatar_url=Case(
# If `avatar_asset` exists, use it to generate the asset URL
When(
assignees__avatar_asset__isnull=False,
then=Concat(
Value("/api/assets/v2/static/"),
"assignees__avatar_asset", # Assuming avatar_asset has an id or relevant field
Value("/"),
),
),
# If `avatar_asset` is None, fall back to using `avatar` field directly
When(
assignees__avatar_asset__isnull=True,
then="assignees__avatar",
),
default=Value(None),
output_field=models.CharField(),
)
)
.annotate(display_name=F("assignees__display_name"))
.values(
"first_name",

View File

@@ -20,6 +20,7 @@ from plane.db.models import FileAsset, Workspace
from plane.bgtasks.issue_activities_task import issue_activity
from plane.app.permissions import allow_permission, ROLE
from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
class IssueAttachmentEndpoint(BaseAPIView):
@@ -254,10 +255,7 @@ class IssueAttachmentV2Endpoint(BaseAPIView):
issue_attachment.is_uploaded = True
# Get the storage metadata
if issue_attachment.storage_metadata is None:
storage = S3Storage(request=request)
issue_attachment.storage_metadata = storage.get_object_metadata(
issue_attachment.asset.name
)
if not issue_attachment.storage_metadata:
get_asset_object_metadata.delay(str(issue_attachment.id))
issue_attachment.save()
return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -0,0 +1,28 @@
# Third party imports
from celery import shared_task
# Module imports
from plane.db.models import FileAsset
from plane.settings.storage import S3Storage
from plane.utils.exception_logger import log_exception
@shared_task
def get_asset_object_metadata(asset_id):
    """Fetch S3 object metadata for a file asset and persist it.

    Runs as a background job so API requests don't block on the S3
    round-trip when patching an uploaded asset.

    Args:
        asset_id: Primary key of the ``FileAsset`` to refresh.
    """
    try:
        file_asset = FileAsset.objects.get(pk=asset_id)
        # Query object storage for the metadata and stash it on the asset.
        file_asset.storage_metadata = S3Storage().get_object_metadata(
            object_name=file_asset.asset.name
        )
        file_asset.save()
    except FileAsset.DoesNotExist:
        # The asset was deleted before the task ran; nothing to do.
        pass
    except Exception as e:
        # Never crash the worker on a storage hiccup — log and move on.
        log_exception(e)

View File

@@ -39,7 +39,11 @@ class S3Storage(S3Boto3Storage):
aws_access_key_id=self.aws_access_key_id,
aws_secret_access_key=self.aws_secret_access_key,
region_name=self.aws_region,
endpoint_url=f"{request.scheme}://{request.get_host()}",
endpoint_url=(
f"{request.scheme}://{request.get_host()}"
if request
else self.aws_s3_endpoint_url
),
config=boto3.session.Config(signature_version="s3v4"),
)
else:

View File

@@ -15,6 +15,7 @@ from rest_framework.response import Response
from .base import BaseAPIView
from plane.db.models import DeployBoard, FileAsset
from plane.settings.storage import S3Storage
from plane.bgtasks.storage_metadata_task import get_asset_object_metadata
class EntityAssetEndpoint(BaseAPIView):
@@ -159,14 +160,11 @@ class EntityAssetEndpoint(BaseAPIView):
# get the asset id
asset = FileAsset.objects.get(id=pk, workspace=deploy_board.workspace)
storage = S3Storage(request=request)
# get the storage metadata
asset.is_uploaded = True
# get the storage metadata
if asset.storage_metadata is None:
asset.storage_metadata = storage.get_object_metadata(
object_name=asset.asset.name
)
if not asset.storage_metadata:
get_asset_object_metadata.delay(str(asset.id))
# update the attributes
asset.attributes = request.data.get("attributes", asset.attributes)

View File

@@ -34,7 +34,7 @@ x-app-env: &app-env
- SECRET_KEY=${SECRET_KEY:-60gp0byfz2dvffa45cxl20p1scy9xbpf6d8c5y0geejgkyp1b5}
# DATA STORE SETTINGS
- USE_MINIO=${USE_MINIO:-1}
- AWS_REGION=${AWS_REGION:-""}
- AWS_REGION=${AWS_REGION:-}
- AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-"access-key"}
- AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-"secret-key"}
- AWS_S3_ENDPOINT_URL=${AWS_S3_ENDPOINT_URL:-http://plane-minio:9000}

View File

@@ -60,12 +60,12 @@ http {
proxy_pass http://space:3002/spaces/;
}
location /${BUCKET_NAME}/ {
location /${BUCKET_NAME} {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://plane-minio:9000/uploads/;
proxy_pass http://plane-minio:9000/${BUCKET_NAME};
}
}
}

View File

@@ -68,12 +68,12 @@ http {
proxy_pass http://space:3000/spaces/;
}
location /${BUCKET_NAME}/ {
location /${BUCKET_NAME} {
proxy_http_version 1.1;
proxy_set_header Upgrade ${dollar}http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host ${dollar}http_host;
proxy_pass http://plane-minio:9000/uploads/;
proxy_pass http://plane-minio:9000/${BUCKET_NAME};
}
}
}