From 91329af7b2bb2a1b18d6bb7c6e07d12f60d27bc4 Mon Sep 17 00:00:00 2001 From: Nikhil <118773738+pablohashescobar@users.noreply.github.com> Date: Mon, 25 Mar 2024 13:14:00 +0530 Subject: [PATCH] dev: importers and integrations (#140) --- apiserver/plane/app/serializers/__init__.py | 10 + .../app/serializers/integration/__init__.py | 8 + .../plane/app/serializers/integration/base.py | 22 + .../app/serializers/integration/github.py | 45 ++ .../app/serializers/integration/slack.py | 14 + apiserver/plane/app/urls/__init__.py | 3 +- apiserver/plane/app/urls/importer.py | 37 ++ apiserver/plane/app/urls/integration.py | 150 +++++ apiserver/plane/app/views/__init__.py | 25 +- apiserver/plane/app/views/importer/base.py | 560 ++++++++++++++++++ .../plane/app/views/integration/__init__.py | 9 + apiserver/plane/app/views/integration/base.py | 183 ++++++ .../plane/app/views/integration/github.py | 202 +++++++ .../plane/app/views/integration/slack.py | 96 +++ apiserver/plane/utils/importers/__init__.py | 0 apiserver/plane/utils/importers/jira.py | 117 ++++ .../plane/utils/integrations/__init__.py | 0 apiserver/plane/utils/integrations/github.py | 154 +++++ apiserver/plane/utils/integrations/slack.py | 21 + 19 files changed, 1654 insertions(+), 2 deletions(-) create mode 100644 apiserver/plane/app/serializers/integration/__init__.py create mode 100644 apiserver/plane/app/serializers/integration/base.py create mode 100644 apiserver/plane/app/serializers/integration/github.py create mode 100644 apiserver/plane/app/serializers/integration/slack.py create mode 100644 apiserver/plane/app/urls/importer.py create mode 100644 apiserver/plane/app/urls/integration.py create mode 100644 apiserver/plane/app/views/importer/base.py create mode 100644 apiserver/plane/app/views/integration/__init__.py create mode 100644 apiserver/plane/app/views/integration/base.py create mode 100644 apiserver/plane/app/views/integration/github.py create mode 100644 apiserver/plane/app/views/integration/slack.py 
create mode 100644 apiserver/plane/utils/importers/__init__.py create mode 100644 apiserver/plane/utils/importers/jira.py create mode 100644 apiserver/plane/utils/integrations/__init__.py create mode 100644 apiserver/plane/utils/integrations/github.py create mode 100644 apiserver/plane/utils/integrations/slack.py diff --git a/apiserver/plane/app/serializers/__init__.py b/apiserver/plane/app/serializers/__init__.py index 4391624936..92835d0913 100644 --- a/apiserver/plane/app/serializers/__init__.py +++ b/apiserver/plane/app/serializers/__init__.py @@ -124,3 +124,13 @@ from .exporter import ExporterHistorySerializer from .webhook import WebhookSerializer, WebhookLogSerializer from .dashboard import DashboardSerializer, WidgetSerializer + +from .integration import ( + IntegrationSerializer, + WorkspaceIntegrationSerializer, + GithubIssueSyncSerializer, + GithubRepositorySerializer, + GithubRepositorySyncSerializer, + GithubCommentSyncSerializer, + SlackProjectSyncSerializer, +) diff --git a/apiserver/plane/app/serializers/integration/__init__.py b/apiserver/plane/app/serializers/integration/__init__.py new file mode 100644 index 0000000000..112ff02d16 --- /dev/null +++ b/apiserver/plane/app/serializers/integration/__init__.py @@ -0,0 +1,8 @@ +from .base import IntegrationSerializer, WorkspaceIntegrationSerializer +from .github import ( + GithubRepositorySerializer, + GithubRepositorySyncSerializer, + GithubIssueSyncSerializer, + GithubCommentSyncSerializer, +) +from .slack import SlackProjectSyncSerializer diff --git a/apiserver/plane/app/serializers/integration/base.py b/apiserver/plane/app/serializers/integration/base.py new file mode 100644 index 0000000000..01e484ed02 --- /dev/null +++ b/apiserver/plane/app/serializers/integration/base.py @@ -0,0 +1,22 @@ +# Module imports +from plane.app.serializers import BaseSerializer +from plane.db.models import Integration, WorkspaceIntegration + + +class IntegrationSerializer(BaseSerializer): + class Meta: + model = 
Integration + fields = "__all__" + read_only_fields = [ + "verified", + ] + + +class WorkspaceIntegrationSerializer(BaseSerializer): + integration_detail = IntegrationSerializer( + read_only=True, source="integration" + ) + + class Meta: + model = WorkspaceIntegration + fields = "__all__" diff --git a/apiserver/plane/app/serializers/integration/github.py b/apiserver/plane/app/serializers/integration/github.py new file mode 100644 index 0000000000..850bccf1b3 --- /dev/null +++ b/apiserver/plane/app/serializers/integration/github.py @@ -0,0 +1,45 @@ +# Module imports +from plane.app.serializers import BaseSerializer +from plane.db.models import ( + GithubIssueSync, + GithubRepository, + GithubRepositorySync, + GithubCommentSync, +) + + +class GithubRepositorySerializer(BaseSerializer): + class Meta: + model = GithubRepository + fields = "__all__" + + +class GithubRepositorySyncSerializer(BaseSerializer): + repo_detail = GithubRepositorySerializer(source="repository") + + class Meta: + model = GithubRepositorySync + fields = "__all__" + + +class GithubIssueSyncSerializer(BaseSerializer): + class Meta: + model = GithubIssueSync + fields = "__all__" + read_only_fields = [ + "project", + "workspace", + "repository_sync", + ] + + +class GithubCommentSyncSerializer(BaseSerializer): + class Meta: + model = GithubCommentSync + fields = "__all__" + read_only_fields = [ + "project", + "workspace", + "repository_sync", + "issue_sync", + ] diff --git a/apiserver/plane/app/serializers/integration/slack.py b/apiserver/plane/app/serializers/integration/slack.py new file mode 100644 index 0000000000..9c461c5b9b --- /dev/null +++ b/apiserver/plane/app/serializers/integration/slack.py @@ -0,0 +1,14 @@ +# Module imports +from plane.app.serializers import BaseSerializer +from plane.db.models import SlackProjectSync + + +class SlackProjectSyncSerializer(BaseSerializer): + class Meta: + model = SlackProjectSync + fields = "__all__" + read_only_fields = [ + "project", + "workspace", + 
"workspace_integration", + ] diff --git a/apiserver/plane/app/urls/__init__.py b/apiserver/plane/app/urls/__init__.py index 9adb9e4124..6ce8e8b5e9 100644 --- a/apiserver/plane/app/urls/__init__.py +++ b/apiserver/plane/app/urls/__init__.py @@ -19,7 +19,8 @@ from .views import urlpatterns as view_urls from .workspace import urlpatterns as workspace_urls from .api import urlpatterns as api_urls from .webhook import urlpatterns as webhook_urls - +from .importer import urlpatterns as importer_urls +from .integration import urlpatterns as integration_urls from .active_cycle import urlpatterns as active_cycle_urls diff --git a/apiserver/plane/app/urls/importer.py b/apiserver/plane/app/urls/importer.py new file mode 100644 index 0000000000..f3a018d789 --- /dev/null +++ b/apiserver/plane/app/urls/importer.py @@ -0,0 +1,37 @@ +from django.urls import path + + +from plane.app.views import ( + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//importers//", + ServiceIssueImportSummaryEndpoint.as_view(), + name="importer-summary", + ), + path( + "workspaces//projects/importers//", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers/", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers///", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//projects//service//importers//", + UpdateServiceImportStatusEndpoint.as_view(), + name="importer-status", + ), +] diff --git a/apiserver/plane/app/urls/integration.py b/apiserver/plane/app/urls/integration.py new file mode 100644 index 0000000000..cf3f82d5a4 --- /dev/null +++ b/apiserver/plane/app/urls/integration.py @@ -0,0 +1,150 @@ +from django.urls import path + + +from plane.app.views import ( + IntegrationViewSet, + WorkspaceIntegrationViewSet, + GithubRepositoriesEndpoint, + GithubRepositorySyncViewSet, + GithubIssueSyncViewSet, + 
GithubCommentSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, + SlackProjectSyncViewSet, +) + + +urlpatterns = [ + path( + "integrations/", + IntegrationViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="integrations", + ), + path( + "integrations//", + IntegrationViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="integrations", + ), + path( + "workspaces//workspace-integrations/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "list", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//", + WorkspaceIntegrationViewSet.as_view( + { + "post": "create", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//provider/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="workspace-integrations", + ), + # Github Integrations + path( + "workspaces//workspace-integrations//github-repositories/", + GithubRepositoriesEndpoint.as_view(), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync/", + GithubRepositorySyncViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync//", + GithubRepositorySyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync/", + GithubIssueSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/", + BulkCreateGithubIssueSyncEndpoint.as_view(), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//", + GithubIssueSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + path( + 
"workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/", + GithubCommentSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//", + GithubCommentSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + ## End Github Integrations + # Slack Integration + path( + "workspaces//projects//workspace-integrations//project-slack-sync/", + SlackProjectSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//project-slack-sync//", + SlackProjectSyncViewSet.as_view( + { + "delete": "destroy", + "get": "retrieve", + } + ), + ), + ## End Slack Integration +] diff --git a/apiserver/plane/app/views/__init__.py b/apiserver/plane/app/views/__init__.py index abcfcbdadb..d619bb1b13 100644 --- a/apiserver/plane/app/views/__init__.py +++ b/apiserver/plane/app/views/__init__.py @@ -38,7 +38,7 @@ from .workspace.base import ( WorkSpaceAvailabilityCheckEndpoint, UserWorkspaceDashboardEndpoint, WorkspaceThemeViewSet, - ExportWorkspaceUserActivityEndpoint + ExportWorkspaceUserActivityEndpoint, ) from .workspace.member import ( @@ -232,3 +232,26 @@ from .webhook.base import ( from .dashboard.base import DashboardEndpoint, WidgetsEndpoint from .error_404 import custom_404_view + +from .importer.base import ( + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, + BulkImportIssuesEndpoint, + BulkImportModulesEndpoint, +) + +from .integration.base import ( + IntegrationViewSet, + WorkspaceIntegrationViewSet, +) + +from .integration.github import ( + GithubRepositoriesEndpoint, + GithubRepositorySyncViewSet, + GithubIssueSyncViewSet, + GithubCommentSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, +) + +from .integration.slack import SlackProjectSyncViewSet diff --git 
a/apiserver/plane/app/views/importer/base.py b/apiserver/plane/app/views/importer/base.py new file mode 100644 index 0000000000..9ef85181cf --- /dev/null +++ b/apiserver/plane/app/views/importer/base.py @@ -0,0 +1,560 @@ +# Python imports +import uuid + +# Third party imports +from rest_framework import status +from rest_framework.response import Response + +# Django imports +from django.db.models import Max, Q + +# Module imports +from plane.app.views import BaseAPIView +from plane.db.models import ( + WorkspaceIntegration, + Importer, + APIToken, + Project, + State, + IssueSequence, + Issue, + IssueActivity, + IssueComment, + IssueLink, + IssueLabel, + Workspace, + IssueAssignee, + Module, + ModuleLink, + ModuleIssue, + Label, +) +from plane.app.serializers import ( + ImporterSerializer, + IssueFlatSerializer, + ModuleSerializer, +) +from plane.utils.integrations.github import get_github_repo_details +from plane.utils.importers.jira import ( + jira_project_issue_summary, + is_allowed_hostname, +) +from plane.bgtasks.importer_task import service_importer +from plane.utils.html_processor import strip_tags +from plane.app.permissions import WorkSpaceAdminPermission + + +class ServiceIssueImportSummaryEndpoint(BaseAPIView): + def get(self, request, slug, service): + if service == "github": + owner = request.GET.get("owner", False) + repo = request.GET.get("repo", False) + + if not owner or not repo: + return Response( + {"error": "Owner and repo are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace_integration = WorkspaceIntegration.objects.get( + integration__provider="github", workspace__slug=slug + ) + + access_tokens_url = workspace_integration.metadata.get( + "access_tokens_url", False + ) + + if not access_tokens_url: + return Response( + { + "error": "There was an error during the installation of the GitHub app. To resolve this issue, we recommend reinstalling the GitHub app." 
+ }, + status=status.HTTP_400_BAD_REQUEST, + ) + + issue_count, labels, collaborators = get_github_repo_details( + access_tokens_url, owner, repo + ) + return Response( + { + "issue_count": issue_count, + "labels": labels, + "collaborators": collaborators, + }, + status=status.HTTP_200_OK, + ) + + if service == "jira": + # Check for all the keys + params = { + "project_key": "Project key is required", + "api_token": "API token is required", + "email": "Email is required", + "cloud_hostname": "Cloud hostname is required", + } + + for key, error_message in params.items(): + if not request.GET.get(key, False): + return Response( + {"error": error_message}, + status=status.HTTP_400_BAD_REQUEST, + ) + + project_key = request.GET.get("project_key", "") + api_token = request.GET.get("api_token", "") + email = request.GET.get("email", "") + cloud_hostname = request.GET.get("cloud_hostname", "") + + response = jira_project_issue_summary( + email, api_token, project_key, cloud_hostname + ) + if "error" in response: + return Response(response, status=status.HTTP_400_BAD_REQUEST) + else: + return Response( + response, + status=status.HTTP_200_OK, + ) + return Response( + {"error": "Service not supported yet"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ImportServiceEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def post(self, request, slug, service): + project_id = request.data.get("project_id", False) + + if not project_id: + return Response( + {"error": "Project ID is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + workspace = Workspace.objects.get(slug=slug) + + if service == "github": + data = request.data.get("data", False) + metadata = request.data.get("metadata", False) + config = request.data.get("config", False) + if not data or not metadata or not config: + return Response( + {"error": "Data, config and metadata are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + api_token = APIToken.objects.filter( + 
user=request.user, workspace=workspace + ).first() + if api_token is None: + api_token = APIToken.objects.create( + user=request.user, + label="Importer", + workspace=workspace, + ) + + importer = Importer.objects.create( + service=service, + project_id=project_id, + status="queued", + initiated_by=request.user, + data=data, + metadata=metadata, + token=api_token, + config=config, + created_by=request.user, + updated_by=request.user, + ) + + service_importer.delay(service, importer.id) + serializer = ImporterSerializer(importer) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + if service == "jira": + data = request.data.get("data", False) + metadata = request.data.get("metadata", False) + config = request.data.get("config", False) + + cloud_hostname = metadata.get("cloud_hostname", False) + + if not cloud_hostname: + return Response( + {"error": "Cloud hostname is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if not is_allowed_hostname(cloud_hostname): + return Response( + {"error": "Hostname is not a valid hostname."}, + status=status.HTTP_400_BAD_REQUEST, + ) + + if not data or not metadata: + return Response( + {"error": "Data, config and metadata are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + api_token = APIToken.objects.filter( + user=request.user, workspace=workspace + ).first() + if api_token is None: + api_token = APIToken.objects.create( + user=request.user, + label="Importer", + workspace=workspace, + ) + + importer = Importer.objects.create( + service=service, + project_id=project_id, + status="queued", + initiated_by=request.user, + data=data, + metadata=metadata, + token=api_token, + config=config, + created_by=request.user, + updated_by=request.user, + ) + + service_importer.delay(service, importer.id) + serializer = ImporterSerializer(importer) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + return Response( + {"error": "Servivce not supported yet"}, + 
status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug): + imports = ( + Importer.objects.filter(workspace__slug=slug) + .order_by("-created_at") + .select_related("initiated_by", "project", "workspace") + ) + serializer = ImporterSerializer(imports, many=True) + return Response(serializer.data) + + def delete(self, request, slug, service, pk): + importer = Importer.objects.get( + pk=pk, service=service, workspace__slug=slug + ) + + if importer.imported_data is not None: + # Delete all imported Issues + imported_issues = importer.imported_data.get("issues", []) + Issue.issue_objects.filter(id__in=imported_issues).delete() + + # Delete all imported Labels + imported_labels = importer.imported_data.get("labels", []) + Label.objects.filter(id__in=imported_labels).delete() + + if importer.service == "jira": + imported_modules = importer.imported_data.get("modules", []) + Module.objects.filter(id__in=imported_modules).delete() + importer.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + def patch(self, request, slug, service, pk): + importer = Importer.objects.get( + pk=pk, service=service, workspace__slug=slug + ) + serializer = ImporterSerializer( + importer, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + +class UpdateServiceImportStatusEndpoint(BaseAPIView): + def post(self, request, slug, project_id, service, importer_id): + importer = Importer.objects.get( + pk=importer_id, + workspace__slug=slug, + project_id=project_id, + service=service, + ) + importer.status = request.data.get("status", "processing") + importer.save() + return Response(status.HTTP_200_OK) + + +class BulkImportIssuesEndpoint(BaseAPIView): + def post(self, request, slug, project_id, service): + # Get the project + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + # Get 
the default state + default_state = State.objects.filter( + ~Q(name="Triage"), project_id=project_id, default=True + ).first() + # if there is no default state assign any random state + if default_state is None: + default_state = State.objects.filter( + ~Q(name="Triage"), project_id=project_id + ).first() + + # Get the maximum sequence_id + last_id = IssueSequence.objects.filter( + project_id=project_id + ).aggregate(largest=Max("sequence"))["largest"] + + last_id = 1 if last_id is None else last_id + 1 + + # Get the maximum sort order + largest_sort_order = Issue.objects.filter( + project_id=project_id, state=default_state + ).aggregate(largest=Max("sort_order"))["largest"] + + largest_sort_order = ( + 65535 if largest_sort_order is None else largest_sort_order + 10000 + ) + + # Get the issues_data + issues_data = request.data.get("issues_data", []) + + if not len(issues_data): + return Response( + {"error": "Issue data is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Issues + bulk_issues = [] + for issue_data in issues_data: + bulk_issues.append( + Issue( + project_id=project_id, + workspace_id=project.workspace_id, + state_id=( + issue_data.get("state") + if issue_data.get("state", False) + else default_state.id + ), + name=issue_data.get("name", "Issue Created through Bulk"), + description_html=issue_data.get( + "description_html", "

" + ), + description_stripped=( + None + if ( + issue_data.get("description_html") == "" + or issue_data.get("description_html") is None + ) + else strip_tags(issue_data.get("description_html")) + ), + sequence_id=last_id, + sort_order=largest_sort_order, + start_date=issue_data.get("start_date", None), + target_date=issue_data.get("target_date", None), + priority=issue_data.get("priority", "none"), + created_by=request.user, + ) + ) + + largest_sort_order = largest_sort_order + 10000 + last_id = last_id + 1 + + issues = Issue.objects.bulk_create( + bulk_issues, + batch_size=100, + ignore_conflicts=True, + ) + + # Sequences + _ = IssueSequence.objects.bulk_create( + [ + IssueSequence( + issue=issue, + sequence=issue.sequence_id, + project_id=project_id, + workspace_id=project.workspace_id, + ) + for issue in issues + ], + batch_size=100, + ) + + # Attach Labels + bulk_issue_labels = [] + for issue, issue_data in zip(issues, issues_data): + labels_list = issue_data.get("labels_list", []) + bulk_issue_labels = bulk_issue_labels + [ + IssueLabel( + issue=issue, + label_id=label_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for label_id in labels_list + ] + + _ = IssueLabel.objects.bulk_create( + bulk_issue_labels, batch_size=100, ignore_conflicts=True + ) + + # Attach Assignees + bulk_issue_assignees = [] + for issue, issue_data in zip(issues, issues_data): + assignees_list = issue_data.get("assignees_list", []) + bulk_issue_assignees = bulk_issue_assignees + [ + IssueAssignee( + issue=issue, + assignee_id=assignee_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for assignee_id in assignees_list + ] + + _ = IssueAssignee.objects.bulk_create( + bulk_issue_assignees, batch_size=100, ignore_conflicts=True + ) + + # Track the issue activities + IssueActivity.objects.bulk_create( + [ + IssueActivity( + issue=issue, + actor=request.user, + project_id=project_id, + 
workspace_id=project.workspace_id, + comment=f"imported the issue from {service}", + verb="created", + created_by=request.user, + ) + for issue in issues + ], + batch_size=100, + ) + + # Create Comments + bulk_issue_comments = [] + for issue, issue_data in zip(issues, issues_data): + comments_list = issue_data.get("comments_list", []) + bulk_issue_comments = bulk_issue_comments + [ + IssueComment( + issue=issue, + comment_html=comment.get("comment_html", "

"), + actor=request.user, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for comment in comments_list + ] + + _ = IssueComment.objects.bulk_create( + bulk_issue_comments, batch_size=100 + ) + + # Attach Links + _ = IssueLink.objects.bulk_create( + [ + IssueLink( + issue=issue, + url=issue_data.get("link", {}).get( + "url", "https://github.com" + ), + title=issue_data.get("link", {}).get( + "title", "Original Issue" + ), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for issue, issue_data in zip(issues, issues_data) + ] + ) + + return Response( + {"issues": IssueFlatSerializer(issues, many=True).data}, + status=status.HTTP_201_CREATED, + ) + + +class BulkImportModulesEndpoint(BaseAPIView): + def post(self, request, slug, project_id, service): + modules_data = request.data.get("modules_data", []) + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + modules = Module.objects.bulk_create( + [ + Module( + name=module.get("name", uuid.uuid4().hex), + description=module.get("description", ""), + start_date=module.get("start_date", None), + target_date=module.get("target_date", None), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for module in modules_data + ], + batch_size=100, + ignore_conflicts=True, + ) + + modules = Module.objects.filter( + id__in=[module.id for module in modules] + ) + + if len(modules) == len(modules_data): + _ = ModuleLink.objects.bulk_create( + [ + ModuleLink( + module=module, + url=module_data.get("link", {}).get( + "url", "https://plane.so" + ), + title=module_data.get("link", {}).get( + "title", "Original Issue" + ), + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for module, module_data in zip(modules, modules_data) + ], + batch_size=100, + ignore_conflicts=True, + ) + + bulk_module_issues = [] + for module, module_data in zip(modules, 
modules_data): + module_issues_list = module_data.get("module_issues_list", []) + bulk_module_issues = bulk_module_issues + [ + ModuleIssue( + issue_id=issue, + module=module, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + ) + for issue in module_issues_list + ] + + _ = ModuleIssue.objects.bulk_create( + bulk_module_issues, batch_size=100, ignore_conflicts=True + ) + + serializer = ModuleSerializer(modules, many=True) + return Response( + {"modules": serializer.data}, status=status.HTTP_201_CREATED + ) + + else: + return Response( + { + "message": "Modules created but issues could not be imported" + }, + status=status.HTTP_200_OK, + ) diff --git a/apiserver/plane/app/views/integration/__init__.py b/apiserver/plane/app/views/integration/__init__.py new file mode 100644 index 0000000000..ea20d96eaf --- /dev/null +++ b/apiserver/plane/app/views/integration/__init__.py @@ -0,0 +1,9 @@ +from .base import IntegrationViewSet, WorkspaceIntegrationViewSet +from .github import ( + GithubRepositorySyncViewSet, + GithubIssueSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, + GithubCommentSyncViewSet, + GithubRepositoriesEndpoint, +) +from .slack import SlackProjectSyncViewSet diff --git a/apiserver/plane/app/views/integration/base.py b/apiserver/plane/app/views/integration/base.py new file mode 100644 index 0000000000..41611e5f69 --- /dev/null +++ b/apiserver/plane/app/views/integration/base.py @@ -0,0 +1,183 @@ +# Python improts +import uuid +import requests + +# Django imports +from django.contrib.auth.hashers import make_password + +# Third party imports +from rest_framework.response import Response +from rest_framework import status +from sentry_sdk import capture_exception + +# Module imports +from plane.app.views import BaseViewSet +from plane.db.models import ( + Integration, + WorkspaceIntegration, + Workspace, + User, + WorkspaceMember, + APIToken, +) +from plane.app.serializers import ( + IntegrationSerializer, + 
WorkspaceIntegrationSerializer, +) +from plane.utils.integrations.github import ( + get_github_metadata, + delete_github_installation, +) +from plane.app.permissions import WorkSpaceAdminPermission +from plane.utils.integrations.slack import slack_oauth + + +class IntegrationViewSet(BaseViewSet): + serializer_class = IntegrationSerializer + model = Integration + + def create(self, request): + serializer = IntegrationSerializer(data=request.data) + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def partial_update(self, request, pk): + integration = Integration.objects.get(pk=pk) + if integration.verified: + return Response( + {"error": "Verified integrations cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = IntegrationSerializer( + integration, data=request.data, partial=True + ) + + if serializer.is_valid(): + serializer.save() + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + + def destroy(self, request, pk): + integration = Integration.objects.get(pk=pk) + if integration.verified: + return Response( + {"error": "Verified integrations cannot be updated"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + integration.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + + +class WorkspaceIntegrationViewSet(BaseViewSet): + serializer_class = WorkspaceIntegrationSerializer + model = WorkspaceIntegration + + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .select_related("integration") + ) + + def create(self, request, slug, provider): + workspace = Workspace.objects.get(slug=slug) + integration = Integration.objects.get(provider=provider) + config = {} + if provider == "github": + 
installation_id = request.data.get("installation_id", None) + if not installation_id: + return Response( + {"error": "Installation ID is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + metadata = get_github_metadata(installation_id) + config = {"installation_id": installation_id} + + if provider == "slack": + code = request.data.get("code", False) + + if not code: + return Response( + {"error": "Code is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + slack_response = slack_oauth(code=code) + + metadata = slack_response + access_token = metadata.get("access_token", False) + team_id = metadata.get("team", {}).get("id", False) + if not metadata or not access_token or not team_id: + return Response( + { + "error": "Slack could not be installed. Please try again later" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + config = {"team_id": team_id, "access_token": access_token} + + # Create a bot user + bot_user = User.objects.create( + email=f"{uuid.uuid4().hex}@plane.so", + username=uuid.uuid4().hex, + password=make_password(uuid.uuid4().hex), + is_password_autoset=True, + is_bot=True, + first_name=integration.title, + avatar=( + integration.avatar_url + if integration.avatar_url is not None + else "" + ), + ) + + # Create an API Token for the bot user + api_token = APIToken.objects.create( + user=bot_user, + user_type=1, # bot user + workspace=workspace, + ) + + workspace_integration = WorkspaceIntegration.objects.create( + workspace=workspace, + integration=integration, + actor=bot_user, + api_token=api_token, + metadata=metadata, + config=config, + ) + + # Add bot user as a member of workspace + _ = WorkspaceMember.objects.create( + workspace=workspace_integration.workspace, + member=bot_user, + role=20, + ) + return Response( + WorkspaceIntegrationSerializer(workspace_integration).data, + status=status.HTTP_201_CREATED, + ) + + def destroy(self, request, slug, pk): + workspace_integration = WorkspaceIntegration.objects.get( + pk=pk, 
workspace__slug=slug + ) + + if workspace_integration.integration.provider == "github": + installation_id = workspace_integration.config.get( + "installation_id", False + ) + if installation_id: + delete_github_installation(installation_id=installation_id) + + workspace_integration.delete() + return Response(status=status.HTTP_204_NO_CONTENT) diff --git a/apiserver/plane/app/views/integration/github.py b/apiserver/plane/app/views/integration/github.py new file mode 100644 index 0000000000..2d37c64b07 --- /dev/null +++ b/apiserver/plane/app/views/integration/github.py @@ -0,0 +1,202 @@ +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from sentry_sdk import capture_exception + +# Module imports +from plane.app.views import BaseViewSet, BaseAPIView +from plane.db.models import ( + GithubIssueSync, + GithubRepositorySync, + GithubRepository, + WorkspaceIntegration, + ProjectMember, + Label, + GithubCommentSync, + Project, +) +from plane.app.serializers import ( + GithubIssueSyncSerializer, + GithubRepositorySyncSerializer, + GithubCommentSyncSerializer, +) +from plane.utils.integrations.github import get_github_repos +from plane.app.permissions import ( + ProjectBasePermission, + ProjectEntityPermission, +) + + +class GithubRepositoriesEndpoint(BaseAPIView): + permission_classes = [ + ProjectBasePermission, + ] + + def get(self, request, slug, workspace_integration_id): + page = request.GET.get("page", 1) + workspace_integration = WorkspaceIntegration.objects.get( + workspace__slug=slug, pk=workspace_integration_id + ) + + if workspace_integration.integration.provider != "github": + return Response( + {"error": "Not a github integration"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + access_tokens_url = workspace_integration.metadata["access_tokens_url"] + repositories_url = ( + workspace_integration.metadata["repositories_url"] + + f"?per_page=100&page={page}" + ) + repositories = 
get_github_repos(access_tokens_url, repositories_url) + return Response(repositories, status=status.HTTP_200_OK) + + +class GithubRepositorySyncViewSet(BaseViewSet): + permission_classes = [ + ProjectBasePermission, + ] + + serializer_class = GithubRepositorySyncSerializer + model = GithubRepositorySync + + def perform_create(self, serializer): + serializer.save(project_id=self.kwargs.get("project_id")) + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + ) + + def create(self, request, slug, project_id, workspace_integration_id): + name = request.data.get("name", False) + url = request.data.get("url", False) + config = request.data.get("config", {}) + repository_id = request.data.get("repository_id", False) + owner = request.data.get("owner", False) + + if not name or not url or not repository_id or not owner: + return Response( + {"error": "Name, url, repository_id and owner are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Get the workspace integration + workspace_integration = WorkspaceIntegration.objects.get( + pk=workspace_integration_id + ) + + # Delete the old repository object + GithubRepositorySync.objects.filter( + project_id=project_id, workspace__slug=slug + ).delete() + GithubRepository.objects.filter( + project_id=project_id, workspace__slug=slug + ).delete() + + # Create repository + repo = GithubRepository.objects.create( + name=name, + url=url, + config=config, + repository_id=repository_id, + owner=owner, + project_id=project_id, + ) + + # Create a Label for github + label = Label.objects.filter( + name="GitHub", + project_id=project_id, + ).first() + + if label is None: + label = Label.objects.create( + name="GitHub", + project_id=project_id, + description="Label to sync Plane issues with GitHub issues", + color="#003773", + ) + + # Create repo sync + repo_sync = GithubRepositorySync.objects.create( + 
repository=repo, + workspace_integration=workspace_integration, + actor=workspace_integration.actor, + credentials=request.data.get("credentials", {}), + project_id=project_id, + label=label, + ) + + # Add bot as a member in the project + _ = ProjectMember.objects.get_or_create( + member=workspace_integration.actor, role=20, project_id=project_id + ) + + # Return Response + return Response( + GithubRepositorySyncSerializer(repo_sync).data, + status=status.HTTP_201_CREATED, + ) + + +class GithubIssueSyncViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + + serializer_class = GithubIssueSyncSerializer + model = GithubIssueSync + + def perform_create(self, serializer): + serializer.save( + project_id=self.kwargs.get("project_id"), + repository_sync_id=self.kwargs.get("repo_sync_id"), + ) + + +class BulkCreateGithubIssueSyncEndpoint(BaseAPIView): + def post(self, request, slug, project_id, repo_sync_id): + project = Project.objects.get(pk=project_id, workspace__slug=slug) + + github_issue_syncs = request.data.get("github_issue_syncs", []) + github_issue_syncs = GithubIssueSync.objects.bulk_create( + [ + GithubIssueSync( + issue_id=github_issue_sync.get("issue"), + repo_issue_id=github_issue_sync.get("repo_issue_id"), + issue_url=github_issue_sync.get("issue_url"), + github_issue_id=github_issue_sync.get("github_issue_id"), + repository_sync_id=repo_sync_id, + project_id=project_id, + workspace_id=project.workspace_id, + created_by=request.user, + updated_by=request.user, + ) + for github_issue_sync in github_issue_syncs + ], + batch_size=100, + ignore_conflicts=True, + ) + + serializer = GithubIssueSyncSerializer(github_issue_syncs, many=True) + return Response(serializer.data, status=status.HTTP_201_CREATED) + + +class GithubCommentSyncViewSet(BaseViewSet): + permission_classes = [ + ProjectEntityPermission, + ] + + serializer_class = GithubCommentSyncSerializer + model = GithubCommentSync + + def perform_create(self, serializer): + 
serializer.save( + project_id=self.kwargs.get("project_id"), + issue_sync_id=self.kwargs.get("issue_sync_id"), + ) diff --git a/apiserver/plane/app/views/integration/slack.py b/apiserver/plane/app/views/integration/slack.py new file mode 100644 index 0000000000..c22ee3e52b --- /dev/null +++ b/apiserver/plane/app/views/integration/slack.py @@ -0,0 +1,96 @@ +# Django import +from django.db import IntegrityError + +# Third party imports +from rest_framework import status +from rest_framework.response import Response +from sentry_sdk import capture_exception + +# Module imports +from plane.app.views import BaseViewSet, BaseAPIView +from plane.db.models import ( + SlackProjectSync, + WorkspaceIntegration, + ProjectMember, +) +from plane.app.serializers import SlackProjectSyncSerializer +from plane.app.permissions import ( + ProjectBasePermission, + ProjectEntityPermission, +) +from plane.utils.integrations.slack import slack_oauth + + +class SlackProjectSyncViewSet(BaseViewSet): + permission_classes = [ + ProjectBasePermission, + ] + serializer_class = SlackProjectSyncSerializer + model = SlackProjectSync + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + .filter( + project__project_projectmember__member=self.request.user, + project__project_projectmember__is_active=True, + ) + ) + + def create(self, request, slug, project_id, workspace_integration_id): + try: + code = request.data.get("code", False) + + if not code: + return Response( + {"error": "Code is required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + slack_response = slack_oauth(code=code) + + workspace_integration = WorkspaceIntegration.objects.get( + workspace__slug=slug, pk=workspace_integration_id + ) + + workspace_integration = WorkspaceIntegration.objects.get( + pk=workspace_integration_id, workspace__slug=slug + ) + slack_project_sync = SlackProjectSync.objects.create( + 
def is_allowed_hostname(hostname):
    """Return True when *hostname* belongs to an Atlassian-operated domain.

    Only the registrable (last two) labels are compared, so any subdomain
    of an allowed domain passes while look-alike hosts are rejected.
    """
    allowed_domains = [
        "atl-paas.net",
        "atlassian.com",
        "atlassian.net",
        "jira.com",
    ]
    parsed_uri = urlparse(f"https://{hostname}")
    domain = parsed_uri.netloc.split(":")[0]  # Ensures no port is included
    base_domain = ".".join(domain.split(".")[-2:])
    return base_domain in allowed_domains


def is_valid_project_key(project_key):
    """Return True when *project_key* normalizes to a valid Jira key.

    Falsy input (None, "") is invalid. The key is stripped and upper-cased
    before being matched against Jira's short alphanumeric key format.
    """
    if not project_key:
        # BUGFIX: the original fell through without `return`, yielding None.
        return False
    project_key = project_key.strip().upper()
    if len(project_key) > 30:
        return False
    # Jira keys are short runs of upper-case letters and digits.
    pattern = re.compile(r"^[A-Z0-9]{1,10}$")
    return pattern.match(project_key) is not None


def generate_valid_project_key(project_key):
    """Normalize a project key to Jira's canonical upper-case form."""
    return project_key.strip().upper()


def generate_url(hostname, path):
    """Build an https URL on *hostname*, refusing unauthorized hosts.

    Raises:
        ValueError: if *hostname* is not an allowed Atlassian domain.
    """
    if not is_allowed_hostname(hostname):
        raise ValueError("Invalid or unauthorized hostname")
    return urljoin(f"https://{hostname}", path)


def jira_project_issue_summary(email, api_token, project_key, hostname):
    """Summarize a Jira project: issue/epic/label counts, states and users.

    Returns a dict with keys issues, modules, labels, states, users on
    success, or {"error": ...} on any validation or request failure.
    """
    try:
        if not is_allowed_hostname(hostname):
            return {"error": "Invalid or unauthorized hostname"}

        if not is_valid_project_key(project_key):
            return {"error": "Invalid project key"}

        auth = HTTPBasicAuth(email, api_token)
        headers = {"Accept": "application/json"}

        # make the project key upper case
        project_key = generate_valid_project_key(project_key)

        # issues (non-epic); epics are imported as Plane modules below
        issue_url = generate_url(
            hostname,
            f"/rest/api/3/search?jql=project={project_key} AND issuetype!=Epic",
        )
        issue_response = requests.request(
            "GET", issue_url, headers=headers, auth=auth
        ).json()["total"]

        # modules (Jira epics)
        module_url = generate_url(
            hostname,
            f"/rest/api/3/search?jql=project={project_key} AND issuetype=Epic",
        )
        module_response = requests.request(
            "GET", module_url, headers=headers, auth=auth
        ).json()["total"]

        # status
        # BUGFIX: was f"...project/${project_key}/statuses" — the stray "$"
        # (a JS template-literal leftover) put a literal "$" in the URL.
        status_url = generate_url(
            hostname, f"/rest/api/3/project/{project_key}/statuses"
        )
        status_response = requests.request(
            "GET", status_url, headers=headers, auth=auth
        ).json()

        # labels
        labels_url = generate_url(
            hostname, f"/rest/api/3/label/?jql=project={project_key}"
        )
        labels_response = requests.request(
            "GET", labels_url, headers=headers, auth=auth
        ).json()["total"]

        # users
        users_url = generate_url(
            hostname, f"/rest/api/3/users/search?jql=project={project_key}"
        )
        users_response = requests.request(
            "GET", users_url, headers=headers, auth=auth
        ).json()

        return {
            "issues": issue_response,
            "modules": module_response,
            "labels": labels_response,
            "states": len(status_response),
            # Only real Atlassian accounts; app/bot accounts are excluded.
            "users": (
                [
                    user
                    for user in users_response
                    if user.get("accountType") == "atlassian"
                ]
            ),
        }
    except Exception as e:
        capture_exception(e)
        return {
            "error": "Something went wrong could not fetch information from jira"
        }
+ } + response = requests.get(url, headers=headers).json() + return response + + +def get_github_repos(access_tokens_url, repositories_url): + token = get_jwt_token() + + headers = { + "Authorization": "Bearer " + str(token), + "Accept": "application/vnd.github+json", + } + + oauth_response = requests.post( + access_tokens_url, + headers=headers, + ).json() + + oauth_token = oauth_response.get("token", "") + headers = { + "Authorization": "Bearer " + str(oauth_token), + "Accept": "application/vnd.github+json", + } + response = requests.get( + repositories_url, + headers=headers, + ).json() + return response + + +def delete_github_installation(installation_id): + token = get_jwt_token() + + url = f"https://api.github.com/app/installations/{installation_id}" + headers = { + "Authorization": "Bearer " + str(token), + "Accept": "application/vnd.github+json", + } + response = requests.delete(url, headers=headers) + return response + + +def get_github_repo_details(access_tokens_url, owner, repo): + token = get_jwt_token() + + headers = { + "Authorization": "Bearer " + str(token), + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + + oauth_response = requests.post( + access_tokens_url, + headers=headers, + ).json() + + oauth_token = oauth_response.get("token") + headers = { + "Authorization": "Bearer " + oauth_token, + "Accept": "application/vnd.github+json", + } + open_issues = requests.get( + f"https://api.github.com/repos/{owner}/{repo}", + headers=headers, + ).json()["open_issues_count"] + + total_labels = 0 + + labels_response = requests.get( + f"https://api.github.com/repos/{owner}/{repo}/labels?per_page=100&page=1", + headers=headers, + ) + + # Check if there are more pages + if len(labels_response.links.keys()): + # get the query parameter of last + last_url = labels_response.links.get("last").get("url") + parsed_url = urlparse(last_url) + last_page_value = parse_qs(parsed_url.query)["page"][0] + total_labels = total_labels + 
100 * (int(last_page_value) - 1) + + # Get labels in last page + last_page_labels = requests.get(last_url, headers=headers).json() + total_labels = total_labels + len(last_page_labels) + else: + total_labels = len(labels_response.json()) + + # Currently only supporting upto 100 collaborators + # TODO: Update this function to fetch all collaborators + collaborators = requests.get( + f"https://api.github.com/repos/{owner}/{repo}/collaborators?per_page=100&page=1", + headers=headers, + ).json() + + return open_issues, total_labels, collaborators + + +def get_release_notes(): + token = settings.GITHUB_ACCESS_TOKEN + + if token: + headers = { + "Authorization": "Bearer " + str(token), + "Accept": "application/vnd.github.v3+json", + } + else: + headers = { + "Accept": "application/vnd.github.v3+json", + } + url = "https://api.github.com/repos/makeplane/plane/releases?per_page=5&page=1" + response = requests.get(url, headers=headers) + + if response.status_code != 200: + return {"error": "Unable to render information from Github Repository"} + + return response.json() diff --git a/apiserver/plane/utils/integrations/slack.py b/apiserver/plane/utils/integrations/slack.py new file mode 100644 index 0000000000..0cc5b93b27 --- /dev/null +++ b/apiserver/plane/utils/integrations/slack.py @@ -0,0 +1,21 @@ +import os +import requests + + +def slack_oauth(code): + SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False) + SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False) + SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False) + + # Oauth Slack + if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET: + response = requests.get( + SLACK_OAUTH_URL, + params={ + "code": code, + "client_id": SLACK_CLIENT_ID, + "client_secret": SLACK_CLIENT_SECRET, + }, + ) + return response.json() + return {}