use celery for asynchronous tasks, rename session_uuid -> file_uuid
This commit is contained in:
+4
-8
@@ -1,11 +1,7 @@
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
from celery import shared_task
|
||||
from src.input.file_reader import read_audio  # requires the repo root on PYTHONPATH, e.g. export PYTHONPATH="/home/karthikeyan/code/MainProject/freq-split-enhance:$PYTHONPATH"
|
||||
|
||||
import time
|
||||
|
||||
@shared_task
def process_uploaded_file(file_path):
    """Celery task: run the project's audio reader over an uploaded file.

    Parameters:
        file_path (str): Path on disk of the uploaded audio file.

    Returns:
        str: Fixed status message stored in the task result backend.
    """
    # Hand the file to the project's audio-reading pipeline; all heavy
    # work happens inside read_audio.
    read_audio(file_path=file_path)
    return 'File processed'
|
||||
@shared_task
def save_uploaded_file(file_path, file_content):
    """Save uploaded file asynchronously.

    Fix: the view layer enqueues this with
    ``save_uploaded_file.delay(file_path, ...)``, but a plain function has
    no ``.delay`` attribute — without ``@shared_task`` that call raises
    ``AttributeError``. Decorating it registers the function as a Celery
    task; direct (synchronous) calls still work unchanged.

    Parameters:
        file_path (str): Destination path to write to.
        file_content (bytes): Raw file bytes to persist.
    """
    # 'wb' + context manager: binary write, handle closed even on error.
    with open(file_path, 'wb') as destination:
        destination.write(file_content)
|
||||
+7
-9
@@ -3,6 +3,7 @@ import uuid
|
||||
from rest_framework.decorators import api_view
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
from .tasks import save_uploaded_file
|
||||
|
||||
UPLOAD_DIR = "/tmp/freqsplit"
|
||||
|
||||
@@ -18,24 +19,21 @@ def upload_audio(request):
|
||||
audio_file = request.FILES['file']
|
||||
|
||||
# Generate a unique ID for this upload
|
||||
session_id = str(uuid.uuid4())[:8]
|
||||
file_uuid = str(uuid.uuid4())[:8]
|
||||
|
||||
#Create a subdirectory for this upload
|
||||
session_dir = os.path.join(UPLOAD_DIR, session_id)
|
||||
os.makedirs(session_dir, exist_ok=True)
|
||||
upload_dir = os.path.join(UPLOAD_DIR, file_uuid)
|
||||
os.makedirs(upload_dir, exist_ok=True)
|
||||
|
||||
file_path = os.path.join(session_dir, audio_file.name)
|
||||
file_path = os.path.join(upload_dir, audio_file.name)
|
||||
|
||||
# Save the uploaded file synchronously. NOTE(review): the .delay() call
# below passes audio_file.read(), but the chunks() loop here has already
# consumed the upload stream, so the async task rewrites the file with
# b'' — drop one of the two save paths, or seek(0) before reading.
|
||||
with open(file_path, 'wb') as destination:
|
||||
for chunk in audio_file.chunks():
|
||||
destination.write(chunk)
|
||||
save_uploaded_file.delay(file_path, audio_file.read())
|
||||
|
||||
return Response(
|
||||
{
|
||||
"Status": "File uploaded successfully",
|
||||
"session_id": session_id,
|
||||
"file_path": file_path,
|
||||
"file_uuid": file_uuid,
|
||||
},
|
||||
status=status.HTTP_201_CREATED,
|
||||
)
|
||||
|
||||
@@ -1 +1,3 @@
|
||||
from celery_app import app as celery
|
||||
from .celery import app as celery_app
|
||||
|
||||
__all__ = ('celery_app',)
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
# Celery application bootstrap for the `backend` Django project.
import os
|
||||
from celery import Celery
|
||||
|
||||
# Must point at the Django settings module before config_from_object()
# below tries to read them.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
|
||||
|
||||
app = Celery('backend')
|
||||
# Load config from Django settings; only keys with the `CELERY_` prefix are consumed.
|
||||
app.config_from_object('django.conf:settings', namespace='CELERY')
|
||||
# Register tasks from every installed Django app's tasks.py module.
app.autodiscover_tasks()
|
||||
@@ -123,6 +123,10 @@ STATIC_URL = 'static/'
|
||||
# https://docs.djangoproject.com/en/5.1/ref/settings/#default-auto-field
|
||||
|
||||
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
||||
|
||||
# Configure Redis as the Celery message broker.
|
||||
CELERY_BROKER_URL = 'redis://localhost:6379/0'
|
||||
# Accept only JSON-serialized task payloads.
CELERY_ACCEPT_CONTENT = ['json']
|
||||
CELERY_TASK_SERIALIZER = 'json'
|
||||
# Keep retrying the broker connection while the worker starts up.
CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True
|
||||
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
from celery import Celery
|
||||
|
||||
# Automatically set environment variables in celery_app.py
|
||||
|
||||
# Set Django settings module
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'backend.settings')
|
||||
|
||||
# Add the project directory to sys.path (similar to the manual PYTHONPATH)
|
||||
project_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
|
||||
sys.path.append(project_root)
|
||||
|
||||
app = Celery('backend')
|
||||
|
||||
# Load configuration from Django settings, using the CELERY namespace.
|
||||
app.config_from_object('django.conf:settings', namespace='CELERY')
|
||||
|
||||
# Autodiscover tasks from installed apps.
|
||||
app.autodiscover_tasks()
|
||||
Reference in New Issue
Block a user