Skip to content

Commit 58942a9

Browse files
authored
added transcoding app from mitol and removed transcoding job code (#2421)
* added transcoding app from mitol and removed transcoding job code * removed redundant code * removed redundant settings * refactored tests and updated transcoding url * added dependency package `mitol-django-transcoding:2025.2.25` * added missing settings for the transcode jobs test * fixed value for the post transcoding action setting in tests * bump version for mitol-django-transcoding * rebased, added support for local testing, and added missing settings * removed local testing * added latest transcoding app package * removed redundant asserts
1 parent bdebb99 commit 58942a9

File tree

14 files changed

+129
-187
lines changed

14 files changed

+129
-187
lines changed

main/settings.py

Lines changed: 1 addition & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,7 @@
6969
"mitol.common.settings.webpack",
7070
"mitol.mail.settings.email",
7171
"mitol.authentication.settings.touchstone",
72+
"mitol.transcoding.settings.job",
7273
)
7374

7475
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
@@ -517,23 +518,6 @@
517518
description="Open catalog urls",
518519
required=False,
519520
)
520-
VIDEO_S3_TRANSCODE_PREFIX = get_string(
521-
name="VIDEO_S3_TRANSCODE_PREFIX",
522-
default="aws_mediaconvert_transcodes",
523-
description=(
524-
"Prefix to be used for S3 keys of files transcoded from AWS MediaConvert"
525-
),
526-
)
527-
VIDEO_S3_TRANSCODE_ENDPOINT = get_string(
528-
name="VIDEO_S3_TRANSCODE_ENDPOINT",
529-
default="aws_mediaconvert_transcodes",
530-
description=("Endpoint to be used for AWS MediaConvert"),
531-
)
532-
VIDEO_TRANSCODE_QUEUE = get_string(
533-
name="VIDEO_TRANSCODE_QUEUE",
534-
default="Default",
535-
description=("Name of MediaConvert queue to use for transcoding"),
536-
)
537521
YT_ACCESS_TOKEN = get_string(
538522
name="YT_ACCESS_TOKEN", default="", description="Youtube access token"
539523
)

main/urls.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@
4141
path("", include("mitol.authentication.urls.saml")),
4242
path("", include("mitol.mail.urls")),
4343
path("", include("videos.urls")),
44+
path("api/", include("mitol.transcoding.urls")),
4445
]
4546

4647
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

poetry.lock

Lines changed: 100 additions & 30 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ package-mode = false
1111
python = "~3.12"
1212

1313
beautifulsoup4 = "^4.12.2"
14-
boto3 = "1.37.4"
14+
boto3 = "1.37.11"
1515
celery = "^5.3.0"
1616
cryptography = "^44.0.0"
1717
dj-database-url = "2.3.0"
@@ -56,6 +56,7 @@ uwsgitop = "^0.12"
5656
yamale = "6.0.0"
5757
xmlsec = "1.3.13"
5858
posthog = "^3.7.0"
59+
mitol-django-transcoding = "2025.4.8"
5960

6061
[tool.poetry.group.dev.dependencies]
6162
bpython = "^0.25"

videos/api.py

Lines changed: 9 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,16 @@
11
"""APi functions for video processing"""
22

3-
import json
43
import logging
54
import os
65

7-
import boto3
86
import botocore
97
from django.conf import settings
8+
from mitol.transcoding.api import media_convert_job
109

1110
from content_sync.utils import move_s3_object
1211
from gdrive_sync.models import DriveFile
1312
from gdrive_sync.utils import fetch_content_file_size
1413
from main.s3_utils import get_boto3_resource
15-
from videos.apps import VideoApp
1614
from videos.constants import (
1715
DESTINATION_ARCHIVE,
1816
DESTINATION_YOUTUBE,
@@ -62,42 +60,11 @@ def prepare_video_download_file(video: Video):
6260

6361
def create_media_convert_job(video: Video):
6462
"""Create a MediaConvert job for a Video"""
65-
source_prefix = settings.DRIVE_S3_UPLOAD_PREFIX
66-
client = boto3.client(
67-
"mediaconvert",
68-
region_name=settings.AWS_REGION,
69-
endpoint_url=settings.VIDEO_S3_TRANSCODE_ENDPOINT,
70-
)
71-
with open( # noqa: PTH123
72-
os.path.join( # noqa: PTH118
73-
settings.BASE_DIR, f"{VideoApp.name}/config/mediaconvert.json"
74-
),
75-
encoding="utf-8",
76-
) as job_template:
77-
job_dict = json.loads(job_template.read())
78-
job_dict["UserMetadata"]["filter"] = settings.VIDEO_TRANSCODE_QUEUE
79-
job_dict["Queue"] = (
80-
f"arn:aws:mediaconvert:{settings.AWS_REGION}:{settings.AWS_ACCOUNT_ID}:queues/{settings.VIDEO_TRANSCODE_QUEUE}"
81-
)
82-
job_dict["Role"] = (
83-
f"arn:aws:iam::{settings.AWS_ACCOUNT_ID}:role/{settings.AWS_ROLE_NAME}"
84-
)
85-
destination = os.path.splitext( # noqa: PTH122
86-
video.source_key.replace(
87-
source_prefix,
88-
settings.VIDEO_S3_TRANSCODE_PREFIX,
89-
)
90-
)[0]
91-
job_dict["Settings"]["OutputGroups"][0]["OutputGroupSettings"][
92-
"FileGroupSettings"
93-
]["Destination"] = f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{destination}"
94-
job_dict["Settings"]["Inputs"][0]["FileInput"] = (
95-
f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{video.source_key}"
96-
)
97-
job = client.create_job(**job_dict)
98-
VideoJob.objects.get_or_create(video=video, job_id=job["Job"]["Id"])
99-
video.status = VideoStatus.TRANSCODING
100-
video.save()
63+
64+
job = media_convert_job(video.source_key)
65+
VideoJob.objects.get_or_create(video=video, job_id=job["Job"]["Id"])
66+
video.status = VideoStatus.TRANSCODING
67+
video.save()
10168

10269

10370
def process_video_outputs(video: Video, output_group_details: dict):
@@ -124,8 +91,10 @@ def process_video_outputs(video: Video, output_group_details: dict):
12491
prepare_video_download_file(video)
12592

12693

127-
def update_video_job(video_job: VideoJob, results: dict):
94+
def update_video_job(results: dict):
12895
"""Update a VideoJob and associated Video, VideoFiles based on MediaConvert results""" # noqa: E501
96+
97+
video_job = VideoJob.objects.get(job_id=results.get("jobId"))
12998
video_job.job_output = results
13099
status = results.get("status")
131100
video = video_job.video

videos/api_test.py

Lines changed: 7 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ def test_create_media_convert_job(settings, mocker):
3232
"""create_media_convert_job should send a request to MediaConvert, create a VideoJob object"""
3333
queue_name = "test_queue"
3434
settings.VIDEO_TRANSCODE_QUEUE = queue_name
35-
mock_boto = mocker.patch("videos.api.boto3")
35+
mock_boto = mocker.patch("mitol.transcoding.api.boto3")
3636
job_id = "abcd123-gh564"
3737
mock_boto.client.return_value.create_job.return_value = {"Job": {"Id": job_id}}
3838
video = VideoFactory.create()
@@ -49,16 +49,9 @@ def test_create_media_convert_job(settings, mocker):
4949
destination = call_kwargs["Settings"]["OutputGroups"][0]["OutputGroupSettings"][
5050
"FileGroupSettings"
5151
]["Destination"]
52-
assert destination.startswith(
53-
f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{settings.VIDEO_S3_TRANSCODE_PREFIX}"
54-
)
5552
assert destination.endswith(
5653
path.splitext(video.source_key.split("/")[-1])[0] # noqa: PTH122
5754
)
58-
assert (
59-
call_kwargs["Settings"]["Inputs"][0]["FileInput"]
60-
== f"s3://{settings.AWS_STORAGE_BUCKET_NAME}/{video.source_key}"
61-
)
6255
assert VideoJob.objects.filter(job_id=job_id, video=video).count() == 1
6356
video.refresh_from_db()
6457
assert video.status == VideoStatus.TRANSCODING
@@ -132,12 +125,14 @@ def test_update_video_job_success(mocker, raises_exception):
132125
)
133126
mock_log = mocker.patch("videos.api.log.exception")
134127
video_job = VideoJobFactory.create(status=VideoJobStatus.CREATED)
128+
mock_job = mocker.patch("videos.api.VideoJob.objects.get")
129+
mock_job.return_value = video_job
135130
with open( # noqa: PTH123
136131
f"{TEST_VIDEOS_WEBHOOK_PATH}/cloudwatch_sns_complete.json",
137132
encoding="utf-8",
138133
) as infile:
139134
data = json.loads(infile.read())["detail"]
140-
update_video_job(video_job, data)
135+
update_video_job(data)
141136
mock_process_outputs.assert_called_once()
142137
video_job.refresh_from_db()
143138
assert video_job.job_output == data
@@ -149,11 +144,13 @@ def test_update_video_job_error(mocker):
149144
"""The video job should be updated as expected if the transcode job failed"""
150145
mock_log = mocker.patch("videos.api.log.error")
151146
video_job = VideoJobFactory.create()
147+
mock_job = mocker.patch("videos.api.VideoJob.objects.get")
148+
mock_job.return_value = video_job
152149
with open( # noqa: PTH123
153150
f"{TEST_VIDEOS_WEBHOOK_PATH}/cloudwatch_sns_error.json", encoding="utf-8"
154151
) as infile:
155152
data = json.loads(infile.read())["detail"]
156-
update_video_job(video_job, data)
153+
update_video_job(data)
157154
video_job.refresh_from_db()
158155
assert video_job.job_output == data
159156
assert video_job.error_code == str(data.get("errorCode"))

0 commit comments

Comments
 (0)