From 2b3748da4b0e8de0d399990e9d1583bc51f997f3 Mon Sep 17 00:00:00 2001
From: klpoland
Date: Fri, 20 Feb 2026 14:16:47 -0500
Subject: [PATCH 1/7] adding time based filtering and slider ui
---
.../api_methods/helpers/temporal_filtering.py | 168 ++++++++++++++++++
.../serializers/capture_serializers.py | 56 +++++-
gateway/sds_gateway/api_methods/tasks.py | 62 ++++++-
.../js/actions/DownloadActionManager.js | 13 +-
gateway/sds_gateway/static/js/file-list.js | 6 +-
gateway/sds_gateway/templates/base.html | 3 +
.../templates/users/file_list.html | 2 +
.../users/partials/captures_page_table.html | 4 +-
.../users/partials/web_download_modal.html | 164 +++++++++++++++++
gateway/sds_gateway/users/views.py | 11 ++
10 files changed, 476 insertions(+), 13 deletions(-)
create mode 100644 gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
diff --git a/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
new file mode 100644
index 000000000..16b648d91
--- /dev/null
+++ b/gateway/sds_gateway/api_methods/helpers/temporal_filtering.py
@@ -0,0 +1,168 @@
+import re
+
+from django.db.models import QuerySet
+
+from sds_gateway.api_methods.models import CaptureType, Capture, File
+from sds_gateway.api_methods.utils.opensearch_client import get_opensearch_client
+from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
+from loguru import logger as log
+
+# Digital RF spec: rf@SECONDS.MILLISECONDS.h5 (e.g. rf@1396379502.000.h5)
+# https://github.com/MITHaystack/digital_rf
+DRF_RF_FILENAME_PATTERN = re.compile(
+ r"^rf@(\d+)\.(\d+)\.h5$",
+ re.IGNORECASE,
+)
+DRF_RF_FILENAME_REGEX_STR = r"^rf@\d+\.\d+\.h5$"
+
+
+def drf_rf_filename_from_ms(ms: int) -> str:
+ """Format ms as DRF rf data filename (canonical for range queries)."""
+ return f"rf@{ms // 1000}.{ms % 1000:03d}.h5"
+
+
+def drf_rf_filename_to_ms(file_name: str) -> int | None:
+ """
+ Parse DRF rf data filename to milliseconds.
+ Handles rf@SECONDS.MILLISECONDS.h5; fractional part padded to 3 digits.
+ """
+ name = file_name.strip()
+ match = DRF_RF_FILENAME_PATTERN.match(name)
+ if not match:
+ return None
+ try:
+ seconds = int(match.group(1))
+ frac = match.group(2).ljust(3, "0")[:3]
+ return seconds * 1000 + int(frac)
+ except (ValueError, TypeError):
+ return None
+
+
+def _catch_capture_type_error(capture_type: CaptureType) -> None:
+ if capture_type != CaptureType.DigitalRF:
+ msg = "Only DigitalRF captures are supported for temporal filtering."
+ log.error(msg)
+ raise ValueError(msg)
+
+
+def _parse_drf_rf_timestamp(file_name: str) -> int | None:
+ """Extract timestamp in ms from a Digital RF data filename (alias for drf_rf_filename_to_ms)."""
+ return drf_rf_filename_to_ms(file_name)
+
+
+def get_capture_bounds(capture_type: CaptureType, capture_uuid: str) -> tuple[int, int]:
+ """Get start and end bounds for capture from opensearch."""
+
+ _catch_capture_type_error(capture_type)
+
+ client = get_opensearch_client()
+ index = f"captures-{capture_type}"
+
+ try:
+ response = client.get(index=index, id=capture_uuid)
+ except Exception as e:
+ if getattr(e, "status_code", None) == 404 or (hasattr(e, "info") and e.info.get("status") == 404):
+ raise ValueError(
+ f"Capture {capture_uuid} not found in OpenSearch index {index}"
+ ) from e
+ raise
+
+ if not response.get("found"):
+ raise ValueError(
+ f"Capture {capture_uuid} not found in OpenSearch index {index}"
+ )
+
+ source = response["_source"]
+ search_props = source["search_props"]
+ start_time = search_props["start_time"]
+ end_time = search_props["end_time"]
+    log.debug(f"start_time: {start_time}, end_time: {end_time}")
+ return start_time, end_time
+
+
+def get_data_files(capture_type: CaptureType, capture: Capture) -> QuerySet[File]:
+ """Get the data files in the capture."""
+ _catch_capture_type_error(capture_type)
+
+ return get_capture_files(capture).filter(name__regex=DRF_RF_FILENAME_REGEX_STR)
+
+
+def get_file_cadence(capture_type: CaptureType, capture: Capture) -> int:
+ """Get the file cadence in milliseconds. OpenSearch bounds are in seconds."""
+ _catch_capture_type_error(capture_type)
+
+ capture_uuid = str(capture.uuid)
+ try:
+ start_time, end_time = get_capture_bounds(capture_type, capture_uuid)
+ except ValueError as e:
+ log.error(e)
+        raise
+
+ data_files = get_data_files(capture_type, capture)
+ count = data_files.count()
+ if count == 0:
+ return 0
+ duration_sec = end_time - start_time
+    log.debug(f"duration_sec: {duration_sec}")
+ duration_ms = duration_sec * 1000
+    log.debug(f"duration_ms: {duration_ms}")
+ return max(1, int(duration_ms / count))
+
+
+def get_duration_bounds(capture_type: CaptureType, capture_uuid: str, relative_time: int) -> tuple[int, int]:
+ """Return (0, length_of_capture_ms). OpenSearch bounds are in seconds."""
+ try:
+ start_time, end_time = get_capture_bounds(capture_type, capture_uuid)
+ except ValueError as e:
+ log.error(e)
+        raise
+
+ length_of_capture_ms = (end_time - start_time) * 1000
+ return 0, length_of_capture_ms
+
+
+def filter_capture_data_files_selection_bounds(
+ capture_type: CaptureType,
+ capture: Capture,
+ start_time: int, # relative ms from start of capture (from UI)
+ end_time: int, # relative ms from start of capture (from UI)
+) -> QuerySet[File]:
+ """Filter the capture file selection bounds to the given start and end times."""
+ _catch_capture_type_error(capture_type)
+    epoch_start_sec, _ = get_capture_bounds(capture_type, str(capture.uuid))
+ epoch_start_ms = epoch_start_sec * 1000
+ start_ms = epoch_start_ms + start_time
+ end_ms = epoch_start_ms + end_time
+
+ start_file_name = drf_rf_filename_from_ms(start_ms)
+ end_file_name = drf_rf_filename_from_ms(end_ms)
+
+ data_files = get_data_files(capture_type, capture)
+ return data_files.filter(
+ name__gte=start_file_name,
+ name__lte=end_file_name,
+ ).order_by("name")
+
+def get_capture_files_with_temporal_filter(
+ capture_type: CaptureType,
+ capture: Capture,
+ start_time: int | None = None, # milliseconds since epoch (start of capture)
+ end_time: int | None = None, # milliseconds since epoch
+) -> QuerySet[File]:
+ """Get the capture files with temporal filtering."""
+ _catch_capture_type_error(capture_type)
+
+ if start_time is None or end_time is None:
+ log.warning("Start or end time is None, returning all capture files without temporal filtering")
+ return get_capture_files(capture)
+
+ # get non-data files
+ non_data_files = get_capture_files(capture).exclude(name__regex=DRF_RF_FILENAME_REGEX_STR)
+
+ # get data files with temporal filtering
+ data_files = filter_capture_data_files_selection_bounds(
+ capture_type, capture, start_time, end_time
+ )
+
+ # return all files
+ return non_data_files.union(data_files)
\ No newline at end of file
diff --git a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
index 037ebafd3..692628483 100644
--- a/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
+++ b/gateway/sds_gateway/api_methods/serializers/capture_serializers.py
@@ -9,6 +9,8 @@
from rest_framework.utils.serializer_helpers import ReturnList
from sds_gateway.api_methods.helpers.index_handling import retrieve_indexed_metadata
+from sds_gateway.api_methods.helpers.temporal_filtering import get_capture_bounds
+from sds_gateway.api_methods.helpers.temporal_filtering import get_file_cadence
from sds_gateway.api_methods.models import Capture
from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.models import DEPRECATEDPostProcessedData
@@ -70,6 +72,8 @@ class CaptureGetSerializer(serializers.ModelSerializer[Capture]):
files = serializers.SerializerMethodField()
center_frequency_ghz = serializers.SerializerMethodField()
sample_rate_mhz = serializers.SerializerMethodField()
+ length_of_capture_ms = serializers.SerializerMethodField()
+ file_cadence_ms = serializers.SerializerMethodField()
files_count = serializers.SerializerMethodField()
total_file_size = serializers.SerializerMethodField()
formatted_created_at = serializers.SerializerMethodField()
@@ -94,12 +98,29 @@ def get_files(self, capture: Capture) -> ReturnList[File]:
def get_center_frequency_ghz(self, capture: Capture) -> float | None:
"""Get the center frequency in GHz from the capture model property."""
return capture.center_frequency_ghz
-
- @extend_schema_field(serializers.FloatField)
+
+ @extend_schema_field(serializers.FloatField(allow_null=True))
def get_sample_rate_mhz(self, capture: Capture) -> float | None:
- """Get the sample rate in MHz from the capture model property."""
+ """Get the sample rate in MHz from the capture model property. None if not indexed in OpenSearch."""
return capture.sample_rate_mhz
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_length_of_capture_ms(self, capture: Capture) -> int | None:
+ """Get the length of the capture in milliseconds. OpenSearch bounds are in seconds."""
+ try:
+ start_time, end_time = get_capture_bounds(capture.capture_type, str(capture.uuid))
+ return (end_time - start_time) * 1000
+ except (ValueError, IndexError, KeyError):
+ return None
+
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_file_cadence_ms(self, capture: Capture) -> int | None:
+ """Get the file cadence in milliseconds. None if not indexed in OpenSearch."""
+ try:
+ return get_file_cadence(capture.capture_type, capture)
+ except (ValueError, IndexError, KeyError):
+ return None
+
@extend_schema_field(serializers.IntegerField)
def get_files_count(self, capture: Capture) -> int:
"""Get the count of files associated with this capture."""
@@ -304,6 +325,8 @@ class CompositeCaptureSerializer(serializers.Serializer):
files_count = serializers.SerializerMethodField()
total_file_size = serializers.SerializerMethodField()
formatted_created_at = serializers.SerializerMethodField()
+ length_of_capture_ms = serializers.SerializerMethodField()
+ file_cadence_ms = serializers.SerializerMethodField()
def get_files(self, obj: dict[str, Any]) -> ReturnList[File]:
"""Get all files from all channels in the composite capture."""
@@ -350,6 +373,33 @@ def get_formatted_created_at(self, obj: dict[str, Any]) -> str:
return created_at.strftime("%m/%d/%Y %I:%M:%S %p")
return ""
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_length_of_capture_ms(self, obj: dict[str, Any]) -> int | None:
+ """Use first channel's bounds for composite capture duration."""
+ channels = obj.get("channels") or []
+ if not channels:
+ return None
+ try:
+ capture = Capture.objects.get(uuid=channels[0]["uuid"])
+ start_time, end_time = get_capture_bounds(
+ capture.capture_type, str(capture.uuid)
+ )
+ return (end_time - start_time) * 1000
+ except (ValueError, IndexError, KeyError):
+ return None
+
+ @extend_schema_field(serializers.IntegerField(allow_null=True))
+ def get_file_cadence_ms(self, obj: dict[str, Any]) -> int | None:
+ """Use first channel's file cadence for composite capture."""
+ channels = obj.get("channels") or []
+ if not channels:
+ return None
+ try:
+ capture = Capture.objects.get(uuid=channels[0]["uuid"])
+ return get_file_cadence(capture.capture_type, capture)
+ except (ValueError, IndexError, KeyError):
+ return None
+
def build_composite_capture_data(captures: list[Capture]) -> dict[str, Any]:
"""Build composite capture data from a list of captures with the same top_level_dir.
diff --git a/gateway/sds_gateway/api_methods/tasks.py b/gateway/sds_gateway/api_methods/tasks.py
index e4aed2651..c7dff0b31 100644
--- a/gateway/sds_gateway/api_methods/tasks.py
+++ b/gateway/sds_gateway/api_methods/tasks.py
@@ -26,6 +26,7 @@
from sds_gateway.api_methods.models import TemporaryZipFile
from sds_gateway.api_methods.models import ZipFileStatus
from sds_gateway.api_methods.models import user_has_access_to_item
+from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.utils.disk_utils import DISK_SPACE_BUFFER
from sds_gateway.api_methods.utils.disk_utils import check_disk_space_available
from sds_gateway.api_methods.utils.disk_utils import estimate_disk_size
@@ -676,15 +677,26 @@ def _process_item_files(
item_type: ItemType,
item_uuid: UUID,
temp_zip: TemporaryZipFile,
+ start_time: int | None = None,
+ end_time: int | None = None,
) -> tuple[Mapping[str, UUID | int | str] | None, str | None, int | None, int | None]: # pyright: ignore[reportMissingTypeArgument]
"""
Process files for an item and create a zip file.
+ Args:
+ user: The user requesting the files
+ item: The item object (Dataset or Capture)
+ item_type: Type of item (dataset or capture)
+ item_uuid: UUID of the item to download
+ temp_zip: The temporary zip file to create
+ start_time: Optional start time for temporal filtering
+ end_time: Optional end time for temporal filtering
+
Returns:
tuple: (error_response, zip_file_path, total_size, files_processed)
If error_response is not None, the other values are None
"""
- files = _get_item_files(user, item, item_type)
+ files = _get_item_files(user, item, item_type, start_time, end_time)
if not files:
log.warning(f"No files found for {item_type} {item_uuid}")
error_message = f"No files found in {item_type}"
@@ -979,7 +991,11 @@ def _handle_timeout_exception(
time_limit=30 * 60, soft_time_limit=25 * 60
) # 30 min hard limit, 25 min soft limit
def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915
- item_uuid: UUID, user_id: str, item_type: str | ItemType
+ item_uuid: UUID,
+ user_id: str,
+ item_type: str | ItemType,
+ start_time: int | None = None,
+ end_time: int | None = None,
) -> Mapping[str, UUID | str | int]:
"""
Unified Celery task to create a zip file of item files and send it via email.
@@ -990,6 +1006,8 @@ def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915
item_uuid: UUID of the item to process
user_id: ID of the user requesting the download
item_type: Type of item (dataset or capture)
+ start_time: Optional start time for temporal filtering
+ end_time: Optional end time for temporal filtering
Returns:
dict: Task result with status and details
"""
@@ -1053,6 +1071,8 @@ def send_item_files_email( # noqa: C901, PLR0911, PLR0912, PLR0915
item_type=item_type_enum,
item_uuid=item_uuid,
temp_zip=temp_zip,
+ start_time=start_time,
+ end_time=end_time,
)
)
if error_response:
@@ -1251,7 +1271,13 @@ def _validate_item_download_request(
return None, user, item
-def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]:
+def _get_item_files(
+ user: User,
+ item: Any,
+ item_type: ItemType,
+ start_time: int | None = None,
+ end_time: int | None = None,
+) -> list[File]:
"""
Get all files for an item based on its type.
@@ -1259,14 +1285,16 @@ def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]:
user: The user requesting the files
item: The item object (Dataset or Capture)
item_type: Type of item (dataset or capture)
-
+ start_time: Optional start time for temporal filtering
+ end_time: Optional end time for temporal filtering
Returns:
List of files associated with the item
"""
- from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
+ from sds_gateway.api_methods.helpers.temporal_filtering import get_capture_files_with_temporal_filter
from sds_gateway.api_methods.utils.relationship_utils import (
get_dataset_files_including_captures,
)
+ from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
if item_type == ItemType.DATASET:
files_queryset = get_dataset_files_including_captures(
@@ -1277,8 +1305,28 @@ def _get_item_files(user: User, item: Any, item_type: ItemType) -> list[File]:
return files
if item_type == ItemType.CAPTURE:
- files = get_capture_files(item, include_deleted=False)
- log.info(f"Found {len(files)} files for capture {item.uuid}")
+ capture_type = item.capture_type
+ # temporal filtering is only supported for DigitalRF captures
+ if capture_type is CaptureType.DigitalRF:
+ files = get_capture_files_with_temporal_filter(
+ capture_type=capture_type,
+ capture=item,
+ start_time=start_time,
+ end_time=end_time,
+ )
+ else:
+ if start_time is not None or end_time is not None:
+ logger.warning(
+ "Temporal filtering is only supported for DigitalRF captures, "
+ "ignoring start_time and end_time"
+ )
+
+ files = get_capture_files(
+ capture=item,
+ include_deleted=False,
+ )
+
+ logger.info(f"Found {len(files)} files for capture {item.uuid}")
return list(files)
log.warning(f"Unknown item type: {item_type}")
diff --git a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
index ca33ca45d..233cb9b43 100644
--- a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
+++ b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
@@ -274,7 +274,18 @@ class DownloadActionManager {
}
};
- window.DOMUtils.openModal(modalId);
+ // Initialize temporal slider from button data attributes (clears or builds slider)
+ const durationMs = parseInt(button.getAttribute("data-length-of-capture-ms"), 10);
+ const fileCadenceMs = parseInt(button.getAttribute("data-file-cadence-ms"), 10);
+ if (typeof window.initCaptureDownloadSlider === "function") {
+ window.initCaptureDownloadSlider(
+ Number.isNaN(durationMs) ? 0 : durationMs,
+ Number.isNaN(fileCadenceMs) ? 1000 : fileCadenceMs,
+ );
+ }
+
+ // Show the modal
+ window.showWebDownloadModal(captureUuid, captureName);
}
/**
diff --git a/gateway/sds_gateway/static/js/file-list.js b/gateway/sds_gateway/static/js/file-list.js
index aba3069c9..609c19f17 100644
--- a/gateway/sds_gateway/static/js/file-list.js
+++ b/gateway/sds_gateway/static/js/file-list.js
@@ -711,6 +711,8 @@ class FileListCapturesTableManager extends CapturesTableManager {
centerFrequencyGhz: ComponentUtils.escapeHtml(
capture.center_frequency_ghz || "",
),
+ lengthOfCaptureMs: capture.length_of_capture_ms ?? 0,
+ fileCadenceMs: capture.file_cadence_ms ?? 1000,
};
let typeDisplay = safeData.captureTypeDisplay || safeData.captureType;
@@ -835,7 +837,9 @@ class FileListCapturesTableManager extends CapturesTableManager {
+ data-capture-name="${safeData.name}"
+ data-length-of-capture-ms="${safeData.lengthOfCaptureMs}"
+ data-file-cadence-ms="${safeData.fileCadenceMs}">
Download
diff --git a/gateway/sds_gateway/templates/base.html b/gateway/sds_gateway/templates/base.html
index 2e14c130a..fdebfe804 100644
--- a/gateway/sds_gateway/templates/base.html
+++ b/gateway/sds_gateway/templates/base.html
@@ -19,6 +19,8 @@
href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.11.3/font/bootstrap-icons.min.css" />
+
{% block css %}
@@ -226,6 +228,7 @@
{# Removed JS that was hiding/showing the body #}
{% endblock inline_javascript %}
+
diff --git a/gateway/sds_gateway/users/views.py b/gateway/sds_gateway/users/views.py
index 41f2a70eb..85e340f91 100644
--- a/gateway/sds_gateway/users/views.py
+++ b/gateway/sds_gateway/users/views.py
@@ -3333,6 +3333,15 @@ def post(
Returns:
A JSON response containing the download status
"""
+ # optional start and end times for temporal filtering
+ start_time = request.POST.get("start_time", None)
+ end_time = request.POST.get("end_time", None)
+
+ if start_time:
+ start_time = int(start_time)
+ if end_time:
+ end_time = int(end_time)
+
# Validate item type
if item_type not in self.ITEM_MODELS:
return JsonResponse(
@@ -3399,6 +3408,8 @@ def post(
str(item.uuid),
str(request.user.id),
item_type,
+ start_time=start_time,
+ end_time=end_time,
)
return JsonResponse(
From 39c488502ee8a5d71b0c8dace50fcd84b1c097ef Mon Sep 17 00:00:00 2001
From: klpoland
Date: Mon, 23 Feb 2026 09:17:22 -0500
Subject: [PATCH 2/7] fix label updates
---
gateway/sds_gateway/api_methods/tasks.py | 12 ++--
.../users/partials/web_download_modal.html | 55 +++++++++++--------
2 files changed, 40 insertions(+), 27 deletions(-)
diff --git a/gateway/sds_gateway/api_methods/tasks.py b/gateway/sds_gateway/api_methods/tasks.py
index c7dff0b31..7a13f4390 100644
--- a/gateway/sds_gateway/api_methods/tasks.py
+++ b/gateway/sds_gateway/api_methods/tasks.py
@@ -20,13 +20,13 @@
from redis import Redis
from sds_gateway.api_methods.models import Capture
+from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.models import Dataset
from sds_gateway.api_methods.models import File
from sds_gateway.api_methods.models import ItemType
from sds_gateway.api_methods.models import TemporaryZipFile
from sds_gateway.api_methods.models import ZipFileStatus
from sds_gateway.api_methods.models import user_has_access_to_item
-from sds_gateway.api_methods.models import CaptureType
from sds_gateway.api_methods.utils.disk_utils import DISK_SPACE_BUFFER
from sds_gateway.api_methods.utils.disk_utils import check_disk_space_available
from sds_gateway.api_methods.utils.disk_utils import estimate_disk_size
@@ -1290,11 +1290,13 @@ def _get_item_files(
Returns:
List of files associated with the item
"""
- from sds_gateway.api_methods.helpers.temporal_filtering import get_capture_files_with_temporal_filter
+ from sds_gateway.api_methods.helpers.temporal_filtering import (
+ get_capture_files_with_temporal_filter,
+ )
+ from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
from sds_gateway.api_methods.utils.relationship_utils import (
get_dataset_files_including_captures,
)
- from sds_gateway.api_methods.utils.relationship_utils import get_capture_files
if item_type == ItemType.DATASET:
files_queryset = get_dataset_files_including_captures(
@@ -1316,7 +1318,7 @@ def _get_item_files(
)
else:
if start_time is not None or end_time is not None:
- logger.warning(
+ log.warning(
"Temporal filtering is only supported for DigitalRF captures, "
"ignoring start_time and end_time"
)
@@ -1326,7 +1328,7 @@ def _get_item_files(
include_deleted=False,
)
- logger.info(f"Found {len(files)} files for capture {item.uuid}")
+ log.info(f"Found {len(files)} files for capture {item.uuid}")
return list(files)
log.warning(f"Unknown item type: {item_type}")
diff --git a/gateway/sds_gateway/templates/users/partials/web_download_modal.html b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
index 5275a8972..8fb67d878 100644
--- a/gateway/sds_gateway/templates/users/partials/web_download_modal.html
+++ b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
@@ -38,13 +38,14 @@
You will receive an email with a download link when the file is ready.
{% if item_type == "capture" %}
-
-
Filter capture files to download within a subset of time:
-
-
0:00:00 - 0:00:00
-
-
-
+
+
Filter capture files to download within a subset of time:
+
+
0:00:00.000 - 0:00:00.000
+
0 files
+
+
+
{% endif %}
Cancel
@@ -62,12 +63,16 @@
diff --git a/gateway/sds_gateway/users/tests/test_drf_views.py b/gateway/sds_gateway/users/tests/test_drf_views.py
index ebe29ee65..c3c6c8d65 100644
--- a/gateway/sds_gateway/users/tests/test_drf_views.py
+++ b/gateway/sds_gateway/users/tests/test_drf_views.py
@@ -2,6 +2,7 @@
import json
import uuid
+from unittest.mock import patch
import pytest
from django.conf import settings
@@ -326,60 +327,6 @@ def test_share_with_multiple_users(
assert permissions.filter(shared_with=user_to_share_with).exists()
assert permissions.filter(shared_with=user2).exists()
- def test_unified_download_dataset_success(
- self, client: Client, owner: User, dataset: Dataset
- ) -> None:
- """Test successful download request using the unified download endpoint."""
- client.force_login(owner)
- url = reverse(
- "users:download_item",
- kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
- )
-
- response = client.post(url)
-
- assert response.status_code == status.HTTP_202_ACCEPTED
- result = response.json()
- assert result["success"] is True
- assert "download request accepted" in result["message"].lower()
- assert "task_id" in result
- assert result["item_name"] == dataset.name
- assert result["user_email"] == owner.email
-
- def test_unified_download_dataset_not_owner(
- self, client: Client, user_to_share_with: User, dataset: Dataset
- ) -> None:
- """Test download request when user is not the owner."""
- client.force_login(user_to_share_with)
- url = reverse(
- "users:download_item",
- kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
- )
-
- response = client.post(url)
-
- assert response.status_code == status.HTTP_404_NOT_FOUND
- result = response.json()
- assert result["success"] is False
- assert "not found or access denied" in result["message"].lower()
-
- def test_unified_download_dataset_invalid_type(
- self, client: Client, owner: User, dataset: Dataset
- ) -> None:
- """Test download request with invalid item type."""
- client.force_login(owner)
- url = reverse(
- "users:download_item",
- kwargs={"item_type": "invalid_type", "item_uuid": dataset.uuid},
- )
-
- response = client.post(url)
-
- assert response.status_code == status.HTTP_400_BAD_REQUEST
- result = response.json()
- assert result["success"] is False
- assert "invalid item type" in result["message"].lower()
-
def test_share_with_group_individual_members_already_shared(
self, client: Client, owner: User, user_to_share_with: User, dataset: Dataset
) -> None:
@@ -543,3 +490,186 @@ def test_capture_share_modal_displays_groups_properly(
member_emails = [member["email"] for member in group_entry["members"]]
assert user_to_share_with.email in member_emails
assert user2.email in member_emails
+
+
+@pytest.mark.django_db
+class TestDownloadItemView:
+ """Tests for the DownloadItemView (unified download endpoint)."""
+
+ @pytest.fixture
+ def client(self) -> Client:
+ return Client()
+
+ @pytest.fixture
+ def owner(self) -> User:
+ """Create a user who owns items."""
+ return User.objects.create_user(
+ email="owner@example.com",
+ password=TEST_PASSWORD,
+ name="Owner User",
+ is_approved=True,
+ )
+
+ @pytest.fixture
+ def user_to_share_with(self) -> User:
+ """Create a user to share items with."""
+ return User.objects.create_user(
+ email="share@example.com",
+ password=TEST_PASSWORD,
+ name="Share User",
+ is_approved=True,
+ )
+
+ @pytest.fixture
+ def dataset(self, owner: User) -> Dataset:
+ """Create a dataset owned by the owner."""
+ return Dataset.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test Dataset",
+ owner=owner,
+ description="A test dataset",
+ )
+
+ def test_unified_download_dataset_success(
+ self, client: Client, owner: User, dataset: Dataset
+ ) -> None:
+ """Test successful download request using the unified download endpoint."""
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
+ )
+
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+ result = response.json()
+ assert result["success"] is True
+ assert "download request accepted" in result["message"].lower()
+ assert "task_id" in result
+ assert result["item_name"] == dataset.name
+ assert result["user_email"] == owner.email
+
+ def test_unified_download_dataset_not_owner(
+ self, client: Client, user_to_share_with: User, dataset: Dataset
+ ) -> None:
+ """Test download request when user is not the owner."""
+ client.force_login(user_to_share_with)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.DATASET, "item_uuid": dataset.uuid},
+ )
+
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_404_NOT_FOUND
+ result = response.json()
+ assert result["success"] is False
+ assert "not found or access denied" in result["message"].lower()
+
+ def test_unified_download_dataset_invalid_type(
+ self, client: Client, owner: User, dataset: Dataset
+ ) -> None:
+ """Test download request with invalid item type."""
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": "invalid_type", "item_uuid": dataset.uuid},
+ )
+
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ result = response.json()
+ assert result["success"] is False
+ assert "invalid item type" in result["message"].lower()
+
+ def test_unified_download_capture_with_time_filter_success(
+ self, client: Client, owner: User
+ ) -> None:
+ """Test capture download request with start_time/end_time passes bounds to task."""
+ capture = Capture.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test DRF Capture",
+ owner=owner,
+ capture_type="drf",
+ top_level_dir="/test",
+ index_name="captures-drf",
+ )
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
+ )
+ data = {"start_time": "1000", "end_time": "5000"}
+
+ with patch(
+ "sds_gateway.users.views.send_item_files_email"
+ ) as mock_send_task:
+ mock_send_task.delay.return_value = type("Result", (), {"id": "task-1"})()
+ response = client.post(url, data)
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+ result = response.json()
+ assert result["success"] is True
+ assert "download request accepted" in result["message"].lower()
+ mock_send_task.delay.assert_called_once()
+ call_kwargs = mock_send_task.delay.call_args[1]
+ assert call_kwargs["start_time"] == 1000
+ assert call_kwargs["end_time"] == 5000
+
+ def test_unified_download_capture_without_time_filter(
+ self, client: Client, owner: User
+ ) -> None:
+ """Test capture download without start_time/end_time passes None to task."""
+ capture = Capture.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test DRF Capture",
+ owner=owner,
+ capture_type="drf",
+ top_level_dir="/test",
+ index_name="captures-drf",
+ )
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
+ )
+
+ with patch(
+ "sds_gateway.users.views.send_item_files_email"
+ ) as mock_send_task:
+ mock_send_task.delay.return_value = type("Result", (), {"id": "task-1"})()
+ response = client.post(url)
+
+ assert response.status_code == status.HTTP_202_ACCEPTED
+ mock_send_task.delay.assert_called_once()
+ call_kwargs = mock_send_task.delay.call_args[1]
+ assert call_kwargs.get("start_time") is None
+ assert call_kwargs.get("end_time") is None
+
+ def test_unified_download_capture_invalid_time_range(
+ self, client: Client, owner: User
+ ) -> None:
+ """Test capture download with start_time >= end_time returns 400."""
+ capture = Capture.objects.create(
+ uuid=uuid.uuid4(),
+ name="Test DRF Capture",
+ owner=owner,
+ capture_type="drf",
+ top_level_dir="/test",
+ index_name="captures-drf",
+ )
+ client.force_login(owner)
+ url = reverse(
+ "users:download_item",
+ kwargs={"item_type": ItemType.CAPTURE, "item_uuid": capture.uuid},
+ )
+ data = {"start_time": "5000", "end_time": "1000"}
+
+ response = client.post(url, data)
+
+ assert response.status_code == status.HTTP_400_BAD_REQUEST
+ result = response.json()
+ assert result["success"] is False
+ assert "start_time" in result["message"].lower() or "time range" in result["message"].lower()
diff --git a/gateway/sds_gateway/users/views.py b/gateway/sds_gateway/users/views.py
index d3d44b123..1b4b145f3 100644
--- a/gateway/sds_gateway/users/views.py
+++ b/gateway/sds_gateway/users/views.py
@@ -3300,6 +3300,35 @@ def _serve_file_download(self, zip_uuid: str, user) -> HttpResponse:
user_temporary_zip_download_view = TemporaryZipDownloadView.as_view()
+def _parse_optional_time(raw_value: str | None, param_name: str) -> tuple[int | None, JsonResponse | None]:
+ """Parse optional start/end time. Returns (value, None) or (None, error_response)."""
+ if raw_value in (None, ""):
+ return None, None
+ try:
+ value = int(raw_value)
+ except (TypeError, ValueError):
+ return None, JsonResponse(
+ {"success": False, "message": f"Invalid {param_name}; it must be an integer value."},
+ status=400,
+ )
+ if value < 0:
+ return None, JsonResponse(
+ {"success": False, "message": f"Invalid {param_name}; it must be greater than or equal to 0."},
+ status=400,
+ )
+ return value, None
+
+
+def _validate_time_range(start_time: int | None, end_time: int | None) -> JsonResponse | None:
+ """Return 400 JsonResponse if both provided and start >= end; else None."""
+ if start_time is not None and end_time is not None and start_time >= end_time:
+ return JsonResponse(
+ {"success": False, "message": "Invalid time range; start_time must be less than end_time."},
+ status=400,
+ )
+ return None
+
+
class DownloadItemView(Auth0LoginRequiredMixin, View):
"""
Unified view to handle item download requests for both datasets and captures.
@@ -3333,13 +3362,19 @@ def post(
Returns:
A JSON response containing the download status
"""
- # optional start and end times for temporal filtering
- start_time = request.POST.get("start_time") or None
- end_time = request.POST.get("end_time") or None
- if start_time is not None:
- start_time = int(start_time)
- if end_time is not None:
- end_time = int(end_time)
+ # Optional start and end times for temporal filtering
+ raw_start_time = request.POST.get("start_time")
+ raw_end_time = request.POST.get("end_time")
+
+ start_time, err = _parse_optional_time(raw_start_time, "start_time")
+ if err is not None:
+ return err
+ end_time, err = _parse_optional_time(raw_end_time, "end_time")
+ if err is not None:
+ return err
+ err = _validate_time_range(start_time, end_time)
+ if err is not None:
+ return err
# Validate item type
if item_type not in self.ITEM_MODELS:
From 0aa1c3a29c82ce7163a869afc6e059aa79099643 Mon Sep 17 00:00:00 2001
From: klpoland
Date: Fri, 6 Mar 2026 14:19:12 -0500
Subject: [PATCH 6/7] add flatpickr to handle datetime selection for better
control
---
.../js/actions/DownloadActionManager.js | 96 +++++++++++++------
gateway/sds_gateway/static/js/file-list.js | 18 ++++
gateway/sds_gateway/templates/base.html | 3 +
.../templates/users/file_list.html | 6 +-
.../users/partials/web_download_modal.html | 6 +-
5 files changed, 93 insertions(+), 36 deletions(-)
diff --git a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
index cb2da7332..5b23012a6 100644
--- a/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
+++ b/gateway/sds_gateway/static/js/actions/DownloadActionManager.js
@@ -49,35 +49,32 @@ function formatUtcRange(startEpochSec, startMs, endMs) {
return fmt(startDate) + " - " + fmt(endDate) + " (UTC)";
}
-/** Format ms from capture start as datetime-local value (local time). */
-function msToDatetimeLocal(captureStartEpochSec, ms) {
+/** Format ms from capture start as UTC string for display (Y-m-d H:i:s). */
+function msToUtcString(captureStartEpochSec, ms) {
if (!Number.isFinite(captureStartEpochSec) || !Number.isFinite(ms)) return "";
const d = new Date(captureStartEpochSec * 1000 + ms);
const pad2 = (x) => String(x).padStart(2, "0");
- const pad3 = (x) => String(x).padStart(3, "0");
return (
- d.getFullYear() +
+ d.getUTCFullYear() +
"-" +
- pad2(d.getMonth() + 1) +
+ pad2(d.getUTCMonth() + 1) +
"-" +
- pad2(d.getDate()) +
- "T" +
- pad2(d.getHours()) +
+ pad2(d.getUTCDate()) +
+ " " +
+ pad2(d.getUTCHours()) +
":" +
- pad2(d.getMinutes()) +
+ pad2(d.getUTCMinutes()) +
":" +
- pad2(d.getSeconds()) +
- "." +
- pad3(d.getMilliseconds())
+ pad2(d.getUTCSeconds())
);
}
-/** Parse datetime-local value to ms from capture start (UTC epoch sec). */
-function datetimeLocalToMs(captureStartEpochSec, valueStr) {
- if (!Number.isFinite(captureStartEpochSec) || !valueStr || !valueStr.trim()) return NaN;
- const d = new Date(valueStr.trim());
- if (Number.isNaN(d.getTime())) return NaN;
- return d.getTime() - captureStartEpochSec * 1000;
+/** Parse UTC date string (Y-m-d H:i:s or Y-m-d H:i) to epoch ms. */
+function parseUtcStringToEpochMs(str) {
+ if (!str || !str.trim()) return NaN;
+ const s = str.trim();
+ const d = new Date(s.endsWith("Z") ? s : s.replace(" ", "T") + "Z");
+ return Number.isFinite(d.getTime()) ? d.getTime() : NaN;
}
class DownloadActionManager {
@@ -411,6 +408,33 @@ class DownloadActionManager {
endDateTimeEntry.disabled = !hasEpoch;
}
if (durationMs <= 0) return;
+ var fpStart = null, fpEnd = null;
+ var epochStart = captureStartEpochSec * 1000;
+ var epochEnd = epochStart + durationMs;
+ if (hasEpoch && typeof flatpickr !== 'undefined' && startDateTimeEntry && endDateTimeEntry) {
+ var fpOpts = {
+ enableTime: true,
+ enableSeconds: true,
+ utc: true,
+ dateFormat: 'Y-m-d H:i:S',
+ time_24hr: true,
+ minDate: epochStart,
+ maxDate: epochEnd,
+ allowInput: true,
+ static: true,
+ appendTo: webDownloadModal || undefined,
+ };
+ flatpickr(startDateTimeEntry, Object.assign({}, fpOpts, {
+ onChange: function() { syncFromDateTimeEntries(); }
+ }));
+ flatpickr(endDateTimeEntry, Object.assign({}, fpOpts, {
+ onChange: function() { syncFromDateTimeEntries(); }
+ }));
+ fpStart = startDateTimeEntry._flatpickr;
+ fpEnd = endDateTimeEntry._flatpickr;
+ startDateTimeEntry.disabled = false;
+ endDateTimeEntry.disabled = false;
+ }
noUiSlider.create(sliderEl, {
start: [0, durationMs],
connect: true,
@@ -444,8 +468,10 @@ class DownloadActionManager {
if (startTimeEntry) startTimeEntry.value = String(Math.round(startMs));
if (endTimeEntry) endTimeEntry.value = String(Math.round(endMs));
if (hasEpoch) {
- if (startDateTimeEntry) startDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, startMs);
- if (endDateTimeEntry) endDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, endMs);
+ if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart + startMs);
+ else if (startDateTimeEntry) startDateTimeEntry.value = msToUtcString(captureStartEpochSec, startMs);
+ if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochStart + endMs);
+ else if (endDateTimeEntry) endDateTimeEntry.value = msToUtcString(captureStartEpochSec, endMs);
}
});
if (rangeLabel) {
@@ -471,10 +497,11 @@ class DownloadActionManager {
if (startTimeEntry) startTimeEntry.value = startVal;
if (endTimeEntry) endTimeEntry.value = endVal;
if (hasEpoch && startDateTimeEntry && endDateTimeEntry) {
- startDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, 0);
- endDateTimeEntry.value = msToDatetimeLocal(captureStartEpochSec, durationMs);
- startDateTimeEntry.disabled = false;
- endDateTimeEntry.disabled = false;
+ if (fpStart && typeof fpStart.setDate === 'function') fpStart.setDate(epochStart);
+ else startDateTimeEntry.value = msToUtcString(captureStartEpochSec, 0);
+ if (fpEnd && typeof fpEnd.setDate === 'function') fpEnd.setDate(epochEnd);
+ else endDateTimeEntry.value = msToUtcString(captureStartEpochSec, durationMs);
+ if (!fpStart) { startDateTimeEntry.disabled = false; endDateTimeEntry.disabled = false; }
}
function syncSliderFromEntries() {
@@ -492,18 +519,28 @@ class DownloadActionManager {
}
function syncFromDateTimeEntries() {
if (!hasEpoch || !sliderEl.noUiSlider || !startDateTimeEntry || !endDateTimeEntry) return;
- var startMs = datetimeLocalToMs(captureStartEpochSec, startDateTimeEntry.value);
- var endMs = datetimeLocalToMs(captureStartEpochSec, endDateTimeEntry.value);
+ var startMs, endMs;
+ if (startDateTimeEntry._flatpickr && endDateTimeEntry._flatpickr) {
+ var dStart = startDateTimeEntry._flatpickr.selectedDates[0];
+ var dEnd = endDateTimeEntry._flatpickr.selectedDates[0];
+ startMs = dStart ? dStart.getTime() - epochStart : 0;
+ endMs = dEnd ? dEnd.getTime() - epochStart : durationMs;
+ } else {
+ startMs = parseUtcStringToEpochMs(startDateTimeEntry.value) - epochStart;
+ endMs = parseUtcStringToEpochMs(endDateTimeEntry.value) - epochStart;
+ }
if (Number.isNaN(startMs) || Number.isNaN(endMs)) return;
startMs = Math.max(0, Math.min(startMs, durationMs));
endMs = Math.max(0, Math.min(endMs, durationMs));
if (startMs >= endMs) endMs = Math.min(startMs + fileCadenceMs, durationMs);
+ var cur = sliderEl.noUiSlider.get();
+ if (Math.round(Number(cur[0])) === Math.round(startMs) && Math.round(Number(cur[1])) === Math.round(endMs)) return;
sliderEl.noUiSlider.set([startMs, endMs]);
}
if (startTimeEntry) startTimeEntry.addEventListener('change', syncSliderFromEntries);
if (endTimeEntry) endTimeEntry.addEventListener('change', syncSliderFromEntries);
- if (startDateTimeEntry) startDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
- if (endDateTimeEntry) endDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
+ if (startDateTimeEntry && !startDateTimeEntry._flatpickr) startDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
+ if (endDateTimeEntry && !endDateTimeEntry._flatpickr) endDateTimeEntry.addEventListener('change', syncFromDateTimeEntries);
}
/**
@@ -653,7 +690,6 @@ class DownloadActionManager {
const dataFilesTotalSizeRaw = button.getAttribute("data-data-files-total-size");
const dataFilesTotalSize = dataFilesTotalSizeRaw !== null && dataFilesTotalSizeRaw !== '' ? parseInt(dataFilesTotalSizeRaw, 10) : NaN;
const captureStartEpochSec = parseInt(button.getAttribute("data-capture-start-epoch-sec"), 10);
- const captureUuid = button.getAttribute("data-capture-uuid") || undefined;
this.initializeCaptureDownloadSlider(
Number.isNaN(durationMs) ? 0 : durationMs,
Number.isNaN(fileCadenceMs) ? 1000 : fileCadenceMs,
@@ -663,7 +699,7 @@ class DownloadActionManager {
dataFilesCount: Number.isNaN(dataFilesCount) ? 0 : dataFilesCount,
totalFilesCount: Number.isNaN(totalFilesCount) ? 0 : totalFilesCount,
dataFilesTotalSize: Number.isNaN(dataFilesTotalSize) ? undefined : dataFilesTotalSize,
- captureUuid: captureUuid,
+ captureUuid: captureUuid || undefined,
captureStartEpochSec: Number.isNaN(captureStartEpochSec) ? undefined : captureStartEpochSec,
},
);
diff --git a/gateway/sds_gateway/static/js/file-list.js b/gateway/sds_gateway/static/js/file-list.js
index 756526285..3f5175077 100644
--- a/gateway/sds_gateway/static/js/file-list.js
+++ b/gateway/sds_gateway/static/js/file-list.js
@@ -575,6 +575,24 @@ class FileListCapturesTableManager extends CapturesTableManager {
this.searchButtonLoading = document.getElementById("search-btn-loading");
}
+ /**
+ * Use web download modal (with temporal slider) when DownloadActionManager is available.
+ */
+ handleDownloadCapture(button) {
+ if (window.currentDownloadManager && document.getElementById("webDownloadModal")) {
+ const captureUuid = button.getAttribute("data-capture-uuid");
+ const captureName = button.getAttribute("data-capture-name") || captureUuid;
+ if (captureUuid) {
+ window.currentDownloadManager.handleCaptureDownload(
+ captureUuid,
+ captureName,
+ button,
+ );
+ }
+ return;
+ }
+ }
+
/**
* Override showLoading to toggle button contents instead of showing separate indicator
*/
diff --git a/gateway/sds_gateway/templates/base.html b/gateway/sds_gateway/templates/base.html
index fdebfe804..b121a037d 100644
--- a/gateway/sds_gateway/templates/base.html
+++ b/gateway/sds_gateway/templates/base.html
@@ -21,6 +21,8 @@
href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" />
+
{% block css %}
@@ -44,6 +46,7 @@
+
{% endblock javascript %}
{# djlint:off H021 #}
diff --git a/gateway/sds_gateway/templates/users/file_list.html b/gateway/sds_gateway/templates/users/file_list.html
index 2e03a166e..d3d9a77c0 100644
--- a/gateway/sds_gateway/templates/users/file_list.html
+++ b/gateway/sds_gateway/templates/users/file_list.html
@@ -527,11 +527,11 @@ Upload Result
}
};
- // Initialize managers for captures
- const permissionsManager = new PermissionsManager(pageConfig.permissions);
+ // Initialize managers for captures (use window.* — classes are attached by module/scripts)
+ const permissionsManager = new window.PermissionsManager(pageConfig.permissions);
// Initialize download manager
- const downloadManager = new DownloadActionManager({
+ const downloadManager = new window.DownloadActionManager({
permissions: permissionsManager
});
diff --git a/gateway/sds_gateway/templates/users/partials/web_download_modal.html b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
index 0d0341f9c..8003acd2d 100644
--- a/gateway/sds_gateway/templates/users/partials/web_download_modal.html
+++ b/gateway/sds_gateway/templates/users/partials/web_download_modal.html
@@ -4,7 +4,7 @@
tabindex="-1"
aria-labelledby="webDownloadModalLabel-{{ item.uuid }}"
aria-hidden="true">
-
+
@@ -1703,7 +1539,7 @@ class ModalManager {
// Primary file information - most useful for users
if (file.size) {
metadata.push(
- `Size: ${ComponentUtils.formatFileSize(file.size)} (${file.size.toLocaleString()} bytes)`,
+ `Size: ${window.DOMUtils.formatFileSize(file.size)} (${file.size.toLocaleString()} bytes)`,
);
}
diff --git a/gateway/sds_gateway/static/js/core/DOMUtils.js b/gateway/sds_gateway/static/js/core/DOMUtils.js
index 902196232..3309538f5 100644
--- a/gateway/sds_gateway/static/js/core/DOMUtils.js
+++ b/gateway/sds_gateway/static/js/core/DOMUtils.js
@@ -18,11 +18,17 @@ class DOMUtils {
* @returns {string} Formatted file size
*/
formatFileSize(bytes) {
- if (bytes === 0) return "0 Bytes";
- const k = 1024;
- const sizes = ["Bytes", "KB", "MB", "GB", "TB"];
- const i = Math.floor(Math.log(bytes) / Math.log(k));
- return `${Number.parseFloat((bytes / k ** i).toFixed(2))} ${sizes[i]}`;
+ const n = Number(bytes);
+ if (!Number.isFinite(n) || n < 0) return "0 bytes";
+ if (n === 0) return "0 bytes";
+ const units = ["bytes", "KB", "MB", "GB"];
+ let i = 0;
+ let v = n;
+ while (v >= 1024 && i < units.length - 1) {
+ v /= 1024;
+ i++;
+ }
+ return (i === 0 ? v : v.toFixed(2)) + " " + units[i];
}
/**
diff --git a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
index cc6e03c0b..e3fa07f1d 100644
--- a/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
+++ b/gateway/sds_gateway/static/js/core/PageLifecycleManager.js
@@ -82,6 +82,9 @@ class PageLifecycleManager {
case "capture-list":
this.initializeCaptureListPage();
break;
+ case "published-datasets-list":
+ this.initializePublishedDatasetsListPage();
+ break;
default:
console.warn(`Unknown page type: ${this.pageType}`);
}
@@ -165,6 +168,32 @@ class PageLifecycleManager {
this.initializeCaptureModals();
}
+ /**
+ * Published datasets search page: pagination + dataset modals (same modal wiring as dataset list, no sort UI).
+ */
+ initializePublishedDatasetsListPage() {
+ this.initializePagination();
+ this.initializeDatasetModals();
+ }
+
+
+ /**
+ * Single DownloadActionManager for document-wide .web-download-btn / SDK buttons (not per modal).
+ */
+ ensureDownloadActionManager() {
+ if (
+ this.downloadActionManager ||
+ !this.permissions ||
+ !window.DownloadActionManager
+ ) {
+ return;
+ }
+ this.downloadActionManager = new window.DownloadActionManager({
+ permissions: this.permissions,
+ });
+ this.managers.push(this.downloadActionManager);
+ }
+
/**
* Initialize search handlers
*/
@@ -269,6 +298,10 @@ class PageLifecycleManager {
* Initialize dataset modals
*/
initializeDatasetModals() {
+ // TODO: Refactor this to align all modal initialization
+ // with a single manager instance per modal type.
+    // Plan to do this in a future PR.
+
// Pre-initialize all modals on the page with proper config to prevent Bootstrap auto-initialization errors
const allModals = document.querySelectorAll(".modal");
for (const modal of allModals) {
@@ -299,6 +332,7 @@ class PageLifecycleManager {
for (const modal of datasetModals) {
const itemUuid = modal.getAttribute("data-item-uuid");
+ const itemType = modal.getAttribute("data-item-type");
if (!itemUuid || !this.permissions) {
console.warn(
@@ -309,9 +343,9 @@ class PageLifecycleManager {
if (window.ShareActionManager) {
const shareManager = new window.ShareActionManager({
- itemUuid: itemUuid,
- itemType: "dataset",
permissions: this.permissions,
+ itemUuid: itemUuid,
+ itemType: itemType,
});
this.managers.push(shareManager);
@@ -321,28 +355,18 @@ class PageLifecycleManager {
if (window.VersioningActionManager && !modal.versioningActionManager) {
const versioningManager = new window.VersioningActionManager({
- datasetUuid: itemUuid,
permissions: this.permissions,
+ datasetUuid: itemUuid,
});
this.managers.push(versioningManager);
modal.versioningActionManager = versioningManager;
}
- if (window.DownloadActionManager) {
- const downloadManager = new window.DownloadActionManager({
- permissions: this.permissions,
- });
- this.managers.push(downloadManager);
-
- // Store reference on modal
- modal.downloadActionManager = downloadManager;
- }
-
if (window.DetailsActionManager) {
const detailsManager = new window.DetailsActionManager({
permissions: this.permissions,
itemUuid: itemUuid,
- itemType: "dataset",
+ itemType: itemType,
});
this.managers.push(detailsManager);
@@ -350,18 +374,25 @@ class PageLifecycleManager {
modal.detailsActionManager = detailsManager;
}
}
+
+ this.ensureDownloadActionManager();
}
/**
* Initialize capture modals
*/
initializeCaptureModals() {
+ // TODO: Refactor this to align all modal initialization
+ // with a single manager instance per modal type.
+    // Plan to do this in a future PR.
+
const captureModals = document.querySelectorAll(
".modal[data-item-type='capture']",
);
for (const modal of captureModals) {
const itemUuid = modal.getAttribute("data-item-uuid");
+ const itemType = modal.getAttribute("data-item-type");
if (!itemUuid || !this.permissions) {
console.warn(
@@ -372,28 +403,18 @@ class PageLifecycleManager {
if (window.ShareActionManager) {
const shareManager = new window.ShareActionManager({
- itemUuid: itemUuid,
- itemType: "capture",
permissions: this.permissions,
+ itemUuid: itemUuid,
+ itemType: itemType,
});
this.managers.push(shareManager);
// Store reference on modal
modal.shareActionManager = shareManager;
}
-
- if (window.DownloadActionManager) {
- const downloadManager = new window.DownloadActionManager({
- itemUuid: itemUuid,
- itemType: "capture",
- permissions: this.permissions,
- });
- this.managers.push(downloadManager);
-
- // Store reference on modal
- modal.downloadActionManager = downloadManager;
- }
}
+
+ this.ensureDownloadActionManager();
}
/**
diff --git a/gateway/sds_gateway/static/js/file-list.js b/gateway/sds_gateway/static/js/file-list.js
index 3f5175077..6c660c27d 100644
--- a/gateway/sds_gateway/static/js/file-list.js
+++ b/gateway/sds_gateway/static/js/file-list.js
@@ -1,3 +1,9 @@
+/**
+ * TODO: This file has a lot of redundancy with the manager files
+ * and needs to be deprecated; its functionality should be migrated
+ * to the new JS structure.
+ */
+
/* File List Page JavaScript - Refactored to use Components */
/**
@@ -575,24 +581,6 @@ class FileListCapturesTableManager extends CapturesTableManager {
this.searchButtonLoading = document.getElementById("search-btn-loading");
}
- /**
- * Use web download modal (with temporal slider) when DownloadActionManager is available.
- */
- handleDownloadCapture(button) {
- if (window.currentDownloadManager && document.getElementById("webDownloadModal")) {
- const captureUuid = button.getAttribute("data-capture-uuid");
- const captureName = button.getAttribute("data-capture-name") || captureUuid;
- if (captureUuid) {
- window.currentDownloadManager.handleCaptureDownload(
- captureUuid,
- captureName,
- button,
- );
- }
- return;
- }
- }
-
/**
* Override showLoading to toggle button contents instead of showing separate indicator
*/
diff --git a/gateway/sds_gateway/templates/users/components/dataset_list_table.html b/gateway/sds_gateway/templates/users/components/dataset_list_table.html
index ad45ff497..2257a7244 100644
--- a/gateway/sds_gateway/templates/users/components/dataset_list_table.html
+++ b/gateway/sds_gateway/templates/users/components/dataset_list_table.html
@@ -183,8 +183,8 @@
No datasets yet
+ data-item-uuid="{{ dataset.uuid }}"
+ data-item-type="dataset">
Web Download
diff --git a/gateway/sds_gateway/templates/users/dataset_list.html b/gateway/sds_gateway/templates/users/dataset_list.html
index baf0de549..4ead29d05 100644
--- a/gateway/sds_gateway/templates/users/dataset_list.html
+++ b/gateway/sds_gateway/templates/users/dataset_list.html
@@ -31,20 +31,7 @@ Datasets
-
- {% for dataset in page_obj %}
- {% include "users/partials/dataset_details_modal.html" with dataset=dataset %}
- {% include "users/partials/web_download_modal.html" with item=dataset item_type="dataset" %}
- {% include "users/partials/share_modal.html" with item=dataset item_type="dataset" %}
- {% include "users/partials/sdk_download_modal.html" with dataset=dataset %}
- {% if dataset.is_owner or dataset.permission_level == 'co-owner' %}
- {% include "users/partials/dataset_version_control.html" with dataset=dataset %}
- {% if not dataset.dataset.status == 'final' or not dataset.is_public %}
- {% include "users/partials/publish_dataset_modal.html" with dataset=dataset %}
- {% endif %}
- {% endif %}
- {% endfor %}
-
+ {% include "users/components/dataset_list_modals.html" with page_obj=page_obj %}
{% endblock content %}
{% block javascript %}
{# djlint:off #}
diff --git a/gateway/sds_gateway/templates/users/file_list.html b/gateway/sds_gateway/templates/users/file_list.html
index d3d9a77c0..6c6a830cb 100644
--- a/gateway/sds_gateway/templates/users/file_list.html
+++ b/gateway/sds_gateway/templates/users/file_list.html
@@ -369,8 +369,6 @@
Upload Result
{% include "users/partials/capture_modal.html" %}
-
- {% include "users/partials/web_download_modal.html" with item_type="capture" capture=capture_obj %}
{% if VISUALIZATIONS_ENABLED %}
{% include "visualizations/partials/visualization_modal.html" with visualization_compatibility=visualization_compatibility %}
diff --git a/gateway/sds_gateway/templates/users/files.html b/gateway/sds_gateway/templates/users/files.html
index 63e84d774..9cc93386a 100644
--- a/gateway/sds_gateway/templates/users/files.html
+++ b/gateway/sds_gateway/templates/users/files.html
@@ -178,12 +178,19 @@ Files
{% endif %}
-
- Download
-
+ Download
{% elif item.type == 'file' %}
diff --git a/gateway/sds_gateway/templates/users/partials/captures_page_table.html b/gateway/sds_gateway/templates/users/partials/captures_page_table.html
index 088b8c369..9aa0b6a30 100644
--- a/gateway/sds_gateway/templates/users/partials/captures_page_table.html
+++ b/gateway/sds_gateway/templates/users/partials/captures_page_table.html
@@ -137,15 +137,16 @@
{% endif %}
- Download
diff --git a/gateway/sds_gateway/templates/users/partials/search_published_datasets_tab.html b/gateway/sds_gateway/templates/users/partials/search_published_datasets_tab.html
index 4556109ad..573ac3589 100644
--- a/gateway/sds_gateway/templates/users/partials/search_published_datasets_tab.html
+++ b/gateway/sds_gateway/templates/users/partials/search_published_datasets_tab.html
@@ -114,9 +114,7 @@ No datasets found