Reworked the averaging. Variant 1
@@ -1,5 +1,6 @@
 """
 Points averaging view for satellite data grouping by day/night intervals.
+Groups points by Source, then by time intervals within each Source.
 """
 from datetime import datetime, timedelta
 from django.contrib.auth.mixins import LoginRequiredMixin
@@ -8,7 +9,7 @@ from django.shortcuts import render
 from django.views import View
 from django.utils import timezone
 
-from ..models import ObjItem, Satellite
+from ..models import ObjItem, Satellite, Source
 from ..utils import (
     calculate_mean_coords,
     calculate_distance_wgs84,
@@ -29,8 +30,9 @@ class PointsAveragingView(LoginRequiredMixin, View):
     """
 
     def get(self, request):
-        # Get satellites that have points with geo data
+        # Get satellites that have sources with points with geo data
         satellites = Satellite.objects.filter(
+            parameters__objitem__source__isnull=False,
             parameters__objitem__geo_obj__coords__isnull=False
         ).distinct().order_by('name')
 
@@ -44,13 +46,14 @@ class PointsAveragingView(LoginRequiredMixin, View):
 
 class PointsAveragingAPIView(LoginRequiredMixin, View):
     """
-    API endpoint for grouping and averaging points by day/night intervals.
+    API endpoint for grouping and averaging points by Source and day/night intervals.
 
     Groups points into:
     - Day: 08:00 - 19:00
     - Night: 19:00 - 08:00 (next day)
     - Weekend: Friday 19:00 - Monday 08:00
 
-    For each group, calculates average coordinates and checks for outliers (>56 km).
+    For each group within each Source, calculates average coordinates and checks for outliers (>56 km).
     """
 
     def get(self, request):
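Note: the interval logic referenced by this docstring lives in _get_interval_key, which the diff only touches in passing (the `return date - timedelta(days=3)` fragment further down). As a rough, non-authoritative sketch of the classification described above, assuming keys of the form 'YYYY-MM-DD_day' / '_night' / '_weekend' (inferred from the rsplit('_', 1) and strptime('%Y-%m-%d') calls in _process_group); the function name interval_key_for is hypothetical:

    from datetime import datetime, timedelta

    def interval_key_for(ts: datetime) -> str:
        """Classify a local timestamp into a day/night/weekend interval key (illustrative only)."""
        date = ts.date()
        weekday = ts.weekday()  # Monday == 0 ... Sunday == 6

        # Weekend: Friday 19:00 - Monday 08:00, keyed by the Friday that opens it
        if (weekday == 4 and ts.hour >= 19) or weekday in (5, 6) or (weekday == 0 and ts.hour < 8):
            if weekday in (5, 6):
                date -= timedelta(days=weekday - 4)   # Saturday/Sunday -> that week's Friday
            elif weekday == 0:
                date -= timedelta(days=3)             # Monday before 08:00 -> previous Friday
            return f"{date:%Y-%m-%d}_weekend"

        # Day: 08:00 - 19:00
        if 8 <= ts.hour < 19:
            return f"{date:%Y-%m-%d}_day"

        # Night: 19:00 - 08:00 (next day), keyed by the date the night started
        if ts.hour < 8:
            date -= timedelta(days=1)
        return f"{date:%Y-%m-%d}_night"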
@@ -76,9 +79,50 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
         except ValueError:
             return JsonResponse({'error': 'Неверный формат даты'}, status=400)
 
-        # Get all points for the satellite in the date range
-        objitems = ObjItem.objects.filter(
-            parameter_obj__id_satellite=satellite,
+        # Get all Sources for the satellite that have points in the date range
+        sources = Source.objects.filter(
+            source_objitems__parameter_obj__id_satellite=satellite,
+            source_objitems__geo_obj__coords__isnull=False,
+            source_objitems__geo_obj__timestamp__gte=date_from_obj,
+            source_objitems__geo_obj__timestamp__lt=date_to_obj,
+        ).distinct().prefetch_related(
+            'source_objitems',
+            'source_objitems__geo_obj',
+            'source_objitems__geo_obj__mirrors',
+            'source_objitems__parameter_obj',
+            'source_objitems__parameter_obj__polarization',
+            'source_objitems__parameter_obj__modulation',
+            'source_objitems__parameter_obj__standard',
+        )
+
+        if not sources.exists():
+            return JsonResponse({'error': 'Источники не найдены в указанном диапазоне'}, status=404)
+
+        # Process each source
+        result_sources = []
+        for source in sources:
+            source_data = self._process_source(source, date_from_obj, date_to_obj)
+            if source_data['groups']:  # Only add if it has groups with points
+                result_sources.append(source_data)
+
+        if not result_sources:
+            return JsonResponse({'error': 'Точки не найдены в указанном диапазоне'}, status=404)
+
+        return JsonResponse({
+            'success': True,
+            'satellite': satellite.name,
+            'date_from': date_from,
+            'date_to': date_to,
+            'sources': result_sources,
+            'total_sources': len(result_sources),
+        })
+
+    def _process_source(self, source, date_from_obj, date_to_obj):
+        """
+        Process a single Source: get its points and group them by time intervals.
+        """
+        # Get all points for this source in the date range
+        objitems = source.source_objitems.filter(
             geo_obj__coords__isnull=False,
             geo_obj__timestamp__gte=date_from_obj,
             geo_obj__timestamp__lt=date_to_obj,
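Note: taken together, the new get()/_process_source() pair changes the response from a flat list of groups to a per-source nesting. A purely illustrative example of the resulting payload; the field names come from this diff, the values are made up, and each group dict also carries further fields (such as the 'points' list referenced above) that fall outside the hunks shown:

    example_response = {
        'success': True,
        'satellite': 'SAT-1',            # hypothetical satellite name
        'date_from': '2024-01-01',
        'date_to': '2024-01-08',
        'total_sources': 1,
        'sources': [
            {
                'source_id': 42,
                'source_name': 'Источник #42',
                'total_points': 17,
                'total_groups': 2,
                'groups': [
                    {'interval_key': '2024-01-01_day', 'interval_label': '...', 'total_points': 9},
                    {'interval_key': '2024-01-01_night', 'interval_label': '...', 'total_points': 8},
                ],
            },
        ],
    }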
@@ -89,16 +133,12 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
             'parameter_obj__modulation',
             'parameter_obj__standard',
             'geo_obj',
-            'source',
         ).prefetch_related(
             'geo_obj__mirrors'
         ).order_by('geo_obj__timestamp')
 
-        if not objitems.exists():
-            return JsonResponse({'error': 'Точки не найдены в указанном диапазоне'}, status=404)
-
-        # Group points by source name and day/night intervals
-        groups = self._group_points_by_intervals(objitems)
+        # Group points by day/night intervals
+        groups = self._group_points_by_intervals(list(objitems))
 
         # Process each group: calculate average and check for outliers
         result_groups = []
@@ -106,21 +146,27 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
             group_result = self._process_group(group_key, points)
             result_groups.append(group_result)
 
-        return JsonResponse({
-            'success': True,
-            'satellite': satellite.name,
-            'date_from': date_from,
-            'date_to': date_to,
+        # Get source name from first point or use ID
+        source_name = f"Источник #{source.id}"
+        if objitems.exists():
+            first_point = objitems.first()
+            if first_point.name:
+                source_name = first_point.name
+
+        return {
+            'source_id': source.id,
+            'source_name': source_name,
+            'total_points': sum(len(g['points']) for g in result_groups),
             'groups': result_groups,
             'total_groups': len(result_groups),
-        })
+        }
 
     def _group_points_by_intervals(self, objitems):
         """
-        Group points by source name and day/night intervals.
+        Group points by day/night intervals.
 
         Day: 08:00 - 19:00
         Night: 19:00 - 08:00 (next day)
         Weekend: Friday 19:00 - Monday 08:00
         """
         groups = {}
@@ -129,19 +175,14 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
                 continue
 
             timestamp = timezone.localtime(objitem.geo_obj.timestamp)
             # timestamp = objitem.geo_obj.timestamp
-            source_name = objitem.name or f"Объект #{objitem.id}"
 
             # Determine interval
             interval_key = self._get_interval_key(timestamp)
 
-            # Create group key: (source_name, interval_key)
-            group_key = (source_name, interval_key)
-
-            if group_key not in groups:
-                groups[group_key] = []
+            if interval_key not in groups:
+                groups[interval_key] = []
 
-            groups[group_key].append(objitem)
+            groups[interval_key].append(objitem)
 
         return groups
@@ -208,7 +249,7 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
             return date - timedelta(days=3)
         return date
 
-    def _process_group(self, group_key, points):
+    def _process_group(self, interval_key, points):
         """
         Process a group of points: calculate average and check for outliers.
 
@@ -218,8 +259,6 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
         3. Iteratively add points within 56 km of current average
         4. Points not within 56 km of final average are outliers
         """
-        source_name, interval_key = group_key
-
         # Parse interval info
         date_str, interval_type = interval_key.rsplit('_', 1)
         interval_date = datetime.strptime(date_str, '%Y-%m-%d').date()
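Note: steps 1-2 of this algorithm sit outside the hunk, and the real implementation relies on calculate_mean_coords / calculate_distance_wgs84 from ..utils, whose signatures the diff does not show. A self-contained, non-authoritative sketch of the iterative 56 km filtering described above, using plain (lat, lon) tuples and its own haversine helper:

    import math

    def haversine_km(a, b):
        """Great-circle distance in km between two (lat, lon) points given in degrees."""
        lat1, lon1, lat2, lon2 = map(math.radians, (*a, *b))
        h = (math.sin((lat2 - lat1) / 2) ** 2
             + math.cos(lat1) * math.cos(lat2) * math.sin((lon2 - lon1) / 2) ** 2)
        return 2 * 6371.0 * math.asin(math.sqrt(h))

    def mean_coords(points):
        """Arithmetic mean of (lat, lon) pairs; adequate for tightly clustered points."""
        lats, lons = zip(*points)
        return sum(lats) / len(lats), sum(lons) / len(lons)

    def split_outliers(points, threshold_km=56.0):
        """Grow a cluster from the first point, then flag points far from the final average."""
        valid = [points[0]]
        changed = True
        while changed:
            changed = False
            center = mean_coords(valid)
            for p in points:
                if p not in valid and haversine_km(p, center) <= threshold_km:
                    valid.append(p)
                    changed = True
        center = mean_coords(valid)
        outliers = [p for p in points if haversine_km(p, center) > threshold_km]
        return center, outliers

For example, split_outliers([(55.75, 37.62), (55.80, 37.70), (48.85, 2.35)]) keeps the two Moscow-area points in the average and reports the Paris point as the only outlier.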
@@ -322,7 +361,7 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
         # Calculate median time from valid points using timestamp_objects array
         valid_timestamps = []
         for i in valid_indices:
-            if timestamp_objects[i]:
+            if i < len(timestamp_objects) and timestamp_objects[i]:
                 valid_timestamps.append(timestamp_objects[i])
 
         median_time_str = '-'
@@ -344,7 +383,6 @@ class PointsAveragingAPIView(LoginRequiredMixin, View):
             median_time_str = timezone.localtime(median_datetime).strftime("%d.%m.%Y %H:%M")
 
         return {
-            'source_name': source_name,
             'interval_key': interval_key,
             'interval_label': interval_label,
             'total_points': len(points_data),