Update some work schemes

This commit is contained in:
t0xa 2026-02-10 15:50:37 +03:00
parent 62e533fec5
commit b48234194c
26 changed files with 1630 additions and 1019 deletions

View file

@ -0,0 +1,92 @@
@startuml Crowd Node - Request Processing Flow
!define COMPONENT_BG_COLOR #E3F2FD
!define API_BG_COLOR #FFF3E0
!define STORAGE_BG_COLOR #F3E5F5
title Crowd Node: Процесс обработки запроса на анализ изображения
participant "Scheduler" as Scheduler
participant "Redis Queue" as RedisQueue #FFCCCC
participant "tasks.py" as TaskListener COMPONENT_BG_COLOR
participant "analyzer.py" as Analyzer COMPONENT_BG_COLOR
participant "frames.py" as FramePuller COMPONENT_BG_COLOR
participant "LivePreview service" as LivePreview API_BG_COLOR
box #LightGreen
participant "PersonDetector" as PersonDetector
participant "PersonDetectionService" as PersonDetectionService
end box
participant "Redis Cache" as RedisCache #FFCCCC
participant "S3 Storage" as S3 STORAGE_BG_COLOR
participant "Central" as Central API_BG_COLOR
== Получение задания ==
Scheduler -> RedisQueue: push task\n{cmd: "analyze_crowd",\nparams: {uin, camera_id, zones, ...}}
activate RedisQueue
TaskListener -> RedisQueue: pull_task()
activate TaskListener
RedisQueue --> TaskListener: task data
deactivate RedisQueue
TaskListener -> Analyzer: analyze_crowd(uin, camera_id, zones, **options)
activate Analyzer
== Получение кадра ==
Analyzer -> FramePuller: pull(uin, camera)
activate FramePuller
FramePuller -> LivePreview: GET /internal/preview
LivePreview --> FramePuller: JPEG image bytes
FramePuller -> FramePuller: Frame(content)\n- создает PIL Image\n- генерирует image_id
FramePuller --> Analyzer: Frame object
deactivate FramePuller
group #LightGreen Новый подход
== Взаимодействие с новым сервисом ==
Analyzer -> PersonDetector: _run_detectors(uin, camera, frame, zones)
activate PersonDetector
PersonDetector -> PersonDetector: prepare() - Метод для подготовки\nданных для отправки в сервис
PersonDetector -> PersonDetectionService: POST /picture/analyze\nМетод для анализа
activate PersonDetectionService
PersonDetectionService -> PersonDetectionService: Сервис анализирует изображение
PersonDetectionService --> PersonDetector: {results:\n\t[detection_1, detection_2, ...]\n}
deactivate PersonDetectionService
PersonDetector -> PersonDetector: parse_response() - Метод\nдля приведения результата в формат,\nкоторый был раньше
end
PersonDetector --> Analyzer: result dict\n{zone_id: {count, objects}, ...}
deactivate PersonDetector
== Форматирование результата ==
== Сохранение результата ==
Analyzer -> S3: storage.upload_fileobj(\n image, bucket, key, ...)
S3 --> Analyzer: ObjRef
Analyzer -> S3: storage.generate_presigned_url(obj_ref)
S3 --> Analyzer: presigned_url
== Отправка результата в Central ==
Analyzer -> Central: central.send('new_measurement', {\n timestamp,\n camera_id,\n measurement_id,\n image: zones_url,\n timings,\n errors,\n zones: zones_info\n})
note right
Отправка через aio_broker
в очередь 'overmind:input'
с командой 'new_measurement'
end note
Central --> Analyzer: (async, no wait)
Analyzer -> Analyzer: Обновить БД zones_db:\ndetected_at = time.time()
Analyzer --> TaskListener: complete
deactivate Analyzer
TaskListener -> TaskListener: Ожидать следующую задачу
deactivate TaskListener
@enduml

View file

@ -0,0 +1,252 @@
@startuml Crowd Node - Request Processing Flow
!define COMPONENT_BG_COLOR #E3F2FD
!define API_BG_COLOR #FFF3E0
!define STORAGE_BG_COLOR #F3E5F5
title Crowd Node: Процесс обработки запроса на анализ изображения
actor "Scheduler/External System" as Scheduler
participant "Redis Queue" as RedisQueue #FFCCCC
participant "tasks.py\n(Task Listener)" as TaskListener COMPONENT_BG_COLOR
participant "analyzer.py\n(Main Analyzer)" as Analyzer COMPONENT_BG_COLOR
participant "frames.py\n(Frame Puller)" as FramePuller COMPONENT_BG_COLOR
participant "LivePreview Service" as LivePreview API_BG_COLOR
participant "TevianHeadsDetector\n(detectors/tevian.py)" as Detector COMPONENT_BG_COLOR
participant "Redis Cache" as RedisCache #FFCCCC
participant "Tevian Cloud API\n(ext_api/tevian_api.py)" as TevianAPI API_BG_COLOR
participant "S3 Storage" as S3 STORAGE_BG_COLOR
participant "Central (crowd backend)" as Central API_BG_COLOR
== Получение задания ==
Scheduler -> RedisQueue: push task\n{cmd: "analyze_crowd",\nparams: {uin, camera_id, zones, ...}}
activate RedisQueue
TaskListener -> RedisQueue: pull_task()
activate TaskListener
RedisQueue --> TaskListener: task data
deactivate RedisQueue
TaskListener -> TaskListener: dispatch to handler\nHANDLERS['analyze_crowd']
TaskListener -> Analyzer: analyze_crowd(uin, camera_id, zones, **options)
activate Analyzer
== Получение кадра ==
Analyzer -> FramePuller: pull(uin, camera)
activate FramePuller
FramePuller -> LivePreview: GET /internal/preview?\nu={uin}&camera={camera}&q=2
activate LivePreview
LivePreview --> FramePuller: JPEG image bytes
deactivate LivePreview
FramePuller -> FramePuller: Frame(content)\n- создает PIL Image\n- генерирует image_id
FramePuller --> Analyzer: Frame object
deactivate FramePuller
Analyzer -> Analyzer: rotate(rotation_angle)\nесли нужно
== Запуск детекторов ==
Analyzer -> Analyzer: _run_detectors(uin, camera, frame, zones)
Analyzer -> Detector: request(uin, camera, frame, zones)
activate Detector
== Подготовка Tevian (prepare) ==
Detector -> Detector: prepare(cam_name, zones)
note right
Подготовка включает:
1. Создание/получение камеры
2. Синхронизацию очередей (зон)
3. Обновление параметров зон
end note
Detector -> Detector: _get_or_create_camera(cam_name)
Detector -> RedisCache: get_camera(cam_name)
activate RedisCache
RedisCache --> Detector: TCamera или None
deactivate RedisCache
alt Камеры нет в кеше
Detector -> TevianAPI: TCamera.get_all()
activate TevianAPI
TevianAPI -> TevianAPI: _refresh_token() if needed
TevianAPI -> "Tevian Cloud": GET /api/cameras
"Tevian Cloud" --> TevianAPI: список камер [{id, name, ...}]
TevianAPI --> Detector: [TCamera, ...]
deactivate TevianAPI
Detector -> RedisCache: set_camera(cam) для каждой
alt Камера все еще не найдена
Detector -> TevianAPI: TCamera.create(cam_name)
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/cameras\n{name, rtsp, frequency_plan_id, ...}
"Tevian Cloud" --> TevianAPI: {id, name, status, ...}
TevianAPI --> Detector: TCamera
deactivate TevianAPI
Detector -> RedisCache: set_camera(cam)
end
end
== Синхронизация очередей (зон) ==
Detector -> Detector: _get_camera_queues(cam)
Detector -> RedisCache: get_queue(q_id)\nдля каждого queues_ids камеры
RedisCache --> Detector: TQueue objects
loop Для каждой зоны из запроса
Detector -> Detector: Конвертировать координаты\nв относительные (0..1)
alt Очередь не найдена
Detector -> TevianAPI: TQueue.create(cam_id, zone_id, polygon, min_head_size)
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/queues\n{name, camera_id, roi_polygon_relative, ...}
"Tevian Cloud" --> TevianAPI: {id, name, camera_id, ...}
TevianAPI --> Detector: TQueue
deactivate TevianAPI
Detector -> RedisCache: set_queue(queue)
else Параметры зоны изменились
Detector -> TevianAPI: queue.save()
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/queues/{id}\n{roi_polygon_relative, ...}
"Tevian Cloud" --> TevianAPI: updated queue
TevianAPI --> Detector: success
deactivate TevianAPI
Detector -> RedisCache: set_queue(queue)
end
end
loop Для старых очередей (не в списке зон)
Detector -> TevianAPI: TQueue.delete_by_id(queue_id)
activate TevianAPI
TevianAPI -> "Tevian Cloud": DELETE /api/queues/{id}
"Tevian Cloud" --> TevianAPI: success
TevianAPI --> Detector: success
deactivate TevianAPI
Detector -> RedisCache: delete_queue(queue_id)
end
Detector -> TevianAPI: cam.refresh()
note right
Обновляем состояние камеры
после изменения очередей
end note
activate TevianAPI
TevianAPI -> "Tevian Cloud": GET /api/cameras/{id}
"Tevian Cloud" --> TevianAPI: {status, is_accepting_snapshots, ...}
TevianAPI --> Detector: updated TCamera
deactivate TevianAPI
Detector -> RedisCache: set_camera(cam)
== Отправка снапшота и получение результатов ==
Detector -> Detector: Проверка rate limiting\n(FORCED_WAIT_PERIOD)
note right
Избегаем HTTP 429: Too Many Requests
Ждем если запрос слишком частый
end note
alt Слишком частые запросы
Detector -> Detector: asyncio.sleep(wait_for)
end
Detector -> TevianAPI: cam.send_snapshot(frame.data)
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/cameras/{id}/snapshots\nContent-Type: image/jpeg\nbody: <JPEG bytes>
"Tevian Cloud" --> TevianAPI: {snapshot_accepted_at: <timestamp>}
TevianAPI --> Detector: timestamp
deactivate TevianAPI
Detector -> Detector: asyncio.sleep(TEVIAN_RECOGNITION_DELAY)\n(рекомендуется 12 сек)
Detector -> TevianAPI: TRecognition.get_many(queues_ids, timestamp)
activate TevianAPI
loop Polling до получения результатов или timeout
TevianAPI -> "Tevian Cloud": GET /api/recognitions?\nqueues_ids={ids}&utc_timestamp={ts}
"Tevian Cloud" --> TevianAPI: [recognitions...]
alt Результатов меньше чем очередей
TevianAPI -> TevianAPI: await gen.sleep(2)\nи повторить
else Все результаты получены
TevianAPI -> TevianAPI: break
end
end
TevianAPI -> TevianAPI: Фильтровать detections:\nоставить только\nfiltered_status == 'passed_filters'
TevianAPI --> Detector: [TRecognition, ...]
deactivate TevianAPI
== Форматирование результата ==
loop Для каждого recognition
Detector -> RedisCache: get_queue(rec.queue_id)
RedisCache --> Detector: TQueue
Detector -> Detector: Форматировать objects:\n[{x, y, w, h}, ...]\nиз bbox данных
Detector -> Detector: result[queue.name] = {\n 'count': len(objects),\n 'objects': objects\n}
end
Detector --> Analyzer: result dict\n{zone_id: {count, objects}, ...}
deactivate Detector
Analyzer -> Analyzer: _build_zones_info(zones, detected_values)
note right
Объединяет данные зон с результатами
детекторов, определяет length_by_ai
end note
Analyzer -> Analyzer: _get_triggered_zones(zones_info, timestamp)
note right
Определяет зоны для подсветки
на основе trigger_at и trigger_type
end note
== Сохранение результата ==
Analyzer -> Analyzer: frame.draw_zones(triggered_zones)
note right
Рисует полигоны триггерных зон
на изображении с прозрачностью
end note
Analyzer -> Analyzer: resize_image(image, 640)
Analyzer -> S3: storage.upload_fileobj(\n image, bucket, key, ...)
activate S3
S3 --> Analyzer: ObjRef
deactivate S3
Analyzer -> S3: storage.generate_presigned_url(obj_ref)
activate S3
S3 --> Analyzer: presigned_url
deactivate S3
Analyzer -> Analyzer: Удалить query params\nиз URL (сделать публичным)
== Отправка результата в Central ==
Analyzer -> Central: central.send('new_measurement', {\n timestamp,\n camera_id,\n measurement_id,\n image: zones_url,\n timings,\n errors,\n zones: zones_info\n})
activate Central
note right
Отправка через aio_broker
в очередь 'overmind:input'
с командой 'new_measurement'
end note
Central --> Analyzer: (async, no wait)
deactivate Central
Analyzer -> Analyzer: Обновить БД zones_db:\ndetected_at = time.time()
Analyzer --> TaskListener: complete
deactivate Analyzer
TaskListener -> TaskListener: Ожидать следующую задачу
deactivate TaskListener
@enduml

View file

@ -0,0 +1,249 @@
@startuml Crowd Node - Request Processing Flow
!define COMPONENT_BG_COLOR #E3F2FD
!define API_BG_COLOR #FFF3E0
!define STORAGE_BG_COLOR #F3E5F5
title Crowd Node: Процесс обработки запроса на анализ изображения
participant "Scheduler" as Scheduler
participant "Redis Queue" as RedisQueue #FFCCCC
participant "tasks.py\n(Task Listener)" as TaskListener COMPONENT_BG_COLOR
participant "analyzer.py\n(Main Analyzer)" as Analyzer COMPONENT_BG_COLOR
participant "frames.py\n(Frame Puller)" as FramePuller COMPONENT_BG_COLOR
participant "LivePreview service" as LivePreview API_BG_COLOR
participant "TevianHeadsDetector\n(detectors/tevian.py)" as Detector COMPONENT_BG_COLOR
participant "Redis Cache" as RedisCache #FFCCCC
participant "Tevian Cloud API\n(ext_api/tevian_api.py)" as TevianAPI API_BG_COLOR
participant "S3 Storage" as S3 STORAGE_BG_COLOR
participant "Central (crowd backend)" as Central API_BG_COLOR
== Получение задания ==
Scheduler -> RedisQueue: push task\n{cmd: "analyze_crowd",\nparams: {uin, camera_id, zones, ...}}
activate RedisQueue
TaskListener -> RedisQueue: pull_task()
activate TaskListener
RedisQueue --> TaskListener: task data
deactivate RedisQueue
TaskListener -> Analyzer: analyze_crowd(uin, camera_id, zones, **options)
activate Analyzer
== Получение кадра ==
Analyzer -> FramePuller: pull(uin, camera)
activate FramePuller
FramePuller -> LivePreview: GET /internal/preview
activate LivePreview
LivePreview --> FramePuller: JPEG image bytes
deactivate LivePreview
FramePuller -> FramePuller: Frame(content)\n- создает PIL Image\n- генерирует image_id
FramePuller --> Analyzer: Frame object
deactivate FramePuller
== Запуск детектора ==
Analyzer -> Analyzer: _run_detectors(uin, camera, frame, zones)
Analyzer -> Detector: request(uin, camera, frame, zones)
activate Detector
== Подготовка Tevian (prepare) ==
Detector -> Detector: prepare(cam_name, zones)
note right
Подготовка включает:
1. Создание/получение камеры
2. Синхронизацию очередей (зон)
3. Обновление параметров зон
end note
Detector -> Detector: _get_or_create_camera(cam_name)
Detector -> RedisCache: get_camera(cam_name)
activate RedisCache
RedisCache --> Detector: TCamera or None
deactivate RedisCache
alt Камеры нет в кеше
Detector -> TevianAPI: TCamera.get_all()
activate TevianAPI
TevianAPI -> TevianAPI: _refresh_token() if needed
TevianAPI -> "Tevian Cloud": GET /api/cameras
"Tevian Cloud" --> TevianAPI: список камер [{id, name, ...}]
TevianAPI --> Detector: [TCamera, ...]
deactivate TevianAPI
Detector -> RedisCache: set_camera(cam) для каждой
alt Камера все еще не найдена
Detector -> TevianAPI: TCamera.create(cam_name)
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/cameras\n{name, rtsp, frequency_plan_id, ...}
"Tevian Cloud" --> TevianAPI: {id, name, status, ...}
TevianAPI --> Detector: TCamera
deactivate TevianAPI
Detector -> RedisCache: set_camera(cam)
end
end
== Синхронизация очередей (зон) ==
Detector -> Detector: _get_camera_queues(cam)
Detector -> RedisCache: get_queue(q_id)\nдля каждого queues_ids камеры
RedisCache --> Detector: TQueue objects
loop Для каждой зоны из запроса
Detector -> Detector: Конвертировать координаты\nв относительные (0..1)
alt Очередь не найдена
Detector -> TevianAPI: TQueue.create(cam_id, zone_id, polygon, min_head_size)
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/queues\n{name, camera_id, roi_polygon_relative, ...}
"Tevian Cloud" --> TevianAPI: {id, name, camera_id, ...}
TevianAPI --> Detector: TQueue
deactivate TevianAPI
Detector -> RedisCache: set_queue(queue)
else Параметры зоны изменились
Detector -> TevianAPI: queue.save()
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/queues/{id}\n{roi_polygon_relative, ...}
"Tevian Cloud" --> TevianAPI: updated queue
TevianAPI --> Detector: success
deactivate TevianAPI
Detector -> RedisCache: set_queue(queue)
end
end
loop Для старых очередей (не в списке зон)
Detector -> TevianAPI: TQueue.delete_by_id(queue_id)
activate TevianAPI
TevianAPI -> "Tevian Cloud": DELETE /api/queues/{id}
"Tevian Cloud" --> TevianAPI: success
TevianAPI --> Detector: success
deactivate TevianAPI
Detector -> RedisCache: delete_queue(queue_id)
end
Detector -> TevianAPI: cam.refresh()
note right
Обновляем состояние камеры
после изменения очередей
end note
activate TevianAPI
TevianAPI -> "Tevian Cloud": GET /api/cameras/{id}
"Tevian Cloud" --> TevianAPI: {status, is_accepting_snapshots, ...}
TevianAPI --> Detector: updated TCamera
deactivate TevianAPI
Detector -> RedisCache: set_camera(cam)
== Отправка снапшота и получение результатов ==
Detector -> Detector: Проверка rate limiting\n(FORCED_WAIT_PERIOD)
note right
Избегаем HTTP 429: Too Many Requests
Ждем если запрос слишком частый
end note
alt Слишком частые запросы
Detector -> Detector: asyncio.sleep(wait_for)
end
Detector -> TevianAPI: cam.send_snapshot(frame.data)
activate TevianAPI
TevianAPI -> "Tevian Cloud": POST /api/cameras/{id}/snapshots\nContent-Type: image/jpeg\nbody: <JPEG bytes>
"Tevian Cloud" --> TevianAPI: {snapshot_accepted_at: <timestamp>}
TevianAPI --> Detector: timestamp
deactivate TevianAPI
Detector -> Detector: asyncio.sleep(TEVIAN_RECOGNITION_DELAY)\n(рекомендуется 12 сек)
Detector -> TevianAPI: TRecognition.get_many(queues_ids, timestamp)
activate TevianAPI
loop Polling до получения результатов или timeout
TevianAPI -> "Tevian Cloud": GET /api/recognitions?\nqueues_ids={ids}&utc_timestamp={ts}
"Tevian Cloud" --> TevianAPI: [recognitions...]
alt Результатов меньше чем очередей
TevianAPI -> TevianAPI: await gen.sleep(2)\nи повторить
else Все результаты получены
TevianAPI -> TevianAPI: break
end
end
TevianAPI -> TevianAPI: Фильтровать detections:\nоставить только\nfiltered_status == 'passed_filters'
TevianAPI --> Detector: [TRecognition, ...]
deactivate TevianAPI
== Форматирование результата ==
loop Для каждого recognition
Detector -> RedisCache: get_queue(rec.queue_id)
RedisCache --> Detector: TQueue
Detector -> Detector: Форматировать objects:\n[{x, y, w, h}, ...]\nиз bbox данных
Detector -> Detector: result[queue.name] = {\n 'count': len(objects),\n 'objects': objects\n}
end
Detector --> Analyzer: result dict\n{zone_id: {count, objects}, ...}
deactivate Detector
Analyzer -> Analyzer: _build_zones_info(zones, detected_values)
note right
Объединяет данные зон с результатами
детекторов, определяет length_by_ai
end note
Analyzer -> Analyzer: _get_triggered_zones(zones_info, timestamp)
note right
Определяет зоны для подсветки
на основе trigger_at и trigger_type
end note
== Сохранение результата ==
Analyzer -> Analyzer: frame.draw_zones(triggered_zones)
note right
Рисует полигоны триггерных зон
на изображении с прозрачностью
end note
Analyzer -> Analyzer: resize_image(image, 640)
Analyzer -> S3: storage.upload_fileobj(\n image, bucket, key, ...)
activate S3
S3 --> Analyzer: ObjRef
deactivate S3
Analyzer -> S3: storage.generate_presigned_url(obj_ref)
activate S3
S3 --> Analyzer: presigned_url
deactivate S3
Analyzer -> Analyzer: Удалить query params\nиз URL (сделать публичным)
== Отправка результата в Central ==
Analyzer -> Central: central.send('new_measurement', {\n timestamp,\n camera_id,\n measurement_id,\n image: zones_url,\n timings,\n errors,\n zones: zones_info\n})
activate Central
note right
Отправка через aio_broker
в очередь 'overmind:input'
с командой 'new_measurement'
end note
Central --> Analyzer: (async, no wait)
deactivate Central
Analyzer -> Analyzer: Обновить БД zones_db:\ndetected_at = time.time()
Analyzer --> TaskListener: complete
deactivate Analyzer
TaskListener -> TaskListener: Ожидать следующую задачу
deactivate TaskListener
@enduml

View file

@ -0,0 +1,59 @@
@startuml Crowd Node - Request Processing Flow
!define COMPONENT_BG_COLOR #E3F2FD
!define API_BG_COLOR #FFF3E0
!define STORAGE_BG_COLOR #F3E5F5
participant "tasks.py" as TaskListener COMPONENT_BG_COLOR
participant "analyzer.py" as Analyzer COMPONENT_BG_COLOR
participant "PersonDetector" as PersonDetector
participant "PersonDetectionService" as PersonDetectionService
participant "Redis Queue" as RedisQueue #FFCCCC
participant "S3 Storage" as S3 STORAGE_BG_COLOR
participant "Central" as Central API_BG_COLOR
== Вариант 1: Получаем задачу в analyze, процессим синхронно и отдаем ответ ==
TaskListener -> Analyzer : Получена задача на процессинг
Analyzer -> PersonDetector : Отправка задачи на анализ
PersonDetector -> PersonDetectionService : Установка HTTP соединения
group Открытое HTTP соединение
PersonDetector -> PersonDetectionService : HTTP POST запрос
PersonDetectionService -> PersonDetectionService : Обработка запроса
PersonDetectionService --> PersonDetector : Отправка в response результата
end group
PersonDetector --> Analyzer : Результаты задачи
== Вариант 2: Получаем задачу в analyze, кладем в очередь, процессим в очереди, формируем результат и отдаем в ответе analyze ==
TaskListener -> Analyzer : Получена задача на процессинг
Analyzer -> RedisQueue : Положили в очередь задачу на анализ изображения
group Polling
PersonDetector -> RedisQueue : опрашивает очередь на предмет наличия задач
RedisQueue --> PersonDetector : Получает задачу на анализ
group Асинхронный запрос на анализ
PersonDetector -> PersonDetectionService : Вызов API сервиса для анализа
PersonDetectionService -> PersonDetectionService : Обработка запроса
PersonDetectionService --> PersonDetector : Отправка в response результата
end group
end group
group Polling
Analyzer -> RedisQueue : Опрашивает очередь в ожидании выполненных задач
RedisQueue --> Analyzer : Выполненные задачи анализа
end group
== Вариант 3: Получаем задачу в analyze, процессим синхронно, формируем результат, отдаем в ответе get_results ==
note over PersonDetector
Такое ощущение, что это похоже на вариант 1
end note
== Вариант 4: Получаем задачу в analyze кладем в очередь, процессим в очереди, формируем результат и отдаем в ответе get_results ==
TaskListener -> Analyzer : Получена задача на процессинг
activate Analyzer
Analyzer -> PersonDetector : Отправка задачи на анализ\n<b>во внутреннюю очередь PersonDetector'a</b>
group Внутрянка PersonDetector'a
PersonDetector -> PersonDetector: как то хэндлит запросы на обработку
group Обработка в порядке очереди
PersonDetector -> PersonDetectionService : отправляет HTTP запросы на обработку\nпо внутренней логике
PersonDetectionService --> PersonDetector
PersonDetector --> Analyzer : Результат задачи на обработку
deactivate Analyzer
end group
end group
@enduml

View file

@ -1,540 +0,0 @@
@startuml Folder System Architecture
title Система папок/групп в Ivideon
package "Database" @startuml FolderSystemSimple
title Система папок в Ivideon
entity "folders" as folders_db {
* _id : ObjectId
--
* owner_id : string
* name : string
* parents : array
* objects : array
* root : boolean
}
entity "permission_grants" as grants_db {
* _id : ObjectId
--
* object_id : string
* object_type : string
* grantee_id : string
* permissions : array
}
entity "servers" as servers_db {
* _id : ObjectId
--
* owner_id : string
* cameras : object
}
entity "Folder" as folder_class {
+ get_objects(type)
+ add_object(obj)
+ remove_object(obj)
+ has_permissions(perm)
}
entity "FolderTree" as tree_class {
+ folders : dict
+ find_folders()
+ reload()
}
entity "Camera" as camera_node {
+ id : "server:index"
+ object_type : "camera"
}
folders_db ||--o{ folder_class
grants_db ||--o{ folder_class
servers_db ||--o{ camera_node
tree_class --> folder_class : manages
folder_class --> camera_node : contains
note right of folders_db
objects[] format:
[
{object_type: "camera",
object_id: "server:0"},
{object_type: "folder",
object_id: "subfolder_id"}
]
end note
note bottom of tree_class
Usage:
tree = FolderTree(user_id)
folder = tree.folders[folder_id]
cameras = folder.get_objects("camera")
end note
@enduml
@startuml FolderSystemSimple
title Система папок в Ivideon
entity "folders" as folders_db {
* _id : ObjectId
--
* owner_id : string
* name : string
* parents : array
* objects : array
* root : boolean
}
entity "permission_grants" as grants_db {
* _id : ObjectId
--
* object_id : string
* object_type : string
* grantee_id : string
* permissions : array
}
entity "servers" as servers_db {
* _id : ObjectId
--
* owner_id : string
* cameras : object
}
entity "Folder" as folder_class {
+ get_objects(type)
+ add_object(obj)
+ remove_object(obj)
+ has_permissions(perm)
}
entity "FolderTree" as tree_class {
+ folders : dict
+ find_folders()
+ reload()
}
entity "Camera" as camera_node {
+ id : "server:index"
+ object_type : "camera"
}
folders_db ||--o{ folder_class
grants_db ||--o{ folder_class
servers_db ||--o{ camera_node
tree_class --> folder_class : manages
folder_class --> camera_node : contains
note right of folders_db
objects[] format:
[
{object_type: "camera",
object_id: "server:0"},
{object_type: "folder",
object_id: "subfolder_id"}
]
end note
note bottom of tree_class
Usage:
tree = FolderTree(user_id)
folder = tree.folders[folder_id]
cameras = folder.get_objects("camera")
end note
@enduml
@startuml FolderSystemSimple
title Система папок в Ivideon
entity "folders" as folders_db {
* _id : ObjectId
--
* owner_id : string
* name : string
* parents : array
* objects : array
* root : boolean
}
entity "permission_grants" as grants_db {
* _id : ObjectId
--
* object_id : string
* object_type : string
* grantee_id : string
* permissions : array
}
entity "servers" as servers_db {
* _id : ObjectId
--
* owner_id : string
* cameras : object
}
entity "Folder" as folder_class {
+ get_objects(type)
+ add_object(obj)
+ remove_object(obj)
+ has_permissions(perm)
}
entity "FolderTree" as tree_class {
+ folders : dict
+ find_folders()
+ reload()
}
entity "Camera" as camera_node {
+ id : "server:index"
+ object_type : "camera"
}
folders_db ||--o{ folder_class
grants_db ||--o{ folder_class
servers_db ||--o{ camera_node
tree_class --> folder_class : manages
folder_class --> camera_node : contains
note right of folders_db
objects[] format:
[
{object_type: "camera",
object_id: "server:0"},
{object_type: "folder",
object_id: "subfolder_id"}
]
end note
note bottom of tree_class
Usage:
tree = FolderTree(user_id)
folder = tree.folders[folder_id]
cameras = folder.get_objects("camera")
end note
@enduml
@startuml FolderSystemSimple
title Система папок в Ivideon
entity "folders" as folders_db {
* _id : ObjectId
--
* owner_id : string
* name : string
* parents : array
* objects : array
* root : boolean
}
entity "permission_grants" as grants_db {
* _id : ObjectId
--
* object_id : string
* object_type : string
* grantee_id : string
* permissions : array
}
entity "servers" as servers_db {
* _id : ObjectId
--
* owner_id : string
* cameras : object
}
entity "Folder" as folder_class {
+ get_objects(type)
+ add_object(obj)
+ remove_object(obj)
+ has_permissions(perm)
}
entity "FolderTree" as tree_class {
+ folders : dict
+ find_folders()
+ reload()
}
entity "Camera" as camera_node {
+ id : "server:index"
+ object_type : "camera"
}
folders_db ||--o{ folder_class
grants_db ||--o{ folder_class
servers_db ||--o{ camera_node
tree_class --> folder_class : manages
folder_class --> camera_node : contains
note right of folders_db
objects[] format:
[
{object_type: "camera",
object_id: "server:0"},
{object_type: "folder",
object_id: "subfolder_id"}
]
end note
note bottom of tree_class
Usage:
tree = FolderTree(user_id)
folder = tree.folders[folder_id]
cameras = folder.get_objects("camera")
end note
@enduml
@startuml FolderSystemSimple
title Система папок в Ivideon
entity "folders" as folders_db {
* _id : ObjectId
--
* owner_id : string
* name : string
* parents : array
* objects : array
* root : boolean
}
entity "permission_grants" as grants_db {
* _id : ObjectId
--
* object_id : string
* object_type : string
* grantee_id : string
* permissions : array
}
entity "servers" as servers_db {
* _id : ObjectId
--
* owner_id : string
* cameras : object
}
entity "Folder" as folder_class {
+ get_objects(type)
+ add_object(obj)
+ remove_object(obj)
+ has_permissions(perm)
}
entity "FolderTree" as tree_class {
+ folders : dict
+ find_folders()
+ reload()
}
entity "Camera" as camera_node {
+ id : "server:index"
+ object_type : "camera"
}
folders_db ||--o{ folder_class
grants_db ||--o{ folder_class
servers_db ||--o{ camera_node
tree_class --> folder_class : manages
folder_class --> camera_node : contains
note right of folders_db
objects[] format:
[
{object_type: "camera",
object_id: "server:0"},
{object_type: "folder",
object_id: "subfolder_id"}
]
end note
note bottom of tree_class
Usage:
tree = FolderTree(user_id)
folder = tree.folders[folder_id]
cameras = folder.get_objects("camera")
end note
@enduml
@startuml FolderSystemSimple
title Система папок в Ivideon
entity "folders" as folders_db {
* _id : ObjectId
--
* owner_id : string
* name : string
* parents : array
* objects : array
* root : boolean
}
entity "permission_grants" as grants_db {
* _id : ObjectId
--
* object_id : string
* object_type : string
* grantee_id : string
* permissions : array
}
entity "servers" as servers_db {
* _id : ObjectId
--
* owner_id : string
* cameras : object
}
entity "Folder" as folder_class {
+ get_objects(type)
+ add_object(obj)
+ remove_object(obj)
+ has_permissions(perm)
}
entity "FolderTree" as tree_class {
+ folders : dict
+ find_folders()
+ reload()
}
entity "Camera" as camera_node {
+ id : "server:index"
+ object_type : "camera"
}
folders_db ||--o{ folder_class
grants_db ||--o{ folder_class
servers_db ||--o{ camera_node
tree_class --> folder_class : manages
folder_class --> camera_node : contains
note right of folders_db
objects[] format:
[
{object_type: "camera",
object_id: "server:0"},
{object_type: "folder",
object_id: "subfolder_id"}
]
end note
note bottom of tree_class
Usage:
tree = FolderTree(user_id)
folder = tree.folders[folder_id]
cameras = folder.get_objects("camera")
end note
@enduml
{
database folders_db as "folders collection"
database grants_db as "permission_grants"
database servers_db as "servers collection"
}
package "Folder Classes" {
class Folder {
+id: ObjectId
+owner_id: string
+name: string
+parents: List[string]
+objects: List[dict]
+root: boolean
--
+get_objects(type): List[string]
+add_object(obj)
+remove_object(obj)
+has_permissions(perm): boolean
}
class FolderTree {
+owner_id: string
+folders: Dict[id, Folder]
+objects: Dict[type, Dict[id, Node]]
+roots: List[Folder]
--
+find_folders(): List[Folder]
+reload()
}
class BaseNode {
+id: string
+owner_id: string
+grantee_id: string
+grants: Set[PermissionGrant]
--
+has_permissions(perm): boolean
+permissions: Tuple[string]
}
}
package "Permission System" {
class PermissionGrant {
+object_id: string
+object_type: string
+grantee_id: string
+permissions: List[string]
+shared_at: dict
}
}
package "Node Types" {
class Camera {
+id: "server:index"
+object_type: "camera"
}
}
' Relationships
Folder --|> BaseNode
FolderTree --> Folder : manages
Folder --> PermissionGrant : has grants
BaseNode --> PermissionGrant : uses
Folder --> folders_db : stored in
PermissionGrant --> grants_db : stored in
Camera --> servers_db : stored in
' Composition relationships
Folder --> Camera : "contains (objects[])"
Folder --> Folder : "contains subfolders"
note right of Folder
objects[] содержит:
[
{object_type: "camera",
object_id: "server:0"},
{object_type: "folder",
object_id: "subfolder_id"}
]
end note
note right of FolderTree
Главная точка доступа:
tree = FolderTree(user_id)
folder = tree.folders[folder_id]
cameras = folder.get_objects("camera")
end note
note left of PermissionGrant
Права доступа:
- admin (изменение)
- read (просмотр)
- Наследование по иерархии
end note
@enduml

View file

@ -1,326 +0,0 @@
@startuml _get_all_user_cameras Sequence Diagram
title Последовательность выполнения _get_all_user_cameras
participant "Caller" as caller
participant "_get_all_user_cameras" as main_func
participant "_get_servers" as get_servers
participant "MongoDB" as mongo
participant "ivideon.servers" as servers_collection
note over main_func
Входные параметры:
- user_id: int
- requested_cameras: list[str]
(формат: ["server1:0", "server1:1"])
- service_name: str (например: "crowd")
end note
caller -> main_func: _get_all_user_cameras(user_id, requested_cameras, service_name)
activate main_func
main_func -> main_func: cameras = {}
main_func -> get_servers: _get_servers(requested_cameras)
activate get_servers
note over get_servers
Извлекает server_ids из camera_ids:
["server1:0", "server1:1"]
→ ["server1", "server1"]
→ ["server1"]
end note
@startuml _get_all_user_cameras Activity Diagram
title Алгоритм работы _get_all_user_cameras
start
note right
**Входные параметры:**
• user_id: int
• requested_cameras: list[str]
(формат: ["server1:0", "server1:1"])
• service_name: str (например: "crowd")
end note
:Инициализация cameras = {};
:Извлечь server_ids из requested_cameras|
note right
["server1:0", "server1:1"]
→ ["server1"]
end note
:Построить MongoDB запрос:
query = {
'deleted': {'$ne': True},
'_id': {'$in': server_ids}
}|
:Задать проекцию полей:
projection = {
'_id': 1, 'owner_id': 1, 'name': 1,
'cameras': 1, 'cam_services': 1,
'info': 1, 'timezone': 1
}|
:Выполнить запрос к MongoDB:
servers = db.ivideon().servers.find(query, projection)|
partition "Обработка серверов" {
:Взять следующий server;
while (Есть серверы для обработки?) is (да)
:server_id = server['_id'];
:is_shared = server['owner_id'] != user_id;
:server_build_type = server.get('info', {}).get('build_type', '');
:is_server_embedded = server_build_type.endswith('camera');
:cam_services = server.get('cam_services', {});
partition "Обработка камер сервера" {
:Взять следующую камеру (camera_idx, camera_data);
while (Есть камеры на сервере?) is (да)
:service_info = cam_services.get(camera_idx, {})
.get(service_name, {});
if (service_info.get('active', False) == True?) then (да)
:camera_id = f'{server_id}:{camera_idx}';
if (is_server_embedded?) then (да)
:camera_name = server['name'];
else (нет)
:camera_name = camera_data.get('name');
endif
:cameras[camera_id] = {
'id': camera_id,
'owner_id': server['owner_id'],
'server': server_id,
'name': camera_name,
'is_shared': is_shared,
'timezone': server.get('timezone') or
server.get('timezone_default'),
'is_embedded': is_server_embedded
};
else (нет)
note right: Камера пропускается - сервис неактивен
endif
:Взять следующую камеру (camera_idx, camera_data);
endwhile (нет)
}
:Взять следующий server;
endwhile (нет)
}
:return cameras;
stop
note left
**Результат:** dict[camera_id, camera_info]
**Пример:**
{
"507f...439011:0": {
"id": "507f...439011:0",
"owner_id": "user123",
"server": "507f...439011",
"name": "Камера входа",
"is_shared": false,
"timezone": "Europe/Moscow",
"is_embedded": false
}
}
end note
@enduml
get_servers -> get_servers: requested_server_ids = [camera_id.split(':')[0] \nfor camera_id in requested_camera_ids]
get_servers -> get_servers: query = {\n 'deleted': {'$ne': True},\n '_id': {'$in': requested_server_ids}\n}
get_servers -> get_servers: projection = {\n '_id': 1, 'owner_id': 1, 'name': 1,\n 'cameras': 1, 'cam_services': 1,\n 'info': 1, 'timezone': 1\n}
get_servers -> mongo: db.ivideon().servers.find(query, projection)
activate mongo
mongo -> servers_collection: find documents
activate servers_collection
servers_collection -> mongo: return server documents
deactivate servers_collection
mongo -> get_servers: list[server_documents]
deactivate mongo
get_servers -> main_func: return servers_list
deactivate get_servers
loop for each server in servers_list
main_func -> main_func: server_id = server['_id']
main_func -> main_func: is_shared = server['owner_id'] != user_id
main_func -> main_func: server_build_type = server.get('info', {}).get('build_type', '')
main_func -> main_func: is_server_embedded = server_build_type.endswith('camera')
main_func -> main_func: cam_services = server.get('cam_services', {})
loop for camera_idx, camera_data in server.cameras.items()
main_func -> main_func: service_info = cam_services.get(camera_idx, {})\n .get(service_name, {})
alt service_info.get('active', False) == True
main_func -> main_func: camera_id = f'{server_id}:{camera_idx}'
alt is_server_embedded == True
main_func -> main_func: camera_name = server['name']
else
main_func -> main_func: camera_name = camera_data.get('name')
end
main_func -> main_func: cameras[camera_id] = {\n 'id': camera_id,\n 'owner_id': server['owner_id'],\n 'server': server_id,\n 'name': camera_name,\n 'is_shared': is_shared,\n 'timezone': server.get('timezone') or\n server.get('timezone_default'),\n 'is_embedded': is_server_embedded\n}
note right
Создается полная информация
о камере для возврата
end note
else
note right
Камера пропускается:
сервис неактивен
end note
end
end
end
main_func -> caller: return cameras dict
deactivate main_func
note over caller
Результат: dict[camera_id, camera_info]
где camera_id = "server_id:camera_index"
Пример:
{
"507f...439011:0": {
"id": "507f...439011:0",
"owner_id": "user123",
"server": "507f...439011",
"name": "Камера входа",
"is_shared": false,
"timezone": "Europe/Moscow",
"is_embedded": false
}
}
end note
@enduml

View file

@ -1,58 +0,0 @@
@startuml
title Верхнеуровневые сущности сервиса Crowd
card api_concept{
entity "CrowdReport(APIObject)" as CRA{
+ id str
+ owner_id str
+ type str
+ name str
+ status str
+ created_at timestamp
+ updated_at timestamp
+ progress int
+ options dict
+ create() -> CrowdReport
}
}
card crowd_service{
card backend {
}
card bot_notifier {
}
card frontend {
card impl {
entity CrowdReport{
+ delete() -> None
+ create() -> CrowdReport
}
}
}
card node {
}
card protocols{
}
card report_builder{
}
card utils {
}
}
json options_dict {
"cameras": ["cam1", "cam2"],
"folders": ["folder1"],
"zones": ["zone1"]
}
CrowdReport ..|> CRA
CRA::options -- options_dict
@enduml

View file

@ -32,7 +32,10 @@ Project starts the 1st of january 2026
[API-9489 II] starts 2026-02-09
[API-9489 II] displays on same row as [API-9489]
[API-9489 II] requires 14 days
[API-9489 II] is colored in LightGray
[API-9489 II] is colored in LightGreen
[API-9576] starts 2026-02-09
[API-9576] requires 14 days
[API-9576] is colored in cyan
legend

View file

@ -5,11 +5,15 @@ printscale daily zoom 4
[Sprint 2026-3] requires 14 days
Project starts 2026-02-09
-- API-9489 --
-- API-9489 --
[Sprint 2026-3] starts 2026-02-09
[Формирование драфта спеки] starts 2026-02-09
[Формирование драфта спеки] is colored in cyan
[Формирование драфта спеки] requires 2 days
[Формирование драфта спеки] is colored in LightGreen
[Формирование драфта спеки] requires 3 days
-- ▶ API-9576 --
[Реализация Фазы 1] starts 2026-02-12
[Реализация Фазы 1] is colored in lightgreen
[Реализация Фазы 1] requires 2 days
' Подсветка выходных
2026-02-14 to 2026-02-15 are colored LightBlue

View file

@ -0,0 +1,83 @@
@startuml API Infrastructure with HAProxy
skinparam linetype ortho
title Архитектура развертывания API (с HAProxy)
actor "Клиент" as Client
cloud "Internet" as Internet
package "Kubernetes Cluster" {
package "Ingress Layer" {
component "Nginx Ingress\nController" as NginxIngress #lightblue
note right of NginxIngress
- HTTPS (443) termination
- TLS (Let's Encrypt)
- X-Forwarded-For
- Domains:
* api.ivideon.com
* api.stage-01.stg01-k8s.extcam.com
end note
}
package "Proxy Layer" {
component "HAProxy\n(haproxy-central)" as HAProxy #lightgreen
note right of HAProxy
- Port 80 (HTTP)
- ACL routing
- Health checks (/status)
- Backend: api4.service.ivideon:80
end note
}
package "Service Layer" {
component "api4 Service" as Service #lightyellow
note right of Service
- Kubernetes Service
- Port 80 → 8080
- Load balancing
- DNS: api4.service.ivideon
end note
}
package "Application Layer" {
collections "api4 Pods" as Pods
component "Pod 1" as Pod1 {
component "Tornado\nHTTP Server" as Tornado1 #orange
note bottom of Tornado1
- Port: 8080
- xheaders: true
- Workers: 4
end note
}
component "Pod 2-N" as PodN {
component "Tornado\nHTTP Server" as TornadoN #orange
}
}
}
database "MongoDB\n(main)" as MongoDB
database "MongoDB\n(user_registry)" as UserRegistry
Client --> Internet: HTTPS\nPOST /public/registration
Internet --> NginxIngress: 443 (HTTPS)
NginxIngress --> HAProxy: 80 (HTTP)\n+ X-Forwarded-For
HAProxy --> Service: api4.service.ivideon:80\n(ACL: !has_api5_components)
Service --> Pods: Round-robin LB
Pods --> Pod1: 8080
Pods --> PodN: 8080
Pod1 --> MongoDB: users.insert_one()
Pod1 --> UserRegistry: check duplicate
note bottom of HAProxy
**HAProxy ACL Routing:**
- use_backend api4 if host_api !has_api5_components
- Health check: GET /status
- server-template api-four-srv 4
- option redispatch
end note
@enduml

View file

@ -0,0 +1,187 @@
@startuml Camera Addition Workflow
skinparam sequenceMessageAlign center
skinparam responseMessageBelowArrow true
title Ivideon Camera Addition Workflow (New User)
actor "User" as user
participant "API5\n(api_concept)" as api5
participant "Device Registry\n(Router)" as registry
participant "API4\n(api)" as api4
participant "MongoDB" as db
participant "Ivideon Server\n/Camera Device" as device
participant "Broker/Hive" as broker
participant "Billing System" as billing
== ЭТАП 1: Инициализация (Token Creation) ==
user -> api5: POST /servers\n{name, device_id, device_id_type}
activate api5
api5 -> api5: Server.create()
api5 -> api5: AttachmentToken.create()
api5 -> db: Валидация blacklist\n(device_id)
db --> api5: OK
api5 -> api4: POST /users/{uid}/server_attachment/tokens
activate api4
api4 -> db: INSERT attachment_tokens\n{token, expires_at, status='pending'}
api4 --> api5: {token, expires_at}
deactivate api4
api5 -> api5: Отправка метрики\nDeviceCreationMetric(token_creation)
api5 --> user: {token, expires_at, status='pending'}
deactivate api5
== ЭТАП 2: Использование токена (Token Acquisition) ==
device -> registry: POST /public/roster\n{login=token, mac, sn}
activate registry
note right
Камера/сервер подключается
с использованием токена
end note
registry -> db: Поиск токена в attachment_tokens
db --> registry: {token_data, customer_cloud}
registry -> api5: Редирект в customer cloud\nPOST /public/roster
activate api5
api5 -> api5: AttachmentToken.acquire()\n{mac_address, serial_number}
api5 -> api4: POST /public/roster
activate api4
api4 -> db: Создание/обновление сервера\nservers.{uin}
api4 -> db: Генерация UIN и password
db --> api4: {uin, password}
api4 --> api5: {uin, password}
deactivate api4
api5 -> db: UPDATE attachment_tokens\nstatus='finished'
api5 -> api5: EVENT: attachment_token/finished
api5 --> registry: {uin, password}
deactivate api5
registry --> device: {uin, password}
deactivate registry
note right of device
Устройство сохраняет
UIN и password для
дальнейшей аутентификации
end note
== ЭТАП 3: Регистрация сервера (Server Registration) ==
device -> broker: Подключение с\n{uin, password}
activate broker
broker -> db: Проверка credentials
db --> broker: OK
broker -> db: UPDATE servers.{uin}\n{online=true, connected=true}
broker -> api4: EVENT: server/online
deactivate broker
== ЭТАП 4: Обнаружение и добавление камер (Camera Discovery) ==
device -> api4: NOTIFICATION: new_cams\n{server_id, cameras: {...}}
activate api4
api4 -> api4: roster.new_cams()
api4 -> db: object_lock(server_id)
api4 -> api4: _process_new_cams()
note right
Для каждой камеры:
1. Валидация сервера
2. Фильтрация инициализированных
3. Проставление флагов:
- billing_notified = True
- billing_initialized = True
4. Установка timezone
end note
api4 -> api4: Выбор обработчика по billver:\n- b4_new_cams()\n- b5_new_cams()\n- mts_b2b_new_cams()
== ЭТАП 5: Конфигурация услуг (Service Configuration) ==
alt Billing v4
api4 -> api4: b4_new_cams()
api4 -> api4: set_defaults_for_new_objects()
api4 -> db: CREATE cam_services[cam_id]\n{archive, notifications, ...}
else Billing v5
api4 -> api4: b5_new_cams()
api4 -> billing: camera_tariffs.try_set_default()
activate billing
billing -> db: Поиск prepaid plans\n(по serial number)
billing -> db: CREATE camera_plan
billing --> api4: OK
deactivate billing
api4 -> db: CREATE cam_services[cam_id]
else MTS B2B
api4 -> api4: mts_b2b_new_cams()
api4 -> db: Поиск неназначенной\nMTS subscription
api4 -> db: Назначение тарифа
api4 -> api4: LOG: mts-subscription-activated
end
api4 -> db: UPDATE servers.{server_id}.cameras.{cam_id}\n{name, online, billing_initialized, ...}
api4 -> api4: Отправка метрики\nDeviceCreationMetric(billing_subscription)
api4 -> api4: Аудит лог: device/add
api4 -> api4: TDC-notifier уведомление
api4 --> device: OK
deactivate api4
== ЭТАП 6: Отражение в интерфейсе (API Response) ==
user -> api5: GET /servers/{id}?projection=cameras
activate api5
api5 -> db: FIND servers.{id}.cameras
db --> api5: {cameras_dict}
api5 -> api5: Server.cameras property
loop for each camera
api5 -> api5: roster.show_camera_to_user()
note right
Фильтрация:
- Не удалена
- Есть name
- billing_initialized
end note
api5 -> api5: Camera.from_server()
end
api5 --> user: [\n {id, name, owner_id, online, type, ...},\n ...\n]
deactivate api5
== ЭТАП 7: Управление камерой (Operations) ==
user -> api5: PUT /servers/{srv}/cameras/{cam}/name\n{name: "New Name"}
activate api5
api5 -> api5: Проверка прав доступа
api5 -> db: UPDATE servers.{srv}.cameras.{cam}.name
db --> api5: OK
api5 -> broker: Отправка команды на device
activate broker
broker -> device: UPDATE camera name
broker --> api5: ACK
deactivate broker
api5 -> api5: Аудит лог: camera/update
api5 --> user: {id, name: "New Name", ...}
deactivate api5
@enduml

View file

@ -0,0 +1,151 @@
@startuml Desktop Server Workflow
skinparam sequenceMessageAlign center
title Ivideon Desktop Server (MacBook) - Camera Addition Workflow
actor "User" as user
participant "Ivideon Server\nApp (MacBook)" as app
participant "MacBook\nFaceTime Camera" as camera
participant "API5/API4" as api
database "MongoDB" as db
participant "Broker/Hive" as broker
== ЭТАП 1: Установка и авторизация ==
user -> app: Запускает приложение
activate app
app -> user: Показывает форму входа
user -> app: Вводит email + password
app -> api: POST /auth\n{email, password}
activate api
api -> db: Валидация credentials
api --> app: {access_token, user_id, session_id}
deactivate api
note right of app
Приложение получило
access_token для
API запросов
end note
== ЭТАП 2: Автоматическая регистрация сервера ==
app -> app: Определяет параметры:\n- device_id = MacBook SN\n- build_type = "mac_server"\n- name = "MacBook Pro"
app -> api: POST /public/roster\n{login: "user@example.com",\nname: "MacBook Pro",\ndevice_id: "ABC123",\nbuild_type: "mac_server"}
activate api
api -> api: DeviceInfo.guess_from_request()\n→ device_type = DESKTOP
api -> api: initiate_desktop_attachment()
note right
Создание СУРРОГАТНОГО
attachment token
(пользователь не создавал!)
end note
api -> db: FIND user by login\n"user@example.com"
db --> api: {_id: "100-user123"}
api -> db: CREATE attachment_token\n{_id: "ABC123",\nuser: "100-user123",\nname: "MacBook Pro",\ntype: "serial_number"}
api -> api: consume_attachment_token()
api -> db: CREATE/UPDATE server\n{uin: generate(),\nowner: "100-user123",\nname: "MacBook Pro",\npassword: random()}
db --> api: {uin: "100-abc...",\npassword: "Kfnl31ba..."}
api --> app: {uin: "100-abc...",\npassword: "Kfnl31ba...",\nname: "MacBook Pro"}
deactivate api
== ЭТАП 3: Сохранение credentials ==
app -> app: Сохранение в config:\n~/Library/.../config.xml\n<uin>100-abc...</uin>\n<password>Kfnl31ba...</password>
== ЭТАП 4: Подключение к облаку ==
app -> broker: CONNECT\n{uin: "100-abc...",\npassword: "Kfnl31ba..."}
activate broker
broker -> db: VERIFY credentials
db --> broker: OK
broker -> db: UPDATE servers\n{online: true}
broker --> app: Connected
deactivate broker
note right of app
Сервер подключен к облаку
Статус: ONLINE
end note
== ЭТАП 5: Обнаружение доступных камер ==
app -> app: Сканирование устройств:\n- USB камеры\n- IP-камеры (UPnP/ONVIF)\n- Вручную добавленные
app -> camera: Опрос устройства
camera --> app: {name: "FaceTime HD",\ntype: "USB",\ncodec: "h264",\nresolution: 1280x720}
app -> user: Показывает список:\n☐ FaceTime HD Camera\n☐ IP Camera (192.168.1.105)
note right of user
Пользователь видит все
доступные камеры
и выбирает нужные
end note
== ЭТАП 6: Активация камеры ==
user -> app: Выбирает ☑ FaceTime HD Camera
app -> app: Присваивает ID камере:\ncam_id = 65536
app -> broker: NOTIFICATION: new_cams\n{server_id: "100-abc...",\ncameras: {\n "65536": {\n name: "FaceTime HD",\n online: true,\n type: "web"\n }\n}}
activate broker
broker -> api: Пересылка notification
deactivate broker
activate api
api -> api: roster.new_cams()
api -> db: UPDATE servers.cameras.65536\n{name: "FaceTime HD",\nonline: true,\nbilling_initialized: true}
api -> api: b5_new_cams()\nПодключение услуг
api -> db: CREATE cam_services.65536\n{archive, notifications, ...}
api -> api: Отправка метрики\nDeviceCreationMetric
deactivate api
== ЭТАП 7: Камера доступна ==
user -> api: GET /cameras\nAuthorization: Bearer <access_token>
activate api
api -> db: FIND servers\nwhere owner_id = "100-user123"
db --> api: [{cameras: {"65536": {...}}}]
api --> user: [\n {\n id: "100-abc...:65536",\n name: "FaceTime HD Camera",\n type: "web",\n online: true\n }\n]
deactivate api
== ЭТАП 8: Добавление второй камеры (IP) ==
user -> app: Добавить IP-камеру\nrtsp://192.168.1.105
app -> app: cam_id = 131072
app -> broker: NOTIFICATION: new_cams\n{cameras: {\n "131072": {...}\n}}
note right
Процесс повторяется
для каждой новой камеры
IDs инкрементируются:
65536, 131072, 196608, ...
end note
@enduml

View file

@ -0,0 +1,123 @@
@startuml Device Types Hierarchy
package "Ivideon Devices" {
abstract class "Server" as server {
+ id: UIN
+ owner_id: user_id
+ device_type: string
+ cameras: dict
--
Точка подключения к облаку
}
class "Desktop Server" as desktop {
device_type = "desktop"
build_type = "windows*|linux*|mac*"
--
Может иметь МНОГО камер
- USB камеры
- IP камеры через сеть
}
class "Embedded Camera" as embedded {
device_type = "camera"
build_type = "*-camera"
--
Имеет ОДНУ камеру (ID = "0")
IP-камера со встроенным Ivideon
}
class "DVR/NVR" as dvr {
device_type = "dvr"
build_type = "*dvr*"
--
Может иметь МНОГО камер
Аналоговые камеры
}
class "Doorbell" as doorbell {
device_type = "doorbell"
build_type = "doorbell-*"
--
Имеет ОДНУ камеру
Видеодомофон
}
class "Cloud Bridge" as bridge {
device_type = "cloud_bridge"
--
Мост для подключения
сторонних камер
}
abstract class "Camera" as camera {
+ id: "uin:cam_id"
+ server: uin
+ type: string
+ owner_id: user_id
--
Источник видеопотока
}
class "Embedded Type" as cam_embedded {
type = "embedded"
--
Встроенная IP-камера
Сервер = камера
cam_id = "0"
}
class "IP Type" as cam_ip {
type = "ip"
--
IP-камера через сеть
Подключена к серверу
}
class "Web Type" as cam_web {
type = "web"
--
USB веб-камера
Подключена к десктопу
}
class "Analog Type" as cam_analog {
type = "analog"
--
Аналоговая камера
Подключена через DVR
}
server <|-- desktop
server <|-- embedded
server <|-- dvr
server <|-- doorbell
server <|-- bridge
camera <|-- cam_embedded
camera <|-- cam_ip
camera <|-- cam_web
camera <|-- cam_analog
desktop "1" *-- "0..*" cam_ip : has
desktop "1" *-- "0..*" cam_web : has
embedded "1" *-- "1" cam_embedded : has (ID="0")
dvr "1" *-- "1..*" cam_analog : has
doorbell "1" *-- "1" cam_embedded : has
}
note right of embedded
IP-камера = Сервер + 1 Камера
Почему регистрируется сервер?
Потому что это унифицированный
способ для ВСЕХ типов устройств
end note
note right of desktop
Обычный сервер = Сервер + N Камер
Сначала подключается сервер,
потом он сообщает о камерах
end note
@enduml

View file

@ -0,0 +1,85 @@
@startuml Folder Structure
package "MongoDB Collection: folders" {
object "Root Folder" as root {
_id = "root_abc123"
name = "__root__"
parents = []
objects = []
owner_id = "123456"
owner_name = "user@example.com"
root = true
}
object "Folder: Office" as office {
_id = "folder_office_xyz"
name = "Office"
parents = ["root_abc123"]
objects = [
{object_type: "camera", object_id: "cam_1"},
{object_type: "camera", object_id: "cam_2"}
]
owner_id = "123456"
}
object "Folder: Warehouse" as warehouse {
_id = "folder_warehouse_qwe"
name = "Warehouse"
parents = ["root_abc123"]
objects = [
{object_type: "camera", object_id: "cam_3"},
{object_type: "server", object_id: "srv_1"}
]
owner_id = "123456"
}
object "Subfolder: Entrance" as entrance {
_id = "folder_entrance_asd"
name = "Entrance"
parents = ["root_abc123", "folder_office_xyz"]
objects = [
{object_type: "camera", object_id: "cam_4"}
]
owner_id = "123456"
}
}
object "User" as user {
_id = 123456
login = "user@example.com"
root_folder = "root_abc123"
}
user --> root : root_folder
root --> office : subfolder
root --> warehouse : subfolder
office --> entrance : subfolder
note right of root
При создании пользователя
создается пустая root_folder
objects = []
parents = []
root = true
end note
note right of office
Пользователь может создавать
папки для организации камер:
- Офис
- Склад
- Парковка
и т.д.
end note
note right of entrance
Поддерживается вложенность:
parents = [root, office]
Уровень вложенности:
level = len(parents) = 2
end note
@enduml

View file

@ -0,0 +1,29 @@
@startuml
participant "Video Node" as VN
participant "crowd.node\n(scheduler)" as Scheduler
queue "Redis Queue" as Redis
participant "crowd.node\n(analyzer)" as Analyzer
participant "crowd.backend" as Backend
participant "Tevian API" as Tevian
database MongoDB
== Периодическая работа (каждые N секунд) ==
Scheduler -> MongoDB: Найти камеры с queue_detection
Scheduler -> Scheduler: Проверить, нужно ли обновить зоны
Scheduler -> Redis: push task {"cmd": "analyze_crowd", "params": {...}}
Redis <- Analyzer: pop task
Analyzer -> VN: GET /snapshots/pull (получить кадр)
VN --> Analyzer: JPEG frame
Analyzer -> Analyzer: Повернуть кадр (если нужно)
Analyzer -> Tevian: Detect heads in zones
Tevian --> Analyzer: Detections
Analyzer -> Analyzer: Определить triggered zones
Analyzer -> Analyzer: Нарисовать зоны на кадре
Analyzer -> Backend: Send measurement + image URL
Backend -> MongoDB: Save measurement
Note over Scheduler, Analyzer: Scheduler и Analyzer работают\nв одном сервисе crowd.node,\nно scheduler запускается по cron
@enduml

View file

@ -0,0 +1,27 @@
@startuml
actor User
participant "analytics_manager\n(balancer)" as Manager
participant "ANL Node\n(supervisor)" as Node
participant "Worker 1\n(camera 1)" as W1
participant "Worker 2\n(camera 2)" as W2
database MongoDB
User -> Manager: Enable fire_analytics for camera
Manager -> MongoDB: Find available ANL node
Manager -> Node: Assign camera to node
Node -> W1: spawn(mp.Process)
activate W1
W1 -> W1: Pull video stream
W1 -> W1: Process frames
W1 -> Manager: Send events via WebSocket
deactivate W1
Note over Node: Supervisor создает отдельный\nmultiprocessing воркер\nдля каждой камеры
Node -> Node: Check workers health
Node -> W2: spawn(mp.Process) for camera 2
activate W2
W2 -> W2: Process video stream
deactivate W2
@enduml

View file

@ -0,0 +1,101 @@
@startuml geo_architecture
skinparam componentStyle rectangle
skinparam backgroundColor white
title Архитектура сервиса Geo (Geobalancing Server)
package "Geo Service" {
package "geo-backend" as backend {
component [handlers.py\nHTTP обработчики] as handlers
component [balancing.py\nЛогика балансировки] as balancing
component [stream_request.py\nОбработка стримов] as stream
component [auth.py\nАвторизация] as auth
component [billing.py\nБиллинг] as billing
}
package "geo-api" as api {
component [REST API\nУправление серверами] as rest_api
component [Управление группами] as groups_api
component [Управление локациями] as locations_api
}
package "geo-resolver" as resolver {
component [IP Resolver\nГеолокация по IP] as ip_resolver
component [Geo Providers\n(IP-API, MaxMind)] as geo_providers
component [Resolver Cache] as resolver_cache
}
package "geo-web" as web {
component [Web Interface\nАдмин панель] as web_ui
}
package "Общие компоненты" {
component [ip_location.py\nIPLocation] as ip_location
component [api_client.py\nGeoApiClient] as api_client
component [errors.py] as errors
}
}
package "Хранилище данных" {
database "MongoDB" as mongo
frame "Кэши" {
component [place_cache] as place_cache
component [group_cache] as group_cache
component [dns_cache] as dns_cache
}
}
' Связи
handlers --> balancing
handlers --> stream
handlers --> auth
handlers --> billing
balancing --> ip_resolver
balancing --> group_cache
balancing --> place_cache
stream --> auth
ip_resolver --> geo_providers
ip_resolver --> resolver_cache
web_ui --> api_client
api_client ..> rest_api : HTTP
rest_api --> mongo
backend --> mongo
backend --> place_cache
backend --> group_cache
handlers ..> ip_location
balancing ..> ip_location
note right of backend
**Основные задачи:**
• Обработка запросов на балансировку
• Выбор оптимального сервера
• Редиректы на ноды
• Авторизация и биллинг
end note
note right of resolver
**Выделен отдельно из-за**
**большого потребления памяти**
Позволяет запускать geo-backend
в несколько процессов
end note
note right of mongo
**Хранит:**
• Серверы (nodes)
• Группы серверов
• Географические локации
• Привязки локаций к группам
• Резервации
• Настройки хранилищ
end note
@enduml

View file

@ -0,0 +1,135 @@
@startuml geo_balancing_sequence
title Получение пути до камеры через Geo (пример: Analytics запрашивает стрим)
actor "Сервис Аналитики" as analytics
participant "geo-backend\n(handlers.py)" as geo_handler
participant "GeoResolver\nClient" as resolver
participant "geo-resolver" as geo_resolver
database "place_cache\ngroup_cache" as cache
participant "balancing.py" as balancing
participant "auth.py" as auth
participant "auth-server" as auth_server
participant "billing.py" as billing
database "MongoDB" as mongo
participant "Выбранная\nНода" as node
== Запрос стрима ==
analytics -> geo_handler: GET /flv?server=UIN&camera=CAM_ID&access_token=TOKEN
note right
Параметры запроса:
• server: UIN видеосервера
• camera: ID камеры
• access_token: токен доступа
• format: flv/hls/ws-fmp4
end note
geo_handler -> geo_handler: Извлечь IP клиента\nиз запроса
activate geo_handler
== Определение геолокации ==
geo_handler -> resolver: resolve(client_ip)
activate resolver
resolver -> geo_resolver: HTTP GET /resolve?ip=CLIENT_IP
activate geo_resolver
geo_resolver -> geo_resolver: Проверить кэш
geo_resolver -> geo_resolver: Запросить у Geo Providers\n(IP-API, MaxMind)
geo_resolver --> resolver: IPLocation(\n country_code="RU",\n region_iso="MOW",\n city="Москва",\n coordinates=[55.75, 37.61]\n)
deactivate geo_resolver
resolver --> geo_handler: IPLocation
deactivate resolver
== Поиск оптимальной ноды ==
geo_handler -> cache: Получить группу для локации
activate cache
cache --> geo_handler: geo_group_id
deactivate cache
geo_handler -> balancing: select_server(\n partner_id,\n geo_group,\n pattern="streaming"\n)
activate balancing
balancing -> cache: node_for_pattern(\n partner_id,\n geo_group,\n "streaming"\n)
activate cache
cache -> cache: Выбор по весам\nи доступности
cache --> balancing: selected_node_id
deactivate cache
alt Нода не найдена
balancing -> balancing: select_fallback_pattern()
balancing -> cache: Повторный поиск с fallback
cache --> balancing: fallback_node_id
end
balancing --> geo_handler: BalancingResponse(\n node_id,\n node_host\n)
deactivate balancing
== Авторизация ==
geo_handler -> auth: authorize_token(\n token,\n rights="view",\n object_type="camera",\n object_id=CAM_ID\n)
activate auth
auth -> auth_server: POST /authorize
activate auth_server
auth_server -> mongo: Проверить права\nпользователя
activate mongo
mongo --> auth_server: user_rights
deactivate mongo
auth_server --> auth: {\n owner_type: "user",\n owner_id: "USER_ID"\n}
deactivate auth_server
auth --> geo_handler: token_info
deactivate auth
== Проверка биллинга ==
geo_handler -> billing: check_limits(user_id, camera_id)
activate billing
billing -> mongo: Получить тариф\nи лимиты
activate mongo
mongo --> billing: tariff_info
deactivate mongo
billing --> geo_handler: limits_ok
deactivate billing
== Получение данных о сервере ==
geo_handler -> mongo: Получить информацию\nо сервере и камере
activate mongo
mongo --> geo_handler: {\n server: {dc, online, cameras},\n camera: {online, streams}\n}
deactivate mongo
== Формирование редиректа ==
geo_handler -> geo_handler: Сформировать URL редиректа:\n• Схема (http/https/wss)\n• Хост выбранной ноды\n• Параметры (server, camera, token)\n• Подпись запроса
geo_handler --> analytics: HTTP 302 Redirect\nLocation: https://node-moscow-1.ivideon.com/flv?server=UIN&camera=CAM_ID
deactivate geo_handler
== Подключение к ноде ==
analytics -> node: GET /flv?server=UIN&camera=CAM_ID&sig=...
activate node
node --> analytics: FLV Stream (видеопоток)
deactivate node
note over analytics, node
После редиректа сервис аналитики
получает прямое подключение к ноде,
географически близкой к нему
end note
@enduml

View file

@ -1,91 +0,0 @@
@startuml PersonSDK Architecture
!theme plain
skinparam backgroundColor white
skinparam componentStyle rectangle
package "PersonSDK" {
component "IRuntime" as runtime
component "Configuration" as config
interface "IBackend" as backend
component "CPU Backend" as cpu_backend
component "GPU Backend" as gpu_backend
component "Other Backends" as other_backends
interface "IHeadDetector" as head_detector
interface "IBodyDetector" as body_detector
interface "IHeadPosition" as head_position
interface "IBodyPosition" as body_position
interface "IFrameView" as frame_view
' Advanced processing components
interface "IBodyExtractor" as body_extractor
interface "IBodyClassifier" as body_classifier
interface "ITracker" as tracker
interface "IUpperBodyExtractor" as upper_extractor
interface "IUpperBodyClassifier" as upper_classifier
interface "IBodyMatcher" as body_matcher
' Utility interfaces
interface "ISizeRule" as size_rule
interface "IRoi" as roi
interface "KwArg" as kwarg
}
' Relationships
runtime --> config : uses
runtime --> backend : creates
backend <|.. cpu_backend
backend <|.. gpu_backend
backend <|.. other_backends
backend --> head_detector : new_head_detector()
backend --> body_detector : new_body_detector()
backend --> body_extractor : new_body_extractor()
backend --> body_classifier : new_body_classifier()
backend --> tracker : new_tracker()
head_detector --> head_position : returns vector<>
body_detector --> body_position : returns vector<>
head_detector --> frame_view : processes
body_detector --> frame_view : processes
head_detector --> kwarg : uses
body_detector --> kwarg : uses
head_detector --> size_rule : uses
head_detector --> roi : uses
body_extractor --> body_position : processes
body_classifier --> body_position : processes
tracker --> head_position : tracks
tracker --> body_position : tracks
upper_extractor --> body_position : processes
upper_classifier --> body_position : processes
body_matcher --> body_position : matches
note top of runtime
Entry point for SDK
Handles configuration
and backend management
end note
note right of head_detector
Main detection interface
set_defaults(args)
run(image, args)
Not thread-safe by itself:
callers must provide external synchronization
end note
note bottom of kwarg
Flexible parameter system:
Type-safe variants
Named parameters
Default value management
end note
@enduml

View file

@ -0,0 +1,22 @@
@startuml
title Импорт на уровне модуля (НЕ работает)
participant "pytest" as P
participant "test_module" as T
participant "review_task.py" as R
participant "db.maas()" as D
participant "settings" as S
P -> T: import test_module
T -> R: import MaasReviewTask
R -> R: define class MaasReviewTask
R -> D: COLLECTION = db.maas().review_tasks
D -> S: get_mongo_uri('maas')
S --> D: Error: settings not initialized
note right: settings еще не настроены!
P -> P: setup fixtures
note right: Слишком поздно!
P -> T: run test_track_work_time()
@enduml

View file

@ -0,0 +1,24 @@
@startuml
title Импорт внутри функции (работает)
participant "pytest" as P
participant "test_module" as T
participant "review_task.py" as R
participant "db.maas()" as D
participant "settings" as S
P -> T: import test_module
note right: Импорта MaasReviewTask нет!
P -> P: setup fixtures
P -> S: configure settings
S --> P: settings ready
P -> T: run test_track_work_time()
T -> R: import MaasReviewTask
note right: Импорт происходит ЗДЕСЬ
R -> R: define class MaasReviewTask
R -> D: COLLECTION = db.maas().review_tasks
D -> S: get_mongo_uri('maas')
S --> D: Success!
@enduml