Update repo from work PC
parent 8141aaa323
commit b57cb47de3
11 changed files with 125 additions and 90 deletions

2 .vscode/settings.json vendored

@@ -1,5 +1,5 @@
 {
-    "workbench.colorTheme": "Box UK Contrast (rainglow)",
+    "workbench.colorTheme": "Default Dark Modern",
     "cSpell.words": [
         "descr",
         "dotenv",

0 drawings/dict_and_set_in_memory.puml Normal file
0 ivideon/faq_с_Лешей.puml Normal file

@@ -1,96 +1,33 @@
-@startuml
-title Work-time report generation (face_work_time_json)
-
-actor Client
-participant "face/api" as API
-database "MongoDB\nuser_reports" as ReportsDB
-participant "report_checker\n(Worker)" as Checker
-participant "reporter.py" as Reporter
-participant "WorkTimeJsonReport" as WorkTime
-participant "data_loader" as DataLoader
-database "MongoDB\nface_events" as EventsDB
-database "MongoDB\nfaces" as FacesDB
-participant "tools.py" as Tools
-database "Redis" as Redis
-
-== 1. Creating the report task ==
-Client -> API: POST /reports\n(type=face_work_time_json)
-API -> API: FaceReport.create()\nvalidate options, build query
-API -> ReportsDB: insert(status='in_queue')
-API --> Client: {success: true, id: report_id}
-
-== 2. Report processing by the worker ==
-loop every SLEEP_INTERVAL seconds
-Checker -> ReportsDB: find_one_and_update\n(status='in_queue' → 'in_progress')
-end
-
-Checker -> Checker: ProcessPoolExecutor\ncreate_report_sync(report)
-note right: New process\nwith asyncio.run()
-
-Checker -> Reporter: create_report(report)
-Reporter -> Reporter: @with_context\nset the user context
-Reporter -> Reporter: Pick the class from REGISTRY\n(face_work_time_json → WorkTimeJsonReport)
-Reporter -> WorkTime: make_report()
-
-== 3. Loading data ==
-WorkTime -> DataLoader: get_events(query)
-DataLoader -> EventsDB: find(query)\n[best_shot_time, face_id, camera_id]
-EventsDB --> DataLoader: events[]
-DataLoader --> WorkTime: events
-
-WorkTime -> DataLoader: get_faces(galleries, events,\ncameras_in, cameras_out, tz)
-
-loop for each event
-DataLoader -> DataLoader: result[face_id] = GroupedFace(face_id)
-DataLoader -> DataLoader: group.handle(best_shot_time, direction)
-end
-
-DataLoader -> DataLoader: _get_faces_without_events()\n(faces from galleries with no events)
-DataLoader -> FacesDB: find(_id NOT IN face_ids,\ngallery IN galleries)
-FacesDB --> DataLoader: faces_without_events
-
-DataLoader -> DataLoader: face_ids = events + faces_without_events
-
-== 4. Enriching the data with photos ==
-DataLoader -> DataLoader: _get_faces_data(face_ids)
-DataLoader -> FacesDB: find(_id IN face_ids)\n[person, photos, description, gallery_id]
-FacesDB --> DataLoader: faces_data[]
-
-loop for each face
-DataLoader -> DataLoader: _get_photo(face_data)
-alt photos is empty or None
-DataLoader --> DataLoader: return None
-note right #pink: **CAUSE 1**\nphotos is missing
-else photos present
-DataLoader -> DataLoader: pick the default photo\nor the first one in the list
-DataLoader -> Tools: get_thumb_url(photo)
-alt thumbnails.thumbnail_200.url present
-Tools --> DataLoader: url
-else thumbnails.thumbnail_200.obj_ref present
-Tools -> Tools: storage.generate_presigned_url()
-Tools --> DataLoader: presigned_url
-else no thumbnail_200
-Tools --> DataLoader: None
-note right #pink: **CAUSE 2**\nno thumbnail_200
-end
-end
-DataLoader -> DataLoader: face.photo = photo_url
-end
-
-DataLoader --> WorkTime: List[GroupedFace]
-
-== 5. Generating and saving ==
-WorkTime -> WorkTime: _generate(query, faces, tz, schedule)
-note right: Builds the JSON:\n[name, photo, face_id, ...]
-
-WorkTime -> Redis: setex(key, json, TTL=1 day)
-WorkTime -> ReportsDB: update(status='done',\n_redis_key=key)
-
-== 6. Fetching the result ==
-Client -> API: GET /reports/{id}/json
-API -> ReportsDB: find(_id=id)
-API -> Redis: get(_redis_key)
-Redis --> API: json_data
-API --> Client: {summary: {...}, details: {...}}
-
-@enduml
+@startgantt
+printscale daily zoom 3
+[Sprint 2026-2] requires 14 days
+Project starts 2026-01-26
+-- API-9487 --
+[Sprint 2026-2] starts 2026-01-26
+[Task analysis] starts 2026-01-26
+[Task analysis] is colored in lightgreen
+[Install PersonSDK on the node] requires 1 days
+[Install PersonSDK on the node] is colored in red
+[Install PersonSDK on the node] starts 2026-01-27
+[Dockerize PersonSDK] starts 2026-01-28
+[Dockerize PersonSDK] starts at [Install PersonSDK on the node]'s end
+[Dockerize PersonSDK] is colored in lightgreen
+[Dockerize PersonSDK] requires 2 days
+[Load testing of PersonSDK] starts 2026-01-30
+[Load testing of PersonSDK] is colored in lightgreen
+[Load testing of PersonSDK] requires 4 days
+[Dockerize TevianSDK] starts 2026-02-02
+[Dockerize TevianSDK] is colored in lightgreen
+[Dockerize TevianSDK] requires 1 days
+-- API-9488 --
+-- API-9489 --
+2026-01-31 to 2026-02-01 are colored LightBlue
+2026-02-07 to 2026-02-08 are colored LightBlue
+@endgantt
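
The removed sequence diagram walks the face_work_time_json pipeline: the worker claims a queued report with an atomic find_one_and_update, builds the JSON, caches it in Redis with a one-day TTL, and marks the report done. A minimal Python sketch of that hand-off, assuming pymongo/redis-py clients; the collection name, key scheme, and helper names below are illustrative, not the project's actual code:

```python
# Illustrative sketch of steps 2 and 5 from the (removed) sequence diagram:
# claim a queued report atomically, then cache the generated JSON in Redis.
# Collection/key names and helpers are hypothetical, not the real project code.
import json
from datetime import datetime, timezone

from pymongo import MongoClient, ReturnDocument
from redis import Redis

mongo = MongoClient("mongodb://localhost:27017")
reports = mongo["face"]["user_reports"]          # assumed database/collection names
redis = Redis(host="localhost", port=6379)

REDIS_TTL = 24 * 60 * 60                         # the diagram mentions TTL = 1 day


def claim_next_report():
    """Move one report from 'in_queue' to 'in_progress' atomically (step 2)."""
    return reports.find_one_and_update(
        {"status": "in_queue"},
        {"$set": {"status": "in_progress",
                  "started_at": datetime.now(timezone.utc)}},
        return_document=ReturnDocument.AFTER,
    )


def finish_report(report_id, payload: dict) -> None:
    """Cache the generated JSON and mark the report done (step 5)."""
    key = f"report:{report_id}"                  # hypothetical key scheme
    redis.setex(key, REDIS_TTL, json.dumps(payload))
    reports.update_one({"_id": report_id},
                       {"$set": {"status": "done", "_redis_key": key}})
```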

28 python/schemes/dict_in_memory_before36.puml Normal file

@@ -0,0 +1,28 @@
@startuml dict_before_36
package "PyDictObject" {
  card "ma_size: 8" as size
  card "ma_used: 3" as used
  note right of used
    Entries actually in use
  end note
}

package "Hash Table Array (size=8)" {
  card "Index 0:\n[empty]" as idx0 #FFE6E6
  card "Index 1:\nhash=0x12AF\nkey→'name'\nvalue→'John'" as idx1 #E6F3FF
  card "Index 2:\n[empty]" as idx2 #FFE6E6
  card "Index 3:\nhash=0x7F23\nkey→'age'\nvalue→25" as idx3 #E6F3FF
  card "Index 4:\n[empty]" as idx4 #FFE6E6
  card "Index 5:\nhash=0x3B81\nkey→'city'\nvalue→'Moscow'" as idx5 #E6F3FF
  card "Index 6:\n[empty]" as idx6 #FFE6E6
  card "Index 7:\n[empty]" as idx7 #FFE6E6
}

note bottom of idx0
  5 of the 8 slots are empty
  This is "dead" space
end note

size --> idx0
used --> idx1
@enduml
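
The diagram above shows the pre-3.6 layout: a single sparse hash table where most slots sit empty. One hedged way to observe the preallocation from Python itself (exact byte counts differ between CPython versions; only the pattern matters):

```python
# Watch the dict footprint as keys are added: sys.getsizeof() stays constant
# while the preallocated 8-slot table still has room, then jumps on resize.
# Byte values are CPython-version specific; only the pattern matters here.
import sys

d = {}
print("empty:", sys.getsizeof(d))

for n, key in enumerate(["name", "age", "city", "job", "id", "dept"], start=1):
    d[key] = n
    print(f"{n} keys:", sys.getsizeof(d))
```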

34 python/schemes/dict_in_memory_post36_resize_1.puml Normal file

@@ -0,0 +1,34 @@
@startuml compact_dict_before_resize
title Compact Dict (3.6+) BEFORE resize - capacity=8

package "Index array [8]" {
  card "idx[0]: EMPTY (-1)" as i0 #FFE6E6
  card "idx[1]: 2" as i1 #E6F3FF
  card "idx[2]: EMPTY (-1)" as i2 #FFE6E6
  card "idx[3]: 0" as i3 #E6F3FF
  card "idx[4]: EMPTY (-1)" as i4 #FFE6E6
  card "idx[5]: 3" as i5 #E6F3FF
  card "idx[6]: 1" as i6 #E6F3FF
  card "idx[7]: 4" as i7 #E6F3FF
}

package "Entries table (entries) [5]" {
  card "entry[0]:\nhash('name')=0x1A3\nkey='name'\nvalue='Alice'" as e0
  card "entry[1]:\nhash('age')=0x2E6\nkey='age'\nvalue=25" as e1
  card "entry[2]:\nhash('city')=0x4B1\nkey='city'\nvalue='Moscow'" as e2
  card "entry[3]:\nhash('job')=0x7D5\nkey='job'\nvalue='Dev'" as e3
  card "entry[4]:\nhash('id')=0x9F7\nkey='id'\nvalue=42" as e4
}

i3 --> e0
i6 --> e1
i1 --> e2
i5 --> e3
i7 --> e4

note bottom of e4
  dk_nentries = 5
  dk_usable = 3 (slots left)
end note

@enduml
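
This is CPython's compact dict layout (3.6+): a small index array pointing into a dense, insertion-ordered entries array, which is also why dicts iterate in insertion order. A toy pure-Python model of the layout shown above, purely illustrative (real CPython uses perturbed probing, variable-width index entries, and resize logic that this sketch omits):

```python
# Toy model of the compact-dict layout: indices[] holds positions into the
# dense entries list; -1 marks an EMPTY index slot, as in the diagram.
class CompactDict:
    def __init__(self, capacity: int = 8):
        self.indices = [-1] * capacity            # idx[i] -> slot in entries, -1 = EMPTY
        self.entries = []                         # insertion-ordered (hash, key, value)

    def _probe(self, key):
        mask = len(self.indices) - 1
        i = hash(key) & mask
        while True:
            slot = self.indices[i]
            if slot == -1 or self.entries[slot][1] == key:
                return i, slot
            i = (i + 1) & mask                    # simplified linear probing

    def __setitem__(self, key, value):
        i, slot = self._probe(key)
        if slot == -1:
            self.indices[i] = len(self.entries)   # new key -> append a dense entry
            self.entries.append((hash(key), key, value))
        else:
            h, k, _ = self.entries[slot]
            self.entries[slot] = (h, k, value)    # existing key -> overwrite in place

    def __iter__(self):
        return (k for _, k, _ in self.entries)    # iteration order == insertion order


d = CompactDict()
for k, v in [("name", "Alice"), ("age", 25), ("city", "Moscow"), ("job", "Dev"), ("id", 42)]:
    d[k] = v
print(list(d))  # ['name', 'age', 'city', 'job', 'id']
```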

0 python/to_thread/timeline.puml Normal file
0 python/to_thread/timeline_2.puml Normal file
0 python/to_thread/with_to_thread.puml Normal file
36 python/to_thread/wo_to_thread.puml Normal file

@@ -0,0 +1,36 @@
@startuml scenario1_blocking_io
title Synchronous I/O WITHOUT to_thread() - the Event Loop is blocked

participant "Event Loop\n(Main Thread)" as EL
participant GIL
participant "OS Kernel" as OS

note over EL,GIL #FFB6C1: 🔒 GIL is held

EL -> EL: async def handler()
EL -> EL: data = requests.get(url) 🚨

note over EL #FFB6C1
  **The Event Loop is BLOCKED!**
  Other async tasks are waiting
  Thousands of coroutines sit idle
end note

EL -> GIL: Holds the GIL
EL -> OS: socket.recv() (system call)

note over OS
  The OS kernel reads data
  from the network
  (this can take seconds!)
end note

OS --> EL: Data received

note over EL #FFD700
  The Event Loop is free again
  But the time is already lost!
end note

EL -> EL: return data
@enduml
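
The with_to_thread.puml counterpart is added by this commit but shown here with no content; the contrast the two diagrams draw is asyncio.to_thread() (Python 3.9+): run the blocking call in a worker thread so the event loop keeps scheduling other coroutines while that thread waits on the socket (the GIL is released during blocking I/O). A minimal sketch; the URL and function names are placeholders:

```python
# Offload a blocking requests.get() to a worker thread with asyncio.to_thread(),
# so the event loop is NOT frozen the way the diagram above shows.
import asyncio

import requests


def fetch_sync(url: str) -> bytes:
    # Blocking I/O: fine in a worker thread, harmful directly on the event loop.
    return requests.get(url, timeout=10).content


async def handler(url: str) -> bytes:
    # Without to_thread() this call would block the loop, as in the diagram.
    return await asyncio.to_thread(fetch_sync, url)


async def main() -> None:
    data, _ = await asyncio.gather(
        handler("https://example.com"),   # placeholder URL
        asyncio.sleep(0.1),               # keeps running while the request blocks a thread
    )
    print(len(data))


if __name__ == "__main__":
    asyncio.run(main())
```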