Initial migration

This commit is contained in:
pro100ton 2024-11-02 14:12:45 +03:00
commit ed782d491a
1113 changed files with 102957 additions and 0 deletions

11
.coveragerc Normal file
View file

@ -0,0 +1,11 @@
[run]
omit =
/usr/local/*
/usr/lib/*
/usr/lib64/*
*tests*
*__init__.py
*migrations*
[html]
title=ARMA Management Console coverage report

41
.env.dev Normal file
View file

@ -0,0 +1,41 @@
DJANGO_SETTINGS_MODULE=console.settings.dev
DEBUG=1
MIN_LOG_LEVEL=INFO
DJANGO_ALLOWED_HOSTS=*
POSTGRES_DB=armaconsole
POSTGRES_USER=armaconsole
POSTGRES_PASSWORD=arma_console_password
POSTGRES_HOST=db
POSTGRES_PORT=5432
REDIS_HOST=redis
REDIS_PORT=6379
DOCKER=1
DOCKER_DJANGO_NAME=djangoapp
DOCKER_DB_NAME=db
DOCKER_NGINX_NAME=pnginxserver
DOCKER_SELERY_BEAT_NAME=celerybeatcontainer
DOCKER_SELERY_NAME=celerycontainer
DOCKER_REDIS_NAME=redis
DOCKER_LOGSTASH_NAME=logstash
DOCKER_VECTOR_NAME=vector
DOCKER_KIBANA_NAME=kibana
DOCKER_PGADMIN_SERVER=pgadminserver
DOCKER_FLOWER_NAME=flower
DOCKER_ELASTIC_NAME=elasticsearch
DOCKER_SELENIUM_NAME=seleniumhub
DOCKER_SCAN_SUGGEST=false
COLUMNS=80
ELK_VERSION=7.12.0
ELASTIC_PASSWORD=changeme
ELASTIC_USER=elastic
PGADMIN_DEFAULT_EMAIL=pgadmin4@pgadmin.org
PGADMIN_DEFAULT_PASSWORD=admin
WEB_UI_PORT=9090
WEB_PDB_PORT=7755
CORRELATOR_API=http://correlator:5566
ELASTICSEARCH_API=elasticsearch:9200
NCIRCC_DOMAIN_NAME=https://test-lk.cert.gov.ru
LICENSE_CLIENT_URL=http://license-client:8050
SERVE=django
PUBLIC_URL=/static/react
SECRET_KEY=n&gyo1luo0!wj9y!drq!5n02s)9h80+o3nrxo=61e)_ge14(4l

19
.env.prod Normal file
View file

@ -0,0 +1,19 @@
DJANGO_SETTINGS_MODULE=console.settings.prod
DEBUG=0
LOG_PATH=/var/log/armaconsole
DJANGO_ALLOWED_HOSTS=*
DEBCONF_DBCONF_FPATH=/etc/armaconsole/debconf_dbconfig
REDIS_HOST=localhost
REDIS_PORT=6379
RABBIT_URL=http://localhost:5672
COLUMNS=80
ELK_VERSION=7.12.0
ELASTIC_PASSWORD=changeme
ELASTIC_USER=elastic
MIN_LOG_LEVEL=INFO
WEB_UI_PORT=9090
PUBLIC_DIR=/var/www/armaconsole/public
CORRELATOR_URL=http://localhost:5566
ELASTIC_URL=http://127.0.0.1:9200
LICENSE_CLIENT_URL=http://127.0.0.1:8050
NCIRCC_DOMAIN_NAME=https://lk.cert.gov.ru

55
.gitignore vendored Normal file
View file

@ -0,0 +1,55 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
# Unit test / coverage reports
htmlcov/
.coverage
.coverage.*
.cache
.pytest_cache/
# Translations
*.mo
*.po
# Django stuff:
*.log.xz
*.log
db.sqlite3
db.sqlite3-journal
# Environments
.env
.venv
env/*
venv/
ENV/
env.bak/
venv.bak/
# Pyre type checker
.vscode/
.idea
*/migrations/0*
dockerlogs
/static
# Cache
django_cache/*
upload/*
.DS_Store
public/test_coverage/*
*.csv
*.zip
*.json
!deb_old/skeleton/var/log/armaconsole/bad_input.log
/artifacts/
# Auto generated by react
console/templates/console/index.html

226
.gitlab-ci.yml Normal file
View file

@ -0,0 +1,226 @@
include:
- project: 'iwa/adm/ci/cicd_extra'
ref: $cicd_branch
file:
- 'ymls/header.yml'
- 'ymls/save_env_vars.yml'
- 'ymls/templates/template_version_upload.yml'
variables:
actions:
value: ""
description: "Keys: unit (unit tests), integ (integration tests), fw (live.firewall tests), build. default:''(all jobs)"
stages:
- save_env_vars
- test
- build
- build_deb_pkg
- version_upload
core_unit_test_job:
stage: test
needs:
- job: save_env_vars_job
artifacts: false
variables:
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
image:
name: nexus.iwarma.ru:8123/iwarma-docker/python_go:3.9-1.16.15-alpine.gitlab
rules:
- if: !reference [.rulesTemplate, testRuleUnit]
- if: !reference [.rulesTemplate, testRuleAlways]
script:
- /bin/sh ./cicd/unit_tests.sh
artifacts:
when: always
paths:
- ./public/test_coverage/index.html
- ./*.log
expire_in: 1 day
tags:
- docker-debian11
core_integ_test_job:
stage: test
needs:
- job: save_env_vars_job
artifacts: false
rules:
- if: !reference [.rulesTemplate, testRuleInteg]
- if: !reference [.rulesTemplate, testRuleAlways]
variables:
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
POSTGRES_PASSWORD: 'postgres'
ES_JAVA_OPTS: "-Xmx512m -Xms512m"
ELASTIC_PASSWORD: changeme
REDIS_PORT: '6379'
image:
name: nexus.iwarma.ru:8123/iwarma-docker/python_go:3.9-1.16.15-alpine.gitlab
#entrypoint: ["/bin/sh"]
services:
- name: registry.iwarma.ru/iwa/dev/console-docker/console-elasticsearch:latest
alias: elasticsearch
command: [ "bin/elasticsearch", "-Ediscovery.type=single-node" ]
- name: postgres:12-alpine3.16
alias: db
- name: redis:alpine3.16
alias: redis
script:
- /bin/sh ./cicd/integration_tests.sh
artifacts:
when: always
paths:
- ./public/test_coverage/index.html
- ./*.log
expire_in: 1 day
tags:
- docker-debian11
core_live_fw_test_job:
stage: test
needs:
- job: save_env_vars_job
artifacts: false
rules:
- if: !reference [.rulesTemplate, testRuleFW]
- if: !reference [.rulesTemplate, testRuleAlways]
variables:
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
image:
name: nexus.iwarma.ru:8123/iwarma-docker/python_go:3.9-1.16.15-alpine.gitlab
script:
- /bin/sh ./cicd/live_fw_tests.sh
artifacts:
when: always
paths:
- ./public/test_coverage/index.html
- ./*.log
expire_in: 1 day
tags:
- docker-debian11-fw
checker_test_job:
stage: test
variables:
GIT_STRATEGY: clone
needs:
- job: save_env_vars_job
artifacts: false
rules:
- if: !reference [.rulesTemplate, testRuleUnit]
- if: !reference [.rulesTemplate, testRuleAlways]
script:
- cd checker
- /bin/bash ../cicd/go_test.sh
tags:
- shell-debian11
core_build_job:
stage: build
needs:
- job: core_unit_test_job
artifacts: false
optional: true
- job: core_integ_test_job
artifacts: false
optional: true
- job: core_live_fw_test_job
artifacts: false
optional: true
rules:
- if: !reference [.rulesTemplate, buildRule]
variables:
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
script:
- python3 ./cicd_extra/build_job.py -n amccore
artifacts:
paths:
- console/static/react
- console/templates/console/index.html
- console/settings/base.py
expire_in: 1 day
tags:
- shell-debian11
checker_build_job:
stage: build
needs:
- job: checker_test_job
artifacts: false
optional: true
rules:
- if: !reference [.rulesTemplate, buildRule]
variables:
GIT_STRATEGY: clone
script:
- python3 ./cicd_extra/build_go_job.py -n amcchecker
artifacts:
paths:
- ./checker/checker
expire_in: 1 day
tags:
- shell-debian11
core_deb_pkg_job:
stage: build_deb_pkg
variables:
GIT_STRATEGY: clone
GIT_SUBMODULE_STRATEGY: recursive
needs:
- job: core_build_job
artifacts: true
rules:
- if: !reference [.rulesTemplate, buildRule]
script:
- python3 ./cicd_extra/pack_job.py -p amccore
artifacts:
paths:
- artifact.json
expire_in: 1 day
tags:
- shell-debian11
checker_deb_pkg_job:
stage: build_deb_pkg
variables:
GIT_STRATEGY: clone
needs:
- job: checker_build_job
artifacts: true
rules:
- if: !reference [.rulesTemplate, buildRule]
script:
- python3 ./cicd_extra/pack_job.py -p amcchecker
artifacts:
paths:
- artifact.json
expire_in: 1 day
tags:
- shell-debian11
core_ver_upload_job:
extends: .version_upload
rules:
- if: !reference [.rulesTemplate, buildRule]
needs:
- job: core_deb_pkg_job
artifacts: true
checker_ver_upload_job:
extends: .version_upload
rules:
- if: !reference [.rulesTemplate, buildRule]
needs:
- job: checker_deb_pkg_job
artifacts: true

8
.gitmodules vendored Normal file
View file

@ -0,0 +1,8 @@
[submodule "correlator"]
path = correlator
url = ../../../../iwa/dev/console/correlator.git
branch = develop
[submodule "license"]
path = license
url = ../../../../iwa/dev/license/client.git
branch = develop

452
CHANGELOG.md Normal file
View file

@ -0,0 +1,452 @@
# Журнал изменений
Формат основан на [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
и этот проект придерживается [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## 1.3.0 [Not released]
### 1.3.0-pre_dev5
### Добавлено
- Реализована интеграция с ГОССОПКА для отправки инцидентов [#996](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/995)
- Реализована настройка и отправка инцидентов в ГОССОПКА и список отправленных инцидентов [#996](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/996)
- Добавлена возможность настройки антивируса на Endpoint [MC-359](https://iwarma.atlassian.net/browse/MC-359)
- Добавлен фикс при загрузке правил сурикаты на ARMAIF [MC-4](https://iwarma.atlassian.net/browse/MC-4)
- Ошибка контроля целостности при синхронизации endpoint [MC-458](https://iwarma.atlassian.net/browse/MC-458)
### 1.3.0-dev5
- Добавлена настройка ротации на странице управления AMAIE [MC-10](https://iwarma.atlassian.net/browse/MC-10)
- Добавлена привилегия `can_download_rotation_files` [MC-113](https://iwarma.atlassian.net/browse/MC-113)
- Добавлена возможность смены своего пароля без прав [MC-356](https://iwarma.atlassian.net/browse/MC-356)
- Добавлена возможность просмотра списка групп корреляции без правила `can_edit_correlation_groups` [MC-110](https://iwarma.atlassian.net/browse/MC-110)
### 1.3.0-dev6
- Исправлено отображение блокировки пользователя с истекшей датой [MC-291](https://iwarma.atlassian.net/browse/MC-291)
- Добавлена фильтрация событий только антивируса при просмотре событий через меню настройки антивируса IE [MC-441](https://iwarma.atlassian.net/browse/MC-441)
### 1.3.0-dev8
- Исправлено отображение кнопки редактирования связанных источников для фаервола и эндпоинта без привилегии [#MC-122](https://iwarma.atlassian.net/browse/MC-122)
## [V3] - Не выпущено
### Добавлено
- Реализована система прав и наследование вложенных прав [#810](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/810)
- Добавлены заранее настроенные группы пользователей [#811](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/811)
- Отображение полей sid и rev на странице списка правил корреляции [#868](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/868)
- Добавить валидаторы пароля при создании пользователя [#827](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/827)
- Удаление всех источников событий при активации новой лицензии [#903](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/903)
- Добавить возможность ручной разблокировки пользователя в случае его блокировки [#826](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/826)
- Сделать API для страницы создания правила корреляции для React [#928](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/928)
- Добавлено сообщение подтверждения действия при смене IF в настройках правил коррелятора [#883](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/883)
- Переработан конфиг нормализации событий endpoint на vector [#763](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/763)
- Переработан конфиг нормализации событий firewall на vector [#762](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/762)
- Добавлены отсутствующие права доступа к api и описание api и прав в openapi схеме [#233](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/233)
### Исправлено
- Ошибки локализации в Системах Защиты [#871](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/871)
- Параметр "ограничение попыток" может быть сохранен меньше нуля [#872](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/872)
- Локализация всплывающего сообщения в Системных настройках [#870](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/870)
- В лог celeryD.log пишется ошибка по работе автоматической карте сети [#799](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/799)
- Ошибки локализации на автоматической карте сети [#843](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/843)
- Некорректная обработка отсутствия индекса в списке событий [#699](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/699)
- Опечатки в Помощи в поиске по событиям [#839](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/839)
- Исправил формат вывода локального времени [#839](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/839)
- Привилегии блока Активы [#637](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/637)
- Не хватает ползунка в отчете об импорте [#787](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/787)
- Не парсится сообщение о нарушении контрольной суммы с Endpoint [#863](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/863)
- Создание дублирующего источника по порту [#844](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/844)
- Период буферизации событий в секундах [#856](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/856)
- Обновление конфигурации с endpoint [#864](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/864)
- Проблемы привилегий Систем защиты [#805](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/805)
- При удалении актива отсутствует Всплывающее сообщение об успешном удалении [#831](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/831)
- Пользователю доступно самостоятельно изменение "Даты окончания срока действия" [#878](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/878)
- Различное отображение событий в списке событий и в инциденте [#832](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/832)
- Исправлено создание источника при копировании endpoint если достигнуто максимальное значения порта [#880](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/880)
- Привилегии "Системные настройки" [#765](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/765)
- Не понятно прошла ли загрузка правил СОВ или нет [#874](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/874)
- Скаченная конфигурация не может быть загружена [#873](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/873)
- Удаление ssl сертификата и ключа в системных настройках [#897](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/897)
- Исправлено добавление ssl сертификатов и ключей в системных настройках [#900](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/900)
- Лишний пробел в столбце "Дата" списка событий [#858](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/858)
- Копирование Endpoint не обрабатывает ограничение по лицензии [#881](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/881)
- Привилегия "Может удалять автоматически созданные источники" не работает [#629](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/629)
- "Разрастание" лога nginx.access.log по времени от количества Endpoint [#789](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/789)
- Добавление Admin в группу пользователей вызывает дублирование в назначении инцидента [#869](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/869)
- Требования к сложности пароля не работают при первоначальном создании пользователя [#710](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/710)
- Не появляется кнопка "Добавить" в источниках после удаления последнего источника [#820](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/820)
- Дублирование активов фаервола с одинаковым ip [#800](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/800)
- Удаление виджетов после изменения привилегий у пользователя [#627](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/627)
- На автоматической карте сети доступна кнопка "Редактировать актив" не взирая на отсутствие привилегий [#651](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/651)
- Доработка отображения времени [#922](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/922)
- Переименовать названия некоторых прав доступа [#825](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/825)
- Отправка SYSLOG по TCP если он выбран в экспорте инцидентов [#912](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/912)
- Вывод ошибок при импорте правил корреляции с несуществующим IF [#885](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/885)
- Исправлен зум ниже средней полосы [#853](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/853)
- Расширенная информация о назначенном пользователе в заротированных инцидентах [#887](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/887)
- Не срабатывает правило корреляции с генерацией инцидентов если не указывать поле поиска [#879](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/879)
- Дубликат кнопок действий в списке источников событий [#940](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/940)
- Изменение названия Endpoint создает новый источник, который затем нельзя удалить [#888](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/888)
- Конвертировать число инпутов лицензии в INT [#551](https://gitlab.iwarma.ru/iwa/dev/console/core/-/merge_requests/551)
- Некорректное отображение имени файла фонового изображения [#812](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/812)
- Исправлены все тесты [#969](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/969)
### Добавлено
- Реализована система прав и наследование вложенных прав [#810](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/810)
- Добавлены заранее настроенные группы пользователей [#811](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/811)
- Отображение полей sid и rev на странице списка правил корреляции [#868](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/868)
## [1.1.0] - Не выпущено
## [1.1.0-rc21]
### Исправлено
- Ошибки локализации в списке Корреляции Часть 2 [#776](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/776)
## [1.1.0-rc20]
### Исправлено
- Дополнено взаимодействие Console-Endpoint device control для версии ARMAIE 2.3.4 [#901](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/901)
- Разбор сообщений от endpoint версии 2.3.4 [#739](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/739)
## [1.1.0-rc19]
### Добавлено
- Разбор сообщений о USB от Endpoint [#739](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/739)
- Переделано взаимодействие Console-Endpoint device control [#901](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/901)
### Исправлено
- Теперь отключенное правило корреляции можно включать [#877](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/877)
- Исправлена локализация в справке импорта/экспорта правил корреляции [#776](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/776)
- Исправлена ошибка при создании endpoint в случае совпадения имени с источником события [#904](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/904)
- Разбор сообщений firewall [#895](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/895)
- Добавлена справка для ротации событий [#857](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/857)
- Исправлена ошибка когда не работает фильтр по протоколу UDP на автоматической карте сети [#821](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/821)
- Сортировка в списке инцидентов [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
## [1.1.0-rc18] - 11.10.2021
### Добавлено
- Реализовано требование о смене собственного пароля [#770](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/770)
### Исправлено
- Аналогичный метод редактирования источника событий для MC, как это сделано для IE [#860](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/860)
- Отображение фильтров по-умолчанию при загрузке страницы списка инцидентов [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
- Копирование Endpoint'а с источником и выбор порта для копии [#862](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/862)
## [1.1.0-rc17] - 08.11.2021
### Добавлено
- Отображение сообщения об ошибках удаления источника событий [#261](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/261)
- Аналогичный метод редактирования источника событий для MC, как это сделано для IE [#860](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/860)
- Отображение фильтров по-умолчанию при загрузке страницы списка инцидентов [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
### Исправлено
- После отключения TLS перестали работать виджеты [#775](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/775)
- Странный перевод Источников событий [#721](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/721)
- Ошибка в сообщении при неверном вводе логина\пароля [#697](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/697)
- Жесткий фильтр Статуса на таблице активов [#728](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/728)
- Снятие и установка галочки "Создать источник" - очищает поле "Порт" [#746](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/746)
- Сортировка на странице правил корреляции при стандартной по столбцу status [#875](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/875)
## [1.1.0-rc16] - 25.10.2021
- Убран отладочный код из консоли браузера [#846](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/846)
- Исправлен поиск в Инцидентах для не суперпользователей [#797](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/797)
- Исправлена сериализация при экспорте правила корреляции по Действию Инциденты [#784](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/782)
- Исправлен баг когда при отсутствии фильтра "Отображать соседей" не отображаются соединения [#848](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/848)
- Исправлена проблема когда виджеты "Информация о системе" и "Службы" не оставались на обзорной панели [#447](https://gitlab.iwarma.ru/iwa/dev/console/core/-/merge_requests/447)
- Исправлено имя столбца "Имя сигнатуры" на странице список событий [#722](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/722)
- Исправлен перевод ошибки при активации лицензии онлайн [#719](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/719)
- Исправлено работа импорта правил корреляции при загрузке правила с rev ниже, чем загружено [#834](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/834)
- Исправлена генерация лейблов ассетов с неизвестным типом на карте сетевых взаимодействий [#579](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/579)
- Исправлена ошибка Jquery в виджете [#766](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/766)
- Изменена система ротации инцидентов [#857](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/857)
## [1.1.0-rc14] - 11.10.2021
### Добавлено
- Клиент сервера лицензий [#666](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/666)
- Добавлена гибкость в привилегиях коррелятора [#628](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/628)
- Убран выбор протокола соединения при работе с ARMAIF [#234](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/234)
- Исправлена работа кнопки фильтра по активам на карте сетевых взаимодействий [#692](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/692)
### Изменено
- Убран столбец "Тип" из списка правил корреляции [#676](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/676)
- Скрыт столбец Система в разделе "Список источников" [#678](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/678)
- Изменена страница списка событий [#691](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/691)
- Изменена страница настройки экспорта инцидентов [#674](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/674)
- Изменен конфиг logstash, для корректного разбора и фильтрации сообщений не "CEF" формата [#575](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/575)
- Изменен русский перевод привилегии ```can_export_journals``` с ```Может скачивать журналы``` на ```Может экспортировать журналы``` [#626](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/626)
- Изменены фильтры по умолчанию на странице списка инцидентов [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
- Удалена привилегия "может просматривать сетевые атаки" [#631](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/631)
- Удалено поле "Описание" на экране добавления подложке к карте сети [#750](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/750)
- Скрыта функцию "Скачать" статическую карту" [#759](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/759)
- Не переведенные всплывающие уведомления на странице списка Endpoint + новое уведомление при ошибке загрузке конфига [#758](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/758)
- Изменено название кнопок в таблице Endpoint [#757](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/757)
- Скрыты кнопки экспорта в CSV при отсутствии прав на просмотр хранилища [#638](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/638)
- Исправлена верстка в проверке под правило корреляции [#726](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/726)
- Исправлено взаимодействие с кнопкой "редактирование групп" на странице правила корреляции [#696](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/696)
- Исправлены ошибки локализации при создании\редактировании правила [#724](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/724)
- Изменен функционал работы часов консоли [#764](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/764)
- При загрузке правил теперь корректно выполняется алгоритм проверки существования SID и REV [#780](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/780)
### Исправлено
- При редактировании IP МЭ происходило дублирование Asset [#349](https://gitlab.iwarma.ru/iwa/dev/console/core/-/merge_requests/349)
- Изменение статуса инцидента изменяет его ID [#606](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/606)
- Скрыты функции по работе с уязвимостями [#602](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/602)
- Отсутствует кнопка управления группами пользователей в списке пользователей [#618](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/618)
- В форму редактирования endpoint добавлены поля, как в форме добавления [#679](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/679)
- Исправлены тесты [#669](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/669)
- Исправлены Ошибки при работе со справочником "Производитель" [#636](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/636)
- Изменено поведение статус актива "----" распознается как разрешенный актив" [#647](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/647)
- Исправлено "Привилегия "может просматривать активы" вызывает ошибку при входе в список активов [#693](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/693)
- Не нужная привилегия "может просматривать сетевые атаки." [#631](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/631)
- При создании Endpoint-а можно создать источник, невзирая на отсутствие привилегий [#609](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/609)
- Исправлена ошибка при редактировании источника события [#601](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/601)
- Привилегия "Может скачивать журналы" отвечает за другое. [#626](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/626)
- При экспорте активов, формируется некорректный файл [#600](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/600)
- "Дата окончания срока действия" пользователя не мешает ему войти в систему" [#617](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/617)
- Проблемы привилегий управления списком пользователей [#624](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/624)
- На статической карте сети для нодов раскрытых групп добавлен фон и убран не нужный значок [#661](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/661)
- Жеcткий фильтр Статуса на таблице активов [#728](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/728)
- Доступ на страницу лицензии без входа [#720](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/720)
- Конфиг Logstash для IF [#745](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/745)
- Ошибка локализации в Настройках ротации [#747](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/747)
- Странный перевод Источников событий [#721](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/721)
- Инциденты по активам Endpoint не связываются с активом Endpoint [#660](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/660)
- Не обработанная ошибка при активации лицензии онлайн [#719](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/719)
- Проблемы при установке новой лицензии [#716](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/716)
- Не локализована ошибка валидации при неверном вводе логина\пароля [#697](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/697)
- Ошибки локализации и орфографии [#594](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/594)
- Переводы (опечатки и другие орг.моменты) [#680](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/680/)
- Ошибка копирования конфигурации endpoint [#754](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/754)
- Ошибка добавления источника логов [#261](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/261)
- Локализация ошибок при блокировке пользователя [#773](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/773)
- Локализация ошибок формы смены пароля [#817](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/817)
- Ошибка локализации ошибки на странице карты сети [#813](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/813)
- Ошибки локализации раздела Лицензия [#718](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/718)
- Ошибки локализации в списке Корреляции Часть 2 [#776](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/776)
## [1.1.0-rc10] - 2021-08-26
### Изменено
- Убрана кнопка Экспорт из списка активов [#671](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/671)
- Убрана колонка "Тип" из списка систем защиты и из карточки [#672](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/672)
### Исправлено
- Кнопка сохранения правила корреляции глючит [#670](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/670)
## [1.1.0-rc9] - 2021-08-23
### Исправлено
- Добавление интерфейса управления флешками (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/634)
- Потеря смысла в привилегиях блока "Обзорная панель:" (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/615)
- Создание правила корреляции. Пропадает значение в "Поле" Условия (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/604)
- Коррелятор неверно создает новые индексы (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/14)
- Коррелятор неверно создает новые индексы (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/598)
- Редактирование настроек эндпоинта порождает новый актив в таблице активов (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/593)
- Работа фильтров на странице Assets (https://gitlab.iwarma.ru/iwa/dev/console/console-ui-react/-/issues/7)
- В MC остались ненужные элементы на UI в части интеграции с Endpoint (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/589)
- Тестирование Тестовой Задачи (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/583)
- Тестовая задача (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/582)
- Кнопка сохранения правила корреляции глючит (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/19)
- Сделать кнопку "Сохранить на странице создания правила корреляции не активной до добавления хотя бы одного действия
(https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/668)
- Добавить предикат QueryString (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/17)
- Излишний код в странице авторизации (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/658)
- Test task (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/657)
- Проблема в получении событий с межсетевого экрана (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/655)
- Обновить фронт для OPC DA (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/644)
- Странное поведение правила корреляции (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/15)
- Не работают уведомления (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/640)
- Баг при обновлении виджета "Коррелятор" [#329](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/329)
- Удалена модель `NormalizedEvent` в приложении `logstash` [#498](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/498)
## [1.1.0-rc8] - 2021-08-12
### Исправлено
- В Фильтрах инцидентов в "Назначен на" отображаются даже удаленные пользователи (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/639)
- Баг в консоли - не правильные поля (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/610)
- Bug in correlator rules export (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/576)
- Поиск в корреляции (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/573)
- Просмотр хранилища (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/572)
- Открытие журнала инцидентов (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/571)
- Ротация журналов событий (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/570)
- Проблема с созданием правил корреляции (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/569)
- Баги карты сетевых взаимодействий (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/558)
- При попытке зайти в инцидент с большим количеством событий, вылетает ошибка 500 (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/554)
- Доработка поля "Поиск" (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/552)
- Требуется объединение одинаковых событий коррелятором. (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/544)
- Добавить фильтрацию инцидентов (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/543)
- Разработать клиент для сервера лицензирования (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/516)
- Добавить описание в диалог помощи для поиска на странице событий (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/492)
## 1.0.2 (20210128)
### Добавлено
- Случайный пароль для базы данных по умолчанию
- Экспорт событий
- Коррелятор
- Возможность задать действия при возникновении инцидентов
- Мультипоточность обработки событий
- Экспорт событий и инцидентов в CSV
- Возможность настройки базы данных при установке при помощи debconf
- Автодополнение фильтров
- Отображение важности и некоторых других параметров значками
- Отображение запуска сервисов через Веб интерфейс при старте
- Правила коррелятора по умолчанию
- Работа с Endpoint
- Виджет лицензии
### Изменено
- Отображение события и инцидента
- Отображение некоторых виджетов графиков изменено и улучшено
- Формат отображение дат
- Верхнее меню навигации переработано для более удобного использования
- Отключена возможность входа в admin панель
- Форма смены пароля
- Страница хранилища переработана
- Увеличен максимальный размер загружаемых файлов
- Формат CEF
### Исправлено
- Работа с базами данных при установке, удалении
- Статусы ошибок при работе с хранилищем
- Валидация некоторых полей
- Настройки логсташ
- Переводы
- Отсутствие иконки на некоторых страницах Веб интерфейса
- Расположение элементов на странице виджетов
- Размер некоторых виджетов
- Удаление вершины карты сети
- Удвоение кнопки таблицы
- Ширина колонки с иконками
- Очистка имени новой группы при создании
- Порядок старта служб
- Проксирование через nginx
- Удалены излишние привилегии
- Убраны кнопки, вызов которых невозможен из-за отсутствия привилегий
- Ошибка 500 после старта
## Изменение версии на 1.0
## 1.4 (20200730)
### Добавлено
- Карта сети
- Пользовательские карты
- Интерфейс ввода логов
- Динамический ввод логов
- Скрытие и деактивация неактивных иконок
- Обновлены переводы
- Выбор условий ротации данных
### Изменено
- Страница инцидентов
### Исправлено
- Права доступа через API
- Отображение удаленных пользователей, назначенных на какие-либо события
- Некоторые активы не отображались
- Загрузка файлов на сенсор
- Отсутствие отображения типа на странице активов
- Ошибка при входе администратором на страницу профиля
- Некоторые ошибки разбора логов
## 1.3 (20200526)
### Добавлено
- Скрипт для форматирования вывода дампа
- Новые виджеты
- Появление активов, полученных в событиях
### Изменено
- Единый стиль для таблиц
- После истечения срока действия учётной записи пользователь становится неактивен (вход от лица такого пользователя невозможен)
- Пользователь администратор теперь создается только при первом запуске, таким образом теперь можно удалить начального пользователя или сменить его данные, без восстановления его данных на изначальные после рестарта
### Исправлено
- Баги с активностью записей и кнопок при невыделенных группах на экране управления группами
- Баги сохранения настроек ротации
- Баг загрузки конфигурации на ARMA Industrial firewall
- Баг при изменении имени группы
- Неверный парсинг некоторых событий, получаемых через CEF
- Дата создания инцидента была некорректна
## 1.2 (20200422)
### Добавлено
- Подсказки иконок таблиц
- Уведомления
- Загрузка правил IDS на сенсоры
- Карта сети
- Раздел для хранения и скачивания данных (Хранилище)
- Виджеты инцидентов по важности и категориям
- Возможность задания временной зоны для каждого пользователя
- Ротация инцидентов и событий по расписанию и размеру
### Изменено
- Хэдер изменен на однострочный
- Улучшен виджет информации о системе и сервисах
- Улучшен механизм взаимодействия с виджетами
### Исправлено
- Автофокус на странице логина
- Некоторые переводы
- Права доступа к некоторым страницам
- Баги виджета диапазона дат
- Изменение страницы добавления пользователи при некорректно заполненных полях
- Предупреждения браузера на главной странице
## 1.1 (20200317)
### Добавлено
- Подсказки иконок таблиц
- Единый стиль диалогов
- Единый стиль и хороший вид полей выбора файла
- Экспорт/импорт данных БД
- Экспорт/импорт правил корреляции
- IPython как консоль работы с django по умолчанию
- Страница активов
- Страница сенсоров
- Перевод виджетов
- Добавлено больше информации на странице списка событий
- Добавлена возможность добавления правил корреляции для событий МЭ входа и доступа в WEB
- Добавлена валидация ввода в поле выбора диапазона дат
- Группировка событий
- Иконка для перехода в dashboard
- Отображение событий внутри инцидента
- Страница изменения прав пользователей
### Исправлено
- Ошибка добавления пользователя
- Работа debugToolbar
- Мелкие исправления и переводы
- Падение logstash
- Фильтры на странице инцидентов не всегда работали
- Баг из-за чего не создавался пользователь с верно введенными данными
- Статус инцидента некорректно менялся
## 1.0 (20200217)
### Добавлено
- Добавлен базовый проект
- Добавлено приложение с шаблоном adminlte
- Добавлена докер конфигурация для разработки
- Добавлен набор конфигов logstash для arpwatch, firewall, suricata, web-auth и web-access
- Добавлено API для приема нормализованных событий от logstash
- Добавлены страницы работы с пользователями (список, добавление, удаление, редактирование)
- Добавлена страница списка событий, с возможностью поиска и фильтрации
- Добавлена страница списка инцидентов, с возможностью поиска и фильтрации
- Добавлена страница просмотра и редактирования инцидентов
- Добавлена страница "Панель оператора" с возможностью добавления виджетов и сохранения раскладки для каждого пользователя
- Добавлена страница логина
- Добавлены страницы для ошибок 403, 404 и 500
- Добавлены виджеты "Системная информация" и "Сервисы"
- Добавлен набор прав, функции для их получения и проверки
- Добавлена модель события корреляции и базовый коррелятор
- Добавлена возможность создания правил корреляции по файлу правил Suricata
- Базовые настройки логирования
- Базовые настройки кеширования
- Добавлен класс формы для отображения виджетов в стиле adminlte
- Добавлено поле ввода интервалов даты и времени
- Добавлено поле ввода интервалов целых чисел
- Добавлено поле ввода даты
- Добавлен плагин Datatables для единого отображения и поведения таблиц
- Добавлен конфиг для Gitlab-CI
- В режиме отладки добавлен Debug-toolbar
- Добавлен набор контекст-процессоров для генерации меню

58
README.md Normal file
View file

@ -0,0 +1,58 @@
# Подготовка к запуску тестов unit, integration, live_firewall
Перед запуском тестов, нужно подготовить базу данных, для этого запускаем:
```bash
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate
```
# Запуск тестов unit
Для запуска тестов, нужно активировать виртуальное окружение и запустить:
```bash
pytest --disable-warnings -m unit
```
# Запуск тестов интеграционных
Для запуска интеграционных тестов, вызываем:
```bash
pytest --disable-warnings -m integration
```
При этом, интеграционные можно запускать либо в докере, либо на развернутой виртуалке, тк
они завязаны на внешние сервисы.
# Запуск тестов на живом AIF
Для запуска тестов на живом AIF, вызываем:
```bash
pytest --disable-warnings -m live_firewall
```
Перед каждым запуском теста проводится проверка доступности AIF; в случае если
AIF недоступен, тест завершается с состоянием False
После выполнения последнего теста "test_live_set_firewall_suricata" AIF уходит в
перезагрузку длительностью ~40-60 сек.
**Важно! Pipeline запускает тесты последовательно, согласно очереди запросов от пользователей GitLab;
управляет этим отдельный раннер**
# Оценка покрытия тестов
Чтобы оценить общее покрытие кода тестами, нужно в docker запустить следующие команды:
```bash
coverage run -m pytest --disable-warnings
coverage report
```
Для HTML результата:
```bash
coverage html
```
# Отключение тестов при push'e и merge request'e
Тесты в пайплайне запускаются автоматически. Чтобы тесты при push'e и merge request'e не запускались,
необходимо указать в сообщении коммита ключ: #no_test

0
assets/__init__.py Normal file
View file

16
assets/admin.py Normal file
View file

@ -0,0 +1,16 @@
from django.contrib import admin
from assets.models.assets import OperatingSystem, AssetListGroup, Asset, AssetManufacturer
from core.mixins import JsonWidgetMixin
class AssetAdmin(JsonWidgetMixin, admin.ModelAdmin):
    """Admin list configuration for Asset: shown columns, link column, OS filter."""
    list_display = ('ip', 'os', 'name', 'status')
    list_display_links = ('name',)
    list_filter = ('os',)


# Register asset-related models; only Asset uses a customized admin class.
admin.site.register(OperatingSystem)
admin.site.register(AssetListGroup)
admin.site.register(Asset, AssetAdmin)
admin.site.register(AssetManufacturer)

5
assets/apps.py Normal file
View file

@ -0,0 +1,5 @@
from django.apps import AppConfig
class AssetsConfig(AppConfig):
    """Django application configuration for the ``assets`` app."""
    name = 'assets'

3
assets/constants.py Normal file
View file

@ -0,0 +1,3 @@
from incident.models import Incident
# Incident status considered "closed"; asset views count only incidents
# whose status differs from this value as active problems.
RESOLVED_STATUS = Incident.Status.RESOLVED

14
assets/filters.py Normal file
View file

@ -0,0 +1,14 @@
from django_filters import rest_framework as filters
from assets.models.assets import Asset
class AssetFilter(filters.FilterSet):
    """FilterSet for the asset list API.

    Supports ``?incidents=<uuid>`` to narrow the queryset to assets linked
    to the given incident; UUIDFilter validates the parameter and yields
    HTTP 400 on malformed input.
    """
    incidents = filters.UUIDFilter(method='filter_by_incidents')

    def filter_by_incidents(self, queryset, name, value):
        # 'incidents' is the Asset->Incident M2M; value is the validated UUID.
        return queryset.filter(incidents=value)

    class Meta:
        model = Asset
        fields = ['incidents']

View file

View file

92
assets/models/assets.py Normal file
View file

@ -0,0 +1,92 @@
from django.db import models
from django.utils.translation import gettext_lazy
from console.models import UniqueNameDescriptionModel, UpdatedNameDescriptionModel, SensorConnectedMixin
from core.validators import mac_address_validator
from incident.models import Incident
class OperatingSystem(UniqueNameDescriptionModel):
    """ One entry in the operating-systems catalog (name/description). """
    class Meta:
        verbose_name = gettext_lazy('Operating system')
class AssetListGroup(UniqueNameDescriptionModel):
    """ Display group for the asset list. """
    # Whether the group is rendered collapsed in the asset list UI.
    collapsed = models.BooleanField(default=False)
class AssetManufacturer(UniqueNameDescriptionModel):
    """ Catalog entry for an asset manufacturer. """
    class Meta:
        verbose_name = gettext_lazy('Asset manufacturer')
class Asset(SensorConnectedMixin, UpdatedNameDescriptionModel):
    """ Model for one asset — a device observed on the monitored network. """

    class AllowStatus(models.IntegerChoices):
        """ Authorization state: new assets must be explicitly allowed. """
        NEW = 0, gettext_lazy('New asset')
        ALLOWED = 1, gettext_lazy('Allowed asset')

    class AssetType(models.TextChoices):
        """ Kind of device this asset represents. """
        USER = 'user', gettext_lazy('User')
        ARMA = 'arma_industrial_firewall', gettext_lazy('ARMA industrial firewall')
        PLC = 'plc', gettext_lazy('PLC')
        PC = 'pc', gettext_lazy('PC')
        SERVER = 'server', gettext_lazy('Server')
        NETWORK_DEVICE = 'network_device', gettext_lazy('Network device')

    manufacturer = models.ForeignKey(AssetManufacturer,
                                     verbose_name=gettext_lazy('Manufacturer'),
                                     on_delete=models.SET_NULL,
                                     blank=True,
                                     null=True)
    model = models.CharField(blank=True,
                             null=True,
                             max_length=150,
                             verbose_name=gettext_lazy('Model'),
                             help_text=gettext_lazy('Asset model'))
    ip = models.GenericIPAddressField(verbose_name=gettext_lazy('IP'), help_text=gettext_lazy("Asset's IP address"))
    mac = models.CharField(blank=True,
                           null=True,
                           max_length=17,
                           verbose_name=gettext_lazy('MAC'),
                           help_text=gettext_lazy("Asset's MAC address"),
                           validators=[mac_address_validator])
    os = models.ForeignKey(OperatingSystem,
                           on_delete=models.SET_NULL,
                           verbose_name=gettext_lazy('OS'),
                           help_text=gettext_lazy('Operation systems, found on asset'),
                           blank=True,
                           null=True)
    # NOTE(review): the default is the JSON *string* "[]", not a list, so
    # stored values may be either a string or a parsed list; consumers must
    # accept both. Changing the default would need a data migration.
    ports = models.JSONField(verbose_name=gettext_lazy('Ports'),
                             help_text=gettext_lazy('List of open ports'),
                             null=True,
                             blank=True, default="[]")
    incidents = models.ManyToManyField(Incident, verbose_name=gettext_lazy('Incidents'), blank=True)
    group = models.ForeignKey(AssetListGroup,
                              on_delete=models.SET_NULL,
                              blank=True,
                              null=True,
                              verbose_name=gettext_lazy('Group'))
    asset_type = models.CharField(choices=AssetType.choices,
                                  max_length=128,
                                  verbose_name=gettext_lazy('Asset type'),
                                  blank=True,
                                  null=True)
    status = models.IntegerField(choices=AllowStatus.choices,
                                 verbose_name=gettext_lazy('Asset status'),
                                 help_text=gettext_lazy('Asset allow status'),
                                 default=AllowStatus.NEW,
                                 blank=True)

    @property
    def manufacturer_name(self):
        """Manufacturer's name, or ``None`` when no manufacturer is set.

        ``manufacturer`` is nullable (``on_delete=SET_NULL``); the previous
        unguarded attribute access raised AttributeError for assets without
        a manufacturer (e.g. during CSV export, which reads this property).
        """
        return self.manufacturer.name if self.manufacturer else None

    def __str__(self):
        return self.name

View file

View file

@ -0,0 +1,139 @@
import json
from django.db.models import Q
from rest_framework import serializers
from assets.constants import RESOLVED_STATUS
from assets.models.assets import Asset, AssetListGroup, OperatingSystem, AssetManufacturer
from console.models import Vulnerability
from core.serializers import DateTimeLocalizedField, ModelLocalizedSerializer
from incident.models import Incident
from incident.serializers.incident import IncidentSerializer
class OsSerializer(serializers.ModelSerializer):
    """Full serializer for OperatingSystem catalog entries."""
    class Meta:
        model = OperatingSystem
        fields = '__all__'
class OsNameSerializer(serializers.ModelSerializer):
    """Lightweight OS serializer exposing only the name."""
    class Meta:
        model = OperatingSystem
        fields = ['name']
class AssetManufacturerSerializer(serializers.ModelSerializer):
    """CRUD serializer for the manufacturer catalog."""
    class Meta:
        model = AssetManufacturer
        fields = ['id', 'name', 'description']
class AssetGroupSerializer(serializers.ModelSerializer):
    """CRUD serializer for asset display groups."""
    class Meta:
        model = AssetListGroup
        fields = ['name', 'description', 'id', 'collapsed']
class AssetListSerializer(serializers.ModelSerializer):
    """Row serializer for the asset list table."""
    updated = DateTimeLocalizedField()
    # Supplied by the view's queryset annotation (AssetViewSet.get_queryset);
    # not a model field.
    count_incidents = serializers.IntegerField()

    class Meta:
        model = Asset
        fields = ['id', 'name', 'asset_type', 'status', 'ip', 'updated', 'count_incidents']
class AssetDetailSerializer(serializers.ModelSerializer):
    """Full read serializer for a single asset with nested related objects."""
    os = OsSerializer()
    group = AssetGroupSerializer()
    incidents = IncidentSerializer(many=True)
    ports = serializers.SerializerMethodField()
    updated = DateTimeLocalizedField()

    class Meta:
        model = Asset
        fields = '__all__'

    def get_ports(self, asset):
        """Render the port list as a string, '[]' when unknown/empty.

        ``Asset.ports`` is a JSONField whose default is the JSON *string*
        "[]", so stored values may be either a string or an already-parsed
        list. The previous code fed parsed lists to ``json.loads`` and the
        resulting TypeError silently replaced real data with '[]'; accept
        both representations instead.
        """
        ports = asset.ports
        if isinstance(ports, str):
            try:
                ports = json.loads(ports)
            except ValueError:
                ports = []
        if not ports:  # None, '', [] and {} all render as an empty list
            ports = []
        return f'{ports}'
class AssetCreateUpdateSerializer(serializers.ModelSerializer):
    """Write serializer for assets; responses use the detail representation."""
    class Meta:
        model = Asset
        fields = '__all__'

    def to_representation(self, instance):
        # Echo the full nested detail payload back to the client after a write.
        return AssetDetailSerializer(instance=instance).data
class AssetAuthorizeSerializer(serializers.Serializer):
    """Payload for bulk authorization: a list of asset primary keys."""
    selected_assets = serializers.ListField(child=serializers.IntegerField())
class AssetCsvExportSerializer(ModelLocalizedSerializer):
    """ Serializer for CSV export of asset data.

    Nearly every relational field is flattened to a plain value here;
    otherwise fields that link to other models would be rendered in the
    table as dictionaries.
    """
    incidents = serializers.SerializerMethodField('get_incidents')
    os = serializers.ReadOnlyField(source='os.name', allow_null=True)
    group = serializers.ReadOnlyField(source='group.name', allow_null=True)
    asset_type = serializers.ReadOnlyField(source='get_asset_type_display', allow_null=True)
    status = serializers.ReadOnlyField(source='get_status_display', allow_null=True)
    manufacturer = serializers.ReadOnlyField(source='manufacturer_name', allow_null=True)

    def get_incidents(self, obj):
        # Exported as the count of non-resolved incidents linked to this asset;
        # 'asset' is the reverse query name of the Asset.incidents M2M.
        amount_of_active_incs = Incident.objects.filter(~Q(status=RESOLVED_STATUS), asset=obj).count()
        return amount_of_active_incs

    class Meta:
        model = Asset
        fields = ['name', 'manufacturer', 'updated', 'model', 'ip', 'os', 'ports', 'incidents',
                  'group', 'asset_type', 'status']
class IncidentTitleSerializer(serializers.ModelSerializer):
    """Minimal incident serializer: pk, title and status only."""
    class Meta:
        model = Incident
        fields = ['pk', 'title', 'status']
class AssetInfoSerializer(serializers.ModelSerializer):
    """Compact asset representation with incident titles and OS name."""
    incidents = IncidentTitleSerializer(many=True)
    os = OsNameSerializer()
    updated = DateTimeLocalizedField()

    class Meta:
        model = Asset
        fields = ['id', 'name', 'description', 'ip', 'os', 'ports', 'updated', 'incidents', 'status']
class AssetIncidentInfoSerializer(serializers.ModelSerializer):
    """Incident summary used inside asset "active problems" payloads."""
    class Meta:
        model = Incident
        fields = ['pk', 'title', 'description', 'status']
class AssetVulnerabilitiesInfoSerializer(serializers.ModelSerializer):
    """Vulnerability summary (name/description) for asset detail views."""
    class Meta:
        model = Vulnerability
        fields = ['name', 'description']
class AssetActiveProblemsSerializer(serializers.ModelSerializer):
    """Serializer exposing only the *active* (non-resolved) incidents of an asset.

    ``incidents`` must be a SerializerMethodField: the previous declaration
    ``AssetIncidentInfoSerializer(many=True)`` serialized *all* linked
    incidents and ``get_incidents`` was never invoked, so resolved
    incidents were not filtered out.
    """
    incidents = serializers.SerializerMethodField()

    def get_incidents(self, obj):
        # Exclude resolved incidents; 'asset' is the reverse query name of
        # the Asset.incidents M2M.
        inc_queryset = Incident.objects.filter(~Q(status=RESOLVED_STATUS), asset=obj)
        serializer = AssetIncidentInfoSerializer(instance=inc_queryset, many=True, context=self.context)
        return serializer.data

    class Meta:
        model = Asset
        fields = ['incidents']

0
assets/tests/__init__.py Normal file
View file

View file

@ -0,0 +1,47 @@
import http
import logging
from http import HTTPStatus
import pytest
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework.test import APIClient
from assets.models.assets import OperatingSystem, Asset
from perms.models import Perm
_log = logging.getLogger()
TIMEOUT = 10 # time before timeout exception appears
User = get_user_model()
@pytest.mark.django_db
class TestAssetPagesAccess(object):
    """Access-control tests for asset pages (this module contains only the setup fixture)."""

    @pytest.fixture(autouse=True)
    def setup_tests(self, django_user_model, add_user_with_permissions):
        """ Prepare the database for every test in this class.

        :param django_user_model: pytest-django helper for the User model
        :param add_user_with_permissions: fixture that creates a user with
            the given permissions
        Creates:
            superuser 'foo'/'bar'; 'test_no_perms' without permissions;
            'test_right_perms' with view/edit asset permissions;
            'test_perms' with an empty permission list;
            five assets 'test0'..'test4' with OS 'MACOS'.
        """
        username = 'foo'
        password = 'bar'
        add_user_with_permissions(username=username, password=password,
                                  is_superuser=True)
        add_user_with_permissions(username='test_no_perms', password='1')
        add_user_with_permissions(username='test_right_perms', password='1',
                                  permissions=[Perm.can_view_assets_list,
                                               Perm.can_view_asset,
                                               Perm.can_edit_asset])
        add_user_with_permissions(username='test_perms', password='1',
                                  permissions=[])
        os = OperatingSystem.objects.create(name='MACOS')
        for i in range(5):
            Asset.objects.create(name=f'test{i}', ip='1.1.1.1', os=os)

View file

@ -0,0 +1,186 @@
import json
import logging
import pytest
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework import status
from assets.models.assets import Asset, OperatingSystem
from incident.models import Incident
from perms.models import Perm
_log = logging.getLogger()
TIMEOUT = 10 # time before timeout exception appears
User = get_user_model()
@pytest.mark.django_db
class TestAssetPagesAccess(object):
    """API tests for asset permissions, editing, deletion, filtering and creation."""

    @pytest.fixture(autouse=True)
    def setup_tests(self, django_user_model, add_user_with_permissions):
        """ Prepare the database for every test in this class.

        :param django_user_model: pytest-django helper for the User model
        :param add_user_with_permissions: fixture that creates a user with
            the given permissions
        Creates:
            superuser 'foo'/'bar'; 'test_no_perms' without permissions;
            'test_right_perms' with view/edit/delete asset permissions;
            'test_perms' with an empty permission list;
            five assets 'test0'..'test4' with OS 'MACOS'.
        """
        username = 'foo'
        password = 'bar'
        add_user_with_permissions(username=username, password=password,
                                  is_superuser=True)
        add_user_with_permissions(username='test_no_perms', password='1')
        add_user_with_permissions(username='test_right_perms', password='1',
                                  permissions=[Perm.can_view_assets_list,
                                               Perm.can_view_asset,
                                               Perm.can_edit_asset,
                                               Perm.can_delete_asset])
        add_user_with_permissions(username='test_perms', password='1',
                                  permissions=[])
        os = OperatingSystem.objects.create(name='MACOS')
        for i in range(5):
            Asset.objects.create(name=f'test{i}', ip='1.1.1.1', os=os)

    # TODO: Need to fix this
    @pytest.mark.skip
    @pytest.mark.integration
    def test_export_assets_in_csv_api(self, add_user_with_permissions, api_client):
        """CSV export endpoint responds 200 for a user holding export permissions."""
        username = 'user'
        password = 'pro100ton'
        add_user_with_permissions(username=username,
                                  password=password,
                                  permissions=[Perm.can_view_network,
                                               Perm.can_work_with_incidents,
                                               Perm.can_export_incidents_list,
                                               Perm.can_export_assets]
                                  )
        user = User.objects.get(username=username)
        api_client.force_authenticate(user)
        response = api_client.get(reverse('asset-csv-export'))
        assert response.status_code == status.HTTP_200_OK

    @pytest.mark.unit
    def test_user_w_perm_can_edit_asset(self, api_client):
        """ Test for checking if user with right permissions can access edit asset page """
        user = User.objects.get(username='test_right_perms')
        api_client.force_authenticate(user)
        asset_pk = Asset.objects.get(name='test0').pk
        url = reverse('asset-detail', args=[asset_pk])
        # Empty PATCH body: a partial update with no changes should still be allowed.
        response = api_client.patch(url)
        assert status.HTTP_200_OK == response.status_code

    @pytest.mark.unit
    def test_user_wo_perm_cant_edit_asset(self, api_client):
        """ Test for checking if user without right permissions cannot edit asset"""
        user = User.objects.get(username='test_perms')
        api_client.force_authenticate(user)
        asset_pk = Asset.objects.get(name='test0').pk
        url = reverse('asset-detail', args=[asset_pk])
        response = api_client.patch(url)
        assert status.HTTP_403_FORBIDDEN == response.status_code

    @pytest.mark.unit
    def test_asset_updated_correctly(self, api_client):
        """ Test for checking if asset is saved correctly after update"""
        user = User.objects.get(username='test_right_perms')
        api_client.force_authenticate(user)
        asset_pk = Asset.objects.get(name='test0').pk
        url = reverse('asset-detail', args=[asset_pk])
        response = api_client.patch(url, {'model': 'test_model', 'ports': '[5000,6000]'})
        assert response.json()['model'] == 'test_model'
        assert status.HTTP_200_OK == response.status_code
        assert Asset.objects.get(name='test0').model == 'test_model'
        # The JSON string sent over the API is stored as a parsed list.
        assert Asset.objects.get(name='test0').ports == json.loads("[5000,6000]")

    @pytest.mark.unit
    def test_asset_is_deleted_correctly(self, api_client):
        """ Test for checking if asset is deleted correctly"""
        user = User.objects.get(username='test_right_perms')
        api_client.force_authenticate(user)
        asset_pk = Asset.objects.get(name='test0').pk
        url = reverse('asset-detail', args=[asset_pk])
        response = api_client.delete(url)
        # DestroyModelResponseStatus200Mixin returns 200 instead of DRF's default 204.
        assert status.HTTP_200_OK == response.status_code
        assert Asset.objects.filter(name='test0').exists() is False
        assert Asset.objects.count() == 4

    @pytest.mark.unit
    def test_check_serializer_validation(self, api_client):
        """ Test for checking if serializer return error with invalid data"""
        user = User.objects.get(username='test_right_perms')
        api_client.force_authenticate(user)
        asset = Asset.objects.get(name='test0')
        asset_pk = asset.pk
        asset_os = asset.os
        url = reverse('asset-detail', args=[asset_pk])
        response = api_client.patch(url, {'os': 'bad'})
        assert status.HTTP_400_BAD_REQUEST == response.status_code
        assert 'os' in response.data
        # The invalid payload must not have modified the stored asset.
        assert Asset.objects.get(name='test0').os == asset_os

    @pytest.mark.merge
    def test_asset_correctly_count_number_of_incidents(self, api_client):
        """List endpoint annotates each asset with its incident count."""
        user = User.objects.get(username='test_right_perms')
        api_client.force_authenticate(user)
        asset = Asset.objects.get(name='test0')
        incident = Incident.objects.create(title='test_inc', importance=10, event_count=10, events='')
        asset.incidents.add(incident)
        url = reverse('asset-list')
        response = api_client.get(url)
        assert response.status_code == status.HTTP_200_OK
        assert response.data['results'][0]['count_incidents'] == 1
        assert response.data['results'][1]['count_incidents'] == 0

    @pytest.mark.merge
    def test_filter_by_incidents(self, api_client):
        """ Test for checking filter returns asset"""
        user = User.objects.get(username='test_right_perms')
        api_client.force_authenticate(user)
        incident = Incident.objects.create(title='test_inc', importance=10, event_count=10, events='')
        incident_id = incident.incident_id
        asset = Asset.objects.get(name='test0')
        asset.incidents.add(incident)
        url = reverse('asset-list')
        response = api_client.get(url, **{'QUERY_STRING': f'incidents={incident_id}'})
        assert status.HTTP_200_OK == response.status_code
        _log.info(response.data)
        _log.info(incident_id)
        assert response.data['count'] == 1
        assert response.data['results'][0]['id'] == asset.pk
        # A non-UUID value must be rejected by the UUIDFilter with HTTP 400.
        bad_response = api_client.get(url, **{'QUERY_STRING': f'incidents=bad'})
        assert status.HTTP_400_BAD_REQUEST == bad_response.status_code

    @pytest.mark.unit
    def test_create_asset_with_bad_status(self, api_client):
        """We set the asset status asset to 0 by default. Then we will check that the status is always 0"""
        user = User.objects.get(username='foo')
        api_client.force_authenticate(user)
        url = reverse('logstash-asset-list')
        response = api_client.post(url,
                                   data={
                                       "asset_type": "",
                                       "description": "Description",
                                       "group": "",
                                       "ip": "127.0.0.1",
                                       "manufacturer": "",
                                       "model": "",
                                       "name": "192.168.1.101",
                                       "os": "",
                                       "ports": "[5000]",
                                       "sensor": "armaif_1",
                                       "status": "25622",
                                       "type": "asset",
                                   }, format="json")
        assert status.HTTP_201_CREATED == response.status_code
        asset = Asset.objects.get(name='192.168.1.101')
        # Client-supplied status is ignored: new assets always start as NEW (0).
        assert asset.status == 0

19
assets/urls.py Normal file
View file

@ -0,0 +1,19 @@
from django.urls import path, include
from rest_framework import routers
from assets.views.assets import AssetViewSet, AssetInfoViewSet, OsViewSet, AssetGroupViewSet, AssetProblemsViewSet, \
AssetManufacturersViewSet
# DRF router exposing all asset-related endpoints under this app's URL prefix.
router = routers.DefaultRouter()
router.register('elements', AssetViewSet, basename='asset')
router.register('manufacturers', AssetManufacturersViewSet, basename='asset-manufacturers')
router.register('groups', AssetGroupViewSet, basename='asset-groups')
router.register('info', AssetInfoViewSet, basename='asset-info')
router.register('os', OsViewSet, basename='os')
router.register('problems', AssetProblemsViewSet, basename='asset-problems')

urlpatterns = [
    path('', include(router.urls))
]

0
assets/views/__init__.py Normal file
View file

125
assets/views/assets.py Normal file
View file

@ -0,0 +1,125 @@
from django.db.models import Count, Q
from django.http import JsonResponse
from rest_framework.decorators import action
from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, UpdateModelMixin, CreateModelMixin
from rest_framework.viewsets import GenericViewSet
from assets.constants import RESOLVED_STATUS
from assets.filters import AssetFilter
from assets.models.assets import Asset, OperatingSystem, AssetListGroup, AssetManufacturer
from assets.serializers.assets import AssetGroupSerializer, AssetInfoSerializer, OsSerializer, AssetDetailSerializer, \
AssetCsvExportSerializer, AssetListSerializer, AssetCreateUpdateSerializer, AssetActiveProblemsSerializer, \
AssetAuthorizeSerializer, AssetManufacturerSerializer
from core.mixins import ApiPermissionCheckMixin, ExportToCsvMixin, DestroyModelResponseStatus200Mixin
from perms.models import Perm
class AssetViewSet(ApiPermissionCheckMixin,
                   ListModelMixin,
                   RetrieveModelMixin,
                   UpdateModelMixin,
                   DestroyModelResponseStatus200Mixin,
                   ExportToCsvMixin,
                   GenericViewSet):
    """List/retrieve/update/delete API for assets, with CSV export and
    bulk authorization."""
    column_titles = AssetCsvExportSerializer.Meta.fields
    # Per-action permission map consumed by ApiPermissionCheckMixin.
    console_permissions = {'csv_export': [Perm.can_export_assets], 'list': [Perm.can_view_assets_list],
                           'destroy': [Perm.can_delete_asset], 'retrieve': [Perm.can_view_asset],
                           'authorize_assets': [Perm.can_view_assets_list], 'update': [Perm.can_edit_asset],
                           'partial_update': [Perm.can_edit_asset],
                           }
    filters = []
    filterset_class = AssetFilter

    class Meta:
        model = Asset

    def get_queryset(self):
        """Annotate each asset with the number of its non-resolved incidents.

        The conditional Count must follow the relation
        (``incidents__status``); the previous bare ``status`` lookup
        compared the *asset's* own allow-status field against an incident
        status value, so the count was wrong.
        """
        return Asset.objects.annotate(
            count_incidents=Count('incidents', filter=~Q(incidents__status=RESOLVED_STATUS)))

    def get_serializer_class(self):
        # List rows, write payloads and detail reads use different serializers.
        if self.action == 'list':
            return AssetListSerializer
        if self.action in ['update', 'partial_update']:
            return AssetCreateUpdateSerializer
        return AssetDetailSerializer

    @action(detail=False, methods=["POST"], name="authorize_assets")
    def authorize_assets(self, request):
        """ API for authorizing assets by changing its status from NEW to ALLOWED """
        serializer = AssetAuthorizeSerializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        assets_to_change = serializer.validated_data['selected_assets']
        Asset.objects.filter(pk__in=assets_to_change).update(status=Asset.AllowStatus.ALLOWED)
        return JsonResponse({'status': 'ok'})
class AssetGroupViewSet(ApiPermissionCheckMixin,
                        ListModelMixin,
                        RetrieveModelMixin,
                        CreateModelMixin,
                        UpdateModelMixin,
                        DestroyModelResponseStatus200Mixin,
                        GenericViewSet):
    """CRUD API for asset display groups, ordered by name."""
    serializer_class = AssetGroupSerializer
    queryset = AssetListGroup.objects.order_by('name').all()
    # Single 'default' entry: the same permission guards every action.
    console_permissions = {'default': [Perm.can_view_assets_list]}

    # NOTE(review): DRF viewsets do not consult an inner Meta; kept for
    # project convention.
    class Meta:
        model = AssetListGroup
class AssetInfoViewSet(ApiPermissionCheckMixin,
                       RetrieveModelMixin,
                       ListModelMixin,
                       GenericViewSet):
    """Read-only compact asset info (incident titles, OS name)."""
    serializer_class = AssetInfoSerializer
    console_permissions = {'retrieve': [Perm.can_view_asset], 'list': [Perm.can_view_assets_list]}
    queryset = Asset.objects.all()

    # NOTE(review): DRF viewsets do not consult an inner Meta; kept for
    # project convention.
    class Meta:
        model = Asset
class OsViewSet(ApiPermissionCheckMixin,
                ListModelMixin,
                RetrieveModelMixin,
                CreateModelMixin,
                UpdateModelMixin,
                DestroyModelResponseStatus200Mixin,
                GenericViewSet):
    """CRUD API for the operating-systems catalog, ordered by name."""
    serializer_class = OsSerializer
    queryset = OperatingSystem.objects.order_by('name')
    # Catalog editing is guarded by a single permission for every action.
    console_permissions = {'list': [Perm.can_edit_assets_catalogs], 'destroy': [Perm.can_edit_assets_catalogs],
                           'retrieve': [Perm.can_edit_assets_catalogs], 'update': [Perm.can_edit_assets_catalogs],
                           'partial_update': [Perm.can_edit_assets_catalogs], 'create': [Perm.can_edit_assets_catalogs]
                           }

    class Meta:
        # Previously pointed at OsSerializer (a serializer class) by mistake;
        # the inner Meta should name the model, consistent with the sibling viewsets.
        model = OperatingSystem
class AssetProblemsViewSet(ApiPermissionCheckMixin, RetrieveModelMixin, GenericViewSet):
    """Read-only endpoint returning an asset's active problems (incidents)."""
    serializer_class = AssetActiveProblemsSerializer
    console_permissions = {'retrieve': [Perm.can_view_incidents_list]}
    queryset = Asset.objects.all()

    # NOTE(review): DRF viewsets do not consult an inner Meta; kept for
    # project convention.
    class Meta:
        model = Asset
class AssetManufacturersViewSet(ApiPermissionCheckMixin,
                                ListModelMixin,
                                RetrieveModelMixin,
                                CreateModelMixin,
                                UpdateModelMixin,
                                DestroyModelResponseStatus200Mixin,
                                GenericViewSet):
    """CRUD API for the asset-manufacturer catalog."""
    serializer_class = AssetManufacturerSerializer
    queryset = AssetManufacturer.objects.all()
    # Catalog editing is guarded by a single permission for every action.
    console_permissions = {'list': [Perm.can_edit_assets_catalogs], 'destroy': [Perm.can_edit_assets_catalogs],
                           'retrieve': [Perm.can_edit_assets_catalogs], 'update': [Perm.can_edit_assets_catalogs],
                           'partial_update': [Perm.can_edit_assets_catalogs], 'create': [Perm.can_edit_assets_catalogs]
                           }

    # NOTE(review): DRF viewsets do not consult an inner Meta; kept for
    # project convention.
    class Meta:
        model = AssetManufacturer

2
checker/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
checker
env/*

View file

View file

@ -0,0 +1,15 @@
# systemd unit for the ARMA management console checker binary.
[Unit]
Description=ARMA management console checker service
After=network.target

[Service]
Type=simple
ExecStart=/usr/local/armaconsole/app/amcchecker/checker
WorkingDirectory=/usr/local/armaconsole/app/amcchecker
# Restart on any exit; the 1h window with burst 0 disables rate limiting,
# so the service is retried every 5s indefinitely.
Restart=always
RestartSec=5s
StartLimitInterval=1h
StartLimitBurst=0

[Install]
WantedBy=multi-user.target

9
checker/go.mod Normal file
View file

@ -0,0 +1,9 @@
module tehiz.ru/console/checker
go 1.14
require (
github.com/gorilla/mux v1.8.0
github.com/sirupsen/logrus v1.7.0
golang.org/x/text v0.3.4
)

12
checker/go.sum Normal file
View file

@ -0,0 +1,12 @@
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

397
checker/main.go Normal file
View file

@ -0,0 +1,397 @@
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"html/template"
"net/http"
"os"
"os/exec"
"strings"
"github.com/gorilla/mux"
log "github.com/sirupsen/logrus"
"golang.org/x/text/language"
"golang.org/x/text/message"
)
const page = `<!DOCTYPE html>
<html>
<head>
<title>{{ .PageTitle }}</title>
<meta charset="utf-8">
<meta http-equiv="X-UA-COMPATIBLE" content="IE=edge">
<meta content="width=device-width, initial-scale=1, maximum-scale=1, user-scalable=no" name="viewport">
<link rel="shortcut icon" type="image/png" href="/static/adminlte/images/favicon.png" />
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/fontawesome-free/css/all.min.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/additions/ionicons.min.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/jquery-ui/jquery-ui.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/ion-rangeslider/css/ion.rangeSlider.min.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/bootstrap-slider/css/bootstrap-slider.min.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/icheck-bootstrap/icheck-bootstrap.min.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/daterangepicker/daterangepicker.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/toastr/toastr.min.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/plugins/select2/css/select2.min.css">
<link rel="stylesheet" type="text/css" href="/static/adminltemod/plugins/datatables.min.css" />
<link rel="stylesheet" type="text/css" href="/static/adminltemod/css/armaDatatables.css" />
<link rel="stylesheet" type="text/css" href="/static/adminltemod/css/bootstrap-dialog.css" />
<link rel="stylesheet" type="text/css" href="/static/adminlte/dist/css/adminlte.css">
<link rel="stylesheet" type="text/css" href="/static/adminltemod/css/adminltemod.css">
<link rel="stylesheet" type="text/css" href="/static/adminlte/additions/googleapis.css">
<link rel="stylesheet" type="text/css" href="/static/adminltemod/css/bootstrap-datetimepicker.css">
</head>
<body class="hold-transition login-page" style="height: auto;">
<div class="login-box">
<div class="login-logo">
<a href="#"><img class="w-100" src="/static/adminlte/images/logo_lg.svg" alt="logo" ></a>
</div>
<div class="card">
<div class="card-body login-card-body">
<div class="row">
<div class="col">
<p class="login-box-msg">{{ .CardTitle }}</p>
</div>
</div>
<div class="row">
<div class="col">
<div class="progress">
<div id="progress" class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" style="width: 10%" aria-valuenow="10"
aria-valuemin="0" aria-valuemax="100"></div>
</div>
</div>
</div>
<div class="row">
<div class="col" style="margin-top: 1em;">
<button class="btn btn-primary .btn-sm" type="button" data-toggle="collapse" data-target="#collapseExample" aria-expanded="false" aria-controls="collapseExample">
{{ .Details }}
</button>
</div>
</div>
<div class="collapse" id="collapseExample">
<div class="row">
<div class="col">
<table id="serviceList" class="table table-hover dataTable">
<thead>
<th>{{ .ServiceName }}</th>
<th>{{ .Status }}</th>
<th>{{ .SubStatus }}</th>
</thead>
<tbody></tbody>
</table>
</div>
</div>
</div>
</div>
<!-- /.login-card-body -->
</div>
</div>
<!-- /.login-box -->
<script src="/static/adminlte/plugins/jquery/jquery.min.js"></script>
<script src="/static/adminlte/plugins/bootstrap/js/bootstrap.bundle.min.js "></script>
<script src="/static/adminlte/plugins/moment/moment-with-locales.min.js"></script>
<script src="/static/adminlte/plugins/inputmask/min/jquery.inputmask.bundle.min.js"></script>
<script src="/static/adminlte/plugins/bs-custom-file-input/bs-custom-file-input.min.js "></script>
<script src="/static/adminlte/plugins/bootstrap-switch/js/bootstrap-switch.min.js"></script>
<script src="/static/adminlte/plugins/select2/js/select2.full.min.js "></script>
<script src="/static/adminlte/plugins/toastr/toastr.min.js"></script>
<script src="/static/adminlte/plugins/jquery-ui/jquery-ui.min.js"></script>
<script src="/static/adminltemod/plugins/daterangepicker/daterangepicker.js"></script>
<script src="/static/adminlte/plugins/ion-rangeslider/js/ion.rangeSlider.min.js"></script>
<script src="/static/adminlte/plugins/bootstrap-slider/bootstrap-slider.min.js"></script>
<script src="/static/adminltemod/js/overlay.js"></script>
<!-- Resolve conflict in jQuery UI tooltip with Bootstrap tooltip -->
<script>
$.widget.bridge('uibutton', $.ui.button);
</script>
<script src="/static/adminltemod/js/bootstrap-dialog.js"></script>
<script src="/static/adminltemod/js/bootstrap3-typeahead.js"></script>
<script src="/static/adminltemod/js/multitypeahead.js"></script>
<script src="/static/adminltemod/plugins/datatables.min.js"></script>
<script src="/static/adminltemod/js/infoTable.js"></script>
<script src="/static/adminlte/dist/js/adminlte.js"></script>
<script src="/static/adminltemod/js/adminltemod.js"></script>
<script src="/static/adminltemod/js/bootstrap-dialog-trans.js"></script>
<script src="/static/adminltemod/js/quick_edit.js"></script>
<!-- timepicker -->
<script type="text/javascript" src="/static/adminltemod/js/bootstrap-datetimepicker.js"></script>
<script type="text/javascript" src="/static/adminltemod/js/jquery.collapser.min.js"></script>
<script type="text/javascript">
$(document).ready(function () {
let table = $('#serviceList').DataTable({
ajax: {
url: window.location.origin + "/state/",
dataSrc: "items"
},
paging: false,
info: false,
searching: false,
ordering: false,
drawCallback: function (settings) {
let total = $('#serviceList').find('tr').length-1;
let good = $('#serviceList').find('.badge-count').length;
$("#progress").attr("aria-valuemax", total);
$("#progress").attr("aria-valuenow", good);
$("#progress").attr("style", "width: " + (good * 100) / total + "%");
// Need to check that we can get login page
if (total === good) {
$.ajax({
url: window.location.origin,
complete: function (xhr, textStatus) {
if (xhr.status == 200) {
window.location.replace(window.location.origin);
}
}
});
}
},
columns: [
{
data: "name",
render: function (data, type, row, meta) {
return data.split(".")[0].replace("amc", "");;
}
},
{
data: "active_state",
render: function (data, type, row, meta) {
var css_class = "badge-danger";
if (data === "active") {
css_class = "badge-success badge-count";
} else if (data === "activating") {
css_class = "badge-warning";
} else if (data === "inactive") {
css_class = "bg-warning disabled";
}
return '<span class="badge ' + css_class +'"">' + data +'</span>';
}
},
{
data: "sub_state",
render: function (data, type, row, meta) {
var css_class = "badge-danger";
if (data === "running") {
css_class = "badge-success";
} else if (data === "start"){
css_class = "badge-warning";
} else if (data === "dead") {
css_class = "bg-warning disabled";
}
return '<span class="badge ' + css_class +'"">' + data +'</span>';
}
}
]
});
setInterval(function () {
table.ajax.reload();
}, 1000);
});
</script>
</body>
</html>`
// Context holds the localized strings injected into the loading-page template.
type Context struct {
	PageTitle   string
	CardTitle   string
	Details     string
	ServiceName string
	Status      string
	SubStatus   string
}

// Item describes the systemd state of one monitored service, as consumed by
// the page's DataTable ("name", "active_state", "sub_state" columns).
type Item struct {
	Name     string `json:"name"`
	State    string `json:"active_state"`
	SubState string `json:"sub_state"`
}

// Response is the JSON payload returned by the state endpoint: an overall
// status, an error reason when Status is "error", and the per-service items.
type Response struct {
	Status string `json:"status"`
	Reason string `json:"reason"`
	Items  []Item `json:"items"`
}
// Send serializes the response as JSON and writes it to w with the proper
// Content-Type. On a marshalling failure it logs and replies with HTTP 500.
func (response Response) Send(w http.ResponseWriter) {
	payload, err := json.Marshal(response)
	if err == nil {
		w.Header().Set("Content-Type", "application/json")
		w.Write(payload)
		return
	}
	log.Errorf("Can't serialize stat: %v\n", err.Error())
	w.WriteHeader(http.StatusInternalServerError)
}
// Overall status values reported in Response.Status.
const (
	StatusOK  = "ok"
	StatusErr = "error"
)
func checkService(w http.ResponseWriter, r *http.Request) {
var response Response
items, err := checkServices()
if err != nil {
log.Errorf("Can't get status: %v\n", err.Error())
response.Status = StatusErr
response.Reason = err.Error()
response.Send(w)
return
}
response.Status = StatusOK
response.Items = items
response.Send(w)
}
// renderPage serves the localized "services are loading" page. It picks a
// Russian message printer when the Accept-Language header asks for Russian,
// otherwise falls back to English.
func renderPage(w http.ResponseWriter, r *http.Request) {
	tags, _, err := language.ParseAcceptLanguage(r.Header.Get("Accept-Language"))
	if err != nil {
		log.Errorf("Got error parsing Accept-Language header: %v", err.Error())
	}
	log.Infof("Got languages: %v", tags)

	templ, err := template.New("page").Parse(page)
	if err != nil {
		log.Errorf("Can't parse template: %v", err.Error())
		w.WriteHeader(http.StatusInternalServerError)
		return
	}

	// Compare only the primary-subtag prefix ("ru") so regional variants
	// such as ru-RU also match. `printer` avoids shadowing the builtin print.
	var printer *message.Printer
	for _, tag := range tags {
		if fmt.Sprintf("%v", tag)[:2] == fmt.Sprintf("%v", language.Russian) {
			log.Debugf("Creating printer for lang %v", tag)
			printer = message.NewPrinter(tag)
			break
		}
	}
	if printer == nil {
		log.Infof("Create default printer")
		printer = message.NewPrinter(language.English)
	}

	context := Context{
		PageTitle:   printer.Sprintf("PageTitle", "Loading"),
		CardTitle:   printer.Sprintf("CardTitle", "Please wait, services are loading"),
		Details:     printer.Sprintf("Details", "Show details"),
		ServiceName: printer.Sprintf("ServiceName", "Service name"),
		Status:      printer.Sprintf("Status", "Status"),
		SubStatus:   printer.Sprintf("SubStatus", "Sub status"),
	}

	// Render into a buffer first so a template failure can still answer 500.
	var buf bytes.Buffer
	if err = templ.Execute(&buf, context); err != nil {
		log.Errorf("Can't render template: %v", err.Error())
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
	w.Write(buf.Bytes())
}
// httpMatch is a gorilla/mux matcher admitting only requests that originate
// from the local host.
//
// Fix: the original sliced r.RemoteAddr with fixed bounds ([:5], [:9]),
// which panics on any address shorter than the bound (e.g. an empty
// RemoteAddr in tests or malformed peers). Prefix checks are safe for
// arbitrary input and keep the exact same accept set.
func httpMatch(r *http.Request, rm *mux.RouteMatch) bool {
	// RemoteAddr is "host:port", e.g. "127.0.0.1:54321" or "[::1]:54321".
	if strings.HasPrefix(r.RemoteAddr, "[::1]") ||
		strings.HasPrefix(r.RemoteAddr, "127.0.0.1") ||
		strings.HasPrefix(r.RemoteAddr, "localhost") {
		return true
	}
	log.Error("Bad remote addr")
	return false
}
// init registers the Russian and English message catalogs used by renderPage.
// The keys mirror the fields of Context.
func init() {
	translations := []struct {
		key, ru, en string
	}{
		{"PageTitle", "Загрузка", "Loading"},
		{"CardTitle", "Пожалуйста, подождите, сервисы загружаются", "Please wait, services are loading"},
		{"Details", "Подробности", "Show details"},
		{"ServiceName", "Сервис", "Service name"},
		{"Status", "Статус", "Status"},
		{"SubStatus", "Подстатус", "Sub status"},
	}
	for _, tr := range translations {
		message.SetString(language.Russian, tr.key, tr.ru)
		message.SetString(language.English, tr.key, tr.en)
	}
}
// services lists the systemd units whose state is polled by checkServices.
var services = [...]string{"amccore.service", "amccelery.service", "amccelerybeat.service", "amccorrelator.service", "amcclient.service", "elasticsearch.service", "amcvector.service"}

// showProperty runs `systemctl show -p <property> --value <unit>` and returns
// the output with its first trailing newline removed.
func showProperty(unit, property string) (string, error) {
	var out bytes.Buffer
	cmd := exec.Command("systemctl", "show", "-p", property, "--value", unit)
	cmd.Stdout = &out
	if err := cmd.Run(); err != nil {
		return "", err
	}
	return strings.Replace(out.String(), "\n", "", 1), nil
}

// checkServices queries systemd for the ActiveState and SubState of every
// monitored unit. It fails fast on the first systemctl error.
func checkServices() ([]Item, error) {
	result := make([]Item, 0)
	for _, service := range services {
		state, err := showProperty(service, "ActiveState")
		if err != nil {
			log.Errorf("Can't get service active state: %v", err.Error())
			return nil, err
		}
		subState, err := showProperty(service, "SubState")
		if err != nil {
			log.Errorf("Can't get service sub state: %v", err.Error())
			return nil, err
		}
		result = append(result, Item{
			Name:     service,
			State:    state,
			SubState: subState,
		})
	}
	return result, nil
}
// main wires up logging, parses the -port flag and serves the two endpoints
// (state JSON at "/", loading page at "/page"), both restricted to localhost.
func main() {
	log.SetFormatter(&log.TextFormatter{})
	log.SetOutput(os.Stdout)

	port := flag.Int("port", 9080, "Port for work")
	flag.Parse()
	log.Info("Starting")

	router := mux.NewRouter()
	router.HandleFunc("/", checkService).Methods("GET").MatcherFunc(httpMatch)
	router.HandleFunc("/page", renderPage).Methods("GET").MatcherFunc(httpMatch)

	// Fix: ListenAndServe only returns on failure; the original discarded the
	// error, so a busy port or bind failure exited silently with status 0.
	if err := http.ListenAndServe(fmt.Sprintf(":%v", *port), router); err != nil {
		log.Fatalf("Server stopped: %v", err)
	}
}

1
cicd/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
/tmp/

84
cicd/config.yml Normal file
View file

@ -0,0 +1,84 @@
repo_name: console_core
type: component
projects:
amccore:
variables:
packages:
amccore:
deb:
files:
- include:
- 'deb/skeleton/usr'
- 'deb/skeleton/var'
exclude:
- '.gitkeep'
dst_dir: ''
- include:
- 'assets'
- 'company'
- 'console'
- 'core'
- 'correlation'
- 'dashboard'
- 'devices'
- 'events'
- 'incident'
- 'incident_export'
- 'inputs'
- 'license_info'
- 'logstash'
- 'manage.py'
- 'ncircc'
- 'networkmap'
- 'perms'
- 'rotation'
- 'storage'
- 'users'
- 'notifications'
exclude:
- '.gitignore'
- 'console/static'
- '*test*'
dst_dir: 'usr/local/armaconsole/app'
- include:
- 'console/static'
dst_dir: 'var/www/armaconsole/public'
control:
maintainer: 'arma'
section: 'admin'
depends: 'nginx, sudo, python3, python3-pip, python3-venv, redis, redis-server, gettext, elasticsearch (= 7.12.0), golang, vector (= 0.19.1), postgresql, rabbitmq-server'
pre-depends: 'gcc, make, libpq-dev, python3-dev, openssl, ca-certificates, bash, default-jre, apt-utils, postgresql-contrib'
priority: 'optional'
description: |
ARMA management console (AMC)
AMC manages sensors and monitors events and incidents
arch:
- amd64
amcchecker:
variables:
exe_path: 'checker'
packages:
amcchecker:
deb:
files:
- include:
- 'checker/deb/skeleton/DEBIAN'
- 'checker/deb/skeleton/usr'
exclude:
- '.gitkeep'
dst_dir: ''
- include:
- 'checker/checker'
dst_dir: 'usr/local/armaconsole/app/amcchecker'
control:
maintainer: 'arma'
section: 'admin'
depends: ''
pre-depends: ''
priority: 'optional'
description: |
ARMA management console (AMC) service
AMC manages sensors and monitors events and incidents
arch:
- amd64

11
cicd/go_test.sh Normal file
View file

@ -0,0 +1,11 @@
#!/bin/bash
# Run `go test` with coverage for every package of the Go module.
set -e # fail on any error

# Module path is the second field of the `module` line in go.mod.
# Fix: awk reads the file directly — the original piped it through a
# useless `cat`.
PKG=$(awk '/^module/ {print $2}' go.mod)
PKG_LIST=$(go list ${PKG}/... )

for CUR_PKG in $PKG_LIST
do
    go test -timeout 30s -coverprofile=/tmp/go-code-cover $CUR_PKG
done

87
cicd/integration_tests.sh Normal file
View file

@ -0,0 +1,87 @@
#!/bin/bash
# Integration-test entry point: waits for the license client, elasticsearch
# and redis to come up, then runs the pytest "integration" suite under
# coverage and emits an HTML coverage report.

# On exit: kill background jobs (the license client) and close log descriptors.
trap 'p=$(jobs -p); if [ "$p" != "" ]; then kill -s 9 $p; fi; rm_fd' EXIT
set -x

# Mirror all stdout/stderr into a log file via fd 3.
log_file_path="amc_integ_tests.log"
exec 3> >(tee -a $log_file_path)
exec 1>&3
exec 2>&3

rm_fd() {
    exec 1>&-
    exec 2>&-
    exec 3>&-
    wait
}

CUR_FILE_PATH=$(dirname "$0")
CUR_FILE_PATH=$(cd "$CUR_FILE_PATH" && pwd)
PRJ_ROOT_PATH=$(dirname "$CUR_FILE_PATH")

# Maximum number of poll attempts per dependency.
timeout=10

# check <counter> <service>: abort the run if <counter> reached the timeout.
function check {
    if [ "$1" == "$timeout" ]; then
        echo "timeout. connection to $2 failed"
        exit 1
    else
        echo "connection to $2 established"
    fi
}

# Builds and starts the license client in the background; sets client_counter.
. $CUR_FILE_PATH/up_license_client.sh
check $client_counter "license_client"

# Poll elasticsearch until it answers HTTP 200.
response=0
counter=0
while [ "$response" != 200 ] && [ "$counter" != "$timeout" ]; do
    response=$(curl -s -o /dev/null -w "%{http_code}" -u elastic:changeme -X GET http://elasticsearch:9200/)
    sleep 5s
    counter=$(( $counter + 1 ))
done
check $counter "elasticsearch"

# Poll redis until it answers PONG.
response=0
counter=0
re=^.*PONG.*$
while ! [[ $response =~ ${re} ]] && [ "$counter" != "$timeout" ]; do
    response=$(echo "PING" | nc -w 2 redis 6379)
    sleep 5s
    counter=$(( $counter + 1 ))
done
check $counter "redis"

set -ex
# if [ -d test_env ]; then
#   rm -rf test_env
#   echo "old env was removed"
# fi
#python3 -m virtualenv test_env
#source test_env/bin/activate
pip install --upgrade pip
pip install -r requirements_test.txt
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate
DJANGO_SETTINGS_MODULE=console.settings.test coverage run -m pytest --disable-warnings -m integration
coverage html -d public/test_coverage/

76
cicd/live_fw_tests.sh Normal file
View file

@ -0,0 +1,76 @@
#!/bin/bash
# Live-firewall test entry point: waits for the license client and a live
# firewall, then runs the pytest "live_firewall" suite under coverage.
#
# Fix: the shebang was /bin/sh, but the script uses the bash-only `function`
# keyword and `==` inside [ ]; on systems where /bin/sh is dash it fails to
# parse. Run under bash explicitly (same as integration_tests.sh).

# On exit: kill background jobs (the license client) and close log descriptors.
trap 'p=$(jobs -p); if [ "$p" != "" ]; then kill -s 9 $p; fi; rm_fd' EXIT
set -ex

# Mirror all stdout/stderr into a log file via fd 3.
log_file_path="amc_live_fw_tests.log"
exec 3> >(tee -a $log_file_path)
exec 1>&3
exec 2>&3

rm_fd() {
    exec 1>&-
    exec 2>&-
    exec 3>&-
    wait
}

CUR_FILE_PATH=$(dirname "$0")
CUR_FILE_PATH=$(cd "$CUR_FILE_PATH" && pwd)
PRJ_ROOT_PATH=$(dirname "$CUR_FILE_PATH")

timeout=16 #1 minute 20 seconds

# check <counter> <service>: abort the run if <counter> reached the timeout.
function check {
    if [ "$1" == "$timeout" ]; then
        echo "timeout. connection to $2 failed"
        exit 1
    else
        echo "connection to $2 established"
    fi
}

# Builds and starts the license client in the background; sets client_counter.
. $CUR_FILE_PATH/up_license_client.sh
check $client_counter "license_client"

# if [ -d test_env ]; then
#   rm -rf test_env
#   echo "old env was removed"
# fi
# python3 -m virtualenv test_env
# source test_env/bin/activate
pip install --upgrade pip
pip install -r requirements_test.txt
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate

# Retry a single probe test until the firewall responds or we time out.
set +e
response=1
counter=0
while [ "$response" != 0 ] && [ "$counter" != "$timeout" ]; do
    DJANGO_SETTINGS_MODULE=console.settings.test pytest --disable-warnings devices/tests/test_firewall_live.py -k 'test_live_get_firewall_status_online'
    response=$(echo $?)
    sleep 5s
    counter=$(( $counter + 1 ))
done
check $counter "live.firewall"
set -e

DJANGO_SETTINGS_MODULE=console.settings.test coverage run -m pytest --disable-warnings -m live_firewall
coverage html -d public/test_coverage/

98
cicd/scripts/build.py Normal file
View file

@ -0,0 +1,98 @@
#! /usr/bin/python3
import os
import argparse
import subprocess
import re
import shutil
from pathlib import Path
from distutils.dir_util import copy_tree
import fileinput
from dotenv import dotenv_values
def parse_arguments() -> argparse.Namespace:
    """Parse the build CLI: an optional component name and a required version.

    Returns:
        Namespace with ``name`` (str or None) and ``version`` (str).
    """
    parser = argparse.ArgumentParser()
    options = (
        (("-n", "--name"),
         dict(required=False, type=str, help="Internal repository part to be built")),
        (("-v", "--version"),
         dict(required=True, type=str, help="Repository version")),
    )
    for flags, kwargs in options:
        parser.add_argument(*flags, **kwargs)
    return parser.parse_args()
def _patch_version(settings_path: str, version: str) -> None:
    """Rewrite the SITE_INFO 'version' entry in the Django settings in place."""
    inside_site_info = False
    with fileinput.input(files=settings_path, inplace=True) as file_lines:
        for line in file_lines:
            if re.search('SITE_INFO = {', line) is not None:
                inside_site_info = True
            if inside_site_info and re.search(' *}', line) is not None:
                inside_site_info = False
            if inside_site_info:
                out = re.sub(r"(.*version': )[^,]*", rf"\g<1>'{version}'", line)
            else:
                out = line
            if out is not None:
                print(out, flush=True, end="")


def _ensure_no_localhost(env_js_path: str) -> None:
    """Abort the build if enviroments.js still points the UI at localhost."""
    with open(env_js_path, 'r') as file:
        for line in file:
            if re.match(r'.*localhost: *.*localhost.*', line) is not None:
                print('Error: localhost in enviroments.js')
                exit(1)


def _run_npm(cmd: list, cwd: str, label: str) -> None:
    """Run an npm command in *cwd*; exit(1) with a message on failure."""
    res = subprocess.Popen(cmd, cwd=cwd)  # , env=react_env
    res.wait()
    if res.returncode != 0:
        print(f"{label} failed", flush=True)
        exit(1)
    print(f"{label} successed", flush=True)


def main() -> None:
    """Build the AMC React frontend and embed it into the Django backend tree.

    For the 'amccore' component (or when no component name is given):
      1. patch the version string inside console/settings/base.py SITE_INFO;
      2. refuse to build if enviroments.js still references localhost;
      3. run `npm ci` and `npm run build`;
      4. move the built index.html and static assets into the backend.
    """
    args = parse_arguments()
    if args.name is not None and args.name != 'amccore':
        return
    front_path = "frontend"
    back_path = "console"
    console_base_path = "console/settings/base.py"
    # PUBLIC_URL for react app
    react_env = dotenv_values('deb/react.env')
    os.environ['PUBLIC_URL'] = react_env['PUBLIC_URL']
    _patch_version(console_base_path, args.version)
    _ensure_no_localhost(f'{front_path}/src/enviroments/enviroments.js')
    # -----------------------------build React-------------------------------------
    _run_npm(['npm', 'ci'], front_path, "npm ci")
    _run_npm(['npm', 'run', 'build'], front_path, "'npm run build'")
    shutil.rmtree(f'{front_path}/node_modules')
    shutil.move(f'{front_path}/build/index.html', f'{back_path}/templates/console/index.html')
    if os.path.exists(f'{back_path}/static/react'):
        shutil.rmtree(f'{back_path}/static/react')
    Path(f'{back_path}/static/react').mkdir(parents=True, exist_ok=True)
    # Fix: distutils.dir_util.copy_tree is deprecated and removed in
    # Python 3.12; shutil.copytree(dirs_exist_ok=True) is the supported
    # replacement and handles the pre-created destination directory.
    shutil.copytree(f'{front_path}/build/', f'{back_path}/static/react/', dirs_exist_ok=True)


if __name__ == "__main__":
    main()

62
cicd/unit_tests.sh Normal file
View file

@ -0,0 +1,62 @@
#!/bin/bash
# Unit-test entry point: waits for the license client, resets the local
# sqlite database, then runs the pytest "unit" suite under coverage.
#
# Fix: the shebang was /bin/sh, but the script uses the bash-only `function`
# keyword and `==` inside [ ]; on systems where /bin/sh is dash it fails to
# parse. Run under bash explicitly (same as integration_tests.sh).

# On exit: kill background jobs (the license client) and close log descriptors.
trap 'p=$(jobs -p); if [ "$p" != "" ]; then kill -s 9 $p; fi; rm_fd' EXIT
set -ex

# Mirror all stdout/stderr into a log file via fd 3.
log_file_path="amc_unit_tests.log"
exec 3> >(tee -a $log_file_path)
exec 1>&3
exec 2>&3

rm_fd() {
    exec 1>&-
    exec 2>&-
    exec 3>&-
    wait
}

CUR_FILE_PATH=$(dirname "$0")
CUR_FILE_PATH=$(cd "$CUR_FILE_PATH" && pwd)
PRJ_ROOT_PATH=$(dirname "$CUR_FILE_PATH")

# check <counter> <service>: abort the run if <counter> reached the timeout.
# NOTE(review): $timeout is never assigned in this script (unlike
# integration_tests.sh, which sets timeout=10) — confirm whether the sourced
# up_license_client.sh is expected to provide it.
function check {
    if [ "$1" == "$timeout" ]; then
        echo "timeout. connection to $2 failed"
        exit 1
    else
        echo "connection to $2 established"
    fi
}

# Builds and starts the license client in the background; sets client_counter.
. $CUR_FILE_PATH/up_license_client.sh
check $client_counter "license_client"

# if [ -d test_env ]; then
#   rm -rf test_env
#   echo "old env was removed"
# fi
# Start from a clean sqlite database so migrations run deterministically.
if [ -f db.sqlite3 ]; then
    rm -f db.sqlite3
    echo "old db.sqlite3 was removed"
fi
#python3 -m virtualenv test_env
#source test_env/bin/activate
pip install --upgrade pip
pip install -r requirements_test.txt
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations
DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate
DJANGO_SETTINGS_MODULE=console.settings.test coverage run -m pytest --disable-warnings -m unit
coverage html -d public/test_coverage/

64
cicd/up_license_client.sh Normal file
View file

@ -0,0 +1,64 @@
#! /bin/sh
# Build and launch the license client in the background, then poll its HTTP
# endpoint until it answers (200/404) or the attempt budget is exhausted.
# Sourced by the test entry scripts; exports client_counter for their checks.

# Remember the caller's -e flag so we can restore it at the end (this file is
# sourced, so `set` changes would otherwise leak into the caller).
cur_flags=$-
echo "current flags: $cur_flags"
set -e

CUR_PATH=$(dirname "$0")
CUR_PATH=$(cd "$CUR_PATH" && pwd)
ROOT_PATH=$(dirname "$CUR_PATH")
LICENSE_PATH="$ROOT_PATH/license"

echo "starting license client"
echo "license client home path: $LICENSE_PATH"
cd $LICENSE_PATH

# Fix: `echo -e` is a bashism — under dash (the usual /bin/sh on Debian) the
# "-e" is printed literally, corrupting /etc/hosts. printf is POSIX and
# interprets \t portably.
printf '127.0.0.1\tlicense-client\n' >> /etc/hosts
touch /etc/machine-id

export GOPROXY='http://nexus.iwarma.ru/repository/proxy-go/' \
    GOPRIVATE='gitlab.iwarma.ru' \
    GONOPROXY='gitlab.iwarma.ru'

# Credentials for the private Go module proxy come from the CI environment.
NETRC_PATH="$CUR_PATH/.netrc"
echo "machine gitlab.iwarma.ru" > $NETRC_PATH
echo "login $NETRC_USER" >> $NETRC_PATH
echo "password $NETRC_TOKEN" >> $NETRC_PATH
mv $NETRC_PATH /root/ #maybe you should comment some extra dns in /etc/resolv.conf

go mod tidy
go build -ldflags "-s -w"
./client --config config_example.json </dev/null >/dev/null 2>&1 &

# Poll until the client answers; 404 also counts as "up" (route exists check).
client_response=0
client_counter=0
client_timeout=10
set +e
while [ "$client_response" != 200 ] && [ "$client_response" != 404 ] && [ "$client_counter" != "$client_timeout" ]; do
    client_response=$(curl -s -o /dev/null -w "%{http_code}" -X GET http://license-client:8050/license/)
    sleep 1s
    client_counter=$(( $client_counter + 1 ))
done
echo "client response: $client_response"
echo "client counter: $client_counter"
#client_PID=$(pidof client)
cd $ROOT_PATH

# Restore the caller's errexit setting.
case "e" in
    "$cur_flags")
        set -e
        ;;
    *)
        set +e
        ;;
esac

0
company/__init__.py Normal file
View file

14
company/admin.py Normal file
View file

@ -0,0 +1,14 @@
from django.contrib import admin
from company.models.company import Company
from company.models.location import LocationCode
@admin.register(Company)
class CompanyAdmin(admin.ModelAdmin):
    """Default Django admin for Company; no customization yet."""
    pass
@admin.register(LocationCode)
class LocationCodeAdmin(admin.ModelAdmin):
    """Default Django admin for the ISO-3166-2 location-code directory."""
    pass

5
company/apps.py Normal file
View file

@ -0,0 +1,5 @@
from django.apps import AppConfig
class CompanyConfig(AppConfig):
    """Django app config for the company (NCIRCC organization profile) app."""
    name = 'company'

View file

View file

37
company/models/company.py Normal file
View file

@ -0,0 +1,37 @@
from django.db import models
from django.utils.translation import gettext_lazy
from company.models.location import LocationCode
from ncircc.enums.notifications import AffectedSystemFunction
class Company(models.Model):
    """Model with Company information for NCIRCC.

    Organization profile used for notifications to the NCIRCC
    (the Russian national CERT, cert.gov.ru).
    """

    # Human-readable organization name.
    name = models.CharField(gettext_lazy('Name of the organization'), max_length=127)
    # Whether the organization is a critical information infrastructure (CII) subject.
    is_cii = models.BooleanField(gettext_lazy('Subject CII'), default=False)
    # ISO-3166-2 country/region code; preserved as NULL if the code row is deleted.
    location = models.ForeignKey(
        LocationCode, null=True,
        on_delete=models.SET_NULL,
        verbose_name=gettext_lazy('Country/Region code'),
    )
    city = models.CharField(gettext_lazy('City'), max_length=127)
    # Functional area of the affected system, restricted to the NCIRCC enum.
    affected_system_function = models.CharField(
        gettext_lazy('Affected system function'),
        choices=AffectedSystemFunction.choices,
        default=AffectedSystemFunction.NUCLEAR_POWER.value,
        max_length=127,
    )
    # API token for the NCIRCC endpoint.
    # NOTE(review): null=True without blank=True on a CharField allows two
    # "empty" states (NULL and '') — confirm this is intended.
    api_key = models.CharField(
        gettext_lazy('Token'),
        max_length=127,
        null=True,
        help_text=gettext_lazy('Token access to NCIRCC API'),
    )

    class Meta:
        verbose_name = gettext_lazy('Company')
        verbose_name_plural = gettext_lazy('Companies')

    def __str__(self):
        return self.name

View file

@ -0,0 +1,15 @@
from django.db import models
from django.utils.translation import gettext_lazy
class LocationCode(models.Model):
    """Directory of Country/Region codes in ISO-3166-2 format."""

    # e.g. 'RU-MOS'; 15 chars is ample for ISO-3166-2 codes.
    code = models.CharField(gettext_lazy('code'), max_length=15, help_text='Format from ISO-3166-2')

    class Meta:
        verbose_name = gettext_lazy('Country/Region code')
        verbose_name_plural = gettext_lazy('Country/Region codes')

    def __str__(self) -> str:
        return self.code

View file

View file

@ -0,0 +1,22 @@
from rest_framework import serializers
from company.models.company import Company
from company.serializers.location import LocationSerializer
class CompanySerializer(serializers.ModelSerializer):
    """Serializer for retrieve: renders location as a nested object."""
    location = LocationSerializer()

    class Meta:
        model = Company
        fields = ('name', 'is_cii', 'location', 'city', 'affected_system_function', 'api_key')
class CompanyCreateSerializer(serializers.ModelSerializer):
    """Serializer for creating and updating Company.

    Unlike CompanySerializer, location is accepted as a plain primary key.
    """

    class Meta:
        model = Company
        fields = ('name', 'is_cii', 'location', 'city', 'affected_system_function', 'api_key')

View file

@ -0,0 +1,11 @@
from rest_framework import serializers
from company.models.location import LocationCode
class LocationSerializer(serializers.ModelSerializer):
    """Serializer for an ISO-3166-2 location code (id + code string)."""

    class Meta:
        model = LocationCode
        fields = ('id', 'code')

View file

View file

@ -0,0 +1,48 @@
import logging
from typing import Optional, Dict, Any
from company.models.company import Company
from company.serializers.company import CompanySerializer
_log = logging.getLogger(__name__)
class CompanyCreateAndUpdateService:
    """Creates a new Company or updates an existing one from request data.

    Pass ``company=None`` to create; pass an existing instance to update it
    in place. ``save()`` returns the serialized result.
    """

    def __init__(self, company: Optional[Company], data: Dict[str, Any]):
        _log.debug('Start create or update company')
        self.company = company
        self.data = self.prepare_date(data.copy())

    # NOTE(review): method name has a typo ('date' for 'data') but is kept
    # for backward compatibility with existing callers.
    def prepare_date(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """Swap an incoming 'location' pk for the 'location_id' FK attribute."""
        pk = data.get('location')
        if pk:
            del data['location']
            data['location_id'] = pk
        return data

    def _update(self) -> Company:
        """Apply the prepared fields to the existing company and save it."""
        _log.debug(f'Update company: {self.company}')
        target = self.company
        for field_name, field_value in self.data.items():
            setattr(target, field_name, field_value)
        target.save()
        return target

    def _create(self) -> Company:
        """Insert a new company row from the prepared fields."""
        new_company = Company.objects.create(**self.data)
        _log.debug(f'Create company: {new_company}')
        return new_company

    def save(self) -> Dict:
        """Create or update as appropriate and return serialized data."""
        _log.debug(f'Save company nata: {self.company}')
        result = self._create() if self.company is None else self._update()
        return CompanySerializer(result).data

Binary file not shown.

View file

View file

@ -0,0 +1,98 @@
import pytest
from django.urls import reverse
from rest_framework.test import APIRequestFactory, force_authenticate
from rest_framework import status
from company.models.company import Company
from company.models.location import LocationCode
from company.views.company_api import CompanyApiView
from ncircc.enums.notifications import AffectedSystemFunction
@pytest.mark.django_db
class TestCompanyApi:
    """API tests for the single-company endpoint (create, update, retrieve)."""

    url = reverse('company_api')
    user = None

    @pytest.fixture(autouse=True)
    def setup_test(self, add_user_with_permissions):
        # Superuser so per-action permission checks never interfere here.
        self.user = add_user_with_permissions(username='test_api_user', password='TestApiPass123', is_superuser=True)
        self.location, _ = LocationCode.objects.get_or_create(code='RU-MOS')

    @pytest.mark.unit
    def test_create_compy_with_valid_data(self):
        """Test creating company through api with valid data."""
        # Precondition: no company exists yet.
        count_after = Company.objects.count()
        assert not count_after
        data = {
            'name': 'Test_NAME_company',
            'is_cii': False,
            'location': self.location.id,
            'city': 'Moscow',
            'affected_system_function': AffectedSystemFunction.NUCLEAR_POWER.value,
        }
        request = APIRequestFactory().post(self.url, data)
        force_authenticate(request, self.user)
        view = CompanyApiView.as_view()
        response = view(request)
        count_after = Company.objects.count()
        assert response.status_code == 200
        assert count_after == 1
        assert response.data['name'] == data['name']
        # The response nests the location as {'id': ..., 'code': ...}.
        assert response.data['location']['id'] == self.location.id
        assert response.data['location']['code'] == self.location.code

    @pytest.mark.unit
    def test_create_company_with_not_valid_date(self):
        """Test creating company through api with not valid data."""
        data = {
            'name': 'Test_NAME_company',
        }
        request = APIRequestFactory().post(self.url, data)
        force_authenticate(request, self.user)
        view = CompanyApiView.as_view()
        response = view(request)
        count_after = Company.objects.count()
        assert response.status_code == 400
        # Missing required 'city' must reject the request and create nothing.
        assert count_after == 0
        assert 'city' in response.data

    @pytest.mark.unit
    def test_update_company_with_valid_date(self):
        """Test updating company data through api"""
        Company.objects.create(name='Test_name_1', city='Moscow')
        count_before = Company.objects.count()
        assert count_before == 1
        data = {'name': 'Test_name_2', 'is_cii': True, 'city': 'Moscow'}
        # POST with an existing company acts as an update, not a second create.
        request = APIRequestFactory().post(self.url, data)
        force_authenticate(request, self.user)
        view = CompanyApiView.as_view()
        response = view(request)
        count_after = Company.objects.count()
        assert response.status_code == 200
        assert count_after == count_before
        assert response.data['name'] == data['name']
        assert response.data['is_cii'] == data['is_cii']

    @pytest.mark.unit
    def test_get_company_data_with_created_company(self):
        """GET returns 200 when a company already exists."""
        Company.objects.create(name='Test_name_1', city='Moscow')
        count = Company.objects.count()
        assert count == 1
        request = APIRequestFactory().get(self.url)
        force_authenticate(request, self.user)
        view = CompanyApiView.as_view()
        response = view(request)
        assert response.status_code == 200

    @pytest.mark.unit
    def test_get_company_data_without_company(self, api_client):
        """GET before any company exists still answers 200 with a stub body."""
        api_client.force_authenticate(self.user)
        count = Company.objects.count()
        assert not count
        response = api_client.get(self.url)
        assert response.status_code == status.HTTP_200_OK
        assert response.json() == {"details": "company not initialized"}

View file

@ -0,0 +1,57 @@
import pytest
from company.models.company import Company
from company.models.location import LocationCode
from company.services.company_create_update import CompanyCreateAndUpdateService
from ncircc.enums.notifications import AffectedSystemFunction
@pytest.mark.django_db
class TestCreateOrUpdateCompany:
    """Unit tests for CompanyCreateAndUpdateService (create and partial update)."""

    @pytest.fixture(autouse=True)
    def setup_test(self):
        self.location, _ = LocationCode.objects.get_or_create(code='RU-MOS')
        # 'location' is passed as a pk; the service rewrites it to location_id.
        self.data_for_create = {
            'name': 'TestNAMEcompany',
            'is_cii': False,
            'location': self.location.id,
            'city': 'Moscow',
            'affected_system_function': AffectedSystemFunction.OTHER.value,
            'api_key': ''
        }

    @pytest.mark.unit
    def test_create_company(self):
        """Passing company=None must create a row and return serialized data."""
        count_before = Company.objects.count()
        assert count_before == 0
        data = CompanyCreateAndUpdateService(None, self.data_for_create).save()
        assert data['name'] == 'TestNAMEcompany'
        assert not data['is_cii']
        # Serialized output nests the location object.
        assert data['location'] == {'id': self.location.id, 'code': self.location.code}
        assert data['city'] == 'Moscow'
        assert data['affected_system_function'] == AffectedSystemFunction.OTHER.value
        assert data['api_key'] == ''
        count_after = Company.objects.count()
        assert count_after == 1

    @pytest.mark.unit
    def test_update_partially(self):
        """Updating an existing company must not create a second row."""
        data_for_create = self.data_for_create.copy()
        data_for_create['location_id'] = self.location.id
        del data_for_create['location']
        company = Company.objects.create(**data_for_create)
        count_before = Company.objects.count()
        assert count_before == 1
        data_for_update = {
            'name': 'TestNameCompany2',
            'is_cii': True,
            'affected_system_function': AffectedSystemFunction.NUCLEAR_POWER.value,
        }
        data = CompanyCreateAndUpdateService(company, data_for_update).save()
        count_after = Company.objects.count()
        assert count_after == count_before
        assert data.get('name') == data_for_update.get('name')
        assert data.get('is_cii') == data_for_update.get('is_cii')
        # NOTE(review): expects the Russian display string — presumably
        # NUCLEAR_POWER.value is the localized label itself; confirm against
        # the AffectedSystemFunction enum definition.
        assert data.get('affected_system_function') == 'Атомная энергетика'

View file

@ -0,0 +1,26 @@
import pytest
from django.urls import reverse
from rest_framework.test import APIClient
@pytest.mark.django_db
class TestLocationApi:
    """API tests for the locations list endpoint."""

    url = reverse('locations_api')
    user = None

    @pytest.fixture(autouse=True)
    def setup_test(self, add_user_with_permissions):
        # Superuser + pre-authenticated client for every test in the class.
        self.user = add_user_with_permissions(username='test_api_user', password='TestApiPass123', is_superuser=True)
        self.client = APIClient()
        self.client.force_authenticate(user=self.user)

    @pytest.mark.unit
    def test_get_location_list(self):
        """Endpoint must answer 200 with a non-empty list of {id, code} items."""
        response = self.client.get(self.url)
        assert response.status_code == 200
        payload = response.json()
        assert isinstance(payload, list)
        assert payload
        first = payload[0]
        assert 'id' in first
        assert 'code' in first

10
company/urls.py Normal file
View file

@ -0,0 +1,10 @@
from django.urls import path
from company.views.company_api import CompanyApiView
from company.views.location_api import LocationCodeApi
# URL routes for the company app: the company card endpoint and the
# read-only location reference list.
urlpatterns = [
    path('', CompanyApiView.as_view(), name='company_api'),
    path('locations/', LocationCodeApi.as_view(), name='locations_api')
]

View file

View file

@ -0,0 +1,47 @@
from typing import Optional
from django.http import Http404
from rest_framework.response import Response
from rest_framework.views import APIView
from company.models.company import Company
from company.serializers.company import CompanySerializer, CompanyCreateSerializer
from company.services.company_create_update import CompanyCreateAndUpdateService
from core.mixins import ApiPermissionCheckMixin
from perms.models import Perm
class CompanyApiView(ApiPermissionCheckMixin, APIView):
    """API endpoint for reading and creating/updating the single Company record."""

    action = None
    console_permissions = {'get': [Perm.can_view_company_card], 'post': [Perm.can_edit_company_card]}

    def dispatch(self, request, *args, **kwargs):
        # Map the HTTP verb onto the permission-lookup key used by the mixin;
        # anything other than GET/POST carries no console permission.
        method = request.method
        self.action = method.lower() if method in ('GET', 'POST') else None
        return super().dispatch(request, *args, **kwargs)

    def get_company(self) -> Optional[Company]:
        """Return the first Company row, or None when none exists yet."""
        return Company.objects.first()

    def get(self, request, *args, **kwargs) -> Response:
        """Return the company data, or a placeholder when no company exists."""
        company = self.get_company()
        if company is None:
            return Response({"details": "company not initialized"})
        return Response(CompanySerializer(company).data)

    def post(self, request, *args, **kwargs) -> Response:
        """Create or update the company from the validated request payload."""
        serializer = CompanyCreateSerializer(data=request.data)
        if not serializer.is_valid():
            return Response(serializer.errors, status=400)
        result = CompanyCreateAndUpdateService(self.get_company(), serializer.data).save()
        return Response(result, status=200)

View file

@ -0,0 +1,14 @@
from rest_framework.response import Response
from rest_framework.views import APIView
from company.models.location import LocationCode
from company.serializers.location import LocationSerializer
class LocationCodeApi(APIView):
    """Read-only endpoint returning the full list of location codes."""

    def get(self, request, *args, **kwargs) -> Response:
        """Serialize every LocationCode row and return it as a JSON list."""
        queryset = LocationCode.objects.all()
        return Response(LocationSerializer(queryset, many=True).data)

53
conftest.py Normal file
View file

@ -0,0 +1,53 @@
import os
from pathlib import Path
import pytest
import pytest_asyncio
from channels.db import database_sync_to_async
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from rest_framework.test import APIClient
from core.utils import dtnow
from users.models import UserInfo
# noinspection PyUnresolvedReferences
from console.tests.test_utils import add_user_with_permissions, test_server, get_url
# noinspection PyUnresolvedReferences
from networkmap.tests.migration_fixtures import create_filter_test_data, create_elk_function_test_data
@pytest.fixture(scope='function')
def add_admin_user_migration():
    """Fixture replicating the admin user from the 0003_add_admin_user migration."""
    # Guard clause: nothing to do when the admin user already exists.
    if User.objects.filter(username='admin').exists():
        return
    UserInfo.create_user(
        username='admin',
        password=make_password('nimda'),
        email='admin@example.com',
        is_superuser=True,
        is_staff=True,
        expire_date=dtnow(days=700).date(),
        comment='admin',
    )
@pytest.fixture(scope='function')
def api_client():
    """Provide a fresh DRF APIClient for each test."""
    return APIClient()
@pytest.fixture(autouse=True)
def remove_files_after_test_vector_config():
    """Ensure /tmp/vector exists for the test, then wipe the files it left behind.

    Teardown now iterates with pathlib and removes only regular files: the
    previous ``os.remove`` over ``os.listdir`` raised IsADirectoryError and
    broke teardown if a test created a subdirectory inside the tmp folder.
    """
    tmp_dir = Path('/tmp/vector')
    tmp_dir.mkdir(parents=True, exist_ok=True)
    yield
    for entry in tmp_dir.iterdir():
        if entry.is_file():
            entry.unlink()
@pytest_asyncio.fixture
async def async_admin_user(django_user_model):
    """Return the single existing user, fetched safely from async context."""
    fetch_user = database_sync_to_async(django_user_model.objects.get)
    return await fetch_user()

7
console/__init__.py Normal file
View file

@ -0,0 +1,7 @@
from __future__ import absolute_import, unicode_literals
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
__all__ = ('celery_app',)

34
console/admin.py Normal file
View file

@ -0,0 +1,34 @@
from django.contrib import admin
from django.contrib.auth.models import User
from django.contrib.postgres import fields
from django_json_widget.widgets import JSONEditorWidget
from console import models
from networkmap import models as netmap_models
class IncidentAdmin(admin.ModelAdmin):
    """Admin configuration for incidents: timestamp drill-down, list filters,
    and an interactive editor for JSON fields."""
    date_hierarchy = 'timestamp'
    list_display = ('incident_id', 'timestamp', 'title', 'category', 'importance', 'status', 'assigned_to')
    list_filter = ('category', 'status', 'assigned_to')
    list_display_links = ['incident_id']
    # Render JSONField contents with a structured editor instead of a raw textarea.
    formfield_overrides = {
        fields.JSONField: {'widget': JSONEditorWidget},
    }
class ConnectionAdmin(admin.ModelAdmin):
    """Admin list view for network connections, filterable by source/destination asset."""
    list_display = ['pk', 'src_asset', 'dst_asset', 'src_port', 'dst_port', 'updated']
    list_filter = ['src_asset', 'dst_asset']
    list_display_links = ['pk']
# Register console and network-map models with the default admin site.
# NOTE(review): the original "Re-register UserAdmin" comment was stale — no
# User admin re-registration happens here.
admin.site.register(models.VulnerabilityEffect)
admin.site.register(models.VulnerabilityRecommendations)
admin.site.register(models.Vulnerability)
admin.site.register(models.ConnectionType)
admin.site.register(models.Connection, ConnectionAdmin)
admin.site.register(netmap_models.NetworkMap)

0
console/api/__init__.py Normal file
View file

25
console/api/base.py Normal file
View file

@ -0,0 +1,25 @@
from django.contrib.auth.decorators import login_required, user_passes_test
from rest_framework.decorators import api_view
from rest_framework.response import Response
from console.serializers import CelerySerializer
from core.decorators import log_url
from storage.tasks import dump_db
@log_url
@login_required
@user_passes_test(lambda u: u.is_superuser)
@api_view(('GET',))
def download_db_snapshot(request):
    """Download a fixture restoring the current DB state (test/dev only).

    The optional ``models`` query parameter limits which tables are dumped,
    e.g. ``models=auth,console.userinfo``.
    """
    raw_models = request.GET.get('models', tuple())
    dump_models = raw_models.split(',') if raw_models else raw_models
    async_result = dump_db.apply_async(args=(request.user.pk, dump_models))
    serializer = CelerySerializer(data={'task_id': async_result.id})
    serializer.is_valid()
    return Response(serializer.data)

31
console/api/celery.py Normal file
View file

@ -0,0 +1,31 @@
from celery.result import AsyncResult
from django.contrib.auth.decorators import login_required
from rest_framework.decorators import api_view
from rest_framework.response import Response
from console.serializers import CelerySerializer
from core.decorators import log_url
@log_url
@login_required
@api_view(('GET',))
def check_task_state(request, task_id):
    """Check whether a celery task has finished.

    :param task_id: Celery task ID
    :return: JSON with the task id, a ``finished`` flag and the task result
        (``None`` while the task is not successfully finished)

    Fix: the original had two byte-identical branches, and both called
    ``result.get()`` unconditionally — for an unfinished task that call
    blocks waiting for completion, and for a failed task it re-raises the
    task's exception, which defeats a non-blocking state poll.
    """
    result = AsyncResult(task_id)
    finished = result.successful()
    serializer = CelerySerializer(data={
        'task_id': task_id,
        'finished': finished,
        # Only fetch the value once we know the task completed successfully.
        'result': result.get() if finished else None,
    })
    serializer.is_valid()
    return Response(serializer.data)

View file

@ -0,0 +1,22 @@
import logging
from rest_framework.mixins import RetrieveModelMixin, ListModelMixin
from rest_framework.viewsets import GenericViewSet
from console.models import Connection
from console.serializers import ConnectionSerializer
from core.mixins import ApiPermissionCheckMixin
from perms.models import Perm
_log = logging.getLogger(__name__)
class ConnectionViewSet(ApiPermissionCheckMixin, RetrieveModelMixin, ListModelMixin,
                        GenericViewSet):
    """Read-only (list/retrieve) API for Connection records."""
    serializer_class = ConnectionSerializer
    console_permissions = [Perm.can_view_network]
    # DRF's GenericViewSet resolves its queryset from this class attribute —
    # the inner Meta below is NOT consulted by DRF, so without this attribute
    # get_queryset() raises an assertion error at request time.
    queryset = Connection.objects.all()

    class Meta:
        model = Connection
        queryset = Connection.objects.all()

0
console/api/events.py Normal file
View file

0
console/api/incidents.py Normal file
View file

332
console/api/users.py Normal file
View file

@ -0,0 +1,332 @@
import json
import logging
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import User, Permission, Group
from django.http import JsonResponse
from django.shortcuts import get_object_or_404
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from console import conslog
from console.serializers import AllPermsSerializer, GroupNameSerializer
from core.decorators import log_url
from perms.models import Perm
from perms.services.get_permissions import get_all_linked_permissions, get_linked_permissions_name
from users.serializers import UserSerializers
_log = logging.getLogger(__name__)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
def all_perms(requset):
    """Return every console permission as JSON.

    :param requset: request object
    :return: JSON array with all console permissions
    """
    if requset.method != 'GET':
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    queryset = Permission.objects.all().exclude(name='Can view vulnerabilities')
    return JsonResponse(AllPermsSerializer(queryset, many=True).data, safe=False)
@log_url
@login_required
def get_linked_permissions(request):
    """Return the names of permissions linked to those given in the query string."""
    requested = request.GET.getlist('permissions[]', [])
    return JsonResponse(get_linked_permissions_name(requested))
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
def user_perms(request, pk):
    """Return the permissions of one user.

    :param request: request object
    :param pk: target user primary key
    :return: JSON with the target user's permissions
    """
    if request.method != 'GET':
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    target_user = get_object_or_404(User, pk=pk)
    permissions = Permission.objects.filter(user=target_user)
    return JsonResponse(AllPermsSerializer(permissions, many=True).data, safe=False)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
def all_groups(requset):
    """Return all console groups.

    :param requset: request object
    :return: JSON with all console groups
    """
    if requset.method != 'GET':
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    queryset = Group.objects.all()
    return JsonResponse(GroupNameSerializer(queryset, many=True).data, safe=False)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
def group_perms(request, g_name):
    """Return the permissions of the target group.

    :param request: request object
    :param g_name: target group name
    :return: JSON with the target group's permissions
    """
    if request.method != 'GET':
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    target_group = Group.objects.filter(name=g_name)
    granted = Permission.objects.filter(
        content_type__app_label="perms", group__in=target_group
    ).exclude(name='Can view vulnerabilities')
    return JsonResponse(AllPermsSerializer(granted, many=True).data, safe=False)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
def all_perms_wo_group(request, g_name):
    """Return the console permissions the target group does NOT have.

    :param request: request object
    :param g_name: target group name
    :return: JSON with permissions missing from the target group

    Fix: drops the unused ``perms_count`` variable and replaces the
    Python-side codename accumulation loop with a single ``values_list``
    query.
    """
    if request.method == 'GET':
        target_group = Group.objects.filter(name=g_name)
        granted_codenames = Permission.objects.filter(
            content_type__app_label="perms", group__in=target_group
        ).values_list('codename', flat=True)
        exc_perms = Permission.objects.filter(content_type__app_label="perms").exclude(
            codename__in=list(granted_codenames)
        ).exclude(name='Can view vulnerabilities')
        serializer = AllPermsSerializer(exc_perms, many=True)
        return JsonResponse(serializer.data, safe=False)
    else:
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
def group_users(request, g_name):
    """Return the users assigned to the target group.

    :param request: request object
    :param g_name: target group name
    :return: JSON with the target group's users
    """
    if request.method != 'GET':
        return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)
    members = User.objects.filter(groups__name=g_name).exclude(username__startswith='deleted_')
    return JsonResponse(UserSerializers(members, many=True).data, safe=False)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
def group_users_excluded(request, g_name):
    """Return the active users that are NOT assigned to the target group.

    :param request: request object
    :param g_name: target group name
    :return: JSON with serialized user data

    Fix: the member usernames are now collected with one ``values_list``
    query instead of the previous Python-side accumulation loop.
    """
    member_usernames = User.objects.filter(groups__name=g_name).values_list('username', flat=True)
    available_users = User.objects.exclude(username__in=list(member_usernames)).filter(is_active=True)
    serializer = UserSerializers(available_users, many=True)
    return JsonResponse(serializer.data, safe=False)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
@api_view(['POST'])
def update_group_permission(request, g_name):
    """Replace the target group's permissions with those provided by the user.

    :param request: request object; ``perms`` POST field holds a JSON list of
        permission dicts with ``codename`` keys
    :param g_name: target group name
    :return: JSON with the serialized permissions now assigned to the group

    Fix: the large block of commented-out dead code is removed and the
    codename accumulation loop is replaced by a comprehension.
    """
    # JSON with perms from the top-right table on the 'manage_rights' page.
    table_data = json.loads(request.POST.get('perms'))
    perm_codenames = [item.get('codename') for item in table_data]
    target_group = Group.objects.get(name=g_name)
    # Clear current permissions, then assign the expanded (linked) set.
    target_group.permissions.clear()
    perm_codenames = get_all_linked_permissions(perm_codenames)
    permissions = Permission.objects.filter(codename__in=perm_codenames)
    target_group.permissions.set(permissions)
    serializer = AllPermsSerializer(permissions, many=True)
    log_message = f"User [{request.user}] updated the [[{g_name}]] group permissions. New group permissions: {perm_codenames}"
    conslog.add_info_log(log_message, _log)
    return JsonResponse(serializer.data, safe=False)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
@api_view(['POST'])
def update_group_users(request, g_name):
    """Replace the target group's user set with the one provided by the user.

    :param request: request object; ``users`` POST field holds a JSON list of
        user dicts with ``username`` keys
    :param g_name: target group name
    :return: JSON with the serialized users now assigned to the group

    Fix: the unused ``usernames_before`` / ``users_before_1`` /
    ``users_before_2`` variables are removed; behavior is unchanged (the
    original lazily evaluated its queryset only at serialization time, i.e.
    after the membership change, so it also returned the NEW members).
    """
    target_group = Group.objects.get(name=g_name)
    # Usernames from the bottom-right table, sent as JSON.
    users_new = json.loads(request.POST.get('users'))
    usernames_after = [user.get('username') for user in users_new]
    # Drop all current members, then add the requested ones.
    target_group.user_set.clear()
    for username in usernames_after:
        target_group.user_set.add(User.objects.get(username=username))
    # Evaluated now, so it reflects the freshly assigned membership.
    serializer = UserSerializers(User.objects.filter(groups__name=g_name), many=True)
    log_message = f"User [{request.user}] updated the [[{g_name}]] group users. New group users: {usernames_after}"
    conslog.add_info_log(log_message, _log)
    return JsonResponse(serializer.data, safe=False)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
@api_view(['POST'])
def delete_group_api(request, g_name):
    """Delete the target group.

    :param request: request object
    :param g_name: target group name
    :return: JSON with the deleted group's serialized data

    Fix: reuses the instance already fetched instead of querying the same
    group a second time just to delete it.
    """
    target_group = Group.objects.get(name=g_name)
    # Serialize before deleting so the response still carries the group data.
    serializer = GroupNameSerializer(target_group)
    target_group.delete()
    log_message = f"User [{request.user}] deleted [[{g_name}]] group"
    conslog.add_info_log(log_message, _log)
    return JsonResponse(serializer.data)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
@api_view(['POST'])
def add_new_group(request, g_name):
    """Create a new group named *g_name*.

    :param request: request object
    :param g_name: name for the new group
    :return: serialized group data, or validation errors with HTTP 400

    Fix: removes the commented-out dead call to
    ``serializer.add_new_group(...)``.
    """
    # Serializer expects a dict; the view receives a plain string.
    serializer = GroupNameSerializer(data={'name': g_name})
    if serializer.is_valid():
        serializer.save()
        log_message = f"User [{request.user}] created a [[{g_name}]] group"
        conslog.add_info_log(log_message, _log)
        return Response(serializer.data)
    else:
        log_message = f"User [{request.user}] failed to create a new group [[{g_name}]] due to the error: {serializer.errors}"
        conslog.add_error_log(log_message, _log)
        return JsonResponse(serializer.errors, status=400)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
@api_view(['POST'])
def rename_group(request, g_name, new_g_name):
    """Rename group *g_name* to *new_g_name*.

    :param request: request object
    :param g_name: current group name
    :param new_g_name: new group name
    """
    # Serializer expects a dict; the view receives a plain string.
    serializer = GroupNameSerializer(data={'name': new_g_name})
    if not serializer.is_valid():
        log_message = f"User [{request.user}] failed to rename [[{g_name}]], due to the error: {serializer.errors}"
        conslog.add_error_log(log_message, _log)
        return JsonResponse(serializer.errors, status=400)
    upd_group = Group.objects.get(name=g_name)
    upd_group.name = serializer.data['name']
    upd_group.save()
    log_message = f"User [{request.user}] renamed [[{g_name}]] group with the name: [[{new_g_name}]]"
    conslog.add_info_log(log_message, _log)
    return Response(serializer.data)
@log_url
@login_required
@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
@api_view(['POST'])
def copy_group(request, g_name, new_g_name):
    """Create a copy of group *g_name* under the name *new_g_name*.

    :param request: request object
    :param g_name: source group name
    :param new_g_name: name for the copied group

    Fix: copies all permissions with a single ``permissions.set`` call
    instead of re-fetching every Permission by codename inside a loop
    (previously O(n) queries for the same end state).
    """
    serializer = GroupNameSerializer(data={'name': new_g_name})
    if serializer.is_valid():
        master_group = Group.objects.get(name=g_name)
        slave_group = Group.objects.create(name=serializer.data['name'])
        slave_group.permissions.set(master_group.permissions.all())
        log_message = f"User [{request.user}] created a copy of the [[{g_name}]] group with the name: [[{new_g_name}]]"
        conslog.add_info_log(log_message, _log)
        return Response(serializer.data)
    else:
        log_message = f"User [{request.user}] failed to create copy of the [[{g_name}]], due to the error: {serializer.errors}"
        conslog.add_error_log(log_message, _log)
        return JsonResponse(serializer.errors, status=400)

71
console/api_urls.py Normal file
View file

@ -0,0 +1,71 @@
import logging
from django.urls import include, path, re_path
from rest_framework import routers
from rest_framework.authtoken import views
import console.api.users
from console.api import celery
from console.api.connections import ConnectionViewSet
from console.views.index import page_not_found
from core.views.view_login import LoginView, LogoutView
from core.views.view_settings import ProductVersionView
from networkmap.api import (AssetDangerViewSet, AutoNetmapConnectionsViewSet,
AutoNetmapElementsViewSet, NetmapElementsViewSet,
NetmapGroupsViewSet, UserMapViewSet)
_log = logging.getLogger()

# REST router: registered viewsets get list/detail routes auto-generated.
router = routers.DefaultRouter()
router.register('connections', ConnectionViewSet, basename='connections')
router.register('netmap/static/elements', NetmapElementsViewSet, basename='netmap-elements')
router.register('netmap/auto/elements', AutoNetmapElementsViewSet, basename='auto-netmap-elements')
router.register('netmap/auto/connections', AutoNetmapConnectionsViewSet, basename='auto-netmap-connections')
router.register('netmap/groups', NetmapGroupsViewSet, basename='netmap-groups')
router.register('netmap/maps', UserMapViewSet, basename='netmap-maps')
router.register('netmap/in-danger', AssetDangerViewSet, basename='netmap-in-danger')

# Top-level API URL configuration. The catch-all re_path must stay last:
# it swallows every URL that did not match an earlier pattern.
urlpatterns = (
    path('', include(router.urls)),
    path('devices/', include('devices.urls')),
    path('store/', include('storage.urls')),
    path('assets/', include('assets.urls')),
    path('inputs/', include('inputs.urls')),
    path('license/', include('license_info.urls')),
    path('logstash/', include('logstash.urls')),
    path('dashboard/', include('dashboard.api_urls')),
    path('ncircc/', include('ncircc.urls')),
    path('company/', include('company.urls')),
    path('correlation/', include('correlation.urls')),
    path('netmap/', include('networkmap.urls')),
    path('incidents/', include('incident.urls')),
    path('incident_export/', include('incident_export.urls')),
    path('rotation/', include('rotation.urls')),
    path('endpoint/', include('devices.urls_endpoint')),
    path('users/', include('users.urls')),
    path('notifications/', include('notifications.urls')),
    path('', include('core.urls')),
    path('groups/', console.api.users.all_groups, name='api_get_all_groups'),
    path('groups/<str:g_name>/perms', console.api.users.group_perms, name='api_group_perms'),
    path('groups/except_<str:g_name>', console.api.users.all_perms_wo_group, name='api_get_excluded_group_perms'),
    path('groups/linked_permissions/', console.api.users.get_linked_permissions, name='api_get_linked_permissions'),
    path('groups/<str:g_name>/users', console.api.users.group_users, name='api_get_group_users'),
    path('groups/<str:g_name>/available_users',
         console.api.users.group_users_excluded,
         name='api_get_available_group_users'),
    path('groups/<str:g_name>/update/perms',
         console.api.users.update_group_permission,
         name='api_update_group_permissions'),
    # NOTE(review): 'udpate' typo below is part of the public URL — fixing it
    # would break existing clients; confirm before changing.
    path('groups/<str:g_name>/udpate/users', console.api.users.update_group_users, name='api_update_group_users'),
    path('groups/<str:g_name>/remove', console.api.users.delete_group_api, name='api_delete_group'),
    path('groups/<path:g_name>/add', console.api.users.add_new_group, name='api_add_new_group'),
    path('groups/<str:g_name>/rename/<path:new_g_name>', console.api.users.rename_group, name='api_rename_group'),
    path('groups/<str:g_name>/copy/<path:new_g_name>', console.api.users.copy_group, name='api_copy_group'),
    path('celery/<str:task_id>/', celery.check_task_state, name='celery_check_task_state'),
    path('auth/token/', views.obtain_auth_token, name='auth_get_token'),
    path('events/', include('events.api_urls')),
    path('logout/', LogoutView.as_view(), name='logout'),
    path('login/', LoginView.as_view(), name='api_login'),
    path('product/version/', ProductVersionView.as_view(), name='api_product_version'),
    re_path(r'^', page_not_found, name='not_found'),
)

5
console/apps.py Normal file
View file

@ -0,0 +1,5 @@
from django.apps import AppConfig
class ConsoleConfig(AppConfig):
    """Django application configuration for the core `console` app."""
    name = 'console'

56
console/asgi.py Normal file
View file

@ -0,0 +1,56 @@
import logging
import os
from channels.auth import AuthMiddlewareStack
from channels.db import database_sync_to_async
from channels.routing import ProtocolTypeRouter, URLRouter
from django.contrib.auth.models import AnonymousUser
from django.core.asgi import get_asgi_application
from rest_framework.authtoken.models import Token
from console.routing import websocket_urlpatterns
_log = logging.getLogger(__name__)

# Settings must be configured before the Django ASGI app (and anything that
# touches the ORM) is initialized.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'console.settings.dev')
django_asgi_app = get_asgi_application()
class TokenHeaderAuthMiddleware:
    """Middleware authenticating WebSocket connections by a token in the
    Authorization header.

    It runs last, and only after session authentication failed, i.e. when
    scope['user'] == AnonymousUser().

    Example:
        Authorization: Token vgft67uhgtreerfcgvh678uihvhkugct7iyukv
    """

    def __init__(self, inner):
        self.inner = inner

    async def __call__(self, scope, receive, send):
        headers = dict(scope['headers'])
        if b'authorization' in headers and scope['user'] == AnonymousUser():
            # Guard against malformed headers: the original unconditional
            # two-value unpack raised ValueError on anything that was not
            # exactly "<scheme> <token>".
            parts = headers[b'authorization'].decode().split()
            if len(parts) == 2 and parts[0] == 'Token':
                user = await self.get_user_by_token(parts[1])
                if user:
                    scope['user'] = user
        # Single exit point replaces the duplicated return statements.
        return await self.inner(scope, receive, send)

    @database_sync_to_async
    def get_user_by_token(self, token):
        """Resolve a DRF token key to its user; None when the token is unknown."""
        try:
            return Token.objects.get(key=token).user
        except Token.DoesNotExist:
            return None
# WebSocket connections go through session auth first (AuthMiddlewareStack),
# then fall back to token-header auth before hitting the URL router.
application = ProtocolTypeRouter(
    {
        "websocket": AuthMiddlewareStack(TokenHeaderAuthMiddleware(URLRouter(websocket_urlpatterns)))
    }
)

19
console/celery.py Normal file
View file

@ -0,0 +1,19 @@
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# Celery application bootstrap for the console project.
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'console.settings.dev')

app = Celery('console')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

93
console/conslog.py Normal file
View file

@ -0,0 +1,93 @@
def add_info_log(message, logger):
    """Write *message* to *logger* at INFO severity.

    :param message: text of the log entry
    :param logger: logger instance that receives the entry
    :return: result of ``logger.info`` (``None`` for stdlib loggers)
    """
    return logger.info(message)
def add_error_log(message, logger):
    """Write *message* to *logger* at ERROR severity.

    :param message: text of the log entry
    :param logger: logger instance that receives the entry
    :return: result of ``logger.error`` (``None`` for stdlib loggers)
    """
    return logger.error(message)
def add_warning_log(message, logger):
    """Write *message* to *logger* at WARNING severity.

    :param message: text of the log entry
    :param logger: logger instance that receives the entry
    :return: result of ``logger.warning`` (``None`` for stdlib loggers)
    """
    return logger.warning(message)
def url_access_log(request):
    """Build the log line recording that a user opened a URL.

    :param request: view request; supplies the user and the full URL path
    :return: formatted log string
    """
    return f"User [{request.user}] accessed <{request.get_full_path()}> page"
def object_create_log(instance, obj_type, user=None):
    """Build the log line for an object-creation event.

    :param instance: the created object (model instance)
    :param obj_type: human-readable type of the object
    :param user: the creating user, or None when unknown
    :return: formatted log string
    """
    actor = f'User [{user}]' if user else 'Unknown user'
    return f'{actor} created new [{obj_type}] - [{instance}]'
def object_list_log(user, obj_type):
    """Build the log line for an object-list request.

    :param user: the requesting user
    :param obj_type: human-readable type of the listed objects
    :return: formatted log string
    """
    return f'User [{user}] request a list of [{obj_type}]'
def object_retrieve_log(user, instance, obj_type):
    """Build the log line for a single-object retrieve.

    :param user: the requesting user
    :param instance: the retrieved instance
    :param obj_type: human-readable type of the instance
    :return: formatted log string
    """
    return f'User [{user}] retrieve an object [{instance}] of type [{obj_type}]'
def object_update_log(user, instance, obj_type):
    """Build the log line for an object update.

    :param user: the requesting user
    :param instance: the updated instance
    :param obj_type: human-readable type of the instance
    :return: formatted log string
    """
    return f'User [{user}] perform update of object [{instance}] of type [{obj_type}]'
def object_destroy_log(user, instance, obj_type):
    """Build the log line for an object deletion.

    :param user: the requesting user
    :param instance: the destroyed instance
    :param obj_type: human-readable type of the instance
    :return: formatted log string
    """
    return f'User [{user}] perform destroy of object [{instance}] of type [{obj_type}]'
def form_errors(user, errors):
    """Build the log line for form validation failures.

    :param user: user who submitted the form
    :param errors: list (or other printable collection) of validation errors
    :return: formatted log string
    """
    return f'User [{user}] has form errors: [{errors}]'

View file

View file

View file

@ -0,0 +1,32 @@
import os
from pathlib import Path
from django.conf import settings
from django.core.management.base import BaseCommand
from django.template import Template, Context
# Elastic connection settings, overridable via Django settings.
ELK_URL = getattr(settings, 'ELASTIC_URL', 'localhost:9200')
ELK_LOGIN = getattr(settings, 'ELK_LOGIN', 'elastic')
ELK_PASS = getattr(settings, 'ELK_PASS', 'changeme')
# No default on purpose: this is None when LOGSTASH_CONFIG_DIR is unset,
# which makes the command fail loudly at mkdir time rather than write to a
# surprise location.
VECTOR_CONFIG_DIR = getattr(settings, 'LOGSTASH_CONFIG_DIR')
class Command(BaseCommand):
    """Render the bundled Vector config templates into LOGSTASH_CONFIG_DIR."""
    help = 'Load default Vector configs'

    def handle(self, *args, **options):
        """Render every template with the Elastic credentials and write it out.

        Fixes: explicit UTF-8 encoding on reads/writes (previously
        locale-dependent) and ``parents=True`` on mkdir so the command does
        not fail when intermediate directories are missing.
        """
        templates_path = Path(os.path.abspath(__file__)).parents[0] / 'templates'
        context = Context({
            "elastic_url": ELK_URL,
            "elastic_login": ELK_LOGIN,
            "elastic_password": ELK_PASS,
        })
        target_dir = Path(VECTOR_CONFIG_DIR)
        target_dir.mkdir(parents=True, exist_ok=True)
        for template in os.listdir(templates_path):
            template_text = (templates_path / template).read_text(encoding='utf-8')
            config_content = Template(template_text).render(context)
            (target_dir / template).write_text(config_content, encoding='utf-8')
            print(f'Created {template}')

View file

@ -0,0 +1,34 @@
import logging
import os
from pathlib import Path
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.management.base import BaseCommand
from correlation.services.import_service import ImportRulesService
from incident.models import IncidentRecommendations
_log = logging.getLogger(__name__)
def create_addition():
    """
    During rules export, only those recommendations and effects that are used in the rules are exported.
    Here we manually create some objects that are not used anywhere in the rules
    """
    # get_or_create keeps the command idempotent: re-running the import does
    # not duplicate this recommendation.
    IncidentRecommendations.objects.get_or_create(name="Перевести работу АСУ ТП на резервный ПЛК",
                                                  description="Необходимо перевести работу на резервный ПЛК для "
                                                              "избежания нарушения технологического процесса.")
class Command(BaseCommand):
    help = 'Load default rules'

    def handle(self, *args, **options):
        """Import the bundled correlation rules shipped next to this command.

        Reads rules_console.json from the command's own directory, wraps it in
        an uploaded-file object and feeds it to ImportRulesService.
        """
        rules_path = Path(os.path.abspath(__file__)).parents[0]
        with open(f'{rules_path}/rules_console.json', 'rb') as rule_file:
            file = SimpleUploadedFile("rules.json", rule_file.read())
            # Seed recommendations that are referenced by no rule (see
            # create_addition) before importing the rule set itself.
            create_addition()
            # check_version=False — presumably skips the rules-version
            # compatibility check for the bundled file; confirm in
            # ImportRulesService.
            import_service = ImportRulesService(file, check_version=False)
            import_service.run_import()

View file

@ -0,0 +1,46 @@
# Vector pipeline that collects logs and sends them to ElasticSearch
[sources.celery_logs_from_file]
type = "file"
include = [
"/var/log/armaconsole/celeryd.log",
"/var/log/armaconsole/celerybeat.log",
]
# Parse data
[transforms.parse_celery_logs]
type = "remap"
inputs = [
"celery_logs_from_file"
]
source = '''
source_file = .file
if ends_with(.file, "celerybeat.log") {
parsed, err = parse_regex(.message, r'\[(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d*:\s)[^ ]* (?P<message>.*)')
message = replace(.message, parsed.timestamp, "") ?? ""
} else if ends_with(.file, "celeryd.log") {
parsed, err = parse_regex(.message, r'\[(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d*:\s)[^ ]* (?P<message>.*)')
message = replace(.message, parsed.timestamp, "") ?? ""
}
if err != null {
abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''
[sinks.celery_logs_to_es]
type = "elasticsearch"
inputs = ["parse_celery_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

@ -0,0 +1,41 @@
# Vector pipeline that collects logs and sends them to ElasticSearch
[sources.console_logs_from_file]
type = "file"
include = [
"/var/log/armaconsole/console.log",
]
# Parse data
[transforms.parse_console_logs]
type = "remap"
inputs = [
"console_logs_from_file"
]
source = '''
source_file = .file
parsed, err = parse_regex(.message, r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d*) (?P<level>[^ ]*) (?P<message>.*)')
message = parsed.message
if err != null {
abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''
[sinks.console_logs_to_es]
type = "elasticsearch"
inputs = ["parse_console_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

@ -0,0 +1,41 @@
# Vector pipeline that collects logs and sends them to ElasticSearch
[sources.es_logs_from_file]
type = "file"
include = [
"/var/log/elasticsearch/console-cluster.log",
]
# Parse data
[transforms.parse_es_logs]
type = "remap"
inputs = [
"es_logs_from_file"
]
source = '''
source_file = .file
parsed, err = parse_regex(.message, r'(?P<timestamp>\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2},\d*\])(?P<message>.*)')
message = parsed.message
if err != null {
abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''
[sinks.es_logs_to_es]
type = "elasticsearch"
inputs = ["parse_es_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

@ -0,0 +1,41 @@
# Vector pipeline that collects logs and sends them to ElasticSearch
[sources.gunicorn_logs_from_file]
type = "file"
include = [
"/var/log/armaconsole/gunicorn/gunicorn.log",
]
# Parse data
[transforms.parse_gunicorn_logs]
type = "remap"
inputs = [
"gunicorn_logs_from_file"
]
source = '''
source_file = .file
parsed, err = parse_regex(.message, r'(?P<timestamp>[^ ]+[ ]+[^ ]+[ ]+[^ ]+[ ]+)(?P<message>.*)')
message = parsed.message
if err != null {
abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''
[sinks.gunicorn_logs_to_es]
type = "elasticsearch"
inputs = ["parse_gunicorn_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

@ -0,0 +1,46 @@
# Vector pipeline that collects logs and sends them to ElasticSearch
[sources.nginx_logs_from_file]
type = "file"
include = [
"/var/log/armaconsole/nginx.error.log",
"/var/log/armaconsole/nginx.access.log",
]
# Parse data
[transforms.parse_nginx_logs]
type = "remap"
inputs = [
"nginx_logs_from_file"
]
source = '''
source_file = .file
if ends_with(.file, "nginx.error.log") {
parsed, err = parse_regex(.message, r'(?P<timestamp>\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2}) (?P<message>.*)')
message = parsed.message
} else if ends_with(.file, "nginx.access.log") {
parsed, err = parse_regex(.message, r'(?P<timestamp>\s\[\d{2}/\D+/\d{4}:\d{2}:\d{2}:\d{2} \+\d{4}\])')
message = replace(.message, parsed.timestamp, "") ?? ""
}
if err != null {
abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''
[sinks.nginx_logs_to_es]
type = "elasticsearch"
inputs = ["parse_nginx_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

@ -0,0 +1,41 @@
# Vector pipeline that collects logs and sends them to ElasticSearch
[sources.postgresql_logs_from_file]
type = "file"
include = [
"/var/log/postgresql/postgresql-11-main.log",
]
# Parse data
[transforms.parse_postgresql_logs]
type = "remap"
inputs = [
"postgresql_logs_from_file"
]
source = '''
source_file = .file
parsed, err = parse_regex(.message, r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d*) [^ ]* (?P<message>.*)')
message = parsed.message
if err != null {
abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''
[sinks.postgresql_logs_to_es]
type = "elasticsearch"
inputs = ["parse_postgresql_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

@ -0,0 +1,41 @@
# Vector pipeline that collects logs and sends them to ElasticSearch
[sources.redis_logs_from_file]
type = "file"
include = [
"/var/log/redis/redis-server.log",
]
# Parse data
[transforms.parse_redis_logs]
type = "remap"
inputs = [
"redis_logs_from_file"
]
source = '''
source_file = .file
parsed, err = parse_regex(.message, r'[^ ](?P<timestamp>\s\d{2} \S* \d{4} \d{2}:\d{2}:\d{2}.\d*) (?P<message>.*)')
message = replace(.message, parsed.timestamp, "") ?? ""
if err != null {
abort
}
. = {}
.timestamp = now()
.message = message
.file = source_file
'''
[sinks.redis_logs_to_es]
type = "elasticsearch"
inputs = ["parse_redis_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

@ -0,0 +1,35 @@
[sources.syslog_file_logs]
type = "file"
include = ["/var/log/syslog"]
read_from = "end"
[transforms.parse_syslog_file_logs]
type = "remap"
inputs = ["syslog_file_logs"]
source = '''
source_file = .file
source_syslog_message = .message
syslog_message, err = parse_syslog(source_syslog_message)
if err != null {
abort
}
. = {}
.timestamp = now()
.message = syslog_message.message
.file = source_file
'''
[sinks.syslog_file_to_es]
type = "elasticsearch"
inputs = ["parse_syslog_file_logs"]
compression = "none"
healthcheck = true
auth.strategy= "basic"
auth.user = "{{ elastic_login }}"
auth.password = "{{ elastic_password }}"
endpoint = "{{ elastic_url }}"
normal.index = "system-logs"
id_key = "event_uuid"

View file

134
console/models.py Normal file
View file

@ -0,0 +1,134 @@
import logging
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.translation import gettext_lazy, pgettext_lazy
from django_celery_beat.models import PeriodicTask
from rest_framework.authtoken.models import Token
from solo.models import SingletonModel
from core.fields import IntegerField
_log = logging.getLogger(__name__)
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    """Create a DRF auth token for every newly created user.

    See https://www.django-rest-framework.org/api-guide/authentication/#generating-tokens
    """
    if not created:
        return
    Token.objects.create(user=instance)
class NameDescriptionModel(models.Model):
    """ Abstract model for name - description pair model """
    # Human-readable name; not unique at this level (see UniqueNameDescriptionModel).
    name = models.CharField(max_length=128, verbose_name=gettext_lazy(
        'Name'))
    # Optional free-form description.
    description = models.TextField(null=True,
                                   blank=True,
                                   verbose_name=gettext_lazy('Description'),
                                   help_text=gettext_lazy('Description'))

    class Meta:
        abstract = True
        ordering = ['name']

    def __str__(self):
        return self.name
class UniqueNameDescriptionModel(NameDescriptionModel):
    """ NameDescriptionModel heir to make name field unique """
    # Overrides the parent field only to add the uniqueness constraint.
    name = models.CharField(max_length=128, verbose_name=gettext_lazy('Name'), unique=True)

    class Meta:
        abstract = True
        ordering = ['name']
class SensorConnectedMixin(models.Model):
    """ Add connection to sensor by name.
    Sensor can be ARMAIF or Endpoint
    """
    # Stored as a plain name rather than a foreign key, so the referenced
    # sensor may be absent without breaking this record.
    sensor = models.CharField(null=True, blank=True, max_length=128, verbose_name=gettext_lazy("Sensor name"))

    class Meta:
        abstract = True
class UpdatedNameDescriptionModel(NameDescriptionModel):
    """ Abstract model for name - description pair with updated info """
    # auto_now: refreshed on every save().
    updated = models.DateTimeField(auto_now=True, verbose_name=gettext_lazy(
        'Updated'), help_text=gettext_lazy('Date and time, when asset was updated'))

    class Meta:
        abstract = True
        ordering = ['updated', 'name']

    def __str__(self):
        return self.name
class VulnerabilityEffect(NameDescriptionModel):
    """ Possible Vulnerability effect """
    # Concrete model; inherits name/description fields and ordering.
    pass
class VulnerabilityRecommendations(NameDescriptionModel):
    """ Recommendations how to close Vulnerability """
    # Concrete model; inherits name/description fields and ordering.
    pass
class Vulnerability(UniqueNameDescriptionModel):
    """ Description of one Vulnerability """
    # How the vulnerability can be detected.
    detection_method = models.TextField(verbose_name=gettext_lazy('Detection method'))
    # Software affected by this vulnerability.
    affected_software = models.TextField(verbose_name=gettext_lazy('Vulnerable software'))
    # Optional many-to-many links to shared recommendation/effect catalogs.
    close_recommendations = models.ManyToManyField(VulnerabilityRecommendations,
                                                   verbose_name=gettext_lazy('Resolve recommendations'),
                                                   help_text=gettext_lazy('How to resolve the vulnerability'),
                                                   blank=True)
    effects = models.ManyToManyField(VulnerabilityEffect,
                                     verbose_name=pgettext_lazy('as consequences', 'Effects'),
                                     help_text=gettext_lazy('Consequences of the vulnerability'),
                                     blank=True)
class ConnectionType(NameDescriptionModel):
    """ Asset connection type """
    # Concrete model; inherits name/description fields and ordering.
    pass
class Connection(models.Model):
    """
    Connection between two assets (an edge on the network map).
    """
    # Imported at class-body level — presumably to avoid a circular import
    # between console.models and assets.models; confirm before moving.
    from assets.models.assets import Asset

    class ProtocolType(models.TextChoices):
        """Transport protocol of the connection."""
        TCP = 'TCP', gettext_lazy('TCP protocol')
        UDP = 'UDP', gettext_lazy('UDP protocol')

    src_asset = models.ForeignKey(Asset, related_name='src', on_delete=models.CASCADE,
                                  verbose_name=gettext_lazy('Source asset'))
    dst_asset = models.ForeignKey(Asset,
                                  related_name='dst',
                                  on_delete=models.CASCADE,
                                  verbose_name=gettext_lazy('Destination asset'))
    # Ports are optional; the valid TCP/UDP port range is enforced.
    src_port = IntegerField(verbose_name=gettext_lazy('Source port'), null=True, blank=True,
                            min_value=0, max_value=65535)
    dst_port = IntegerField(verbose_name=gettext_lazy('Destination port'), null=True, blank=True,
                            min_value=0, max_value=65535)
    connection_protocol = models.CharField(choices=ProtocolType.choices,
                                           verbose_name=gettext_lazy('Connection protocol'),
                                           help_text=gettext_lazy('Connection protocol type'), blank=True, null=True,
                                           default=ProtocolType.TCP, max_length=10)
    # Fix: auto_now_add sets the timestamp once on INSERT. The original
    # auto_now=True silently rewrote the "created" time on every save().
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True, verbose_name=gettext_lazy(
        'Updated'), help_text=gettext_lazy('Date and time, when connection was updated'))

7
console/routing.py Normal file
View file

@ -0,0 +1,7 @@
from django.urls import path
from notifications.services.ws import WSNotification
# WebSocket routes for the Channels ASGI router: notifications are pushed to
# clients connected at ws/notifications/.
websocket_urlpatterns = [
    path('ws/notifications/', WSNotification.as_asgi()),
]

68
console/serializers.py Normal file
View file

@ -0,0 +1,68 @@
import logging
import re
from django.contrib.auth.models import Group, Permission
from django.utils.translation import gettext
from rest_framework import serializers
from assets.models.assets import Asset
from console.models import (Connection)
from core.serializers import ModelLocalizedSerializer, DateTimeLocalizedField
# Special characters that are rejected in group names (see GroupNameSerializer).
RE_GROUPNAME = re.compile('[@+!#$%^&*()<>?/|}{~:]')
logger_info = logging.getLogger('console.user.info')
_log = logging.getLogger()
class AllPermsSerializer(ModelLocalizedSerializer):
    """Serializer for Permission objects with a localized display name."""
    # The explicit 'get_name' argument was redundant: SerializerMethodField
    # defaults to 'get_<field_name>'.
    name = serializers.SerializerMethodField()

    class Meta:
        model = Permission
        fields = ['codename', 'name', 'content_type']

    def get_name(self, obj):
        """Return the permission name translated to the active language."""
        return gettext(obj.name)
class GroupNameSerializer(ModelLocalizedSerializer):
    """Serializer exposing (and validating) a group's name."""

    class Meta:
        model = Group
        fields = ['name']

    def validate_name(self, value):
        """Reject group names that contain special characters.

        @param value: name from the serializer (pass through: serializer.data)
        @return: name of the group
        """
        match = RE_GROUPNAME.search(value)
        if match is not None:
            raise serializers.ValidationError(gettext('Incorrect group name format. Try another'))
        return value
class CelerySerializer(serializers.Serializer):
    """Serializer describing the status of a Celery task."""
    # UUID of the task being polled.
    task_id = serializers.UUIDField()
    finished = serializers.BooleanField(required=False)
    result = serializers.IntegerField(required=False)
# Network map serializers for ConnectionInfoSerializer
# Asset connection info serializer
class ConnectionAssetInfoSerializer(serializers.ModelSerializer):
    """Minimal asset representation embedded in network-map connections."""

    class Meta:
        model = Asset
        fields = ['pk', 'name', 'ip']
# Connection serializer
class ConnectionSerializer(serializers.ModelSerializer):
    """Serialize a Connection edge with both endpoint assets embedded and
    localized created/updated timestamps."""
    src_asset = ConnectionAssetInfoSerializer()
    dst_asset = ConnectionAssetInfoSerializer()
    updated = DateTimeLocalizedField()
    created = DateTimeLocalizedField()

    class Meta:
        model = Connection
        fields = ['src_asset', 'dst_asset', 'connection_protocol', 'created', 'updated']

View file

View file

@ -0,0 +1,18 @@
import os
from django.conf import settings
def load_product_version():
    """Get product version from file.

    Reads the first line of <BASE_DIR>/product_version, expected in the form
    ``version:<value>``.

    :return: dict with 'product' and 'version' keys; 'version' stays the
        string 'None' when the file is missing or malformed.
    """
    file_name = os.path.join(settings.BASE_DIR, 'product_version')
    return_text = {'product': 'InfoWatch ARMA Management Console', 'version': 'None'}
    if not os.path.exists(file_name):
        return return_text
    with open(file_name, 'r', encoding='utf-8') as f:
        try:
            # Fix: a line without ':' raises IndexError from the [1] lookup,
            # which the original `except ValueError` never caught. Also strip
            # the trailing newline readline() leaves on the version string.
            return_text['version'] = f.readline().split(':')[1].strip()
        except (ValueError, IndexError):
            pass
    return return_text

View file

473
console/settings/base.py Normal file
View file

@ -0,0 +1,473 @@
""" Django settings for console project. """
import os
import sys
from celery.schedules import crontab
from django.utils.translation import gettext_lazy
from console.services.product import load_product_version
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
DEBUG = int(os.environ.get('DEBUG', '0')) > 0
# Program is started under testing framework
TEST_MODE = 'test' in sys.argv or 'pytest' in sys.modules or os.environ.get("TEST_MODE")
# Program is used by developer (human), i.e. loads additional debug software
DEV_MODE = DEBUG and not TEST_MODE
# False to disable access to admin control panel via web
ADMIN_PANEL_ENABLED = DEV_MODE
##################################
### SECURITY ###
##################################
CORS_ALLOW_ALL_ORIGINS = True
CORS_ALLOW_CREDENTIALS = True
CORS_EXPOSE_HEADERS = ['Content-Disposition']
ALLOWED_HOSTS = ['*']
######################################
### APPLICATIONS ###
######################################
INSTALLED_APPS = [
'django.forms',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'rest_framework.authtoken',
'django_json_widget',
'solo',
'django_celery_beat',
'sequences.apps.SequencesConfig',
'corsheaders',
'django_filters',
'channels',
]
PROJECT_APPS = [
'assets.apps.AssetsConfig',
'license_info.apps.LicenseInfoConfig',
'perms.apps.PermsConfig',
'console.apps.ConsoleConfig',
'core.apps.CoreConfig',
'correlation.apps.CorrelationConfig',
'dashboard.apps.DashboardConfig',
'company.apps.CompanyConfig',
'ncircc.apps.NcirccConfig',
'logstash.apps.LogstashConfig',
'networkmap.apps.NetworkmapConfig',
'users.apps.UsersConfig',
'incident_export.apps.IncidentExportConfig',
'storage.apps.StorageConfig',
'incident.apps.IncidentConfig',
'events.apps.EventsConfig',
'rotation.apps.RotationConfig',
'inputs.apps.InputsConfig',
'devices.apps.DevicesConfig',
'notifications.apps.NotificationsConfig',
]
INSTALLED_APPS += PROJECT_APPS
MIDDLEWARE = [
'core.middleware.LicenseMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.locale.LocaleMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'core.middleware.TimezoneMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
########################################
### AUTHENTICATION ###
########################################
AUTHENTICATION_BACKENDS = [
'core.backends.ConsoleAuthBackend.ConsoleAuthSystem',
'django.contrib.auth.backends.ModelBackend',
]
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 8,
}
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
##################################
### DATABASE ###
##################################
# Fall back to the default PostgreSQL port when POSTGRES_PORT is unset or
# empty. Fix: .get() avoids the KeyError that os.environ['POSTGRES_PORT']
# raised when the variable was not defined at all.
if not os.environ.get('POSTGRES_PORT'):
    os.environ['POSTGRES_PORT'] = '5432'
DATABASES = {
    'default': {
        'ENGINE': "django.db.backends.postgresql",  # Project depends on postgres, so no way to change it from env
        'NAME': os.environ.get('POSTGRES_DB'),
        'USER': os.environ.get('POSTGRES_USER'),
        'PASSWORD': os.environ.get('POSTGRES_PASSWORD'),
        'HOST': os.environ.get('POSTGRES_HOST'),
        'PORT': os.environ.get('POSTGRES_PORT'),
    }
}
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
#################################
### LOGGING ###
#################################
LOG_PATH = os.environ.get('LOG_PATH', os.path.join(BASE_DIR, 'dockerlogs'))
USER_LOG_FILENAME = os.environ.get('USER_LOG_FILENAME', 'console.log')
MIN_LOG_LEVEL = os.environ.get('MIN_LOG_LEVEL', 'DEBUG')
LOG_MAX_BYTES = int(os.environ.get('LOG_MAX_BYTES', 1024 * 1024 * 5))
LOG_BACKUP_COUNT = int(os.environ.get('LOG_BACKUP_COUNT', 500))
os.makedirs(LOG_PATH, exist_ok=True)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'info_message': {
'format': '%(asctime)s %(levelname)s %(message)s',
},
'debug_format': {
'format': '%(asctime)s %(levelname)s %(filename)s %(funcName)s %(message)s'
}
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': MIN_LOG_LEVEL,
},
'file': {
'level': 'DEBUG',
'class': 'logging.handlers.RotatingFileHandler',
'filename': os.path.join(LOG_PATH, USER_LOG_FILENAME),
'formatter': 'info_message',
'encoding': 'utf-8',
'maxBytes': LOG_MAX_BYTES,
'backupCount': LOG_BACKUP_COUNT,
},
},
'loggers': {
'': {
'handlers': ['file', 'console'],
'level': MIN_LOG_LEVEL,
},
},
}
##############################################
### INTERNATIONALIZATION ###
##############################################
LANGUAGE_CODE = 'en'
LANGUAGES = [
('en', gettext_lazy('English')),
('ru', gettext_lazy('Russian')),
]
INITIAL_DATE_FORMAT = "Y-m-d"
# @see https://en.wikipedia.org/wiki/List_of_tz_database_time_zones
TIME_ZONE = os.environ.get('TIME_ZONE', 'UTC')
# Timezone used for users by default
DEFAULT_CURRENT_TIMEZONE = os.environ.get('DEFAULT_CURRENT_TIMEZONE', 'Europe/Moscow')
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOCALE_PATHS = [
os.path.join(BASE_DIR, 'locale')
]
###############################
### FILES ###
###############################
# Build paths for generated files like static inside the project like this: os.path.join(PUBLIC_DIR, ...)
PUBLIC_DIR = os.environ.get('PUBLIC_DIR', os.path.join(BASE_DIR, 'public'))
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(PUBLIC_DIR, 'static')
MEDIA_ROOT = os.path.join(PUBLIC_DIR, 'media')
REDIS_HOST = os.environ.get('REDIS_HOST', 'redis')
REDIS_PORT = int(os.environ.get('REDIS_PORT', 6379))
REDIS_CACHE_TIMEOUT = 86400
###############################
### CACHE ###
###############################
SOLO_CACHE = 'local'
SOLO_CACHE_TIMEOUT = 60 * 5 # 5 mins
SOLO_CACHE_PREFIX = 'solo'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'local': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'redis': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1',
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient'
}
}
}
#############################
### DRF ###
#############################
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
'rest_framework.renderers.BrowsableAPIRenderer',
),
'DEFAULT_FILTER_BACKENDS': (
'django_filters.rest_framework.DjangoFilterBackend',
'rest_framework.filters.OrderingFilter',
'core.backends.filters.SearchAllFieldsBackend',
),
'DEFAULT_PAGINATION_CLASS': 'core.services.pagination.BasicPagination',
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticated',
],
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.SessionAuthentication',
'rest_framework.authentication.TokenAuthentication',
],
}
################################
### CELERY ###
################################
CELERY_BROKER_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}'
CELERY_RESULT_BACKEND = f'redis://{REDIS_HOST}:{REDIS_PORT}'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = TIME_ZONE
CELERY_IMPORTS = ['devices.tasks.firewall', 'devices.tasks.sensor']
""" Single place to setup at which time daily tasks should execute, crontab schedule object"""
DAILY_CRONTAB = crontab(minute='0', hour='1')
""" Single place to setup at which time weekly tasks should execute, crontab schedule object"""
WEEKLY_CRONTAB = crontab(minute='0', hour='1', day_of_week='1')
""" Single place to setup at which time monthly tasks should execute, crontab schedule object"""
MONTHLY_CRONTAB = crontab(minute='0', hour='1', day_of_month='1')
""" Execute every 2 minutes."""
EVERY_2_MINUTE = crontab(minute='*/2')
ROTATE_SIZE_CHECK_CRONTAB = crontab(minute='*/5')
CELERY_BEAT_SCHEDULE = {
'update_statistics': {
'task': 'dashboard.tasks.update_statistics_task',
'schedule': crontab()
},
'expire_users': {
'task': 'console.tasks.expire_users_task',
'schedule': DAILY_CRONTAB
},
'update_auto_network_map_data': {
'task': 'networkmap.tasks.update_auto_network_map_data',
'schedule': crontab()
},
'update_firewall_info_task': {
'task': 'devices.tasks.firewall.update_firewall_info_task',
'schedule': crontab()
},
'update_amount_of_elk_events': {
'task': 'console.tasks.update_amount_of_aggregated_events',
'schedule': crontab()
},
'check_blocked_users': {
'task': 'core.tasks.check_blocked_users',
'schedule': crontab()
},
'update_status_notification': {
'task': 'ncircc.tasks.update_status_notification',
'schedule': crontab(), # todo Уточнить точное время
},
'update_comments': {
'task': 'ncircc.tasks.update_comments',
'schedule': crontab(), # todo Уточнить точное время
},
'ping_sensors': {
'task': 'devices.tasks.sensor.ping_sensors',
'schedule': crontab()
},
'get_disk_usage_task': {
'task': 'core.tasks.get_disk_usage_task',
'schedule': crontab()
},
'reboot_correlator_task': {
'task': 'correlation.tasks.reboot_correlator_task',
'schedule': EVERY_2_MINUTE,
},
}
######################################
### AMC SERVICES ###
######################################
# LICENSE
LICENSE_CLIENT_URL = os.environ.get('LICENSE_CLIENT_URL', 'http://license-client:8050')
LICENSE_CACHE_TIMEOUT = 60 * 60 # 60 minutes
LICENSE_FEATURE_EVENT_PROCESSING = "event_processing"
LICENSE_OPTION_EVENT_SOURCE_COUNT = "event_sources"
# NGINX
NGINX_ENABLED_CONFIG_FILENAME = "armaconsole.nginx"
NGINX_HTTP_CONFIG_FILENAME = "armaconsole_http.nginx"
NGINX_HTTPS_CONFIG_FILENAME = "armaconsole_https.nginx"
NGINX_SITES_AVAILABLE = "/usr/local/armaconsole/nginx"
# CORRELATOR
CORRELATOR_SEVERITY_LEVEL = int(os.environ.get('CORRELATOR_SEVERITY_LEVEL', 6))
CORRELATOR_AUTO_CATEGORY_NAME = os.environ.get('CORRELATOR_AUTO_CATEGORY_NAME', gettext_lazy('Auto'))
CORRELATOR_URL = os.environ.get('CORRELATOR_URL', 'http://correlator:5566')
# VECTOR
LOGSTASH_CONFIG_DIR = os.environ.get('LOGSTASH_CONFIG_DIR', os.path.join(PUBLIC_DIR, 'vector'))
# ELASTICSEARCH
ELASTIC_URL = os.environ.get('ELASTIC_URL', 'http://elasticsearch:9200')
elk_split = ELASTIC_URL.replace('http://', '').split(':')
# str.split always yields at least one element, so index 0 is safe. The port
# element exists only when the URL actually contains one — the original
# `len(elk_split) > 0` test was always true and raised IndexError for
# port-less URLs; check `> 1` instead.
ELK_HOST = elk_split[0] if elk_split else 'elasticsearch'
ELK_PORT = elk_split[1] if len(elk_split) > 1 else 9200
ELK_LOGIN = os.environ.get('ELASTIC_USER', 'elastic')
ELK_PASS = os.environ.get('ELASTIC_PASSWORD', 'changeme')
ELK_MAX_ENTRIES = 100000
ELK_AGGREGATED_INDEX = 'aggregated-*'
# NOTE: intentional misspelling — Vector currently parses IF logs with this
# exact product name, so it must not be corrected here.
ELK_FIREWALL_PRODUCT_NAME = 'Industrial Firerwall'
ELK_ENDPOINT_PRODUCT_NAME = 'Industrial Endpoint'
# RABBITMQ
RABBIT_URL = os.environ.get('RABBIT_URL', 'http://rabbitmq-management:5672')
rabbit_split = RABBIT_URL.replace('http://', '').split(':')
RABBIT_HOST = rabbit_split[0] if rabbit_split else 'rabbitmq-management'
RABBIT_PORT = rabbit_split[1] if len(rabbit_split) > 1 else 5672
###############################
### OTHER ###
###############################
# ROUTING
ROOT_URLCONF = 'console.urls'
WSGI_APPLICATION = 'console.wsgi.application'
ASGI_APPLICATION = 'console.asgi.application'
LOGIN_REDIRECT_URL = 'index'
LOGOUT_REDIRECT_URL = 'login'
LOGIN_URL = 'login'
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'channels_redis.core.RedisChannelLayer',
'CONFIG': {
"hosts": [(REDIS_HOST, REDIS_PORT)],
},
},
}
# TEMPLATES
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
# Site info must not contain dynamic data, only static values
SITE_INFO = {
'domain': 'infowatch.ru',
'name': 'InfoWatch ARMA',
'package': 'armaconsole', # Must be a valid deb package name
'version': '1.4.0', # Must be a valid deb package version
'architecture': 'amd64', # Must be a valid deb package architecture
# These values are used in CEF format
'vendor': 'InfoWatch ARMA',
'product': 'ARMAMC'
}
# How many (in percent) keep in table while rotation occurs
# So 0.3 - means 30% of table will stay while rotation
SAVE_DURING_ROTATION = float(os.environ.get('SAVE_DURING_ROTATION', 0.3))
EMAIL_HOST_USER = 'console@arma.com'
EMAIL_HOST = 'localhost'
# GENERATE SELFSIGNED CERTIFICATE
TLS_CERT_DAYS = 365
TLS_CERT_KEY_SIZE = 2048
TLS_CERT_COUNTRY = "RU"
TLS_CERT_STATE = "Moscow"
TLS_CERT_LOCALITY = "Moscow"
TLS_CERT_ORIG_NAME = "ARMA"
TLS_CERT_UNIT_NAME = "Console"
TLS_CERT_COMMON_NAME = "iwarma.ru"
TLS_CERT_FILENAME = "/etc/nginx/ssl/armaconsole/nginx-selfsigned.crt"
TLS_CERT_KEY_FILENAME = "/etc/nginx/ssl/armaconsole/nginx-selfsigned.key"
# TODO: need use
TLS_CERT_DHPARAM_FILENAME = "/etc/nginx/ssl/armaconsole/dhparam.pem"
WEB_UI_PORT = int(os.environ.get('WEB_UI_PORT', 9090))
MAX_UPLOADSIZE = 80 * 1024 * 1024
# NCIRCC
NCIRCC_DOMAIN_NAME = os.environ.get('NCIRCC_DOMAIN_NAME', 'https://test-lk.cert.gov.ru')
NCIRCC_CERT_VERIFY = '/etc/ssl/certs/' if os.path.exists('/etc/ssl/certs/') else False
# Compatible ARMAIF versions
MINIMAL_COMPATIBLE_AIF_VERSION = "3.6"
MINIMAL_VERSION_CORRELATION_RULES = '1.3.4'
# Product version
PRODUCT_VERSION = load_product_version()

29
console/settings/dev.py Normal file
View file

@ -0,0 +1,29 @@
""" Django settings for development debug purpose """
import os
from dotenv import load_dotenv
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
load_dotenv(dotenv_path=os.path.join(BASE_DIR, '.env.dev'))
os.environ['LOG_PATH'] = os.environ.get('LOG_PATH', os.path.join(BASE_DIR, 'dockerlogs'))
WEB_PDB_PORT = os.environ.get('WEB_PDB_PORT')
from .base import *
SITE_INFO['name'] += ' DEBUG'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CACHES["default"] = {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}
ELASTIC_URL = "http://elasticsearch:9200"
# For testing purpose
LICENSE_CACHE_TIMEOUT = 30
# Security key for import/export firewall config
SECRET_KEY = os.environ.get('SECRET_KEY', '')

58
console/settings/prod.py Normal file
View file

@ -0,0 +1,58 @@
""" Django production settings for console project. """
import os
import dotenv
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
orig_user_env = dict(os.environ.items())
dotenv.load_dotenv(dotenv_path=os.path.join(BASE_DIR, '.env.prod'))
if os.path.exists('/etc/armaconsole/env.prod'):
dotenv.load_dotenv(dotenv_path='/etc/armaconsole/env.prod')
# Convert DBC variables to our names
dbc_conf = dotenv.dotenv_values(dotenv_path=os.environ.get('DEBCONF_DBCONF_FPATH'))
debconf_dbconfig_django_map = (
('dbname', 'POSTGRES_DB'),
('dbuser', 'POSTGRES_USER'),
('dbpass', 'POSTGRES_PASSWORD'),
('dbserver', 'POSTGRES_HOST'),
('dbport', 'POSTGRES_PORT'),
)
for debconf_val, django_val in debconf_dbconfig_django_map:
    # Copy the debconf database settings into our environment variable names.
    # Fix: never write None into os.environ (that raises TypeError) when
    # neither debconf nor the environment defines the variable.
    value = dbc_conf.get(debconf_val, os.environ.get(django_val))
    if value is not None:
        os.environ[django_val] = value
# Restore original environment, as user environment is more important
os.environ.update(orig_user_env)
# noinspection PyUnresolvedReferences
from .base import *
# Security key for import/export firewall config
SECRET_KEY = os.environ.get('SECRET_KEY', '')
CACHES = {
'default': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1',
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient'
}
},
'local': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1',
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient'
}
},
'redis': {
'BACKEND': 'django_redis.cache.RedisCache',
'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1',
'OPTIONS': {
'CLIENT_CLASS': 'django_redis.client.DefaultClient'
}
}
}

38
console/settings/test.py Normal file
View file

@ -0,0 +1,38 @@
""" Django settings for development debug purpose """
import os
from dotenv import load_dotenv
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
load_dotenv(dotenv_path=os.path.join(BASE_DIR, '.env.dev'))
os.environ['LOG_PATH'] = os.environ.get('LOG_PATH', os.path.join(BASE_DIR, 'dockerlogs'))
os.environ['TEST_MODE'] = "True"
WEB_PDB_PORT = os.environ.get('WEB_PDB_PORT')
from .base import *
SITE_INFO['name'] += ' DEBUG'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CACHES["default"] = {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}
ELASTIC_URL = "http://elasticsearch:9200"
LICENSE_CLIENT_URL = 'http://license-client:8050'
# For testing purpose
LICENSE_CACHE_TIMEOUT = 30
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
# Security key for import/export firewall config
SECRET_KEY = os.environ.get('SECRET_KEY')

36
console/tasks.py Normal file
View file

@ -0,0 +1,36 @@
from __future__ import absolute_import, unicode_literals
from celery import shared_task
from celery.utils.log import get_task_logger
from django.conf import settings
from django.core.cache import caches
from elasticsearch import Elasticsearch
from core.utils import dtnow
from events.constants import ELK_HOST, ELK_PORT, ELK_LOGIN, ELK_PASS
from users.models import UserInfo
# Module-level task logger (Celery's per-task logger, named after this module).
_log = get_task_logger(__name__)
MEDIA_ROOT = getattr(settings, 'MEDIA_ROOT')
# Cache TTL (seconds) for values stored in Redis by the tasks below.
CACHE_TIMEOUT = getattr(settings, 'REDIS_CACHE_TIMEOUT', 120)
# Redis key holding the total number of events counted in Elasticsearch.
REDIS_ELK_EVENTS_KEY = 'amount_of_aggregated_events'
@shared_task
def update_amount_of_aggregated_events():
    """Refresh the cached total of aggregated/system events from Elasticsearch.

    Counts documents across the ``aggregated-*`` and ``system-*`` indices and
    stores the result in the 'redis' cache under ``REDIS_ELK_EVENTS_KEY``.
    """
    client = Elasticsearch(
        [{'host': ELK_HOST, 'port': ELK_PORT}],
        http_auth=(ELK_LOGIN, ELK_PASS),
    )
    total_events = client.count(index=['aggregated-*', 'system-*'])['count']
    caches['redis'].set(REDIS_ELK_EVENTS_KEY, total_events, CACHE_TIMEOUT)
def expire_users():
    """Deactivate every still-active user whose account expiry date has passed."""
    expired_infos = UserInfo.objects.filter(
        expire_date__lte=dtnow().date(),
        user__is_active=True,
    )
    for info in expired_infos:
        account = info.user
        account.is_active = False
        account.save()
@shared_task
def expire_users_task():
    """Celery entry point that deactivates users whose accounts have expired."""
    expire_users()

View file

@ -0,0 +1,10 @@
<!-- Build artifact of the login React app (create-react-app output).
     Served by Django/nginx from /static/login_react/.
     NOTE(review): apple-touch-icon and logo192 reference root-relative
     /logo192.png while other assets live under /static/login_react/ --
     presumably a leftover CRA default; verify the icon actually resolves. -->
<!doctype html><html lang="en"><head><meta charset="utf-8"/>
<link rel="icon" href="/static/login_react/favicon.ico"/>
<meta name="viewport" content="width=device-width,initial-scale=1"/>
<meta name="theme-color" content="#000000"/>
<meta name="description" content="Web site created using create-react-app"/>
<link rel="apple-touch-icon" href="/logo192.png"/>
<link rel="manifest" href="/static/login_react/manifest.json"/>
<title>Management console</title><script defer="defer" src="/static/login_react/js/main.9ee785bf.js"></script>
<link href="/static/login_react/css/main.c84782cd.css" rel="stylesheet">

View file

Some files were not shown because too many files have changed in this diff Show more