From ed782d491afde35b0a490bcc02d48a7f5c8c319a Mon Sep 17 00:00:00 2001
From: pro100ton
Date: Sat, 2 Nov 2024 14:12:45 +0300
Subject: [PATCH] Initial migration

---
 .coveragerc | 11 +
 .env.dev | 41 +
 .env.prod | 19 +
 .gitignore | 55 +
 .gitlab-ci.yml | 226 +
 .gitmodules | 8 +
 CHANGELOG.md | 452 +
 README.md | 58 +
 assets/__init__.py | 0
 assets/admin.py | 16 +
 assets/apps.py | 5 +
 assets/constants.py | 3 +
 assets/filters.py | 14 +
 assets/migrations/__init__.py | 0
 assets/models/__init__.py | 0
 assets/models/assets.py | 92 +
 assets/serializers/__init__.py | 0
 assets/serializers/assets.py | 139 +
 assets/tests/__init__.py | 0
 assets/tests/test_assets.py | 47 +
 assets/tests/test_assets_api.py | 186 +
 assets/urls.py | 19 +
 assets/views/__init__.py | 0
 assets/views/assets.py | 125 +
 checker/.gitignore | 2 +
 checker/deb/skeleton/DEBIAN/.gitkeep | 0
 .../usr/lib/systemd/system/amcchecker.service | 15 +
 checker/go.mod | 9 +
 checker/go.sum | 12 +
 checker/main.go | 397 +
 cicd/.gitignore | 1 +
 cicd/config.yml | 84 +
 cicd/go_test.sh | 11 +
 cicd/integration_tests.sh | 87 +
 cicd/live_fw_tests.sh | 76 +
 cicd/scripts/build.py | 98 +
 cicd/unit_tests.sh | 62 +
 cicd/up_license_client.sh | 64 +
 company/__init__.py | 0
 company/admin.py | 14 +
 company/apps.py | 5 +
 company/migrations/__init__.py | 0
 company/models/__init__.py | 0
 company/models/company.py | 37 +
 company/models/location.py | 15 +
 company/serializers/__init__.py | 0
 company/serializers/company.py | 22 +
 company/serializers/location.py | 11 +
 company/services/__init__.py | 0
 company/services/company_create_update.py | 48 +
 .../ncircc_files/organization_info.xlsx | Bin 0 -> 16605 bytes
 company/tests/__init__.py | 0
 company/tests/test_company_api.py | 98 +
 company/tests/test_company_services.py | 57 +
 company/tests/test_locations_api.py | 26 +
 company/urls.py | 10 +
 company/views/__init__.py | 0
 company/views/company_api.py | 47 +
 company/views/location_api.py | 14 +
 conftest.py | 53 +
 console/__init__.py | 7 +
 console/admin.py | 34 +
 console/api/__init__.py | 0
 console/api/base.py | 25 +
 console/api/celery.py | 31 +
 console/api/connections.py | 22 +
 console/api/events.py | 0
 console/api/incidents.py | 0
 console/api/users.py | 332 +
 console/api_urls.py | 71 +
 console/apps.py | 5 +
 console/asgi.py | 56 +
 console/celery.py | 19 +
 console/conslog.py | 93 +
 console/management/__init__.py | 0
 console/management/commands/__init__.py | 0
 .../commands/create_vector_configs.py | 32 +
 console/management/commands/load_rules.py | 34 +
 .../commands/templates/mc_logs_celery.toml | 46 +
 .../commands/templates/mc_logs_console.toml | 41 +
 .../commands/templates/mc_logs_es.toml | 41 +
 .../commands/templates/mc_logs_gunicorn.toml | 41 +
 .../commands/templates/mc_logs_nginx.toml | 46 +
 .../templates/mc_logs_postgresql.toml | 41 +
 .../commands/templates/mc_logs_redis.toml | 41 +
 .../commands/templates/mc_logs_syslog.toml | 35 +
 console/migrations/__init__.py | 0
 console/models.py | 134 +
 console/routing.py | 7 +
 console/serializers.py | 68 +
 console/services/__init__.py | 0
 console/services/product.py | 18 +
 console/settings/__init__.py | 0
 console/settings/base.py | 473 +
 console/settings/dev.py | 29 +
 console/settings/prod.py | 58 +
 console/settings/test.py | 38 +
 console/tasks.py | 36 +
 console/templates/console/login.html | 10 +
 console/tests/__init__.py | 0
 console/tests/test_api.py | 34 +
 console/tests/test_auth.py | 42 +
 console/tests/test_commands.py | 86 +
 console/tests/test_data/file | 0
 console/tests/test_data/test_certificate.crt | 21 +
 console/tests/test_data/test_certificate.key | 28 +
 console/tests/test_elastic.py | 38 +
 console/tests/test_extension_validator.py | 60 +
 console/tests/test_filters.py | 1 +
 console/tests/test_utils.py | 181 +
 console/tests/test_views.py | 25 +
 console/urls.py | 27 +
 console/utils.py | 10 +
 console/views/__init__.py | 0
 console/views/index.py | 35 +
 console/wsgi.py | 15 +
 core/__init__.py | 0
 core/admin.py | 13 +
 core/apps.py | 5 +
 core/backends/ConsoleAuthBackend.py | 64 +
 core/backends/__init__.py | 0
 core/backends/filters.py | 38 +
 core/constants.py | 2 +
 core/decorators.py | 15 +
 core/extensions.py | 6 +
 core/fields.py | 21 +
 core/middleware.py | 59 +
 core/migrations/__init__.py | 0
 core/mixins.py | 124 +
 core/models.py | 52 +
 core/serializers.py | 124 +
 core/services/__init__.py | 0
 core/services/amc_services.py | 58 +
 core/services/authentication.py | 107 +
 core/services/pagination.py | 8 +
 core/services/tls_settings.py | 125 +
 core/tasks.py | 46 +
 core/tests/__init__.py | 0
 core/tests/filters_backend.py | 44 +
 core/tests/test_amc_services.py | 80 +
 core/tests/test_authentication.py | 72 +
 core/tests/test_data/test_certificate.crt | 21 +
 core/tests/test_data/test_certificate.key | 28 +
 core/tests/test_tasks.py | 28 +
 core/tests/test_tls_settings.py | 189 +
 core/urls.py | 11 +
 core/utils.py | 76 +
 core/validators.py | 52 +
 core/views/__init__.py | 0
 core/views/amc_services.py | 21 +
 core/views/view_login.py | 35 +
 core/views/view_settings.py | 98 +
 correlation/__init__.py | 0
 correlation/admin.py | 23 +
 correlation/api.py | 93 +
 correlation/apps.py | 27 +
 correlation/constants.py | 48 +
 correlation/fields.py | 30 +
 correlation/migrations/__init__.py | 0
 correlation/models.py | 64 +
 correlation/serializers.py | 166 +
 correlation/services/__init__.py | 0
 correlation/services/import_service.py | 319 +
 correlation/services/rules.py | 19 +
 correlation/services/signals.py | 26 +
 correlation/tasks.py | 101 +
 correlation/tests/__init__.py | 0
 correlation/tests/data/image.png | Bin 0 -> 1732 bytes
 correlation/tests/test_api.py | 462 +
 correlation/tests/test_rule_import_service.py | 276 +
 correlation/tests/test_serializer.py | 41 +
 correlation/urls.py | 10 +
 correlator/.dockerignore | 32 +
 correlator/.gitignore | 13 +
 correlator/.gitlab-ci.yml | 90 +
 correlator/.golangci | 654 ++
 correlator/CHANGELOG.md | 285 +
 correlator/Dockerfile | 30 +
 correlator/README.md | 33 +
 correlator/aggregator/aggregator.go | 56 +
 correlator/aggregator/aggregator_test.go | 309 +
 correlator/api/api.go | 111 +
 correlator/api/api_test.go | 336 +
 correlator/cicd/Dockerfile | 3 +
 correlator/cicd/config.yml | 30 +
 correlator/cicd/docker-compose.yml | 22 +
 correlator/cicd/test.sh | 11 +
 correlator/cicd/test_job.sh | 58 +
 correlator/cmd/correlator/.gitignore | 1 +
 correlator/cmd/correlator/config_example.yaml | 91 +
 correlator/cmd/correlator/logging.go | 57 +
 correlator/cmd/correlator/main.go | 231 +
 correlator/config/config.go | 49 +
 correlator/config/default_config.go | 100 +
 correlator/correlator/correlator.go | 33 +
 correlator/correlator/correlator_test.go | 643 ++
 correlator/deb/skeleton/DEBIAN/.gitkeep | 0
 .../lib/systemd/system/amccorrelator.service | 21 +
 .../app/amccorrelator/aggregator/.gitkeep | 0
 .../app/amccorrelator/docs/.gitkeep | 0
 correlator/docker/sh/entrypoint.sh | 31 +
 correlator/es/es.go | 308 +
 correlator/es/es_test.go | 394 +
 correlator/events/events.go | 175 +
 correlator/events/events_test.go | 288 +
 correlator/events/stat.go | 30 +
 correlator/events/stat_test.go | 36 +
 correlator/events/store.go | 214 +
 correlator/events/store_test.go | 634 ++
 correlator/events/timewindow.go | 40 +
 correlator/events/timewindow_test.go | 45 +
 correlator/events/tools.go | 17 +
 correlator/events/util.go | 264 +
 correlator/go.mod | 14 +
 correlator/make_release.py | 37 +
 correlator/mapping/component.go | 175 +
 correlator/mapping/component_test.go | 156 +
 correlator/mapping/index.go | 160 +
 correlator/mapping/mapping.go | 77 +
 correlator/rules/action.go | 33 +
 correlator/rules/action_asset.go | 329 +
 correlator/rules/action_asset_test.go | 202 +
 correlator/rules/action_bash.go | 198 +
 correlator/rules/action_bash_test.go | 212 +
 correlator/rules/action_exec.go | 234 +
 correlator/rules/action_exec_test.go | 307 +
 correlator/rules/action_firewall.go | 568 ++
 correlator/rules/action_firewall_test.go | 859 ++
 correlator/rules/action_http.go | 134 +
 correlator/rules/action_http_test.go | 87 +
 correlator/rules/action_incident.go | 369 +
 correlator/rules/action_incident_test.go | 444 +
 correlator/rules/action_syslog.go | 184 +
 correlator/rules/action_syslog_test.go | 156 +
 correlator/rules/action_t.go | 98 +
 correlator/rules/action_t_test.go | 68 +
 correlator/rules/debug_action.go | 40 +
 correlator/rules/predicate.go | 123 +
 correlator/rules/predicate_test.go | 172 +
 correlator/rules/rule.go | 222 +
 correlator/rules/rule_store.go | 195 +
 correlator/rules/rule_store_test.go | 390 +
 correlator/rules/rule_test.go | 596 ++
 correlator/rules/stat.go | 36 +
 correlator/rules/stat_test.go | 28 +
 correlator/rules/test_tools.go | 17 +
 correlator/rules/web_tools.go | 74 +
 correlator/stat/stat.go | 57 +
 correlator/stat/stat_test.go | 61 +
 correlator/util/test.go | 186 +
 dashboard/__init__.py | 0
 dashboard/admin.py | 12 +
 dashboard/api.py | 266 +
 dashboard/api_urls.py | 23 +
 dashboard/apps.py | 37 +
 dashboard/migrations/__init__.py | 0
 dashboard/models.py | 28 +
 dashboard/serializers.py | 82 +
 dashboard/tasks.py | 75 +
 dashboard/tests/__init__.py | 0
 dashboard/tests/test_layout_add.py | 32 +
 dashboard/tests/test_rights.py | 40 +
 dashboard/tests/test_widgets.py | 305 +
 dashboard/utils.py | 12 +
 dashboard/views.py | 74 +
 dashboard/widgets/__init__.py | 0
 dashboard/widgets/opened_incs_widget.py | 10 +
 dashboard/widgets/services_widget.py | 15 +
 dashboard/widgets/sys_info_widget.py | 75 +
 dashboard/widgets/top_ids_widget.py | 12 +
 dashboard/widgets/widgets.py | 167 +
 deb/react.env | 1 +
 .../usr/lib/systemd/system/amccelery.service | 20 +
 .../lib/systemd/system/amccelerybeat.service | 20 +
 .../usr/lib/systemd/system/amccore.service | 27 +
 deb/skeleton/usr/local/sbin/amcpsh | 3 +
 .../var/www/armaconsole/public/media/.gitkeep | 0
 deb_old/skeleton/DEBIAN/config | 16 +
 deb_old/skeleton/DEBIAN/control.template | 12 +
 deb_old/skeleton/DEBIAN/dirs | 3 +
 deb_old/skeleton/DEBIAN/postinst | 179 +
 deb_old/skeleton/DEBIAN/postrm | 130 +
 deb_old/skeleton/DEBIAN/preinst | 68 +
 deb_old/skeleton/DEBIAN/prerm | 60 +
 .../etc/armaconsole/elasticsearch.yml | 23 +
 .../skeleton/etc/armaconsole/env/vector.env | 3 +
 deb_old/skeleton/etc/armaconsole/license.yml | 17 +
 deb_old/skeleton/etc/armaconsole/logstash.yml | 21 +
 deb_old/skeleton/etc/armaconsole/vector.yml | 16 +
 deb_old/skeleton/etc/cron.d/armaconsole | 2 +
 .../etc/nginx/snippets/ssl-params.conf | 21 +
 .../etc/nginx/ssl/armaconsole/.gitkeep | 0
 .../usr/lib/systemd/system/amccelery.service | 25 +
 .../lib/systemd/system/amccelerybeat.service | 25 +
 .../usr/lib/systemd/system/amcchecker.service | 15 +
 .../lib/systemd/system/amccorrelator.service | 24 +
 .../lib/systemd/system/amcgunicorn.service | 31 +
 .../usr/lib/systemd/system/amclicense.service | 20 +
 .../lib/systemd/system/amcsetelkpass.service | 12 +
 .../systemd/system/amcstartconfigure.service | 12 +
 .../usr/lib/systemd/system/amcvector.service | 24 +
 .../armaconsole.elasticsearch.conf | 6 +
 .../armaconsole.logstash.conf | 7 +
 .../armaconsole/nginx/armaconsole_http.nginx | 71 +
 .../armaconsole/nginx/armaconsole_https.nginx | 72 +
 .../local/armaconsole/setElkPassService.sh | 5 +
 .../armaconsole/startConfigureService.sh | 18 +
 deb_old/skeleton/usr/local/sbin/amcpsh | 3 +
 .../var/log/armaconsole/bad_input.log | 0
 .../var/www/armaconsole/public/.gitkeep | 0
 .../var/www/armaconsole/public/media/.gitkeep | 0
 devices/__init__.py | 0
 devices/admin.py | 12 +
 devices/apps.py | 5 +
 devices/constants.py | 7 +
 devices/enums.py | 31 +
 devices/exceptions.py | 6 +
 devices/fields.py | 31 +
 devices/filters.py | 10 +
 devices/migrations/__init__.py | 0
 devices/models/__init__.py | 0
 devices/models/device.py | 45 +
 devices/models/endpoint_device.py | 119 +
 devices/models/firewall.py | 62 +
 devices/models/sensor.py | 20 +
 devices/serializers/__init__.py | 0
 devices/serializers/device.py | 45 +
 devices/serializers/endpoint_serializers.py | 56 +
 devices/serializers/firewall.py | 29 +
 devices/serializers/sensor_serializers.py | 48 +
 devices/services/__init__.py | 0
 devices/services/endpoint/__init__.py | 0
 .../services/endpoint/endpoint_antivirus.py | 36 +
 .../services/endpoint/endpoint_get_status.py | 43 +
 devices/services/endpoint/endpoint_redis.py | 27 +
 .../services/endpoint/endpoint_services.py | 230 +
 devices/services/firewall/__init__.py | 5 +
 devices/services/firewall/exception.py | 104 +
 devices/services/firewall/firewall.py | 290 +
 devices/services/sensor/__init__.py | 0
 devices/services/sensor/enums.py | 33 +
 devices/services/sensor/rabbitmq.py | 241 +
 devices/services/sensor/service.py | 24 +
 devices/services/sensor/utils.py | 68 +
 devices/services/vector.py | 78 +
 devices/tasks/__init__.py | 0
 devices/tasks/firewall.py | 93 +
 devices/tasks/sensor.py | 42 +
 devices/templates/vector/config/armaif.toml | 620 ++
 devices/templates/vector/config/endpoint.toml | 332 +
 devices/templates/vector/config/sensor.toml | 9 +
 devices/tests/__init__.py | 0
 devices/tests/devices_utils.py | 12 +
 devices/tests/endpoint_utils.py | 27 +
 devices/tests/test_devices.py | 28 +
 devices/tests/test_devices_api.py | 122 +
 devices/tests/test_endpoint_api.py | 325 +
 devices/tests/test_endpoint_device_service.py | 201 +
 devices/tests/test_files/__init__.py | 0
 devices/tests/test_files/bad_local.zeek | 116 +
 devices/tests/test_files/config.xml | 1 +
 devices/tests/test_files/good_local.zeek | 115 +
 devices/tests/test_files/live_if_config.xml | 1182 +++
 devices/tests/test_files/live_if_rulesets.tar | Bin 0 -> 28160 bytes
 devices/tests/test_filters.py | 30 +
 devices/tests/test_firewall_api.py | 236 +
 devices/tests/test_firewall_live.py | 78 +
 devices/tests/test_firewall_service.py | 144 +
 devices/tests/test_group_devices_api.py | 133 +
 devices/tests/test_sensor_api.py | 221 +
 devices/tests/test_sensor_send_message.py | 168 +
 devices/tests/test_vector_service.py | 32 +
 devices/urls.py | 18 +
 devices/urls_endpoint.py | 11 +
 devices/views/__init__.py | 0
 devices/views/arma_firewall.py | 111 +
 devices/views/device.py | 35 +
 devices/views/endpoint_views.py | 125 +
 devices/views/sensor.py | 119 +
 docker/compose/config/correlator/Dockerfile | 6 +
 .../config/elk/elasticsearch/Dockerfile | 11 +
 .../elasticsearch/config/elasticsearch.yml | 18 +
 .../elk/elasticsearch/elk-post-hooks.sh | 6 +
 docker/compose/config/elk/kibana/Dockerfile | 7 +
 .../config/elk/kibana/config/kibana.yml | 13 +
 docker/compose/config/elk/vector/.gitignore | 2 +
 docker/compose/config/elk/vector/Dockerfile | 3 +
 .../config/elk/vector/config/vector.yml | 4 +
 .../config/elk/vector/pipeline/default.toml | 22 +
 docker/compose/config/license/Dockerfile | 6 +
 docker/compose/config/nginx/default.conf | 56 +
 docker/compose/config/nginx/nginx.docker | 4 +
 docker/compose/config/python/pip.conf | 4 +
 docker/compose/config/python/python.docker | 16 +
 docker/compose/config/react/Dockerfile | 5 +
 .../compose/config/selenium/selenium.docker | 0
 docker/compose/correlator.yml | 18 +
 docker/compose/django.yml | 126 +
 docker/compose/el.yml | 57 +
 docker/compose/flower.yml | 14 +
 docker/compose/kibana.yml | 19 +
 docker/compose/license.yml | 18 +
 docker/compose/pgadmin.yml | 13 +
 docker/compose/selenium.yml | 40 +
 docker/django_entrypoint.sh | 21 +
 docker/generator.py | 223 +
 docker/print_dump.py | 14 +
 docker/scripts/set_mapping.py | 59 +
 docker/start.py | 397 +
 events/__init__.py | 0
 events/admin.py | 0
 events/api_urls.py | 8 +
 events/apps.py | 5 +
 events/constants.py | 13 +
 events/migrations/__init__.py | 0
 events/models/__init__.py | 0
 events/serializers/__init__.py | 0
 events/services/__init__.py | 0
 events/services/elk_string_search.py | 147 +
 events/services/inputs.py | 3 +
 events/tests/__init__.py | 0
 events/tests/test_api.py | 68 +
 events/tests/test_services.py | 112 +
 events/tests/utils.py | 83 +
 events/views/__init__.py | 0
 events/views/elk_string_query_search_api.py | 28 +
 finalschemaAPI.yaml | 2082 +++
 frontend/.eslintignore | 4 +
 frontend/.eslintrc.js | 1 +
 frontend/.gitignore | 6 +
 frontend/.npmrc | 1 +
 frontend/.prettierignore | 1 +
 frontend/.prettierrc.js | 1 +
 frontend/README.md | 25 +
 frontend/config/env.js | 104 +
 frontend/config/getHttpsConfig.js | 66 +
 frontend/config/jest/babelTransform.js | 29 +
 frontend/config/jest/cssTransform.js | 14 +
 frontend/config/jest/fileTransform.js | 42 +
 frontend/config/modules.js | 134 +
 frontend/config/paths.js | 74 +
 frontend/config/webpack.config.js | 753 ++
 .../persistentCache/createEnvironmentHash.js | 9 +
 frontend/config/webpackDevServer.config.js | 127 +
 frontend/eslint.config.js | 112 +
 frontend/finalschemaAPI.yaml | 5882 ++++++++++++
 frontend/prettier.config.js | 23 +
 frontend/public/favicon.ico | Bin 0 -> 15086 bytes
 frontend/public/index.html | 17 +
 frontend/public/robots.txt | 3 +
 frontend/scripts/build.js | 212 +
 frontend/scripts/start.js | 154 +
 frontend/scripts/test.js | 53 +
 frontend/src/App/App.css | 4 +
 frontend/src/App/App.tsx | 66 +
 frontend/src/App/logo.svg | 1 +
 frontend/src/assets/constants/timeZones.js | 441 +
 frontend/src/assets/icons/upload.svg | 43 +
 frontend/src/assets/images/Svg/languageRU.svg | 9 +
 frontend/src/assets/images/Svg/languageUS.svg | 23 +
 frontend/src/assets/images/close_sidebar.svg | 20 +
 frontend/src/assets/images/open_sidebar.svg | 20 +
 frontend/src/assets/logo/logo_lg.svg | 274 +
 frontend/src/assets/logo/logo_md.svg | 1 +
 frontend/src/assets/logo/logo_sm.svg | 86 +
 .../src/components/AppTable/AppTable.scss | 12 +
 frontend/src/components/AppTable/AppTable.tsx | 33 +
 .../AutoCompleteMulti.module.scss | 21 +
 .../AutocompleteMulti/AutoCompleteMulti.tsx | 173 +
 .../Breadcrump/BreadCrumbs.module.scss | 3 +
 .../src/components/Breadcrump/BreadCrumbs.tsx | 50 +
 .../src/components/Breadcrump/helpers.tsx | 52 +
 frontend/src/components/Breadcrump/i18n.ts | 62 +
 frontend/src/components/Breadcrump/types.ts | 7 +
 .../Dashboard/Dashboard.module.scss | 44 +
 .../src/components/Dashboard/Dashboard.tsx | 137 +
 .../Dashboard/components/DrawerFooter.tsx | 27 +
 .../Dashboard/components/DrawerHeader.tsx | 59 +
 .../components/EditAndSaveButton.tsx | 47 +
 frontend/src/components/Dashboard/index.ts | 3 +
 frontend/src/components/Dashboard/types.ts | 71 +
 .../useSetCurrentCardInfoAfterMount.tsx | 22 +
 .../DashboardTable/DashboardTable.module.scss | 88 +
 .../DashboardTable/DashboardTable.tsx | 271 +
 .../SingleActionsCard/SingleActionsCard.tsx | 96 +
 .../SingleActionsCard/buttonSet.tsx | 127 +
 .../components/SingleActionsCard/types.ts | 62 +
 .../components/StatusType/StatusType.tsx | 65 +
 .../useActionsWithSelectedCards/index.ts | 3 +
 .../useActionsWithSelectedCards/types.ts | 35 +
 .../useActionsWithSelectedCards.tsx | 110 +
 .../components/DashboardTable/constants.ts | 27 +
 .../components/DashboardTable/helpers.test.js | 132 +
 .../src/components/DashboardTable/helpers.ts | 108 +
 .../src/components/DashboardTable/index.ts | 8 +
 .../src/components/DashboardTable/types.ts | 21 +
 .../DashboardTable/useGroupTableColumns.tsx | 77 +
 .../components/DynamicTable/DynamicTable.tsx | 404 +
 frontend/src/components/DynamicTable/types.ts | 19 +
 .../EllipsisWithToggle.module.scss | 30 +
 .../EllipsisWithToggle/EllipsisWithToggle.tsx | 54 +
 .../ForeignKeyEntitiesTable.tsx | 242 +
 .../ForeignKeyEntitiesTable/i18n.ts | 69 +
 .../makeForeignKeyEntitiesTableColumns.tsx | 150 +
 .../FormFieldsBlock.module.scss | 61 +
 .../FormFieldsBlock/FormFieldsBlock.tsx | 65 +
 .../GenericDashboard/GenericDashboard.tsx | 231 +
 .../GenericDashboard/hooks/index.ts | 4 +
 .../hooks/useCollectiveDelete.ts | 38 +
 .../GenericDashboard/hooks/useExportData.ts | 76 +
 .../GenericDashboard/hooks/useImportData.ts | 78 +
 .../GenericDashboard/hooks/useItemDelete.ts | 52 +
 .../src/components/GenericDashboard/index.ts | 3 +
 .../src/components/GenericDashboard/types.ts | 13 +
 .../GenericGroupCard/GenericGroupCard.tsx | 78 +
 .../GenericGroupsDashboard.tsx | 51 +
 .../GenericGroupsDashboardUiStore.ts | 20 +
 .../GenericGroupsDashboard/index.ts | 3 +
 .../makeGenericGroupsTableColumns.tsx | 68 +
 .../GenericGroupsDashboard/types.ts | 10 +
 frontend/src/components/Layout/Layout.tsx | 18 +
 .../components/Footer/Footer.module.scss | 8 +
 .../Layout/components/Footer/Footer.tsx | 32 +
 .../Layout/components/Footer/index.ts | 3 +
 .../Layout/components/Header/Header.scss | 27 +
 .../Layout/components/Header/Header.tsx | 108 +
 .../Header/LanguageMenu/LanguageMenu.tsx | 45 +
 .../components/Header/MainMenu/MainMenu.tsx | 122 +
 .../Header/MainMenu/Menu.module.scss | 14 +
 .../MainMenu/__tests__/helpers.test.tsx | 93 +
 .../components/Header/MainMenu/helpers.tsx | 49 +
 .../Layout/components/Header/MainMenu/i18n.ts | 72 +
 .../components/Header/MainMenu/index.ts | 3 +
 .../components/Header/MainMenu/types.ts | 39 +
 .../Header/SecondaryMenu/SecondaryMenu.tsx | 52 +
 .../components/Header/SecondaryMenu/types.ts | 3 +
 .../Layout/components/Header/index.ts | 3 +
 .../LeftSidebar/LeftSidebar.module.scss | 37 +
 .../components/LeftSidebar/LeftSidebar.tsx | 72 +
 .../LeftSidebar/LeftSidebarItem.tsx | 20 +
 .../Layout/components/LeftSidebar/helpers.tsx | 226 +
 .../Layout/components/LeftSidebar/index.ts | 4 +
 frontend/src/components/Layout/index.ts | 8 +
 .../src/components/Messages/errorMessage.ts | 14 +
 frontend/src/components/Messages/index.ts | 5 +
 .../components/Messages/progressMessages.ts | 48 +
 .../components/Messages/runWithMessages.ts | 35 +
 .../src/components/Messages/successMessage.ts | 16 +
 .../src/components/Messages/warningMessage.ts | 16 +
 .../CardInfoDrawer/CardInfoDrawer.module.scss | 36 +
 .../Modals/CardInfoDrawer/CardInfoDrawer.tsx | 162 +
 .../Modals/ConfirmModal/ConfirmModal.tsx | 19 +
 .../components/Modals/HelpQuery/FieldTab.tsx | 59 +
 .../Modals/HelpQuery/HelpQuery.module.scss | 5 +
 .../components/Modals/HelpQuery/HelpQuery.tsx | 44 +
 .../components/Modals/HelpQuery/SyntaxTab.tsx | 80 +
 .../components/Modals/TagDrawer/TagDrawer.tsx | 67 +
 .../Modals/TagDrawer/TagFormTemplate.tsx | 119 +
 frontend/src/components/Notification/index.ts | 3 +
 .../components/Notification/notification.tsx | 10 +
 .../src/components/Search/Search.module.scss | 66 +
 frontend/src/components/Search/Search.tsx | 118 +
 frontend/src/components/Search/index.ts | 3 +
 .../SelectMultiple/SelectMultiple.tsx | 51 +
 .../SelectWithObjectValue.tsx | 67 +
 .../StringDatePicker/StringDatePicker.tsx | 46 +
 .../StringTimePicker/StringTimePicker.tsx | 40 +
 .../TextHighlighter/TextHighlighter.tsx | 37 +
 frontend/src/components/constants.ts | 2 +
 .../src/components/formValidators/index.ts | 3 +
 .../formValidators/validateIp4Address.ts | 21 +
 .../validateMultiplePortsAsJsonString.ts | 35 +
 .../formValidators/validatePassword.ts | 44 +
 .../src/components/helpers/deepDifference.ts | 19 +
 frontend/src/components/helpers/delay.ts | 6 +
 .../helpers/handleFormRequestError.ts | 123 +
 frontend/src/components/helpers/index.ts | 5 +
 .../components/helpers/makeListForSelect.ts | 9 +
 .../helpers/makeListForSelectWithNull.ts | 10 +
 .../components/helpers/sorterColumnTable.ts | 14 +
 frontend/src/components/i18n.ts | 358 +
 frontend/src/components/index.ts | 10 +
 frontend/src/components/types.ts | 26 +
 frontend/src/core/i18n/config.ts | 37 +
 frontend/src/core/i18n/constants.ts | 6 +
 frontend/src/core/i18n/helpers.ts | 98 +
 frontend/src/core/i18n/index.ts | 5 +
 frontend/src/core/i18n/namespaces.ts | 19 +
 frontend/src/core/i18n/types.ts | 1 +
 frontend/src/core/i18n/useTranslation.ts | 1 +
 frontend/src/enviroments/enviroments.js | 92 +
 frontend/src/hooks/index.ts | 2 +
 frontend/src/hooks/useAsyncEffect.ts | 7 +
 frontend/src/hooks/useClickOutsideElement.ts | 24 +
 frontend/src/index.css | 16 +
 frontend/src/index.tsx | 28 +
 frontend/src/mock/TitlesData.tsx | 32 +
 frontend/src/mock/dataAssets.tsx | 54 +
 frontend/src/myApi.ts | 4189 +++++++++
 .../pages/500/TechnicalProblem.module.scss | 0
 frontend/src/pages/500/TechnicalProblem.tsx | 7 +
 frontend/src/pages/500/index.ts | 1 +
 .../AccountCard/AccountCard.tsx | 52 +
 .../AccountForm/AccountForm.tsx | 272 +
 .../AccountsDashboard.module.scss | 7 +
 .../AccountsDashboard/AccountsDashboard.tsx | 117 +
 .../AccountsDashboardUiStore.ts | 29 +
 frontend/src/pages/AccountsDashboard/i18n.ts | 113 +
 frontend/src/pages/AccountsDashboard/index.ts | 2 +
 .../makeAccountsDashboardActions.ts | 37 +
 .../makeAccountsDashboardColumns.tsx | 113 +
 .../src/pages/AppRoutes/AppRoutes.module.scss | 34 +
 frontend/src/pages/AppRoutes/AppRoutes.tsx | 30 +
 frontend/src/pages/AppRoutes/constants.ts | 124 +
 frontend/src/pages/AppRoutes/index.ts | 3 +
 frontend/src/pages/AppRoutes/routes.tsx | 210 +
 .../src/pages/AssetsDashboard/Assets.scss | 23 +
 .../AssetsCardInfo/AssetsCardInfo.module.scss | 89 +
 .../AssetsCardInfo/FormItems.tsx | 206 +
 .../AssetsCardInfo/FormItemsLarge.tsx | 227 +
 .../AssetsDashboard/AssetsCardInfo/types.ts | 12 +
 .../AssetsCardInfo/validation.ts | 18 +
 .../pages/AssetsDashboard/AssetsDashboard.tsx | 536 ++
 .../AssetsLeftSideBarContent.tsx | 33 +
 .../pages/AssetsDashboard/AssetsUiStore.tsx | 239 +
 frontend/src/pages/AssetsDashboard/MOCK.ts | 33 +
 .../__tests__/converter.test.ts | 44 +
 .../src/pages/AssetsDashboard/constants.ts | 71 +
 .../src/pages/AssetsDashboard/converter.tsx | 82 +
 .../pages/AssetsDashboard/helpers.module.scss | 28 +
 frontend/src/pages/AssetsDashboard/helpers.ts | 59 +
 frontend/src/pages/AssetsDashboard/i18n.ts | 217 +
 frontend/src/pages/AssetsDashboard/index.ts | 5 +
 frontend/src/pages/AssetsDashboard/types.ts | 141 +
 .../AssetsDashboard/useAssetsColumns.tsx | 172 +
 .../CorrelationRules.module.scss | 82 +
 .../CorrelationRulesActionsEditor.tsx | 191 +
 .../actionForms/AssetActionForm.tsx | 209 +
 .../actionForms/BashActionForm.tsx | 45 +
 .../actionForms/CommonActionFormProps.ts | 15 +
 .../actionForms/ExecActionForm.tsx | 84 +
 .../actionForms/FirewallActionForm.tsx | 423 +
 .../actionForms/HttpActionForm.tsx | 75 +
 .../actionForms/IncidentActionForm.tsx | 194 +
 .../actionForms/SyslogActionForm.tsx | 106 +
 .../actionForms/helpers.tsx | 91 +
 .../actionForms/index.ts | 7 +
 .../makeNewCorrelationRuleAction.ts | 91 +
 .../CorrelationRulesCard.module.scss | 8 +
 .../CorrelationRulesCard.tsx | 161 +
 .../CorrelationRulesChecker.tsx | 219 +
 .../CorrelationRulesCheckerHelpDialog.tsx | 338 +
 .../CorrelationRulesCheckerUiStore.ts | 89 +
 .../CorrelationRulesChecker/consts.ts | 26 +
 .../makeCorrelationRulesEventsColumns.tsx | 68 +
 .../CorrelationRulesDashboard.tsx | 107 +
 .../CorrelationRulesUiStore.ts | 41 +
 .../CorrelationRulesDashboard/helpers.ts | 48 +
 .../pages/CorrelationRulesDashboard/i18n.ts | 795 ++
 .../pages/CorrelationRulesDashboard/index.ts | 3 +
 .../makeCorrelationRulesColumns.tsx | 106 +
 .../pages/CorrelationRulesDashboard/types.ts | 199 +
 .../CorrelationRulesGroupsDashboard.tsx | 40 +
 .../CorrelationRulesGroupsDashboardUiStore.ts | 10 +
 .../CorrelationRulesGroupsDashboard/index.ts | 2 +
 .../DevicesDashboard/Devices.module.scss | 19 +
 .../AntivirusResultTableUiStore.ts | 39 +
 .../AntivirusResutTable.tsx | 107 +
 .../DevicesCardInfo.module.scss | 16 +
 .../DevicesCardInfo/Endpoint/EndpointForm.tsx | 227 +
 .../Endpoint/EndpointLargeFormItems.tsx | 256 +
 .../Endpoint/RotationBlock.tsx | 168 +
 .../DevicesCardInfo/Endpoint/index.ts | 1 +
 .../DevicesCardInfo/Endpoint/types.ts | 123 +
 .../Endpoint/useEndpointFormConfiguration.tsx | 279 +
 .../DevicesCardInfo/Firewall/FireWallForm.tsx | 59 +
 .../DevicesCardInfo/Firewall/index.ts | 1 +
 .../Firewall/useFirewallFormConfiguration.tsx | 171 +
 .../DevicesCardInfo/GeneralForm.tsx | 96 +
 .../SelectorAddNewEventSource.tsx | 48 +
 .../DevicesCardInfo/Sensor/SensorForm.tsx | 136 +
 .../DevicesCardInfo/Sensor/index.ts | 1 +
 .../Sensor/useSensorFormConfiguration.tsx | 242 +
 .../DevicesDashboard/DevicesCardInfo/types.ts | 96 +
 .../DevicesDashboard/DevicesDashboard.tsx | 1061 +++
 .../DevicesLeftSideBarContent.tsx | 31 +
 .../SensorInformationModal.tsx | 35 +
 .../DevicesDashboard/Stores/DevicesUiStore.ts | 539 ++
 .../DevicesDashboard/Stores/EnpointUiStore.ts | 60 +
 .../pages/DevicesDashboard/Stores/index.ts | 1 +
 .../src/pages/DevicesDashboard/constants.ts | 248 +
 .../src/pages/DevicesDashboard/helpers.ts | 129 +
 frontend/src/pages/DevicesDashboard/i18n.ts | 712 ++
 frontend/src/pages/DevicesDashboard/index.ts | 5 +
 .../DevicesDashboard/makeDevicesColumns.tsx | 276 +
 frontend/src/pages/DevicesDashboard/types.ts | 340 +
 .../src/pages/DevicesDashboard/useFields.ts | 56 +
 .../EventExportSettings.scss | 8 +
 .../EventExportSettings.tsx | 22 +
 .../pages/EventExportSettings/TableOPCUA.tsx | 117 +
 .../pages/EventExportSettings/TableSyslog.tsx | 115 +
 .../EventSourceDashboard.scss | 0
 .../EventSourceDashboard.tsx | 5 +
 .../EventCardInfo.module.scss | 65 +
 .../EventsJournalDashboard/EventCardInfo.tsx | 214 +
 .../EventsJournalDashboard/EventsFilters.tsx | 93 +
 .../EventsJournal.module.scss | 0
 .../EventsJournalActions.tsx | 55 +
 .../EventsJournalApi.ts | 36 +
 .../EventsJournalDashboard.tsx | 109 +
 .../EventsJournalUiStore.tsx | 48 +
 .../pages/EventsJournalDashboard/columns.tsx | 101 +
 .../pages/EventsJournalDashboard/constants.ts | 65 +
 .../EventsJournalDashboard/converter.tsx | 63 +
 .../src/pages/EventsJournalDashboard/i18n.ts | 200 +
 .../src/pages/EventsJournalDashboard/index.ts | 5 +
 .../src/pages/EventsJournalDashboard/types.ts | 202 +
 .../useEventsJournalsLeftSideBarContents.ts | 13 +
 .../pages/IncidentsDashboard/Incidents.scss | 11 +
 .../IncidentsDashboard/IncidentsActions.tsx | 17 +
 .../pages/IncidentsDashboard/IncidentsApi.ts | 25 +
 .../IncidentsCardInfo/CardWithPagination.tsx | 41 +
 .../IncidentsCardInfo/DrawerFooter.tsx | 28 +
 .../IncidentsCardInfo/DrawerHeader.tsx | 126 +
 .../IncidentsCardInfo/EditAndSaveButton.tsx | 74 +
 .../IncidentsCardInfo/EventsTable.tsx | 110 +
 .../IncidentCardInfoFormTemplate.tsx | 201 +
 .../IncidentsCardInfo.module.scss | 16 +
 .../IncidentsCardInfo/IncidentsCardInfo.tsx | 287 +
 .../IncidentsCardInfo/helpers.ts | 35 +
 .../IncidentsCardInfo/index.ts | 1 +
 .../IncidentsCardInfo/types.ts | 9 +
 .../IncidentsDashboard/IncidentsDashboard.tsx | 132 +
 .../IncidentsDashboard/IncidentsFilters.tsx | 96 +
 .../IncidentsDashboard/IncidentsUiStore.tsx | 177 +
 frontend/src/pages/IncidentsDashboard/MOCK.ts | 193 +
 .../NCIRCCNotificationForm.module.scss | 30 +
 .../NCIRCCNotificationForm.tsx | 369 +
 .../NCIRCCNotificationForm/constants.tsx | 259 +
 .../NCIRCCNotificationForm/helpers.tsx | 473 +
 .../NCIRCCNotificationForm/index.ts | 1 +
 .../NCIRCCNotificationForm/types.ts | 93 +
 .../__tests__/helpers.test.ts | 94 +
 .../src/pages/IncidentsDashboard/constants.ts | 9 +
 .../src/pages/IncidentsDashboard/helpers.tsx | 72 +
 frontend/src/pages/IncidentsDashboard/i18n.ts | 543 ++
 .../src/pages/IncidentsDashboard/index.ts | 5 +
 .../makeIncidentsColumns.tsx | 199 +
 .../src/pages/IncidentsDashboard/types.ts | 302 +
 .../LicenseActivationPage.module.scss | 34 +
 .../LicenseActivationPage.test.tsx | 184 +
 .../LicenseActivationPage.tsx | 236 +
 .../LicenseActivationPageStore.ts | 88 +
 .../src/pages/LicenseActivationPage/i18n.ts | 44 +
 .../src/pages/LicenseActivationPage/index.ts | 2 +
 .../pages/LicensePage/LicensePage.module.scss | 18 +
 .../pages/LicensePage/LicensePage.test.tsx | 97 +
 .../src/pages/LicensePage/LicensePage.tsx | 176 +
 frontend/src/pages/LicensePage/i18n.ts | 51 +
 frontend/src/pages/LicensePage/index.ts | 1 +
 frontend/src/pages/Login/Login.scss | 30 +
 frontend/src/pages/Login/Login.tsx | 68 +
 frontend/src/pages/Login/LoginApi.ts | 22 +
 .../pages/NCIRCCDashboard/BulletinContent.tsx | 18 +
 .../src/pages/NCIRCCDashboard/Bulletins.tsx | 81 +
 .../src/pages/NCIRCCDashboard/Incidents.scss | 14 +
 frontend/src/pages/NCIRCCDashboard/MOCK.tsx | 636 ++
 .../pages/NCIRCCDashboard/NCIRCCDashboard.tsx | 94 +
 .../pages/NCIRCCDashboard/NCIRCCUiStore.tsx | 140 +
 .../pages/NCIRCCDashboard/Ncircc.module.scss | 39 +
 .../NcirccMessages/Messages.module.scss | 19 +
 .../NCIRCCIncidentMessenger.module.scss | 39 +
 .../NCIRCCIncidentMessenger.tsx | 80 +
 .../NCIRCCIncidentMessenger/index.ts | 3 +
 .../NCIRCCNotification.module.scss | 14 +
 .../NCIRCCNotification/NCIRCCNotification.tsx | 91 +
 .../NCIRCCNotification/index.ts | 3 +
 .../NCIRCCNotificationsList.tsx | 53 +
 .../NotificationList.module.scss | 22 +
 .../NCIRCCNotificationsList/index.ts | 1 +
 .../NcirccMessages/NcirccMessages.tsx | 164 +
 .../NCIRCCDashboard/NcirccMessages/helpers.ts | 5 +
 .../NCIRCCDashboard/NcirccMessages/index.ts | 1 +
 .../OrganizationCard.module.scss | 20 +
 .../OrganizationCard/OrganizationCard.tsx | 218 +
 .../OrganizationCard/helpers.ts | 36 +
 .../StatusWithButtonUuid.module.scss | 9 +
 .../components/StatusWithButtonUuid.tsx | 34 +
 .../pages/NCIRCCDashboard/components/index.ts | 3 +
 .../pages/NCIRCCDashboard/components/types.ts | 6 +
 frontend/src/pages/NCIRCCDashboard/i18n.ts | 94 +
 frontend/src/pages/NCIRCCDashboard/index.ts | 3 +
 frontend/src/pages/NCIRCCDashboard/types.ts | 55 +
 .../NetworkActivityMap.scss | 0
 .../NetworkActivityMap/NetworkActivityMap.tsx | 7 +
 frontend/src/pages/NetworkMap/NetworkMap.scss | 0
 frontend/src/pages/NetworkMap/NetworkMap.tsx | 7 +
 .../pages/ProfilePage/ProfilePage.module.scss | 23 +
 .../src/pages/ProfilePage/ProfilePage.tsx | 121 +
 frontend/src/pages/ProfilePage/i18n.ts | 17 +
 frontend/src/pages/ProfilePage/index.ts | 1 +
 .../RotationDashboard/EventsRotationType.tsx | 49 +
 .../IncidentsRotationSettings.tsx | 49 +
 frontend/src/pages/RotationDashboard/MOCK.ts | 37 +
 .../RotationDashboard/RotationDashboard.tsx | 45 +
 .../RotationDashboard/RotationUiStore.tsx | 44 +
 .../pages/RotationDashboard/Template/Main.tsx | 135 +
 .../RotationDashboard/Template/Period.tsx | 165 +
 .../Template/Template.module.scss | 25 +
 .../RotationDashboard/Template/helpers.ts | 53 +
 .../__tests__/helpers.test.ts | 58 +
 .../src/pages/RotationDashboard/helpers.ts | 35 +
 frontend/src/pages/RotationDashboard/i18n.ts | 200 +
 frontend/src/pages/RotationDashboard/index.ts | 3 +
 frontend/src/pages/RotationDashboard/types.ts | 92 +
 .../SettingsDashboard/Authentication.tsx | 115 +
 .../SettingsDashboard/SettingsDashboard.tsx | 44 +
 .../SettingsDashboard/SettingsUiStore.ts | 54 +
 .../SettingsDashboard/TlsSertificate.tsx | 304 +
 .../src/pages/SettingsDashboard/constatns.ts | 50 +
 frontend/src/pages/SettingsDashboard/i18n.ts | 133 +
 frontend/src/pages/SettingsDashboard/index.ts | 3 +
 .../src/pages/SettingsDashboard/validation.ts | 13 +
 .../src/pages/StorageDashboard/Storage.scss | 11 +
 .../src/pages/StorageDashboard/StorageApi.ts | 35 +
 .../StorageCardInfo/DrawerFooter.tsx | 27 +
 .../StorageCardInfo/DrawerHeader.tsx | 24 +
 .../StorageCardInfo/StorageCardInfo.tsx | 105 +
 .../StorageDashboard/StorageCardInfo/index.ts | 3 +
 .../StorageDashboard/StorageDashboard.tsx | 244 +
 .../pages/StorageDashboard/StorageFilters.tsx | 92 +
 .../StorageDashboard/StorageLeftSideBar.tsx | 18 +
 .../StorageDashboard/StorageMockData.tsx | 206 +
 .../pages/StorageDashboard/StorageUiStore.tsx | 55 +
 .../src/pages/StorageDashboard/converter.tsx | 37 +
 .../src/pages/StorageDashboard/helpers.ts | 44 +
 frontend/src/pages/StorageDashboard/i18n.ts | 110 +
 .../pages/StorageDashboard/storageColumns.tsx | 99 +
 frontend/src/pages/StorageDashboard/types.ts | 106 +
 .../WidgetsDashboard/WidgetsDashboard.tsx | 13 +
 frontend/src/react-app-env.d.ts | 71 +
 frontend/src/reportWebVitals.ts | 16 +
 frontend/src/services/FileDownload.ts | 56 +
 frontend/src/services/RestApi.ts | 91 +
 frontend/src/services/logoutApi.ts | 18 +
 frontend/src/services/request.ts | 127 +
 frontend/src/setupTests.ts | 20 +
 .../src/stores/AccountsLeftSideBarContent.tsx | 20 +
 frontend/src/stores/AccountsStore.ts | 174 +
 frontend/src/stores/AccountsStoreNew.ts | 38 +
 .../src/stores/AntivirusResultsTableStore.ts | 64 +
 frontend/src/stores/AppStore.ts | 45 +
 frontend/src/stores/AssetsGroupsStore.ts | 16 +
 .../src/stores/AssetsManufacturersStore.ts | 16 +
 frontend/src/stores/AssetsOsesStore.ts | 16 +
 frontend/src/stores/AssetsStore.ts | 250 +
 .../src/stores/CorrelationRulesGroupsStore.ts | 16 +
 frontend/src/stores/CorrelationRulesStore.ts | 37 +
 frontend/src/stores/CurrentUserStore.ts | 45 +
 frontend/src/stores/Decorators/Loadeble.ts | 3 +
 .../src/stores/Decorators/LoadingDecorator.ts | 41 +
 frontend/src/stores/DevicesStore.ts | 331 +
 frontend/src/stores/EndpointStore.ts | 31 +
 frontend/src/stores/EntityStore.ts | 75 +
 frontend/src/stores/EventsJournalStore.ts | 80 +
 frontend/src/stores/FirewallStore.ts | 78 +
 .../src/stores/GenericDashboardUiStore.ts | 137 +
 frontend/src/stores/GenericDataStore.ts | 310 +
 frontend/src/stores/HelpersStore.ts | 19 +
 .../src/stores/IncidentsCategoriesStore.ts | 16 +
 frontend/src/stores/IncidentsEffectsStore.ts | 16 +
 .../stores/IncidentsRecommendationsStore.ts | 16 +
 frontend/src/stores/IncidentsStore.ts | 172 +
 frontend/src/stores/LicenseStore.ts | 48 +
 frontend/src/stores/NCIRCCStore.ts | 188 +
 frontend/src/stores/RootStore.ts | 190 +
 frontend/src/stores/RotationStore.ts | 84 +
 frontend/src/stores/SensorStore.ts | 54 +
 frontend/src/stores/SettingsStore.ts | 91 +
 frontend/src/stores/StorageStore.ts | 112 +
 frontend/src/stores/StoreProvider.ts | 15 +
 frontend/src/stores/SystemInfoStore.ts | 51 +
 frontend/src/stores/TagsStore.ts | 104 +
 frontend/src/styles/antdChangedStyles.scss | 11 +
 frontend/src/styles/headlines.scss | 17 +
 frontend/src/styles/helpers.scss | 106 +
 frontend/src/styles/styles.scss | 3 +
 frontend/src/styles/variables.scss | 1 +
 .../testingUtils/handleLocaleFileRequest.ts | 24 +
 frontend/src/testingUtils/index.ts | 2 +
 .../testingUtils/testRenderWithProviders.tsx | 38 +
 frontend/src/types/default.ts | 582 ++
 frontend/src/types/types.d.ts | 5 +
 .../typings/i18next-react-postprocessor.d.ts | 1 +
 frontend/typings/integrations.d.ts | 10 +
 frontend/typings/react-i18next.d.ts | 66 +
 incident/__init__.py | 0
 incident/admin.py | 22 +
 incident/apps.py | 5 +
 incident/filters.py | 18 +
 incident/migrations/__init__.py | 0
 incident/models.py | 132 +
 incident/serializers/__init__.py | 0
 incident/serializers/incident.py | 94 +
 .../serializers/incident_edit_serializer.py | 10 +
 incident/services/__init__.py | 0
 incident/services/ws_incidents.py | 18 +
 incident/tests/__init__.py | 0
 incident/tests/test_api.py | 296 +
 incident/tests/test_filters.py | 30 +
 incident/tests/test_incidents_list.py | 83 +
 incident/tests/test_serializers.py | 67 +
 incident/urls.py | 18 +
 incident/views/__init__.py | 0
 incident/views/incidents_api.py | 127 +
 incident_export/__init__.py | 0
 incident_export/admin.py | 7 +
 incident_export/apps.py | 8 +
 incident_export/enums.py | 22 +
 incident_export/migrations/__init__.py | 0
 incident_export/models.py | 69 +
 incident_export/serializers.py | 66 +
 incident_export/services/export.py | 85 +
 incident_export/signals.py | 16 +
 incident_export/tasks.py | 78 +
 incident_export/tests/__init__.py | 0
 incident_export/tests/test_cef_format.py | 207 +
 incident_export/tests/test_incident_export.py | 79 +
 .../tests/test_incident_export_api.py | 104 +
 incident_export/urls.py | 12 +
 incident_export/views.py | 52 +
 inputs/__init__.py | 0
 inputs/admin.py | 26 +
 inputs/apps.py | 5 +
 inputs/constants.py | 9 +
 inputs/enums.py | 29 +
 inputs/migrations/__init__.py | 0
 inputs/models.py | 96 +
 inputs/serializers.py | 58 +
 inputs/services/__init__.py | 0
 inputs/services/delete_input.py | 30 +
 inputs/services/inputs.py | 82 +
 inputs/services/remove_loginputs.py | 20 +
 inputs/services/update_config.py | 75 +
 inputs/tests/__init__.py | 0
 inputs/tests/test_serializer.py | 69 +
 inputs/urls.py | 11 +
 inputs/views.py | 30 +
 license/.gitignore | 4 +
 license/CHANGELOG.md | 41 +
 license/README.md | 117 +
 license/config_example.yaml | 22 +
 license/go.mod | 12 +
 license/go.sum | 599 ++
 license/logging.go | 57 +
 license/main.go | 71 +
 license_info/__init__.py | 0
 license_info/admin.py | 3 +
 license_info/apps.py | 5 +
 license_info/decorators.py | 22 +
 license_info/exeptions.py | 6 +
 license_info/migrations/__init__.py | 0
 license_info/templatetags/__init__.py | 0
 license_info/templatetags/license_info.py | 30 +
 license_info/tests/__init__.py | 0
 license_info/tests/test_middleware.py | 73 +
 license_info/tests/test_tools.py | 40 +
 license_info/tools.py | 126 +
 license_info/urls.py | 11 +
 license_info/views.py | 92 +
 logstash/.gitignore | 1 +
 logstash/__init__.py | 0
 logstash/admin.py | 0
 logstash/api.py | 110 +
 logstash/apps.py | 23 +
 logstash/constants.py | 11 +
 logstash/migrations/__init__.py | 0
 logstash/models/__init__.py | 0
 logstash/serializers.py | 41 +
 logstash/tasks.py | 88 +
 logstash/tests/__init__.py | 0
 logstash/tests/test_license.py | 45 +
 logstash/tests/test_services.py | 22 +
 logstash/tests/tests_media/wrong_format.png | Bin 0 -> 64398 bytes
 logstash/urls.py | 14 +
 make_release.py | 37 +
 manage.py | 21 +
 miggunicorn.sh | 0
 ncircc/__init__.py | 0
 ncircc/admin.py | 13 +
 ncircc/apps.py | 5 +
 ncircc/enums/__init__.py | 0
 ncircc/enums/notifications.py | 130 +
 ncircc/migrations/__init__.py | 0
 ncircc/models/__init__.py | 0
 ncircc/models/comments.py | 24 +
 ncircc/models/notification.py | 95 +
 ncircc/serializers/__init__.py | 0
 ncircc/serializers/comments.py | 20 +
 ncircc/serializers/notification.py | 57 +
 ncircc/services/__init__.py | 0
 ncircc/services/comments.py | 185 +
 ncircc/services/notification.py | 339 +
 ncircc/services/utils.py | 11 +
 ncircc/tasks.py | 42 +
 ncircc/tests/__init__.py | 0
 ncircc/tests/test_comments_api.py | 98 +
 ncircc/tests/test_comments_services.py | 108 +
 ncircc/tests/test_notification_api.py | 132 +
 ncircc/tests/test_notification_serializers.py | 106 +
 ncircc/tests/test_notification_services.py | 308 +
 ncircc/tests/test_utils.py | 36 +
 ncircc/tests/utils.py | 136 +
 ncircc/urls.py | 11 +
 ncircc/views/__init__.py | 0
 ncircc/views/notification_api.py | 134 +
 networkmap/__init__.py | 0
 networkmap/admin.py | 5 +
 networkmap/api.py | 510 +
 networkmap/apps.py | 5 +
 networkmap/migrations/__init__.py | 0
 networkmap/models.py | 38 +
 networkmap/serializers.py | 147 +
 networkmap/services.py | 27 +
 networkmap/tasks.py | 13 +
 networkmap/tests/__init__.py | 0
 networkmap/tests/migration_fixtures.py | 42 +
 networkmap/tests/netmap_test_utils.py | 27 +
 networkmap/tests/test_api.py | 346 +
 networkmap/tests/test_data/controller.png | Bin 0 -> 3170 bytes
 networkmap/urls.py | 19 +
 notifications/__init__.py | 0
 notifications/admin.py | 5 +
 notifications/apps.py | 6 +
 notifications/enums.py | 19 +
 notifications/migrations/__init__.py | 0
 notifications/models.py | 15 +
 notifications/serializers.py | 9 +
 notifications/services/__init__.py | 0
 notifications/services/notification_sender.py | 95 +
 notifications/services/ws.py | 56 +
 notifications/tests/__init__.py | 0
 notifications/tests/test_notifications_api.py | 58 +
 .../tests/test_notifications_service.py | 31 +
 notifications/tests/test_ws_consumers.py | 46 +
 notifications/urls.py | 11 +
 notifications/views.py | 24 +
 perms/__init__.py | 0
 perms/admin.py | 3 +
 perms/apps.py | 5 +
 perms/migrations/__init__.py | 0
 perms/models.py | 241 +
 perms/services/__init__.py | 0
 perms/services/get_permissions.py | 35 +
 perms/tests/__init__.py | 0
 perms/tests/test_api_perms.py | 41 +
 perms/tests/test_migrations.py | 37 +
 perms/tests/test_services.py | 33 +
 perms/views.py | 3 +
 product_version | 1 +
 pytest.ini | 12 +
 requirements.txt | 54 +
 requirements_test.txt | 13 +
 rotation/__init__.py | 0
 rotation/admin.py | 7 +
 rotation/apps.py | 5 +
 rotation/constants.py | 12 +
 rotation/enums.py | 40 +
 rotation/migrations/__init__.py | 0
 rotation/models.py | 39 +
 rotation/serializers.py | 77 +
 rotation/services/cron_utils.py | 157 +
 rotation/services/update_schedule.py | 53 +
 rotation/tasks.py | 227 +
 rotation/tests/test_crontab_functions.py | 193 +
 rotation/tests/test_serializers.py | 66 +
 rotation/tests/test_tasks.py | 37 +
 rotation/tests/test_views.py | 142 +
 rotation/urls.py | 13 +
 rotation/views.py | 66 +
 schema.yml | 8200 +++++++++++++++++
 search-solid.svg | 1 +
 start_dev_django_app.sh | 40 +
 storage/__init__.py | 0
 storage/admin.py | 12 +
 storage/apps.py | 5 +
 storage/enums.py | 8 +
 storage/exception.py | 6 +
 storage/export.py | 226 +
 storage/migrations/__init__.py | 0
 storage/models.py | 115 +
 storage/serializers.py | 42 +
 storage/services.py | 37 +
 storage/tasks.py | 66 +
 storage/tests/__init__.py | 0
 storage/tests/test.py | 264 +
 storage/tests/test_export.py | 191 +
 storage/urls.py | 13 +
 storage/views.py | 99 +
 users/__init__.py | 0
 users/admin.py | 19 +
 users/api.py | 54 +
 users/apps.py | 11 +
 users/constants.py | 14 +
 users/migrations/__init__.py | 0
 users/models.py | 37 +
 users/serializers.py | 142 +
 users/services/__init__.py | 0
 users/services/signals.py | 18 +
 users/services/userinfo.py | 128 +
 users/services/validators.py | 88 +
 users/tests.py | 621 ++
 users/urls.py | 9 +
 1113 files changed, 102957 insertions(+)
 create mode 100644 .coveragerc
 create mode 100644 .env.dev
 create mode 100644 .env.prod
 create mode 100644 .gitignore
 create mode 100644 .gitlab-ci.yml
 create mode 100644 .gitmodules
 create mode 100644 CHANGELOG.md
 create mode 100644 README.md
 create mode 100644 assets/__init__.py
 create mode 100644 assets/admin.py
 create mode 100644 assets/apps.py
 create mode 100644 assets/constants.py
 create mode 100644 assets/filters.py
 create mode 100644 assets/migrations/__init__.py
 create mode 100644 assets/models/__init__.py
 create mode 100644 assets/models/assets.py
 create mode 100644 assets/serializers/__init__.py
 create mode 100644 assets/serializers/assets.py
 create mode 100644 assets/tests/__init__.py
 create mode 100644 assets/tests/test_assets.py
 create mode 100644 assets/tests/test_assets_api.py
 create mode 100644 assets/urls.py
 create mode 100644 assets/views/__init__.py
 create mode 100644 assets/views/assets.py
 create mode 100644 checker/.gitignore
 create mode 100644 checker/deb/skeleton/DEBIAN/.gitkeep
 create mode 100644 checker/deb/skeleton/usr/lib/systemd/system/amcchecker.service
 create mode 100644 checker/go.mod
 create mode 100644 checker/go.sum
 create mode 100644 checker/main.go
 create mode 100644 cicd/.gitignore
 create mode 100644 cicd/config.yml
 create mode 100644 cicd/go_test.sh
 create mode 100644 cicd/integration_tests.sh
 create mode 100644 cicd/live_fw_tests.sh
 create mode 100644 cicd/scripts/build.py
 create mode 100644 cicd/unit_tests.sh
 create mode 100644 cicd/up_license_client.sh
 create mode 100644 company/__init__.py
 create mode 100644 company/admin.py
 create mode 100644 company/apps.py
 create mode 100644 company/migrations/__init__.py
 create mode 100644 company/models/__init__.py
 create mode 100644 company/models/company.py
 create mode 100644 company/models/location.py
 create mode 100644 company/serializers/__init__.py
 create mode 100644 company/serializers/company.py
 create mode 100644 company/serializers/location.py
 create mode 100644 company/services/__init__.py
 create mode 100644 company/services/company_create_update.py
 create mode 100644 company/static/ncircc_files/organization_info.xlsx
 create mode 100644 company/tests/__init__.py
 create mode 100644 company/tests/test_company_api.py
 create mode 100644 company/tests/test_company_services.py
 create mode 100644 company/tests/test_locations_api.py
 create mode 100644 company/urls.py
 create mode 100644 company/views/__init__.py
 create mode 100644 company/views/company_api.py
 create mode 100644 company/views/location_api.py
 create mode 100644 conftest.py
 create mode 100644 console/__init__.py
 create mode 100644 console/admin.py
 create mode 100644 console/api/__init__.py
 create mode 100644 console/api/base.py
 create mode 100644 console/api/celery.py
 create mode 100644 console/api/connections.py
 create mode 100644 console/api/events.py
 create mode 100644 console/api/incidents.py
 create mode 100644 console/api/users.py
 create mode 100644 console/api_urls.py
 create mode 100644 console/apps.py
 create mode 100644 console/asgi.py
 create mode 100644 console/celery.py
 create mode 100644 console/conslog.py
 create mode 100644 console/management/__init__.py
 create mode 100644 console/management/commands/__init__.py
 create mode 100644 console/management/commands/create_vector_configs.py
 create mode 100644 console/management/commands/load_rules.py
 create mode 100644 console/management/commands/templates/mc_logs_celery.toml
 create mode 100644 console/management/commands/templates/mc_logs_console.toml
 create mode 100644 console/management/commands/templates/mc_logs_es.toml
 create mode 100644 console/management/commands/templates/mc_logs_gunicorn.toml
 create mode 100644 console/management/commands/templates/mc_logs_nginx.toml
 create mode 100644 console/management/commands/templates/mc_logs_postgresql.toml
 create mode 100644 console/management/commands/templates/mc_logs_redis.toml
 create mode 100644 console/management/commands/templates/mc_logs_syslog.toml
 create mode 100644 console/migrations/__init__.py
 create mode 100644 console/models.py
 create mode 100644 console/routing.py
 create mode 100644 console/serializers.py
 create mode 100644 console/services/__init__.py
 create mode 100644 console/services/product.py
 create mode 100644 console/settings/__init__.py
 create mode 100644 console/settings/base.py
 create mode 100644 console/settings/dev.py
 create mode 100644 console/settings/prod.py
 create mode 100644 console/settings/test.py
 create mode 100644 console/tasks.py
 create mode 100644 console/templates/console/login.html
 create mode 100644 console/tests/__init__.py
 create mode 100644 console/tests/test_api.py
 create mode 100644 console/tests/test_auth.py
 create mode 100644 console/tests/test_commands.py
 create mode 100644 console/tests/test_data/file
 create mode 100644 console/tests/test_data/test_certificate.crt
 create mode 100644 console/tests/test_data/test_certificate.key
 create mode 100644 console/tests/test_elastic.py
 create mode 100644 console/tests/test_extension_validator.py
 create mode 100644 console/tests/test_filters.py
 create mode 100644 console/tests/test_utils.py
 create mode 100644 console/tests/test_views.py
 create mode 100644 console/urls.py
 create mode 100644 console/utils.py
 create mode 100644 console/views/__init__.py
 create mode 100644 console/views/index.py
 create mode 100644 console/wsgi.py
 create mode 100644 core/__init__.py
 create mode 100644 core/admin.py
 create mode 100644 core/apps.py
 create mode 100644 core/backends/ConsoleAuthBackend.py
 create mode 100644 core/backends/__init__.py
 create mode 100644 core/backends/filters.py
 create mode 100644 core/constants.py
 create mode 100644 core/decorators.py
 create mode 100644 core/extensions.py
 create mode 100644 core/fields.py
 create mode 100644 core/middleware.py
 create mode 100644 core/migrations/__init__.py
 create mode 100644 core/mixins.py
 create mode 100644 core/models.py
 create mode 100644 core/serializers.py
 create mode 100644 core/services/__init__.py
 create mode 100644 core/services/amc_services.py
 create mode 100644 core/services/authentication.py
 create mode 100644 core/services/pagination.py
 create mode 100644 core/services/tls_settings.py
 create mode 100644 core/tasks.py
 create mode 100644 core/tests/__init__.py
 create mode 100644 core/tests/filters_backend.py
 create mode 100644 core/tests/test_amc_services.py
 create mode 100644 core/tests/test_authentication.py
 create mode 100644 core/tests/test_data/test_certificate.crt
 create mode 100644 core/tests/test_data/test_certificate.key
 create mode 100644 core/tests/test_tasks.py
 create mode 100644 core/tests/test_tls_settings.py
 create mode 100644 core/urls.py
 create mode 100644 core/utils.py
 create mode 100644 core/validators.py
 create mode 100644 core/views/__init__.py
 create mode 100644 core/views/amc_services.py
 create mode 100644 core/views/view_login.py
 create mode 100644 core/views/view_settings.py
 create mode 100644 correlation/__init__.py
 create mode 100644 correlation/admin.py
 create mode 100644 correlation/api.py
 create mode 100644 correlation/apps.py
 create mode 100644 correlation/constants.py
 create mode 100644 correlation/fields.py
 create mode 100644 correlation/migrations/__init__.py
 create mode 100644 correlation/models.py
 create mode 100644 correlation/serializers.py
 create mode 100644 correlation/services/__init__.py
 create mode 100644 correlation/services/import_service.py
 create mode 100644 correlation/services/rules.py
 create mode 100644 correlation/services/signals.py
 create mode 100644 correlation/tasks.py
 create mode 100644 correlation/tests/__init__.py
 create mode 100644 correlation/tests/data/image.png
 create mode 100644 correlation/tests/test_api.py
 create mode 100644 correlation/tests/test_rule_import_service.py
 create mode 100644 correlation/tests/test_serializer.py
 create mode 100644 correlation/urls.py
 create mode 100644 correlator/.dockerignore
 create mode 100644 correlator/.gitignore
 create mode 100644 correlator/.gitlab-ci.yml
 create mode 100644 correlator/.golangci
 create mode 100644 correlator/CHANGELOG.md
 create mode 100644 correlator/Dockerfile
 create mode 100644 correlator/README.md
 create mode 100644 correlator/aggregator/aggregator.go
 create mode 100644 correlator/aggregator/aggregator_test.go
 create mode 100644 correlator/api/api.go
 create mode 100644 correlator/api/api_test.go
 create mode 100644 correlator/cicd/Dockerfile
 create mode 100644 correlator/cicd/config.yml
 create mode 100644 correlator/cicd/docker-compose.yml
 create mode 100644 correlator/cicd/test.sh
 create mode 100644 correlator/cicd/test_job.sh
 create mode 100644 correlator/cmd/correlator/.gitignore
 create mode 100644 correlator/cmd/correlator/config_example.yaml
 create mode 100644 correlator/cmd/correlator/logging.go
 create mode 100644 correlator/cmd/correlator/main.go
 create mode 100644 correlator/config/config.go
 create mode 100644 correlator/config/default_config.go
 create mode 100644 correlator/correlator/correlator.go
 create mode 100644 correlator/correlator/correlator_test.go
 create mode 100644 correlator/deb/skeleton/DEBIAN/.gitkeep
 create mode 100644 correlator/deb/skeleton/usr/lib/systemd/system/amccorrelator.service
 create mode 100644 correlator/deb/skeleton/usr/local/armaconsole/app/amccorrelator/aggregator/.gitkeep
 create mode 100644 correlator/deb/skeleton/usr/local/armaconsole/app/amccorrelator/docs/.gitkeep
 create mode 100644 correlator/docker/sh/entrypoint.sh
 create mode 100644 correlator/es/es.go
 create mode 100644 correlator/es/es_test.go
 create mode 100644 correlator/events/events.go
 create mode 100644 correlator/events/events_test.go
 create mode 100644 correlator/events/stat.go
 create mode 100644 correlator/events/stat_test.go
 create mode 100644 correlator/events/store.go
 create mode 100644 correlator/events/store_test.go
 create mode 100644 correlator/events/timewindow.go
 create mode 100644 correlator/events/timewindow_test.go
 create mode 100644 correlator/events/tools.go
 create mode 100644 correlator/events/util.go
 create mode 100644 correlator/go.mod
 create mode 100644 correlator/make_release.py
 create mode 100644 correlator/mapping/component.go
 create mode 100644 correlator/mapping/component_test.go
 create mode 100644 correlator/mapping/index.go
 create mode 100644 correlator/mapping/mapping.go
 create mode 100644 correlator/rules/action.go
 create mode 100644 correlator/rules/action_asset.go
 create mode 100644 correlator/rules/action_asset_test.go
 create mode 100644 correlator/rules/action_bash.go
 create mode 100644 correlator/rules/action_bash_test.go
 create mode 100644 correlator/rules/action_exec.go
 create mode 100644 correlator/rules/action_exec_test.go
 create mode 100644 correlator/rules/action_firewall.go
 create mode 100644 correlator/rules/action_firewall_test.go
 create mode 100644 correlator/rules/action_http.go
 create mode 100644 correlator/rules/action_http_test.go
 create mode 100644 correlator/rules/action_incident.go
 create mode 100644 correlator/rules/action_incident_test.go
 create mode 100644 correlator/rules/action_syslog.go
 create mode 100644 correlator/rules/action_syslog_test.go
 create mode 100644 correlator/rules/action_t.go
 create mode 100644 correlator/rules/action_t_test.go
 create mode 100644 correlator/rules/debug_action.go
 create mode 100644 correlator/rules/predicate.go
 create mode 100644 correlator/rules/predicate_test.go
 create mode 100644 correlator/rules/rule.go
 create mode 100644 correlator/rules/rule_store.go
 create mode 100644 correlator/rules/rule_store_test.go
 create mode 100644 correlator/rules/rule_test.go
 create mode 100644 correlator/rules/stat.go
 create mode 100644 correlator/rules/stat_test.go
 create mode 100644 correlator/rules/test_tools.go
 create mode 100644 correlator/rules/web_tools.go
 create mode 100644 correlator/stat/stat.go
 create mode 100644 correlator/stat/stat_test.go
 create mode 100644 correlator/util/test.go
 create mode 100644 dashboard/__init__.py
 create mode 100644 dashboard/admin.py
 create mode 100644 dashboard/api.py
 create mode 100644 dashboard/api_urls.py
 create mode 100644 dashboard/apps.py
 create mode 100644 dashboard/migrations/__init__.py
 create mode 100644 dashboard/models.py
 create mode 100644 dashboard/serializers.py
 create mode 100644 dashboard/tasks.py
 create mode 100644 dashboard/tests/__init__.py
 create mode 100644 dashboard/tests/test_layout_add.py
 create mode 100644 dashboard/tests/test_rights.py
 create mode 100644 dashboard/tests/test_widgets.py
 create mode 100644 dashboard/utils.py
 create mode 100644 dashboard/views.py
 create mode 100644 dashboard/widgets/__init__.py
 create mode 100644 dashboard/widgets/opened_incs_widget.py
 create mode 100644 dashboard/widgets/services_widget.py
 create mode 100644 dashboard/widgets/sys_info_widget.py
 create mode 100644 dashboard/widgets/top_ids_widget.py
 create mode 100644 dashboard/widgets/widgets.py
 create mode 100644 deb/react.env
 create mode 100644 deb/skeleton/usr/lib/systemd/system/amccelery.service
 create mode 100644 deb/skeleton/usr/lib/systemd/system/amccelerybeat.service
 create mode 100644 deb/skeleton/usr/lib/systemd/system/amccore.service
 create mode 100644 deb/skeleton/usr/local/sbin/amcpsh
 create mode 100644 deb/skeleton/var/www/armaconsole/public/media/.gitkeep
 create mode 100644 deb_old/skeleton/DEBIAN/config
 create mode 100644 deb_old/skeleton/DEBIAN/control.template
 create mode 100644 deb_old/skeleton/DEBIAN/dirs
 create mode 100644 deb_old/skeleton/DEBIAN/postinst
 create mode 100644 deb_old/skeleton/DEBIAN/postrm
 create mode 100644 deb_old/skeleton/DEBIAN/preinst
 create mode 100644 deb_old/skeleton/DEBIAN/prerm
 create mode 100644 deb_old/skeleton/etc/armaconsole/elasticsearch.yml
 create mode 100644 deb_old/skeleton/etc/armaconsole/env/vector.env
 create mode 100644 deb_old/skeleton/etc/armaconsole/license.yml
 create mode 100644 deb_old/skeleton/etc/armaconsole/logstash.yml
 create mode 100644 deb_old/skeleton/etc/armaconsole/vector.yml
 create mode 100644 deb_old/skeleton/etc/cron.d/armaconsole
 create mode 100644 deb_old/skeleton/etc/nginx/snippets/ssl-params.conf
 create mode 100644 deb_old/skeleton/etc/nginx/ssl/armaconsole/.gitkeep
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amccelery.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amccelerybeat.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amcchecker.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amccorrelator.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amcgunicorn.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amclicense.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amcsetelkpass.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amcstartconfigure.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/amcvector.service
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/elasticsearch.service.d/armaconsole.elasticsearch.conf
 create mode 100644 deb_old/skeleton/usr/lib/systemd/system/logstash.service.d/armaconsole.logstash.conf
 create mode 100644 deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_http.nginx
 create mode 100644 deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_https.nginx
 create mode 100644 deb_old/skeleton/usr/local/armaconsole/setElkPassService.sh
 create mode 100644 deb_old/skeleton/usr/local/armaconsole/startConfigureService.sh
 create mode 100644 deb_old/skeleton/usr/local/sbin/amcpsh
 create mode 100644 deb_old/skeleton/var/log/armaconsole/bad_input.log
 create mode 100644 deb_old/skeleton/var/www/armaconsole/public/.gitkeep
 create mode 100644 deb_old/skeleton/var/www/armaconsole/public/media/.gitkeep
 create mode 100644 devices/__init__.py
devices/admin.py create mode 100644 devices/apps.py create mode 100644 devices/constants.py create mode 100644 devices/enums.py create mode 100644 devices/exceptions.py create mode 100644 devices/fields.py create mode 100644 devices/filters.py create mode 100644 devices/migrations/__init__.py create mode 100644 devices/models/__init__.py create mode 100644 devices/models/device.py create mode 100644 devices/models/endpoint_device.py create mode 100644 devices/models/firewall.py create mode 100644 devices/models/sensor.py create mode 100644 devices/serializers/__init__.py create mode 100644 devices/serializers/device.py create mode 100644 devices/serializers/endpoint_serializers.py create mode 100644 devices/serializers/firewall.py create mode 100644 devices/serializers/sensor_serializers.py create mode 100644 devices/services/__init__.py create mode 100644 devices/services/endpoint/__init__.py create mode 100644 devices/services/endpoint/endpoint_antivirus.py create mode 100644 devices/services/endpoint/endpoint_get_status.py create mode 100644 devices/services/endpoint/endpoint_redis.py create mode 100644 devices/services/endpoint/endpoint_services.py create mode 100644 devices/services/firewall/__init__.py create mode 100644 devices/services/firewall/exception.py create mode 100644 devices/services/firewall/firewall.py create mode 100644 devices/services/sensor/__init__.py create mode 100644 devices/services/sensor/enums.py create mode 100644 devices/services/sensor/rabbitmq.py create mode 100644 devices/services/sensor/service.py create mode 100644 devices/services/sensor/utils.py create mode 100644 devices/services/vector.py create mode 100644 devices/tasks/__init__.py create mode 100644 devices/tasks/firewall.py create mode 100644 devices/tasks/sensor.py create mode 100644 devices/templates/vector/config/armaif.toml create mode 100644 devices/templates/vector/config/endpoint.toml create mode 100644 devices/templates/vector/config/sensor.toml create mode 100644 devices/tests/__init__.py create mode 100644 devices/tests/devices_utils.py create mode 100644 devices/tests/endpoint_utils.py create mode 100644 devices/tests/test_devices.py create mode 100644 devices/tests/test_devices_api.py create mode 100644 devices/tests/test_endpoint_api.py create mode 100644 devices/tests/test_endpoint_device_service.py create mode 100644 devices/tests/test_files/__init__.py create mode 100644 devices/tests/test_files/bad_local.zeek create mode 100644 devices/tests/test_files/config.xml create mode 100644 devices/tests/test_files/good_local.zeek create mode 100644 devices/tests/test_files/live_if_config.xml create mode 100644 devices/tests/test_files/live_if_rulesets.tar create mode 100644 devices/tests/test_filters.py create mode 100644 devices/tests/test_firewall_api.py create mode 100644 devices/tests/test_firewall_live.py create mode 100644 devices/tests/test_firewall_service.py create mode 100644 devices/tests/test_group_devices_api.py create mode 100644 devices/tests/test_sensor_api.py create mode 100644 devices/tests/test_sensor_send_message.py create mode 100644 devices/tests/test_vector_service.py create mode 100644 devices/urls.py create mode 100644 devices/urls_endpoint.py create mode 100644 devices/views/__init__.py create mode 100644 devices/views/arma_firewall.py create mode 100644 devices/views/device.py create mode 100644 devices/views/endpoint_views.py create mode 100644 devices/views/sensor.py create mode 100644 docker/compose/config/correlator/Dockerfile create mode 100644 
docker/compose/config/elk/elasticsearch/Dockerfile create mode 100644 docker/compose/config/elk/elasticsearch/config/elasticsearch.yml create mode 100644 docker/compose/config/elk/elasticsearch/elk-post-hooks.sh create mode 100644 docker/compose/config/elk/kibana/Dockerfile create mode 100644 docker/compose/config/elk/kibana/config/kibana.yml create mode 100644 docker/compose/config/elk/vector/.gitignore create mode 100644 docker/compose/config/elk/vector/Dockerfile create mode 100644 docker/compose/config/elk/vector/config/vector.yml create mode 100644 docker/compose/config/elk/vector/pipeline/default.toml create mode 100644 docker/compose/config/license/Dockerfile create mode 100644 docker/compose/config/nginx/default.conf create mode 100644 docker/compose/config/nginx/nginx.docker create mode 100644 docker/compose/config/python/pip.conf create mode 100644 docker/compose/config/python/python.docker create mode 100644 docker/compose/config/react/Dockerfile create mode 100644 docker/compose/config/selenium/selenium.docker create mode 100644 docker/compose/correlator.yml create mode 100644 docker/compose/django.yml create mode 100644 docker/compose/el.yml create mode 100644 docker/compose/flower.yml create mode 100644 docker/compose/kibana.yml create mode 100644 docker/compose/license.yml create mode 100644 docker/compose/pgadmin.yml create mode 100644 docker/compose/selenium.yml create mode 100644 docker/django_entrypoint.sh create mode 100644 docker/generator.py create mode 100644 docker/print_dump.py create mode 100644 docker/scripts/set_mapping.py create mode 100644 docker/start.py create mode 100644 events/__init__.py create mode 100644 events/admin.py create mode 100644 events/api_urls.py create mode 100644 events/apps.py create mode 100644 events/constants.py create mode 100644 events/migrations/__init__.py create mode 100644 events/models/__init__.py create mode 100644 events/serializers/__init__.py create mode 100644 events/services/__init__.py create mode 100644 events/services/elk_string_search.py create mode 100644 events/services/inputs.py create mode 100644 events/tests/__init__.py create mode 100644 events/tests/test_api.py create mode 100644 events/tests/test_services.py create mode 100644 events/tests/utils.py create mode 100644 events/views/__init__.py create mode 100644 events/views/elk_string_query_search_api.py create mode 100644 finalschemaAPI.yaml create mode 100644 frontend/.eslintignore create mode 100644 frontend/.eslintrc.js create mode 100644 frontend/.gitignore create mode 100644 frontend/.npmrc create mode 100644 frontend/.prettierignore create mode 100644 frontend/.prettierrc.js create mode 100644 frontend/README.md create mode 100644 frontend/config/env.js create mode 100644 frontend/config/getHttpsConfig.js create mode 100644 frontend/config/jest/babelTransform.js create mode 100644 frontend/config/jest/cssTransform.js create mode 100644 frontend/config/jest/fileTransform.js create mode 100644 frontend/config/modules.js create mode 100644 frontend/config/paths.js create mode 100644 frontend/config/webpack.config.js create mode 100644 frontend/config/webpack/persistentCache/createEnvironmentHash.js create mode 100644 frontend/config/webpackDevServer.config.js create mode 100644 frontend/eslint.config.js create mode 100644 frontend/finalschemaAPI.yaml create mode 100644 frontend/prettier.config.js create mode 100644 frontend/public/favicon.ico create mode 100644 frontend/public/index.html create mode 100644 frontend/public/robots.txt create mode 100644 
frontend/scripts/build.js create mode 100644 frontend/scripts/start.js create mode 100644 frontend/scripts/test.js create mode 100644 frontend/src/App/App.css create mode 100644 frontend/src/App/App.tsx create mode 100644 frontend/src/App/logo.svg create mode 100644 frontend/src/assets/constants/timeZones.js create mode 100644 frontend/src/assets/icons/upload.svg create mode 100644 frontend/src/assets/images/Svg/languageRU.svg create mode 100644 frontend/src/assets/images/Svg/languageUS.svg create mode 100644 frontend/src/assets/images/close_sidebar.svg create mode 100644 frontend/src/assets/images/open_sidebar.svg create mode 100644 frontend/src/assets/logo/logo_lg.svg create mode 100644 frontend/src/assets/logo/logo_md.svg create mode 100644 frontend/src/assets/logo/logo_sm.svg create mode 100644 frontend/src/components/AppTable/AppTable.scss create mode 100644 frontend/src/components/AppTable/AppTable.tsx create mode 100644 frontend/src/components/AutocompleteMulti/AutoCompleteMulti.module.scss create mode 100644 frontend/src/components/AutocompleteMulti/AutoCompleteMulti.tsx create mode 100644 frontend/src/components/Breadcrump/BreadCrumbs.module.scss create mode 100644 frontend/src/components/Breadcrump/BreadCrumbs.tsx create mode 100644 frontend/src/components/Breadcrump/helpers.tsx create mode 100644 frontend/src/components/Breadcrump/i18n.ts create mode 100644 frontend/src/components/Breadcrump/types.ts create mode 100644 frontend/src/components/Dashboard/Dashboard.module.scss create mode 100644 frontend/src/components/Dashboard/Dashboard.tsx create mode 100644 frontend/src/components/Dashboard/components/DrawerFooter.tsx create mode 100644 frontend/src/components/Dashboard/components/DrawerHeader.tsx create mode 100644 frontend/src/components/Dashboard/components/EditAndSaveButton.tsx create mode 100644 frontend/src/components/Dashboard/index.ts create mode 100644 frontend/src/components/Dashboard/types.ts create mode 100644 frontend/src/components/Dashboard/useSetCurrentCardInfoAfterMount.tsx create mode 100644 frontend/src/components/DashboardTable/DashboardTable.module.scss create mode 100644 frontend/src/components/DashboardTable/DashboardTable.tsx create mode 100644 frontend/src/components/DashboardTable/components/SingleActionsCard/SingleActionsCard.tsx create mode 100644 frontend/src/components/DashboardTable/components/SingleActionsCard/buttonSet.tsx create mode 100644 frontend/src/components/DashboardTable/components/SingleActionsCard/types.ts create mode 100644 frontend/src/components/DashboardTable/components/StatusType/StatusType.tsx create mode 100644 frontend/src/components/DashboardTable/components/useActionsWithSelectedCards/index.ts create mode 100644 frontend/src/components/DashboardTable/components/useActionsWithSelectedCards/types.ts create mode 100644 frontend/src/components/DashboardTable/components/useActionsWithSelectedCards/useActionsWithSelectedCards.tsx create mode 100644 frontend/src/components/DashboardTable/constants.ts create mode 100644 frontend/src/components/DashboardTable/helpers.test.js create mode 100644 frontend/src/components/DashboardTable/helpers.ts create mode 100644 frontend/src/components/DashboardTable/index.ts create mode 100644 frontend/src/components/DashboardTable/types.ts create mode 100644 frontend/src/components/DashboardTable/useGroupTableColumns.tsx create mode 100644 frontend/src/components/DynamicTable/DynamicTable.tsx create mode 100644 frontend/src/components/DynamicTable/types.ts create mode 100644 
frontend/src/components/EllipsisWithToggle/EllipsisWithToggle.module.scss create mode 100644 frontend/src/components/EllipsisWithToggle/EllipsisWithToggle.tsx create mode 100644 frontend/src/components/ForeignKeyEntitiesTable/ForeignKeyEntitiesTable.tsx create mode 100644 frontend/src/components/ForeignKeyEntitiesTable/i18n.ts create mode 100644 frontend/src/components/ForeignKeyEntitiesTable/makeForeignKeyEntitiesTableColumns.tsx create mode 100644 frontend/src/components/FormFieldsBlock/FormFieldsBlock.module.scss create mode 100644 frontend/src/components/FormFieldsBlock/FormFieldsBlock.tsx create mode 100644 frontend/src/components/GenericDashboard/GenericDashboard.tsx create mode 100644 frontend/src/components/GenericDashboard/hooks/index.ts create mode 100644 frontend/src/components/GenericDashboard/hooks/useCollectiveDelete.ts create mode 100644 frontend/src/components/GenericDashboard/hooks/useExportData.ts create mode 100644 frontend/src/components/GenericDashboard/hooks/useImportData.ts create mode 100644 frontend/src/components/GenericDashboard/hooks/useItemDelete.ts create mode 100644 frontend/src/components/GenericDashboard/index.ts create mode 100644 frontend/src/components/GenericDashboard/types.ts create mode 100644 frontend/src/components/GenericGroupsDashboard/GenericGroupCard/GenericGroupCard.tsx create mode 100644 frontend/src/components/GenericGroupsDashboard/GenericGroupsDashboard.tsx create mode 100644 frontend/src/components/GenericGroupsDashboard/GenericGroupsDashboardUiStore.ts create mode 100644 frontend/src/components/GenericGroupsDashboard/index.ts create mode 100644 frontend/src/components/GenericGroupsDashboard/makeGenericGroupsTableColumns.tsx create mode 100644 frontend/src/components/GenericGroupsDashboard/types.ts create mode 100644 frontend/src/components/Layout/Layout.tsx create mode 100644 frontend/src/components/Layout/components/Footer/Footer.module.scss create mode 100644 frontend/src/components/Layout/components/Footer/Footer.tsx create mode 100644 frontend/src/components/Layout/components/Footer/index.ts create mode 100644 frontend/src/components/Layout/components/Header/Header.scss create mode 100644 frontend/src/components/Layout/components/Header/Header.tsx create mode 100644 frontend/src/components/Layout/components/Header/LanguageMenu/LanguageMenu.tsx create mode 100644 frontend/src/components/Layout/components/Header/MainMenu/MainMenu.tsx create mode 100644 frontend/src/components/Layout/components/Header/MainMenu/Menu.module.scss create mode 100644 frontend/src/components/Layout/components/Header/MainMenu/__tests__/helpers.test.tsx create mode 100644 frontend/src/components/Layout/components/Header/MainMenu/helpers.tsx create mode 100644 frontend/src/components/Layout/components/Header/MainMenu/i18n.ts create mode 100644 frontend/src/components/Layout/components/Header/MainMenu/index.ts create mode 100644 frontend/src/components/Layout/components/Header/MainMenu/types.ts create mode 100644 frontend/src/components/Layout/components/Header/SecondaryMenu/SecondaryMenu.tsx create mode 100644 frontend/src/components/Layout/components/Header/SecondaryMenu/types.ts create mode 100644 frontend/src/components/Layout/components/Header/index.ts create mode 100644 frontend/src/components/Layout/components/LeftSidebar/LeftSidebar.module.scss create mode 100644 frontend/src/components/Layout/components/LeftSidebar/LeftSidebar.tsx create mode 100644 frontend/src/components/Layout/components/LeftSidebar/LeftSidebarItem.tsx create mode 100644 
frontend/src/components/Layout/components/LeftSidebar/helpers.tsx create mode 100644 frontend/src/components/Layout/components/LeftSidebar/index.ts create mode 100644 frontend/src/components/Layout/index.ts create mode 100644 frontend/src/components/Messages/errorMessage.ts create mode 100644 frontend/src/components/Messages/index.ts create mode 100644 frontend/src/components/Messages/progressMessages.ts create mode 100644 frontend/src/components/Messages/runWithMessages.ts create mode 100644 frontend/src/components/Messages/successMessage.ts create mode 100644 frontend/src/components/Messages/warningMessage.ts create mode 100644 frontend/src/components/Modals/CardInfoDrawer/CardInfoDrawer.module.scss create mode 100644 frontend/src/components/Modals/CardInfoDrawer/CardInfoDrawer.tsx create mode 100644 frontend/src/components/Modals/ConfirmModal/ConfirmModal.tsx create mode 100644 frontend/src/components/Modals/HelpQuery/FieldTab.tsx create mode 100644 frontend/src/components/Modals/HelpQuery/HelpQuery.module.scss create mode 100644 frontend/src/components/Modals/HelpQuery/HelpQuery.tsx create mode 100644 frontend/src/components/Modals/HelpQuery/SyntaxTab.tsx create mode 100644 frontend/src/components/Modals/TagDrawer/TagDrawer.tsx create mode 100644 frontend/src/components/Modals/TagDrawer/TagFormTemplate.tsx create mode 100644 frontend/src/components/Notification/index.ts create mode 100644 frontend/src/components/Notification/notification.tsx create mode 100644 frontend/src/components/Search/Search.module.scss create mode 100644 frontend/src/components/Search/Search.tsx create mode 100644 frontend/src/components/Search/index.ts create mode 100644 frontend/src/components/SelectMultiple/SelectMultiple.tsx create mode 100644 frontend/src/components/SelectWithObjectValue/SelectWithObjectValue.tsx create mode 100644 frontend/src/components/StringDatePicker/StringDatePicker.tsx create mode 100644 frontend/src/components/StringTimePicker/StringTimePicker.tsx create mode 100644 frontend/src/components/TextHighlighter/TextHighlighter.tsx create mode 100644 frontend/src/components/constants.ts create mode 100644 frontend/src/components/formValidators/index.ts create mode 100644 frontend/src/components/formValidators/validateIp4Address.ts create mode 100644 frontend/src/components/formValidators/validateMultiplePortsAsJsonString.ts create mode 100644 frontend/src/components/formValidators/validatePassword.ts create mode 100644 frontend/src/components/helpers/deepDifference.ts create mode 100644 frontend/src/components/helpers/delay.ts create mode 100644 frontend/src/components/helpers/handleFormRequestError.ts create mode 100644 frontend/src/components/helpers/index.ts create mode 100644 frontend/src/components/helpers/makeListForSelect.ts create mode 100644 frontend/src/components/helpers/makeListForSelectWithNull.ts create mode 100644 frontend/src/components/helpers/sorterColumnTable.ts create mode 100644 frontend/src/components/i18n.ts create mode 100644 frontend/src/components/index.ts create mode 100644 frontend/src/components/types.ts create mode 100644 frontend/src/core/i18n/config.ts create mode 100644 frontend/src/core/i18n/constants.ts create mode 100644 frontend/src/core/i18n/helpers.ts create mode 100644 frontend/src/core/i18n/index.ts create mode 100644 frontend/src/core/i18n/namespaces.ts create mode 100644 frontend/src/core/i18n/types.ts create mode 100644 frontend/src/core/i18n/useTranslation.ts create mode 100644 frontend/src/enviroments/enviroments.js create mode 100644 
frontend/src/hooks/index.ts create mode 100644 frontend/src/hooks/useAsyncEffect.ts create mode 100644 frontend/src/hooks/useClickOutsideElement.ts create mode 100644 frontend/src/index.css create mode 100644 frontend/src/index.tsx create mode 100644 frontend/src/mock/TitlesData.tsx create mode 100644 frontend/src/mock/dataAssets.tsx create mode 100644 frontend/src/myApi.ts create mode 100644 frontend/src/pages/500/TechnicalProblem.module.scss create mode 100644 frontend/src/pages/500/TechnicalProblem.tsx create mode 100644 frontend/src/pages/500/index.ts create mode 100644 frontend/src/pages/AccountsDashboard/AccountCard/AccountCard.tsx create mode 100644 frontend/src/pages/AccountsDashboard/AccountForm/AccountForm.tsx create mode 100644 frontend/src/pages/AccountsDashboard/AccountsDashboard.module.scss create mode 100644 frontend/src/pages/AccountsDashboard/AccountsDashboard.tsx create mode 100644 frontend/src/pages/AccountsDashboard/AccountsDashboardUiStore.ts create mode 100644 frontend/src/pages/AccountsDashboard/i18n.ts create mode 100644 frontend/src/pages/AccountsDashboard/index.ts create mode 100644 frontend/src/pages/AccountsDashboard/makeAccountsDashboardActions.ts create mode 100644 frontend/src/pages/AccountsDashboard/makeAccountsDashboardColumns.tsx create mode 100644 frontend/src/pages/AppRoutes/AppRoutes.module.scss create mode 100644 frontend/src/pages/AppRoutes/AppRoutes.tsx create mode 100644 frontend/src/pages/AppRoutes/constants.ts create mode 100644 frontend/src/pages/AppRoutes/index.ts create mode 100644 frontend/src/pages/AppRoutes/routes.tsx create mode 100644 frontend/src/pages/AssetsDashboard/Assets.scss create mode 100644 frontend/src/pages/AssetsDashboard/AssetsCardInfo/AssetsCardInfo.module.scss create mode 100644 frontend/src/pages/AssetsDashboard/AssetsCardInfo/FormItems.tsx create mode 100644 frontend/src/pages/AssetsDashboard/AssetsCardInfo/FormItemsLarge.tsx create mode 100644 frontend/src/pages/AssetsDashboard/AssetsCardInfo/types.ts create mode 100644 frontend/src/pages/AssetsDashboard/AssetsCardInfo/validation.ts create mode 100644 frontend/src/pages/AssetsDashboard/AssetsDashboard.tsx create mode 100644 frontend/src/pages/AssetsDashboard/AssetsLeftSideBarContent.tsx create mode 100644 frontend/src/pages/AssetsDashboard/AssetsUiStore.tsx create mode 100644 frontend/src/pages/AssetsDashboard/MOCK.ts create mode 100644 frontend/src/pages/AssetsDashboard/__tests__/converter.test.ts create mode 100644 frontend/src/pages/AssetsDashboard/constants.ts create mode 100644 frontend/src/pages/AssetsDashboard/converter.tsx create mode 100644 frontend/src/pages/AssetsDashboard/helpers.module.scss create mode 100644 frontend/src/pages/AssetsDashboard/helpers.ts create mode 100644 frontend/src/pages/AssetsDashboard/i18n.ts create mode 100644 frontend/src/pages/AssetsDashboard/index.ts create mode 100644 frontend/src/pages/AssetsDashboard/types.ts create mode 100644 frontend/src/pages/AssetsDashboard/useAssetsColumns.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRules.module.scss create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/CorrelationRulesActionsEditor.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/AssetActionForm.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/BashActionForm.tsx create mode 100644 
frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/CommonActionFormProps.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/ExecActionForm.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/FirewallActionForm.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/HttpActionForm.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/IncidentActionForm.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/SyslogActionForm.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/helpers.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/actionForms/index.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesActionsEditor/makeNewCorrelationRuleAction.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesCard/CorrelationRulesCard.module.scss create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesCard/CorrelationRulesCard.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesChecker/CorrelationRulesChecker.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesChecker/CorrelationRulesCheckerHelpDialog/CorrelationRulesCheckerHelpDialog.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesChecker/CorrelationRulesCheckerUiStore.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesChecker/consts.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesChecker/makeCorrelationRulesEventsColumns.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesDashboard.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/CorrelationRulesUiStore.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/helpers.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/i18n.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/index.ts create mode 100644 frontend/src/pages/CorrelationRulesDashboard/makeCorrelationRulesColumns.tsx create mode 100644 frontend/src/pages/CorrelationRulesDashboard/types.ts create mode 100644 frontend/src/pages/CorrelationRulesGroupsDashboard/CorrelationRulesGroupsDashboard.tsx create mode 100644 frontend/src/pages/CorrelationRulesGroupsDashboard/CorrelationRulesGroupsDashboardUiStore.ts create mode 100644 frontend/src/pages/CorrelationRulesGroupsDashboard/index.ts create mode 100644 frontend/src/pages/DevicesDashboard/Devices.module.scss create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/AntivirusResultTable/AntivirusResultTableUiStore.ts create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/AntivirusResultTable/AntivirusResutTable.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/DevicesCardInfo.module.scss create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Endpoint/EndpointForm.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Endpoint/EndpointLargeFormItems.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Endpoint/RotationBlock.tsx create mode 
100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Endpoint/index.ts create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Endpoint/types.ts create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Endpoint/useEndpointFormConfiguration.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Firewall/FireWallForm.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Firewall/index.ts create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Firewall/useFirewallFormConfiguration.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/GeneralForm.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/SelectorAddNewEventSource.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Sensor/SensorForm.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Sensor/index.ts create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/Sensor/useSensorFormConfiguration.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesCardInfo/types.ts create mode 100644 frontend/src/pages/DevicesDashboard/DevicesDashboard.tsx create mode 100644 frontend/src/pages/DevicesDashboard/DevicesLeftSideBarContent.tsx create mode 100644 frontend/src/pages/DevicesDashboard/SensorInformationModal.tsx create mode 100644 frontend/src/pages/DevicesDashboard/Stores/DevicesUiStore.ts create mode 100644 frontend/src/pages/DevicesDashboard/Stores/EnpointUiStore.ts create mode 100644 frontend/src/pages/DevicesDashboard/Stores/index.ts create mode 100644 frontend/src/pages/DevicesDashboard/constants.ts create mode 100644 frontend/src/pages/DevicesDashboard/helpers.ts create mode 100644 frontend/src/pages/DevicesDashboard/i18n.ts create mode 100644 frontend/src/pages/DevicesDashboard/index.ts create mode 100644 frontend/src/pages/DevicesDashboard/makeDevicesColumns.tsx create mode 100644 frontend/src/pages/DevicesDashboard/types.ts create mode 100644 frontend/src/pages/DevicesDashboard/useFields.ts create mode 100644 frontend/src/pages/EventExportSettings/EventExportSettings.scss create mode 100644 frontend/src/pages/EventExportSettings/EventExportSettings.tsx create mode 100644 frontend/src/pages/EventExportSettings/TableOPCUA.tsx create mode 100644 frontend/src/pages/EventExportSettings/TableSyslog.tsx create mode 100644 frontend/src/pages/EventSourceDashboard/EventSourceDashboard.scss create mode 100644 frontend/src/pages/EventSourceDashboard/EventSourceDashboard.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/EventCardInfo.module.scss create mode 100644 frontend/src/pages/EventsJournalDashboard/EventCardInfo.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/EventsFilters.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/EventsJournal.module.scss create mode 100644 frontend/src/pages/EventsJournalDashboard/EventsJournalActions.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/EventsJournalApi.ts create mode 100644 frontend/src/pages/EventsJournalDashboard/EventsJournalDashboard.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/EventsJournalUiStore.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/columns.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/constants.ts create mode 100644 frontend/src/pages/EventsJournalDashboard/converter.tsx create mode 100644 frontend/src/pages/EventsJournalDashboard/i18n.ts create mode 100644 
frontend/src/pages/EventsJournalDashboard/index.ts create mode 100644 frontend/src/pages/EventsJournalDashboard/types.ts create mode 100644 frontend/src/pages/EventsJournalDashboard/useEventsJournalsLeftSideBarContents.ts create mode 100644 frontend/src/pages/IncidentsDashboard/Incidents.scss create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsActions.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsApi.ts create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/CardWithPagination.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/DrawerFooter.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/DrawerHeader.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/EditAndSaveButton.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/EventsTable.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/IncidentCardInfoFormTemplate.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/IncidentsCardInfo.module.scss create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/IncidentsCardInfo.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/helpers.ts create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/index.ts create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsCardInfo/types.ts create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsDashboard.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsFilters.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/IncidentsUiStore.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/MOCK.ts create mode 100644 frontend/src/pages/IncidentsDashboard/NCIRCCNotificationForm/NCIRCCNotificationForm.module.scss create mode 100644 frontend/src/pages/IncidentsDashboard/NCIRCCNotificationForm/NCIRCCNotificationForm.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/NCIRCCNotificationForm/constants.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/NCIRCCNotificationForm/helpers.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/NCIRCCNotificationForm/index.ts create mode 100644 frontend/src/pages/IncidentsDashboard/NCIRCCNotificationForm/types.ts create mode 100644 frontend/src/pages/IncidentsDashboard/__tests__/helpers.test.ts create mode 100644 frontend/src/pages/IncidentsDashboard/constants.ts create mode 100644 frontend/src/pages/IncidentsDashboard/helpers.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/i18n.ts create mode 100644 frontend/src/pages/IncidentsDashboard/index.ts create mode 100644 frontend/src/pages/IncidentsDashboard/makeIncidentsColumns.tsx create mode 100644 frontend/src/pages/IncidentsDashboard/types.ts create mode 100644 frontend/src/pages/LicenseActivationPage/LicenseActivationPage.module.scss create mode 100644 frontend/src/pages/LicenseActivationPage/LicenseActivationPage.test.tsx create mode 100644 frontend/src/pages/LicenseActivationPage/LicenseActivationPage.tsx create mode 100644 frontend/src/pages/LicenseActivationPage/LicenseActivationPageStore.ts create mode 100644 frontend/src/pages/LicenseActivationPage/i18n.ts create mode 100644 frontend/src/pages/LicenseActivationPage/index.ts create mode 100644 frontend/src/pages/LicensePage/LicensePage.module.scss create mode 100644 frontend/src/pages/LicensePage/LicensePage.test.tsx create mode 100644 
frontend/src/pages/LicensePage/LicensePage.tsx create mode 100644 frontend/src/pages/LicensePage/i18n.ts create mode 100644 frontend/src/pages/LicensePage/index.ts create mode 100644 frontend/src/pages/Login/Login.scss create mode 100644 frontend/src/pages/Login/Login.tsx create mode 100644 frontend/src/pages/Login/LoginApi.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/BulletinContent.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/Bulletins.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/Incidents.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/MOCK.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/NCIRCCDashboard.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/NCIRCCUiStore.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/Ncircc.module.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/Messages.module.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCIncidentMessenger/NCIRCCIncidentMessenger.module.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCIncidentMessenger/NCIRCCIncidentMessenger.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCIncidentMessenger/index.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCNotifications/NCIRCCNotification/NCIRCCNotification.module.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCNotifications/NCIRCCNotification/NCIRCCNotification.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCNotifications/NCIRCCNotification/index.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCNotifications/NCIRCCNotificationsList/NCIRCCNotificationsList.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCNotifications/NCIRCCNotificationsList/NotificationList.module.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NCIRCCNotifications/NCIRCCNotificationsList/index.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/NcirccMessages.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/helpers.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/NcirccMessages/index.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/OrganizationCard/OrganizationCard.module.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/OrganizationCard/OrganizationCard.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/OrganizationCard/helpers.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/components/StatusWithButtonUuid.module.scss create mode 100644 frontend/src/pages/NCIRCCDashboard/components/StatusWithButtonUuid.tsx create mode 100644 frontend/src/pages/NCIRCCDashboard/components/index.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/components/types.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/i18n.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/index.ts create mode 100644 frontend/src/pages/NCIRCCDashboard/types.ts create mode 100644 frontend/src/pages/NetworkActivityMap/NetworkActivityMap.scss create mode 100644 frontend/src/pages/NetworkActivityMap/NetworkActivityMap.tsx create mode 100644 frontend/src/pages/NetworkMap/NetworkMap.scss create mode 100644 frontend/src/pages/NetworkMap/NetworkMap.tsx create mode 100644 frontend/src/pages/ProfilePage/ProfilePage.module.scss create mode 100644 frontend/src/pages/ProfilePage/ProfilePage.tsx create mode 100644 
frontend/src/pages/ProfilePage/i18n.ts create mode 100644 frontend/src/pages/ProfilePage/index.ts create mode 100644 frontend/src/pages/RotationDashboard/EventsRotationType.tsx create mode 100644 frontend/src/pages/RotationDashboard/IncidentsRotationSettings.tsx create mode 100644 frontend/src/pages/RotationDashboard/MOCK.ts create mode 100644 frontend/src/pages/RotationDashboard/RotationDashboard.tsx create mode 100644 frontend/src/pages/RotationDashboard/RotationUiStore.tsx create mode 100644 frontend/src/pages/RotationDashboard/Template/Main.tsx create mode 100644 frontend/src/pages/RotationDashboard/Template/Period.tsx create mode 100644 frontend/src/pages/RotationDashboard/Template/Template.module.scss create mode 100644 frontend/src/pages/RotationDashboard/Template/helpers.ts create mode 100644 frontend/src/pages/RotationDashboard/__tests__/helpers.test.ts create mode 100644 frontend/src/pages/RotationDashboard/helpers.ts create mode 100644 frontend/src/pages/RotationDashboard/i18n.ts create mode 100644 frontend/src/pages/RotationDashboard/index.ts create mode 100644 frontend/src/pages/RotationDashboard/types.ts create mode 100644 frontend/src/pages/SettingsDashboard/Authentication.tsx create mode 100644 frontend/src/pages/SettingsDashboard/SettingsDashboard.tsx create mode 100644 frontend/src/pages/SettingsDashboard/SettingsUiStore.ts create mode 100644 frontend/src/pages/SettingsDashboard/TlsSertificate.tsx create mode 100644 frontend/src/pages/SettingsDashboard/constatns.ts create mode 100644 frontend/src/pages/SettingsDashboard/i18n.ts create mode 100644 frontend/src/pages/SettingsDashboard/index.ts create mode 100644 frontend/src/pages/SettingsDashboard/validation.ts create mode 100644 frontend/src/pages/StorageDashboard/Storage.scss create mode 100644 frontend/src/pages/StorageDashboard/StorageApi.ts create mode 100644 frontend/src/pages/StorageDashboard/StorageCardInfo/DrawerFooter.tsx create mode 100644 frontend/src/pages/StorageDashboard/StorageCardInfo/DrawerHeader.tsx create mode 100644 frontend/src/pages/StorageDashboard/StorageCardInfo/StorageCardInfo.tsx create mode 100644 frontend/src/pages/StorageDashboard/StorageCardInfo/index.ts create mode 100644 frontend/src/pages/StorageDashboard/StorageDashboard.tsx create mode 100644 frontend/src/pages/StorageDashboard/StorageFilters.tsx create mode 100644 frontend/src/pages/StorageDashboard/StorageLeftSideBar.tsx create mode 100644 frontend/src/pages/StorageDashboard/StorageMockData.tsx create mode 100644 frontend/src/pages/StorageDashboard/StorageUiStore.tsx create mode 100644 frontend/src/pages/StorageDashboard/converter.tsx create mode 100644 frontend/src/pages/StorageDashboard/helpers.ts create mode 100644 frontend/src/pages/StorageDashboard/i18n.ts create mode 100644 frontend/src/pages/StorageDashboard/storageColumns.tsx create mode 100644 frontend/src/pages/StorageDashboard/types.ts create mode 100644 frontend/src/pages/WidgetsDashboard/WidgetsDashboard.tsx create mode 100644 frontend/src/react-app-env.d.ts create mode 100644 frontend/src/reportWebVitals.ts create mode 100644 frontend/src/services/FileDownload.ts create mode 100644 frontend/src/services/RestApi.ts create mode 100644 frontend/src/services/logoutApi.ts create mode 100644 frontend/src/services/request.ts create mode 100644 frontend/src/setupTests.ts create mode 100644 frontend/src/stores/AccountsLeftSideBarContent.tsx create mode 100644 frontend/src/stores/AccountsStore.ts create mode 100644 frontend/src/stores/AccountsStoreNew.ts create mode 100644 
frontend/src/stores/AntivirusResultsTableStore.ts create mode 100644 frontend/src/stores/AppStore.ts create mode 100644 frontend/src/stores/AssetsGroupsStore.ts create mode 100644 frontend/src/stores/AssetsManufacturersStore.ts create mode 100644 frontend/src/stores/AssetsOsesStore.ts create mode 100644 frontend/src/stores/AssetsStore.ts create mode 100644 frontend/src/stores/CorrelationRulesGroupsStore.ts create mode 100644 frontend/src/stores/CorrelationRulesStore.ts create mode 100644 frontend/src/stores/CurrentUserStore.ts create mode 100644 frontend/src/stores/Decorators/Loadeble.ts create mode 100644 frontend/src/stores/Decorators/LoadingDecorator.ts create mode 100644 frontend/src/stores/DevicesStore.ts create mode 100644 frontend/src/stores/EndpointStore.ts create mode 100644 frontend/src/stores/EntityStore.ts create mode 100644 frontend/src/stores/EventsJournalStore.ts create mode 100644 frontend/src/stores/FirewallStore.ts create mode 100644 frontend/src/stores/GenericDashboardUiStore.ts create mode 100644 frontend/src/stores/GenericDataStore.ts create mode 100644 frontend/src/stores/HelpersStore.ts create mode 100644 frontend/src/stores/IncidentsCategoriesStore.ts create mode 100644 frontend/src/stores/IncidentsEffectsStore.ts create mode 100644 frontend/src/stores/IncidentsRecommendationsStore.ts create mode 100644 frontend/src/stores/IncidentsStore.ts create mode 100644 frontend/src/stores/LicenseStore.ts create mode 100644 frontend/src/stores/NCIRCCStore.ts create mode 100644 frontend/src/stores/RootStore.ts create mode 100644 frontend/src/stores/RotationStore.ts create mode 100644 frontend/src/stores/SensorStore.ts create mode 100644 frontend/src/stores/SettingsStore.ts create mode 100644 frontend/src/stores/StorageStore.ts create mode 100644 frontend/src/stores/StoreProvider.ts create mode 100644 frontend/src/stores/SystemInfoStore.ts create mode 100644 frontend/src/stores/TagsStore.ts create mode 100644 frontend/src/styles/antdChangedStyles.scss create mode 100644 frontend/src/styles/headlines.scss create mode 100644 frontend/src/styles/helpers.scss create mode 100644 frontend/src/styles/styles.scss create mode 100644 frontend/src/styles/variables.scss create mode 100644 frontend/src/testingUtils/handleLocaleFileRequest.ts create mode 100644 frontend/src/testingUtils/index.ts create mode 100644 frontend/src/testingUtils/testRenderWithProviders.tsx create mode 100644 frontend/src/types/default.ts create mode 100644 frontend/src/types/types.d.ts create mode 100644 frontend/typings/i18next-react-postprocessor.d.ts create mode 100644 frontend/typings/integrations.d.ts create mode 100644 frontend/typings/react-i18next.d.ts create mode 100644 incident/__init__.py create mode 100644 incident/admin.py create mode 100644 incident/apps.py create mode 100644 incident/filters.py create mode 100644 incident/migrations/__init__.py create mode 100644 incident/models.py create mode 100644 incident/serializers/__init__.py create mode 100644 incident/serializers/incident.py create mode 100644 incident/serializers/incident_edit_serializer.py create mode 100644 incident/services/__init__.py create mode 100644 incident/services/ws_incidents.py create mode 100644 incident/tests/__init__.py create mode 100644 incident/tests/test_api.py create mode 100644 incident/tests/test_filters.py create mode 100644 incident/tests/test_incidents_list.py create mode 100644 incident/tests/test_serializers.py create mode 100644 incident/urls.py create mode 100644 incident/views/__init__.py create mode 100644 
incident/views/incidents_api.py create mode 100644 incident_export/__init__.py create mode 100644 incident_export/admin.py create mode 100644 incident_export/apps.py create mode 100644 incident_export/enums.py create mode 100644 incident_export/migrations/__init__.py create mode 100644 incident_export/models.py create mode 100644 incident_export/serializers.py create mode 100644 incident_export/services/export.py create mode 100644 incident_export/signals.py create mode 100644 incident_export/tasks.py create mode 100644 incident_export/tests/__init__.py create mode 100644 incident_export/tests/test_cef_format.py create mode 100644 incident_export/tests/test_incident_export.py create mode 100644 incident_export/tests/test_incident_export_api.py create mode 100644 incident_export/urls.py create mode 100644 incident_export/views.py create mode 100644 inputs/__init__.py create mode 100644 inputs/admin.py create mode 100644 inputs/apps.py create mode 100644 inputs/constants.py create mode 100644 inputs/enums.py create mode 100644 inputs/migrations/__init__.py create mode 100644 inputs/models.py create mode 100644 inputs/serializers.py create mode 100644 inputs/services/__init__.py create mode 100644 inputs/services/delete_input.py create mode 100644 inputs/services/inputs.py create mode 100644 inputs/services/remove_loginputs.py create mode 100644 inputs/services/update_config.py create mode 100644 inputs/tests/__init__.py create mode 100644 inputs/tests/test_serializer.py create mode 100644 inputs/urls.py create mode 100644 inputs/views.py create mode 100644 license/.gitignore create mode 100644 license/CHANGELOG.md create mode 100644 license/README.md create mode 100644 license/config_example.yaml create mode 100644 license/go.mod create mode 100644 license/go.sum create mode 100644 license/logging.go create mode 100644 license/main.go create mode 100644 license_info/__init__.py create mode 100644 license_info/admin.py create mode 100644 license_info/apps.py create mode 100644 license_info/decorators.py create mode 100644 license_info/exeptions.py create mode 100644 license_info/migrations/__init__.py create mode 100644 license_info/templatetags/__init__.py create mode 100644 license_info/templatetags/license_info.py create mode 100644 license_info/tests/__init__.py create mode 100644 license_info/tests/test_middleware.py create mode 100644 license_info/tests/test_tools.py create mode 100644 license_info/tools.py create mode 100644 license_info/urls.py create mode 100644 license_info/views.py create mode 100644 logstash/.gitignore create mode 100644 logstash/__init__.py create mode 100644 logstash/admin.py create mode 100644 logstash/api.py create mode 100644 logstash/apps.py create mode 100644 logstash/constants.py create mode 100644 logstash/migrations/__init__.py create mode 100644 logstash/models/__init__.py create mode 100644 logstash/serializers.py create mode 100644 logstash/tasks.py create mode 100644 logstash/tests/__init__.py create mode 100644 logstash/tests/test_license.py create mode 100644 logstash/tests/test_services.py create mode 100644 logstash/tests/tests_media/wrong_format.png create mode 100644 logstash/urls.py create mode 100644 make_release.py create mode 100644 manage.py create mode 100644 miggunicorn.sh create mode 100644 ncircc/__init__.py create mode 100644 ncircc/admin.py create mode 100644 ncircc/apps.py create mode 100644 ncircc/enums/__init__.py create mode 100644 ncircc/enums/notifications.py create mode 100644 ncircc/migrations/__init__.py create mode 100644 
ncircc/models/__init__.py create mode 100644 ncircc/models/comments.py create mode 100644 ncircc/models/notification.py create mode 100644 ncircc/serializers/__init__.py create mode 100644 ncircc/serializers/comments.py create mode 100644 ncircc/serializers/notification.py create mode 100644 ncircc/services/__init__.py create mode 100644 ncircc/services/comments.py create mode 100644 ncircc/services/notification.py create mode 100644 ncircc/services/utils.py create mode 100644 ncircc/tasks.py create mode 100644 ncircc/tests/__init__.py create mode 100644 ncircc/tests/test_comments_api.py create mode 100644 ncircc/tests/test_comments_services.py create mode 100644 ncircc/tests/test_notification_api.py create mode 100644 ncircc/tests/test_notification_serializers.py create mode 100644 ncircc/tests/test_notification_services.py create mode 100644 ncircc/tests/test_utils.py create mode 100644 ncircc/tests/utils.py create mode 100644 ncircc/urls.py create mode 100644 ncircc/views/__init__.py create mode 100644 ncircc/views/notification_api.py create mode 100644 networkmap/__init__.py create mode 100644 networkmap/admin.py create mode 100644 networkmap/api.py create mode 100644 networkmap/apps.py create mode 100644 networkmap/migrations/__init__.py create mode 100644 networkmap/models.py create mode 100644 networkmap/serializers.py create mode 100644 networkmap/services.py create mode 100644 networkmap/tasks.py create mode 100644 networkmap/tests/__init__.py create mode 100644 networkmap/tests/migration_fixtures.py create mode 100644 networkmap/tests/netmap_test_utils.py create mode 100644 networkmap/tests/test_api.py create mode 100644 networkmap/tests/test_data/controller.png create mode 100644 networkmap/urls.py create mode 100644 notifications/__init__.py create mode 100644 notifications/admin.py create mode 100644 notifications/apps.py create mode 100644 notifications/enums.py create mode 100644 notifications/migrations/__init__.py create mode 100644 notifications/models.py create mode 100644 notifications/serializers.py create mode 100644 notifications/services/__init__.py create mode 100644 notifications/services/notification_sender.py create mode 100644 notifications/services/ws.py create mode 100644 notifications/tests/__init__.py create mode 100644 notifications/tests/test_notifications_api.py create mode 100644 notifications/tests/test_notifications_service.py create mode 100644 notifications/tests/test_ws_consumers.py create mode 100644 notifications/urls.py create mode 100644 notifications/views.py create mode 100644 perms/__init__.py create mode 100644 perms/admin.py create mode 100644 perms/apps.py create mode 100644 perms/migrations/__init__.py create mode 100644 perms/models.py create mode 100644 perms/services/__init__.py create mode 100644 perms/services/get_permissions.py create mode 100644 perms/tests/__init__.py create mode 100644 perms/tests/test_api_perms.py create mode 100644 perms/tests/test_migrations.py create mode 100644 perms/tests/test_services.py create mode 100644 perms/views.py create mode 100644 product_version create mode 100644 pytest.ini create mode 100644 requirements.txt create mode 100644 requirements_test.txt create mode 100644 rotation/__init__.py create mode 100644 rotation/admin.py create mode 100644 rotation/apps.py create mode 100644 rotation/constants.py create mode 100644 rotation/enums.py create mode 100644 rotation/migrations/__init__.py create mode 100644 rotation/models.py create mode 100644 rotation/serializers.py create mode 100644 
rotation/services/cron_utils.py create mode 100644 rotation/services/update_schedule.py create mode 100644 rotation/tasks.py create mode 100644 rotation/tests/test_crontab_functions.py create mode 100644 rotation/tests/test_serializers.py create mode 100644 rotation/tests/test_tasks.py create mode 100644 rotation/tests/test_views.py create mode 100644 rotation/urls.py create mode 100644 rotation/views.py create mode 100644 schema.yml create mode 100644 search-solid.svg create mode 100644 start_dev_django_app.sh create mode 100644 storage/__init__.py create mode 100644 storage/admin.py create mode 100644 storage/apps.py create mode 100644 storage/enums.py create mode 100644 storage/exception.py create mode 100644 storage/export.py create mode 100644 storage/migrations/__init__.py create mode 100644 storage/models.py create mode 100644 storage/serializers.py create mode 100644 storage/services.py create mode 100644 storage/tasks.py create mode 100644 storage/tests/__init__.py create mode 100644 storage/tests/test.py create mode 100644 storage/tests/test_export.py create mode 100644 storage/urls.py create mode 100644 storage/views.py create mode 100644 users/__init__.py create mode 100644 users/admin.py create mode 100644 users/api.py create mode 100644 users/apps.py create mode 100644 users/constants.py create mode 100644 users/migrations/__init__.py create mode 100644 users/models.py create mode 100644 users/serializers.py create mode 100644 users/services/__init__.py create mode 100644 users/services/signals.py create mode 100644 users/services/userinfo.py create mode 100644 users/services/validators.py create mode 100644 users/tests.py create mode 100644 users/urls.py
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..643c33b
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,11 @@
+[run]
+omit =
+    /usr/local/*
+    /usr/lib/*
+    /usr/lib64/*
+    *tests*
+    *__init__.py
+    *migrations*
+
+[html]
+title=ARMA Management Console coverage report
diff --git a/.env.dev b/.env.dev
new file mode 100644
index 0000000..9e9bde0
--- /dev/null
+++ b/.env.dev
@@ -0,0 +1,41 @@
+DJANGO_SETTINGS_MODULE=console.settings.dev
+DEBUG=1
+MIN_LOG_LEVEL=INFO
+DJANGO_ALLOWED_HOSTS=*
+POSTGRES_DB=armaconsole
+POSTGRES_USER=armaconsole
+POSTGRES_PASSWORD=arma_console_password
+POSTGRES_HOST=db
+POSTGRES_PORT=5432
+REDIS_HOST=redis
+REDIS_PORT=6379
+DOCKER=1
+DOCKER_DJANGO_NAME=djangoapp
+DOCKER_DB_NAME=db
+DOCKER_NGINX_NAME=pnginxserver
+DOCKER_SELERY_BEAT_NAME=celerybeatcontainer
+DOCKER_SELERY_NAME=celerycontainer
+DOCKER_REDIS_NAME=redis
+DOCKER_LOGSTASH_NAME=logstash
+DOCKER_VECTOR_NAME=vector
+DOCKER_KIBANA_NAME=kibana
+DOCKER_PGADMIN_SERVER=pgadminserver
+DOCKER_FLOWER_NAME=flower
+DOCKER_ELASTIC_NAME=elasticsearch
+DOCKER_SELENIUM_NAME=seleniumhub
+DOCKER_SCAN_SUGGEST=false
+COLUMNS=80
+ELK_VERSION=7.12.0
+ELASTIC_PASSWORD=changeme
+ELASTIC_USER=elastic
+PGADMIN_DEFAULT_EMAIL=pgadmin4@pgadmin.org
+PGADMIN_DEFAULT_PASSWORD=admin
+WEB_UI_PORT=9090
+WEB_PDB_PORT=7755
+CORRELATOR_API=http://correlator:5566
+ELASTICSEARCH_API=elasticsearch:9200
+NCIRCC_DOMAIN_NAME=https://test-lk.cert.gov.ru
+LICENSE_CLIENT_URL=http://license-client:8050
+SERVE=django
+PUBLIC_URL=/static/react
+SECRET_KEY=n&gyo1luo0!wj9y!drq!5n02s)9h80+o3nrxo=61e)_ge14(4l
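A minimal sketch of how a Django settings module might consume .env.dev above. The variable names are taken from the env file; the code itself is hypothetical and is not the contents of console/settings/dev.py in this patch:

    # Hypothetical sketch only: how a settings module *might* read the
    # variables defined in .env.dev; the real console/settings/dev.py
    # may differ.
    import os

    DEBUG = os.environ.get("DEBUG") == "1"
    SECRET_KEY = os.environ["SECRET_KEY"]
    ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS", "*").split(",")

    DATABASES = {
        "default": {
            "ENGINE": "django.db.backends.postgresql",
            "NAME": os.environ.get("POSTGRES_DB"),
            "USER": os.environ.get("POSTGRES_USER"),
            "PASSWORD": os.environ.get("POSTGRES_PASSWORD"),
            # In the compose setup the hosts are docker service names.
            "HOST": os.environ.get("POSTGRES_HOST", "db"),
            "PORT": os.environ.get("POSTGRES_PORT", "5432"),
        }
    }

Note that SECRET_KEY, ELASTIC_PASSWORD and the postgres password are committed here as development defaults; production deployments should supply their own values.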
diff --git a/.env.prod b/.env.prod
new file mode 100644
index 0000000..81743ac
--- /dev/null
+++ b/.env.prod
@@ -0,0 +1,19 @@
+DJANGO_SETTINGS_MODULE=console.settings.prod
+DEBUG=0
+LOG_PATH=/var/log/armaconsole
+DJANGO_ALLOWED_HOSTS=*
+DEBCONF_DBCONF_FPATH=/etc/armaconsole/debconf_dbconfig
+REDIS_HOST=localhost
+REDIS_PORT=6379
+RABBIT_URL=http://localhost:5672
+COLUMNS=80
+ELK_VERSION=7.12.0
+ELASTIC_PASSWORD=changeme
+ELASTIC_USER=elastic
+MIN_LOG_LEVEL=INFO
+WEB_UI_PORT=9090
+PUBLIC_DIR=/var/www/armaconsole/public
+CORRELATOR_URL=http://localhost:5566
+ELASTIC_URL=http://127.0.0.1:9200
+LICENSE_CLIENT_URL=http://127.0.0.1:8050
+NCIRCC_DOMAIN_NAME=https://lk.cert.gov.ru
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..40ad8d4
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,55 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+
+# Unit test / coverage reports
+htmlcov/
+.coverage
+.coverage.*
+.cache
+.pytest_cache/
+
+# Translations
+*.mo
+*.po
+
+# Django stuff:
+*.log.xz
+*.log
+db.sqlite3
+db.sqlite3-journal
+
+# Environments
+.env
+.venv
+env/*
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Pyre type checker
+.vscode/
+.idea
+*/migrations/0*
+dockerlogs
+/static
+
+# Cache
+django_cache/*
+
+upload/*
+.DS_Store
+
+public/test_coverage/*
+
+*.csv
+*.zip
+*.json
+
+!deb_old/skeleton/var/log/armaconsole/bad_input.log
+
+/artifacts/
+
+# Auto generated by react
+console/templates/console/index.html
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000..1c93bd8
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,226 @@
+include:
+  - project: 'iwa/adm/ci/cicd_extra'
+    ref: $cicd_branch
+    file:
+      - 'ymls/header.yml'
+      - 'ymls/save_env_vars.yml'
+      - 'ymls/templates/template_version_upload.yml'
+
+variables:
+
+  actions:
+    value: ""
+    description: "Keys: unit (unit tests), integ (integration tests), fw (live.firewall tests), build. default:''(all jobs)"
+
+
+stages:
+  - save_env_vars
+  - test
+  - build
+  - build_deb_pkg
+  - version_upload
+
+
+core_unit_test_job:
+  stage: test
+  needs:
+    - job: save_env_vars_job
+      artifacts: false
+  variables:
+    GIT_STRATEGY: clone
+    GIT_SUBMODULE_STRATEGY: recursive
+  image:
+    name: nexus.iwarma.ru:8123/iwarma-docker/python_go:3.9-1.16.15-alpine.gitlab
+  rules:
+    - if: !reference [.rulesTemplate, testRuleUnit]
+    - if: !reference [.rulesTemplate, testRuleAlways]
+  script:
+    - /bin/sh ./cicd/unit_tests.sh
+  artifacts:
+    when: always
+    paths:
+      - ./public/test_coverage/index.html
+      - ./*.log
+    expire_in: 1 day
+  tags:
+    - docker-debian11
+
+core_integ_test_job:
+  stage: test
+  needs:
+    - job: save_env_vars_job
+      artifacts: false
+  rules:
+    - if: !reference [.rulesTemplate, testRuleInteg]
+    - if: !reference [.rulesTemplate, testRuleAlways]
+  variables:
+    GIT_STRATEGY: clone
+    GIT_SUBMODULE_STRATEGY: recursive
+
+    POSTGRES_PASSWORD: 'postgres'
+
+    ES_JAVA_OPTS: "-Xmx512m -Xms512m"
+    ELASTIC_PASSWORD: changeme
+    REDIS_PORT: '6379'
+  image:
+    name: nexus.iwarma.ru:8123/iwarma-docker/python_go:3.9-1.16.15-alpine.gitlab
+    #entrypoint: ["/bin/sh"]
+  services:
+    - name: registry.iwarma.ru/iwa/dev/console-docker/console-elasticsearch:latest
+      alias: elasticsearch
+      command: [ "bin/elasticsearch", "-Ediscovery.type=single-node" ]
+    - name: postgres:12-alpine3.16
+      alias: db
+    - name: redis:alpine3.16
+      alias: redis
+  script:
+    - /bin/sh ./cicd/integration_tests.sh
+  artifacts:
+    when: always
+    paths:
+      - ./public/test_coverage/index.html
+      - ./*.log
+    expire_in: 1 day
+  tags:
+    - docker-debian11
+
+core_live_fw_test_job:
+  stage: test
+  needs:
+    - job: save_env_vars_job
+      artifacts: false
+  rules:
+    - if: !reference [.rulesTemplate, testRuleFW]
+    - if: !reference [.rulesTemplate, testRuleAlways]
+  variables:
+    GIT_STRATEGY: clone
+    GIT_SUBMODULE_STRATEGY: recursive
+  image:
+    name: nexus.iwarma.ru:8123/iwarma-docker/python_go:3.9-1.16.15-alpine.gitlab
+  script:
+    - /bin/sh ./cicd/live_fw_tests.sh
+  artifacts:
+    when: always
+    paths:
+      - ./public/test_coverage/index.html
+      - ./*.log
+    expire_in: 1 day
+  tags:
+    - docker-debian11-fw
+
+checker_test_job:
+  stage: test
+  variables:
+    GIT_STRATEGY: clone
+  needs:
+    - job: save_env_vars_job
+      artifacts: false
+  rules:
+    - if: !reference [.rulesTemplate, testRuleUnit]
+    - if: !reference [.rulesTemplate, testRuleAlways]
+  script:
+    - cd checker
+    - /bin/bash ../cicd/go_test.sh
+  tags:
+    - shell-debian11
+
+core_build_job:
+  stage: build
+  needs:
+    - job: core_unit_test_job
+      artifacts: false
+      optional: true
+    - job: core_integ_test_job
+      artifacts: false
+      optional: true
+    - job: core_live_fw_test_job
+      artifacts: false
+      optional: true
+  rules:
+    - if: !reference [.rulesTemplate, buildRule]
+  variables:
+    GIT_STRATEGY: clone
+    GIT_SUBMODULE_STRATEGY: recursive
+  script:
+    - python3 ./cicd_extra/build_job.py -n amccore
+  artifacts:
+    paths:
+      - console/static/react
+      - console/templates/console/index.html
+      - console/settings/base.py
+    expire_in: 1 day
+  tags:
+    - shell-debian11
+
+checker_build_job:
+  stage: build
+  needs:
+    - job: checker_test_job
+      artifacts: false
+      optional: true
+  rules:
+    - if: !reference [.rulesTemplate, buildRule]
+  variables:
+    GIT_STRATEGY: clone
+  script:
+    - python3 ./cicd_extra/build_go_job.py -n amcchecker
+  artifacts:
+    paths:
+      - ./checker/checker
+    expire_in: 1 day
+  tags:
+    - shell-debian11
+
+core_deb_pkg_job:
+  stage: build_deb_pkg
+  variables:
+    GIT_STRATEGY: clone
+    GIT_SUBMODULE_STRATEGY: recursive
+  needs:
+    - job: core_build_job
+      artifacts: true
+  rules:
+    - if: !reference [.rulesTemplate, buildRule]
+  script:
+    - python3 ./cicd_extra/pack_job.py -p amccore
+  artifacts:
+    paths:
+      - artifact.json
+    expire_in: 1 day
+  tags:
+    - shell-debian11
+
+checker_deb_pkg_job:
+  stage: build_deb_pkg
+  variables:
+    GIT_STRATEGY: clone
+  needs:
+    - job: checker_build_job
+      artifacts: true
+  rules:
+    - if: !reference [.rulesTemplate, buildRule]
+  script:
+    - python3 ./cicd_extra/pack_job.py -p amcchecker
+  artifacts:
+    paths:
+      - artifact.json
+    expire_in: 1 day
+  tags:
+    - shell-debian11
+
+core_ver_upload_job:
+  extends: .version_upload
+  rules:
+    - if: !reference [.rulesTemplate, buildRule]
+  needs:
+    - job: core_deb_pkg_job
+      artifacts: true
+
+checker_ver_upload_job:
+  extends: .version_upload
+  rules:
+    - if: !reference [.rulesTemplate, buildRule]
+  needs:
+    - job: checker_deb_pkg_job
+      artifacts: true
+
\ No newline at end of file
diff --git a/.gitmodules b/.gitmodules
new file mode 100644
index 0000000..7b1375b
--- /dev/null
+++ b/.gitmodules
@@ -0,0 +1,8 @@
+[submodule "correlator"]
+    path = correlator
+    url = ../../../../iwa/dev/console/correlator.git
+    branch = develop
+[submodule "license"]
+    path = license
+    url = ../../../../iwa/dev/license/client.git
+    branch = develop
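The submodule URLs above are relative; git resolves them against the superproject's own remote, so clones from the same GitLab host work without rewriting .gitmodules. A small illustrative helper, hypothetical and not part of this patch, assuming the superproject remote is https://gitlab.iwarma.ru/iwa/dev/console/core.git (consistent with the issue links in CHANGELOG.md below):

    # Hypothetical illustration of how git resolves a relative submodule URL
    # against the superproject's remote; assumes an http(s)-style remote.
    from posixpath import join, normpath

    def resolve_submodule_url(remote: str, rel: str) -> str:
        base = remote[:-len(".git")] if remote.endswith(".git") else remote
        scheme, _, rest = base.partition("://")
        host, _, path = rest.partition("/")
        # Each "../" in the relative URL strips one component of the
        # remote path before the submodule path is appended.
        return f"{scheme}://{host}/{normpath(join(path, rel))}"

    # resolve_submodule_url("https://gitlab.iwarma.ru/iwa/dev/console/core.git",
    #                       "../../../../iwa/dev/console/correlator.git")
    # -> "https://gitlab.iwarma.ru/iwa/dev/console/correlator.git"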
+
+## 1.3.0 [Not released]
+### 1.3.0-pre_dev5
+### Added
+- Implemented GosSOPKA integration for sending incidents [#995](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/995)
+- Implemented GosSOPKA configuration and incident submission, plus a list of sent incidents [#996](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/996)
+- Added the ability to configure the antivirus on an Endpoint [MC-359](https://iwarma.atlassian.net/browse/MC-359)
+- Added a fix for uploading Suricata rules to ARMAIF [MC-4](https://iwarma.atlassian.net/browse/MC-4)
+- Integrity-check error during endpoint synchronization [MC-458](https://iwarma.atlassian.net/browse/MC-458)
+
+### 1.3.0-dev5
+- Added rotation settings on the ARMAIE management page [MC-10](https://iwarma.atlassian.net/browse/MC-10)
+- Added the `can_download_rotation_files` privilege [MC-113](https://iwarma.atlassian.net/browse/MC-113)
+- Added the ability for users to change their own password without special privileges [MC-356](https://iwarma.atlassian.net/browse/MC-356)
+- Added the ability to view the correlation group list without the `can_edit_correlation_groups` permission [MC-110](https://iwarma.atlassian.net/browse/MC-110)
+
+### 1.3.0-dev6
+- Fixed the display of a blocked user with an expired date [MC-291](https://iwarma.atlassian.net/browse/MC-291)
+- Added filtering to antivirus events only when viewing events via the IE antivirus settings menu [MC-441](https://iwarma.atlassian.net/browse/MC-441)
+
+### 1.3.0-dev8
+- Fixed the display of the related-sources edit button for firewall and endpoint without the privilege [MC-122](https://iwarma.atlassian.net/browse/MC-122)
+
+## [V3] - Not released
+### Added
+- Implemented the permission system and inheritance of nested permissions [#810](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/810)
+- Added preconfigured user groups [#811](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/811)
+- Display of the sid and rev fields on the correlation rule list page [#868](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/868)
+- Added password validators for user creation [#827](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/827)
+- Deletion of all event sources when a new license is activated [#903](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/903)
+- Added the ability to manually unlock a blocked user [#826](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/826)
+- Added an API for the React correlation rule creation page [#928](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/928)
+- Added an action confirmation message when changing the IF in correlator rule settings [#883](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/883)
+- Reworked the vector config for normalizing endpoint events [#763](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/763)
+- Reworked the vector config for normalizing firewall events [#762](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/762)
+- Added missing API access permissions and described the API and permissions in the OpenAPI schema [#233](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/233)
+
+### Fixed
+- Localization errors in Protection Systems [#871](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/871)
+- The "attempt limit" parameter could be saved with a value below zero [#872](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/872)
+- Localization of the pop-up message in System Settings
[#870](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/870)
+- An error about the automatic network map was written to celeryD.log [#799](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/799)
+- Localization errors on the automatic network map [#843](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/843)
+- Incorrect handling of a missing index in the event list [#699](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/699)
+- Typos in the event search Help [#839](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/839)
+- Fixed the local time output format [#839](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/839)
+- Privileges of the Assets block [#637](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/637)
+- Missing scrollbar in the import report [#787](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/787)
+- Checksum violation messages from Endpoint are not parsed [#863](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/863)
+- Creation of a duplicate source on the same port [#844](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/844)
+- Event buffering period in seconds [#856](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/856)
+- Configuration update from an endpoint [#864](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/864)
+- Privilege problems in Protection Systems [#805](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/805)
+- No success pop-up message after deleting an asset [#831](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/831)
+- A user could change their own "Expiration date" [#878](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/878)
+- Events are displayed differently in the event list and in an incident [#832](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/832)
+- Fixed source creation when copying an endpoint if the maximum port value is reached [#880](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/880)
+- "System settings" privileges [#765](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/765)
+- It is unclear whether the IDS rule upload succeeded or not [#874](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/874)
+- A downloaded configuration cannot be uploaded back [#873](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/873)
+- Deletion of the SSL certificate and key in system settings [#897](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/897)
+- Fixed adding SSL certificates and keys in system settings [#900](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/900)
+- Extra space in the "Date" column of the event list [#858](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/858)
+- Copying an Endpoint did not respect the license limit [#881](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/881)
+- The "Can delete automatically created sources" privilege does not work [#629](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/629)
+- Growth of nginx.access.log over time with the number of Endpoints [#789](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/789)
+- Adding Admin to a user group caused duplication in incident assignment [#869](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/869)
+- Password complexity requirements are not applied when a user is first created [#710](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/710)
+- The "Add" button does not appear in sources after deleting the last
source [#820](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/820)
+- Duplication of firewall assets with the same IP [#800](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/800)
+- Deletion of widgets after a user's privileges change [#627](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/627)
+- The "Edit asset" button is available on the automatic network map despite missing privileges [#651](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/651)
+- Improved time display [#922](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/922)
+- Renamed some access permissions [#825](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/825)
+- Sending SYSLOG over TCP when it is selected in incident export [#912](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/912)
+- Error output when importing correlation rules with a nonexistent IF [#885](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/885)
+- Fixed zooming below the middle band [#853](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/853)
+- Extended information about the assigned user in rotated incidents [#887](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/887)
+- A correlation rule that generates incidents does not fire if no search field is specified [#879](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/879)
+- Duplicated action buttons in the event source list [#940](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/940)
+- Renaming an Endpoint creates a new source that cannot then be deleted [#888](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/888)
+- Convert the license input numbers to INT [#551](https://gitlab.iwarma.ru/iwa/dev/console/core/-/merge_requests/551)
+- Incorrect display of the background image file name [#812](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/812)
+- Fixed all tests [#969](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/969)
+
+## [1.1.0] - Not released
+## [1.1.0-rc21]
+### Fixed
+- Localization errors in the Correlation list, part 2 [#776](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/776)
+
+## [1.1.0-rc20]
+### Fixed
+- Extended the Console-Endpoint device control interaction for ARMAIE version 2.3.4 [#901](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/901)
+- Parsing of messages from endpoint version 2.3.4 [#739](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/739)
+
+## [1.1.0-rc19]
+### Added
+- Parsing of USB messages from Endpoint [#739](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/739)
+- Reworked the Console-Endpoint device control interaction [#901](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/901)
+
+### Fixed
+- A disabled correlation rule can now be re-enabled [#877](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/877)
+- Fixed localization in the correlation rule import/export help [#776](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/776)
+- Fixed an error when creating an endpoint whose name matches an event source
[#904](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/904)
+- Parsing of firewall messages [#895](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/895)
+- Added help for event rotation [#857](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/857)
+- Fixed the UDP protocol filter not working on the automatic network map [#821](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/821)
+- Sorting in the incident list [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
+
+## [1.1.0-rc18] - 11.10.2021
+### Added
+- Implemented the requirement to change one's own password [#770](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/770)
+
+### Fixed
+- The same event source editing method for MC as is done for IE [#860](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/860)
+- Display of the default filters when the incident list page loads [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
+- Copying an Endpoint together with its source and choosing a port for the copy [#862](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/862)
+
+## [1.1.0-rc17] - 08.11.2021
+### Added
+- Display of error messages when deleting an event source [#261](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/261)
+- The same event source editing method for MC as is done for IE [#860](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/860)
+- Display of the default filters when the incident list page loads [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
+
+### Fixed
+- Widgets stopped working after TLS was disabled [#775](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/775)
+- Odd translation of Event Sources [#721](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/721)
+- Error in the message shown for an invalid login/password [#697](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/697)
+- Hard-coded Status filter on the assets table [#728](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/728)
+- Toggling the "Create source" checkbox clears the "Port" field [#746](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/746)
+- Sorting on the correlation rules page when default-sorted by the status column [#875](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/875)
+
+## [1.1.0-rc16] - 25.10.2021
+- Removed debug output from the browser console [#846](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/846)
+- Fixed Incident search for non-superusers [#797](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/797)
+- Fixed serialization when exporting a correlation rule with the Incidents action [#782](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/782)
+- Fixed connections not being displayed when the "Show neighbors" filter is absent [#848](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/848)
+- Fixed the "System information" and "Services" widgets not staying on the dashboard [#447](https://gitlab.iwarma.ru/iwa/dev/console/core/-/merge_requests/447)
+- Fixed the "Signature name" column name on the event list page [#722](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/722)
+- Fixed the translation of the error shown during online license activation [#719](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/719)
+- Fixed correlation rule import when uploading a rule with a rev lower than the loaded one [#834](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/834)
+- Fixed label generation for assets of unknown type on the network interaction map [#579](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/579)
+- Fixed a jQuery error in a widget [#766](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/766)
+- Changed the incident rotation system [#857](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/857)
+
+## [1.1.0-rc14] - 11.10.2021
+### Added
+- License server client [#666](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/666)
+- Added flexibility to correlator privileges [#628](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/628)
+- Removed the connection protocol selection when working with ARMAIF [#234](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/234)
+- Fixed the asset filter button on the network interaction map [#692](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/692)
+
+### Changed
+- Removed the "Type" column from the correlation rule list [#676](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/676)
+- Hid the System column in the "Source list" section [#678](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/678)
+- Changed the event list page [#691](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/691)
+- Changed the incident export settings page [#674](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/674)
+- Changed the logstash config to correctly parse and filter non-"CEF" messages [#575](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/575)
+- Changed the Russian translation of the ```can_export_journals``` privilege from ```Может скачивать журналы``` to ```Может экспортировать журналы``` [#626](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/626)
+- Changed the default filters on the incident list page [#688](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/688)
+- Removed the "can view network attacks" privilege [#631](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/631)
+- Removed the "Description" field on the add network map background screen [#750](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/750)
+- Hid the "Download static map" function [#759](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/759)
+- Untranslated pop-up notifications on the Endpoint list page, plus a new notification for a config upload error [#758](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/758)
+- Renamed the buttons in the Endpoint table [#757](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/757)
+- Hid the CSV export buttons when storage view permissions are missing [#638](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/638)
+- Fixed the layout in the correlation rule check [#726](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/726)
+- Fixed the interaction with the "edit groups" button on the correlation rule page [#696](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/696)
+- Fixed localization errors when creating/editing a rule [#724](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/724)
+- Changed how the console clock works [#764](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/764)
+- Rule upload now correctly runs the SID and REV existence check [#780](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/780)
+
+### Fixed
+- Editing the IP of a firewall duplicated the Asset [#349](https://gitlab.iwarma.ru/iwa/dev/console/core/-/merge_requests/349)
+- Changing an incident's status
changes its ID [#606](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/606)
+- Hid the vulnerability handling functions [#602](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/602)
+- The manage user groups button is missing in the user list [#618](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/618)
+- Added the same fields to the endpoint edit form as in the add form [#679](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/679)
+- Fixed tests [#669](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/669)
+- Fixed errors when working with the "Manufacturer" catalog [#636](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/636)
+- Changed behavior: the "----" asset status is recognized as an allowed asset [#647](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/647)
+- Fixed the "can view assets" privilege causing an error when opening the asset list [#693](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/693)
+- Unneeded "can view network attacks" privilege [#631](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/631)
+- A source can be created while creating an Endpoint despite missing privileges [#609](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/609)
+- Fixed an error when editing an event source [#601](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/601)
+- The "Can download journals" privilege is responsible for something else [#626](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/626)
+- An incorrect file is generated when exporting assets [#600](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/600)
+- A user's "Expiration date" does not prevent them from logging in [#617](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/617)
+- Privilege problems in user list management [#624](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/624)
+- Added a background for expanded group nodes on the static network map and removed an unneeded icon [#661](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/661)
+- Hard-coded Status filter on the assets table [#728](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/728)
+- Access to the license page without logging in [#720](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/720)
+- Logstash config for IF [#745](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/745)
+- Localization error in Rotation settings [#747](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/747)
+- Odd translation of Event Sources [#721](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/721)
+- Incidents on Endpoint assets are not linked to the Endpoint asset [#660](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/660)
+- Unhandled error during online license activation [#719](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/719)
+- Problems when installing a new license [#716](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/716)
+- Unlocalized validation error for an invalid login/password [#697](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/697)
+- Localization and spelling errors [#594](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/594)
+- Translations (typos and other issues) [#680](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/680/)
+- Endpoint configuration copy error [#754](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/754)
+- Log source adding error [#261](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/261)
+- Localization of errors when blocking a user [#773](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/773)
+- Localization of errors in the password change form [#817](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/817)
+- Localization of the error on the network map page [#813](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/813)
+- Localization errors in the License section [#718](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/718)
+- Localization errors in the Correlation list, part 2 [#776](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/776)
+
+
+
+## [1.1.0-rc10] - 2021-08-26
+### Changed
+- Removed the Export button from the asset list [#671](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/671)
+- Removed the "Type" column from the protection systems list and card [#672](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/672)
+
+### Fixed
+- The correlation rule save button misbehaves [#670](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/670)
+
+## [1.1.0-rc9] - 2021-08-23
+### Fixed
+- Added a USB flash drive management interface (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/634)
+- Loss of meaning in the "Dashboard:" block privileges (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/615)
+- Creating a correlation rule: the value disappears from the Condition "Field" (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/604)
+- The correlator creates new indices incorrectly (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/14)
+- The correlator creates new indices incorrectly (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/598)
+- Editing endpoint settings spawns a new asset in the asset table (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/593)
+- Filter behavior on the Assets page (https://gitlab.iwarma.ru/iwa/dev/console/console-ui-react/-/issues/7)
+- Leftover UI elements in MC related to the Endpoint integration (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/589)
+- Testing the Test Task (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/583)
+- Test task (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/582)
+- The correlation rule save button misbehaves (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/19)
+- Made the "Save" button on the correlation rule creation page inactive until at least one action is added (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/668)
+- Added the QueryString predicate (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/17)
+- Redundant code on the login page (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/658)
+- Test task (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/657)
+- A problem receiving events from the firewall (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/655)
+- Updated the frontend for OPC DA (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/644)
+- Strange correlation rule behavior (https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/15)
+- Notifications do not work (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/640)
+- Bug when refreshing the "Correlator" widget [#329](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/329)
+- Removed the `NormalizedEvent` model in the `logstash` app [#498](https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/498)
+
+
+## [1.1.0-rc8] - 2021-08-12
+### Fixed
+- Even deleted users are shown in "Assigned to" in the Incident filters
(https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/639)
+- Wrong fields in the console (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/610)
+- Bug in correlator rules export (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/576)
+- Search in correlation (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/573)
+- Storage view (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/572)
+- Opening the incident journal (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/571)
+- Event journal rotation (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/570)
+- A problem with creating correlation rules (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/569)
+- Bugs on the network interaction map (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/558)
+- A 500 error appears when opening an incident with a large number of events (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/554)
+- Improvements to the "Search" field (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/552)
+- Identical events need to be merged by the correlator (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/544)
+- Add incident filtering (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/543)
+- Develop a client for the licensing server (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/516)
+- Add a description to the search help dialog on the events page (https://gitlab.iwarma.ru/iwa/dev/console/core/-/issues/492)
+
+## 1.0.2 (20210128)
+
+### Added
+- Random default database password
+- Event export
+- Correlator
+- Ability to define actions to take when incidents occur
+- Multithreaded event processing
+- Export of events and incidents to CSV
+- Ability to configure the database during installation via debconf
+- Filter autocompletion
+- Display of importance and some other parameters as icons
+- Display of service startup via the web interface at start
+- Default correlator rules
+- Endpoint support
+- License widget
+
+### Changed
+- Event and incident display
+- Display of some chart widgets changed and improved
+- Date display format
+- Top navigation menu reworked for easier use
+- Disabled login to the admin panel
+- Password change form
+- Storage page reworked
+- Increased the maximum upload file size
+- CEF format
+
+### Fixed
+- Database handling during install and uninstall
+- Error statuses when working with the storage
+- Validation of some fields
+- Logstash settings
+- Translations
+- Missing icon on some web interface pages
+- Element layout on the widgets page
+- Size of some widgets
+- Deletion of a network map node
+- Duplicated table button
+- Width of the icon column
+- Clearing the new group name on creation
+- Service start order
+- Proxying through nginx
+- Removed excessive privileges
+- Removed buttons that cannot be invoked due to missing privileges
+- 500 error after startup
+
+## Version changed to 1.0
+
+## 1.4 (20200730)
+
+### Added
+- Network map
+- Custom maps
+- Log input interface
+- Dynamic log input
+- Hiding and deactivating inactive icons
+- Updated translations
+- Selection of data rotation conditions
+
+### Changed
+- Incident page
+
+### Fixed
+- Access permissions via the API
+- Display of deleted users assigned to events
+- Some assets were not displayed
+- File upload to the sensor
+- Asset type not displayed
on the assets page
+- Error when an administrator opens the profile page
+- Some log parsing errors
+
+## 1.3 (20200526)
+
+### Added
+- Script for formatting dump output
+- New widgets
+- Appearance of assets discovered in events
+
+### Changed
+- Unified table style
+- After the expiration date passes, the user becomes inactive (logging in as that user is impossible)
+- The administrator user is now created only on first startup, so the initial user can be deleted or their details changed without being reset to the defaults after a restart
+
+### Fixed
+- Bugs with record and button activity when no groups are selected on the group management screen
+- Bugs saving rotation settings
+- Bug uploading a configuration to ARMA Industrial Firewall
+- Bug when changing a group name
+- Incorrect parsing of some events received via CEF
+- Incident creation date was incorrect
+
+
+## 1.2 (20200422)
+
+### Added
+- Table icon tooltips
+- Notifications
+- Uploading IDS rules to sensors
+- Network map
+- Section for storing and downloading data (Storage)
+- Widgets for incidents by importance and category
+- Per-user time zone setting
+- Rotation of incidents and events by schedule and size
+
+### Changed
+- Header changed to a single line
+- Improved the system and services information widget
+- Improved the widget interaction mechanism
+
+### Fixed
+- Autofocus on the login page
+- Some translations
+- Access permissions for some pages
+- Date range widget bugs
+- Changes to the add user page when fields are filled in incorrectly
+- Browser warnings on the main page
+
+
+## 1.1 (20200317)
+
+### Added
+- Table icon tooltips
+- Unified dialog style
+- Unified style and a better look for file selection fields
+- Database export/import
+- Correlation rule export/import
+- IPython as the default console for working with django
+- Assets page
+- Sensors page
+- Widget translations
+- More information on the event list page
+- Ability to add correlation rules for firewall login and web access events
+- Input validation for the date range field
+- Event grouping
+- Icon for navigating to the dashboard
+- Display of events inside an incident
+- User permission editing page
+
+### Fixed
+- User creation error
+- debugToolbar operation
+- Minor fixes and translations
+- Logstash crash
+- Filters on the incident page did not always work
+- Bug that prevented creating a user with correctly entered data
+- Incident status changed incorrectly
+
+
+## 1.0 (20200217)
+
+### Added
+- Added the base project
+- Added an application with the adminlte template
+- Added a docker configuration for development
+- Added a set of logstash configs for arpwatch, firewall, suricata, web-auth and web-access
+- Added an API for receiving normalized events from logstash
+- Added user management pages (list, add, delete, edit)
+- Added an event list page with search and filtering
+- Added an incident list page with search and filtering
+- Added an incident view and edit page
+- Added an "Operator panel" page with the ability to add widgets and save the layout per user
+- Added a login page
+- Added pages for the 403, 404 and 500 errors
+- Added the
"Системная информация" и "Сервисы" +- Добавлен набор прав, функции для их получения и проверки +- Добавлена модель события корреляции и базовый коррелятор +- Добавлена возможность создания правил корреляции по файлу правил Suricata +- Базовые настройки логорирования +- Базовые настройки кеширования +- Добавлен класс формы для отображения виджетов в стиле adminlte +- Добавлено поле ввода интервалов даты и времени +- Добавлено поле ввода интервалов целых чисел +- Добавлено поле ввода даты +- Добавлен плагин Datatables для единого отображения и поведения таблиц +- Добавлен конфиг для Gitlab-CI +- В режиме отладки, добавлен Debub-toolbar +- Добавлен набор контекст-процссоров для генерации меню diff --git a/README.md b/README.md new file mode 100644 index 0000000..b7bc8e5 --- /dev/null +++ b/README.md @@ -0,0 +1,58 @@ +# Подготовка к запуску тестов unit, integration, live_firewall + +Перед запуском тестов, нужно подготовить базу данных, для этого запускаем: +```bash +DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations +DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate +``` + +# Запуск тестов unit + +Для запуска тестов, нужно активировать виртуальное окружение и запустить: +```bash +pytest --disable-warnings -m unit +``` + +# Запуск тестов интеграционных + +Для запуска интеграционных тестов, вызываем: +```bash +pytest --disable-warnings -m integration +``` +При этом, интеграционные можно запускать либо в докере, либо на развернутой виртуалке, тк +они завязаны на внешние сервисы. + +# Запуск тестов на живом AIF + +Для запуска тестов на живом AIF, вызываем: +```bash +pytest --disable-warnings -m live_firewall +``` +Перед каждым запуском теста проводится проверка доступности AIF, в случае если +AIF не доступен тест завершается с состоянием False + +После выполенения последнего теста "test_live_set_firewall_suricata" AIF уходит в +перезагрузку длительностью ~40-60 сек. + +**Важно! Pipline запускает тесты последовательно, согласн очереди запросов от пользователей GitLab, +управляет этим отдельный раннер** + + + +# Оценка покрытия тестов + +Чтобы оценить общее покрытие кода тестами, нужно в docker запустить следующие команды: +```bash +coverage run -m pytest --disable-warnings +coverage report +``` + +Для HTML результата: +```bash +coverage html +``` + +# Отключение тестов при push'e и merge request'e + +Тесты в пайплайне запускаются автоматически. 
\ No newline at end of file
diff --git a/assets/__init__.py b/assets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/assets/admin.py b/assets/admin.py
new file mode 100644
index 0000000..1229c67
--- /dev/null
+++ b/assets/admin.py
@@ -0,0 +1,16 @@
+from django.contrib import admin
+
+from assets.models.assets import OperatingSystem, AssetListGroup, Asset, AssetManufacturer
+from core.mixins import JsonWidgetMixin
+
+
+class AssetAdmin(JsonWidgetMixin, admin.ModelAdmin):
+    list_display = ('ip', 'os', 'name', 'status')
+    list_display_links = ('name',)
+    list_filter = ('os',)
+
+
+admin.site.register(OperatingSystem)
+admin.site.register(AssetListGroup)
+admin.site.register(Asset, AssetAdmin)
+admin.site.register(AssetManufacturer)
diff --git a/assets/apps.py b/assets/apps.py
new file mode 100644
index 0000000..5569d30
--- /dev/null
+++ b/assets/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class AssetsConfig(AppConfig):
+    name = 'assets'
diff --git a/assets/constants.py b/assets/constants.py
new file mode 100644
index 0000000..c49736e
--- /dev/null
+++ b/assets/constants.py
@@ -0,0 +1,3 @@
+from incident.models import Incident
+
+RESOLVED_STATUS = Incident.Status.RESOLVED
diff --git a/assets/filters.py b/assets/filters.py
new file mode 100644
index 0000000..1ef2e9d
--- /dev/null
+++ b/assets/filters.py
@@ -0,0 +1,14 @@
+from django_filters import rest_framework as filters
+
+from assets.models.assets import Asset
+
+
+class AssetFilter(filters.FilterSet):
+    incidents = filters.UUIDFilter(method='filter_by_incidents')
+
+    def filter_by_incidents(self, queryset, name, value):
+        return queryset.filter(incidents=value)
+
+    class Meta:
+        model = Asset
+        fields = ['incidents']
diff --git a/assets/migrations/__init__.py b/assets/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/assets/models/__init__.py b/assets/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/assets/models/assets.py b/assets/models/assets.py
new file mode 100644
index 0000000..5a7e262
--- /dev/null
+++ b/assets/models/assets.py
@@ -0,0 +1,92 @@
+from django.db import models
+from django.utils.translation import gettext_lazy
+
+from console.models import UniqueNameDescriptionModel, UpdatedNameDescriptionModel, SensorConnectedMixin
+from core.validators import mac_address_validator
+from incident.models import Incident
+
+
+class OperatingSystem(UniqueNameDescriptionModel):
+    """ One item in the operating systems list """
+
+    class Meta:
+        verbose_name = gettext_lazy('Operating system')
+
+
+class AssetListGroup(UniqueNameDescriptionModel):
+    """ Asset list display group """
+    collapsed = models.BooleanField(default=False)
+
+
+class AssetManufacturer(UniqueNameDescriptionModel):
+    """ Asset manufacturer """
+
+    class Meta:
+        verbose_name = gettext_lazy('Asset manufacturer')
+
+
+class Asset(SensorConnectedMixin, UpdatedNameDescriptionModel):
+    """ Model for one asset """
+
+    class AllowStatus(models.IntegerChoices):
+        NEW = 0, gettext_lazy('New asset')
+        ALLOWED = 1, gettext_lazy('Allowed asset')
+
+    class AssetType(models.TextChoices):
+        USER = 'user', gettext_lazy('User')
+        ARMA = 'arma_industrial_firewall', gettext_lazy('ARMA industrial firewall')
+        PLC = 'plc', gettext_lazy('PLC')
+        PC = 'pc', gettext_lazy('PC')
+        SERVER = 'server', gettext_lazy('Server')
+        NETWORK_DEVICE = 'network_device', gettext_lazy('Network device')
+
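+    # Identification and inventory attributes follow; most are optional (blank/null)
+    # so an asset record can be created from partial data and enriched later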
manufacturer = models.ForeignKey(AssetManufacturer, + verbose_name=gettext_lazy('Manufacturer'), + on_delete=models.SET_NULL, + blank=True, + null=True) + model = models.CharField(blank=True, + null=True, + max_length=150, + verbose_name=gettext_lazy('Model'), + help_text=gettext_lazy('Asset model')) + ip = models.GenericIPAddressField(verbose_name=gettext_lazy('IP'), help_text=gettext_lazy("Asset's IP address")) + mac = models.CharField(blank=True, + null=True, + max_length=17, + verbose_name=gettext_lazy('MAC'), + help_text=gettext_lazy("Asset's MAC address"), + validators=[mac_address_validator]) + os = models.ForeignKey(OperatingSystem, + on_delete=models.SET_NULL, + verbose_name=gettext_lazy('OS'), + help_text=gettext_lazy('Operation systems, found on asset'), + blank=True, + null=True) + ports = models.JSONField(verbose_name=gettext_lazy('Ports'), + help_text=gettext_lazy('List of open ports'), + null=True, + blank=True, default="[]") + incidents = models.ManyToManyField(Incident, verbose_name=gettext_lazy('Incidents'), blank=True) + group = models.ForeignKey(AssetListGroup, + on_delete=models.SET_NULL, + blank=True, + null=True, + verbose_name=gettext_lazy('Group')) + asset_type = models.CharField(choices=AssetType.choices, + max_length=128, + verbose_name=gettext_lazy('Asset type'), + blank=True, + null=True) + status = models.IntegerField(choices=AllowStatus.choices, + verbose_name=gettext_lazy('Asset status'), + help_text=gettext_lazy('Asset allow status'), + default=AllowStatus.NEW, + blank=True) + + @property + def manufacturer_name(self): + return self.manufacturer.name + + def __str__(self): + return self.name diff --git a/assets/serializers/__init__.py b/assets/serializers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/assets/serializers/assets.py b/assets/serializers/assets.py new file mode 100644 index 0000000..34f7454 --- /dev/null +++ b/assets/serializers/assets.py @@ -0,0 +1,139 @@ +import json + +from django.db.models import Q +from rest_framework import serializers + +from assets.constants import RESOLVED_STATUS +from assets.models.assets import Asset, AssetListGroup, OperatingSystem, AssetManufacturer +from console.models import Vulnerability +from core.serializers import DateTimeLocalizedField, ModelLocalizedSerializer +from incident.models import Incident +from incident.serializers.incident import IncidentSerializer + + +class OsSerializer(serializers.ModelSerializer): + class Meta: + model = OperatingSystem + fields = '__all__' + + +class OsNameSerializer(serializers.ModelSerializer): + class Meta: + model = OperatingSystem + fields = ['name'] + + +class AssetManufacturerSerializer(serializers.ModelSerializer): + class Meta: + model = AssetManufacturer + fields = ['id', 'name', 'description'] + + +class AssetGroupSerializer(serializers.ModelSerializer): + class Meta: + model = AssetListGroup + fields = ['name', 'description', 'id', 'collapsed'] + + +class AssetListSerializer(serializers.ModelSerializer): + updated = DateTimeLocalizedField() + count_incidents = serializers.IntegerField() + + class Meta: + model = Asset + fields = ['id', 'name', 'asset_type', 'status', 'ip', 'updated', 'count_incidents'] + + +class AssetDetailSerializer(serializers.ModelSerializer): + os = OsSerializer() + group = AssetGroupSerializer() + incidents = IncidentSerializer(many=True) + ports = serializers.SerializerMethodField() + updated = DateTimeLocalizedField() + + class Meta: + model = Asset + fields = '__all__' + + def get_ports(self, asset): + try: + ports = 
json.loads(asset.ports) + return f'{ports}' or f'{[]}' + except TypeError: + return f'{[]}' + + +class AssetCreateUpdateSerializer(serializers.ModelSerializer): + class Meta: + model = Asset + fields = '__all__' + + def to_representation(self, instance): + return AssetDetailSerializer(instance=instance).data + + +class AssetAuthorizeSerializer(serializers.Serializer): + selected_assets = serializers.ListField(child=serializers.IntegerField()) + + +class AssetCsvExportSerializer(ModelLocalizedSerializer): + """ Serializer for CSV export of Assets data + The idea behind parsing almost every field is that if we dont do this, than fields that are used as links to the + other models will be shown in table as the dictionary + """ + incidents = serializers.SerializerMethodField('get_incidents') + os = serializers.ReadOnlyField(source='os.name', allow_null=True) + group = serializers.ReadOnlyField(source='group.name', allow_null=True) + asset_type = serializers.ReadOnlyField(source='get_asset_type_display', allow_null=True) + status = serializers.ReadOnlyField(source='get_status_display', allow_null=True) + manufacturer = serializers.ReadOnlyField(source='manufacturer_name', allow_null=True) + + def get_incidents(self, obj): + amount_of_active_incs = Incident.objects.filter(~Q(status=RESOLVED_STATUS), asset=obj).count() + return amount_of_active_incs + + class Meta: + model = Asset + fields = ['name', 'manufacturer', 'updated', 'model', 'ip', 'os', 'ports', 'incidents', + 'group', 'asset_type', 'status'] + + +class IncidentTitleSerializer(serializers.ModelSerializer): + class Meta: + model = Incident + fields = ['pk', 'title', 'status'] + + +class AssetInfoSerializer(serializers.ModelSerializer): + incidents = IncidentTitleSerializer(many=True) + os = OsNameSerializer() + updated = DateTimeLocalizedField() + + class Meta: + model = Asset + fields = ['id', 'name', 'description', 'ip', 'os', 'ports', 'updated', 'incidents', 'status'] + + +class AssetIncidentInfoSerializer(serializers.ModelSerializer): + class Meta: + model = Incident + fields = ['pk', 'title', 'description', 'status'] + + +class AssetVulnerabilitiesInfoSerializer(serializers.ModelSerializer): + class Meta: + model = Vulnerability + fields = ['name', 'description'] + + +class AssetActiveProblemsSerializer(serializers.ModelSerializer): + incidents = AssetIncidentInfoSerializer(many=True) + + def get_incidents(self, obj): + inc_queryset = Incident.objects.filter(~Q(status=RESOLVED_STATUS), asset=obj) + serializer = AssetIncidentInfoSerializer(instance=inc_queryset, many=True, context=self.context) + return serializer.data + + class Meta: + model = Asset + fields = ['incidents'] diff --git a/assets/tests/__init__.py b/assets/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/assets/tests/test_assets.py b/assets/tests/test_assets.py new file mode 100644 index 0000000..2e1ebc3 --- /dev/null +++ b/assets/tests/test_assets.py @@ -0,0 +1,47 @@ +import http +import logging +from http import HTTPStatus + +import pytest +from django.contrib.auth import get_user_model +from django.urls import reverse +from rest_framework.test import APIClient + +from assets.models.assets import OperatingSystem, Asset +from perms.models import Perm + +_log = logging.getLogger() + +TIMEOUT = 10 # time before timeout exception appears +User = get_user_model() + + +@pytest.mark.django_db +class TestAssetPagesAccess(object): + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model, add_user_with_permissions): + """ Fixture 
for preparing database for tests + :param client: object to work with test session + :param django_user_model: object to work with User model in pytest + :param add_user_with_permissions: fixture for adding new user + :return: prepared for tests database with: + user 'user1' with perms.can_view_user permission + user 'test_user' for testing view_user page features + """ + username = 'foo' + password = 'bar' + + add_user_with_permissions(username=username, password=password, + is_superuser=True) + add_user_with_permissions(username='test_no_perms', password='1') + add_user_with_permissions(username='test_right_perms', password='1', + permissions=[Perm.can_view_assets_list, + Perm.can_view_asset, + Perm.can_edit_asset]) + add_user_with_permissions(username='test_perms', password='1', + permissions=[]) + os = OperatingSystem.objects.create(name='MACOS') + for i in range(5): + Asset.objects.create(name=f'test{i}', ip='1.1.1.1', os=os) + diff --git a/assets/tests/test_assets_api.py b/assets/tests/test_assets_api.py new file mode 100644 index 0000000..0fdc324 --- /dev/null +++ b/assets/tests/test_assets_api.py @@ -0,0 +1,186 @@ +import json +import logging + +import pytest +from django.contrib.auth import get_user_model +from django.urls import reverse +from rest_framework import status + +from assets.models.assets import Asset, OperatingSystem +from incident.models import Incident +from perms.models import Perm + +_log = logging.getLogger() + +TIMEOUT = 10 # time before timeout exception appears +User = get_user_model() + + +@pytest.mark.django_db +class TestAssetPagesAccess(object): + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model, add_user_with_permissions): + """ Fixture for preparing database for tests + :param client: object to work with test session + :param django_user_model: object to work with User model in pytest + :param add_user_with_permissions: fixture for adding new user + :return: prepared for tests database with: + user 'user1' with perms.can_view_user permission + user 'test_user' for testing view_user page features + """ + username = 'foo' + password = 'bar' + + add_user_with_permissions(username=username, password=password, + is_superuser=True) + add_user_with_permissions(username='test_no_perms', password='1') + add_user_with_permissions(username='test_right_perms', password='1', + permissions=[Perm.can_view_assets_list, + Perm.can_view_asset, + Perm.can_edit_asset, + Perm.can_delete_asset]) + add_user_with_permissions(username='test_perms', password='1', + permissions=[]) + os = OperatingSystem.objects.create(name='MACOS') + for i in range(5): + Asset.objects.create(name=f'test{i}', ip='1.1.1.1', os=os) + + # TODO: Need to fix this + @pytest.mark.skip + @pytest.mark.integration + def test_export_assets_in_csv_api(self, add_user_with_permissions, api_client): + username = 'user' + password = 'pro100ton' + add_user_with_permissions(username=username, + password=password, + permissions=[Perm.can_view_network, + Perm.can_work_with_incidents, + Perm.can_export_incidents_list, + Perm.can_export_assets] + ) + user = User.objects.get(username=username) + api_client.force_authenticate(user) + + response = api_client.get(reverse('asset-csv-export')) + assert response.status_code == status.HTTP_200_OK + + @pytest.mark.unit + def test_user_w_perm_can_edit_asset(self, api_client): + """ Test for checking if user with right permissions can access edit asset page """ + user = User.objects.get(username='test_right_perms') + api_client.force_authenticate(user) + 
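+        # An empty PATCH is enough here: the test only exercises the permission gate,
+        # not the payload handling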
asset_pk = Asset.objects.get(name='test0').pk + url = reverse('asset-detail', args=[asset_pk]) + response = api_client.patch(url) + assert status.HTTP_200_OK == response.status_code + + @pytest.mark.unit + def test_user_wo_perm_cant_edit_asset(self, api_client): + """ Test for checking if user without right permissions cannot edit asset""" + user = User.objects.get(username='test_perms') + api_client.force_authenticate(user) + asset_pk = Asset.objects.get(name='test0').pk + url = reverse('asset-detail', args=[asset_pk]) + response = api_client.patch(url) + assert status.HTTP_403_FORBIDDEN == response.status_code + + @pytest.mark.unit + def test_asset_updated_correctly(self, api_client): + """ Test for checking if asset is saved correctly after update""" + user = User.objects.get(username='test_right_perms') + api_client.force_authenticate(user) + asset_pk = Asset.objects.get(name='test0').pk + url = reverse('asset-detail', args=[asset_pk]) + response = api_client.patch(url, {'model': 'test_model','ports':'[5000,6000]'}) + assert response.json()['model'] == 'test_model' + assert status.HTTP_200_OK == response.status_code + assert Asset.objects.get(name='test0').model == 'test_model' + assert Asset.objects.get(name='test0').ports == json.loads("[5000,6000]") + + @pytest.mark.unit + def test_asset_is_deleted_correctly(self, api_client): + """ Test for checking if asset is deleted correctly""" + user = User.objects.get(username='test_right_perms') + api_client.force_authenticate(user) + asset_pk = Asset.objects.get(name='test0').pk + url = reverse('asset-detail', args=[asset_pk]) + response = api_client.delete(url) + assert status.HTTP_200_OK == response.status_code + assert Asset.objects.filter(name='test0').exists() is False + assert Asset.objects.count() == 4 + + @pytest.mark.unit + def test_check_serializer_validation(self, api_client): + """ Test for checking if serializer return error with invalid data""" + user = User.objects.get(username='test_right_perms') + api_client.force_authenticate(user) + asset = Asset.objects.get(name='test0') + asset_pk = asset.pk + asset_os = asset.os + url = reverse('asset-detail', args=[asset_pk]) + response = api_client.patch(url, {'os': 'bad'}) + assert status.HTTP_400_BAD_REQUEST == response.status_code + assert 'os' in response.data + assert Asset.objects.get(name='test0').os == asset_os + + @pytest.mark.merge + def test_asset_correctly_count_number_of_incidents(self, api_client): + user = User.objects.get(username='test_right_perms') + api_client.force_authenticate(user) + asset = Asset.objects.get(name='test0') + incident = Incident.objects.create(title='test_inc', importance=10, event_count=10, events='') + asset.incidents.add(incident) + url = reverse('asset-list') + response = api_client.get(url) + assert response.status_code == status.HTTP_200_OK + assert response.data['results'][0]['count_incidents'] == 1 + assert response.data['results'][1]['count_incidents'] == 0 + + @pytest.mark.merge + def test_filter_by_incidents(self, api_client): + """ Test for checking filter returns asset""" + user = User.objects.get(username='test_right_perms') + api_client.force_authenticate(user) + incident = Incident.objects.create(title='test_inc', importance=10, event_count=10, events='') + incident_id = incident.incident_id + + asset = Asset.objects.get(name='test0') + asset.incidents.add(incident) + + url = reverse('asset-list') + response = api_client.get(url, **{'QUERY_STRING': f'incidents={incident_id}'}) + assert status.HTTP_200_OK == response.status_code + 
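+        # Log the filtered payload, then verify that exactly the one linked asset is returned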
_log.info(response.data) + _log.info(incident_id) + assert response.data['count'] == 1 + assert response.data['results'][0]['id'] == asset.pk + + bad_response = api_client.get(url, **{'QUERY_STRING': f'incidents=bad'}) + assert status.HTTP_400_BAD_REQUEST == bad_response.status_code + + @pytest.mark.unit + def test_create_asset_with_bad_status(self, api_client): + """We set the asset status asset to 0 by default. Then we will check that the status is always 0""" + user = User.objects.get(username='foo') + api_client.force_authenticate(user) + url = reverse('logstash-asset-list') + response = api_client.post(url, + data={ + "asset_type": "", + "description": "Description", + "group": "", + "ip": "127.0.0.1", + "manufacturer": "", + "model": "", + "name": "192.168.1.101", + "os": "", + "ports": "[5000]", + "sensor": "armaif_1", + "status": "25622", + "type": "asset", + }, format="json") + + assert status.HTTP_201_CREATED == response.status_code + asset = Asset.objects.get(name='192.168.1.101') + assert asset.status == 0 diff --git a/assets/urls.py b/assets/urls.py new file mode 100644 index 0000000..e68c6cf --- /dev/null +++ b/assets/urls.py @@ -0,0 +1,19 @@ +from django.urls import path, include +from rest_framework import routers + +from assets.views.assets import AssetViewSet, AssetInfoViewSet, OsViewSet, AssetGroupViewSet, AssetProblemsViewSet, \ + AssetManufacturersViewSet + +router = routers.DefaultRouter() + + +router.register('elements', AssetViewSet, basename='asset') +router.register('manufacturers', AssetManufacturersViewSet, basename='asset-manufacturers') +router.register('groups', AssetGroupViewSet, basename='asset-groups') +router.register('info', AssetInfoViewSet, basename='asset-info') +router.register('os', OsViewSet, basename='os') +router.register('problems', AssetProblemsViewSet, basename='asset-problems') + +urlpatterns = [ + path('', include(router.urls)) +] diff --git a/assets/views/__init__.py b/assets/views/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/assets/views/assets.py b/assets/views/assets.py new file mode 100644 index 0000000..20ba24d --- /dev/null +++ b/assets/views/assets.py @@ -0,0 +1,125 @@ +from django.db.models import Count, Q +from django.http import JsonResponse +from rest_framework.decorators import action +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, UpdateModelMixin, CreateModelMixin +from rest_framework.viewsets import GenericViewSet + +from assets.constants import RESOLVED_STATUS +from assets.filters import AssetFilter +from assets.models.assets import Asset, OperatingSystem, AssetListGroup, AssetManufacturer +from assets.serializers.assets import AssetGroupSerializer, AssetInfoSerializer, OsSerializer, AssetDetailSerializer, \ + AssetCsvExportSerializer, AssetListSerializer, AssetCreateUpdateSerializer, AssetActiveProblemsSerializer, \ + AssetAuthorizeSerializer, AssetManufacturerSerializer +from core.mixins import ApiPermissionCheckMixin, ExportToCsvMixin, DestroyModelResponseStatus200Mixin +from perms.models import Perm + + +class AssetViewSet(ApiPermissionCheckMixin, + ListModelMixin, + RetrieveModelMixin, + UpdateModelMixin, + DestroyModelResponseStatus200Mixin, + ExportToCsvMixin, + GenericViewSet): + column_titles = AssetCsvExportSerializer.Meta.fields + console_permissions = {'csv_export': [Perm.can_export_assets], 'list': [Perm.can_view_assets_list], + 'destroy': [Perm.can_delete_asset], 'retrieve': [Perm.can_view_asset], + 'authorize_assets': [Perm.can_view_assets_list], 'update': 
[Perm.can_edit_asset], + 'partial_update': [Perm.can_edit_asset], + } + filters = [] + filterset_class = AssetFilter + + class Meta: + model = Asset + + def get_queryset(self): + return Asset.objects.annotate(count_incidents=Count('incidents', filter=~Q(status=RESOLVED_STATUS))) + + def get_serializer_class(self): + if self.action == 'list': + return AssetListSerializer + if self.action in ['update', 'partial_update']: + return AssetCreateUpdateSerializer + return AssetDetailSerializer + + @action(detail=False, methods=["POST"], name="authorize_assets") + def authorize_assets(self, request): + """ API for authorizing assets by changing its status from NEW to ALLOWED """ + serializer = AssetAuthorizeSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + assets_to_change = serializer.validated_data['selected_assets'] + Asset.objects.filter(pk__in=assets_to_change).update(status=Asset.AllowStatus.ALLOWED) + return JsonResponse({'status': 'ok'}) + + +class AssetGroupViewSet(ApiPermissionCheckMixin, + ListModelMixin, + RetrieveModelMixin, + CreateModelMixin, + UpdateModelMixin, + DestroyModelResponseStatus200Mixin, + GenericViewSet): + serializer_class = AssetGroupSerializer + queryset = AssetListGroup.objects.order_by('name').all() + console_permissions = {'default': [Perm.can_view_assets_list]} + + class Meta: + model = AssetListGroup + + +class AssetInfoViewSet(ApiPermissionCheckMixin, + RetrieveModelMixin, + ListModelMixin, + GenericViewSet): + serializer_class = AssetInfoSerializer + console_permissions = {'retrieve': [Perm.can_view_asset], 'list': [Perm.can_view_assets_list]} + queryset = Asset.objects.all() + + class Meta: + model = Asset + + +class OsViewSet(ApiPermissionCheckMixin, + ListModelMixin, + RetrieveModelMixin, + CreateModelMixin, + UpdateModelMixin, + DestroyModelResponseStatus200Mixin, + GenericViewSet): + serializer_class = OsSerializer + queryset = OperatingSystem.objects.order_by('name') + console_permissions = {'list': [Perm.can_edit_assets_catalogs], 'destroy': [Perm.can_edit_assets_catalogs], + 'retrieve': [Perm.can_edit_assets_catalogs], 'update': [Perm.can_edit_assets_catalogs], + 'partial_update': [Perm.can_edit_assets_catalogs], 'create': [Perm.can_edit_assets_catalogs] + } + + class Meta: + model = OsSerializer + + +class AssetProblemsViewSet(ApiPermissionCheckMixin, RetrieveModelMixin, GenericViewSet): + serializer_class = AssetActiveProblemsSerializer + console_permissions = {'retrieve': [Perm.can_view_incidents_list]} + queryset = Asset.objects.all() + + class Meta: + model = Asset + + +class AssetManufacturersViewSet(ApiPermissionCheckMixin, + ListModelMixin, + RetrieveModelMixin, + CreateModelMixin, + UpdateModelMixin, + DestroyModelResponseStatus200Mixin, + GenericViewSet): + serializer_class = AssetManufacturerSerializer + queryset = AssetManufacturer.objects.all() + console_permissions = {'list': [Perm.can_edit_assets_catalogs], 'destroy': [Perm.can_edit_assets_catalogs], + 'retrieve': [Perm.can_edit_assets_catalogs], 'update': [Perm.can_edit_assets_catalogs], + 'partial_update': [Perm.can_edit_assets_catalogs], 'create': [Perm.can_edit_assets_catalogs] + } + + class Meta: + model = AssetManufacturer diff --git a/checker/.gitignore b/checker/.gitignore new file mode 100644 index 0000000..d9a1c9b --- /dev/null +++ b/checker/.gitignore @@ -0,0 +1,2 @@ +checker +env/* diff --git a/checker/deb/skeleton/DEBIAN/.gitkeep b/checker/deb/skeleton/DEBIAN/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git 
a/checker/deb/skeleton/usr/lib/systemd/system/amcchecker.service b/checker/deb/skeleton/usr/lib/systemd/system/amcchecker.service new file mode 100644 index 0000000..e856d15 --- /dev/null +++ b/checker/deb/skeleton/usr/lib/systemd/system/amcchecker.service @@ -0,0 +1,15 @@ +[Unit] +Description=ARMA management console checker service +After=network.target + +[Service] +Type=simple +ExecStart=/usr/local/armaconsole/app/amcchecker/checker +WorkingDirectory=/usr/local/armaconsole/app/amcchecker +Restart=always +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target diff --git a/checker/go.mod b/checker/go.mod new file mode 100644 index 0000000..bd548a0 --- /dev/null +++ b/checker/go.mod @@ -0,0 +1,9 @@ +module tehiz.ru/console/checker + +go 1.14 + +require ( + github.com/gorilla/mux v1.8.0 + github.com/sirupsen/logrus v1.7.0 + golang.org/x/text v0.3.4 +) diff --git a/checker/go.sum b/checker/go.sum new file mode 100644 index 0000000..66725a4 --- /dev/null +++ b/checker/go.sum @@ -0,0 +1,12 @@ +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.4 h1:0YWbFKbhXG/wIiuHDSKpS0Iy7FSA+u45VtBMfQcFTTc= +golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= diff --git a/checker/main.go b/checker/main.go new file mode 100644 index 0000000..26ef9e6 --- /dev/null +++ b/checker/main.go @@ -0,0 +1,397 @@ +package main + +import ( + "bytes" + "encoding/json" + "flag" + "fmt" + "html/template" + "net/http" + "os" + "os/exec" + "strings" + + "github.com/gorilla/mux" + log "github.com/sirupsen/logrus" + "golang.org/x/text/language" + "golang.org/x/text/message" +) + +const page = ` + + + + {{ .PageTitle }} + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+`
+
+// Context for rendering template
+type Context struct {
+    PageTitle   string
+    CardTitle   string
+    Details     string
+    ServiceName string
+    Status      string
+    SubStatus   string
+}
+
+type Item struct {
+    Name     string `json:"name"`
+    State    string `json:"active_state"`
+    SubState string `json:"sub_state"`
+}
+
+type Response struct {
+    Status string `json:"status"`
+    Reason string `json:"reason"`
+    Items  []Item `json:"items"`
+}
+
+func (response Response) Send(w http.ResponseWriter) {
+    // "data" instead of "bytes" so the imported bytes package is not shadowed
+    data, err := json.Marshal(response)
+    if err != nil {
+        log.Errorf("Can't serialize stat: %v\n", err.Error())
+        w.WriteHeader(http.StatusInternalServerError)
+        return
+    }
+
+    w.Header().Set("Content-Type", "application/json")
+    w.Write(data)
+}
+
+const (
+    StatusOK  = "ok"
+    StatusErr = "error"
+)
+
+func checkService(w http.ResponseWriter, r *http.Request) {
+    var response Response
+
+    items, err := checkServices()
+    if err != nil {
+        log.Errorf("Can't get status: %v\n", err.Error())
+
+        response.Status = StatusErr
+        response.Reason = err.Error()
+        response.Send(w)
+        return
+    }
+
+    response.Status = StatusOK
+    response.Items = items
+
+    response.Send(w)
+}
+
+func renderPage(w http.ResponseWriter, r *http.Request) {
+    t, _, err := language.ParseAcceptLanguage(r.Header.Get("Accept-Language"))
+    if err != nil {
+        log.Errorf("Got error parsing Accept-Language header: %v", err.Error())
+    }
+    log.Infof("Got languages: %v", t)
+
+    templ, err := template.New("page").Parse(page)
+    if err != nil {
+        log.Errorf("Can't parse template: %v", err.Error())
+        w.WriteHeader(http.StatusInternalServerError)
+        return
+    }
+
+    var print *message.Printer
+    if len(t) > 0 {
+        for _, cur := range t {
+            if fmt.Sprintf("%v", cur)[:2] == fmt.Sprintf("%v", language.Russian) {
+                log.Debugf("Creating printer for lang %v", cur)
+                print = message.NewPrinter(cur)
+                break
+            }
+        }
+    }
+
+    if print == nil {
+        log.Infof("Create default printer")
+        print = message.NewPrinter(language.English)
+    }
+
+    context := Context{
+        PageTitle:   print.Sprintf("PageTitle", "Loading"),
+        CardTitle:   print.Sprintf("CardTitle", "Please wait, services are loading"),
+        Details:     print.Sprintf("Details", "Show details"),
+        ServiceName: print.Sprintf("ServiceName", "Service name"),
+        Status:      print.Sprintf("Status", "Status"),
+        SubStatus:   print.Sprintf("SubStatus", "Sub status"),
+    }
+
+    var buf bytes.Buffer
+    err = templ.Execute(&buf, context)
+
+    if err != nil {
+        log.Errorf("Can't render template: %v", err.Error())
+        w.WriteHeader(http.StatusInternalServerError)
+        return
+    }
+
+    w.Write(buf.Bytes())
+}
+
+func httpMatch(r *http.Request, rm *mux.RouteMatch) bool {
+    // This check rejects requests that do not come from localhost.
+    // strings.HasPrefix avoids a panic on remote addresses shorter than the prefix.
+    if strings.HasPrefix(r.RemoteAddr, "[::1]") ||
+        strings.HasPrefix(r.RemoteAddr, "127.0.0.1") ||
+        strings.HasPrefix(r.RemoteAddr, "localhost") {
+        return true
+    }
+    log.Error("Bad remote addr")
+
+    return false
+}
+
+func init() {
+    message.SetString(language.Russian, "PageTitle", "Загрузка")
+    message.SetString(language.Russian, "CardTitle", "Пожалуйста, подождите, сервисы загружаются")
+    message.SetString(language.Russian, "Details", "Подробности")
+    message.SetString(language.Russian, "ServiceName", "Сервис")
+    message.SetString(language.Russian, "Status", "Статус")
+    message.SetString(language.Russian, "SubStatus", "Подстатус")
+
+    message.SetString(language.English, "PageTitle", "Loading")
+    message.SetString(language.English, "CardTitle", "Please wait, services are loading")
message.SetString(language.English, "Details", "Show details") + message.SetString(language.English, "ServiceName", "Service name") + message.SetString(language.English, "Status", "Status") + message.SetString(language.English, "SubStatus", "Sub status") +} + +// Services to check it's state +var services = [...]string{"amccore.service", "amccelery.service", "amccelerybeat.service", "amccorrelator.service", "amcclient.service", "elasticsearch.service", "amcvector.service"} + +func checkServices() ([]Item, error) { + result := make([]Item, 0) + + for _, service := range services { + // Get service status + var out1 bytes.Buffer + cmd1 := exec.Command("systemctl", "show", "-p", "ActiveState", "--value", service) + cmd1.Stdout = &out1 + err := cmd1.Run() + if err != nil { + log.Errorf("Can't get service active state: %v", err.Error()) + return nil, err + } + + var out2 bytes.Buffer + cmd2 := exec.Command("systemctl", "show", "-p", "SubState", "--value", service) + cmd2.Stdout = &out2 + err = cmd2.Run() + if err != nil { + log.Errorf("Can't get service sub state: %v", err.Error()) + return nil, err + } + + result = append(result, Item{ + Name: service, + State: strings.Replace(out1.String(), "\n", "", 1), + SubState: strings.Replace(out2.String(), "\n", "", 1), + }) + } + + return result, nil +} + +func main() { + log.SetFormatter(&log.TextFormatter{}) + log.SetOutput(os.Stdout) + + port := flag.Int("port", 9080, "Port for work") + flag.Parse() + + log.Info("Starting") + router := mux.NewRouter() + router.HandleFunc("/", checkService).Methods("GET").MatcherFunc(httpMatch) + router.HandleFunc("/page", renderPage).Methods("GET").MatcherFunc(httpMatch) + + http.ListenAndServe(fmt.Sprintf(":%v", *port), router) +} diff --git a/cicd/.gitignore b/cicd/.gitignore new file mode 100644 index 0000000..82520ca --- /dev/null +++ b/cicd/.gitignore @@ -0,0 +1 @@ +/tmp/ diff --git a/cicd/config.yml b/cicd/config.yml new file mode 100644 index 0000000..bf0a8da --- /dev/null +++ b/cicd/config.yml @@ -0,0 +1,84 @@ +repo_name: console_core +type: component +projects: + amccore: + variables: + packages: + amccore: + deb: + files: + - include: + - 'deb/skeleton/usr' + - 'deb/skeleton/var' + exclude: + - '.gitkeep' + dst_dir: '' + - include: + - 'assets' + - 'company' + - 'console' + - 'core' + - 'correlation' + - 'dashboard' + - 'devices' + - 'events' + - 'incident' + - 'incident_export' + - 'inputs' + - 'license_info' + - 'logstash' + - 'manage.py' + - 'ncircc' + - 'networkmap' + - 'perms' + - 'rotation' + - 'storage' + - 'users' + - 'notifications' + exclude: + - '.gitignore' + - 'console/static' + - '*test*' + dst_dir: 'usr/local/armaconsole/app' + - include: + - 'console/static' + dst_dir: 'var/www/armaconsole/public' + control: + maintainer: 'arma' + section: 'admin' + depends: 'nginx, sudo, python3, python3-pip, python3-venv, redis, redis-server, gettext, elasticsearch (= 7.12.0), golang, vector (= 0.19.1), postgresql, rabbitmq-server' + pre-depends: 'gcc, make, libpq-dev, python3-dev, openssl, ca-certificates, bash, default-jre, apt-utils, postgresql-contrib' + priority: 'optional' + description: | + ARMA management console (AMC) + AMC manages sensors and monitors events and incidents + arch: + - amd64 + + amcchecker: + variables: + exe_path: 'checker' + packages: + amcchecker: + deb: + files: + - include: + - 'checker/deb/skeleton/DEBIAN' + - 'checker/deb/skeleton/usr' + exclude: + - '.gitkeep' + dst_dir: '' + - include: + - 'checker/checker' + dst_dir: 'usr/local/armaconsole/app/amcchecker' + control: + 
diff --git a/cicd/.gitignore b/cicd/.gitignore
new file mode 100644
index 0000000..82520ca
--- /dev/null
+++ b/cicd/.gitignore
@@ -0,0 +1 @@
+/tmp/
diff --git a/cicd/config.yml b/cicd/config.yml
new file mode 100644
index 0000000..bf0a8da
--- /dev/null
+++ b/cicd/config.yml
@@ -0,0 +1,84 @@
+repo_name: console_core
+type: component
+projects:
+  amccore:
+    variables:
+    packages:
+      amccore:
+        deb:
+          files:
+            - include:
+                - 'deb/skeleton/usr'
+                - 'deb/skeleton/var'
+              exclude:
+                - '.gitkeep'
+              dst_dir: ''
+            - include:
+                - 'assets'
+                - 'company'
+                - 'console'
+                - 'core'
+                - 'correlation'
+                - 'dashboard'
+                - 'devices'
+                - 'events'
+                - 'incident'
+                - 'incident_export'
+                - 'inputs'
+                - 'license_info'
+                - 'logstash'
+                - 'manage.py'
+                - 'ncircc'
+                - 'networkmap'
+                - 'perms'
+                - 'rotation'
+                - 'storage'
+                - 'users'
+                - 'notifications'
+              exclude:
+                - '.gitignore'
+                - 'console/static'
+                - '*test*'
+              dst_dir: 'usr/local/armaconsole/app'
+            - include:
+                - 'console/static'
+              dst_dir: 'var/www/armaconsole/public'
+        control:
+          maintainer: 'arma'
+          section: 'admin'
+          depends: 'nginx, sudo, python3, python3-pip, python3-venv, redis, redis-server, gettext, elasticsearch (= 7.12.0), golang, vector (= 0.19.1), postgresql, rabbitmq-server'
+          pre-depends: 'gcc, make, libpq-dev, python3-dev, openssl, ca-certificates, bash, default-jre, apt-utils, postgresql-contrib'
+          priority: 'optional'
+          description: |
+            ARMA management console (AMC)
+            AMC manages sensors and monitors events and incidents
+          arch:
+            - amd64
+
+  amcchecker:
+    variables:
+      exe_path: 'checker'
+    packages:
+      amcchecker:
+        deb:
+          files:
+            - include:
+                - 'checker/deb/skeleton/DEBIAN'
+                - 'checker/deb/skeleton/usr'
+              exclude:
+                - '.gitkeep'
+              dst_dir: ''
+            - include:
+                - 'checker/checker'
+              dst_dir: 'usr/local/armaconsole/app/amcchecker'
+        control:
+          maintainer: 'arma'
+          section: 'admin'
+          depends: ''
+          pre-depends: ''
+          priority: 'optional'
+          description: |
+            ARMA management console (AMC) service
+            AMC manages sensors and monitors events and incidents
+          arch:
+            - amd64
diff --git a/cicd/go_test.sh b/cicd/go_test.sh
new file mode 100644
index 0000000..1582631
--- /dev/null
+++ b/cicd/go_test.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -e # fail on any error
+
+PKG=$(grep module go.mod | awk '{print $2}')
+PKG_LIST=$(go list ${PKG}/... )
+
+for CUR_PKG in $PKG_LIST
+do
+    go test -timeout 30s -coverprofile=/tmp/go-code-cover $CUR_PKG
+done
\ No newline at end of file
diff --git a/cicd/integration_tests.sh b/cicd/integration_tests.sh
new file mode 100644
index 0000000..b6bc1fb
--- /dev/null
+++ b/cicd/integration_tests.sh
@@ -0,0 +1,87 @@
+#!/bin/bash
+
+trap 'p=$(jobs -p); if [ "$p" != "" ]; then kill -s 9 $p; fi; rm_fd' EXIT
+
+set -x
+
+log_file_path="amc_integ_tests.log"
+
+exec 3> >(tee -a $log_file_path)
+exec 1>&3
+exec 2>&3
+
+
+rm_fd() {
+    exec 1>&-
+    exec 2>&-
+    exec 3>&-
+    wait
+}
+
+CUR_FILE_PATH=$(dirname "$0")
+CUR_FILE_PATH=$(cd "$CUR_FILE_PATH" && pwd)
+PRJ_ROOT_PATH=$(dirname "$CUR_FILE_PATH")
+
+timeout=10
+
+
+function check {
+    if [ "$1" == "$timeout" ]; then
+        echo "timeout. connection to $2 failed"
+        exit 1
+    else
+        echo "connection to $2 established"
+    fi
+}
+
+. $CUR_FILE_PATH/up_license_client.sh
+
+check $client_counter "license_client"
+
+response=0
+counter=0
+
+while [ "$response" != 200 ] && [ "$counter" != "$timeout" ]; do
+    response=$(curl -s -o /dev/null -w "%{http_code}" -u elastic:changeme -X GET http://elasticsearch:9200/)
+    sleep 5s
+    counter=$(( $counter + 1 ))
+done
+
+check $counter "elasticsearch"
+
+
+response=0
+counter=0
+re=^.*PONG.*$
+
+while ! [[ $response =~ ${re} ]] && [ "$counter" != "$timeout" ]; do
+    response=$(echo "PING" | nc -w 2 redis 6379)
+    sleep 5s
+    counter=$(( $counter + 1 ))
+done
+
+check $counter "redis"
+
+
+set -ex
+
+# if [ -d test_env ]; then
+#     rm -rf test_env
+#     echo "old env was removed"
+# fi
+
+#python3 -m virtualenv test_env
+
+#source test_env/bin/activate
+
+pip install --upgrade pip
+
+pip install -r requirements_test.txt
+
+DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations
+
+DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate
+
+DJANGO_SETTINGS_MODULE=console.settings.test coverage run -m pytest --disable-warnings -m integration
+
+coverage html -d public/test_coverage/
diff --git a/cicd/live_fw_tests.sh b/cicd/live_fw_tests.sh
new file mode 100644
index 0000000..5cb01a2
--- /dev/null
+++ b/cicd/live_fw_tests.sh
@@ -0,0 +1,76 @@
+#!/bin/bash
+
+trap 'p=$(jobs -p); if [ "$p" != "" ]; then kill -s 9 $p; fi; rm_fd' EXIT
+
+set -ex
+
+log_file_path="amc_live_fw_tests.log"
+
+exec 3> >(tee -a $log_file_path)
+exec 1>&3
+exec 2>&3
+
+
+rm_fd() {
+    exec 1>&-
+    exec 2>&-
+    exec 3>&-
+    wait
+}
+
+CUR_FILE_PATH=$(dirname "$0")
+CUR_FILE_PATH=$(cd "$CUR_FILE_PATH" && pwd)
+PRJ_ROOT_PATH=$(dirname "$CUR_FILE_PATH")
+
+timeout=16 #1 minute 20 seconds
+
+
+function check {
+    if [ "$1" == "$timeout" ]; then
+        echo "timeout. connection to $2 failed"
+        exit 1
+    else
+        echo "connection to $2 established"
+    fi
+}
+
+. $CUR_FILE_PATH/up_license_client.sh
+
+check $client_counter "license_client"
+
+# if [ -d test_env ]; then
+#     rm -rf test_env
+#     echo "old env was removed"
+# fi
+
+# python3 -m virtualenv test_env
+
+# source test_env/bin/activate
+
+pip install --upgrade pip
+
+pip install -r requirements_test.txt
+
+DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations
+
+DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate
+
+set +e
+
+response=1
+counter=0
+
+while [ "$response" != 0 ] && [ "$counter" != "$timeout" ]; do
+    DJANGO_SETTINGS_MODULE=console.settings.test pytest --disable-warnings devices/tests/test_firewall_live.py -k 'test_live_get_firewall_status_online'
+    response=$(echo $?)
+    sleep 5s
+    counter=$(( $counter + 1 ))
+done
+
+check $counter "live.firewall"
+
+set -e
+
+DJANGO_SETTINGS_MODULE=console.settings.test coverage run -m pytest --disable-warnings -m live_firewall
+
+coverage html -d public/test_coverage/
\ No newline at end of file
diff --git a/cicd/scripts/build.py b/cicd/scripts/build.py
new file mode 100644
index 0000000..65c8b1d
--- /dev/null
+++ b/cicd/scripts/build.py
@@ -0,0 +1,98 @@
+#! /usr/bin/python3
+
+import os
+import argparse
+import subprocess
+import re
+import shutil
+from pathlib import Path
+from distutils.dir_util import copy_tree
+import fileinput
+from dotenv import dotenv_values
+
+
+def parse_arguments() -> argparse.Namespace:
+    parser = argparse.ArgumentParser()
+    parser.add_argument("-n", "--name",
+                        required=False,
+                        type=str,
+                        help="Internal repository part to be built")
+    parser.add_argument("-v", "--version",
+                        required=True,
+                        type=str,
+                        help="Repository version")
+    return parser.parse_args()
+
+
+def main() -> None:
+    args = parse_arguments()
+    if args.name is None or args.name == 'amccore':
+        front_path = "frontend"
+        back_path = "console"
+
+        footer_path = f"{front_path}/src/components/Layout/components/Footer"
+        console_base_path = "console/settings/base.py"
+
+        # PUBLIC_URL for react app
+        react_env = dotenv_values('deb/react.env')
+        os.environ['PUBLIC_URL'] = react_env['PUBLIC_URL']
+
+        # version
+        version_found = False
+
+        # change base.py version
+        with fileinput.input(files=console_base_path, inplace=True) as fileLines:
+            for line in fileLines:
+
+                if re.search('SITE_INFO = {', line) is not None:
+                    version_found = True
+                if version_found and re.search(' *}', line) is not None:
+                    version_found = False
+
+                if version_found:
+                    res = re.sub(r"(.*version': )[^,]*", rf"\g<1>'{args.version}'", line)
+                else:
+                    res = line
+
+                if res is not None:
+                    print(res, flush=True, end="")
+
+        # check environment.js for localhost
+        with open(f'{front_path}/src/enviroments/enviroments.js', 'r') as file:
+            for line in file:
+                if re.match(r'.*localhost: *.*localhost.*', line) is not None:
+                    print('Error: localhost in enviroments.js')
+                    exit(1)
+
+        # -----------------------------build React-------------------------------------
+        res = subprocess.Popen(['npm', 'ci'], cwd=front_path)#, env=react_env)
+        res.wait()
+
+        if res.returncode != 0:
+            print("npm ci failed", flush=True)
+            exit(1)
+        else:
+            print("npm ci succeeded", flush=True)
+
+        res = subprocess.Popen(['npm', 'run', 'build'], cwd=front_path)#, env=react_env)
+        res.wait()
+
+        if res.returncode != 0:
+            print("'npm run build' failed", flush=True)
+            exit(1)
+        else:
+            print("'npm run build' succeeded", flush=True)
+
+        shutil.rmtree(f'{front_path}/node_modules')
+
+        shutil.move(f'{front_path}/build/index.html', f'{back_path}/templates/console/index.html')
+
+        if os.path.exists(f'{back_path}/static/react'):
+            shutil.rmtree(f'{back_path}/static/react')
+
+        Path(f'{back_path}/static/react').mkdir(parents=True, exist_ok=True)
+        copy_tree(f'{front_path}/build/', f'{back_path}/static/react/')
+
+
+if __name__ == "__main__":
+    main()
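The version rewrite in `build.py` above hinges on a single regex substitution inside `SITE_INFO`; a standalone check of that pattern on a made-up input line:

```python
# Same pattern as build.py; the input line and '1.2.3' are illustrative.
import re

line = "        'version': '1.0.0',"
print(re.sub(r"(.*version': )[^,]*", r"\g<1>'1.2.3'", line))
# -> "        'version': '1.2.3',"
```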
diff --git a/cicd/unit_tests.sh b/cicd/unit_tests.sh
new file mode 100644
index 0000000..512c643
--- /dev/null
+++ b/cicd/unit_tests.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+trap 'p=$(jobs -p); if [ "$p" != "" ]; then kill -s 9 $p; fi; rm_fd' EXIT
+
+set -ex
+
+log_file_path="amc_unit_tests.log"
+
+exec 3> >(tee -a $log_file_path)
+exec 1>&3
+exec 2>&3
+
+rm_fd() {
+    exec 1>&-
+    exec 2>&-
+    exec 3>&-
+    wait
+}
+
+CUR_FILE_PATH=$(dirname "$0")
+CUR_FILE_PATH=$(cd "$CUR_FILE_PATH" && pwd)
+PRJ_ROOT_PATH=$(dirname "$CUR_FILE_PATH")
+
+
+function check {
+    if [ "$1" == "$timeout" ]; then
+        echo "timeout. connection to $2 failed"
+        exit 1
+    else
+        echo "connection to $2 established"
+    fi
+}
+
+. $CUR_FILE_PATH/up_license_client.sh
+
+check $client_counter "license_client"
+
+# if [ -d test_env ]; then
+#     rm -rf test_env
+#     echo "old env was removed"
+# fi
+
+if [ -f db.sqlite3 ]; then
+    rm -f db.sqlite3
+    echo "old db.sqlite3 was removed"
+fi
+
+#python3 -m virtualenv test_env
+
+#source test_env/bin/activate
+
+pip install --upgrade pip
+
+pip install -r requirements_test.txt
+
+DJANGO_SETTINGS_MODULE=console.settings.test python manage.py makemigrations
+
+DJANGO_SETTINGS_MODULE=console.settings.test python manage.py migrate
+
+DJANGO_SETTINGS_MODULE=console.settings.test coverage run -m pytest --disable-warnings -m unit
+
+coverage html -d public/test_coverage/
\ No newline at end of file
diff --git a/cicd/up_license_client.sh b/cicd/up_license_client.sh
new file mode 100644
index 0000000..c2f486f
--- /dev/null
+++ b/cicd/up_license_client.sh
@@ -0,0 +1,64 @@
+#!/bin/bash
+
+cur_flags=$-
+echo "current flags: $cur_flags"
+
+set -e
+
+CUR_PATH=$(dirname "$0")
+CUR_PATH=$(cd "$CUR_PATH" && pwd)
+ROOT_PATH=$(dirname "$CUR_PATH")
+LICENSE_PATH="$ROOT_PATH/license"
+
+echo "starting license client"
+echo "license client home path: $LICENSE_PATH"
+
+cd $LICENSE_PATH
+
+echo -e "127.0.0.1\tlicense-client" >> /etc/hosts
+
+touch /etc/machine-id
+
+export GOPROXY='http://nexus.iwarma.ru/repository/proxy-go/' \
+    GOPRIVATE='gitlab.iwarma.ru' \
+    GONOPROXY='gitlab.iwarma.ru'
+
+NETRC_PATH="$CUR_PATH/.netrc"
+
+echo "machine gitlab.iwarma.ru" > $NETRC_PATH
+echo "login $NETRC_USER" >> $NETRC_PATH
+echo "password $NETRC_TOKEN" >> $NETRC_PATH
+
+mv $NETRC_PATH /root/ # you may need to comment out extra DNS entries in /etc/resolv.conf
+go mod tidy
+go build -ldflags "-s -w"
+
+./client --config config_example.json > /dev/null 2>&1 &
+
+client_response=0
+client_counter=0
+client_timeout=10
+
+set +e
+
+while [ "$client_response" != 200 ] && [ "$client_response" != 404 ] && [ "$client_counter" != "$client_timeout" ]; do
+    client_response=$(curl -s -o /dev/null -w "%{http_code}" -X GET http://license-client:8050/license/)
+    sleep 1s
+    client_counter=$(( $client_counter + 1 ))
+done
+
+echo "client response: $client_response"
+echo "client counter: $client_counter"
+
+#client_PID=$(pidof client)
+
+cd $ROOT_PATH
+
+case "e" in
+    "$cur_flags")
+        set -e
+        ;;
+    *)
+        set +e
+        ;;
+esac
\ No newline at end of file
diff --git a/company/__init__.py b/company/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/company/admin.py b/company/admin.py
new file mode 100644
index 0000000..935c823
--- /dev/null
+++ b/company/admin.py
@@ -0,0 +1,14 @@
+from django.contrib import admin
+
+from company.models.company import Company
+from company.models.location import LocationCode
+
+
+@admin.register(Company)
+class CompanyAdmin(admin.ModelAdmin):
+    pass
+
+
+@admin.register(LocationCode)
+class LocationCodeAdmin(admin.ModelAdmin):
+    pass
diff --git a/company/apps.py b/company/apps.py
new file mode 100644
index 0000000..3d1e229
--- /dev/null
+++ b/company/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class CompanyConfig(AppConfig):
+    name = 'company'
diff --git a/company/migrations/__init__.py b/company/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/company/models/__init__.py b/company/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/company/models/company.py b/company/models/company.py
new file mode 100644
index 0000000..ced3e24
--- /dev/null
+++ b/company/models/company.py
@@ -0,0 +1,37 @@
+from django.db import models
+from django.utils.translation import gettext_lazy
+
+from company.models.location import LocationCode
+from ncircc.enums.notifications import AffectedSystemFunction
+
+
+class Company(models.Model):
+    """Model with Company information for NCIRCC."""
+
+    name = models.CharField(gettext_lazy('Name of the organization'), max_length=127)
+    is_cii = models.BooleanField(gettext_lazy('Subject CII'), default=False)
+    location = models.ForeignKey(
+        LocationCode, null=True,
+        on_delete=models.SET_NULL,
+        verbose_name=gettext_lazy('Country/Region code'),
+    )
+    city = models.CharField(gettext_lazy('City'), max_length=127)
+    affected_system_function = models.CharField(
+        gettext_lazy('Affected system function'),
+        choices=AffectedSystemFunction.choices,
+        default=AffectedSystemFunction.NUCLEAR_POWER.value,
+        max_length=127,
+    )
+    api_key = models.CharField(
+        gettext_lazy('Token'),
+        max_length=127,
+        null=True,
+        help_text=gettext_lazy('Token access to NCIRCC API'),
+    )
+
+    class Meta:
+        verbose_name = gettext_lazy('Company')
+        verbose_name_plural = gettext_lazy('Companies')
+
+    def __str__(self):
+        return self.name
diff --git a/company/models/location.py b/company/models/location.py
new file mode 100644
index 0000000..68bc112
--- /dev/null
+++ b/company/models/location.py
@@ -0,0 +1,15 @@
+from django.db import models
+from django.utils.translation import gettext_lazy
+
+
+class LocationCode(models.Model):
+    """Directory of Country/Region codes in ISO-3166-2 format."""
+
+    code = models.CharField(gettext_lazy('code'), max_length=15, help_text='Format from ISO-3166-2')
+
+    class Meta:
+        verbose_name = gettext_lazy('Country/Region code')
+        verbose_name_plural = gettext_lazy('Country/Region codes')
+
+    def __str__(self) -> str:
+        return self.code
diff --git a/company/serializers/__init__.py b/company/serializers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/company/serializers/company.py b/company/serializers/company.py
new file mode 100644
index 0000000..9757c5f
--- /dev/null
+++ b/company/serializers/company.py
@@ -0,0 +1,22 @@
+from rest_framework import serializers
+
+from company.models.company import Company
+from company.serializers.location import LocationSerializer
+
+
+class CompanySerializer(serializers.ModelSerializer):
+    """Serializer for retrieving a company."""
+
+    location = LocationSerializer()
+
+    class Meta:
+        model = Company
+        fields = ('name', 'is_cii', 'location', 'city', 'affected_system_function', 'api_key')
+
+
+class CompanyCreateSerializer(serializers.ModelSerializer):
+    """Serializer for creating and updating Company."""
+
+    class Meta:
+        model = Company
+        fields = ('name', 'is_cii', 'location', 'city', 'affected_system_function', 'api_key')
diff --git a/company/serializers/location.py b/company/serializers/location.py
new file mode 100644
index 0000000..e9bf0c8
--- /dev/null
+++ b/company/serializers/location.py
@@ -0,0 +1,11 @@
+from rest_framework import serializers
+
+from company.models.location import LocationCode
+
+
+class LocationSerializer(serializers.ModelSerializer):
+    """Serializer for ISO-3166-2 location codes."""
+
+    class Meta:
+        model = LocationCode
+        fields = ('id', 'code')
diff --git a/company/services/__init__.py b/company/services/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/company/services/company_create_update.py b/company/services/company_create_update.py
new file mode 100644
index 0000000..c5cecc0
--- /dev/null
+++ b/company/services/company_create_update.py
@@ -0,0 +1,48 @@
+import logging
+from typing import Optional, Dict, Any
+
+from company.models.company import Company
+from company.serializers.company import CompanySerializer
+
+_log = logging.getLogger(__name__)
+
+
+class CompanyCreateAndUpdateService:
+    """Service for creating and updating a company."""
+
+    def __init__(self, company: Optional[Company], data: Dict[str, Any]):
+        _log.debug('Start create or update company')
+        self.company = company
+        self.data = self.prepare_data(data.copy())
+
+    def prepare_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
+        """Prepare data for creating and updating a company."""
+        location_id = data.get('location')
+        if location_id:
+            data['location_id'] = location_id
+            del data['location']
+        return data
+
+    def _update(self) -> Company:
+        """Update company data."""
+        _log.debug(f'Update company: {self.company}')
+        company = self.company
+        for attr, value in self.data.items():
+            setattr(company, attr, value)
+        company.save()
+        return company
+
+    def _create(self) -> Company:
+        """Create company."""
+        company = Company.objects.create(**self.data)
+        _log.debug(f'Create company: {company}')
+        return company
+
+    def save(self) -> Dict:
+        """Save company data."""
+        _log.debug(f'Save company data: {self.company}')
+        if self.company is None:
+            company = self._create()
+        else:
+            company = self._update()
+        return CompanySerializer(company).data
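A short sketch of the service's two paths, mirroring the unit tests later in this patch (field values are illustrative):

```python
# Passing company=None takes the create path; an instance takes the update path.
from company.models.company import Company
from company.services.company_create_update import CompanyCreateAndUpdateService

created = CompanyCreateAndUpdateService(
    None, {'name': 'ACME', 'is_cii': False, 'city': 'Moscow'}).save()
company = Company.objects.first()
updated = CompanyCreateAndUpdateService(company, {'name': 'ACME-2'}).save()
assert updated['name'] == 'ACME-2'  # .save() returns the serialized data
```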
diff --git a/company/static/ncircc_files/organization_info.xlsx b/company/static/ncircc_files/organization_info.xlsx
new file mode 100644
index 0000000000000000000000000000000000000000..17c3fdee1365df52764360f954627438e65de50b
GIT binary patch
literal 16605
[binary xlsx payload omitted]
literal 0
HcmV?d00001
diff --git a/company/tests/__init__.py b/company/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/company/tests/test_company_api.py b/company/tests/test_company_api.py
new file mode 100644
index 0000000..997dba1
--- /dev/null
+++ b/company/tests/test_company_api.py
@@ -0,0 +1,98 @@
+import pytest
+from django.urls import reverse
+from rest_framework.test import APIRequestFactory, force_authenticate
+from rest_framework import status
+
+from company.models.company import Company
+from company.models.location import LocationCode
+from company.views.company_api import CompanyApiView
+from ncircc.enums.notifications import AffectedSystemFunction
+
+
+@pytest.mark.django_db
+class TestCompanyApi:
+    url = reverse('company_api')
+    user = None
+
+    @pytest.fixture(autouse=True)
+    def setup_test(self, add_user_with_permissions):
+        self.user = add_user_with_permissions(username='test_api_user', password='TestApiPass123', is_superuser=True)
+        self.location, _ = LocationCode.objects.get_or_create(code='RU-MOS')
+
+    @pytest.mark.unit
+    def test_create_company_with_valid_data(self):
+        """Test creating company through api with valid data."""
+        count_before = Company.objects.count()
+        assert not count_before
+
+        data = {
+            'name': 'Test_NAME_company',
+            'is_cii': False,
+            'location': self.location.id,
+            'city': 'Moscow',
+            'affected_system_function': AffectedSystemFunction.NUCLEAR_POWER.value,
+        }
+        request = APIRequestFactory().post(self.url, data)
+        force_authenticate(request, self.user)
+        view = CompanyApiView.as_view()
+        response = view(request)
+        count_after = Company.objects.count()
+        assert response.status_code == 200
+        assert count_after == 1
+        assert response.data['name'] == data['name']
+        assert response.data['location']['id'] == self.location.id
+        assert response.data['location']['code'] == self.location.code
+
+    @pytest.mark.unit
+    def test_create_company_with_not_valid_data(self):
+        """Test creating company through api with invalid data."""
+        data = {
+            'name': 'Test_NAME_company',
+        }
+        request = APIRequestFactory().post(self.url, data)
+        force_authenticate(request, self.user)
+        view = CompanyApiView.as_view()
+        response = view(request)
+        count_after = Company.objects.count()
+        assert response.status_code == 400
+        assert count_after == 0
+        assert 'city' in response.data
+    @pytest.mark.unit
+    def test_update_company_with_valid_data(self):
+        """Test updating company data through api."""
+        Company.objects.create(name='Test_name_1', city='Moscow')
+        count_before = Company.objects.count()
+        assert count_before == 1
+
+        data = {'name': 'Test_name_2', 'is_cii': True, 'city': 'Moscow'}
+        request = APIRequestFactory().post(self.url, data)
+        force_authenticate(request, self.user)
+        view = CompanyApiView.as_view()
+        response = view(request)
+        count_after = Company.objects.count()
+        assert response.status_code == 200
+        assert count_after == count_before
+        assert response.data['name'] == data['name']
+        assert response.data['is_cii'] == data['is_cii']
+
+    @pytest.mark.unit
+    def test_get_company_data_with_created_company(self):
+        Company.objects.create(name='Test_name_1', city='Moscow')
+        count = Company.objects.count()
+        assert count == 1
+
+        request = APIRequestFactory().get(self.url)
+        force_authenticate(request, self.user)
+        view = CompanyApiView.as_view()
+        response = view(request)
+        assert response.status_code == 200
+
+    @pytest.mark.unit
+    def test_get_company_data_without_company(self, api_client):
+        api_client.force_authenticate(self.user)
+        count = Company.objects.count()
+        assert not count
+        response = api_client.get(self.url)
+        assert response.status_code == status.HTTP_200_OK
+        assert response.json() == {"details": "company not initialized"}
diff --git a/company/tests/test_company_services.py b/company/tests/test_company_services.py
new file mode 100644
index 0000000..042796c
--- /dev/null
+++ b/company/tests/test_company_services.py
@@ -0,0 +1,57 @@
+import pytest
+
+from company.models.company import Company
+from company.models.location import LocationCode
+from company.services.company_create_update import CompanyCreateAndUpdateService
+from ncircc.enums.notifications import AffectedSystemFunction
+
+
+@pytest.mark.django_db
+class TestCreateOrUpdateCompany:
+    @pytest.fixture(autouse=True)
+    def setup_test(self):
+        self.location, _ = LocationCode.objects.get_or_create(code='RU-MOS')
+        self.data_for_create = {
+            'name': 'TestNAMEcompany',
+            'is_cii': False,
+            'location': self.location.id,
+            'city': 'Moscow',
+            'affected_system_function': AffectedSystemFunction.OTHER.value,
+            'api_key': ''
+        }
+
+    @pytest.mark.unit
+    def test_create_company(self):
+        count_before = Company.objects.count()
+        assert count_before == 0
+        data = CompanyCreateAndUpdateService(None, self.data_for_create).save()
+
+        assert data['name'] == 'TestNAMEcompany'
+        assert not data['is_cii']
+        assert data['location'] == {'id': self.location.id, 'code': self.location.code}
+        assert data['city'] == 'Moscow'
+        assert data['affected_system_function'] == AffectedSystemFunction.OTHER.value
+        assert data['api_key'] == ''
+
+        count_after = Company.objects.count()
+        assert count_after == 1
+
+    @pytest.mark.unit
+    def test_update_partially(self):
+        data_for_create = self.data_for_create.copy()
+        data_for_create['location_id'] = self.location.id
+        del data_for_create['location']
+        company = Company.objects.create(**data_for_create)
+        count_before = Company.objects.count()
+        assert count_before == 1
+        data_for_update = {
+            'name': 'TestNameCompany2',
+            'is_cii': True,
+            'affected_system_function': AffectedSystemFunction.NUCLEAR_POWER.value,
+        }
+        data = CompanyCreateAndUpdateService(company, data_for_update).save()
+        count_after = Company.objects.count()
+        assert count_after == count_before
+        assert data.get('name') == data_for_update.get('name')
+        assert data.get('is_cii') == data_for_update.get('is_cii')
+        assert data.get('affected_system_function') == 'Атомная энергетика'
diff --git a/company/tests/test_locations_api.py b/company/tests/test_locations_api.py
new file mode 100644
index 0000000..15cefed
--- /dev/null
+++ b/company/tests/test_locations_api.py
@@ -0,0 +1,26 @@
+import pytest
+from django.urls import reverse
+from rest_framework.test import APIClient
+
+
+@pytest.mark.django_db
+class TestLocationApi:
+    url = reverse('locations_api')
+    user = None
+
+    @pytest.fixture(autouse=True)
+    def setup_test(self, add_user_with_permissions):
+        self.user = add_user_with_permissions(username='test_api_user', password='TestApiPass123', is_superuser=True)
+        self.client = APIClient()
+        self.client.force_authenticate(user=self.user)
+
+    @pytest.mark.unit
+    def test_get_location_list(self):
+        """Test that the view returns status 200 and a location list with code and id."""
+        response = self.client.get(self.url)
+        assert response.status_code == 200
+        data = response.json()
+        assert isinstance(data, list)
+        assert len(data)
+        assert 'id' in data[0]
+        assert 'code' in data[0]
diff --git a/company/urls.py b/company/urls.py
new file mode 100644
index 0000000..b5b0099
--- /dev/null
+++ b/company/urls.py
@@ -0,0 +1,10 @@
+from django.urls import path
+
+from company.views.company_api import CompanyApiView
+from company.views.location_api import LocationCodeApi
+
+
+urlpatterns = [
+    path('', CompanyApiView.as_view(), name='company_api'),
+    path('locations/', LocationCodeApi.as_view(), name='locations_api')
+]
diff --git a/company/views/__init__.py b/company/views/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/company/views/company_api.py b/company/views/company_api.py
new file mode 100644
index 0000000..345160a
--- /dev/null
+++ b/company/views/company_api.py
@@ -0,0 +1,47 @@
+from typing import Optional
+
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from company.models.company import Company
+from company.serializers.company import CompanySerializer, CompanyCreateSerializer
+from company.services.company_create_update import CompanyCreateAndUpdateService
+from core.mixins import ApiPermissionCheckMixin
+from perms.models import Perm
+
+
+class CompanyApiView(ApiPermissionCheckMixin, APIView):
+    action = None
+    console_permissions = {'get': [Perm.can_view_company_card], 'post': [Perm.can_edit_company_card]}
+
+    def dispatch(self, request, *args, **kwargs):
+        if request.method == 'GET':
+            self.action = 'get'
+        elif request.method == 'POST':
+            self.action = 'post'
+        else:
+            self.action = None
+
+        return super().dispatch(request, *args, **kwargs)
+
+    def get_company(self) -> Optional[Company]:
+        """Return the first Company or None."""
+        return Company.objects.first()
+
+    def get(self, request, *args, **kwargs) -> Response:
+        """Return company data."""
+        company = self.get_company()
+        if company is None:
+            return Response({"details": "company not initialized"})
+        data = CompanySerializer(company).data
+        return Response(data)
+
+    def post(self, request, *args, **kwargs) -> Response:
+        """Create or update the company in the database."""
+        serializer = CompanyCreateSerializer(data=request.data)
+        if not serializer.is_valid():
+            return Response(serializer.errors, status=400)
+        company = self.get_company()
+        data = CompanyCreateAndUpdateService(company, serializer.data).save()
+        return Response(data, status=200)
diff --git a/company/views/location_api.py b/company/views/location_api.py
new file mode 100644
index 0000000..8369d44
--- /dev/null
+++ b/company/views/location_api.py
@@ -0,0 +1,14 @@
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from company.models.location import LocationCode
+from company.serializers.location import LocationSerializer
+
+
+class LocationCodeApi(APIView):
+    """View that returns the location list."""
+
+    def get(self, request, *args, **kwargs) -> Response:
+        locations = LocationCode.objects.all()
+        data = LocationSerializer(locations, many=True).data
+        return Response(data)
diff --git a/conftest.py b/conftest.py
new file mode 100644
index 0000000..ea553d8
--- /dev/null
+++ b/conftest.py
@@ -0,0 +1,53 @@
+import os
+from pathlib import Path
+
+import pytest
+import pytest_asyncio
+from channels.db import database_sync_to_async
+from django.contrib.auth.hashers import make_password
+from django.contrib.auth.models import User
+from rest_framework.test import APIClient
+
+from core.utils import dtnow
+from users.models import UserInfo
+# noinspection PyUnresolvedReferences
+from console.tests.test_utils import add_user_with_permissions, test_server, get_url
+# noinspection PyUnresolvedReferences
+from networkmap.tests.migration_fixtures import create_filter_test_data, create_elk_function_test_data
+
+
+@pytest.fixture(scope='function')
+def add_admin_user_migration():
+    """ Fixture for adding the admin user from the 0003_add_admin_user migration """
+    if not User.objects.filter(username='admin').exists():
+        UserInfo.create_user(username='admin',
+                             password=make_password('nimda'),
+                             email='admin@example.com',
+                             is_superuser=True,
+                             is_staff=True,
+                             expire_date=dtnow(days=700).date(),
+                             comment='admin')
+
+
+@pytest.fixture(scope='function')
+def api_client():
+    """ Fixture for creating an api_client """
+
+    api_client = APIClient()
+    return api_client
+
+
+@pytest.fixture(autouse=True)
+def remove_files_after_test_vector_config():
+    TMP_DIR_VECTOR = '/tmp/vector'
+
+    Path(TMP_DIR_VECTOR).mkdir(parents=True, exist_ok=True)
+    yield
+    files = os.listdir(TMP_DIR_VECTOR)
+    for file in files:
+        os.remove(os.path.join(TMP_DIR_VECTOR, file))
+
+
+@pytest_asyncio.fixture
+async def async_admin_user(django_user_model):
+    return await database_sync_to_async(lambda: django_user_model.objects.get())()
diff --git a/console/__init__.py b/console/__init__.py
new file mode 100644
index 0000000..d128d39
--- /dev/null
+++ b/console/__init__.py
@@ -0,0 +1,7 @@
+from __future__ import absolute_import, unicode_literals
+
+# This will make sure the app is always imported when
+# Django starts so that shared_task will use this app.
+from .celery import app as celery_app
+
+__all__ = ('celery_app',)
\ No newline at end of file
diff --git a/console/admin.py b/console/admin.py
new file mode 100644
index 0000000..0778170
--- /dev/null
+++ b/console/admin.py
@@ -0,0 +1,34 @@
+from django.contrib import admin
+
+from django.contrib.auth.models import User
+from django.contrib.postgres import fields
+from django_json_widget.widgets import JSONEditorWidget
+
+from console import models
+from networkmap import models as netmap_models
+
+
+class IncidentAdmin(admin.ModelAdmin):
+    date_hierarchy = 'timestamp'
+    list_display = ('incident_id', 'timestamp', 'title', 'category', 'importance', 'status', 'assigned_to')
+    list_filter = ('category', 'status', 'assigned_to')
+    list_display_links = ['incident_id']
+
+    formfield_overrides = {
+        fields.JSONField: {'widget': JSONEditorWidget},
+    }
+
+
+class ConnectionAdmin(admin.ModelAdmin):
+    list_display = ['pk', 'src_asset', 'dst_asset', 'src_port', 'dst_port', 'updated']
+    list_filter = ['src_asset', 'dst_asset']
+    list_display_links = ['pk']
+
+
+# Register console and network-map models with the admin site
+admin.site.register(models.VulnerabilityEffect)
+admin.site.register(models.VulnerabilityRecommendations)
+admin.site.register(models.Vulnerability)
+admin.site.register(models.ConnectionType)
+admin.site.register(models.Connection, ConnectionAdmin)
+admin.site.register(netmap_models.NetworkMap)
diff --git a/console/api/__init__.py b/console/api/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/console/api/base.py b/console/api/base.py
new file mode 100644
index 0000000..5b13df4
--- /dev/null
+++ b/console/api/base.py
@@ -0,0 +1,25 @@
+from django.contrib.auth.decorators import login_required, user_passes_test
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from console.serializers import CelerySerializer
+from core.decorators import log_url
+from storage.tasks import dump_db
+
+
+@log_url
+@login_required
+@user_passes_test(lambda u: u.is_superuser)
+@api_view(('GET',))
+def download_db_snapshot(request):
+    """ Downloads a fixture to restore the current DB state, for test and dev purposes only.
+    The request's 'models' parameter controls which tables are dumped, for example 'auth,console.userinfo' """
+    dump_models = request.GET.get('models', tuple())
+    if dump_models:
+        dump_models = dump_models.split(',')
+
+    res = dump_db.apply_async(args=(request.user.pk, dump_models))
+    serializer = CelerySerializer(data={'task_id': res.id})
+    serializer.is_valid()
+
+    return Response(serializer.data)
diff --git a/console/api/celery.py b/console/api/celery.py
new file mode 100644
index 0000000..c2fc0cd
--- /dev/null
+++ b/console/api/celery.py
@@ -0,0 +1,31 @@
+from celery.result import AsyncResult
+from django.contrib.auth.decorators import login_required
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from console.serializers import CelerySerializer
+from core.decorators import log_url
+
+
+@log_url
+@login_required
+@api_view(('GET',))
+def check_task_state(request, task_id):
+    """ Check that celery task is finished
+    :param task_id: Celery task ID
+    """
+    result = AsyncResult(task_id)
+    finished = result.successful()
+    # result.get() blocks (or re-raises) for unfinished tasks, so the result
+    # is only fetched once the task has completed successfully
+    serializer = CelerySerializer(
+        data={'task_id': task_id,
+              'finished': finished,
+              'result': result.get() if finished else None
+              })
+
+    serializer.is_valid()
+    return Response(serializer.data)
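A hypothetical client-side loop against `check_task_state`; the exact URL depends on how `console.api_urls` wires the view, and the `task_id` comes from an endpoint such as `download_db_snapshot` above:

```python
# Illustrative polling; the URL shape and task_id are placeholders.
import time
import requests

task_id = '<celery task id>'
while True:
    state = requests.get(f'https://console.example/api/celery/{task_id}/').json()
    if state['finished']:
        print('result:', state['result'])
        break
    time.sleep(1)
```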
diff --git a/console/api/connections.py b/console/api/connections.py
new file mode 100644
index 0000000..abeb386
--- /dev/null
+++ b/console/api/connections.py
@@ -0,0 +1,22 @@
+import logging
+
+from rest_framework.mixins import RetrieveModelMixin, ListModelMixin
+from rest_framework.viewsets import GenericViewSet
+
+from console.models import Connection
+from console.serializers import ConnectionSerializer
+from core.mixins import ApiPermissionCheckMixin
+from perms.models import Perm
+
+_log = logging.getLogger(__name__)
+
+
+class ConnectionViewSet(ApiPermissionCheckMixin, RetrieveModelMixin, ListModelMixin,
+                        GenericViewSet):
+    serializer_class = ConnectionSerializer
+    console_permissions = [Perm.can_view_network]
+
+    class Meta:
+        model = Connection
+
+    queryset = Connection.objects.all()
diff --git a/console/api/events.py b/console/api/events.py
new file mode 100644
index 0000000..e69de29
diff --git a/console/api/incidents.py b/console/api/incidents.py
new file mode 100644
index 0000000..e69de29
diff --git a/console/api/users.py b/console/api/users.py
new file mode 100644
index 0000000..c3c4a2a
--- /dev/null
+++ b/console/api/users.py
@@ -0,0 +1,332 @@
+import json
+import logging
+
+from django.contrib.auth.decorators import login_required, permission_required
+from django.contrib.auth.models import User, Permission, Group
+from django.http import JsonResponse, HttpResponseNotAllowed
+from django.shortcuts import get_object_or_404
+from rest_framework import status
+from rest_framework.decorators import api_view
+from rest_framework.response import Response
+
+from console import conslog
+from console.serializers import AllPermsSerializer, GroupNameSerializer
+from core.decorators import log_url
+from perms.models import Perm
+from perms.services.get_permissions import get_all_linked_permissions, get_linked_permissions_name
+from users.serializers import UserSerializers
+
+_log = logging.getLogger(__name__)
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+def all_perms(request):
+    """ API for getting the list of all console permissions
+    :param request: request object
+    :return: JSON with all console permissions
+    """
+    if request.method == 'GET':
+        permissions = Permission.objects.all().exclude(
+            name='Can view vulnerabilities')
+        serializer = AllPermsSerializer(permissions, many=True)
+        return JsonResponse(serializer.data, safe=False)
+    else:
+        # A plain Django view cannot return a DRF Response, so answer with HTTP 405
+        return HttpResponseNotAllowed(['GET'])
+
+
+@log_url
+@login_required
+def get_linked_permissions(request):
+    permissions = request.GET.getlist('permissions[]', [])
+    data = get_linked_permissions_name(permissions)
+    return JsonResponse(data)
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+def user_perms(request, pk):
+    """ API for getting user permissions
+    :param request: request object
+    :param pk: target user primary key
+    :return: JSON with target user permissions
+    """
+    if request.method == 'GET':
+        user_by_id = get_object_or_404(User, pk=pk)
+        user_perms = Permission.objects.filter(user=user_by_id)
+        serializer = AllPermsSerializer(user_perms, many=True)
+        return JsonResponse(serializer.data, safe=False)
+    else:
+        return HttpResponseNotAllowed(['GET'])
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+def all_groups(request):
+    """ API for getting all console groups
+    :param request: request object
+    :return: JSON with all console groups
+    """
+
+    if request.method == 'GET':
+        groups = Group.objects.all()
+        serializer = GroupNameSerializer(groups, many=True)
+        return JsonResponse(serializer.data, safe=False)
+    else:
+        return HttpResponseNotAllowed(['GET'])
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+def group_perms(request, g_name):
+    """ API for getting target group permissions
+    :param request: request object
+    :param g_name: target group name
+    :return: JSON with target group permissions
+    """
+    if request.method == 'GET':
+        target_group = Group.objects.filter(name=g_name)
+        g_perms = Permission.objects.filter(content_type__app_label="perms", group__in=target_group).exclude(
+            name='Can view vulnerabilities')
+        serializer = AllPermsSerializer(g_perms, many=True)
+        return JsonResponse(serializer.data, safe=False)
+    else:
+        return HttpResponseNotAllowed(['GET'])
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+def all_perms_wo_group(request, g_name):
+    """ API for getting the permissions which the target group doesn't have
+    :param request: request object
+    :param g_name: target group name
+    :return: JSON with the permissions which the target group doesn't have
+    """
+
+    if request.method == 'GET':
+        target_group = Group.objects.filter(name=g_name)
+        perm_codenames = []
+        g_perms = Permission.objects.filter(content_type__app_label="perms", group__in=target_group)
+        for perm in g_perms:
+            perm_codenames.append(perm.codename)
+        exc_perms = Permission.objects.filter(content_type__app_label="perms").exclude(
+            codename__in=perm_codenames
+        ).exclude(name='Can view vulnerabilities')
+        serializer = AllPermsSerializer(exc_perms, many=True)
+        return JsonResponse(serializer.data, safe=False)
+    else:
+        return HttpResponseNotAllowed(['GET'])
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+def group_users(request, g_name):
+    """ API for getting the users assigned to the target group
+    :param request: request object
+    :param g_name: target group name
+    :return: JSON with target group users
+    """
+    if request.method == 'GET':
+        users = User.objects.filter(groups__name=g_name).exclude(username__startswith='deleted_')
+        serializer = UserSerializers(users, many=True)
+        return JsonResponse(serializer.data, safe=False)
+    else:
+        return HttpResponseNotAllowed(['GET'])
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+def group_users_excluded(request, g_name):
+    """ API for getting the users which are not assigned to the target group
+    :param request: request object
+    :param g_name: target group name
+    :return: JSON with serialized user data
+    """
+    group_users = User.objects.filter(groups__name=g_name)
+    group_users_usernames = []
+    for user in group_users:
+        group_users_usernames.append(user.username)
+    available_users = User.objects.exclude(username__in=group_users_usernames).filter(is_active=True)
+    serializer = UserSerializers(available_users, many=True)
+    return JsonResponse(serializer.data, safe=False)
+    """ API for updating target group permissions with the information provided by the user
+    :param request: request object
+    :param g_name: target group name
+    :return: JSON with serialized permissions data
+    """
+    # Receiving JSON with perms from the top right table on the 'manage_rights' page
+    table_data = json.loads(request.POST.get('perms'))
+
+    # Setting the new permissions from the permission list provided by the user
+    perm_codenames = []
+    # Getting the list of codenames that the user chose to apply to the target group
+    for item in table_data:
+        perm_codenames.append(item.get('codename'))
+    # Getting the target group
+    target_group = Group.objects.get(name=g_name)
+    # Clearing all permissions from the group
+    target_group.permissions.clear()
+    # Adding the permissions chosen by the user to the target group
+    perm_codenames = get_all_linked_permissions(perm_codenames)
+    permissions = Permission.objects.filter(codename__in=perm_codenames)
+    target_group.permissions.set(permissions)
+    serializer = AllPermsSerializer(permissions, many=True)
+    log_message = f"User [{request.user}] updated the [[{g_name}]] group permissions. New group permissions: {perm_codenames}"
+    conslog.add_info_log(log_message, _log)
+
+    return JsonResponse(serializer.data, safe=False)
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+@api_view(['POST'])
+def update_group_users(request, g_name):
+    """ API for updating the group users set
+    :param request: request object
+    :param g_name: target group name from the user
+    :return: JSON with serialized users data
+    """
+    users = User.objects.filter(groups__name=g_name)
+    target_group = Group.objects.get(name=g_name)
+    # Getting the list of usernames from the bottom right table in JSON format
+    users_new = json.loads(request.POST.get('users'))
+    usernames_after = []
+    for user in users_new:
+        usernames_after.append(user.get('username'))
+
+    # Removing all users from the group
+    target_group.user_set.clear()
+
+    # Adding the users provided by the operator to the group
+    for username in usernames_after:
+        target_group.user_set.add(User.objects.get(username=username))
+    serializer = UserSerializers(users, many=True)
+    log_message = f"User [{request.user}] updated the [[{g_name}]] group users. New group users: {usernames_after}"
+    conslog.add_info_log(log_message, _log)
+    return JsonResponse(serializer.data, safe=False)
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+@api_view(['POST'])
+def delete_group_api(request, g_name):
+    """ API for deleting the group
+    :param request: request object
+    :param g_name: target group name
+    :return: JSON with serialized groups data
+    """
+    target_group = Group.objects.get(name=g_name)
+    serializer = GroupNameSerializer(target_group)
+    target_group.delete()
+    log_message = f"User [{request.user}] deleted [[{g_name}]] group"
+    conslog.add_info_log(log_message, _log)
+    return JsonResponse(serializer.data)
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+@api_view(['POST'])
+def add_new_group(request, g_name):
+    """ API for adding a new group
+    :param request: request object
+    :param g_name: target group name
+    :return: JSON with serialized groups data
+    """
+    # Preparing data for the serializer (it needs a dict value; the request supplies a string)
+    dict_name = {'name': g_name}
+    serializer = GroupNameSerializer(data=dict_name)
+    if serializer.is_valid():
+        serializer.save()
+        log_message = f"User [{request.user}] created a [[{g_name}]] group"
+        conslog.add_info_log(log_message, _log)
+        return Response(serializer.data)
+    else:
+        log_message = f"User [{request.user}] failed to create a new group [[{g_name}]] due to the error: {serializer.errors}"
+        conslog.add_error_log(log_message, _log)
+        return JsonResponse(serializer.errors, status=400)
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+@api_view(['POST'])
+def rename_group(request, g_name, new_g_name):
+    """ API for renaming the target group
+    :param request: request object
+    :param g_name: target group name
+    :param new_g_name: new target group name
+    """
+    # Preparing data for the serializer (it needs a dict value; the request supplies a string)
+    dict_name = {'name': new_g_name}
+    serializer = GroupNameSerializer(data=dict_name)
+    if serializer.is_valid():
+        upd_group = Group.objects.get(name=g_name)
+        upd_group.name = serializer.data['name']
+        upd_group.save()
+        log_message = f"User [{request.user}] renamed [[{g_name}]] group with the name: [[{new_g_name}]]"
+        conslog.add_info_log(log_message, _log)
+        return Response(serializer.data)
+    else:
+        log_message = f"User [{request.user}] failed to rename [[{g_name}]], due to the error: {serializer.errors}"
+        conslog.add_error_log(log_message, _log)
+        return JsonResponse(serializer.errors, status=400)
+
+
+@log_url
+@login_required
+@permission_required(Perm.perm_req(Perm.can_add_group), raise_exception=True)
+@api_view(['POST'])
+def copy_group(request, g_name, new_g_name):
+    """ API for copying the target group
+    :param request: request object
+    :param g_name: target group name
+    :param new_g_name: new target group name
+    """
+    # Preparing data for the serializer (it needs a dict value; the request supplies a string)
+    dict_name = {'name': new_g_name}
+    serializer = GroupNameSerializer(data=dict_name)
+    if serializer.is_valid():
+        master_group = Group.objects.get(name=g_name)
+        slave_group = Group.objects.create(name=serializer.data['name'])
+        for perm in master_group.permissions.all():
+            slave_group.permissions.add(perm)
+        log_message = f"User [{request.user}] created a copy of the [[{g_name}]] group with the name: [[{new_g_name}]]"
+        conslog.add_info_log(log_message, _log)
+        return Response(serializer.data)
+    else:
+        log_message = f"User [{request.user}] failed to create a copy of the [[{g_name}]], due to the error: {serializer.errors}"
+        conslog.add_error_log(log_message, _log)
+        return JsonResponse(serializer.errors, status=400)
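Taken together, these function-based views form the group-management API that console/api_urls.py below wires under groups/. A minimal sketch of driving the add/rename flow from a test, assuming the URL names registered below and the seeded admin user used elsewhere in the test suite (the group names are illustrative):

    from django.contrib.auth.models import Group
    from rest_framework.reverse import reverse
    from rest_framework.test import APIClient

    def exercise_group_api(admin_user):
        client = APIClient()
        # force_login satisfies @login_required; the admin user carries can_add_group
        client.force_login(admin_user)
        client.post(reverse('api_add_new_group', kwargs={'g_name': 'operators'}))
        client.post(reverse('api_rename_group',
                            kwargs={'g_name': 'operators', 'new_g_name': 'analysts'}))
        assert Group.objects.filter(name='analysts').exists()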
diff --git a/console/api_urls.py b/console/api_urls.py
new file mode 100644
index 0000000..3fc3006
--- /dev/null
+++ b/console/api_urls.py
@@ -0,0 +1,71 @@
+import logging
+
+from django.urls import include, path, re_path
+from rest_framework import routers
+from rest_framework.authtoken import views
+
+import console.api.users
+from console.api import celery
+from console.api.connections import ConnectionViewSet
+from console.views.index import page_not_found
+from core.views.view_login import LoginView, LogoutView
+from core.views.view_settings import ProductVersionView
+from networkmap.api import (AssetDangerViewSet, AutoNetmapConnectionsViewSet,
+                            AutoNetmapElementsViewSet, NetmapElementsViewSet,
+                            NetmapGroupsViewSet, UserMapViewSet)
+
+_log = logging.getLogger()
+
+router = routers.DefaultRouter()
+router.register('connections', ConnectionViewSet, basename='connections')
+router.register('netmap/static/elements', NetmapElementsViewSet, basename='netmap-elements')
+router.register('netmap/auto/elements', AutoNetmapElementsViewSet, basename='auto-netmap-elements')
+router.register('netmap/auto/connections', AutoNetmapConnectionsViewSet, basename='auto-netmap-connections')
+router.register('netmap/groups', NetmapGroupsViewSet, basename='netmap-groups')
+router.register('netmap/maps', UserMapViewSet, basename='netmap-maps')
+router.register('netmap/in-danger', AssetDangerViewSet, basename='netmap-in-danger')
+
+urlpatterns = (
+    path('', include(router.urls)),
+    path('devices/', include('devices.urls')),
+    path('store/', include('storage.urls')),
+    path('assets/', include('assets.urls')),
+    path('inputs/', include('inputs.urls')),
+    path('license/', include('license_info.urls')),
+    path('logstash/', include('logstash.urls')),
+    path('dashboard/', include('dashboard.api_urls')),
+    path('ncircc/', include('ncircc.urls')),
+    path('company/', include('company.urls')),
+    path('correlation/', include('correlation.urls')),
+    path('netmap/', include('networkmap.urls')),
+    path('incidents/', include('incident.urls')),
+    path('incident_export/', include('incident_export.urls')),
+    path('rotation/', include('rotation.urls')),
+    path('endpoint/', include('devices.urls_endpoint')),
+    path('users/', include('users.urls')),
+    path('notifications/', include('notifications.urls')),
+    path('', include('core.urls')),
+    path('groups/', console.api.users.all_groups, name='api_get_all_groups'),
+    path('groups/<str:g_name>/perms', console.api.users.group_perms, name='api_group_perms'),
+    path('groups/except_<str:g_name>', console.api.users.all_perms_wo_group, name='api_get_excluded_group_perms'),
+    path('groups/linked_permissions/', console.api.users.get_linked_permissions, name='api_get_linked_permissions'),
+    path('groups/<str:g_name>/users', console.api.users.group_users, name='api_get_group_users'),
+    path('groups/<str:g_name>/available_users',
+         console.api.users.group_users_excluded,
+         name='api_get_available_group_users'),
+    path('groups/<str:g_name>/update/perms',
+         console.api.users.update_group_permission,
+         name='api_update_group_permissions'),
+    path('groups/<str:g_name>/update/users', console.api.users.update_group_users, name='api_update_group_users'),
+    path('groups/<str:g_name>/remove',
+         console.api.users.delete_group_api, name='api_delete_group'),
+    path('groups/<str:g_name>/add', console.api.users.add_new_group, name='api_add_new_group'),
+    path('groups/<str:g_name>/rename/<str:new_g_name>', console.api.users.rename_group, name='api_rename_group'),
+    path('groups/<str:g_name>/copy/<str:new_g_name>', console.api.users.copy_group, name='api_copy_group'),
+    path('celery/<str:task_id>/', celery.check_task_state, name='celery_check_task_state'),
+    path('auth/token/', views.obtain_auth_token, name='auth_get_token'),
+    path('events/', include('events.api_urls')),
+    path('logout/', LogoutView.as_view(), name='logout'),
+    path('login/', LoginView.as_view(), name='api_login'),
+    path('product/version/', ProductVersionView.as_view(), name='api_product_version'),
+    re_path(r'^', page_not_found, name='not_found'),
+)
diff --git a/console/apps.py b/console/apps.py
new file mode 100644
index 0000000..627e20f
--- /dev/null
+++ b/console/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class ConsoleConfig(AppConfig):
+    name = 'console'
diff --git a/console/asgi.py b/console/asgi.py
new file mode 100644
index 0000000..5b43876
--- /dev/null
+++ b/console/asgi.py
@@ -0,0 +1,56 @@
+import logging
+import os
+
+from channels.auth import AuthMiddlewareStack
+from channels.db import database_sync_to_async
+from channels.routing import ProtocolTypeRouter, URLRouter
+from django.contrib.auth.models import AnonymousUser
+from django.core.asgi import get_asgi_application
+from rest_framework.authtoken.models import Token
+
+from console.routing import websocket_urlpatterns
+
+_log = logging.getLogger(__name__)
+
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'console.settings.dev')
+
+django_asgi_app = get_asgi_application()
+
+
+class TokenHeaderAuthMiddleware:
+    """Middleware that authorizes a WebSocket connection by a token in the header.
+
+    It is invoked last, and only after an unsuccessful session authentication
+    attempt, i.e. when scope['user'] == AnonymousUser()
+
+    Example:
+        Authorization: Token vgft67uhgtreerfcgvh678uihvhkugct7iyukv
+    """
+
+    def __init__(self, inner):
+        self.inner = inner
+
+    async def __call__(self, scope, receive, send):
+        headers = dict(scope['headers'])
+        if b'authorization' in headers and scope['user'] == AnonymousUser():
+            token_name, token_key = headers[b'authorization'].decode().split()
+            if token_name == 'Token':
+                user = await self.get_user_by_token(token_key)
+                if user:
+                    scope['user'] = user
+                return await self.inner(scope, receive, send)
+        return await self.inner(scope, receive, send)
+
+    @database_sync_to_async
+    def get_user_by_token(self, token):
+        try:
+            return Token.objects.get(key=token).user
+        except Token.DoesNotExist:
+            return
+
+
+application = ProtocolTypeRouter(
+    {
+        "websocket": AuthMiddlewareStack(TokenHeaderAuthMiddleware(URLRouter(websocket_urlpatterns)))
+    }
+)
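A client authenticates against this middleware by sending the header from the docstring during the WebSocket handshake. A minimal sketch using the third-party websockets package, assuming the ws/notifications/ route from console/routing.py further below; host, port, and token value are illustrative, and newer websockets releases name the keyword additional_headers:

    import asyncio
    import websockets

    async def listen(token):
        # Session auth is tried first by AuthMiddlewareStack; the Token header is the fallback
        headers = {'Authorization': f'Token {token}'}
        async with websockets.connect('ws://localhost:9090/ws/notifications/',
                                      extra_headers=headers) as ws:
            while True:
                print(await ws.recv())

    # asyncio.run(listen('vgft67uhgtreerfcgvh678uihvhkugct7iyukv'))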
diff --git a/console/celery.py b/console/celery.py
new file mode 100644
index 0000000..577c2f5
--- /dev/null
+++ b/console/celery.py
@@ -0,0 +1,19 @@
+from __future__ import absolute_import, unicode_literals
+
+import os
+
+from celery import Celery
+
+# set the default Django settings module for the 'celery' program.
+os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'console.settings.dev')
+
+app = Celery('console')
+
+# Using a string here means the worker doesn't have to serialize
+# the configuration object to child processes.
+# - namespace='CELERY' means all celery-related configuration keys
+#   should have a `CELERY_` prefix.
+app.config_from_object('django.conf:settings', namespace='CELERY')
+
+# Load task modules from all registered Django app configs.
+app.autodiscover_tasks()
diff --git a/console/conslog.py b/console/conslog.py
new file mode 100644
index 0000000..a048e8c
--- /dev/null
+++ b/console/conslog.py
@@ -0,0 +1,93 @@
+def add_info_log(message, logger):
+    """ Function for creating an INFO log entry
+    :param message: log message, provided by the functions down below
+    :param logger: instance of logger
+    :return: log entry in the logger's log
+    """
+    return logger.info(message)
+
+
+def add_error_log(message, logger):
+    """ Function for creating an ERROR log entry
+    :param message: log message, provided by the functions down below
+    :param logger: instance of logger
+    :return: log entry in the logger's log
+    """
+    return logger.error(message)
+
+
+def add_warning_log(message, logger):
+    """ Function for creating a WARNING log entry
+    :param message: log message, provided by the functions down below
+    :param logger: instance of logger
+    :return: log entry in the logger's log
+    """
+    return logger.warning(message)
+
+
+def url_access_log(request):
+    """ Function for creating a log entry for accessing a url
+    :param request: view request, used to form the url name and the current user trying to access the url
+    :return: string for log entry
+    """
+    user = request.user
+    url = request.get_full_path()
+    return f"User [{user}] accessed <{url}> page"
+
+
+def object_create_log(instance, obj_type, user=None):
+    """ Create log string for object create request
+    :param user: Who created the object
+    :param instance: Object's instance (model)
+    :param obj_type: Object's type
+    :return: String with log record
+    """
+    if user:
+        return f'User [{user}] created new [{obj_type}] - [{instance}]'
+    else:
+        return f'Unknown user created new [{obj_type}] - [{instance}]'
+
+
+def object_list_log(user, obj_type):
+    """ Create log string for object list request
+    :param user: Who sent the request
+    :param obj_type: Object's type
+    :return: String with log record
+    """
+    return f'User [{user}] requested a list of [{obj_type}]'
+
+
+def object_retrieve_log(user, instance, obj_type):
+    """ Create log string for object retrieve
+    :param user: Who sent the request
+    :param instance: Retrieved instance
+    :return: String with log record
+    """
+    return f'User [{user}] retrieved an object [{instance}] of type [{obj_type}]'
+
+
+def object_update_log(user, instance, obj_type):
+    """ Create log string for object update
+    :param user: Who sent the request
+    :param instance: Updated instance
+    :return: String with log record
+    """
+    return f'User [{user}] performed update of object [{instance}] of type [{obj_type}]'
+
+
+def object_destroy_log(user, instance, obj_type):
+    """ Create log string for object destroy
+    :param user: Who sent the request
+    :param instance: Destroyed instance
+    :return: String with log record
+    """
+    return f'User [{user}] performed destroy of object [{instance}] of type [{obj_type}]'
+
+
+def form_errors(user, errors):
+    """ Create log string for form validation errors
+    :param user: User who filled the form
+    :param errors: List of errors
+    :return: String with log record
+    """
+    return f'User [{user}] has form errors: [{errors}]'
diff --git a/console/management/__init__.py b/console/management/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/console/management/commands/__init__.py b/console/management/commands/__init__.py
new file mode 100644
index 0000000..e69de29
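The helpers in conslog.py split message building from message writing: the object_*/url_* functions return strings, and the add_*_log functions pass them to a logger. A minimal sketch of the intended pairing inside a hypothetical view:

    import logging

    from console import conslog

    _log = logging.getLogger(__name__)

    def destroy_widget(request, widget):  # hypothetical view body
        conslog.add_info_log(conslog.url_access_log(request), _log)
        # obj_type is a free-form label that ends up inside the log record
        conslog.add_info_log(conslog.object_destroy_log(request.user, widget, 'widget'), _log)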
diff --git a/console/management/commands/create_vector_configs.py b/console/management/commands/create_vector_configs.py
new file mode 100644
index 0000000..6370b72
--- /dev/null
+++ b/console/management/commands/create_vector_configs.py
@@ -0,0 +1,32 @@
+import os
+from pathlib import Path
+
+from django.conf import settings
+from django.core.management.base import BaseCommand
+from django.template import Template, Context
+
+ELK_URL = getattr(settings, 'ELASTIC_URL', 'localhost:9200')
+ELK_LOGIN = getattr(settings, 'ELK_LOGIN', 'elastic')
+ELK_PASS = getattr(settings, 'ELK_PASS', 'changeme')
+VECTOR_CONFIG_DIR = getattr(settings, 'LOGSTASH_CONFIG_DIR')
+
+
+class Command(BaseCommand):
+    help = 'Load default Vector configs'
+
+    def handle(self, *args, **options):
+        templates_path = Path(os.path.abspath(__file__)).parents[0] / 'templates'
+        context = Context({
+            "elastic_url": ELK_URL,
+            "elastic_login": ELK_LOGIN,
+            "elastic_password": ELK_PASS,
+        })
+        Path(VECTOR_CONFIG_DIR).mkdir(exist_ok=True)
+
+        for template in os.listdir(templates_path):
+            with open(f"{templates_path}/{template}", 'r') as template_file:
+                template_text = template_file.read()
+            config_content = Template(template_text).render(context)
+            with open(os.path.join(VECTOR_CONFIG_DIR, template), 'w') as f:
+                f.write(config_content)
+            print(f'Created {template}')
diff --git a/console/management/commands/load_rules.py b/console/management/commands/load_rules.py
new file mode 100644
index 0000000..6407af1
--- /dev/null
+++ b/console/management/commands/load_rules.py
@@ -0,0 +1,34 @@
+import logging
+import os
+from pathlib import Path
+
+from django.core.files.uploadedfile import SimpleUploadedFile
+from django.core.management.base import BaseCommand
+
+from correlation.services.import_service import ImportRulesService
+from incident.models import IncidentRecommendations
+
+_log = logging.getLogger(__name__)
+
+
+def create_addition():
+    """
+    During rules export, only those recommendations and effects that are used in the rules are exported.
+    Here we manually create some objects that are not used anywhere in the rules
+    """
+    IncidentRecommendations.objects.get_or_create(name="Перевести работу АСУ ТП на резервный ПЛК",
+                                                  description="Необходимо перевести работу на резервный ПЛК для "
+                                                              "избежания нарушения технологического процесса.")
+
+
+class Command(BaseCommand):
+    help = 'Load default rules'
+
+    def handle(self, *args, **options):
+        rules_path = Path(os.path.abspath(__file__)).parents[0]
+        with open(f'{rules_path}/rules_console.json', 'rb') as rule_file:
+            file = SimpleUploadedFile("rules.json", rule_file.read())
+
+        create_addition()
+        import_service = ImportRulesService(file, check_version=False)
+        import_service.run_import()
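Both are ordinary Django management commands, so besides manage.py create_vector_configs and manage.py load_rules they can be invoked from code, for example in a deployment hook; a minimal sketch (the wrapper function is illustrative):

    from django.core.management import call_command

    def render_configs_and_seed_rules():
        # Renders every template from commands/templates/ into settings.LOGSTASH_CONFIG_DIR
        call_command('create_vector_configs')
        # Imports rules_console.json through ImportRulesService (version check disabled)
        call_command('load_rules')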
"" + } else if ends_with(.file, "celeryd.log") { + parsed, err = parse_regex(.message, r'\[(?P\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d*:\s)[^ ]* (?P.*)') + message = replace(.message, parsed.timestamp, "") ?? "" + } + if err != null { + abort + } + + . = {} + .timestamp = now() + .message = message + .file = source_file + ''' + + +[sinks.celery_logs_to_es] + type = "elasticsearch" + inputs = ["parse_celery_logs"] + compression = "none" + healthcheck = true + auth.strategy= "basic" + auth.user = "{{ elastic_login }}" + auth.password = "{{ elastic_password }}" + endpoint = "{{ elastic_url }}" + normal.index = "system-logs" + id_key = "event_uuid" diff --git a/console/management/commands/templates/mc_logs_console.toml b/console/management/commands/templates/mc_logs_console.toml new file mode 100644 index 0000000..5748de8 --- /dev/null +++ b/console/management/commands/templates/mc_logs_console.toml @@ -0,0 +1,41 @@ +# Vector pipeline that collects logs and sends them to ElasticSearch +[sources.console_logs_from_file] + type = "file" + include = [ + "/var/log/armaconsole/console.log", + ] + +# Parse data +[transforms.parse_console_logs] + type = "remap" + inputs = [ + "console_logs_from_file" + ] + source = ''' + source_file = .file + + parsed, err = parse_regex(.message, r'(?P\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d*) (?P[^ ]*) (?P.*)') + message = parsed.message + + if err != null { + abort + } + + . = {} + .timestamp = now() + .message = message + .file = source_file + ''' + + +[sinks.console_logs_to_es] + type = "elasticsearch" + inputs = ["parse_console_logs"] + compression = "none" + healthcheck = true + auth.strategy= "basic" + auth.user = "{{ elastic_login }}" + auth.password = "{{ elastic_password }}" + endpoint = "{{ elastic_url }}" + normal.index = "system-logs" + id_key = "event_uuid" diff --git a/console/management/commands/templates/mc_logs_es.toml b/console/management/commands/templates/mc_logs_es.toml new file mode 100644 index 0000000..b02cdc6 --- /dev/null +++ b/console/management/commands/templates/mc_logs_es.toml @@ -0,0 +1,41 @@ +# Vector pipeline that collects logs and sends them to ElasticSearch +[sources.es_logs_from_file] + type = "file" + include = [ + "/var/log/elasticsearch/console-cluster.log", + ] + +# Parse data +[transforms.parse_es_logs] + type = "remap" + inputs = [ + "es_logs_from_file" + ] + source = ''' + source_file = .file + + parsed, err = parse_regex(.message, r'(?P\[\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2},\d*\])(?P.*)') + message = parsed.message + + if err != null { + abort + } + + . 
+
+  . = {}
+  .timestamp = now()
+  .message = message
+  .file = source_file
+  '''
+
+
+[sinks.es_logs_to_es]
+  type = "elasticsearch"
+  inputs = ["parse_es_logs"]
+  compression = "none"
+  healthcheck = true
+  auth.strategy = "basic"
+  auth.user = "{{ elastic_login }}"
+  auth.password = "{{ elastic_password }}"
+  endpoint = "{{ elastic_url }}"
+  normal.index = "system-logs"
+  id_key = "event_uuid"
diff --git a/console/management/commands/templates/mc_logs_gunicorn.toml b/console/management/commands/templates/mc_logs_gunicorn.toml
new file mode 100644
index 0000000..341445f
--- /dev/null
+++ b/console/management/commands/templates/mc_logs_gunicorn.toml
@@ -0,0 +1,41 @@
+# Vector pipeline that collects logs and sends them to ElasticSearch
+[sources.gunicorn_logs_from_file]
+  type = "file"
+  include = [
+    "/var/log/armaconsole/gunicorn/gunicorn.log",
+  ]
+
+# Parse data
+[transforms.parse_gunicorn_logs]
+  type = "remap"
+  inputs = [
+    "gunicorn_logs_from_file"
+  ]
+  source = '''
+  source_file = .file
+
+  parsed, err = parse_regex(.message, r'(?P<timestamp>[^ ]+[ ]+[^ ]+[ ]+[^ ]+[ ]+)(?P<message>.*)')
+  message = parsed.message
+
+  if err != null {
+    abort
+  }
+
+  . = {}
+  .timestamp = now()
+  .message = message
+  .file = source_file
+  '''
+
+
+[sinks.gunicorn_logs_to_es]
+  type = "elasticsearch"
+  inputs = ["parse_gunicorn_logs"]
+  compression = "none"
+  healthcheck = true
+  auth.strategy = "basic"
+  auth.user = "{{ elastic_login }}"
+  auth.password = "{{ elastic_password }}"
+  endpoint = "{{ elastic_url }}"
+  normal.index = "system-logs"
+  id_key = "event_uuid"
diff --git a/console/management/commands/templates/mc_logs_nginx.toml b/console/management/commands/templates/mc_logs_nginx.toml
new file mode 100644
index 0000000..fb9d268
--- /dev/null
+++ b/console/management/commands/templates/mc_logs_nginx.toml
@@ -0,0 +1,46 @@
+# Vector pipeline that collects logs and sends them to ElasticSearch
+[sources.nginx_logs_from_file]
+  type = "file"
+  include = [
+    "/var/log/armaconsole/nginx.error.log",
+    "/var/log/armaconsole/nginx.access.log",
+  ]
+
+# Parse data
+[transforms.parse_nginx_logs]
+  type = "remap"
+  inputs = [
+    "nginx_logs_from_file"
+  ]
+  source = '''
+  source_file = .file
+
+  if ends_with(.file, "nginx.error.log") {
+    parsed, err = parse_regex(.message, r'(?P<timestamp>\d{4}/\d{2}/\d{2} \d{2}:\d{2}:\d{2}) (?P<message>.*)')
+    message = parsed.message
+  } else if ends_with(.file, "nginx.access.log") {
+    parsed, err = parse_regex(.message, r'(?P<timestamp>\s\[\d{2}/\D+/\d{4}:\d{2}:\d{2}:\d{2} \+\d{4}\])')
+    message = replace(.message, parsed.timestamp, "") ?? ""
+  }
+  if err != null {
+    abort
+  }
+
+  . = {}
+  .timestamp = now()
+  .message = message
+  .file = source_file
+  '''
+
+
+[sinks.nginx_logs_to_es]
+  type = "elasticsearch"
+  inputs = ["parse_nginx_logs"]
+  compression = "none"
+  healthcheck = true
+  auth.strategy = "basic"
+  auth.user = "{{ elastic_login }}"
+  auth.password = "{{ elastic_password }}"
+  endpoint = "{{ elastic_url }}"
+  normal.index = "system-logs"
+  id_key = "event_uuid"
diff --git a/console/management/commands/templates/mc_logs_postgresql.toml b/console/management/commands/templates/mc_logs_postgresql.toml
new file mode 100644
index 0000000..32705d0
--- /dev/null
+++ b/console/management/commands/templates/mc_logs_postgresql.toml
@@ -0,0 +1,41 @@
+# Vector pipeline that collects logs and sends them to ElasticSearch
+[sources.postgresql_logs_from_file]
+  type = "file"
+  include = [
+    "/var/log/postgresql/postgresql-11-main.log",
+  ]
+
+# Parse data
+[transforms.parse_postgresql_logs]
+  type = "remap"
+  inputs = [
+    "postgresql_logs_from_file"
+  ]
+  source = '''
+  source_file = .file
+
+  parsed, err = parse_regex(.message, r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d*) [^ ]* (?P<message>.*)')
+  message = parsed.message
+
+  if err != null {
+    abort
+  }
+
+  . = {}
+  .timestamp = now()
+  .message = message
+  .file = source_file
+  '''
+
+
+[sinks.postgresql_logs_to_es]
+  type = "elasticsearch"
+  inputs = ["parse_postgresql_logs"]
+  compression = "none"
+  healthcheck = true
+  auth.strategy = "basic"
+  auth.user = "{{ elastic_login }}"
+  auth.password = "{{ elastic_password }}"
+  endpoint = "{{ elastic_url }}"
+  normal.index = "system-logs"
+  id_key = "event_uuid"
diff --git a/console/management/commands/templates/mc_logs_redis.toml b/console/management/commands/templates/mc_logs_redis.toml
new file mode 100644
index 0000000..441f059
--- /dev/null
+++ b/console/management/commands/templates/mc_logs_redis.toml
@@ -0,0 +1,41 @@
+# Vector pipeline that collects logs and sends them to ElasticSearch
+[sources.redis_logs_from_file]
+  type = "file"
+  include = [
+    "/var/log/redis/redis-server.log",
+  ]
+
+# Parse data
+[transforms.parse_redis_logs]
+  type = "remap"
+  inputs = [
+    "redis_logs_from_file"
+  ]
+  source = '''
+  source_file = .file
+
+  parsed, err = parse_regex(.message, r'[^ ](?P<timestamp>\s\d{2} \S* \d{4} \d{2}:\d{2}:\d{2}.\d*) (?P<message>.*)')
+  message = replace(.message, parsed.timestamp, "") ?? ""
+
+  if err != null {
+    abort
+  }
+
+  . = {}
+  .timestamp = now()
+  .message = message
+  .file = source_file
+  '''
+
+
+[sinks.redis_logs_to_es]
+  type = "elasticsearch"
+  inputs = ["parse_redis_logs"]
+  compression = "none"
+  healthcheck = true
+  auth.strategy = "basic"
+  auth.user = "{{ elastic_login }}"
+  auth.password = "{{ elastic_password }}"
+  endpoint = "{{ elastic_url }}"
+  normal.index = "system-logs"
+  id_key = "event_uuid"
diff --git a/console/management/commands/templates/mc_logs_syslog.toml b/console/management/commands/templates/mc_logs_syslog.toml
new file mode 100644
index 0000000..7b022b2
--- /dev/null
+++ b/console/management/commands/templates/mc_logs_syslog.toml
@@ -0,0 +1,35 @@
+[sources.syslog_file_logs]
+type = "file"
+include = ["/var/log/syslog"]
+read_from = "end"
+
+[transforms.parse_syslog_file_logs]
+type = "remap"
+inputs = ["syslog_file_logs"]
+source = '''
+  source_file = .file
+  source_syslog_message = .message
+
+  syslog_message, err = parse_syslog(source_syslog_message)
+
+  if err != null {
+    abort
+  }
+
+  . = {}
+  .timestamp = now()
+  .message = syslog_message.message
+  .file = source_file
+  '''
+
+[sinks.syslog_file_to_es]
+  type = "elasticsearch"
+  inputs = ["parse_syslog_file_logs"]
+  compression = "none"
+  healthcheck = true
+  auth.strategy = "basic"
+  auth.user = "{{ elastic_login }}"
+  auth.password = "{{ elastic_password }}"
+  endpoint = "{{ elastic_url }}"
+  normal.index = "system-logs"
+  id_key = "event_uuid"
\ No newline at end of file
diff --git a/console/migrations/__init__.py b/console/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
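The named groups in these parse_regex patterns (timestamp, message) are what the transforms read back as parsed.timestamp and parsed.message. For this simple syntax Python's re module behaves like the Rust regex engine Vector uses, so a pattern can be sanity-checked outside Vector; the sample log line below is illustrative:

    import re

    # Same pattern as in mc_logs_postgresql.toml
    pattern = re.compile(
        r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}.\d*) [^ ]* (?P<message>.*)')
    sample = '2024-01-01 12:00:00.123 UTC [1234] LOG:  database system is ready'
    match = pattern.match(sample)
    assert match is not None
    print(match.group('timestamp'))  # 2024-01-01 12:00:00.123
    print(match.group('message'))   # [1234] LOG:  database system is ready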
diff --git a/console/models.py b/console/models.py
new file mode 100644
index 0000000..bc82787
--- /dev/null
+++ b/console/models.py
@@ -0,0 +1,134 @@
+import logging
+
+from django.conf import settings
+from django.contrib.auth.models import User
+from django.db import models
+from django.db.models.signals import post_save
+from django.dispatch import receiver
+from django.utils.translation import gettext_lazy, pgettext_lazy
+from django_celery_beat.models import PeriodicTask
+from rest_framework.authtoken.models import Token
+from solo.models import SingletonModel
+
+from core.fields import IntegerField
+
+_log = logging.getLogger(__name__)
+
+
+@receiver(post_save, sender=settings.AUTH_USER_MODEL)
+def create_auth_token(sender, instance=None, created=False, **kwargs):
+    """ Generate Auth token for user
+
+    See https://www.django-rest-framework.org/api-guide/authentication/#generating-tokens
+    """
+    if created:
+        Token.objects.create(user=instance)
+
+
+class NameDescriptionModel(models.Model):
+    """ Abstract model for a name - description pair """
+    name = models.CharField(max_length=128, verbose_name=gettext_lazy(
+        'Name'))
+    description = models.TextField(null=True,
+                                   blank=True,
+                                   verbose_name=gettext_lazy('Description'),
+                                   help_text=gettext_lazy('Description'))
+
+    class Meta:
+        abstract = True
+        ordering = ['name']
+
+    def __str__(self):
+        return self.name
+
+
+class UniqueNameDescriptionModel(NameDescriptionModel):
+    """ NameDescriptionModel subclass that makes the name field unique """
+    name = models.CharField(max_length=128, verbose_name=gettext_lazy('Name'), unique=True)
+
+    class Meta:
+        abstract = True
+        ordering = ['name']
+
+
+class SensorConnectedMixin(models.Model):
+    """ Add connection to sensor by name.
+
+    Sensor can be ARMAIF or Endpoint
+    """
+    sensor = models.CharField(null=True, blank=True, max_length=128, verbose_name=gettext_lazy("Sensor name"))
+
+    class Meta:
+        abstract = True
+
+
+class UpdatedNameDescriptionModel(NameDescriptionModel):
+    """ Abstract model for a name - description pair with updated info """
+    updated = models.DateTimeField(auto_now=True, verbose_name=gettext_lazy(
+        'Updated'), help_text=gettext_lazy('Date and time, when asset was updated'))
+
+    class Meta:
+        abstract = True
+        ordering = ['updated', 'name']
+
+    def __str__(self):
+        return self.name
+
+
+class VulnerabilityEffect(NameDescriptionModel):
+    """ Possible Vulnerability effect """
+    pass
+
+
+class VulnerabilityRecommendations(NameDescriptionModel):
+    """ Recommendations how to close Vulnerability """
+    pass
+
+
+class Vulnerability(UniqueNameDescriptionModel):
+    """ Description of one Vulnerability """
+    detection_method = models.TextField(verbose_name=gettext_lazy('Detection method'))
+    affected_software = models.TextField(verbose_name=gettext_lazy('Vulnerable software'))
+    close_recommendations = models.ManyToManyField(VulnerabilityRecommendations,
+                                                   verbose_name=gettext_lazy('Resolve recommendations'),
+                                                   help_text=gettext_lazy('How to resolve the vulnerability'),
+                                                   blank=True)
+    effects = models.ManyToManyField(VulnerabilityEffect,
+                                     verbose_name=pgettext_lazy('as consequences', 'Effects'),
+                                     help_text=gettext_lazy('Consequences of the vulnerability'),
+                                     blank=True)
+
+
+class ConnectionType(NameDescriptionModel):
+    """ Asset connection type """
+    pass
+
+
+class Connection(models.Model):
+    """
+    Connection between two assets
+    """
+
+    from assets.models.assets import Asset
+
+    class ProtocolType(models.TextChoices):
+        TCP = 'TCP', gettext_lazy('TCP protocol')
+        UDP = 'UDP', gettext_lazy('UDP protocol')
+
+    src_asset = models.ForeignKey(Asset, related_name='src', on_delete=models.CASCADE,
+                                  verbose_name=gettext_lazy('Source asset'))
+    dst_asset = models.ForeignKey(Asset,
+                                  related_name='dst',
+                                  on_delete=models.CASCADE,
+                                  verbose_name=gettext_lazy('Destination asset'))
+    src_port = IntegerField(verbose_name=gettext_lazy('Source port'), null=True, blank=True,
+                            min_value=0, max_value=65535)
+    dst_port = IntegerField(verbose_name=gettext_lazy('Destination port'), null=True, blank=True,
+                            min_value=0, max_value=65535)
+    connection_protocol = models.CharField(choices=ProtocolType.choices,
+                                           verbose_name=gettext_lazy('Connection protocol'),
+                                           help_text=gettext_lazy('Connection protocol type'), blank=True, null=True,
+                                           default=ProtocolType.TCP, max_length=10)
+    created = models.DateTimeField(auto_now_add=True)
+    updated = models.DateTimeField(auto_now=True, verbose_name=gettext_lazy(
+        'Updated'), help_text=gettext_lazy('Date and time, when connection was updated'))
diff --git a/console/routing.py b/console/routing.py
new file mode 100644
index 0000000..a05c166
--- /dev/null
+++ b/console/routing.py
@@ -0,0 +1,7 @@
+from django.urls import path
+
+from notifications.services.ws import WSNotification
+
+websocket_urlpatterns = [
+    path('ws/notifications/', WSNotification.as_asgi()),
+]
diff --git a/console/serializers.py b/console/serializers.py
new file mode 100644
index 0000000..7ca6ad9
--- /dev/null
+++ b/console/serializers.py
@@ -0,0 +1,68 @@
+import logging
+import re
+
+from django.contrib.auth.models import Group, Permission
+from django.utils.translation import gettext
+from rest_framework import serializers
+
+from assets.models.assets import Asset
+from console.models import Connection
+from
core.serializers import ModelLocalizedSerializer, DateTimeLocalizedField + +RE_GROUPNAME = re.compile('[@+!#$%^&*()<>?/|}{~:]') +logger_info = logging.getLogger('console.user.info') +_log = logging.getLogger() + + +class AllPermsSerializer(ModelLocalizedSerializer): + name = serializers.SerializerMethodField('get_name') + + class Meta: + model = Permission + fields = ['codename', 'name', 'content_type'] + + def get_name(self, obj): + return gettext(obj.name) + + +class GroupNameSerializer(ModelLocalizedSerializer): + class Meta: + model = Group + fields = ['name'] + + def validate_name(self, value): + """ Method for validating the name, which was passed to the serializer + @param value: name from the serializer (pass through: serializer.data) + @return: name of the group + """ + # Check for special symbols in group name + if RE_GROUPNAME.search(value) is not None: + raise serializers.ValidationError(gettext('Incorrect group name format. Try another')) + + return value + + +class CelerySerializer(serializers.Serializer): + task_id = serializers.UUIDField() + finished = serializers.BooleanField(required=False) + result = serializers.IntegerField(required=False) + + +# Network map serializers for ConnectionInfoSerializer +# Asset connection info serializer +class ConnectionAssetInfoSerializer(serializers.ModelSerializer): + class Meta: + model = Asset + fields = ['pk', 'name', 'ip'] + + +# Connection serializer +class ConnectionSerializer(serializers.ModelSerializer): + src_asset = ConnectionAssetInfoSerializer() + dst_asset = ConnectionAssetInfoSerializer() + updated = DateTimeLocalizedField() + created = DateTimeLocalizedField() + + class Meta: + model = Connection + fields = ['src_asset', 'dst_asset', 'connection_protocol', 'created', 'updated'] diff --git a/console/services/__init__.py b/console/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/console/services/product.py b/console/services/product.py new file mode 100644 index 0000000..0ab97dc --- /dev/null +++ b/console/services/product.py @@ -0,0 +1,18 @@ +import os + +from django.conf import settings + + +def load_product_version(): + """Get product version from file""" + file_name = os.path.join(settings.BASE_DIR, 'product_version') + return_text = {'product': 'InfoWatch ARMA Management Console', 'version': 'None'} + if os.path.exists(file_name): + with open(file_name, 'r') as f: + try: + return_text['version'] = f.readline().split(':')[1] + return return_text + except ValueError: + return return_text + else: + return return_text diff --git a/console/settings/__init__.py b/console/settings/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/console/settings/base.py b/console/settings/base.py new file mode 100644 index 0000000..bd46469 --- /dev/null +++ b/console/settings/base.py @@ -0,0 +1,473 @@ +""" Django settings for console project. """ +import os +import sys + +from celery.schedules import crontab +from django.utils.translation import gettext_lazy + +from console.services.product import load_product_version + +# Build paths inside the project like this: os.path.join(BASE_DIR, ...) +BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +DEBUG = int(os.environ.get('DEBUG', '0')) > 0 + +# Program is started under testing framework +TEST_MODE = 'test' in sys.argv or 'pytest' in sys.modules or os.environ.get("TEST_MODE") + +# Program is used by developer (human), i.e. 
loads additional debug software
+DEV_MODE = DEBUG and not TEST_MODE
+
+# False to disable access to admin control panel via web
+ADMIN_PANEL_ENABLED = DEV_MODE
+
+##################################
+### SECURITY ###
+##################################
+
+CORS_ALLOW_ALL_ORIGINS = True
+CORS_ALLOW_CREDENTIALS = True
+CORS_EXPOSE_HEADERS = ['Content-Disposition']
+
+ALLOWED_HOSTS = ['*']
+
+######################################
+### APPLICATIONS ###
+######################################
+
+INSTALLED_APPS = [
+    'django.forms',
+    'django.contrib.admin',
+    'django.contrib.auth',
+    'django.contrib.contenttypes',
+    'django.contrib.sessions',
+    'django.contrib.messages',
+    'django.contrib.staticfiles',
+    'rest_framework',
+    'rest_framework.authtoken',
+    'django_json_widget',
+    'solo',
+    'django_celery_beat',
+    'sequences.apps.SequencesConfig',
+    'corsheaders',
+    'django_filters',
+    'channels',
+]
+
+PROJECT_APPS = [
+    'assets.apps.AssetsConfig',
+    'license_info.apps.LicenseInfoConfig',
+    'perms.apps.PermsConfig',
+    'console.apps.ConsoleConfig',
+    'core.apps.CoreConfig',
+    'correlation.apps.CorrelationConfig',
+    'dashboard.apps.DashboardConfig',
+    'company.apps.CompanyConfig',
+    'ncircc.apps.NcirccConfig',
+    'logstash.apps.LogstashConfig',
+    'networkmap.apps.NetworkmapConfig',
+    'users.apps.UsersConfig',
+    'incident_export.apps.IncidentExportConfig',
+    'storage.apps.StorageConfig',
+    'incident.apps.IncidentConfig',
+    'events.apps.EventsConfig',
+    'rotation.apps.RotationConfig',
+    'inputs.apps.InputsConfig',
+    'devices.apps.DevicesConfig',
+    'notifications.apps.NotificationsConfig',
+]
+
+INSTALLED_APPS += PROJECT_APPS
+
+MIDDLEWARE = [
+    'core.middleware.LicenseMiddleware',
+    'django.middleware.security.SecurityMiddleware',
+    'django.contrib.sessions.middleware.SessionMiddleware',
+    'django.middleware.locale.LocaleMiddleware',
+    'corsheaders.middleware.CorsMiddleware',
+    'django.middleware.common.CommonMiddleware',
+    'django.middleware.csrf.CsrfViewMiddleware',
+    'django.contrib.auth.middleware.AuthenticationMiddleware',
+    'django.contrib.messages.middleware.MessageMiddleware',
+    'core.middleware.TimezoneMiddleware',
+    'django.middleware.clickjacking.XFrameOptionsMiddleware',
+]
+
+########################################
+### AUTHENTICATION ###
+########################################
+
+AUTHENTICATION_BACKENDS = [
+    'core.backends.ConsoleAuthBackend.ConsoleAuthSystem',
+    'django.contrib.auth.backends.ModelBackend',
+]
+
+AUTH_PASSWORD_VALIDATORS = [
+    {
+        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
+        'OPTIONS': {
+            'min_length': 8,
+        }
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
+    },
+    {
+        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
+    },
+]
+
+##################################
+### DATABASE ###
+##################################
+
+if not os.environ.get('POSTGRES_PORT'):
+    os.environ['POSTGRES_PORT'] = '5432'
+DATABASES = {
+    'default': {
+        'ENGINE': "django.db.backends.postgresql",  # Project depends on PostgreSQL, so there is no way to change it from env
+        'NAME': os.environ.get('POSTGRES_DB'),
+        'USER': os.environ.get('POSTGRES_USER'),
+        'PASSWORD': os.environ.get('POSTGRES_PASSWORD'),
+        'HOST': os.environ.get('POSTGRES_HOST'),
+        'PORT': os.environ.get('POSTGRES_PORT'),
+    }
+}
+
+DEFAULT_AUTO_FIELD = 'django.db.models.AutoField'
+
+#################################
+### LOGGING ###
+################################# + +LOG_PATH = os.environ.get('LOG_PATH', os.path.join(BASE_DIR, 'dockerlogs')) +USER_LOG_FILENAME = os.environ.get('USER_LOG_FILENAME', 'console.log') +MIN_LOG_LEVEL = os.environ.get('MIN_LOG_LEVEL', 'DEBUG') +LOG_MAX_BYTES = int(os.environ.get('LOG_MAX_BYTES', 1024 * 1024 * 5)) +LOG_BACKUP_COUNT = int(os.environ.get('LOG_BACKUP_COUNT', 500)) + +os.makedirs(LOG_PATH, exist_ok=True) +LOGGING = { + 'version': 1, + 'disable_existing_loggers': False, + 'formatters': { + 'info_message': { + 'format': '%(asctime)s %(levelname)s %(message)s', + }, + 'debug_format': { + 'format': '%(asctime)s %(levelname)s %(filename)s %(funcName)s %(message)s' + } + }, + 'handlers': { + 'console': { + 'class': 'logging.StreamHandler', + 'level': MIN_LOG_LEVEL, + }, + 'file': { + 'level': 'DEBUG', + 'class': 'logging.handlers.RotatingFileHandler', + 'filename': os.path.join(LOG_PATH, USER_LOG_FILENAME), + 'formatter': 'info_message', + 'encoding': 'utf-8', + 'maxBytes': LOG_MAX_BYTES, + 'backupCount': LOG_BACKUP_COUNT, + }, + }, + 'loggers': { + '': { + 'handlers': ['file', 'console'], + 'level': MIN_LOG_LEVEL, + }, + }, +} + +############################################## +### INTERNATIONALIZATION ### +############################################## + +LANGUAGE_CODE = 'en' +LANGUAGES = [ + ('en', gettext_lazy('English')), + ('ru', gettext_lazy('Russian')), +] + +INITIAL_DATE_FORMAT = "Y-m-d" + +# @see https://en.wikipedia.org/wiki/List_of_tz_database_time_zones +TIME_ZONE = os.environ.get('TIME_ZONE', 'UTC') +# Timezone used for users by default +DEFAULT_CURRENT_TIMEZONE = os.environ.get('DEFAULT_CURRENT_TIMEZONE', 'Europe/Moscow') + +USE_I18N = True +USE_L10N = True +USE_TZ = True + +LOCALE_PATHS = [ + os.path.join(BASE_DIR, 'locale') +] + +############################### +### FILES ### +############################### + +# Build paths for generated files like static inside the project like this: os.path.join(PUBLIC_DIR, ...) 
+PUBLIC_DIR = os.environ.get('PUBLIC_DIR', os.path.join(BASE_DIR, 'public'))
+
+PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
+STATIC_URL = '/static/'
+MEDIA_URL = '/media/'
+STATIC_ROOT = os.path.join(PUBLIC_DIR, 'static')
+MEDIA_ROOT = os.path.join(PUBLIC_DIR, 'media')
+
+REDIS_HOST = os.environ.get('REDIS_HOST', 'redis')
+REDIS_PORT = int(os.environ.get('REDIS_PORT', 6379))
+REDIS_CACHE_TIMEOUT = 86400
+
+###############################
+### CACHE ###
+###############################
+
+SOLO_CACHE = 'local'
+SOLO_CACHE_TIMEOUT = 60 * 5  # 5 mins
+SOLO_CACHE_PREFIX = 'solo'
+
+CACHES = {
+    'default': {
+        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+    },
+    'local': {
+        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
+    },
+    'redis': {
+        'BACKEND': 'django_redis.cache.RedisCache',
+        'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1',
+        'OPTIONS': {
+            'CLIENT_CLASS': 'django_redis.client.DefaultClient'
+        }
+    }
+}
+
+#############################
+### DRF ###
+#############################
+
+REST_FRAMEWORK = {
+    'DEFAULT_RENDERER_CLASSES': (
+        'rest_framework.renderers.JSONRenderer',
+        'rest_framework.renderers.BrowsableAPIRenderer',
+    ),
+    'DEFAULT_FILTER_BACKENDS': (
+        'django_filters.rest_framework.DjangoFilterBackend',
+        'rest_framework.filters.OrderingFilter',
+        'core.backends.filters.SearchAllFieldsBackend',
+    ),
+    'DEFAULT_PAGINATION_CLASS': 'core.services.pagination.BasicPagination',
+    'DEFAULT_PERMISSION_CLASSES': [
+        'rest_framework.permissions.IsAuthenticated',
+    ],
+    'DEFAULT_AUTHENTICATION_CLASSES': [
+        'rest_framework.authentication.SessionAuthentication',
+        'rest_framework.authentication.TokenAuthentication',
+    ],
+}
+
+################################
+### CELERY ###
+################################
+
+CELERY_BROKER_URL = f'redis://{REDIS_HOST}:{REDIS_PORT}'
+CELERY_RESULT_BACKEND = f'redis://{REDIS_HOST}:{REDIS_PORT}'
+CELERY_ACCEPT_CONTENT = ['application/json']
+CELERY_TASK_SERIALIZER = 'json'
+CELERY_RESULT_SERIALIZER = 'json'
+CELERY_TIMEZONE = TIME_ZONE
+CELERY_IMPORTS = ['devices.tasks.firewall', 'devices.tasks.sensor']
+
+""" Single place to set up at which time daily tasks should execute, crontab schedule object"""
+DAILY_CRONTAB = crontab(minute='0', hour='1')
+""" Single place to set up at which time weekly tasks should execute, crontab schedule object"""
+WEEKLY_CRONTAB = crontab(minute='0', hour='1', day_of_week='1')
+""" Single place to set up at which time monthly tasks should execute, crontab schedule object"""
+MONTHLY_CRONTAB = crontab(minute='0', hour='1', day_of_month='1')
+""" Execute every 2 minutes."""
+EVERY_2_MINUTE = crontab(minute='*/2')
+
+ROTATE_SIZE_CHECK_CRONTAB = crontab(minute='*/5')
+CELERY_BEAT_SCHEDULE = {
+    'update_statistics': {
+        'task': 'dashboard.tasks.update_statistics_task',
+        'schedule': crontab()
+    },
+    'expire_users': {
+        'task': 'console.tasks.expire_users_task',
+        'schedule': DAILY_CRONTAB
+    },
+    'update_auto_network_map_data': {
+        'task': 'networkmap.tasks.update_auto_network_map_data',
+        'schedule': crontab()
+    },
+    'update_firewall_info_task': {
+        'task': 'devices.tasks.firewall.update_firewall_info_task',
+        'schedule': crontab()
+    },
+    'update_amount_of_elk_events': {
+        'task': 'console.tasks.update_amount_of_aggregated_events',
+        'schedule': crontab()
+    },
+    'check_blocked_users': {
+        'task': 'core.tasks.check_blocked_users',
+        'schedule': crontab()
+    },
+    'update_status_notification': {
+        'task': 'ncircc.tasks.update_status_notification',
+        'schedule': crontab(),  # TODO: clarify the exact run time
+    },
+    'update_comments': {
+        'task': 'ncircc.tasks.update_comments',
+        'schedule': crontab(),  # TODO: clarify the exact run time
+    },
+    'ping_sensors': {
+        'task': 'devices.tasks.sensor.ping_sensors',
+        'schedule': crontab()
+    },
+    'get_disk_usage_task': {
+        'task': 'core.tasks.get_disk_usage_task',
+        'schedule': crontab()
+    },
+    'reboot_correlator_task': {
+        'task': 'correlation.tasks.reboot_correlator_task',
+        'schedule': EVERY_2_MINUTE,
+    },
+
+}
+
+######################################
+### AMC SERVICES ###
+######################################
+
+# LICENSE
+LICENSE_CLIENT_URL = os.environ.get('LICENSE_CLIENT_URL', 'http://license-client:8050')
+LICENSE_CACHE_TIMEOUT = 60 * 60  # 60 minutes
+LICENSE_FEATURE_EVENT_PROCESSING = "event_processing"
+LICENSE_OPTION_EVENT_SOURCE_COUNT = "event_sources"
+
+# NGINX
+NGINX_ENABLED_CONFIG_FILENAME = "armaconsole.nginx"
+NGINX_HTTP_CONFIG_FILENAME = "armaconsole_http.nginx"
+NGINX_HTTPS_CONFIG_FILENAME = "armaconsole_https.nginx"
+NGINX_SITES_AVAILABLE = "/usr/local/armaconsole/nginx"
+
+# CORRELATOR
+CORRELATOR_SEVERITY_LEVEL = int(os.environ.get('CORRELATOR_SEVERITY_LEVEL', 6))
+CORRELATOR_AUTO_CATEGORY_NAME = os.environ.get('CORRELATOR_AUTO_CATEGORY_NAME', gettext_lazy('Auto'))
+CORRELATOR_URL = os.environ.get('CORRELATOR_URL', 'http://correlator:5566')
+
+# VECTOR
+LOGSTASH_CONFIG_DIR = os.environ.get('LOGSTASH_CONFIG_DIR', os.path.join(PUBLIC_DIR, 'vector'))
+
+# ELASTICSEARCH
+ELASTIC_URL = os.environ.get('ELASTIC_URL', 'http://elasticsearch:9200')
+elk_split = ELASTIC_URL.replace('http://', '').split(':')
+ELK_HOST = elk_split[0] if len(elk_split) >= 1 else 'elasticsearch'
+ELK_PORT = elk_split[1] if len(elk_split) > 1 else 9200
+ELK_LOGIN = os.environ.get('ELASTIC_USER', 'elastic')
+ELK_PASS = os.environ.get('ELASTIC_PASSWORD', 'changeme')
+ELK_MAX_ENTRIES = 100000
+ELK_AGGREGATED_INDEX = 'aggregated-*'
+ELK_FIREWALL_PRODUCT_NAME = 'Industrial Firerwall'  # Yes, this is a mistake, but it is how Vector currently parses IF logs
+ELK_ENDPOINT_PRODUCT_NAME = 'Industrial Endpoint'
+
+# RABBITMQ
+RABBIT_URL = os.environ.get('RABBIT_URL', 'http://rabbitmq-management:5672')
+rabbit_split = RABBIT_URL.replace('http://', '').split(':')
+RABBIT_HOST = rabbit_split[0] if len(rabbit_split) >= 1 else 'rabbitmq-management'
+RABBIT_PORT = rabbit_split[1] if len(rabbit_split) > 1 else 5672
+
+###############################
+### OTHER ###
+###############################
+
+# ROUTING
+ROOT_URLCONF = 'console.urls'
+WSGI_APPLICATION = 'console.wsgi.application'
+ASGI_APPLICATION = 'console.asgi.application'
+LOGIN_REDIRECT_URL = 'index'
+LOGOUT_REDIRECT_URL = 'login'
+LOGIN_URL = 'login'
+
+CHANNEL_LAYERS = {
+    'default': {
+        'BACKEND': 'channels_redis.core.RedisChannelLayer',
+        'CONFIG': {
+            "hosts": [(REDIS_HOST, REDIS_PORT)],
+        },
+    },
+}
+
+# TEMPLATES
+TEMPLATES = [
+    {
+        'BACKEND': 'django.template.backends.django.DjangoTemplates',
+        'DIRS': [],
+        'APP_DIRS': True,
+        'OPTIONS': {
+            'context_processors': [
+                'django.template.context_processors.debug',
+                'django.template.context_processors.request',
+                'django.contrib.auth.context_processors.auth',
+                'django.contrib.messages.context_processors.messages',
+            ],
+        },
+    },
+]
+
+# Site info must not contain dynamic data, only static values
+SITE_INFO = {
+    'domain': 'infowatch.ru',
+    'name': 'InfoWatch ARMA',
+    'package': 'armaconsole',  # Must be a valid deb package name
+    'version': '1.4.0',  # Must be a valid deb package version
+    'architecture': 'amd64',  # Must be a valid deb package architecture
+    # These values are used in CEF format
+    'vendor': 'InfoWatch ARMA',
+    'product': 'ARMAMC'
+}
+
+# How much of the table (as a fraction) to keep while rotation occurs
+# So 0.3 means 30% of the table will stay after rotation
+SAVE_DURING_ROTATION = float(os.environ.get('SAVE_DURING_ROTATION', 0.3))
+
+EMAIL_HOST_USER = 'console@arma.com'
+EMAIL_HOST = 'localhost'
+
+# GENERATE SELFSIGNED CERTIFICATE
+TLS_CERT_DAYS = 365
+TLS_CERT_KEY_SIZE = 2048
+TLS_CERT_COUNTRY = "RU"
+TLS_CERT_STATE = "Moscow"
+TLS_CERT_LOCALITY = "Moscow"
+TLS_CERT_ORIG_NAME = "ARMA"
+TLS_CERT_UNIT_NAME = "Console"
+TLS_CERT_COMMON_NAME = "iwarma.ru"
+TLS_CERT_FILENAME = "/etc/nginx/ssl/armaconsole/nginx-selfsigned.crt"
+TLS_CERT_KEY_FILENAME = "/etc/nginx/ssl/armaconsole/nginx-selfsigned.key"
+# TODO: not used yet, needs to be wired in
+TLS_CERT_DHPARAM_FILENAME = "/etc/nginx/ssl/armaconsole/dhparam.pem"
+
+WEB_UI_PORT = int(os.environ.get('WEB_UI_PORT', 9090))
+MAX_UPLOADSIZE = 80 * 1024 * 1024
+
+# NCIRCC
+NCIRCC_DOMAIN_NAME = os.environ.get('NCIRCC_DOMAIN_NAME', 'https://test-lk.cert.gov.ru')
+NCIRCC_CERT_VERIFY = '/etc/ssl/certs/' if os.path.exists('/etc/ssl/certs/') else False
+
+# Compatible ARMAIF versions
+MINIMAL_COMPATIBLE_AIF_VERSION = "3.6"
+
+MINIMAL_VERSION_CORRELATION_RULES = '1.3.4'
+
+# Product version
+PRODUCT_VERSION = load_product_version()
diff --git a/console/settings/dev.py b/console/settings/dev.py
new file mode 100644
index 0000000..800fcf2
--- /dev/null
+++ b/console/settings/dev.py
@@ -0,0 +1,29 @@
+""" Django settings for development debug purposes """
+import os
+
+from dotenv import load_dotenv
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+load_dotenv(dotenv_path=os.path.join(BASE_DIR, '.env.dev'))
+os.environ['LOG_PATH'] = os.environ.get('LOG_PATH', os.path.join(BASE_DIR, 'dockerlogs'))
+
+WEB_PDB_PORT = os.environ.get('WEB_PDB_PORT')
+
+from .base import *
+
+SITE_INFO['name'] += ' DEBUG'
+
+PASSWORD_HASHERS = (
+    'django.contrib.auth.hashers.MD5PasswordHasher',
+)
+
+CACHES["default"] = {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}
+
+ELASTIC_URL = "http://elasticsearch:9200"
+
+# For testing purposes
+LICENSE_CACHE_TIMEOUT = 30
+
+# Security key for import/export firewall config
+SECRET_KEY = os.environ.get('SECRET_KEY', '')
+
diff --git a/console/settings/prod.py b/console/settings/prod.py
new file mode 100644
index 0000000..25f0de8
--- /dev/null
+++ b/console/settings/prod.py
@@ -0,0 +1,58 @@
+""" Django production settings for console project.
""" +import os + +import dotenv + +BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +orig_user_env = dict(os.environ.items()) + +dotenv.load_dotenv(dotenv_path=os.path.join(BASE_DIR, '.env.prod')) + +if os.path.exists('/etc/armaconsole/env.prod'): + dotenv.load_dotenv(dotenv_path='/etc/armaconsole/env.prod') + +# Convert DBC variables to our names +dbc_conf = dotenv.dotenv_values(dotenv_path=os.environ.get('DEBCONF_DBCONF_FPATH')) +debconf_dbconfig_django_map = ( + ('dbname', 'POSTGRES_DB'), + ('dbuser', 'POSTGRES_USER'), + ('dbpass', 'POSTGRES_PASSWORD'), + ('dbserver', 'POSTGRES_HOST'), + ('dbport', 'POSTGRES_PORT'), +) +for debconf_val, django_val in debconf_dbconfig_django_map: + os.environ[django_val] = dbc_conf.get(debconf_val, os.environ.get(django_val)) + +# Restore original environment, as user environment is more important +os.environ.update(orig_user_env) + +# noinspection PyUnresolvedReferences +from .base import * + +# Security key for import/export firewall config +SECRET_KEY = os.environ.get('SECRET_KEY', '') + +CACHES = { + 'default': { + 'BACKEND': 'django_redis.cache.RedisCache', + 'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1', + 'OPTIONS': { + 'CLIENT_CLASS': 'django_redis.client.DefaultClient' + } + }, + 'local': { + 'BACKEND': 'django_redis.cache.RedisCache', + 'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1', + 'OPTIONS': { + 'CLIENT_CLASS': 'django_redis.client.DefaultClient' + } + }, + 'redis': { + 'BACKEND': 'django_redis.cache.RedisCache', + 'LOCATION': f'redis://{REDIS_HOST}:{REDIS_PORT}/1', + 'OPTIONS': { + 'CLIENT_CLASS': 'django_redis.client.DefaultClient' + } + } +} diff --git a/console/settings/test.py b/console/settings/test.py new file mode 100644 index 0000000..f60b26d --- /dev/null +++ b/console/settings/test.py @@ -0,0 +1,38 @@ +""" Django settings for development debug purpose """ +import os + +from dotenv import load_dotenv + +BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +load_dotenv(dotenv_path=os.path.join(BASE_DIR, '.env.dev')) +os.environ['LOG_PATH'] = os.environ.get('LOG_PATH', os.path.join(BASE_DIR, 'dockerlogs')) +os.environ['TEST_MODE'] = "True" + +WEB_PDB_PORT = os.environ.get('WEB_PDB_PORT') + +from .base import * + +SITE_INFO['name'] += ' DEBUG' + +PASSWORD_HASHERS = ( + 'django.contrib.auth.hashers.MD5PasswordHasher', +) + +CACHES["default"] = {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'} + +ELASTIC_URL = "http://elasticsearch:9200" +LICENSE_CLIENT_URL = 'http://license-client:8050' +# For testing purpose +LICENSE_CACHE_TIMEOUT = 30 + +DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), + } +} + +DEFAULT_AUTO_FIELD = 'django.db.models.AutoField' + +# Security key for import/export firewall config +SECRET_KEY = os.environ.get('SECRET_KEY') diff --git a/console/tasks.py b/console/tasks.py new file mode 100644 index 0000000..27a6675 --- /dev/null +++ b/console/tasks.py @@ -0,0 +1,36 @@ +from __future__ import absolute_import, unicode_literals + +from celery import shared_task +from celery.utils.log import get_task_logger +from django.conf import settings +from django.core.cache import caches +from elasticsearch import Elasticsearch + +from core.utils import dtnow +from events.constants import ELK_HOST, ELK_PORT, ELK_LOGIN, ELK_PASS +from users.models import UserInfo + +_log = get_task_logger(__name__) + +MEDIA_ROOT = getattr(settings, 'MEDIA_ROOT') +CACHE_TIMEOUT = 
getattr(settings, 'REDIS_CACHE_TIMEOUT', 120) +REDIS_ELK_EVENTS_KEY = 'amount_of_aggregated_events' + + +@shared_task +def update_amount_of_aggregated_events(): + """ Task for updating the total amount of aggregated events, stored in elasticsearch """ + es = Elasticsearch([{'host': ELK_HOST, 'port': ELK_PORT}], http_auth=(ELK_LOGIN, ELK_PASS)) + es_search = es.count(index=['aggregated-*', 'system-*']) + caches['redis'].set(REDIS_ELK_EVENTS_KEY, es_search['count'], CACHE_TIMEOUT) + + +def expire_users(): + for user_info in UserInfo.objects.filter(expire_date__lte=dtnow().date(), user__is_active=True): + user_info.user.is_active = False + user_info.user.save() + + +@shared_task +def expire_users_task(): + expire_users() diff --git a/console/templates/console/login.html b/console/templates/console/login.html new file mode 100644 index 0000000..3033856 --- /dev/null +++ b/console/templates/console/login.html @@ -0,0 +1,10 @@ + + + + + + + + Management console + +
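The update_amount_of_aggregated_events task above caches the Elasticsearch document count in Redis so request handlers can read it without querying Elasticsearch. A minimal sketch of a consumer, assuming the same cache alias and key (the helper name is illustrative):

from django.core.cache import caches

def cached_aggregated_events_count() -> int:
    # Falls back to 0 when the task has not run yet or the key has expired.
    return caches['redis'].get('amount_of_aggregated_events', 0)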
\ No newline at end of file diff --git a/console/tests/__init__.py b/console/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/console/tests/test_api.py b/console/tests/test_api.py new file mode 100644 index 0000000..752f853 --- /dev/null +++ b/console/tests/test_api.py @@ -0,0 +1,34 @@ +import pytest +from rest_framework import status +from rest_framework.reverse import reverse + + +@pytest.mark.unit +@pytest.mark.django_db +class TestProductAPI: + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + self.admin_user = django_user_model.objects.get(username='admin') + + def test_get_product_version(self, api_client): + api_client.force_authenticate(self.admin_user) + response = api_client.get(reverse('api_product_version')) + assert response.status_code == status.HTTP_200_OK + assert response.data['product'] == 'InfoWatch ARMA Management Console' + assert response.data['version'] == ' 555' + + +@pytest.mark.unit +@pytest.mark.django_db +class TestConsoleAPI: + + def test_check_invalid_path(self, api_client): + response = api_client.get('/en/api/product/version/') + assert response.status_code == status.HTTP_403_FORBIDDEN # it is normal behavior, path exists + + response = api_client.get('/en/api/invalid/') + assert response.status_code == status.HTTP_404_NOT_FOUND + + response = api_client.get('/en/api/device/') + assert response.status_code == status.HTTP_404_NOT_FOUND diff --git a/console/tests/test_auth.py b/console/tests/test_auth.py new file mode 100644 index 0000000..7901789 --- /dev/null +++ b/console/tests/test_auth.py @@ -0,0 +1,42 @@ +import pytest +from rest_framework import status +from rest_framework.reverse import reverse + + +@pytest.mark.unit +@pytest.mark.django_db +class TestAuth: + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + self.admin_user = django_user_model.objects.get(username='admin') + + def test_login_with_active_user(self, api_client): + url = reverse('api_login') + + credentials = { + 'username': 'admin', + 'password': 'nimda' + } + response = api_client.post(url, data=credentials) + assert response.status_code == status.HTTP_200_OK + + credentials = { + 'username': 'admin', + 'password': 'invalid_password' + } + + response = api_client.post(url, data=credentials) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + def test_login_with_non_active_user(self, api_client): + url = reverse('api_login') + self.admin_user.is_active = False + self.admin_user.save() + + credentials = { + 'username': 'admin', + 'password': 'nimda' + } + response = api_client.post(url, data=credentials) + assert response.status_code == status.HTTP_400_BAD_REQUEST diff --git a/console/tests/test_commands.py b/console/tests/test_commands.py new file mode 100644 index 0000000..4c4a155 --- /dev/null +++ b/console/tests/test_commands.py @@ -0,0 +1,86 @@ +import json +import os +from pathlib import Path +from unittest import mock +from unittest.mock import patch + +import pytest +import tomli +from django.core.files.uploadedfile import SimpleUploadedFile + +from console.management.commands.create_vector_configs import Command as CreateVectorConfig +from console.management.commands.load_rules import Command +from correlation.models import Rule +from correlation.services.import_service import ImportRulesService +from incident.models import IncidentEffect, IncidentCategory, IncidentRecommendations + + +@pytest.fixture(autouse=True) +def test_dir(tmp_path): + with 
patch('console.management.commands.create_vector_configs.VECTOR_CONFIG_DIR', tmp_path) as test_dir:
+        yield test_dir
+
+
+def mock_correlator_task(*args, **kwargs):
+    pass
+
+
+@pytest.mark.unit
+@pytest.mark.django_db
+class TestConsoleCommands:
+
+    @pytest.fixture(autouse=True)
+    def setup_tests(self):
+        with open('console/management/commands/rules_console.json', 'r') as json_file:
+            self.json_data = json.load(json_file)
+        self.count_recommendations = self.calculate_recommendations(self.json_data)
+        self.count_effects = self.calculate_effects(self.json_data)
+        self.count_category = self.calculate_category(self.json_data)
+        self.count_rules = self.calculate_rules(self.json_data)
+
+    def test_create_recommendation_and_effects(self):
+
+        with open('console/management/commands/rules_console.json', 'rb') as test_file:
+            file = SimpleUploadedFile("rules.json", test_file.read())
+        with mock.patch('correlation.tasks.update_correlator_tasks', mock_correlator_task):
+            ImportRulesService(file).run_import()
+        assert IncidentRecommendations.objects.count() == self.count_recommendations
+        assert IncidentEffect.objects.count() == self.count_effects
+        assert IncidentCategory.objects.count() == self.count_category
+
+    def test_rule_create(self):
+        with open('console/management/commands/rules_console.json', 'r') as json_file:
+            self.json_data = json.load(json_file)
+        with mock.patch('correlation.tasks.update_correlator_tasks', mock_correlator_task):
+            command = Command()
+            command.handle()
+        assert Rule.objects.count() == self.count_rules
+
+    def test_create_config_vector(self, test_dir):
+        command = CreateVectorConfig()
+        command.handle()
+        assert len(os.listdir(test_dir)) == 8
+        config_path = Path(test_dir) / 'mc_logs_es.toml'  # one of eight
+        source_content = config_path.read_text()
+        parse_content = tomli.loads(source_content)
+        assert parse_content['sinks']['es_logs_to_es']['auth']['user'] == "elastic"
+        assert parse_content['sinks']['es_logs_to_es']['auth']['password'] == "changeme"
+        assert parse_content['sinks']['es_logs_to_es']['endpoint'] == "http://elasticsearch:9200"
+
+    def calculate_recommendations(self, data: dict) -> int:
+        return len(data['close_recommendations'])
+
+    def calculate_effects(self, data: dict) -> int:
+        return len(data['effects'])
+
+    def calculate_category(self, data: dict) -> int:
+        category = set()
+        for rule in data.get('rules', []):
+            for action in rule['actions_json']:
+                if action['type'] == 'incident' and action['category'] != '':
+                    # each rule has only one incident category, but it is stored in a list
+                    category.add(action['category'][0]['name'])
+        return len(category)
+
+    def calculate_rules(self, data: dict) -> int:
+        return len(data['rules'])
diff --git a/console/tests/test_data/file b/console/tests/test_data/file
new file mode 100644
index 0000000..e69de29
diff --git a/console/tests/test_data/test_certificate.crt b/console/tests/test_data/test_certificate.crt
new file mode 100644
index 0000000..fcf2a17
--- /dev/null
+++ b/console/tests/test_data/test_certificate.crt
@@ -0,0 +1,21 @@
+-----BEGIN CERTIFICATE-----
+MIIDhTCCAm2gAwIBAgIUEa17zGmRu6NjYfpIoWft1Y1aE4AwDQYJKoZIhvcNAQEL
+BQAwUjELMAkGA1UEBhMCUlUxDzANBgNVBAgMBk1vc2NvdzEPMA0GA1UEBwwGTW9z
+Y293MQ0wCwYDVQQKDARBUk1BMRIwEAYDVQQDDAlpd2FybWEucnUwHhcNMjExMTIz
+MTMyNjI1WhcNMjIxMTIzMTMyNjI1WjBSMQswCQYDVQQGEwJSVTEPMA0GA1UECAwG
+TW9zY293MQ8wDQYDVQQHDAZNb3Njb3cxDTALBgNVBAoMBEFSTUExEjAQBgNVBAMM
+CWl3YXJtYS5ydTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJvme/oG
+F18guJMCrPw+Ij2qN4WdsxGkvvUywgZ5sCHYEXnW/CV41dCe2c5d9sl3sIztF1wD 
+HAfQmiAKmosOnWVAatxxTjPTajQiXYBY8hrRownD5XTxMhQ0yLVXcVB/ibeTc1rL +FfxdagpgoWvjUkJqIX2vy7Ec+6XGQ7t+Bn084tWdUxZuHXgRQkY+SsSWY/9r0/ph +3q/DwbC7veb5YDUPmcpgPcRyGan+24RYHg20VMS70gDWPEFjr4U3jNFFjsir9m++ +MokND5WegjxrivU9BGAON1gzEJ7qf10efJgJIpsEHpDL6oh1FSen5DxbzRbYEzVm +qqQFdYwRhxtnJ6UCAwEAAaNTMFEwHQYDVR0OBBYEFKvrJpCyxSWlnEaAQXNQ8pwe +IG6tMB8GA1UdIwQYMBaAFKvrJpCyxSWlnEaAQXNQ8pweIG6tMA8GA1UdEwEB/wQF +MAMBAf8wDQYJKoZIhvcNAQELBQADggEBAHpyDKF1AaE+RtnLJcUmZZ39qJO+Hbzb +jLClScBO40EEUhxyvE7TztvvIhEc5aN11QvKW7CbMlWkjoTgAb8Q7JY1wChhsZch +ApFEfnlMJbQujRZbijnhd4XCo4N8DFjm/hI4T08cVlSnRrfufCVA3n7Q66YhbpLu +w+nOgGu17Egem7Gqm1VzoJjGDNVfKXTUNt85nO5MiWUEv49sRpvFEUb26inj2ly2 +hK8kfjrZYZztGgMqOmm0upvjlYqEMgTWi6lPjfH+tpodE9fRTIalAqnsBFPqxYB8 +5P3/sCbpi8PjNZVKK1Zfk0o/P266xFUMHbLTcJ0bitSK2BnZd2FjD5g= +-----END CERTIFICATE----- diff --git a/console/tests/test_data/test_certificate.key b/console/tests/test_data/test_certificate.key new file mode 100644 index 0000000..17e215b --- /dev/null +++ b/console/tests/test_data/test_certificate.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCb5nv6BhdfILiT +Aqz8PiI9qjeFnbMRpL71MsIGebAh2BF51vwleNXQntnOXfbJd7CM7RdcAxwH0Jog +CpqLDp1lQGrccU4z02o0Il2AWPIa0aMJw+V08TIUNMi1V3FQf4m3k3NayxX8XWoK +YKFr41JCaiF9r8uxHPulxkO7fgZ9POLVnVMWbh14EUJGPkrElmP/a9P6Yd6vw8Gw +u73m+WA1D5nKYD3Echmp/tuEWB4NtFTEu9IA1jxBY6+FN4zRRY7Iq/ZvvjKJDQ+V +noI8a4r1PQRgDjdYMxCe6n9dHnyYCSKbBB6Qy+qIdRUnp+Q8W80W2BM1ZqqkBXWM +EYcbZyelAgMBAAECggEAZgNXtK8vs+uxW2Ew2I2/0iID2sbqWe03lzLzi1EtU7gT +F2r563s6Thzj4QnTZtmPCWhLFMZkHSj+8WB+5+dUTuDsC/uIdJkusZabIFUTd0Ee +MlGhX/hEELzJaQAjAc2yzBEMNqkVZeGqdcftWK+TMCpKZ/gUjXytnjgyMgmxLVkY +iZfAodlnswmheN5ItvEVhji+44rCjIC/3fz1IG9U+2KUxPx/LjMy16zazkZzThEV +UG3GqOZg2ncS2bNFNiG6HhbyghF0xo33Hme46rwVh49kErcyjHUvYG3gctK9QliU +N0B0YiaSRBHguGoIUJhPI2wbdo4ToqSEWxfUkxrJgQKBgQDK9vVW5PgsssUzpRGx +Gx/jKMUMCQJKHBm73WFdBpGKCeZZcpnfNDzANxhBTtumAy8d5HHVrI78X68Lt3CW +/8XWir6GiDLp+KEUflPBS+FuHGFtNRoEnxoDeqqTSS4XkjhMjVK976LtJOSsNcAN +WHrMGzy+3RUkosKy01NLJpnanQKBgQDEozigg7vAahLjqFC4AQm8CX+1mkFWf1kM +iv8k+zfhYA3o4QUrIxgLX4+lfBZIr+xrnUqu/r1jpXwcpcrJihvh+33TeKEXX+iR +c2vy2sfD2sM2ft3pfXmj7IKV2VKO4AFdlo9IBO4gi8ZAPhXG7HiuSyyKXahzlyT3 +ZlBZEgJOqQKBgQC8KhPtyTzjg6Ebbg9m2DTJzkRQEhITTtX0uxrGuY44IVFy37IT +okQoF2vfMBKmaBFIbz+xztaI1tRb0mcJNnrdmadk5eP90cjUTQGtFIIcKSeRrUc/ +vZjKXPDCt3eJ1r9nCSYKfJ3ZqPhvRy2TsdG2ZBH/CMvPOS2zyANSiqjcaQKBgHxp +bPWaA5udQXBK2S9icL9JH0VOYNKSZkwaUY4baGKvPH3AiV2eqaLghmlElnDM5f+8 +mDkaMcevN1SEzUYwnK2hSh4Xb4zzgJkudvlD1Sqk6eg74rnNSr4dcQ3QX3zIW/TT +wrnlbKio5vlUjsC6cyyLoZW15lOkKJ5jXKjOTSlpAoGBAJO5qSArSmPaPhiZjbRK +9dLnzTW36BnNm7SIx5/lXtJJjApXKI3xQxljXbsUnyHM6iInzEQdOFHrX1ThGD4M +oWW8DgmO40D7yT2VeATyEDn16QEplvw1qX8pX+zjuy9U990fG+numudtaI+y4VAp +9OAtk5UPuNihFY/b6FDcxaOZ +-----END PRIVATE KEY----- diff --git a/console/tests/test_elastic.py b/console/tests/test_elastic.py new file mode 100644 index 0000000..61850a8 --- /dev/null +++ b/console/tests/test_elastic.py @@ -0,0 +1,38 @@ +import pytest +from elasticsearch import Elasticsearch + +from events.constants import ELK_HOST, ELK_PORT, ELK_LOGIN, ELK_PASS +from rotation.tasks import delete_elasticsearch_indexes_by_template + +TEST_EXCLUDE_INDEXES = [ + ({'test-index-1'}, 1), + ({'test-index-1', 'test-index2'}, 1), +] + + +class TestDeleteElasticsearchIndexes: + # TODO: add random index name + index_name = 'test-index' + + @pytest.fixture(autouse=True) + def setup_tests(self): + self.es = Elasticsearch([{'host': ELK_HOST, 'port': ELK_PORT}], http_auth=(ELK_LOGIN, ELK_PASS)) + for i in range(3): + 
self.es.indices.create(index=f'{self.index_name}-{i}', ignore=400)
+        yield
+        self.es.indices.delete(index=f'{self.index_name}-*', ignore=[400, 404])
+
+    @pytest.mark.integration
+    def test_delete_all_index_by_template(self):
+        index_template = f'{self.index_name}-*'
+        index_count = len(self.es.indices.get_alias(index=index_template).keys())
+        assert index_count == 3
+        delete_elasticsearch_indexes_by_template(index_template)
+        assert len(self.es.indices.get_alias(index=index_template)) == 0
+
+    @pytest.mark.parametrize('exclude_indexes,expected', TEST_EXCLUDE_INDEXES)
+    @pytest.mark.integration
+    def test_delete_index_by_template_with_exclude_index(self, exclude_indexes: set, expected: int):
+        index_template = f'{self.index_name}-*'
+        delete_elasticsearch_indexes_by_template(index_template, es=self.es, exclude_indexes=exclude_indexes)
+        assert len(self.es.indices.get_alias(index=index_template, ignore=[400, 404])) == expected
diff --git a/console/tests/test_extension_validator.py b/console/tests/test_extension_validator.py
new file mode 100644
index 0000000..a81ed94
--- /dev/null
+++ b/console/tests/test_extension_validator.py
@@ -0,0 +1,60 @@
+import os
+from pathlib import Path
+
+from django.core.exceptions import ValidationError
+from django.utils.translation import gettext_lazy
+
+from core.validators import ValidateFileExtension
+
+import pytest
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+file_json = os.path.join(BASE_DIR, "tests", "test_data/file.json")
+file_zip = os.path.join(BASE_DIR, "tests", "test_data/file.zip")
+file_without_extension = os.path.join(BASE_DIR, "tests", "test_data/file")
+
+
+class TestExtensionValidator:
+
+    @pytest.mark.unit
+    def test_raise_error_if_extension_incorrect(self):
+        """
+        The test fails if the validator did not raise a ValidationError for an invalid file
+        """
+
+        validator = ValidateFileExtension(allowed_extensions=['.zip'])
+        ext = Path(file_json).suffix.lower()
+        try:
+            validator(open(file_json, 'rb'))
+        except ValidationError as e:
+            assert str(e.message) == gettext_lazy(
+                "File extension '{extension}' is not allowed. Allowed extensions are: '{allowed_extensions}'.").format(
+                extension=ext, allowed_extensions=', '.join(validator.allowed_extensions))
+        else:
+            assert False, 'Validator not working'
+
+    @pytest.mark.unit
+    def test_works_with_several_extensions(self):
+        """
+        The test fails if the validator raises a ValidationError for a correct file
+        """
+        validator = ValidateFileExtension(allowed_extensions=['.zip', '.json'])
+        try:
+            validator(open(file_zip, 'rb'))
+        except ValidationError:
+            assert False, 'Validator not working'
+        else:
+            assert True
+
+    @pytest.mark.unit
+    def test_works_with_files_without_extension(self):
+        """
+        The test fails if the validator did not raise a ValidationError for a file without an extension
+        """
+        validator = ValidateFileExtension(allowed_extensions=['.zip', '.json'])
+        try:
+            validator(open(file_without_extension, 'rb'))
+        except ValidationError:
+            assert True
+        else:
+            assert False, 'Validator not working'
diff --git a/console/tests/test_filters.py b/console/tests/test_filters.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/console/tests/test_filters.py
@@ -0,0 +1 @@
+
diff --git a/console/tests/test_utils.py b/console/tests/test_utils.py
new file mode 100644
index 0000000..340e73e
--- /dev/null
+++ b/console/tests/test_utils.py
@@ -0,0 +1,181 @@
+import http
+import os
+import socket
+import time
+
+import pytest
+from django.core.exceptions import ObjectDoesNotExist
+from django.urls import reverse
+from pytest_django.live_server_helper import LiveServer
+
+from perms.models import Perm
+from users.models import UserInfo
+
+DEFAULT_USER = 'admin'
+DEFAULT_PASSWORD = 'nimda'
+
+TEST_LANGS = sorted(('ru', 'en'))
+TEST_TIMEZONES = sorted(('UTC', 'Europe/Moscow', 'Europe/Paris', 'America/New_York'))
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+CUR_DIR = os.path.abspath(os.getcwd())
+TIMEOUT = 10  # seconds before a timeout exception is raised
+POLL_TIMEOUT = 0.3  # interval between poll attempts, e.g. while waiting for an element
+
+from django.conf import settings
+
+LOGS_F = os.path.join(getattr(settings, 'LOG_PATH', ''), getattr(settings, 'USER_LOG_FILENAME', ''))
+
+
+@pytest.fixture(scope='session')
+def test_server() -> LiveServer:
+    """ Custom fixture for creating a live test server """
+    addr = socket.gethostbyname(socket.gethostname())
+    server = LiveServer(addr)
+    try:
+        yield server
+    finally:
+        server.stop()
+
+
+def find_message_in_log_file(file, message):
+    """ Function for finding a certain string in a log file
+    :param file: Log file path
+    :param message: message that needs to be found
+    :return:
+        (True, message) if the message is found
+        (False, message) if it is not
+    """
+    with open(file, encoding='utf-8') as f:
+        if message in f.read():
+            return True, message
+        else:
+            return False, message
+
+
+@pytest.fixture
+def get_url(test_server):
+    """ Get url from liveserver
+
+    :param url: Url's name
+    :param kwargs: Dictionary with arguments for reverse function
+    :return: string with url
+    """
+
+    def _get_url(url, kwargs=None):
+        return test_server.url + reverse(url, kwargs=kwargs)
+
+    return _get_url
+
+
+@pytest.fixture
+def add_user_with_permissions(django_user_model):
+    """ Add user with selected permissions
+
+    :param username: User name
+    :param password: User password
+    :param permissions: List of Perm permissions that will be added to the created user
+    :param is_superuser: If True - user will be superuser
+    :return: created user with updated permissions cache
+    """
+
+    def _login_user(username, password, permissions=(), is_superuser=False):
+        user = django_user_model.objects.create_user(username=username, password=password)
+        if is_superuser:
+            user.is_superuser = True
+            user.save()
+
+        UserInfo(user=user).save()
+
+        for cur in permissions:
+            user.user_permissions.add(Perm.get_rights(cur))
+
+        return django_user_model.objects.get(pk=user.pk)
+
+    return _login_user
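# Illustrative use of the fixture above (sketch only; the test body and the
# Perm.can_view_asset permission are assumptions, not part of this module):
#
#     def test_asset_page_requires_perm(add_user_with_permissions, client):
#         user = add_user_with_permissions(username='viewer', password='secret',
#                                          permissions=[Perm.can_view_asset])
#         client.force_login(user)
#         response = client.get('/some/protected/url/')
#         assert response.status_code == 200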
+
+
+def wait_db_element(query_method, message='', timeout=TIMEOUT, poll=POLL_TIMEOUT):
+    end_time = time.time() + timeout
+    screen, stacktrace = None, None
+    while True:
+        try:
+            e = query_method()
+            if e:
+                return e
+        except ObjectDoesNotExist as exc:
+            message = message or str(exc)
+            screen = getattr(exc, 'screen', None)
+            stacktrace = getattr(exc, 'stacktrace', None)
+        time.sleep(poll)
+        if time.time() > end_time:
+            break
+    # TODO: Maybe add here screen and stacktrace from above
+    raise RuntimeError(message)
+
+
+class PermApiBaseTest:
+    """ Test class for checking API permissions
+    @param get_url: URL to check
+    @param test_server: testing server instance
+    @param api_list: Fixture with API for testing in the following format:
+        [Permission_to_check, URL of api, arguments for API (optional)]
+        e.g. with arguments
+        [[Perm.can_export_events], 'api_change_event_export', dict(state='disable')]
+        e.g. without arguments
+        [[Perm.can_export_events], 'api_change_event_export']
+    """
+
+    def test_api_without_perms(self, get_url, test_server, api_list, add_user_with_permissions, client):
+        username = 'user_with_no_perms'
+        password = 'nimda'
+        user = add_user_with_permissions(username=username, password=password)
+        client.force_login(user)
+        if len(api_list) == 2:
+            response = client.get(get_url(api_list[1]))
+        elif len(api_list) == 3:
+            response = client.get(get_url(api_list[1], api_list[2]))
+        else:
+            assert False, f'Incorrect format of api_list instance: {api_list}'
+        assert response.status_code == http.HTTPStatus.FORBIDDEN
+
+    def test_api_with_perms(self, get_url, test_server, add_user_with_permissions, api_list, client):
+        # A username distinct from DEFAULT_USER is used because in some cases login
+        # failed due to the existence of a user with that username.
+        # Fixed by @lvlukianenko in recent commits
+        username = 'user_with_perms'
+        password = 'nimda'
+        user = add_user_with_permissions(username=username, password=password, permissions=api_list[0])
+        client.force_login(user)
+        if len(api_list) == 2:
+            response = client.get(get_url(api_list[1]))
+        elif len(api_list) == 3:
+            response = client.get(get_url(api_list[1], api_list[2]))
+        else:
+            assert False, f'Incorrect format of api_list instance: {api_list}'
+        assert response.status_code == http.HTTPStatus.OK
+
+# TODO: Move to integration tests
+# class TestDeleteElasticsearchIndexes:
+#     index_name = 'test-index'
+#
+#     @pytest.fixture(autouse=True)
+#     def setup_tests(self):
+#         self.es = Elasticsearch([{'host': ELK_HOST, 'port': ELK_PORT}], http_auth=(ELK_LOGIN, ELK_PASS))
+#         for i in range(3):
+#             self.es.indices.create(index=f'{self.index_name}-{i}', ignore=400)
+#         yield
+#         self.es.indices.delete(index=f'{self.index_name}-*', ignore=[400, 404])
+#
+#     def test_delete_all_index_by_template(self):
+#         index_template = f'{self.index_name}-*'
+#         index_count = len(self.es.indices.get_alias(index=index_template).keys())
+#         assert index_count == 3
+#         delete_elasticsearch_indexes_by_template(index_template)
+#         assert len(self.es.indices.get_alias(index=index_template)) == 0
+#
+#     @pytest.mark.parametrize('exclude_indexes,expected', TEST_EXCLUDE_INDEXES)
+#     def test_delete_index_by_template_with_exclude_index(self, exclude_indexes: set, expected: int):
+#         index_template = f'{self.index_name}-*'
+#         delete_elasticsearch_indexes_by_template(index_template, es=self.es, exclude_indexes=exclude_indexes)
+#         assert len(self.es.indices.get_alias(index=index_template, ignore=[400, 404])) == expected
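PermApiBaseTest is meant to be subclassed together with an api_list fixture that supplies the entries described in its docstring. A sketch under those assumptions (the permission and URL names reuse the docstring example; the class name is illustrative, and the pytest and Perm imports from this module are assumed):

class TestEventExportPerms(PermApiBaseTest):
    @pytest.fixture(params=[
        [[Perm.can_export_events], 'api_change_event_export', dict(state='disable')],
    ])
    def api_list(self, request):
        # Each param is one [permissions, url_name, url_kwargs] entry.
        return request.param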
diff --git a/console/tests/test_views.py b/console/tests/test_views.py
new file mode 100644
index 0000000..052a9dd
--- /dev/null
+++ b/console/tests/test_views.py
@@ -0,0 +1,25 @@
+import pytest
+from django.test import TestCase, Client
+from django.urls import reverse
+
+
+class TestViews(TestCase):
+    @pytest.mark.unit
+    def test_en_translate_page(self):
+        """ Test that the English version of the test page opens """
+        client = Client()
+        url = reverse('login')
+        response = client.get(url[url.find('/', 1):], HTTP_ACCEPT_LANGUAGE='en', follow=True)
+        self.assertEqual(response.status_code, 200)
+
+    @pytest.mark.unit
+    def test_ru_translate_page(self):
+        """ Test that the Russian version of the test page opens """
+        client = Client()
+        url = reverse('login')
+        response = client.get(url[url.find('/', 1):], HTTP_ACCEPT_LANGUAGE='ru', follow=True)
+        self.assertEqual(response.status_code, 200)
+
+
+
+
diff --git a/console/urls.py b/console/urls.py
new file mode 100644
index 0000000..6d90a9f
--- /dev/null
+++ b/console/urls.py
@@ -0,0 +1,27 @@
+import logging
+
+from django.conf import settings
+from django.conf.urls.i18n import i18n_patterns
+from django.contrib import admin
+from django.urls import path, include, re_path
+
+from .views.index import IndexView, LicenseActivationView, LoginTemplateView
+
+logger_error = logging.getLogger('console.user.error')
+
+
+urlpatterns = [
+    re_path(r'^login/$', LoginTemplateView.as_view(), name='login'),
+    re_path(r'^license/$', LicenseActivationView.as_view(), name="license_activation"),
+] + i18n_patterns(re_path(r'^api/', include('console.api_urls')),)
+
+
+if getattr(settings, 'ADMIN_PANEL_ENABLED', False):
+    urlpatterns += i18n_patterns(
+        path('admin/', admin.site.urls),
+    )
+
+# must stay at the end of the file because it matches every remaining pattern after /
+urlpatterns += [
+    re_path(r'^', IndexView.as_view(), name='index'),
+]
diff --git a/console/utils.py b/console/utils.py
new file mode 100644
index 0000000..2a4179f
--- /dev/null
+++ b/console/utils.py
@@ -0,0 +1,10 @@
+from django.contrib.auth.models import User
+from django.db.models import Q, QuerySet
+
+from perms.models import Perm
+
+
+def get_users_by_perm(perm: str) -> QuerySet:
+    """Return user list by permission or superuser."""
+    _perms = Perm.get_rights(perm)
+    return User.objects.filter(Q(user_permissions=_perms) | Q(groups__permissions=_perms) | Q(is_superuser=True)).distinct()
diff --git a/console/views/__init__.py b/console/views/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/console/views/index.py b/console/views/index.py
new file mode 100644
index 0000000..b136a01
--- /dev/null
+++ b/console/views/index.py
@@ -0,0 +1,35 @@
+import json
+
+from django.contrib.auth.mixins import LoginRequiredMixin
+from django.http import Http404
+from django.views.generic import TemplateView
+
+
+class IndexView(LoginRequiredMixin, TemplateView):
+    """ View for displaying index page."""
+    http_method_names = ['get']
+    template_name = 'console/index.html'
+
+    def get_context_data(self, **kwargs) -> dict:
+        context = super().get_context_data(**kwargs)
+        license_info = {}
+        user = self.request.user
+        if user.is_authenticated:
+            context['user_permissions'] = 
json.dumps({}) + context['license'] = json.dumps(license_info) + return context + + +class LoginTemplateView(TemplateView): + http_method_names = ['get'] + template_name = 'console/login.html' + + +class LicenseActivationView(TemplateView): + """View for activating license.""" + http_method_names = ['get'] + template_name = 'license_info/license_activation.html' + + +def page_not_found(request): + raise Http404 diff --git a/console/wsgi.py b/console/wsgi.py new file mode 100644 index 0000000..7ed4d6f --- /dev/null +++ b/console/wsgi.py @@ -0,0 +1,15 @@ +""" WSGI config for console project. + +It exposes the WSGI callable as a module-level variable named ``application``. + +For more information on this file, see +https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/ +""" + +import os + +from django.core.wsgi import get_wsgi_application + +os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'console.settings.dev') + +application = get_wsgi_application() diff --git a/core/__init__.py b/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/admin.py b/core/admin.py new file mode 100644 index 0000000..6012e08 --- /dev/null +++ b/core/admin.py @@ -0,0 +1,13 @@ +from django.contrib import admin + +from core import models +from core.mixins import JsonWidgetMixin + + +class UsernameLoginAuthCheckAdmin(JsonWidgetMixin, admin.ModelAdmin): + list_display = ('user', 'is_username_auth_blocked', 'username_unlock_time') + + +admin.site.register(models.UsernameLoginAuthCheck, UsernameLoginAuthCheckAdmin) +admin.site.register(models.ConsoleAuthSettings) +admin.site.register(models.TLSSettings) diff --git a/core/apps.py b/core/apps.py new file mode 100644 index 0000000..26f78a8 --- /dev/null +++ b/core/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class CoreConfig(AppConfig): + name = 'core' diff --git a/core/backends/ConsoleAuthBackend.py b/core/backends/ConsoleAuthBackend.py new file mode 100644 index 0000000..5ea5438 --- /dev/null +++ b/core/backends/ConsoleAuthBackend.py @@ -0,0 +1,64 @@ +from datetime import date + +from django.contrib.auth.backends import BaseBackend +from django.contrib.auth.models import User +from django.core.exceptions import ObjectDoesNotExist +from django.utils.translation import gettext_lazy + +from core.extensions import ValidationError +from core.models import UsernameLoginAuthCheck +from core.services.authentication import create_authentication_log_message, handle_login_attempt +from users.models import UserInfo + + +class ConsoleAuthSystem(BaseBackend): + def authenticate(self, request, username=None, password=None): + if request.META.get("HTTP_X_FORWARDED_FOR"): + ip_address = request.META.get("HTTP_X_FORWARDED_FOR") + elif request.META.get("REMOTE_ADDR"): + ip_address = request.META.get("REMOTE_ADDR") + else: + ip_address = None + try: + logging_user = User.objects.get(username=username) + if self.check_user_expire_date(logging_user): + logging_user.is_active = False + logging_user.save() + raise ValidationError(gettext_lazy("The credentials have expired")) + except User.DoesNotExist: + create_authentication_log_message('attempt', f'[{username}] does not exist', ip_address) + return None + attempt_username_data = UsernameLoginAuthCheck.objects.get_or_create(user=logging_user)[0] + if logging_user.check_password(password): + login_allowed, message = handle_login_attempt(attempt_username_data, True, ip_address) + if login_allowed: + return logging_user + else: + raise ValidationError(message) + else: + _, message = 
handle_login_attempt(attempt_username_data, False, ip_address)
+            raise ValidationError(message)
+
+    def get_user(self, user_id):
+        try:
+            return User.objects.get(pk=user_id)
+        except User.DoesNotExist:
+            return None
+
+    def check_user_expire_date(self, user):
+        """Check whether the user's account has expired: returns True if the
+        expiration date has passed, False otherwise. Superusers never expire.
+        """
+        if user.is_superuser:
+            return False
+        try:
+            user_info = UserInfo.objects.get(user=user)
+            if user_info.expire_date < date.today():
+                return True
+            else:
+                return False
+        except ObjectDoesNotExist:
+            UserInfo.objects.create(user=user, expire_date=date.today())
+            return False
+        except TypeError:
+            return False
diff --git a/core/backends/__init__.py b/core/backends/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/core/backends/filters.py b/core/backends/filters.py
new file mode 100644
index 0000000..d668ec1
--- /dev/null
+++ b/core/backends/filters.py
@@ -0,0 +1,38 @@
+import logging
+
+from django.db.models import Q
+from rest_framework import filters
+
+from django.db.models.fields import CharField, TextField, EmailField, SlugField, URLField, UUIDField
+
+_log = logging.getLogger()
+
+
+class SearchAllFieldsBackend(filters.BaseFilterBackend):
+    """DRF search filter that matches against all text fields"""
+    def get_search_terms(self, request):
+        """
+        Search terms are set by a ?search=... query parameter.
+        """
+        params = request.query_params.get('search', '')
+        params = params.replace('\x00', '')  # strip null characters
+        return params
+
+    def filter_queryset(self, request, queryset, view):
+        def is_type(field):
+            checked_types = [CharField, TextField, EmailField, SlugField, URLField, UUIDField]
+            for field_type in checked_types:
+                if isinstance(field, field_type):
+                    return True
+            return False
+
+        search = self.get_search_terms(request)
+        if not search:
+            return queryset
+
+        fields = [field.name for field in queryset.model._meta.fields if is_type(field) and field.choices is None]
+
+        queries = Q()
+        for field in fields:
+            queries |= Q(**{f'{field}__icontains': search})
+        return queryset.filter(queries)
diff --git a/core/constants.py b/core/constants.py
new file mode 100644
index 0000000..123eed8
--- /dev/null
+++ b/core/constants.py
@@ -0,0 +1,2 @@
+DEFAULT_CERT_FILENAME = "certificate.crt"
+DEFAULT_KEY_FILENAME = "certificate.key"
\ No newline at end of file
diff --git a/core/decorators.py b/core/decorators.py
new file mode 100644
index 0000000..93e3125
--- /dev/null
+++ b/core/decorators.py
@@ -0,0 +1,15 @@
+import logging
+from functools import wraps
+
+from console import conslog
+
+_log = logging.getLogger(__name__)
+
+
+def log_url(view):
+    @wraps(view)
+    def wrapper(request, *args, **kwargs):
+        conslog.add_info_log(conslog.url_access_log(request), _log)
+        return view(request, *args, **kwargs)
+
+    return wrapper
diff --git a/core/extensions.py b/core/extensions.py
new file mode 100644
index 0000000..9dd8aa5
--- /dev/null
+++ b/core/extensions.py
@@ -0,0 +1,6 @@
+from rest_framework import status
+from rest_framework.exceptions import APIException
+
+
+class ValidationError(APIException):
+    status_code = status.HTTP_400_BAD_REQUEST
\ No newline at end of file
diff --git a/core/fields.py b/core/fields.py
new file mode 100644
index 0000000..348940d
--- /dev/null
+++ b/core/fields.py
@@ -0,0 +1,21 @@
+import itertools
+
+from django.core.validators import MinValueValidator, MaxValueValidator
+from django.db import models
+
+
+class IntegerField(models.IntegerField):
+    def __init__(self, *args, 
min_value=None, max_value=None, validators=tuple(), **kwargs): + self.min_value, self.max_value = min_value, max_value + if min_value is not None: + validators = itertools.chain((MinValueValidator(min_value),), validators) + if max_value is not None: + validators = itertools.chain((MaxValueValidator(max_value),), validators) + super().__init__(*args, validators=validators, **kwargs) + + def formfield(self, **kwargs): + if self.min_value is not None: + kwargs['min_value'] = self.min_value + if self.max_value is not None: + kwargs['max_value'] = self.max_value + return super().formfield(**kwargs) diff --git a/core/middleware.py b/core/middleware.py new file mode 100644 index 0000000..22c4888 --- /dev/null +++ b/core/middleware.py @@ -0,0 +1,59 @@ +import logging + +import pytz +from django.conf import settings +from django.utils import timezone + +from license_info.tools import get_license_info +from users.services.userinfo import UserStatusService +from license_info.exeptions import LicenseException + +_log = logging.getLogger(__name__) + + +class TimezoneMiddleware: + def __init__(self, get_response): + self.get_response = get_response + + def __call__(self, request): + if request.user.is_authenticated: + timezone_name = getattr(getattr(request.user, 'userinfo', None), 'timezone', None) + timezone_name = timezone_name or getattr(settings, 'DEFAULT_CURRENT_TIMEZONE', None) or 'UTC' + timezone.activate(pytz.timezone(timezone_name)) + service = UserStatusService(request.user) + service.set_status() + else: + timezone.deactivate() + + response = self.get_response(request) + timezone.deactivate() + return response + + +class LicenseMiddleware: + """Check if user activate license""" + + def __init__(self, get_response): + self.get_response = get_response + # One-time configuration and initialization. 
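#        Illustrative downstream use of the request.license attribute this
#        middleware attaches in __call__ below (view code is a sketch with an
#        assumed JsonResponse import, not part of this module):
#            def license_badge(request):
#                return JsonResponse({'licensed': request.license is not None})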
+ + def __call__(self, request): + # Get license + info = None + try: + info = get_license_info() + except LicenseException: + _log.error("Can't get license information") + + # Append it to request + request.license = info + + # Return request + response = self.get_response(request) + + # # Allow only license urls + # # Don't use reverse here, because we need several URL's to allow + # if info is None and "license" not in request.path: + # return LicenseInfoAPIView.as_view(request) + + return response diff --git a/core/migrations/__init__.py b/core/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/mixins.py b/core/mixins.py new file mode 100644 index 0000000..1ff19ce --- /dev/null +++ b/core/mixins.py @@ -0,0 +1,124 @@ +import logging + +from django.db import models +from django.http import HttpResponseRedirect +from django_json_widget.widgets import JSONEditorWidget +from rest_framework import mixins, status +from rest_framework.decorators import action +from rest_framework.response import Response +from rest_framework.reverse import reverse + +from console import conslog +from perms.models import Perm +from storage.tasks import export_task + +_log = logging.getLogger(__name__) + + +class ExportMixin: + """ + Mixin for exporting data in different formats + We get the initial queryset and query-parameters and perform filtering inside the task using django_filters + """ + + def export(self, request, export_type, *args, **kwargs): + # Getting data for export + queryset = self.get_queryset() + query_parameters = request.query_params + model_name = queryset.model._meta.model_name + + result = export_task.apply_async(args=(export_type, request.user.pk, model_name, query_parameters)) + data_storage_id = result.get() + + download_url = reverse('store-download', kwargs={"pk": data_storage_id}) + return HttpResponseRedirect(download_url) + + +class ExportToCsvMixin(ExportMixin): + """ Mixin for exporting data as CSV format """ + + @action(detail=False, name='csv-export', methods=['GET']) + def csv_export(self, request, *args, **kwargs): + return self.export(request, export_type='csv') + + +class ExportToJSONMixin(ExportMixin): + """ Mixin for exporting data as JSON format """ + + @action(detail=False, name='json-export', methods=['GET']) + def json_export(self, request, *args, **kwargs): + return self.export(request, export_type='json') + + +class DestroyModelResponseStatus200Mixin(mixins.DestroyModelMixin): + """ This mixin allow ViewSet to log destroy object process """ + + def destroy(self, request, *args, **kwargs): + user, url = request.user, request.get_full_path() + message = f"User [{user}] accessed <{url}> page" + _log.info(message) + instance = self.get_object() + self.perform_destroy(instance) + message = f'User [{user}] perform destroy of object [{instance}] of type [{self.Meta.model.__name__}]' + _log.info(message) + return Response(status=status.HTTP_200_OK) + + +class ApiPermissionCheckMixin: + """ API for checking the permissions if currently authenticated user has access to API + Usage: + + from perms.models import Perm + + class TestViewSet(ApiPermissionCheckMixin, ...): + console_permissions = [Perm.can_view_network, ...] + ...code... + + User can set permissions for any action, or set default permissions. 
+ + class TestViewSet(ApiPermissionCheckMixin, ...): + console_permissions = { + "create": [Perm.can_create_asset], + "delete": [Perm.can_delete_asset], + "default": [Perm.can_view_asset] + } + If for current action, no permissions is set, class will check 'default' list. + """ + console_permissions = [] + + def check_permissions(self, request): + """ + Actual permission check + """ + if isinstance(self.console_permissions, (list, tuple)): + perms = [Perm.perm_req(item) for item in self.console_permissions] + + else: + if self.action in self.console_permissions: + perms = [Perm.perm_req(item) for item in self.console_permissions[self.action]] + elif 'default' in self.console_permissions: + perms = [Perm.perm_req(item) for item in self.console_permissions['default']] + else: + perms = [] + + if not request.user.has_perms(perms): + self.permission_denied(request) + + return super().check_permissions(request) + + +class JsonWidgetMixin: + """ + Mixin to override default json field with new widget + """ + formfield_overrides = { + models.JSONField: {'widget': JSONEditorWidget}, + } + + +class LogURLMixin: + """Log every user action""" + + def dispatch(self, request, *args, **kwargs): + conslog.add_info_log(conslog.url_access_log(request), _log) + return super().dispatch(request, *args, **kwargs) diff --git a/core/models.py b/core/models.py new file mode 100644 index 0000000..fad1551 --- /dev/null +++ b/core/models.py @@ -0,0 +1,52 @@ +import logging +from datetime import timedelta + +from django.contrib.auth.models import User +from django.core.validators import MaxValueValidator, MinValueValidator, FileExtensionValidator +from django.db import models +from django.utils.translation import gettext_lazy +from solo.models import SingletonModel + +_log = logging.getLogger(__name__) + + +class TLSSettings(SingletonModel): + singleton_instance_id = 1 + enabled = models.BooleanField(verbose_name=gettext_lazy("Enable TLS"), default=False) + certificate = models.FileField(verbose_name=gettext_lazy("Certificate"), + help_text=gettext_lazy("TLS certificate in PEM format"), + validators=[FileExtensionValidator(allowed_extensions=["crt", "pem"])], + null=True, blank=True) + key = models.FileField(verbose_name=gettext_lazy("Key"), help_text=gettext_lazy("TLS certificate's key"), + validators=[FileExtensionValidator(allowed_extensions=["key"])], + null=True, blank=True) + + +class UsernameLoginAuthCheck(models.Model): + """ Model for storing authentication data for certain usernames """ + user = models.OneToOneField(User, on_delete=models.CASCADE, primary_key=True) + is_username_auth_blocked = models.BooleanField(default=False, + verbose_name=gettext_lazy( + "Is authentication blocked for this username"), + help_text=gettext_lazy( + "Flag, that shows if authentication for that username is blocked")) + failed_login_attempts = models.IntegerField(default=0, + verbose_name=gettext_lazy("Failed login attempts"), + help_text=gettext_lazy("Failed login attempts for username")) + username_unlock_time = models.DateTimeField(verbose_name=gettext_lazy("Authentication block time"), + help_text=gettext_lazy( + "Time, when access to authentication for that username was blocked"), + blank=True, + null=True) + + +class ConsoleAuthSettings(SingletonModel): + """ Singleton model for storing console authentication backend settings """ + login_attempts_limit = models.IntegerField(default=3, + validators=[MinValueValidator(0), MaxValueValidator(100)], + verbose_name=gettext_lazy('Login attempts limit'), + 
help_text=gettext_lazy( + 'Attempts, after which access to authorization will be blocked. Range from 1 to 100. 0 is off')) + login_block_timeout = models.DurationField(default=timedelta(minutes=30), + verbose_name=gettext_lazy('Login authentication timeout'), + help_text=gettext_lazy('Timeout, during which user cannot authenticate')) diff --git a/core/serializers.py b/core/serializers.py new file mode 100644 index 0000000..a224ce6 --- /dev/null +++ b/core/serializers.py @@ -0,0 +1,124 @@ +import datetime +import logging + +from django.forms.utils import to_current_timezone +from django.utils.formats import get_format +from django.utils.translation import gettext_lazy +from rest_framework import serializers, ISO_8601 +from rest_framework.serializers import * + +from core.models import ConsoleAuthSettings, TLSSettings + +_log = logging.getLogger(__name__) + + +class AuthSettingsSerializer(serializers.ModelSerializer): + class Meta: + model = ConsoleAuthSettings + fields = ('login_attempts_limit', 'login_block_timeout') + + +class TLSSettingsSerializer(serializers.ModelSerializer): + class Meta: + model = TLSSettings + fields = ["enabled", "certificate", "key"] + extra_kwargs = { + 'enabled': { + 'required': True + } + } + + def validate(self, attrs): + file_count = 0 + if attrs.get("certificate", None): + file_count += 1 + if attrs.get("key", None): + file_count += 1 + _log.info(f"File count: {file_count}") + + attrs['file_count'] = file_count + + if attrs['enabled'] and file_count == 0 and (not self.instance.certificate.name or + not self.instance.key.name): + _log.error("No cert or key provided from model") + raise ValidationError(gettext_lazy("No certificate or key provided")) + + if file_count == 1: + _log.error("No cert or key provided from request") + raise ValidationError(gettext_lazy("No certificate or key provided")) + + return attrs + + def to_representation(self, tls_settings: TLSSettings) -> OrderedDict: + data = super(TLSSettingsSerializer, self).to_representation(tls_settings) + if tls_settings.certificate: + data['certificate'] = tls_settings.certificate.url + if tls_settings.key: + data['key'] = tls_settings.key.url + return data + + +class DateTimeLocalizedField(serializers.DateTimeField): + def __init__(self, format=empty, input_formats=None, default_timezone=None, *args, unix=False, **kwargs): + if unix: + format, input_formats = '%s', ['%s'] + else: + if format is empty: + format = get_format('DATETIME_INPUT_FORMATS')[0] + if input_formats is None: + input_formats = get_format('DATETIME_INPUT_FORMATS') + input_formats.append(ISO_8601) # any locale support iso-8601 + super().__init__(format, input_formats, default_timezone, *args, **kwargs) + + +class DateLocalizedField(serializers.DateField): + def __init__(self, format=empty, input_formats=None, *args, unix=False, **kwargs): + if unix: + format, input_formats = '%s', ['%s'] + else: + if format is empty: + format = get_format('DATE_INPUT_FORMATS')[0] + if input_formats is None: + input_formats = get_format('DATE_INPUT_FORMATS') + input_formats.append(ISO_8601) # any locale support iso-8601 + super().__init__(format, input_formats, *args, **kwargs) + + +class TimeLocalizedField(serializers.TimeField): + def __init__(self, format=empty, input_formats=None, *args, **kwargs): + if format is empty: + format = '%H:%M' + if input_formats is None: + input_formats = get_format('TIME_INPUT_FORMATS') + input_formats.append(ISO_8601) # any locale support iso-8601 + super().__init__(format, input_formats, *args, **kwargs) + + +class 
TimeLocalizedTzAwareField(TimeLocalizedField): + @staticmethod + def _replace_time(datetime_obj, time_obj): + return datetime_obj.replace(hour=time_obj.hour, minute=time_obj.minute, second=time_obj.second, + microsecond=time_obj.microsecond) + + def to_internal_value(self, value): + t_tz_naive = super().to_internal_value(value) + if isinstance(t_tz_naive, datetime.time): + dt_tz_aware = TimeLocalizedTzAwareField._replace_time(timezone.localtime(), t_tz_naive) + t_utc_naive = dt_tz_aware.astimezone().time() + value = t_utc_naive + return value + + def to_representation(self, value): + t_utc_naive = value + if isinstance(t_utc_naive, datetime.time): + dt_utc_aware = TimeLocalizedTzAwareField._replace_time(timezone.now(), t_utc_naive) + t_tz_naive = to_current_timezone(dt_utc_aware).time() + value = t_tz_naive + return super().to_representation(value) + + +class ModelLocalizedSerializer(serializers.ModelSerializer): + serializer_field_mapping = serializers.ModelSerializer.serializer_field_mapping.copy() + serializer_field_mapping[models.DateField] = DateLocalizedField + serializer_field_mapping[models.DateTimeField] = DateTimeLocalizedField + serializer_field_mapping[models.TimeField] = TimeLocalizedTzAwareField diff --git a/core/services/__init__.py b/core/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/services/amc_services.py b/core/services/amc_services.py new file mode 100644 index 0000000..efbbaa0 --- /dev/null +++ b/core/services/amc_services.py @@ -0,0 +1,58 @@ +import logging +import subprocess + +from rest_framework.exceptions import APIException + +_log = logging.getLogger(__name__) + + +class AMCServiceException(APIException): + pass + + +class AMCService: + MAPPING_SERVICE = { + 'nginx': 'nginx.service', + 'elasticsearch': 'elasticsearch.service', + 'gunicorn': 'amcgunicorn.service', + 'celery': 'amccelery.service', + 'celerybeat': 'amccelerybeat.service', + 'correlator': 'amccorrelator.service', + 'vector': 'amcvector.service', + 'postgresql': 'postgresql.service' + } + + def __init__(self, service_name): + self.service = service_name + + try: + self.system_service_name = self.MAPPING_SERVICE[service_name] + except KeyError: + raise AMCServiceException({'status': 'error', 'detail': f'Unable to work with "{service_name}" service'}) + + def get_status(self): + """ + Check whether any of the specified units are active (i.e. running). + """ + command = ['sudo', 'systemctl', 'is-active', f'{self.system_service_name}'] + action = 'get status' + return self._run_cmd(command, action) + + def reboot(self): + """ + Reload service if they support it. 
If not, stop and then start them instead + If the services are not running yet, they will be started + """ + command = ['sudo', 'systemctl', 'reload-or-restart', f'{self.system_service_name}'] + action = 'reboot' + return self._run_cmd(command, action) + + def _run_cmd(self, cmd, action='perform operation'): + subproc = subprocess.run(cmd, capture_output=True) + if subproc.returncode != 0: # usually 0 is the correct exit code + error = subproc.stderr or subproc.stdout + _log.error(f"Can't {action} '{self.service}': {error.decode('utf-8')}") + raise AMCServiceException({'status': 'error', + 'detail': f"Can't {action} '{self.service}': {error.decode('utf-8').strip()}"}) + output = subproc.stdout.decode().strip() + return output diff --git a/core/services/authentication.py b/core/services/authentication.py new file mode 100644 index 0000000..8c54433 --- /dev/null +++ b/core/services/authentication.py @@ -0,0 +1,107 @@ +import logging + +from django.utils.translation import gettext_lazy + +from core.models import ConsoleAuthSettings +from core.utils import dtnow + +_log = logging.getLogger(__name__) + + +def create_authentication_log_message(action, message, ip_addr=None): + log_string = f'Authentication {action}' + if ip_addr: + log_string += f' from ip: {ip_addr}' + log_string += f': {message}' + return log_string + + +def handle_login_attempt(attempt_username_data, login_successful, ip_address): + if attempt_username_data.is_username_auth_blocked: + _log.info(create_authentication_log_message( + 'attempt', + f'[{attempt_username_data.user.username}] account is blocked until ' + f'{attempt_username_data.username_unlock_time}', + ip_address)) + attempt_username_data.save() + return False, gettext_lazy('Authentication for this user is blocked. Please contact administrator') + if login_successful: + if attempt_username_data.failed_login_attempts == 0: + _log.info(create_authentication_log_message( + 'attempt', f'[{attempt_username_data.user.username}] has been successfully authenticated', + ip_address)) + attempt_username_data.save() + return True, None + else: + _log.info(create_authentication_log_message( + 'attempt', + f'[{attempt_username_data.user.username}] has been successfully authenticated after ' + f'{attempt_username_data.failed_login_attempts} ' + f'tries. Failed login attempts counter has been reset', + ip_address)) + attempt_username_data.failed_login_attempts = 0 + attempt_username_data.save() + return True, None + else: + # Get authentication settings + auth_settings = ConsoleAuthSettings.get_solo() + _log.info(create_authentication_log_message( + 'attempt', + f'[{attempt_username_data.user.username}] failed to authenticate. 
Amount of failed attempts: '
+            f'{attempt_username_data.failed_login_attempts + 1}/{auth_settings.login_attempts_limit}',
+            ip_address))
+        amount_of_failed_attempts = attempt_username_data.failed_login_attempts + 1
+        attempt_username_data.failed_login_attempts += 1
+
+        if (auth_settings.login_attempts_limit > 0 and
+                amount_of_failed_attempts == auth_settings.login_attempts_limit and
+                not attempt_username_data.is_username_auth_blocked):
+
+            attempt_username_data.is_username_auth_blocked = True
+            user_unlock_time = dtnow() + auth_settings.login_block_timeout
+            _log.info(create_authentication_log_message(
+                'settings',
+                f'[{attempt_username_data.user.username}] Blocked authentication for '
+                f'{attempt_username_data.user.username} until {user_unlock_time} '
+                f'due to exceeding the limit of login attempts',
+                ip_address))
+            attempt_username_data.username_unlock_time = user_unlock_time
+            attempt_username_data.save()
+            return False, gettext_lazy('Authentication for this user is blocked for {time}').format(
+                time=auth_settings.login_block_timeout)
+        elif attempt_username_data.is_username_auth_blocked:
+            _log.info(create_authentication_log_message(
+                'attempt',
+                f'[{attempt_username_data.user.username}] failed to authenticate due to timeout. '
+                f'Amount of failed attempts: '
+                f'{attempt_username_data.failed_login_attempts + 1}/{auth_settings.login_attempts_limit}',
+                ip_address))
+            return False, gettext_lazy(
+                "Authentication for this user is blocked, please contact system administrator")
+        attempt_username_data.save()
+        return False, gettext_lazy('Please enter a correct username and password. '
+                                   'Note that both fields may be case-sensitive.')
+
+
+def get_timeout_formatted(console_auth_settings):
+    """ Method for formatting a DurationField into a dict with the corresponding time values
+    :return: dictionary with the following content:
+    {
+        'days': ,
+        'hours': ,
+        'minutes': ,
+        'seconds': 
+    }
+    """
+    formatted_timeout = {'days': console_auth_settings.login_block_timeout.days}
+    formatted_timeout['hours'], rem = divmod(console_auth_settings.login_block_timeout.seconds, 3600)
+    formatted_timeout["minutes"], formatted_timeout["seconds"] = divmod(rem, 60)
+    return formatted_timeout
+
+
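# Worked example for get_timeout_formatted, assuming an illustrative
# login_block_timeout of timedelta(days=1, seconds=3723):
#     divmod(3723, 3600) == (1, 123) and divmod(123, 60) == (2, 3),
#     so the result is {'days': 1, 'hours': 1, 'minutes': 2, 'seconds': 3}.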
+ """ Method for forbieden access to authentication for user account """ + auth_data.is_username_auth_blocked = True + _log.info(create_authentication_log_message( + 'settings', f'user [{auth_data.user.username}] has been blocked by Administrator')) + auth_data.save() diff --git a/core/services/pagination.py b/core/services/pagination.py new file mode 100644 index 0000000..91fd56d --- /dev/null +++ b/core/services/pagination.py @@ -0,0 +1,8 @@ +from rest_framework.pagination import PageNumberPagination + + +class BasicPagination(PageNumberPagination): + """Basic pagination class """ + page_size = 10 + page_size_query_param = 'page_size' + max_page_size = 1000 diff --git a/core/services/tls_settings.py b/core/services/tls_settings.py new file mode 100644 index 0000000..7616383 --- /dev/null +++ b/core/services/tls_settings.py @@ -0,0 +1,125 @@ +import logging +import os +import shutil +from shutil import copyfile +from subprocess import Popen, PIPE + +from django.conf import settings + +from core.models import TLSSettings +from core import constants + + +_log = logging.getLogger(__name__) + + +def remove_ssl_certificate() -> None: + """Remove ssl certificate from model and folder""" + instance = TLSSettings.get_solo() + instance.certificate = None + instance.key = None + instance.enabled = False + instance.save() + instance.clear_cache() + + cert_path = os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_CERT_FILENAME) + key_path = os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_KEY_FILENAME) + + if os.path.exists(cert_path) or os.path.exists(key_path): + os.remove(cert_path) + os.remove(key_path) + + +def update_nginx(https_enabled: bool): + """Update NGINX config + @param https_enabled: if True - enable HTTPS """ + + instance = TLSSettings.get_solo() + + if instance.certificate and instance.key: + _log.info("Update cert and key from MEDIA_ROOT") + cert_path = os.path.join(settings.MEDIA_ROOT, instance.certificate.name) + key_path = os.path.join(settings.MEDIA_ROOT, instance.key.name) + + copyfile(cert_path, settings.TLS_CERT_FILENAME) + copyfile(key_path, settings.TLS_CERT_KEY_FILENAME) + + if https_enabled and (not instance.certificate or not instance.key): + _log.error("No cert or key provided to update_nginx with enabled HTTPS") + return + + # Select new one + if https_enabled: + src_file = os.path.join(settings.NGINX_SITES_AVAILABLE, settings.NGINX_HTTPS_CONFIG_FILENAME) + dst_file = os.path.join(settings.NGINX_SITES_AVAILABLE, settings.NGINX_ENABLED_CONFIG_FILENAME) + else: + src_file = os.path.join(settings.NGINX_SITES_AVAILABLE, settings.NGINX_HTTP_CONFIG_FILENAME) + dst_file = os.path.join(settings.NGINX_SITES_AVAILABLE, settings.NGINX_ENABLED_CONFIG_FILENAME) + + shutil.copyfile(src_file, dst_file) + + +def restart_nginx(): + """Restart NGINX""" + # TODO: check config: nginx -T + proc = Popen(['sudo', 'systemctl', 'reload', 'nginx.service'], stdin=PIPE, stdout=PIPE, stderr=PIPE) + output, error = proc.communicate() + + if proc.returncode != 0: + _log.error(f"Can't reload nginx: {error.decode('utf-8')}") + raise RuntimeError(f"Can't reload nginx: {error.decode('utf-8')}") + + +def generate_cert(cert_filename, key_filename): + """ + Generate new TLS certificate + @raises RuntimeError: if some error occurs + @param cert_filename: Where to write certificate + @param key_filename: Where to write key + """ + _log.info(f"Generating new cert {cert_filename} and key {key_filename}") + + subj = 
f"/C={settings.TLS_CERT_COUNTRY}/ST={settings.TLS_CERT_STATE}/L={settings.TLS_CERT_LOCALITY}/O={settings.TLS_CERT_ORIG_NAME}/CN={settings.TLS_CERT_COMMON_NAME}" + args = ['openssl', 'req', '-x509', '-nodes', '-days', str(settings.TLS_CERT_DAYS), '-newkey', + f'rsa:{settings.TLS_CERT_KEY_SIZE}', '-keyout', key_filename, '-out', cert_filename, '-subj', + subj] + _log.info(" ".join(args)) + proc = Popen(args, stdin=PIPE, stdout=PIPE, stderr=PIPE) + + output, error = proc.communicate() + output = output.decode("utf-8") + error = error.decode("utf-8") + if proc.returncode != 0: + _log.error(f"Can't generate TLS cert: {output}{error}") + raise RuntimeError(f"Can't create certificate: {output}{error}") + + if 'error' in output: + _log.error(f"Got some error {output}") + raise RuntimeError("Can't create certificate:\n" + output) + + if 'error' in error: + _log.error(f"Got some error {error}") + raise RuntimeError("Can't create certificate:\n" + error) + + +def handle_uploaded_file(content, filename): + if not os.path.exists(settings.MEDIA_ROOT): + os.makedirs(settings.MEDIA_ROOT) + + with open(filename, 'wb+') as destination: + for chunk in content.chunks(): + destination.write(chunk) + + +def update_model(tls_settings): + tls_settings.certificate.name = constants.DEFAULT_CERT_FILENAME + tls_settings.key.name = constants.DEFAULT_KEY_FILENAME + os.chmod(os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_CERT_FILENAME), 0o644) + os.chmod(os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_KEY_FILENAME), 0o644) + + +def create_cert(tls_settings): + generate_cert(os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_CERT_FILENAME), + os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_KEY_FILENAME)) + + update_model(tls_settings) diff --git a/core/tasks.py b/core/tasks.py new file mode 100644 index 0000000..b605613 --- /dev/null +++ b/core/tasks.py @@ -0,0 +1,46 @@ +import logging +import shutil + +from celery import shared_task + +from core.models import UsernameLoginAuthCheck +from core.services.authentication import create_authentication_log_message +from core.utils import dtnow +from notifications.enums import NotificationImportance, NotificationGroup +from notifications.models import Notification +from notifications.services.notification_sender import NotificationService + +_log = logging.getLogger(__name__) + +PERCENT_10 = 10 + + +def check_for_users_end_of_timeouts(): + """ Function for getting users with blocked access """ + for blocked_user in UsernameLoginAuthCheck.objects.filter(is_username_auth_blocked=True): + if dtnow() > blocked_user.username_unlock_time: + """ Resetting access to authentication for user account """ + blocked_user.failed_login_attempts = 0 + blocked_user.is_username_auth_blocked = False + _log.info(create_authentication_log_message( + 'settings', f'user [{blocked_user.user.username}] has been unlocked due to the end of block timeout')) + blocked_user.save() + + +@shared_task +def check_blocked_users(): + check_for_users_end_of_timeouts() + + +def get_disk_usage(): + total, used, free = shutil.disk_usage("/") + if free / total * 100 < PERCENT_10: # send if less than 10% left + notification = Notification.objects.create(text='Free disk space is running out', + importance=NotificationImportance.HIGH) + NotificationService().send(notification=notification, group=NotificationGroup.NOTIFICATION) + _log.info(f'Send Notification "Free disk space is running out": total: {total}, used: {used}, free: {free}') + + +@shared_task() +def get_disk_usage_task(): + get_disk_usage() diff 
--git a/core/tests/__init__.py b/core/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/tests/filters_backend.py b/core/tests/filters_backend.py new file mode 100644 index 0000000..718aa8a --- /dev/null +++ b/core/tests/filters_backend.py @@ -0,0 +1,44 @@ +from unittest.mock import Mock + +import pytest +from rest_framework.test import APIRequestFactory + +from core.backends.filters import SearchAllFieldsBackend +from devices.models.device import Device + +request = Mock() + + +@pytest.mark.django_db +@pytest.mark.unit +class TestSearchAllFieldsBackend: + """Test DRF Filter backend.""" + count = 10 + + @pytest.fixture(autouse=True) + def setup_test(self): + for i in range(self.count): + if i % 2: + description = f'test description_{i}' + else: + description = f'test text' + Device(name=f'title_{i}', description=description, ip=f'2.2.2.{i}', + port=f'9{i}99', type='firewall').save() + + @pytest.mark.parametrize('search_string, count', ( + ('test', 10), + ('1', 1), + ('description', 5), + ('text', 5), + ('mama', 0), + ('', 10) + )) + def test_query_count(self, search_string, count): + """Test count after filter.""" + filter = SearchAllFieldsBackend() + request.query_params = {'search': search_string} + + before_queryset = Device.objects.all() + assert len(before_queryset) == self.count + queryset_after = filter.filter_queryset(request=request, queryset=before_queryset, view=None) + assert len(queryset_after) == count diff --git a/core/tests/test_amc_services.py b/core/tests/test_amc_services.py new file mode 100644 index 0000000..acc2a44 --- /dev/null +++ b/core/tests/test_amc_services.py @@ -0,0 +1,80 @@ +from unittest.mock import patch + +import pytest +from rest_framework.reverse import reverse + +from core.services.amc_services import AMCServiceException, AMCService + + +def mock_subprocess(code=0, stdout=b'', stderr=b''): + class MockSubprocessRun: + def __init__(self, *args, **kwargs): + self.returncode = code + self.stdout = stdout + self.stderr = stderr + + return MockSubprocessRun + + +class TestManageService: + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + self.user = django_user_model.objects.get() + + def test_receive_invalid_service(self): + service_name = 'test' + with pytest.raises(AMCServiceException) as exc: + AMCService(service_name) + assert exc.value.detail['status'] == 'error' + assert exc.value.detail['detail'] == f'Unable to work with "{service_name}" service' + + @patch('core.services.amc_services.subprocess.run', mock_subprocess(stdout=b'active')) + def test_get_status_service(self): + service = AMCService('nginx') + assert service.get_status() == 'active' + + @patch('core.services.amc_services.subprocess.run', mock_subprocess(code=5, stderr=b'error')) + def test_unable_to_get_status_service(self): + service_name = 'nginx' + service = AMCService(service_name) + with pytest.raises(AMCServiceException) as exc: + service.get_status() + assert exc.value.detail['status'] == 'error' + assert exc.value.detail['detail'] == f"Can't get status '{service_name}': error" + + @patch('core.services.amc_services.subprocess.run', mock_subprocess()) + def test_reboot_service(self): + service_name = 'nginx' + service = AMCService(service_name) + assert service.reboot() == '' # it is correct stdout response + + @patch('core.services.amc_services.subprocess.run', mock_subprocess(stdout=b'active')) + def test_api_get_status_service(self, api_client): + api_client.force_authenticate(self.user) + response = 
api_client.get(reverse('amc-status', kwargs={'service_name': 'nginx'}))
+        assert response.json() == {'status': 'active'}
+
+    @patch('core.services.amc_services.subprocess.run', mock_subprocess(code=1))
+    def test_api_get_status_service_with_error(self, api_client):
+        api_client.force_authenticate(self.user)
+        # reverse() takes URL kwargs via the `kwargs` argument; it has no `params` argument
+        response = api_client.get(reverse('amc-status', kwargs={'service_name': 'nginx'}))
+        assert response.json() == {'status': 'error', 'detail': "Can't get status 'nginx': "}
+
+    @patch('core.services.amc_services.subprocess.run', mock_subprocess())
+    def test_api_reboot_service(self, api_client):
+        api_client.force_authenticate(self.user)
+        response = api_client.get(reverse('amc-reboot', kwargs={'service_name': 'nginx'}))
+        assert response.json() == {'status': 'ok'}
+
+    @patch('core.services.amc_services.subprocess.run', mock_subprocess(code=1))
+    def test_api_reboot_service_with_error(self, api_client):
+        api_client.force_authenticate(self.user)
+        response = api_client.get(reverse('amc-reboot', kwargs={'service_name': 'nginx'}))
+        assert response.json() == {'status': 'error', 'detail': "Can't reboot 'nginx': "}
+
+    @patch('core.services.amc_services.subprocess.run', mock_subprocess(code=1))
+    def test_api_get_status_with_invalid_service(self, api_client):
+        api_client.force_authenticate(self.user)
+        response = api_client.get(reverse('amc-status', kwargs={'service_name': 'invalid'}))
+        assert response.json() == {'status': 'error', 'detail': 'Unable to work with "invalid" service'}
diff --git a/core/tests/test_authentication.py b/core/tests/test_authentication.py
new file mode 100644
index 0000000..d77b153
--- /dev/null
+++ b/core/tests/test_authentication.py
@@ -0,0 +1,72 @@
+import pytest
+from django.contrib.auth.models import User
+from django.urls import reverse
+
+from core.models import ConsoleAuthSettings, UsernameLoginAuthCheck
+from core.tasks import check_for_users_end_of_timeouts
+from core.utils import dtnow
+
+TEST_USERNAME = 'foo'
+TEST_PASSWORD = 'bar'
+LOGIN_URL_NAME = 'api_login'
+
+
+@pytest.mark.django_db
+class TestAssetPagesAccess(object):
+
+    @pytest.fixture(autouse=True)
+    def setup_tests(self, client, django_user_model, add_user_with_permissions):
+        add_user_with_permissions(username=TEST_USERNAME, password=TEST_PASSWORD, is_superuser=True)
+
+    @pytest.mark.unit
+    def test_success_log(self, caplog, client):
+        client.post(reverse(LOGIN_URL_NAME),
+                    data={'username': TEST_USERNAME, 'password': TEST_PASSWORD})
+        assert f'[{TEST_USERNAME}] has been successfully authenticated' in caplog.text
+
+    # TODO: Strange test, unlock user only after login try
+    @pytest.mark.unit
+    @pytest.mark.django_db
+    def test_unlock_user_task(self, client, caplog):
+        client.post(reverse(LOGIN_URL_NAME),
+                    data={'username': TEST_USERNAME, 'password': TEST_PASSWORD})
+        test_user = User.objects.get(username=TEST_USERNAME)
+        test_user_authentication_settings = UsernameLoginAuthCheck.objects.get(user=test_user)
+        test_user_authentication_settings.is_username_auth_blocked = True
+        test_user_authentication_settings.username_unlock_time = dtnow(days=-1)
+        test_user_authentication_settings.save()
+        response = client.post(reverse(LOGIN_URL_NAME),
+                               data={'username': TEST_USERNAME, 'password': TEST_PASSWORD})
+
+        assert response.status_code == 400
+        assert f'[{TEST_USERNAME}] account is blocked until' in caplog.text
+        check_for_users_end_of_timeouts()
+        assert not UsernameLoginAuthCheck.objects.get(user=test_user).is_username_auth_blocked
+
+    @pytest.mark.unit
+    @pytest.mark.django_db
+    def 
test_block(self, caplog, client): + auth_settings = ConsoleAuthSettings.get_solo() + for i in range(0, auth_settings.login_attempts_limit): + client.post(reverse(LOGIN_URL_NAME), + data={'username': TEST_USERNAME, 'password': 'BAD_PASSWORD'}) + + test_user = User.objects.get(username=TEST_USERNAME) + test_user_authentication_settings = UsernameLoginAuthCheck.objects.get(user=test_user) + assert test_user_authentication_settings.is_username_auth_blocked + + @pytest.mark.unit + @pytest.mark.django_db + def test_limit_reset_after_correct_authentication_data(self, caplog, client): + auth_settings = ConsoleAuthSettings.get_solo() + for i in range(0, auth_settings.login_attempts_limit - 1): + client.post(reverse(LOGIN_URL_NAME), + data={'username': TEST_USERNAME, 'password': 'BAD_PASSWORD'}) + + response = client.post(reverse(LOGIN_URL_NAME), + data={'username': TEST_USERNAME, 'password': TEST_PASSWORD}) + test_user = User.objects.get(username=TEST_USERNAME) + test_user_authentication_settings = UsernameLoginAuthCheck.objects.get(user=test_user) + assert response.status_code == 200 + assert not test_user_authentication_settings.is_username_auth_blocked + assert test_user_authentication_settings.failed_login_attempts == 0 diff --git a/core/tests/test_data/test_certificate.crt b/core/tests/test_data/test_certificate.crt new file mode 100644 index 0000000..fcf2a17 --- /dev/null +++ b/core/tests/test_data/test_certificate.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDhTCCAm2gAwIBAgIUEa17zGmRu6NjYfpIoWft1Y1aE4AwDQYJKoZIhvcNAQEL +BQAwUjELMAkGA1UEBhMCUlUxDzANBgNVBAgMBk1vc2NvdzEPMA0GA1UEBwwGTW9z +Y293MQ0wCwYDVQQKDARBUk1BMRIwEAYDVQQDDAlpd2FybWEucnUwHhcNMjExMTIz +MTMyNjI1WhcNMjIxMTIzMTMyNjI1WjBSMQswCQYDVQQGEwJSVTEPMA0GA1UECAwG +TW9zY293MQ8wDQYDVQQHDAZNb3Njb3cxDTALBgNVBAoMBEFSTUExEjAQBgNVBAMM +CWl3YXJtYS5ydTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJvme/oG +F18guJMCrPw+Ij2qN4WdsxGkvvUywgZ5sCHYEXnW/CV41dCe2c5d9sl3sIztF1wD +HAfQmiAKmosOnWVAatxxTjPTajQiXYBY8hrRownD5XTxMhQ0yLVXcVB/ibeTc1rL +FfxdagpgoWvjUkJqIX2vy7Ec+6XGQ7t+Bn084tWdUxZuHXgRQkY+SsSWY/9r0/ph +3q/DwbC7veb5YDUPmcpgPcRyGan+24RYHg20VMS70gDWPEFjr4U3jNFFjsir9m++ +MokND5WegjxrivU9BGAON1gzEJ7qf10efJgJIpsEHpDL6oh1FSen5DxbzRbYEzVm +qqQFdYwRhxtnJ6UCAwEAAaNTMFEwHQYDVR0OBBYEFKvrJpCyxSWlnEaAQXNQ8pwe +IG6tMB8GA1UdIwQYMBaAFKvrJpCyxSWlnEaAQXNQ8pweIG6tMA8GA1UdEwEB/wQF +MAMBAf8wDQYJKoZIhvcNAQELBQADggEBAHpyDKF1AaE+RtnLJcUmZZ39qJO+Hbzb +jLClScBO40EEUhxyvE7TztvvIhEc5aN11QvKW7CbMlWkjoTgAb8Q7JY1wChhsZch +ApFEfnlMJbQujRZbijnhd4XCo4N8DFjm/hI4T08cVlSnRrfufCVA3n7Q66YhbpLu +w+nOgGu17Egem7Gqm1VzoJjGDNVfKXTUNt85nO5MiWUEv49sRpvFEUb26inj2ly2 +hK8kfjrZYZztGgMqOmm0upvjlYqEMgTWi6lPjfH+tpodE9fRTIalAqnsBFPqxYB8 +5P3/sCbpi8PjNZVKK1Zfk0o/P266xFUMHbLTcJ0bitSK2BnZd2FjD5g= +-----END CERTIFICATE----- diff --git a/core/tests/test_data/test_certificate.key b/core/tests/test_data/test_certificate.key new file mode 100644 index 0000000..17e215b --- /dev/null +++ b/core/tests/test_data/test_certificate.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCb5nv6BhdfILiT +Aqz8PiI9qjeFnbMRpL71MsIGebAh2BF51vwleNXQntnOXfbJd7CM7RdcAxwH0Jog +CpqLDp1lQGrccU4z02o0Il2AWPIa0aMJw+V08TIUNMi1V3FQf4m3k3NayxX8XWoK +YKFr41JCaiF9r8uxHPulxkO7fgZ9POLVnVMWbh14EUJGPkrElmP/a9P6Yd6vw8Gw +u73m+WA1D5nKYD3Echmp/tuEWB4NtFTEu9IA1jxBY6+FN4zRRY7Iq/ZvvjKJDQ+V +noI8a4r1PQRgDjdYMxCe6n9dHnyYCSKbBB6Qy+qIdRUnp+Q8W80W2BM1ZqqkBXWM +EYcbZyelAgMBAAECggEAZgNXtK8vs+uxW2Ew2I2/0iID2sbqWe03lzLzi1EtU7gT +F2r563s6Thzj4QnTZtmPCWhLFMZkHSj+8WB+5+dUTuDsC/uIdJkusZabIFUTd0Ee 
+MlGhX/hEELzJaQAjAc2yzBEMNqkVZeGqdcftWK+TMCpKZ/gUjXytnjgyMgmxLVkY +iZfAodlnswmheN5ItvEVhji+44rCjIC/3fz1IG9U+2KUxPx/LjMy16zazkZzThEV +UG3GqOZg2ncS2bNFNiG6HhbyghF0xo33Hme46rwVh49kErcyjHUvYG3gctK9QliU +N0B0YiaSRBHguGoIUJhPI2wbdo4ToqSEWxfUkxrJgQKBgQDK9vVW5PgsssUzpRGx +Gx/jKMUMCQJKHBm73WFdBpGKCeZZcpnfNDzANxhBTtumAy8d5HHVrI78X68Lt3CW +/8XWir6GiDLp+KEUflPBS+FuHGFtNRoEnxoDeqqTSS4XkjhMjVK976LtJOSsNcAN +WHrMGzy+3RUkosKy01NLJpnanQKBgQDEozigg7vAahLjqFC4AQm8CX+1mkFWf1kM +iv8k+zfhYA3o4QUrIxgLX4+lfBZIr+xrnUqu/r1jpXwcpcrJihvh+33TeKEXX+iR +c2vy2sfD2sM2ft3pfXmj7IKV2VKO4AFdlo9IBO4gi8ZAPhXG7HiuSyyKXahzlyT3 +ZlBZEgJOqQKBgQC8KhPtyTzjg6Ebbg9m2DTJzkRQEhITTtX0uxrGuY44IVFy37IT +okQoF2vfMBKmaBFIbz+xztaI1tRb0mcJNnrdmadk5eP90cjUTQGtFIIcKSeRrUc/ +vZjKXPDCt3eJ1r9nCSYKfJ3ZqPhvRy2TsdG2ZBH/CMvPOS2zyANSiqjcaQKBgHxp +bPWaA5udQXBK2S9icL9JH0VOYNKSZkwaUY4baGKvPH3AiV2eqaLghmlElnDM5f+8 +mDkaMcevN1SEzUYwnK2hSh4Xb4zzgJkudvlD1Sqk6eg74rnNSr4dcQ3QX3zIW/TT +wrnlbKio5vlUjsC6cyyLoZW15lOkKJ5jXKjOTSlpAoGBAJO5qSArSmPaPhiZjbRK +9dLnzTW36BnNm7SIx5/lXtJJjApXKI3xQxljXbsUnyHM6iInzEQdOFHrX1ThGD4M +oWW8DgmO40D7yT2VeATyEDn16QEplvw1qX8pX+zjuy9U990fG+numudtaI+y4VAp +9OAtk5UPuNihFY/b6FDcxaOZ +-----END PRIVATE KEY----- diff --git a/core/tests/test_tasks.py b/core/tests/test_tasks.py new file mode 100644 index 0000000..4750ac9 --- /dev/null +++ b/core/tests/test_tasks.py @@ -0,0 +1,28 @@ +from unittest.mock import patch + +import pytest + +from core.tasks import get_disk_usage + + +def mock_notification_service(): + class NotificationService: + def send(self, *args, **kwargs): + pass + return NotificationService + + +@pytest.mark.django_db +@pytest.mark.unit +class TestTask: + + @patch('core.tasks.shutil.disk_usage', lambda _: (100, 99, 1)) + @patch('core.tasks.NotificationService', mock_notification_service()) + def test_get_disk_usage_no_disk_space(self, caplog): + get_disk_usage() + assert 'Send Notification "Free disk space is running out"' in caplog.text + + @patch('core.tasks.shutil.disk_usage', lambda _: (100, 50, 50)) + def test_get_disk_usage(self, caplog): + get_disk_usage() + assert 'Send Notification "Free disk space is running out"' not in caplog.text diff --git a/core/tests/test_tls_settings.py b/core/tests/test_tls_settings.py new file mode 100644 index 0000000..8a449ad --- /dev/null +++ b/core/tests/test_tls_settings.py @@ -0,0 +1,189 @@ +import os.path +import tempfile +from subprocess import Popen, PIPE +from unittest.mock import patch + +import pytest +from django.conf import settings +from django.core.files.uploadedfile import SimpleUploadedFile +from django.urls import reverse +from rest_framework.test import APIClient + +from core.models import TLSSettings +from core.services.tls_settings import generate_cert + +BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +TEST_CERT = os.path.join(BASE_DIR, "tests", "test_data/test_certificate.crt") +TEST_KEY = os.path.join(BASE_DIR, "tests", "test_data/test_certificate.key") +TMP_DIR = tempfile.TemporaryDirectory() + +api_client = APIClient() + + +def mock_func_for_test(): + pass + + +class TestCertificateGeneration(object): + cert_file_name = "certificate.crt" + key_file_name = "certificate.key" + + @pytest.mark.unit + def test_can_create_cert(self): + with tempfile.TemporaryDirectory() as tmp_dir_name: + generate_cert(os.path.join(tmp_dir_name, self.cert_file_name), + os.path.join(tmp_dir_name, self.key_file_name)) + + assert os.path.exists(os.path.join(tmp_dir_name, self.cert_file_name)) + assert os.path.exists(os.path.join(tmp_dir_name, self.key_file_name)) + + 
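[Reviewer note: generate_cert shells out to the equivalent of `openssl req -x509 -nodes -days N -newkey rsa:SIZE -keyout KEY -out CERT -subj SUBJ`. A hedged sketch of an extra check these tests could add, confirming the generated certificate and key actually belong together by comparing their public keys; the file paths are illustrative and only standard openssl subcommands are used:]

    # Sketch: assert that a certificate and key form a matching pair
    from subprocess import run, PIPE

    def pubkey(args):
        # Both subcommands print the public key in PEM form on stdout
        return run(args, stdout=PIPE, stderr=PIPE).stdout

    cert_pub = pubkey(['openssl', 'x509', '-in', 'certificate.crt', '-pubkey', '-noout'])
    key_pub = pubkey(['openssl', 'pkey', '-in', 'certificate.key', '-pubout'])
    assert cert_pub and cert_pub == key_pub  # identical PEM public keys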
@pytest.mark.unit + def test_got_error_when_bad_path(self): + with pytest.raises(RuntimeError): + generate_cert(os.path.join('arr', self.cert_file_name), + os.path.join('arr', self.key_file_name)) + + @pytest.mark.unit + def test_check_certificate(self): + with tempfile.TemporaryDirectory() as tmp_dir_name: + generate_cert(os.path.join(tmp_dir_name, self.cert_file_name), + os.path.join(tmp_dir_name, self.key_file_name)) + + proc = Popen( + ['openssl', 'x509', '-in', os.path.join(tmp_dir_name, self.cert_file_name), '-text', '-noout'], + stdin=PIPE, stdout=PIPE, stderr=PIPE) + output, error = proc.communicate() + + assert proc.returncode == 0 + output = output.decode('utf-8') + assert f"Issuer: C = {settings.TLS_CERT_COUNTRY}, ST = {settings.TLS_CERT_STATE}, L = {settings.TLS_CERT_LOCALITY}, O = {settings.TLS_CERT_ORIG_NAME}, CN = {settings.TLS_CERT_COMMON_NAME}" in output + assert f"Subject: C = {settings.TLS_CERT_COUNTRY}, ST = {settings.TLS_CERT_STATE}, L = {settings.TLS_CERT_LOCALITY}, O = {settings.TLS_CERT_ORIG_NAME}, CN = {settings.TLS_CERT_COMMON_NAME}" in output + + +class TestCertificateUpload: + api_url = reverse('tls-settings') + + @pytest.fixture(autouse=True) + def setup_test(self, add_user_with_permissions): + nginx_http_config_path = os.path.join(TMP_DIR.name, settings.NGINX_HTTP_CONFIG_FILENAME) + open(nginx_http_config_path, 'a').close() + self.user = add_user_with_permissions(username='admintest', password='adminpass', is_superuser=True) + + self.cert = SimpleUploadedFile(name='testcert.crt', content=open(TEST_CERT, 'rb').read(), + content_type='application/pkix-cert') + self.key = SimpleUploadedFile(name='testcert.key', content=open(TEST_KEY, 'rb').read(), + content_type='application/x-iwork-keynote-sffkey') + yield + os.remove(nginx_http_config_path) + + @pytest.mark.unit + @patch('django.conf.settings.NGINX_SITES_AVAILABLE', TMP_DIR.name) + @patch('core.views.view_settings.restart_nginx', mock_func_for_test) + def test_valid_upload_cert_and_key_in_enabled_cert(self, client) -> None: + api_client.force_authenticate(self.user) + + instance_before = TLSSettings.get_solo() + assert not instance_before.enabled + assert not instance_before.certificate.name + assert not instance_before.key.name + + form_data = {'enabled': False, 'certificate': self.cert, 'key': self.key} + response = api_client.patch(self.api_url, form_data) + assert response.status_code == 200 + + instance_after = TLSSettings.get_solo() + instance_after.clear_cache() + assert not instance_after.enabled + assert instance_after.certificate.name == 'certificate.crt' + assert instance_after.key.name == 'certificate.key' + + @pytest.mark.unit + def test_not_valid_upload_cert_without_key(self) -> None: + api_client.force_authenticate(self.user) + + instance_before = TLSSettings.get_solo() + assert not instance_before.enabled + assert not instance_before.certificate.name + assert not instance_before.key.name + + form_data = { + 'enabled': False, + 'certificate': self.cert + } + response = api_client.patch(self.api_url, form_data) + assert response.status_code == 400 + + instance_after = TLSSettings.get_solo() + instance_after.clear_cache() + assert not instance_after.enabled + assert not instance_after.certificate.name + assert not instance_after.key.name + + @pytest.mark.unit + def test_not_valid_upload_without_cert_and_key(self) -> None: + api_client.force_authenticate(self.user) + + instance_before = TLSSettings.get_solo() + assert not instance_before.enabled + assert not instance_before.certificate.name + assert 
not instance_before.key.name
+
+        form_data = {
+            'enabled': True
+        }
+        response = api_client.patch(self.api_url, form_data)
+        assert response.status_code == 400
+
+        instance_after = TLSSettings.get_solo()
+        instance_after.clear_cache()
+        assert not instance_after.enabled
+        assert not instance_after.certificate.name
+        assert not instance_after.key.name
+
+    @pytest.mark.unit
+    @patch('django.conf.settings.NGINX_SITES_AVAILABLE', TMP_DIR.name)
+    @patch('core.views.view_settings.restart_nginx', mock_func_for_test)
+    def test_remove_ssl_certificate(self) -> None:
+        api_client.force_authenticate(self.user)
+        before_settings = TLSSettings.get_solo()
+        before_settings.certificate.name = 'certificate.cert'
+        before_settings.key.name = 'certificate.key'
+        before_settings.enabled = True
+        before_settings.save()
+        before_settings.clear_cache()
+
+        assert before_settings.certificate.name == 'certificate.cert'
+        assert before_settings.key.name == 'certificate.key'
+        assert before_settings.enabled
+
+        response = api_client.delete(self.api_url)
+        assert response.status_code == 200
+
+        settings_after_remove = TLSSettings.get_solo()
+        settings_after_remove.clear_cache()
+
+        assert not settings_after_remove.enabled
+        assert not settings_after_remove.certificate.name
+        assert not settings_after_remove.key.name
+
+    @pytest.mark.unit
+    @patch('django.conf.settings.NGINX_SITES_AVAILABLE', TMP_DIR.name)
+    @patch('core.views.view_settings.restart_nginx', mock_func_for_test)
+    def test_create_new_cert_via_api(self) -> None:
+        api_client.force_authenticate(self.user)
+
+        instance_before = TLSSettings.get_solo()
+        instance_before.clear_cache()
+
+        assert not instance_before.enabled
+        assert not instance_before.certificate.name
+        assert not instance_before.key.name
+
+        response = api_client.post(self.api_url)
+        assert response.status_code == 200
+
+        instance_after = TLSSettings.get_solo()
+        instance_after.clear_cache()
+        assert instance_after.certificate.name == 'certificate.crt'
+        assert instance_after.key.name == 'certificate.key'
+        assert instance_after.enabled is False
diff --git a/core/urls.py b/core/urls.py
new file mode 100644
index 0000000..34afc50
--- /dev/null
+++ b/core/urls.py
@@ -0,0 +1,11 @@
+from django.urls import path
+
+from core.views.amc_services import AMCServiceStatus, AMCServiceReboot
+from core.views.view_settings import AuthSettingsAPIView, TLSSettingsAPIView
+
+urlpatterns = [
+    path('settings/auth/', AuthSettingsAPIView.as_view()),
+    path('settings/tls/', TLSSettingsAPIView.as_view(), name='tls-settings'),
+    path('services/status/<str:service_name>/', AMCServiceStatus.as_view(), name='amc-status'),
+    path('services/reboot/<str:service_name>/', AMCServiceReboot.as_view(), name='amc-reboot'),
+]
diff --git a/core/utils.py b/core/utils.py
new file mode 100644
index 0000000..fc6a7da
--- /dev/null
+++ b/core/utils.py
@@ -0,0 +1,76 @@
+import datetime
+import logging
+import os
+import typing
+from decimal import Decimal
+
+from django.http import HttpResponse
+from django.utils import formats, timezone
+from rest_framework.exceptions import APIException
+
+_log = logging.getLogger(__name__)
+
+
+def catch_exception(func):
+    """
+    Catches any exceptions raised in the function.
+    If the exception is a subclass of APIException, reraise it to be handled by DRF. 
+ If not a subclass of APIException, then raise an APIException so that DRF handles it anyway and shows human error + """ + + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except APIException as e: + raise e + except Exception as e: + _log.error(f'Error: {str(e)}') + raise APIException(detail={ + 'status': 'error', + 'code': 'unknown', + 'detail': f'{str(e)}' + }) + + return wrapper + + +def dtnow(days=0, seconds=0, microseconds=0, milliseconds=0, minutes=0, hours=0, weeks=0, + tz: timezone = timezone.utc, local=False) -> datetime.datetime: + """ returns current datetime or datetime in future/past by time units + use positive/negative time units for future/past datetime + :param tz: timezone or None to use datetime without timezone, can not be None if local parameter is True + :param local: True to change returning value timezone to user local + :return: datetime.datetime object + """ + if not tz: # make sure None is used for disabling and only for nonlocal dates + tz = timezone.utc if local else None + ret = datetime.datetime.now(tz=tz) + datetime.timedelta(days=days, seconds=seconds, microseconds=microseconds, + milliseconds=milliseconds, minutes=minutes, hours=hours, + weeks=weeks) + return timezone.localtime(ret) if local else ret + + +def fmt_input( + val: typing.Union[str, bool, int, float, Decimal, datetime.datetime, datetime.date, datetime.time], + format: str = None) -> str: + """ Make localized string for specified input value using specified format + :param val: value to format, may be of different types + :param format: format for data or None to use default localized format + :return: localized data as str + """ + return formats.localize_input(val, format) + + +def store_file_response(file_name): + with open(file_name, 'rb') as fh: + return httpFileResponse(fh.read(), os.path.basename(file_name)) + + +def httpFileResponse(content, fname='file', zip=None): + response = HttpResponse(content, content_type="application/file") + fname = fname.replace('"', '').replace("'", '') + if zip: + response['Content-Disposition'] = f'attachment; filename="{fname}.zip"' + else: + response['Content-Disposition'] = f'attachment; filename="{fname}"' + return response diff --git a/core/validators.py b/core/validators.py new file mode 100644 index 0000000..f31c0c8 --- /dev/null +++ b/core/validators.py @@ -0,0 +1,52 @@ +import re +from pathlib import Path + +from django.core.exceptions import ValidationError +from django.core.validators import URLValidator +from django.utils.translation import gettext_lazy + + +def mac_address_validator(value): + """ Validator for checking if entered value is correct MAC address. + Valid MAC address formats for this validator are: + 1. String, separated with '-', e.g. '01-3A-4f-Ee-23-AF' + 2. String, separated with ':', e.g. 
'01:3E:4F:EE:23:af'
+    :param value: value from source for validation
+    :return: pass validation or raise validation error
+    """
+    validation_error_message = gettext_lazy('Incorrect format of MAC address')
+    # Regex that validates the provided string
+    mac_regex = '[0-9a-f]{2}([-:]?)[0-9a-f]{2}(\\1[0-9a-f]{2}){4}$'
+    if not re.match(mac_regex, value.lower()):
+        raise ValidationError(message=validation_error_message)
+
+
+def domain_or_ip_validator(value):
+    """ Validator for checking if entered value is an IP address or domain
+    :param value: value from user
+    :return: pass validation or raise validation error
+    """
+    validation_error_message = gettext_lazy('Please enter valid domain or IP address')
+    validator = URLValidator()
+    value_with_scheme = f'https://{value}'
+    try:
+        validator(value_with_scheme)
+    except ValidationError:
+        raise ValidationError(message=validation_error_message)
+
+
+class ValidateFileExtension:
+    """Custom file extension form validator"""
+
+    def __init__(self, allowed_extensions):
+        if allowed_extensions is not None:
+            allowed_extensions = [allowed_extension.lower() for allowed_extension in allowed_extensions]
+        self.allowed_extensions = allowed_extensions
+
+    def __call__(self, value):
+        self.extension = Path(value.name).suffix.lower()
+        # Build the error message only when validation actually fails; formatting it
+        # unconditionally would raise TypeError when allowed_extensions is None
+        if self.allowed_extensions is not None and self.extension not in self.allowed_extensions:
+            self.message = gettext_lazy(
+                "File extension '{extension}' is not allowed. Allowed extensions are: '{allowed_extensions}'.").format(
+                extension=self.extension, allowed_extensions=', '.join(self.allowed_extensions))
+            raise ValidationError(self.message)
diff --git a/core/views/__init__.py b/core/views/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/core/views/amc_services.py b/core/views/amc_services.py
new file mode 100644
index 0000000..90e7e23
--- /dev/null
+++ b/core/views/amc_services.py
@@ -0,0 +1,21 @@
+from rest_framework import status
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from core.services.amc_services import AMCService
+
+
+class AMCServiceStatus(APIView):
+
+    def get(self, request, *args, **kwargs):
+        service = AMCService(kwargs['service_name'])
+        service_status = service.get_status()
+        return Response({'status': service_status}, status=status.HTTP_200_OK)
+
+
+class AMCServiceReboot(APIView):
+
+    def get(self, request, *args, **kwargs):
+        service = AMCService(kwargs['service_name'])
+        service.reboot()
+        return Response({'status': 'ok'}, status=status.HTTP_200_OK)
diff --git a/core/views/view_login.py b/core/views/view_login.py
new file mode 100644
index 0000000..3af6fb2
--- /dev/null
+++ b/core/views/view_login.py
@@ -0,0 +1,35 @@
+import logging
+
+from django.contrib.auth import authenticate, login, logout
+from django.utils.translation import gettext_lazy
+from rest_framework.permissions import AllowAny, IsAuthenticated
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+_log = logging.getLogger(__name__)
+
+
+class LoginView(APIView):
+    """Login API with json data."""
+
+    permission_classes = [AllowAny]
+
+    def post(self, request):
+        # .get() avoids a 500 on missing fields; authenticate() then fails cleanly
+        username = request.data.get('username')
+        password = request.data.get('password')
+        user = authenticate(request, username=username, password=password)
+        if user is None or not user.is_active:
+            # Log the attempted username: `user` is None when authentication fails
+            _log.info(f'Authenticate error with username [{username}]')
+            return Response({'detail': gettext_lazy('Invalid credentials.')}, status=400)
+        login(request, user)
+        return Response({'status': 
'ok'}, status=200) + + +class LogoutView(APIView): + """Logout API.""" + permission_classes = (IsAuthenticated,) + + def post(self, request): + _log.info(f'Logout user [{request.user.username}]') + logout(request) + return Response() diff --git a/core/views/view_settings.py b/core/views/view_settings.py new file mode 100644 index 0000000..370ba57 --- /dev/null +++ b/core/views/view_settings.py @@ -0,0 +1,98 @@ +import logging +import os + +from django.conf import settings +from rest_framework.generics import RetrieveUpdateAPIView, RetrieveUpdateDestroyAPIView, CreateAPIView +from rest_framework.response import Response +from rest_framework.views import APIView + +from core import constants +from core.mixins import ApiPermissionCheckMixin +from core.models import ConsoleAuthSettings, TLSSettings +from core.serializers import AuthSettingsSerializer, TLSSettingsSerializer +from core.services.tls_settings import handle_uploaded_file, update_nginx, remove_ssl_certificate, restart_nginx, \ + update_model, create_cert +from perms.models import Perm + +_log = logging.getLogger(__name__) + + +class AuthSettingsAPIView(ApiPermissionCheckMixin, + RetrieveUpdateAPIView): + console_permissions = [Perm.can_view_system_settings] + serializer_class = AuthSettingsSerializer + + def get_object(self): + auth_settings = ConsoleAuthSettings.get_solo() + return auth_settings + + class Meta: + model = ConsoleAuthSettings + + +class TLSSettingsAPIView(ApiPermissionCheckMixin, RetrieveUpdateDestroyAPIView, CreateAPIView): + console_permissions = [Perm.can_view_system_settings] + serializer_class = TLSSettingsSerializer + + def get_object(self): + tls_settings = TLSSettings.get_solo() + return tls_settings + + def create(self, request, *args, **kwargs): + """ + Generate new TLS certificate and save to MEDIA_ROOT + """ + tls_settings = self.get_object() + create_cert(tls_settings) + tls_settings.save() + data = self.serializer_class(tls_settings).data + return Response(data) + + def retrieve(self, request, *args, **kwargs): + tls_settings = self.get_object() + if os.path.exists(os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_CERT_FILENAME)) and \ + os.path.exists(os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_KEY_FILENAME)): + tls_settings.certificate.name = constants.DEFAULT_CERT_FILENAME + tls_settings.key.name = constants.DEFAULT_KEY_FILENAME + tls_settings.save() + serializer = self.get_serializer(instance=tls_settings) + return Response(serializer.data) + + def update(self, request, *args, **kwargs): + tls_settings = self.get_object() + serializer = self.get_serializer(instance=tls_settings, data=request.data) + serializer.is_valid(raise_exception=True) + + tls_settings.enabled = serializer.validated_data["enabled"] + + _log.info("Processing upload files") + if serializer.validated_data['file_count'] == 2: + handle_uploaded_file(serializer.validated_data.get("certificate", None), + os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_CERT_FILENAME)) + handle_uploaded_file(serializer.validated_data.get("key", None), + os.path.join(settings.MEDIA_ROOT, constants.DEFAULT_KEY_FILENAME)) + + update_model(tls_settings) + + _log.info(f"User [{request.user}] set HTTPS enabled to [{serializer.validated_data['enabled']}]") + update_nginx(serializer.validated_data["enabled"]) + restart_nginx() + tls_settings.save() + tls_settings.clear_cache() + + return Response(serializer.data) + + def destroy(self, request, *args, **kwargs): + remove_ssl_certificate() + update_nginx(False) + restart_nginx() + _log.info(f"User 
[{request.user}] set HTTPS enabled to false") + + return Response() + + +class ProductVersionView(APIView): + """Product version API""" + + def get(self, request): + return Response(settings.PRODUCT_VERSION) diff --git a/correlation/__init__.py b/correlation/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/correlation/admin.py b/correlation/admin.py new file mode 100644 index 0000000..571012e --- /dev/null +++ b/correlation/admin.py @@ -0,0 +1,23 @@ +from django.contrib import admin +from django.contrib.postgres import fields +from django_json_widget.widgets import JSONEditorWidget + +from correlation.models import Rule, Group + + +class RuleAdmin(admin.ModelAdmin): + list_display = ("name", "depth", "created", "updated") + list_display_links = ["name"] + + formfield_overrides = { + fields.JSONField: {'widget': JSONEditorWidget}, + } + + +class GroupAdmin(admin.ModelAdmin): + list_display = ("name",) + list_display_links = ["name"] + + +admin.site.register(Rule, RuleAdmin) +admin.site.register(Group, GroupAdmin) diff --git a/correlation/api.py b/correlation/api.py new file mode 100644 index 0000000..4bfeb3e --- /dev/null +++ b/correlation/api.py @@ -0,0 +1,93 @@ +import io +import json +import logging + +from django.forms import model_to_dict +from django.http import HttpResponse +from rest_framework import status +from rest_framework.decorators import action +from rest_framework.mixins import ListModelMixin, RetrieveModelMixin, UpdateModelMixin, DestroyModelMixin +from rest_framework.response import Response +from rest_framework.viewsets import GenericViewSet, ModelViewSet + +from core.mixins import ApiPermissionCheckMixin, ExportToJSONMixin +from correlation.models import Group, Rule +from correlation.serializers import (RuleSerializer, GroupSerializer, ImporRulesFileSerializer) +from correlation.services.import_service import ImportRulesService +from correlation.tasks import update_correlator_tasks +from perms.models import Perm + +_log = logging.getLogger(__name__) + + +class RuleViewSet(ApiPermissionCheckMixin, + ExportToJSONMixin, + ListModelMixin, + RetrieveModelMixin, + UpdateModelMixin, + DestroyModelMixin, + GenericViewSet): + serializer_class = RuleSerializer + console_permissions = {"destroy": [Perm.can_delete_correlation_rules], + "list": [Perm.can_view_correlation_rules_list], + "create": [Perm.can_create_and_edit_correlation_rule], + "retrieve": [Perm.can_view_correlation_rule_card], + "update": [Perm.can_create_and_edit_correlation_rule], + "partial_update": [Perm.can_create_and_edit_correlation_rule], + "json_export": [Perm.can_create_and_edit_correlation_rule], + "import_rules": [Perm.can_create_and_edit_correlation_rule], + } + + def get_queryset(self): + if self.action == 'json_export': + return Rule.objects.exclude(actions_json__contains=[{'type': 'firewall'}]) + return Rule.objects.order_by('-status', 'id') + + def create(self, request, *args, **kwargs): + data_dict = request.data.copy() + data_dict["kind"] = "JSON" + rule = RuleSerializer(data=data_dict) + if rule.is_valid(): + rule.save() + return Response(data=rule.data, status=status.HTTP_201_CREATED) + else: + _log.critical(f"\n\n\nError: {rule.errors}\n\nData: {rule.data}\n\n") + return Response(data=rule.errors, status=status.HTTP_400_BAD_REQUEST) + + def get_object(self): + # get the initial value of the model fields + # in the signal (correlation/services/signals.py) we make a decision to increase rule.rev or not + rule = super().get_object() + rule._initial = model_to_dict(rule, 
fields=[field.name for field in rule._meta.fields])
+        return rule
+
+    @action(methods=['POST'], detail=False)
+    def import_rules(self, request, *args, **kwargs):
+        serializer = ImporRulesFileSerializer(request.POST, request.FILES)
+        serializer.is_valid(raise_exception=True)
+        service = ImportRulesService(serializer.validated_data['uploaded_file'])
+        result = service.run_import()
+
+        # send all existing rules to the correlator
+        update_correlator_tasks(clear_store=True)
+
+        # create report file
+        file = io.BytesIO()
+        file.write(json.dumps(result, ensure_ascii=False, indent=4).encode())
+        file.seek(0)
+        response = HttpResponse(file, content_type="application/file")
+        response['Content-Disposition'] = 'attachment; filename="Report.json"'
+        return response
+
+
+class GroupViewSet(ApiPermissionCheckMixin, ModelViewSet):
+    serializer_class = GroupSerializer
+
+    console_permissions = {'list': [Perm.can_view_correlation_rule_card],
+                           'create': [Perm.can_edit_correlation_groups],
+                           'retrieve': [Perm.can_edit_correlation_groups],
+                           'destroy': [Perm.can_edit_correlation_groups],
+                           'update': [Perm.can_edit_correlation_groups],
+                           'partial_update': [Perm.can_edit_correlation_groups]}
+
+    queryset = Group.objects.all()
diff --git a/correlation/apps.py b/correlation/apps.py
new file mode 100644
index 0000000..589659c
--- /dev/null
+++ b/correlation/apps.py
@@ -0,0 +1,27 @@
+import logging
+
+from django.apps import AppConfig
+from django.conf import settings
+from django.db.models.signals import pre_save
+
+_log = logging.getLogger()
+
+started = False
+
+TEST_MODE = getattr(settings, "TEST_MODE", False)
+
+
+class CorrelationConfig(AppConfig):
+    name = 'correlation'
+    verbose_name = 'Correlation'
+
+    def ready(self):
+        if not TEST_MODE:
+            from correlation.services.signals import rule_pre_save_update
+            # Register for the Rule model via a lazy "app.Model" reference;
+            # sender=self (the AppConfig instance) would never match any save
+            pre_save.connect(rule_pre_save_update, sender='correlation.Rule')
+        global started
+        if not started:
+            from logstash.tasks import initial_task_update
+            # args must be a tuple: (False) is just False, (False,) is a 1-tuple
+            res = initial_task_update.apply_async(countdown=60, args=(False,))
+            _log.info(f"Starting correlator rules initialization with task id {res.id}")
+            started = True
diff --git a/correlation/constants.py b/correlation/constants.py
new file mode 100644
index 0000000..6e2c318
--- /dev/null
+++ b/correlation/constants.py
@@ -0,0 +1,48 @@
+from enum import Enum
+
+import redis
+from django.conf import settings
+from django.contrib.auth.models import User
+from django.utils.translation import gettext_lazy
+
+from django.db import models
+from console.models import Vulnerability
+from assets.models.assets import AssetManufacturer, OperatingSystem, AssetListGroup
+from incident.models import IncidentRecommendations, IncidentEffect, IncidentCategory
+
+EXPORT_TMP_PATH = '/tmp/export.json'
+MEDIA_ROOT = getattr(settings, 'MEDIA_ROOT', 'media')
+CORRELATOR_URL = getattr(settings, 'CORRELATOR_URL', 'http://localhost:5566')
+STARTUP_UPDATE_NAME = "correlator_startup_tasks"
+redis_instance = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0)
+
+MODEL_NAME_MAPPER = {
+    'os': OperatingSystem,
+    'group': AssetListGroup,
+    'manufacturer': AssetManufacturer,
+    'vulnerabilities': Vulnerability,
+    'effects': IncidentEffect,
+    'close_recommendations': IncidentRecommendations,
+    'assigned_to': User,
+    'category': IncidentCategory
+}
+
+
+class Type(models.IntegerChoices):
+    system = 0, gettext_lazy("System")
+    user = 1, gettext_lazy("User")
+
+
+class RuleParseErrors(Enum):
+    NAME_DESCRIPTION_ERROR = 1
+    FORMAT_ERROR = 2
+
+
+class ActionType(models.TextChoices):
+    SYSLOG = 
"syslog", gettext_lazy("Syslog") + HTTP = "http", gettext_lazy("HTTP") + INCIDENT = "incident", gettext_lazy("Incident") + BASH = "bash", gettext_lazy("Bash") + EXECUTABLE = "exec", gettext_lazy("Run executable") + ASSET = "asset", gettext_lazy("New asset") + FIREWALL = "firewall", gettext_lazy("Firewall rule") diff --git a/correlation/fields.py b/correlation/fields.py new file mode 100644 index 0000000..7a9c532 --- /dev/null +++ b/correlation/fields.py @@ -0,0 +1,30 @@ +from rest_framework import serializers + +from correlation.models import Group, Rule + + +class RuleRelatedField(serializers.PrimaryKeyRelatedField): + + def to_representation(self, rule: Rule): + return IDNameRuleSerializer(rule).data + + +class IDNameRuleSerializer(serializers.ModelSerializer): + class Meta: + model = Rule + fields = ['id', 'name'] + + +class RuleGroupRelatedField(serializers.PrimaryKeyRelatedField): + + def to_representation(self, group: Group): + return IDNameGroupSerializer(group).data + + def use_pk_only_optimization(self): + return False + + +class IDNameGroupSerializer(serializers.ModelSerializer): + class Meta: + model = Group + fields = ['id', 'name'] diff --git a/correlation/migrations/__init__.py b/correlation/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/correlation/models.py b/correlation/models.py new file mode 100644 index 0000000..2559751 --- /dev/null +++ b/correlation/models.py @@ -0,0 +1,64 @@ +import logging + +from django.core.validators import MinValueValidator +from django.db import models +from django.utils.translation import gettext_lazy + +from console.models import NameDescriptionModel, UniqueNameDescriptionModel +from correlation.constants import Type + +_log = logging.getLogger(__name__) + + +class Group(UniqueNameDescriptionModel): + """ Groups for correlator rules""" + pass + + +class Rule(NameDescriptionModel): + """ Model for correlator rule""" + + group = models.ForeignKey(Group, + on_delete=models.SET_NULL, + related_name='rules', + null=True, + blank=True, + verbose_name=gettext_lazy("Group")) + multi = models.BooleanField(verbose_name=gettext_lazy("Multi reaction"), + help_text=gettext_lazy("Apply actions to every event that match the rule"), + default=False) + type = models.IntegerField(choices=Type.choices, + verbose_name=gettext_lazy("Type")) + status = models.BooleanField(verbose_name=gettext_lazy("Enabled"), help_text=gettext_lazy("Is rule enabled?")) + archived = models.BooleanField(verbose_name=gettext_lazy("Archived"), + help_text=gettext_lazy( + "Is rule archived? If set - than rule can never be enabled again"), + default=False) + depth = models.DurationField(verbose_name=gettext_lazy("Depth"), + help_text=gettext_lazy("Analize depth in format HH:MM:SS")) + created = models.DateTimeField(auto_now_add=True, + verbose_name=gettext_lazy('Created')) + updated = models.DateTimeField(auto_now=True, + verbose_name=gettext_lazy('Updated')) + rule_json = models.JSONField(verbose_name=gettext_lazy('Predicats'), + help_text=gettext_lazy('JSON object with predicats description')) + actions_json = models.JSONField(verbose_name=gettext_lazy('Actions'), + help_text=gettext_lazy('JSON object with actions description')) + + rev = models.IntegerField(default=1, + verbose_name=gettext_lazy('Rule version'), + help_text=gettext_lazy('Shows rule current version. 
Increments on change'))
+    sid = models.IntegerField(blank=False,
+                              verbose_name=gettext_lazy('Rule SID'),
+                              help_text=gettext_lazy('SID of correlation rule'),
+                              validators=[MinValueValidator(1)])
+    is_active = models.BooleanField(gettext_lazy('Is rule active'),
+                                    default=True,
+                                    help_text=gettext_lazy('Indicates if rule is activated or not'))
+
+    class Meta:
+        unique_together = ('rev', 'sid')
+
+    def save(self, *args, **kwargs):
+        # Pop the custom flag so it is not forwarded to Django's save(),
+        # and pass the remaining arguments through instead of dropping them
+        self.is_being_parsed = kwargs.pop("is_being_parsed", None)
+        super().save(*args, **kwargs)
diff --git a/correlation/serializers.py b/correlation/serializers.py
new file mode 100644
index 0000000..0ecec4d
--- /dev/null
+++ b/correlation/serializers.py
@@ -0,0 +1,166 @@
+import json
+import logging
+
+from rest_framework import serializers
+from rest_framework.exceptions import ValidationError
+
+from assets.models.assets import OperatingSystem, AssetListGroup, AssetManufacturer
+from assets.serializers.assets import AssetManufacturerSerializer, OsNameSerializer, AssetGroupSerializer
+from core.serializers import ModelLocalizedSerializer
+from core.validators import ValidateFileExtension
+from correlation.constants import ActionType
+from correlation.fields import RuleGroupRelatedField, RuleRelatedField
+from correlation.models import Rule, Group
+from incident.models import IncidentEffect, IncidentRecommendations, IncidentCategory
+from incident.serializers.incident import (IncidentRecommendationsSerializer, IncidentEffectSerializer,
+                                           IncidentCategorySerializer)
+
+_log = logging.getLogger(__name__)
+
+
+class RuleSerializer(ModelLocalizedSerializer):
+    kind = serializers.CharField(source='get_type_display')
+    group = RuleGroupRelatedField(queryset=Group.objects.all(), allow_null=True)
+
+    class Meta:
+        model = Rule
+        fields = ["id", "name", "description", "sid", "rev", "kind", "group", "status", "created", "updated", "multi",
+                  "archived", "type", "depth", "rule_json", "actions_json"]
+
+    def create(self, validated_data):
+        validated_data.pop("get_type_display")
+        return super().create(validated_data)
+
+    def update(self, request, validated_data, *args, **kwargs):
+        validated_data.pop("get_type_display", None)
+        return super().update(request, validated_data, *args, **kwargs)
+
+    def validate_rule_json(self, value):
+        list_fields = ['type', 'field', 'operands']
+        try:
+            str_json = json.dumps(value)
+            json.loads(str_json)
+            for field in value:
+                if field not in list_fields:
+                    raise serializers.ValidationError(
+                        f"Invalid value: unknown field - {field}, allowed {list_fields}")
+        except json.decoder.JSONDecodeError as err:
+            raise serializers.ValidationError(f"Invalid JSON: {err}")
+        value['field'] = 'NULL'
+        value['type'] = 'query_string'
+        return value
+
+    def validate_actions_json(self, value):
+        action_type_list = [item[0] for item in ActionType.choices]
+        try:
+            if isinstance(value, list):
+                for item in value:
+                    str_json = json.dumps(item)
+                    json.loads(str_json)
+                    if item["type"] not in action_type_list:
+                        raise ValidationError(
+                            f"Invalid JSON: unknown action type - {item['type']}, allowed {action_type_list}")
+            else:
+                str_json = json.dumps(value)
+                json.loads(str_json)
+                if value["type"] not in action_type_list:
+                    raise ValidationError(
+                        f"Invalid JSON: unknown action type - {value['type']}, allowed {action_type_list}")
+        except json.decoder.JSONDecodeError as err:
+            raise ValidationError(f"Invalid JSON: {err} - {value}")
+        except KeyError:
+            raise ValidationError("Invalid JSON: type not found")
+        return value
+
+    def validate_sid(self, sid):
+        qs = 
Rule.objects.filter(sid=sid, status=True) + current_rule_id = getattr(self.instance, 'id', None) # self.instance is Rule or None + if current_rule_id: + if qs.exclude(sid=self.instance.sid).exists(): + raise ValidationError('This SID is already in use') + return sid + else: + if qs.exists(): + raise ValidationError('This SID is already in use') + return sid + + def validate(self, attrs): + if not self.instance: # creating Rule + return attrs + # if the rule is disabled and we are trying to enable it, but there is already an enabled rule - return an error + if self.instance.status is False and attrs.get('status', False): + if Rule.objects.filter(sid=self.instance.sid, status=True).exists(): + raise ValidationError({'status': f'There is already an enabled rule with this sid: {self.instance.sid}'}) + + # if we are editing a rule, but there is already a saved rule with the current SID and REV+1 - return an error + if Rule.objects.filter(sid=self.instance.sid, rev=self.instance.rev+1).exists(): + raise ValidationError( + {'sid': f'Check that there is no rule with SID {self.instance.sid} and REV {self.instance.rev+1} ' + f'and try again'}) + return attrs + + +class RuleExportSerializer(ModelLocalizedSerializer): + group = serializers.SerializerMethodField() + actions_json = serializers.SerializerMethodField() + + class Meta: + model = Rule + fields = '__all__' + + def get_actions_json(self, obj): + export_actions = obj.actions_json + for action in export_actions: + if action['type'] == 'incident': + if 'effects' in action: + action['effects'] = IncidentEffectSerializer( + IncidentEffect.objects.filter(pk__in=list(map(int, action['effects']))), many=True).data + if 'close_recommendations' in action: + action['close_recommendations'] = IncidentRecommendationsSerializer( + IncidentRecommendations.objects.filter(pk__in=list(map(int, action['close_recommendations']))), + many=True).data + if action['assigned_to'] != '': + action['assigned_to'] = '' + if action['category'] != '': + action['category'] = IncidentCategorySerializer( + IncidentCategory.objects.filter(pk=int(action['category'])), many=True).data + if action['type'] == 'asset': + if action['os'] != '': + action['os'] = OsNameSerializer(OperatingSystem.objects.filter(pk=int(action['os'])), + many=True).data + if action['group'] != '': + action['group'] = AssetGroupSerializer(AssetListGroup.objects.filter(pk=int(action['group'])), + many=True).data + if action['manufacturer'] != '': + action['manufacturer'] = AssetManufacturerSerializer( + AssetManufacturer.objects.filter(pk=int(action['manufacturer'])), many=True).data + if 'vulnerabilities' in action: + action['vulnerabilities'] = '' + return export_actions + + def get_group(self, obj: Rule): + if obj.group: + return obj.group.name + else: + return "" + + +class GroupSerializer(serializers.ModelSerializer): + rules = RuleRelatedField(queryset=Rule.objects.all(), default=[], many=True) + + class Meta: + model = Group + fields = ["id", "name", "description", "rules"] + + +class ImporRulesFileSerializer(serializers.Serializer): + uploaded_file = serializers.FileField(validators=[ValidateFileExtension(['.json'])]) + + +class RuleImportSerializer(ModelLocalizedSerializer): + class Meta: + model = Rule + fields = [ + "name", "description", "group", "type", "status", "depth", + "actions_json", "rule_json", "multi", "sid", "rev" + ] diff --git a/correlation/services/__init__.py b/correlation/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
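[Reviewer note: RuleImportSerializer above, together with ImportRulesService in the next file, implies the expected shape of an import file. A hedged sketch of a minimal payload, written as a Python literal with illustrative values; the exact version floor comes from settings.MINIMAL_VERSION_CORRELATION_RULES, which is not shown in this hunk, and the group is given by name and resolved to a pk during import:]

    # Hypothetical minimal import payload (all values illustrative)
    {
        "meta": {"version": "<console version the rules were exported from>"},
        "close_recommendations": [],
        "effects": [],
        "rules": [
            {
                "name": "Example rule",
                "description": "",
                "group": "Default",      # group name; get_or_create'd, then replaced by pk
                "type": 1,               # Type.user
                "status": True,
                "depth": "00:10:00",     # HH:MM:SS, per the model help_text
                "multi": False,
                "sid": 100,
                "rev": 1,
                "rule_json": {"type": "query_string", "field": "NULL", "operands": []},
                "actions_json": [{"type": "syslog"}]
            }
        ]
    }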
a/correlation/services/import_service.py b/correlation/services/import_service.py new file mode 100644 index 0000000..dafc46f --- /dev/null +++ b/correlation/services/import_service.py @@ -0,0 +1,319 @@ +import json +import logging + +from django.conf import settings +from django.utils.translation import gettext_lazy +from packaging import version +from rest_framework.exceptions import APIException + +from assets.models.assets import OperatingSystem, AssetManufacturer, AssetListGroup +from correlation.constants import EXPORT_TMP_PATH, MODEL_NAME_MAPPER +from correlation.constants import RuleParseErrors +from correlation.models import Group, Rule +from correlation.serializers import RuleImportSerializer +from incident.models import IncidentRecommendations, IncidentEffect, IncidentCategory + +_log = logging.getLogger(__name__) + + +class ImportException(APIException): + status_code = 400 + + +class ImportRulesService: + + def __init__(self, file, check_version=True): + self.file = file + self.check_rules_version = check_version + self.current_console_version = getattr(settings, 'SITE_INFO')['version'] + + # storage to store the full result of the import + self.result_log = [] + + # list of all errors (as string) during import + self.fails_list = [] + + def run_import(self): + """ Main method """ + + with open(EXPORT_TMP_PATH, 'wb') as f: + for chunk in self.file.chunks(): + f.write(chunk) + try: + with open(EXPORT_TMP_PATH, 'r') as f: + self.json_data = json.load(f) + except json.JSONDecodeError: + raise ImportException( + {'status': 'error', 'message': gettext_lazy("Incorrect JSON data in provided file")}) + + if self.check_rules_version: + self.check_version() + self.create_missing_objects() + + for rule in self.json_data['rules']: + try: + # start validation + is_rule_valid = self.validate_action_list(rule) + if is_rule_valid: + new_rule = RuleImportSerializer(data=rule) + if new_rule.is_valid(): + if self.validate_sid_rev(rule): + new_rule.save() + self.result_log.append({'rule_name': rule["name"], 'import_status': 'success', + 'report_message': str(gettext_lazy('Imported successfully'))}) + _log.info(f'Creating new rule {rule["name"]}') + else: + self.result_log.append({'rule_name': rule["name"], 'import_status': 'failed', + 'report_message': self.fails_list[-1]}) # get last added error + else: + self.result_log.append({'rule_name': rule["name"], 'import_status': 'failed', + 'report_message': f'Validation errors: {new_rule.errors}'}) + _log.error(f'Validation errors: {new_rule.errors}') + else: + self.result_log.append({'rule_name': rule["name"], 'import_status': 'failed', + 'report_message': self.fails_list[-1]}) # get last added error + _log.info(f'Rule {rule["name"]} is invalid: {self.fails_list[-1]}') + + except Exception as exc: + self.result_log.append({'rule_name': rule["name"], 'import_status': 'failed', + 'report_message': f'Unknown error: {str(exc)}'}) # get last added error + + _log.info(f'Rule import completed. 
List of errors, occurred during import process: {self.fails_list}') + + return self.result_log + + def check_version(self): + try: + importing_rules_console_version = self.json_data['meta']['version'] + minimum_version = version.parse(settings.MINIMAL_VERSION_CORRELATION_RULES) + importing_version = version.parse(importing_rules_console_version) + if importing_version >= minimum_version: + return + raise ImportException( + {'status': 'error', 'message': f'{gettext_lazy("Version incompatible")}'}) + except KeyError: + raise ImportException( + {'status': 'error', 'message': f'{gettext_lazy("Version incompatible")}'}) + + def create_missing_objects(self): + """ + Each attempt to create an object in the database is wrapped in try/except. + If the object was not created, then at later stages the rule will be marked as invalid, + because the required object does not exist in the database + """ + + # creating recommendations + for recommendation in self.json_data.get('close_recommendations', []): + try: + IncidentRecommendations.objects.get_or_create( + name=recommendation['name'], + defaults={ + "description": recommendation.get('description', '') + } + ) + except (AttributeError, TypeError): + continue + # creating effects + for effect in self.json_data.get('effects', []): + try: + IncidentEffect.objects.get_or_create( + name=effect['name'], + defaults={ + 'description': effect.get('description', '') + } + ) + except (AttributeError, TypeError): + continue + for rule in self.json_data.get('rules', []): + for action in rule['actions_json']: + if action['type'] == 'incident' and action['category'] != '': + # there is only one incident category for each rule, but it in list + for category in action['category']: + try: + IncidentCategory.objects.get_or_create( + name=category['name'], + defaults={ + 'description': category.get('description', '') + } + ) + except (AttributeError, TypeError): + continue + if action['type'] == 'asset': + if action['os'] != '': + # there is only one OperatingSystem for each rule, but it in list + for os in action['os']: + try: + OperatingSystem.objects.get_or_create(name=os['name']) + except (AttributeError, TypeError): + continue + if action['manufacturer'] != '': + for manufacturer in action['manufacturer']: + try: + AssetManufacturer.objects.get_or_create( + name=manufacturer['name'], + defaults={ + 'description': manufacturer.get('description', '') + } + ) + except (AttributeError, TypeError): + continue + if action['group'] != '': + for group in action['group']: + try: + AssetListGroup.objects.get_or_create( + name=group['name'], + defaults={ + 'description': group.get('description', '') + } + ) + except (AttributeError, TypeError): + continue + # creating rules group + group, created = Group.objects.get_or_create(name=rule.get('group')) + if created: + _log.info(f"Creating new rule group {group.name} for rule {rule.get('name')}") + rule['group'] = group.pk + + def validate_action_list(self, rule): + rule_is_valid = True + + for action in rule['actions_json']: + if action['type'] == 'asset': + check_list = [ + ['os', True], + ['group', True], + ['manufacturer', True], + ['vulnerabilities', False] + ] + rule_is_valid = self._parse_check_list(action, check_list) + elif action['type'] == 'incident': + check_list = [ + ['effects', False], + ['close_recommendations', False], + ['assigned_to', True], + ['category', True] + ] + rule_is_valid = self._parse_check_list(action, check_list) + if not rule_is_valid: + break + return rule_is_valid + + def validate_sid_rev(self, 
rule): + rule_is_valid = True + + # First, check if rule with same SID already exists in DB + try: + exist_rule = Rule.objects.get(sid=rule['sid'], status=True) + # If rule exists, first - check if REV is equal to importing rule + if exist_rule.rev == rule['rev']: + # If SID and REV are equal - skip the rule import by invalidating the rule + # and add to fail list message, that this rule is skipped + rule_is_valid = False + self.fails_list.append( + gettext_lazy('{rule_name} did not import because same rule already exists in database').format( + rule_name=rule['name'])) + elif exist_rule.rev > rule['rev']: + # If REV of importing rule is less than REV of rule in DB, skip this rule and add message to fail list + rule_is_valid = False + self.fails_list.append( + gettext_lazy('{rule_name} did not import because newer version of rule exists in database').format( + rule_name=rule["name"])) + elif exist_rule.rev < rule['rev']: + # If REV of importing rule is greater than REV of rule in DB, + # change active status of existing rule to False + exist_rule.archived = True + exist_rule.save(is_being_parsed=True) + else: + # Add mistake if all checks are failed + rule_is_valid = False + self.fails_list.append( + f'{rule["name"]} invalid SID or REV arguments') + except Rule.DoesNotExist: + # If there are no rules like this - rule_is_valid stays as True + return rule_is_valid + except KeyError: + rule_is_valid = False + self.fails_list.append(gettext_lazy( + 'Incorrect format of importing rule')) + + return rule_is_valid + + def _parse_check_list(self, action, check_list): + """ Function to parse all available rule's action field + :param action: current checking action from actions_json + :param check_list: list of fields, that should be checked + :return: + True if action is valid + False otherwise + """ + for check in check_list: + tmp_check = self._name_description_model_parser(action, check[0], check[1]) + if tmp_check[0]: + action[check[0]] = tmp_check[1] + else: + self.fails_list.append(tmp_check[1]) + return False + return True + + def _name_description_model_parser(self, action, action_key, is_single): + """ Function for checking if model instance exists in DB and add it if positive. 
+    def _parse_check_list(self, action, check_list):
+        """ Parse all checkable fields of a rule's action
+        :param action: the action from actions_json currently being checked
+        :param check_list: list of [field, is_single] pairs that should be checked
+        :return:
+            True if the action is valid
+            False otherwise
+        """
+        for check in check_list:
+            tmp_check = self._name_description_model_parser(action, check[0], check[1])
+            if tmp_check[0]:
+                action[check[0]] = tmp_check[1]
+            else:
+                self.fails_list.append(tmp_check[1])
+                return False
+        return True
+
+    def _name_description_model_parser(self, action, action_key, is_single):
+        """ Check whether a model instance exists in the DB and, if it does,
+        substitute its pk. Otherwise, return False together with an error message
+        :param action: the complete action from the actions_json field of the imported rule
+        :param action_key: key under which the NameDescription model is referenced in the action
+        :param is_single: True if the model referenced by action_key is a ChoiceField
+            in the action, False if it is a MultipleChoiceField
+        :return: list of two elements, one of the following:
+            [False, 'error message'],
+            [True, '']
+        """
+
+        def generate_error_message(error_type, error_element=''):
+            translation_template_message = gettext_lazy(
+                '"{instance_name}" instance with "{instance_arg}" name does not exist')
+            if error_type == RuleParseErrors.NAME_DESCRIPTION_ERROR:
+                if is_single:
+                    return translation_template_message.format(
+                        instance_name=MODEL_NAME_MAPPER[action_key]._meta.verbose_name.title(),
+                        instance_arg=action[action_key][0]["name"])
+                else:
+                    return translation_template_message.format(
+                        instance_name=MODEL_NAME_MAPPER[action_key]._meta.verbose_name.title(),
+                        instance_arg=error_element)
+            elif error_type == RuleParseErrors.FORMAT_ERROR:
+                return f'{MODEL_NAME_MAPPER[action_key]._meta.model_name} value has wrong format'
+            else:
+                return 'Unknown error occurred'
+
+        if is_single:
+            if action_key == 'assigned_to':  # always clear this field
+                return [True, ""]
+            if action[action_key] != "":
+                try:
+                    existing_model_instance = MODEL_NAME_MAPPER[action_key].objects.get(
+                        name=action[action_key][0]['name'])
+                    return [True, str(existing_model_instance.pk)]
+                except MODEL_NAME_MAPPER[action_key].DoesNotExist:
+                    return [False, generate_error_message(error_type=RuleParseErrors.NAME_DESCRIPTION_ERROR)]
+            else:
+                return [True, ""]
+        else:
+            if action_key in action:
+                if action_key == 'vulnerabilities':  # always clear this field
+                    return [True, []]
+                tmp_list = []
+                if not isinstance(action[action_key], list):
+                    return [False, generate_error_message(error_type=RuleParseErrors.FORMAT_ERROR)]
+                for element in action[action_key]:
+                    try:
+                        tmp_model_instance = MODEL_NAME_MAPPER[action_key].objects.get(
+                            name=element)
+                        tmp_list.append(str(tmp_model_instance.pk))
+                    except MODEL_NAME_MAPPER[action_key].DoesNotExist:
+                        return [False, generate_error_message(error_type=RuleParseErrors.NAME_DESCRIPTION_ERROR,
+                                                              error_element=element)]
+                    except TypeError:
+                        return [False,
+                                generate_error_message(error_type=RuleParseErrors.FORMAT_ERROR, error_element=element)]
+                return [True, tmp_list]
+            else:
+                return [True, ""]
diff --git a/correlation/services/rules.py b/correlation/services/rules.py
new file mode 100644
index 0000000..bf371ca
--- /dev/null
+++ b/correlation/services/rules.py
@@ -0,0 +1,19 @@
+from django.forms import model_to_dict
+
+
+def to_correlator_data(rule):
+    return {
+        "name": rule.name,
+        "depth": str(int(rule.depth.total_seconds())) + "s",
+        "id": str(rule.pk),
+        "predicat": rule.rule_json,
+        "actions": rule.actions_json,
+        "multi": rule.multi
+    }
+
+
+def check_if_only_status_changed(rule):
+    initial_data = getattr(rule, '_initial', {})
+    new_data = model_to_dict(rule, fields=[field.name for field in rule._meta.fields])
+    diffs = [k for k, v in initial_data.items() if v != new_data[k]]
+    return len(diffs) == 1 and 'status' in diffs
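For reference, this is the payload shape `to_correlator_data` produces. A runnable sketch with made-up values, using a `SimpleNamespace` stand-in for a `Rule` instance (the real model lives in `correlation/models.py`); note that `predicat` is spelled exactly as the correlator expects:

```python
# Editor's sketch (not part of the patch): what to_correlator_data() emits for
# a rule with a 10-minute depth. fake_rule is a duck-typed stand-in for
# correlation.models.Rule, used here only to make the sketch runnable.
import datetime
from types import SimpleNamespace

fake_rule = SimpleNamespace(
    pk=7,
    name="Severity >= 6",
    depth=datetime.timedelta(minutes=10),
    rule_json={"type": "query_string", "field": "", "operands": "event_severity:>=6"},
    actions_json=[{"type": "incident", "title": "{{.SignName}}"}],
    multi=False,
)

payload = {
    "name": fake_rule.name,
    "depth": str(int(fake_rule.depth.total_seconds())) + "s",  # -> "600s"
    "id": str(fake_rule.pk),
    "predicat": fake_rule.rule_json,  # the key the correlator expects
    "actions": fake_rule.actions_json,
    "multi": fake_rule.multi,
}
assert payload["depth"] == "600s" and payload["id"] == "7"
```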
diff --git a/correlation/services/signals.py b/correlation/services/signals.py
new file mode 100644
index 0000000..4f1acb6
--- /dev/null
+++ b/correlation/services/signals.py
@@ -0,0 +1,26 @@
+import logging
+
+from django.db.models.signals import pre_save
+from django.dispatch import receiver
+
+from correlation.models import Rule
+from correlation.services.rules import check_if_only_status_changed
+
+_log = logging.getLogger(__name__)
+
+
+@receiver(pre_save, sender=Rule)
+def rule_pre_save_update(sender, instance, *args, **kwargs):
+    """Disable a rule when it is archived; on creation, auto-assign the SID;
+    on edit, bump the REV unless only the status changed"""
+    if instance.archived:
+        instance.status = False
+    if instance._state.adding:
+        if not instance.sid:
+            try:
+                instance.sid = Rule.objects.order_by('sid').last().sid + 1
+            except AttributeError:
+                instance.sid = 1
+        instance.status = True
+    else:
+        if not instance.is_being_parsed and not check_if_only_status_changed(instance):
+            instance.rev += 1
diff --git a/correlation/tasks.py b/correlation/tasks.py
new file mode 100644
index 0000000..e06c9e2
--- /dev/null
+++ b/correlation/tasks.py
@@ -0,0 +1,101 @@
+from __future__ import absolute_import, unicode_literals
+
+import subprocess
+from time import sleep
+
+import requests
+from celery import shared_task
+from celery.utils.log import get_task_logger
+from django.conf import settings
+from django.db.models.signals import post_delete, post_save
+from django.dispatch import receiver
+
+from correlation.constants import redis_instance, STARTUP_UPDATE_NAME
+from correlation.models import Rule
+from correlation.services.rules import to_correlator_data
+
+_log = get_task_logger(__name__)
+
+
+@shared_task
+def initial_task_update(*args, **kwargs):
+    _log.info("Init correlator rules")
+    done = redis_instance.get(STARTUP_UPDATE_NAME)
+    if done == "True":
+        _log.info("Already done")
+        return
+
+    for _ in range(180):
+        if update_correlator_tasks(False):
+            redis_instance.set(STARTUP_UPDATE_NAME, "True")
+            return
+        sleep(1)
+
+    _log.error("Can't set initial correlation rules")
+
+
+@shared_task
+def update_correlator_tasks(clear_store=True):
+    # First, clear the correlator's rule store; then push the current rules
+    if clear_store:
+        try:
+            url = settings.CORRELATOR_URL + '/clear/'
+            resp = requests.get(url)
+            if resp.status_code != 200:
+                _log.error("Store wasn't cleared")
+                return False
+        except requests.RequestException:
+            _log.error("Can't send clear request")
+            return False
+
+    rules = Rule.objects.filter(status=True).order_by("pk")
+    data = []
+    for rule in rules:
+        data.append(to_correlator_data(rule))
+
+    if len(data) == 0:
+        _log.info("No rules found. Correlator initialization complete")
+        return True
+
+    try:
+        url = settings.CORRELATOR_URL + '/add_many/'
+        headers = {'Content-type': 'application/json',
+                   'Accept': 'text/plain',
+                   'Content-Encoding': 'utf-8'}
+        _log.debug(f"Sending {len(data)} rules to correlator")
+        resp = requests.post(url, json=data, headers=headers)
+        _log.debug(f"Response code: {resp.status_code}")
+        _log.debug(f"Response: {resp.text}")
+    except requests.ConnectionError as ex:
+        _log.error("Can't send rules to correlator. Reason -> {}".format(ex))
+        return False
+
+    if resp.status_code != 200:
+        return False
+
+    _log.info(f"{len(data)} rules sent to correlator")
+    return True
+
+
+@receiver(post_delete, sender=Rule)
+@receiver(post_save, sender=Rule)
+def sync_rules(sender, **kwargs):
+    """ Sync correlator rules after a rule is created, updated or deleted """
+    rule = kwargs['instance']
+    if not update_correlator_tasks(True):
+        _log.error(f"Can't sync correlator rules after [{rule.name}] change")
+    else:
+        _log.info(f"Synced correlator rules after [{rule.name}] change")
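`update_correlator_tasks` above defines the whole console-to-correlator push protocol: wipe the store, then upload all active rules in one batch. A condensed sketch of that contract; the address is illustrative (the real one comes from `settings.CORRELATOR_URL`), and a reachable correlator is assumed:

```python
# Editor's sketch (not part of the patch): the two-step push protocol used by
# update_correlator_tasks() -- clear the correlator's rule store, then upload
# the active rules in one batch.
import requests

CORRELATOR_URL = "http://127.0.0.1:5566"  # assumed address, for illustration only

def push_rules(rules):
    """rules: list of dicts shaped by to_correlator_data()."""
    if requests.get(CORRELATOR_URL + "/clear/").status_code != 200:
        return False  # never push a batch onto an uncleared store
    resp = requests.post(
        CORRELATOR_URL + "/add_many/",
        json=rules,
        headers={"Content-type": "application/json", "Accept": "text/plain"},
    )
    return resp.status_code == 200
```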
+
+
+@shared_task
+def reboot_correlator_task():
+    """HOTFIX task: reboot the correlator and re-sync the rules"""
+    _log.info('Rebooting correlator')
+    subprocess.run(['sudo', 'systemctl', 'restart', 'amccorrelator'],
+                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+
+    for _ in range(5):
+        result = update_correlator_tasks()
+        if result:
+            break
+        sleep(30)  # wait for the correlator to come back up
diff --git a/correlation/tests/__init__.py b/correlation/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/correlation/tests/data/image.png b/correlation/tests/data/image.png
new file mode 100644
index 0000000000000000000000000000000000000000..8649f0475d8d20793b2ec431fe25a186a414cf10
GIT binary patch
literal 1732
[1732 bytes of base85-encoded PNG data omitted; the tail of the binary blob and
the start of the correlation/tests/test_api.py hunk (its diff header, imports,
and the beginning of the setup fixture) were lost when this patch was extracted]
+                "operands": "event_severity:>=6",
+            },
+            'actions_json': {
+                "type": "incident",
+                "title": "{{.SignName}}",
+                "comment": "",
+                "category": "",
+                "importance": "50",
+                "assigned_to": "",
+                "description": "{{.EventSrcMsg}}"
+            }
+        }
+        self.rule1 = Rule.objects.create(
+            **rule_data
+        )
+
+        rule_data_disable = rule_data
+        rule_data_disable['status'] = False
+        rule_data_disable['rev'] = 2
+        self.rule1_disable = Rule.objects.create(**rule_data_disable)
+
+        self.rule2 = Rule.objects.create(
+            name='2 Test with Status False',
+            type=rule_type,
+            status=False,
+            rev=1,
+            sid=3,
+            depth=datetime.timedelta(minutes=10),
+            rule_json={
+                "type": "query_string",
+                "field": "",
+                "operands": "event_severity:>=6",
+            },
+            actions_json={
+                "type": "incident",
+                "title": "{{.SignName}}",
+                "comment": "",
+                "category": "",
+                "importance": "50",
+                "assigned_to": "",
+                "description": "{{.EventSrcMsg}}"
+            },
+        )
+        self.rule2.status = False
+        self.rule2.save()
+
+    @pytest.mark.integration
+    def test_rules_list(self, client, add_user_with_permissions):
+        user = add_user_with_permissions(username='test_user1', password='pwd123', is_superuser=True)
+        client.force_login(user=user)
+        url = reverse('rules-list')
+        response = client.get(url)
data_json = response.json() + assert response.status_code == 200 + assert len(data_json) == 4 + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_add_rule(self, client, add_user_with_permissions): + user = add_user_with_permissions(username='test_user1', password='pwd123', is_superuser=True) + client.force_login(user=user) + url = reverse('rules-list') + data = { + 'name': 'New Test with Status False', + 'sid': 3, + 'rev': 4, + 'kind': 'System', + 'group': None, + 'status': False, + 'multi': False, + 'archived': False, + 'type': 0, + 'depth': '00:20:00', + 'rule_json': { + 'type': 'query_string', + 'field': 'query field', + 'operands': 'event_severity:>=666' + }, + 'actions_json': { + 'type': 'incident', + 'title': '{{.SignName}}', + 'comment': 'New comment', + 'category': 'New category', + 'importance': '50', + 'assigned_to': '', + 'description': '{{.EventSrcMsg}}' + } + } + response = client.post(url, data=data, content_type='application/json') + assert response.status_code == 201 + rule = Rule.objects.get(name='New Test with Status False') + assert rule.sid == 3 + assert rule.rev == 4 + assert rule.group == None + assert not rule.status + assert not rule.archived + assert rule.type == 0 + assert rule.depth == datetime.timedelta(seconds=1200) + assert rule.rule_json == { + 'type': 'query_string', + 'field': 'NULL', + 'operands': 'event_severity:>=666' + } + assert rule.actions_json == { + 'type': 'incident', + 'title': '{{.SignName}}', + 'comment': 'New comment', + 'category': 'New category', + 'importance': '50', + 'assigned_to': '', + 'description': '{{.EventSrcMsg}}' + } + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_delete_rule(self, client, add_user_with_permissions): + user = add_user_with_permissions(username='test_user1', password='pwd123', is_superuser=True) + client.force_login(user=user) + rule = Rule.objects.get(name='2 Test with Status False') + rules_before = Rule.objects.count() + url = reverse('rules-detail', kwargs={"pk": rule.pk}) + response = client.delete(url) + assert response.status_code == 204 + assert Rule.objects.count() == rules_before - 1 + + @pytest.mark.unit + @pytest.mark.skip(reason='1.5 waiting of products') + def test_edit_rule(self, client, add_user_with_permissions): + user = add_user_with_permissions(username='test_user1', password='pwd123', is_superuser=True) + client.force_login(user=user) + rule = Rule.objects.get(name='2 Test with Status False') + url = reverse('rules-detail', kwargs={"pk": rule.pk}) + data = {'name': '2 Test with Status False', 'sid': 5, 'rev': 6, 'kind': 'System', 'group': 'None', + 'status': True, 'multi': True, + 'archived': True, 'type': 1, 'depth': '00:20:00', + 'rule_json': {'type': 'query_string', 'field': 'field', 'operands': 'event_severity:>=50'}, + 'actions_json': {'type': 'incident', 'title': '{{.SignName}}', 'comment': 'new comment', 'category': '', + 'importance': '500', 'assigned_to': 'user', 'description': '{{.EventSrcMsg}}'}} + response = client.patch(url, data=data, content_type='application/json') + assert response.status_code == 200 + rule = Rule.objects.get(name='2 Test with Status False') + assert rule.sid == 5 + assert rule.rev == 7 + assert rule.type == 1 + assert not rule.group + assert not rule.status + assert rule.multi + assert 
rule.archived + assert rule.depth == datetime.timedelta(seconds=1200) + assert rule.rule_json == {'type': 'query_string', 'field': 'field', 'operands': 'event_severity:>=50'} + assert rule.actions_json == {'type': 'incident', 'title': '{{.SignName}}', 'comment': 'new comment', + 'category': '', 'importance': '500', 'assigned_to': 'user', + 'description': '{{.EventSrcMsg}}'} + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_enable_rule_but_this_sid_already_enable(self, api_client): + api_client.force_login(user=self.user) + + url = reverse('rules-detail', kwargs={"pk": self.rule1_disable.pk}) + response = api_client.patch(url, data={'status': True}) + assert 'status' in response.json() + assert response.json()['status'] == ['There is already an enabled rule with this sid: 2'] + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_edit_rule_but_rule_with_same_sid_disable(self, api_client): + api_client.force_login(user=self.user) + + url = reverse('rules-detail', kwargs={"pk": self.rule1.pk}) + response = api_client.patch(url, data={'description': '-----'}) + assert 'sid' in response.json() + assert response.json()['sid'] == ['Check that there is no rule with SID 2 and REV 2 and try again'] + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_expansion_rule_json_when_edit(self, api_client): + api_client.force_login(user=self.user) + + url = reverse('rules-detail', kwargs={"pk": self.rule2.pk}) + response = api_client.patch(url, data={'rule_json': {'operands': 'test'}}, format='json') + assert response.json()['rule_json'] == { + 'operands': 'test', + 'field': 'NULL', + 'type': 'query_string' + } + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_edit_rule_only_status(self, api_client): + api_client.force_login(user=self.user) + + url = reverse('rules-detail', kwargs={"pk": self.rule2.pk}) + response = api_client.patch(url, data={'status': True}) + assert response.json()['rev'] == 1 + assert Rule.objects.first().rev == 1 + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_add_rule_error(self, client, add_user_with_permissions): + user = add_user_with_permissions(username='test_user1', password='pwd123', is_superuser=True) + client.force_login(user=user) + url = reverse('rules-list') + data = { + 'name': 'New Test with Status False', + 'sid': 3, + 'rev': 4, + 'kind': 'System', + 'group': 'None', + 'status': False, + 'multi': False, + 'archived': False, + 'type': 0, + 'depth': '00:20:00', + 'rule_json': { + 'type': 'query_string', + 'field': 'query field', + 'operands': 'event_severity:>=666' + }, + + } + response = client.post(url, data=data, content_type='application/json') + assert response.status_code == 400 + + @pytest.mark.integration + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def 
test_add_rule_with_non_unique_sid(self, api_client, add_user_with_permissions):
+        user = add_user_with_permissions(username='test_user1', password='pwd123', is_superuser=True)
+        api_client.force_authenticate(user)
+        url = reverse('rules-list')
+        data = {
+            'name': 'New Test',
+            'sid': 3,
+            'rev': 4,
+            'kind': 'System',
+            'group': None,
+            'status': True,
+            'multi': False,
+            'archived': False,
+            'type': 0,
+            'depth': '00:20:00',
+            'rule_json': {
+                'type': 'query_string',
+                'field': 'query field',
+                'operands': 'event_severity:>=666'
+            },
+            'actions_json': {
+                'type': 'incident',
+                'title': '{{.SignName}}',
+                'comment': 'New comment',
+                'category': 'New category',
+                'importance': '50',
+                'assigned_to': '',
+                'description': '{{.EventSrcMsg}}'
+            }
+        }
+
+        response = api_client.post(url, data=data, format='json')
+        assert response.status_code == status.HTTP_201_CREATED
+
+        data['rev'] = 111111
+        response = api_client.post(url, data=data, format='json')
+
+        assert response.status_code == 400
+        assert 'sid' in response.json()
+
+
+@pytest.mark.django_db
+class TestCorrelationGroups:
+
+    @pytest.fixture(autouse=True)
+    def setup_test(self, django_user_model):
+        self.admin = django_user_model.objects.first()
+        test_rule = dict(
+            name='Rule 1',
+            type=1,
+            status=True,
+            rev=1,
+            sid=2,
+            depth=datetime.timedelta(minutes=10),
+            rule_json={
+                "type": "query_string",
+                "field": "",
+                "operands": "event_severity:>=6",
+            },
+            actions_json={
+                "type": "incident",
+                "title": "{{.SignName}}",
+                "comment": "",
+                "category": "",
+                "importance": "50",
+                "assigned_to": "",
+                "description": "{{.EventSrcMsg}}"
+            },
+        )
+        self.rule1 = Rule.objects.create(**test_rule)
+        self.group = Group.objects.create(name="test_group", description="test description")
+
+    @pytest.mark.unit
+    def test_get_group(self, api_client):
+        api_client.force_authenticate(self.admin)
+        response = api_client.get(reverse('rules-groups-list'))
+        assert response.status_code == 200
+
+        data_json = response.json()
+        assert len(data_json['results']) == 1  # there is one group
+        assert data_json['count'] == 1
+        assert data_json['results'][0] == {
+            'id': self.group.pk,
+            'name': 'test_group',
+            'description': 'test description',
+            'rules': []
+        }
+
+    @pytest.mark.unit
+    def test_add_group(self, api_client):
+        api_client.force_authenticate(self.admin)
+        groups_before = Group.objects.count()
+
+        data = {
+            "name": "New group",
+            "description": "Group description"
+        }
+        response = api_client.post(reverse('rules-groups-list'), data=data)
+        assert response.status_code == 201
+        assert response.json()['name'] == data['name']
+        assert response.json()['description'] == data['description']
+
+        groups_after = Group.objects.count()
+        assert groups_before != groups_after
+
+        group = Group.objects.get(name="New group")
+        assert group.description == "Group description"
+
+    @pytest.mark.unit
+    def test_delete_group(self, api_client):
+        api_client.force_authenticate(self.admin)
+
+        group = Group.objects.create(name="group", description="test description")
+        url = reverse('rules-groups-detail', kwargs={"pk": group.pk})
+        response = api_client.delete(url)
+        assert response.status_code == 204
+        with pytest.raises(ObjectDoesNotExist):
+            Group.objects.get(pk=group.pk)
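The `reverse()` names used throughout these tests ('rules-list', 'rules-groups-detail', and so on) come from the DRF router registration in `correlation/urls.py` further down in this patch. A minimal sketch of how `DefaultRouter` derives them from the basenames; the stub viewsets stand in for the real `correlation.api.RuleViewSet` and `GroupViewSet`:

```python
# Editor's sketch (not part of the patch): how the URL names used by these
# tests are derived from the router basenames in correlation/urls.py.
from rest_framework import routers, viewsets

class RuleStub(viewsets.ViewSet):       # stand-in for correlation.api.RuleViewSet
    def list(self, request): ...
    def retrieve(self, request, pk=None): ...

class GroupStub(viewsets.ViewSet):      # stand-in for correlation.api.GroupViewSet
    def list(self, request): ...
    def retrieve(self, request, pk=None): ...

router = routers.DefaultRouter()
router.register("rules", RuleStub, basename="rules")
router.register("groups", GroupStub, basename="rules-groups")

names = sorted({p.name for p in router.urls if p.name})
# ['api-root', 'rules-detail', 'rules-groups-detail',
#  'rules-groups-list', 'rules-list']
```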
+    @pytest.mark.unit
+    def test_update_group(self, api_client):
+        api_client.force_authenticate(self.admin)
+        group = Group.objects.create(name="group", description="test description")
+        data = {
+            "name": "New group",
+            "description": "Group description"
+        }
+        url = reverse('rules-groups-detail', kwargs={"pk": group.pk})
+        response = api_client.patch(url, data=data)
+        assert response.status_code == 200
+        group = Group.objects.get(name="New group")
+        assert group.description == "Group description"
+
+    @pytest.mark.unit
+    def test_partial_update_group_description(self, api_client):
+        api_client.force_authenticate(self.admin)
+        group = Group.objects.create(name="group", description="test description")
+        data = {
+            "description": "Group description"
+        }
+        url = reverse('rules-groups-detail', kwargs={"pk": group.pk})
+        response = api_client.patch(url, data=data)
+        assert response.status_code == 200
+        group = Group.objects.get(name="group")
+        assert group.description == "Group description"
+
+    @pytest.mark.unit
+    def test_partial_update_group_name(self, api_client):
+        api_client.force_authenticate(self.admin)
+        group = Group.objects.create(name="test", description="test description")
+        data = {
+            "name": "New group",
+        }
+        url = reverse('rules-groups-detail', kwargs={"pk": group.pk})
+        response = api_client.patch(url, data=data)
+        assert response.status_code == 200
+        group = Group.objects.get(name="New group")
+        assert group.description == "test description"
+
+    @pytest.mark.unit
+    def test_add_rule_to_group(self, api_client):
+        api_client.force_authenticate(self.admin)
+        group = Group.objects.create(name="test", description="test description")
+
+        url = reverse('rules-groups-detail', kwargs={"pk": group.pk})
+        response = api_client.get(url)
+        assert response.json()['rules'] == []
+
+        data = {"rules": [self.rule1.pk]}
+        response = api_client.patch(url, data=data)
+        assert response.status_code == 200
+        assert len(response.json()['rules']) == 1
+
+    @pytest.mark.unit
+    def test_edit_group_in_rule(self, api_client):
+        api_client.force_authenticate(self.admin)
+        group = Group.objects.create(name="test", description="test description")
+
+        url = reverse('rules-detail', kwargs={"pk": self.rule1.pk})
+        response = api_client.get(url)
+        assert response.json()['group'] is None
+
+        data = {"group": [group.pk]}
+        response = api_client.patch(url, data=data)
+        assert response.status_code == 200
+        assert response.json()['group'] == {'id': group.pk, 'name': 'test'}
diff --git a/correlation/tests/test_rule_import_service.py b/correlation/tests/test_rule_import_service.py
new file mode 100644
index 0000000..6065785
--- /dev/null
+++ b/correlation/tests/test_rule_import_service.py
@@ -0,0 +1,276 @@
+import json
+import os
+import shutil
+from unittest.mock import patch
+
+import pytest
+from django.urls import reverse
+
+from correlation.models import Rule
+from incident.models import IncidentRecommendations, IncidentEffect
+
+TESTS_DIR = os.path.dirname(__file__)
+
+SID_REV_TESTS = [2, 20]
+
+
+def generate_multiple_rules_checks(check_list, check_type, instance_name, action_key):
+    """ Generate test cases for linked models that can have more than one instance assigned to a single value
+    :param check_list: the final test-case list that is passed to the test
+    :param check_type: 'asset' or 'incident' -- the action types whose `actions_json` field holds the linked models
+    :param instance_name: the model name, lower-cased and without spaces
+    :param action_key: the key under which the corresponding models are held in the `actions_json` field
+    """
f"test_1_{action_key}", "description": ""}, + {"name": "some_test", "description": ""}, + {"name": f"test_2_{action_key}", "description": ""}], + [{"name": "some_test", "description": ""}, + {"name": f"test_1_{action_key}", "description": ""}, + {"name": f"test_2_{action_key}", "description": ""}], + [{"name": "some_test", "description": ""}] + ] + + check_list.append( + [check_type, False, instance_name, + [{"name": f"test_1_{action_key}", "description": ""}, + {"name": f"test_2_{action_key}", "description": ""}, + {"name": f"test_3_{action_key}", "description": ""}], + True] + ) + for elem in test_dicts: + check_list.append( + [check_type, False, instance_name, elem, True] + ) + check_list.append( + [check_type, False, instance_name, 'some_test', False]) + + +def get_names_of_objects(array_of_objects): + if isinstance(array_of_objects, list): + array_of_names = [obj['name'] for obj in array_of_objects] + return array_of_names + else: + return 'wrong_format' + + +""" Fixture for linked models import/export tests +Structure of fixture: + [ + 'type of key in actions_json field, that held rule action`, + True if actions_json field is ChoiceField, False if MultipleChoiceField, + key of linked model, that are used in actions_json field, + value, which should be placed for test case, + True if rule should import without mistakes, False otherwise + ] +""" +NAME_DESCRIPTION_TESTS = [ + ['asset', True, 'os', 'some_test', True], + ['asset', True, 'os', 'test_os', True], + ['asset', True, 'group', 'some_test', True], + ['asset', True, 'group', 'test_group', True], + ['asset', True, 'manufacturer', 'some_test', True], + ['asset', True, 'manufacturer', 'test_manufacturer', True], + ['incident', True, 'category', 'some_test', True], + ['incident', True, 'category', 'test_incident_category', True], +] + +generate_multiple_rules_checks(NAME_DESCRIPTION_TESTS, 'incident', 'effects', 'incidenteffect') +generate_multiple_rules_checks(NAME_DESCRIPTION_TESTS, 'incident', 'close_recommendations', 'incidentrecommendations') + + +@pytest.fixture +def actual_importing_rule(tmpdir): + """ + Change version of app in file + Args: tmpdir(): + Returns: path to updated file + """ + path = os.path.join(TESTS_DIR, 'data/api_test_importing_rule_new_version.json') + tmp_dir = tmpdir.mkdir("sub") + shutil.copy(path, tmp_dir) + tmp_file = os.path.join(tmp_dir, 'api_test_importing_rule_new_version.json') + return tmp_file + + +def mock_correlator_task(*args, **kwargs): pass + + +@pytest.mark.django_db +class TestImportRules: + @pytest.fixture(autouse=True) + def setup_tests(self, client, django_user_model, add_user_with_permissions): + """ Fixture for creating basic setup for following import tests """ + self.admin = django_user_model.objects.first() + + @pytest.mark.unit + @pytest.mark.parametrize('sid_rev_test', SID_REV_TESTS) + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_modify_rev_after_importing_rules(self, api_client, sid_rev_test, actual_importing_rule): + """ Test for checking the correct SID and REV fields parsing of rules, which are being imported """ + + api_client.force_authenticate(self.admin) + test_file = actual_importing_rule + + # Step 1. Add rules to empty DB from test file + with open(test_file, 'r') as json_file: + api_client.post(reverse('rules-import-rules'), {'uploaded_file': json_file}) + + # Step 2. 
Create new file, which will store modified test data + modified_test_file = 'import_modify_test.json' + with open(test_file, 'r') as json_file, open(modified_test_file, 'w') as json_file_modified: + json_data = json.load(json_file) + json_data['rules'][0]['rev'] = sid_rev_test + json.dump(json_data, json_file_modified) + old_rule = Rule.objects.get() # exists only one rule + assert old_rule.archived == False + + # Step 3. Try to import file, with modified data + with open(modified_test_file, 'r') as json_file: + rule = Rule.objects.all() + api_client.post(reverse('rules-import-rules'), {'uploaded_file': json_file}) + os.remove(modified_test_file) + + assert Rule.objects.count() == 2 + + archived_rule = Rule.objects.first() + assert archived_rule.archived == True + + new_rule = Rule.objects.last() + assert new_rule.sid == old_rule.sid == archived_rule.sid + assert new_rule.rev == sid_rev_test + + @pytest.mark.unit + @pytest.mark.parametrize('name_description_test', NAME_DESCRIPTION_TESTS) + @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task) + @patch('correlation.api.update_correlator_tasks', mock_correlator_task) + def test_name_description_importing_rules(self, api_client, name_description_test, actual_importing_rule): + """ Test for checking the correct parse on all linked models, which are stored in `actions_json` field in + importing rules. + """ + api_client.force_authenticate(self.admin) + test_file = actual_importing_rule + + # Step 1. Get the current length of queryset of Rules, that are currently stored in database + rules_before = len(Rule.objects.all()) + + # Step 2. Prepare file for storing modified data, which later will be used in test cases + modified_test_file = 'import_modify_test.json' + + # Step 3. Prepare test data from fixture + with open(test_file, 'r') as json_file, open(modified_test_file, 'w') as json_file_modified: + json_data = json.load(json_file) + for action in json_data['rules'][0]['actions_json']: + if action['type'] == name_description_test[0]: + if name_description_test[1]: + action[name_description_test[2]] = [{'name': name_description_test[3]}] + else: + json_data[name_description_test[2]] = name_description_test[3] + array_of_names = get_names_of_objects(name_description_test[3]) + action[name_description_test[2]] = array_of_names + json.dump(json_data, json_file_modified) + + # Step 4. Trying to pass json file with modified data for test case + with open(modified_test_file, 'r') as json_file: + api_client.post(reverse('rules-import-rules'), {'uploaded_file': json_file}) + + # Step 5. Remove unnecessary file + os.remove(modified_test_file) + + # Step 6. 
Get the number of rules now stored in the database for the assert below
+        rules_after = len(Rule.objects.all())
+        if name_description_test[4]:
+            assert rules_after != rules_before
+        else:
+            assert rules_after == rules_before
+
+    @pytest.mark.unit
+    @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task)
+    @patch('correlation.api.update_correlator_tasks', mock_correlator_task)
+    def test_sid_and_rev_when_importing(self, api_client, actual_importing_rule):
+        """ Check that an imported rule keeps the same SID and REV values as in the file
+        it was imported from
+        """
+        api_client.force_authenticate(self.admin)
+
+        with open(actual_importing_rule, 'r') as json_file:
+            api_client.post(reverse('rules-import-rules'), {'uploaded_file': json_file})
+
+        # Check that the rev and sid fields match the imported rule
+        path = os.path.join(TESTS_DIR, 'data/api_test_importing_rule_new_version.json')
+        with open(path, 'r') as file:
+            json_data = json.load(file)
+            imported_rule = Rule.objects.get(name=json_data['rules'][0]['name'])
+            assert imported_rule.sid == json_data['rules'][0]['sid']
+            assert imported_rule.rev == json_data['rules'][0]['rev']
+
+    @pytest.mark.unit
+    @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task)
+    @patch('correlation.api.update_correlator_tasks', mock_correlator_task)
+    def test_not_archived_rule_after_importing_invalid_rule(self, api_client, actual_importing_rule):
+        api_client.force_authenticate(self.admin)
+        test_file = actual_importing_rule
+
+        # Step 1. Add rules to the empty DB from the test file
+        with open(test_file, 'r') as json_file:
+            api_client.post(reverse('rules-import-rules'), {'uploaded_file': json_file})
+
+        # Step 2. Create a new file that stores the modified test data
+        modified_test_file = 'import_modify_test.json'
+        with open(test_file, 'r') as json_file, open(modified_test_file, 'w') as json_file_modified:
+            json_data = json.load(json_file)
+            json_data['rules'][0]['rev'] = 2
+            json_data['rules'][0]['name'] = ''  # invalid value, must be a non-empty string
+            json.dump(json_data, json_file_modified)
+        old_rule = Rule.objects.get()  # exactly one rule exists
+        assert old_rule.archived is False
+
+        # Step 3. Try to import the file with the modified data
+        with open(modified_test_file, 'r') as json_file:
+            rule = Rule.objects.all()
+            api_client.post(reverse('rules-import-rules'), {'uploaded_file': json_file})
+        os.remove(modified_test_file)
+
+        assert Rule.objects.count() == 1
+
+        new_rule = Rule.objects.get()
+        assert new_rule.archived is False
+        assert new_rule.sid == old_rule.sid
+        assert new_rule.rev == old_rule.rev
+
+
+@pytest.mark.django_db
+class TestAPIImportService:
+    @pytest.fixture(autouse=True)
+    def setup_tests(self, client, django_user_model, add_user_with_permissions):
+        """ Fixture that creates the basic setup for the import tests below """
+        self.admin = django_user_model.objects.first()
+
+    @pytest.mark.unit
+    @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task)
+    @patch('correlation.api.update_correlator_tasks', mock_correlator_task)
+    def test_api_import_rules(self, api_client, actual_importing_rule):
+        api_client.force_authenticate(self.admin)
+        with open(actual_importing_rule, 'r') as json_file:
+            response = api_client.post(reverse('rules-import-rules'), {'uploaded_file': json_file})
+        assert response.status_code == 200
+        assert response['content-disposition'] == 'attachment; filename="Report.json"'
+        assert Rule.objects.count() == 1
+        assert IncidentRecommendations.objects.count() == 6
+        assert IncidentEffect.objects.count() == 4
+
+    @pytest.mark.unit
+    @patch('correlation.tasks.update_correlator_tasks', mock_correlator_task)
+    @patch('correlation.api.update_correlator_tasks', mock_correlator_task)
+    def test_api_import_invalid_file(self, api_client):
+        api_client.force_authenticate(self.admin)
+        path = os.path.join(TESTS_DIR, 'data/image.png')
+        with open(path, 'rb') as file:
+            response = api_client.post(reverse('rules-import-rules'), data={'uploaded_file': file})
+        assert response.status_code == 400
+        assert 'uploaded_file' in response.json()
diff --git a/correlation/tests/test_serializer.py b/correlation/tests/test_serializer.py
new file mode 100644
index 0000000..685af4c
--- /dev/null
+++ b/correlation/tests/test_serializer.py
@@ -0,0 +1,41 @@
+from datetime import timedelta
+
+import pytest
+
+from correlation.models import Rule
+from correlation.serializers import RuleExportSerializer
+from incident.models import IncidentCategory
+
+
+@pytest.mark.unit
+def test_rule_export_serializer(add_user_with_permissions):
+    user = add_user_with_permissions(username='testuser1', password='pwdqwe1')
+    category = IncidentCategory.objects.create(name='test name 123', description='test description 123')
+    assigned_to = [{'id': user.id, 'username': user.username, 'first_name': user.first_name,
+                    'is_active': user.is_active, 'email': user.email}]
+    category_json = [{'id': category.id, 'name': category.name, 'description': category.description}]
+
+    rule = Rule.objects.create(
+        name='Test event',
+        type=1,
+        status=True,
+        sid=3,
+        depth=timedelta(seconds=80037),
+        rule_json={
+            "type": "query_string",
+            "field": "",
+            "operands": "event_severity:>=6",
+        },
+        actions_json=[{
+            "type": "incident",
+            "title": "{{.SignName}}",
+            "comment": "",
+            "category": category.id,
+            "importance": "50",
+            "assigned_to": user.id,
+            "description": "{{.EventSrcMsg}}"
+        }]
+    )
+    result_data = RuleExportSerializer(rule).data
+    assert result_data['actions_json'][0]['assigned_to'] == ''  # the user reference is cleared on export
+    assert result_data['actions_json'][0]['category'] == category_json
diff --git a/correlation/urls.py b/correlation/urls.py
new file mode 100644
index 0000000..cb12569
--- /dev/null
+++ b/correlation/urls.py @@ -0,0 +1,10 @@ +from rest_framework import routers + +from correlation.api import RuleViewSet, GroupViewSet + +router = routers.DefaultRouter() +router.register("rules", RuleViewSet, basename="rules") +router.register("groups", GroupViewSet, basename="rules-groups") + +urlpatterns = [] +urlpatterns += router.urls diff --git a/correlator/.dockerignore b/correlator/.dockerignore new file mode 100644 index 0000000..de6a84b --- /dev/null +++ b/correlator/.dockerignore @@ -0,0 +1,32 @@ +# IDE +.idea +.vscode + +# ENV +*.env +!*.example.env + +# GO +go.sum + +# TEMP +tmp +*.log +/builds +/cmd/correlator/*.json + +# GIT +.git +.gitignore +.gitlab-ci.yml + +# Docker +.docker/ +*Dockerfile* +docker-compose*.* +.dockerignore + +# Docs +docs/ +CHANGELOG.md +!docs/examples diff --git a/correlator/.gitignore b/correlator/.gitignore new file mode 100644 index 0000000..aee760b --- /dev/null +++ b/correlator/.gitignore @@ -0,0 +1,13 @@ +# IDE +.idea +.vscode + +# GO +go.sum + +# TEMP +tmp +*.log +/builds +/cmd/correlator/correlator +/cmd/correlator/*.json diff --git a/correlator/.gitlab-ci.yml b/correlator/.gitlab-ci.yml new file mode 100644 index 0000000..882848f --- /dev/null +++ b/correlator/.gitlab-ci.yml @@ -0,0 +1,90 @@ +include: + - project: 'iwa/adm/ci/cicd_extra' + ref: $cicd_branch + file: + - 'ymls/header.yml' + - 'ymls/save_env_vars.yml' + - 'ymls/version_upload.yml' + +variables: + + actions: + value: "" + description: "Keys: integ (integration tests), build. default:''(all jobs)" + +stages: + - save_env_vars + - test + - build + - build_deb_pkg + - version_upload + +test_job: + stage: test + variables: + GIT_STRATEGY: clone + POSTGRES_PASSWORD: 'postgres' + + ES_JAVA_OPTS: "-Xmx2g -Xms2g" #"-Xmx512m -Xms512m" + ELASTIC_PASSWORD: changeme + image: + name: nexus.iwarma.ru:8123/iwarma-docker/golang:1.16.15-bullseye.gitlab + services: + - name: registry.iwarma.ru/iwa/dev/console-docker/console-elasticsearch:latest + alias: elasticsearch + command: [ "bin/elasticsearch", "-Ediscovery.type=single-node" ] + rules: + - if: !reference [.rulesTemplate, testRuleInteg] + - if: !reference [.rulesTemplate, testRuleAlways] + needs: + - job: save_env_vars_job + artifacts: false + script: + - /bin/bash ./cicd/test_job.sh + artifacts: + paths: + - ./*.log + expire_in: 1 day + tags: + - docker-debian11 + +build_job: + stage: build + needs: + - job: test_job + artifacts: false + optional: true + variables: + GIT_STRATEGY: clone + rules: + - if: !reference [.rulesTemplate, buildRule] + script: + - python3 ./cicd_extra/build_go_job.py -n amccorrelator + artifacts: + paths: + - ./cmd/correlator/correlator + expire_in: 1 day + tags: + - shell-debian11 + +deb_pkg_job: + stage: build_deb_pkg + variables: + GIT_STRATEGY: clone + rules: + - if: !reference [.rulesTemplate, buildRule] + needs: + - job: build_job + artifacts: true + script: + - python3 ./cicd_extra/pack_job.py -p amccorrelator + artifacts: + paths: + - artifact.json + expire_in: 1 day + tags: + - shell-debian11 + +version_upload: + rules: + - if: !reference [.rulesTemplate, buildRule] \ No newline at end of file diff --git a/correlator/.golangci b/correlator/.golangci new file mode 100644 index 0000000..6d439f2 --- /dev/null +++ b/correlator/.golangci @@ -0,0 +1,654 @@ +run: + # default concurrency is a available CPU number + concurrency: 4 + + # timeout for analysis, e.g. 
30s, 5m, default is 1m + timeout: 1m + + # exit code when at least one issue was found, default is 1 + issues-exit-code: 1 + + # include test files or not, default is true + tests: true + + # list of build tags, all linters use it. Default is empty list. + build-tags: + + # which dirs to skip: issues from them won't be reported; + # can use regexp here: generated.*, regexp is applied on full path; + # default value is empty list, but default dirs are skipped independently + # from this option's value (see skip-dirs-use-default). + # "/" will be replaced by current OS file path separator to properly work + # on Windows. + skip-dirs: + + # default is true. Enables skipping of directories: + # vendor$, third_party$, testdata$, examples$, Godeps$, builtin$ + skip-dirs-use-default: true + + # which files to skip: they will be analyzed, but issues from them + # won't be reported. Default value is empty list, but there is + # no need to include all autogenerated files, we confidently recognize + # autogenerated files. If it's not please let us know. + # "/" will be replaced by current OS file path separator to properly work + # on Windows. + skip-files: + + # by default isn't set. If set we pass it to "go list -mod={option}". From "go help modules": + # If invoked with -mod=readonly, the go command is disallowed from the implicit + # automatic updating of go.mod described above. Instead, it fails when any changes + # to go.mod are needed. This setting is most useful to check that go.mod does + # not need updates, such as in a continuous integration and testing system. + # If invoked with -mod=vendor, the go command assumes that the vendor + # directory holds the correct copies of dependencies and ignores + # the dependency descriptions in go.mod. + modules-download-mode: readonly|vendor|mod + + # Allow multiple parallel golangci-lint instances running. + # If false (default) - golangci-lint acquires file lock on start. + allow-parallel-runners: false + + +# output configuration options +output: + # colored-line-number|line-number|json|tab|checkstyle|code-climate|junit-xml|github-actions + # default is "colored-line-number" + format: colored-line-number + + # print lines of code with issue, default is true + print-issued-lines: true + + # print linter name in the end of issue text, default is true + print-linter-name: true + + # make issues output unique by line, default is true + uniq-by-line: true + + # add a prefix to the output file references; default is no prefix + path-prefix: "" + + # sorts results by: filepath, line and column + sort-results: false + + +# all available settings of specific linters +linters-settings: + + cyclop: + # the maximal code complexity to report + max-complexity: 10 + # the maximal average package complexity. If it's higher than 0.0 (float) the check is enabled (default 0.0) + package-average: 0.0 + # should ignore tests (default false) + skip-tests: false + + dogsled: + # checks assignments with too many blank identifiers; default is 2 + max-blank-identifiers: 2 + + dupl: + # tokens count to trigger issue, 150 by default + threshold: 100 + + errcheck: + # report about not checking of errors in type assertions: `a := b.(MyStruct)`; + # default is false: such cases aren't reported by default. + check-type-assertions: false + + # report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`; + # default is false: such cases aren't reported by default. 
+ check-blank: false + + # [deprecated] comma-separated list of pairs of the form pkg:regex + # the regex is used to ignore names within pkg. (default "fmt:.*"). + # see https://github.com/kisielk/errcheck#the-deprecated-method for details + ignore: fmt:.*,io/ioutil:^Read.* + + # path to a file containing a list of functions to exclude from checking + # see https://github.com/kisielk/errcheck#excluding-functions for details + exclude: /path/to/file.txt + + errorlint: + # Report non-wrapping error creation using fmt.Errorf + errorf: true + + exhaustive: + # check switch statements in generated files also + check-generated: false + # indicates that switch statements are to be considered exhaustive if a + # 'default' case is present, even if all enum members aren't listed in the + # switch + default-signifies-exhaustive: false + + exhaustivestruct: + struct-patterns: + - '*.Test' + - '*.Test2' + - '*.Embedded' + - '*.External' + + forbidigo: + # Forbid the following identifiers + forbid: + - fmt.Errorf # consider errors.Errorf in github.com/pkg/errors + - fmt.Print.* # too much log noise + - ginkgo\\.F.* # these are used just for local development + # Exclude godoc examples from forbidigo checks. Default is true. + exclude_godoc_examples: false + + funlen: + lines: 60 + statements: 40 + + gci: + # put imports beginning with prefix after 3rd-party packages; + # only support one prefix + # if not set, use goimports.local-prefixes + local-prefixes: github.com/org/project + + gocognit: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + + nestif: + # minimal complexity of if statements to report, 5 by default + min-complexity: 4 + + goconst: + # minimal length of string constant, 3 by default + min-len: 3 + # minimal occurrences count to trigger, 3 by default + min-occurrences: 3 + + gocritic: + # Which checks should be enabled; can't be combined with 'disabled-checks'; + # See https://go-critic.github.io/overview#checks-overview + # To check which checks are enabled run `GL_DEBUG=gocritic golangci-lint run` + # By default list of stable checks is used. + enabled-checks: + - rangeValCopy + + # Which checks should be disabled; can't be combined with 'enabled-checks'; default is empty + disabled-checks: + - regexpMust + + # Enable multiple checks by tags, run `GL_DEBUG=gocritic golangci-lint run` to see all tags and checks. + # Empty list by default. See https://github.com/go-critic/go-critic#usage -> section "Tags". + enabled-tags: + - performance + disabled-tags: + - experimental + + # Settings passed to gocritic. + # The settings key is the name of a supported gocritic checker. + # The list of supported checkers can be find in https://go-critic.github.io/overview. 
+ settings: + captLocal: # must be valid enabled check name + # whether to restrict checker to params only (default true) + paramsOnly: true + elseif: + # whether to skip balanced if-else pairs (default true) + skipBalanced: true + hugeParam: + # size in bytes that makes the warning trigger (default 80) + sizeThreshold: 80 + nestingReduce: + # min number of statements inside a branch to trigger a warning (default 5) + bodyWidth: 5 + rangeExprCopy: + # size in bytes that makes the warning trigger (default 512) + sizeThreshold: 512 + # whether to check test functions (default true) + skipTestFuncs: true + rangeValCopy: + # size in bytes that makes the warning trigger (default 128) + sizeThreshold: 32 + # whether to check test functions (default true) + skipTestFuncs: true + ruleguard: + # path to a gorules file for the ruleguard checker + rules: '' + truncateCmp: + # whether to skip int/uint/uintptr types (default true) + skipArchDependent: true + underef: + # whether to skip (*x).method() calls where x is a pointer receiver (default true) + skipRecvDeref: true + unnamedResult: + # whether to check exported functions + checkExported: true + + gocyclo: + # minimal code complexity to report, 30 by default (but we recommend 10-20) + min-complexity: 10 + + godot: + # comments to be checked: `declarations`, `toplevel`, or `all` + scope: declarations + # list of regexps for excluding particular comment lines from check + exclude: + # example: exclude comments which contain numbers + # - '[0-9]+' + # check that each sentence starts with a capital letter + capital: false + + godox: + # report any comments starting with keywords, this is useful for TODO or FIXME comments that + # might be left in the code accidentally and should be resolved before merging + keywords: # default keywords are TODO, BUG, and FIXME, these can be overwritten by this setting + - NOTE + - OPTIMIZE # marks code that should be optimized before merging + - HACK # marks hack-arounds that should be removed before merging + + gofmt: + # simplify code: gofmt with `-s` option, true by default + simplify: true + + gofumpt: + # Choose whether or not to use the extra rules that are disabled + # by default + extra-rules: false + + goheader: + values: + const: + # define here const type values in format k:v, for example: + # COMPANY: MY COMPANY + regexp: + # define here regexp type values, for example + # AUTHOR: .*@mycompany\.com + template: # |- + # put here copyright header template for source code files, for example: + # Note: {{ YEAR }} is a builtin value that returns the year relative to the current machine time. + # + # {{ AUTHOR }} {{ COMPANY }} {{ YEAR }} + # SPDX-License-Identifier: Apache-2.0 + + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at: + + # http://www.apache.org/licenses/LICENSE-2.0 + + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. 
+ template-path: + # also as alternative of directive 'template' you may put the path to file with the template source + + goimports: + # put imports beginning with prefix after 3rd-party packages; + # it's a comma-separated list of prefixes + local-prefixes: github.com/org/project + + golint: + # minimal confidence for issues, default is 0.8 + min-confidence: 0.8 + + gomnd: + settings: + mnd: + # the list of enabled checks, see https://github.com/tommy-muehle/go-mnd/#checks for description. + checks: argument,case,condition,operation,return,assign + # ignored-numbers: 1000 + # ignored-files: magic_.*.go + # ignored-functions: math.* + + gomoddirectives: + # Allow local `replace` directives. Default is false. + replace-local: false + # List of allowed `replace` directives. Default is empty. + replace-allow-list: + - launchpad.net/gocheck + # Allow to not explain why the version has been retracted in the `retract` directives. Default is false. + retract-allow-no-explanation: false + # Forbid the use of the `exclude` directives. Default is false. + exclude-forbidden: false + + gomodguard: + allowed: + modules: # List of allowed modules + # - gopkg.in/yaml.v2 + domains: # List of allowed module domains + # - golang.org + blocked: + modules: # List of blocked modules + # - github.com/uudashr/go-module: # Blocked module + # recommendations: # Recommended modules that should be used instead (Optional) + # - golang.org/x/mod + # reason: "`mod` is the official go.mod parser library." # Reason why the recommended module should be used (Optional) + versions: # List of blocked module version constraints + # - github.com/mitchellh/go-homedir: # Blocked module with version constraint + # version: "< 1.1.0" # Version constraint, see https://github.com/Masterminds/semver#basic-comparisons + # reason: "testing if blocked version constraint works." # Reason why the version constraint exists. (Optional) + local_replace_directives: false # Set to true to raise lint issues for packages that are loaded from a local path via replace directive + + govet: + # report about shadowed variables + check-shadowing: true + + # settings per analyzer + settings: + printf: # analyzer name, run `go tool vet help` to see all analyzers + funcs: # run `go tool vet help printf` to see available settings for `printf` analyzer + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Infof + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Warnf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Errorf + - (github.com/golangci/golangci-lint/pkg/logutils.Log).Fatalf + + # enable or disable analyzers by name + # run `go tool vet help` to see all analyzers + enable: + - atomicalign + enable-all: false + disable: + - shadow + disable-all: false + + depguard: + list-type: blacklist + include-go-root: false + packages: + - github.com/sirupsen/logrus + packages-with-error-message: + # specify an error message to output when a blacklisted package is used + - github.com/sirupsen/logrus: "logging is allowed only by logutils.Log" + + ifshort: + # Maximum length of variable declaration measured in number of lines, after which linter won't suggest using short syntax. + # Has higher priority than max-decl-chars. + max-decl-lines: 1 + # Maximum length of variable declaration measured in number of characters, after which linter won't suggest using short syntax. 
+ max-decl-chars: 30 + + importas: + # using `servingv1` alias for `knative.dev/serving/pkg/apis/serving/v1` package + servingv1: knative.dev/serving/pkg/apis/serving/v1 + # using `autoscalingv1alpha1` alias for `knative.dev/serving/pkg/apis/autoscaling/v1alpha1` package + autoscalingv1alpha1: knative.dev/serving/pkg/apis/autoscaling/v1alpha1 + # You can specify the package path by regular expression, + # and alias by regular expression expansion syntax like below. + # see https://github.com/julz/importas#use-regular-expression for details + "$1$2": 'knative.dev/serving/pkg/apis/(\w+)/(v[\w\d]+)' + + lll: + # max line length, lines longer will be reported. Default is 120. + # '\t' is counted as 1 character by default, and can be changed with the tab-width option + line-length: 120 + # tab width in spaces. Default to 1. + tab-width: 1 + + makezero: + # Allow only slices initialized with a length of zero. Default is false. + always: false + + maligned: + # print struct with more effective memory layout or not, false by default + suggest-new: true + + misspell: + # Correct spellings using locale preferences for US or UK. + # Default is to use a neutral variety of English. + # Setting locale to US will correct the British spelling of 'colour' to 'color'. + locale: US + ignore-words: + - someword + + nakedret: + # make an issue if func has more lines of code than this setting and it has naked returns; default is 30 + max-func-lines: 30 + + prealloc: + # XXX: we don't recommend using this linter before doing performance profiling. + # For most programs usage of prealloc will be a premature optimization. + + # Report preallocation suggestions only on simple loops that have no returns/breaks/continues/gotos in them. + # True by default. + simple: true + range-loops: true # Report preallocation suggestions on range loops, true by default + for-loops: false # Report preallocation suggestions on for loops, false by default + + promlinter: + # Promlinter cannot infer all metrics name in static analysis. + # Enable strict mode will also include the errors caused by failing to parse the args. + strict: false + # Please refer to https://github.com/yeya24/promlinter#usage for detailed usage. + disabled-linters: + # - "Help" + # - "MetricUnits" + # - "Counter" + # - "HistogramSummaryReserved" + # - "MetricTypeInName" + # - "ReservedChars" + # - "CamelCase" + # - "lintUnitAbbreviations" + + predeclared: + # comma-separated list of predeclared identifiers to not report on + ignore: "" + # include method names and field names (i.e., qualified names) in checks + q: false + + nolintlint: + # Enable to ensure that nolint directives are all used. Default is true. + allow-unused: false + # Disable to ensure that nolint directives don't have a leading space. Default is true. + allow-leading-space: true + # Exclude following linters from requiring an explanation. Default is []. + allow-no-explanation: [] + # Enable to require an explanation of nonzero length after each nolint directive. Default is false. + require-explanation: true + # Enable to require nolint directives to mention the specific linter being suppressed. Default is false. + require-specific: true + + rowserrcheck: + packages: + - github.com/jmoiron/sqlx + - + revive: + # see https://github.com/mgechev/revive#available-rules for details. 
+ ignore-generated-header: true + severity: warning + rules: + - name: indent-error-flow + severity: warning + + tagliatelle: + # check the struck tag name case + case: + # use the struct field name to check the name of the struct tag + use-field-name: true + rules: + # any struct tag type can be used. + # support string case: `camel`, `pascal`, `kebab`, `snake`, `goCamel`, `goPascal`, `goKebab`, `goSnake`, `upper`, `lower` + json: camel + yaml: camel + xml: camel + bson: camel + avro: snake + mapstructure: kebab + + testpackage: + # regexp pattern to skip files + skip-regexp: (export|internal)_test\.go + + thelper: + # The following configurations enable all checks. It can be omitted because all checks are enabled by default. + # You can enable only required checks deleting unnecessary checks. + test: + first: true + name: true + begin: true + benchmark: + first: true + name: true + begin: true + tb: + first: true + name: true + begin: true + + unparam: + # Inspect exported functions, default is false. Set to true if no external program/library imports your code. + # XXX: if you enable this setting, unparam will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find external interfaces. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + unused: + # treat code as a program (not a library) and report unused exported identifiers; default is false. + # XXX: if you enable this setting, unused will report a lot of false-positives in text editors: + # if it's called for subdir of a project it can't find funcs usages. All text editor integrations + # with golangci-lint call it on a directory with the changed file. + check-exported: false + + whitespace: + multi-if: false # Enforces newlines (or comments) after every multi-line if statement + multi-func: false # Enforces newlines (or comments) after every multi-line function signature + + wsl: + # If true append is only allowed to be cuddled if appending value is + # matching variables, fields or types on line above. Default is true. + strict-append: true + # Allow calls and assignments to be cuddled as long as the lines have any + # matching variables, fields or types. Default is true. + allow-assign-and-call: true + # Allow assignments to be cuddled with anything. Default is false. + allow-assign-and-anything: false + # Allow multiline assignments to be cuddled. Default is true. + allow-multiline-assign: true + # Allow declarations (var) to be cuddled. + allow-cuddle-declarations: false + # Allow trailing comments in ending of blocks + allow-trailing-comment: false + # Force newlines in end of case at this limit (0 = never). + force-case-trailing-whitespace: 0 + # Force cuddling of err checks with err var assignment + force-err-cuddling: false + # Allow leading comments to be separated with empty lines + allow-separated-leading-comment: false + + # The custom section can be used to define linter plugins to be loaded at runtime. See README doc + # for more info. + custom: + # Each custom linter should have a unique name. + example: + # The path to the plugin *.so. Can be absolute or local. Required for each custom linter + path: /path/to/example.so + # The description of the linter. Optional, just for documentation purposes. + description: This is an example usage of a plugin linter. + # Intended to point to the repo location of the linter. Optional, just for documentation purposes. 
+ original-url: github.com/golangci/example-linter + +linters: + enable: + - megacheck + - govet + disable: + - maligned + - prealloc + disable-all: false + presets: + - bugs + - unused + fast: false + + +issues: + # List of regexps of issue texts to exclude, empty list by default. + # But independently from this option we use default exclude patterns, + # it can be disabled by `exclude-use-default: false`. To list all + # excluded by default patterns execute `golangci-lint run --help` + exclude: + - abcdef + + # Excluding configuration per-path, per-linter, per-text and per-source + exclude-rules: + # Exclude some linters from running on tests files. + - path: _test\.go + linters: + - gocyclo + - errcheck + - dupl + - gosec + + # Exclude known linters from partially hard-vendored code, + # which is impossible to exclude via "nolint" comments. + - path: internal/hmac/ + text: "weak cryptographic primitive" + linters: + - gosec + + # Exclude some staticcheck messages + - linters: + - staticcheck + text: "SA9003:" + + # Exclude lll issues for long lines with go:generate + - linters: + - lll + source: "^//go:generate " + + # Independently from option `exclude` we use default exclude patterns, + # it can be disabled by this option. To list all + # excluded by default patterns execute `golangci-lint run --help`. + # Default value for this option is true. + exclude-use-default: false + + # The default value is false. If set to true exclude and exclude-rules + # regular expressions become case sensitive. + exclude-case-sensitive: false + + # The list of ids of default excludes to include or disable. By default it's empty. + include: + - EXC0002 # disable excluding of issues about comments from golint + + # Maximum issues count per one linter. Set to 0 to disable. Default is 50. + max-issues-per-linter: 0 + + # Maximum count of issues with the same text. Set to 0 to disable. Default is 3. + max-same-issues: 0 + + # Show only new issues: if there are unstaged changes or untracked files, + # only those changes are analyzed, else only changes in HEAD~ are analyzed. + # It's a super-useful option for integration of golangci-lint into existing + # large codebase. It's not practical to fix all existing issues at the moment + # of integration: much better don't allow issues in new code. + # Default is false. + new: false + + # Show only new issues created after git revision `REV` + new-from-rev: REV + + # Show only new issues created in git patch with set file path. + new-from-patch: path/to/patch/file + + # Fix found issues (if it's supported by the linter) + fix: true + +severity: + # Default value is empty string. + # Set the default severity for issues. If severity rules are defined and the issues + # do not match or no severity is provided to the rule this will be the default + # severity applied. Severities should match the supported severity names of the + # selected out format. + # - Code climate: https://docs.codeclimate.com/docs/issues#issue-severity + # - Checkstyle: https://checkstyle.sourceforge.io/property_types.html#severity + # - Github: https://help.github.com/en/actions/reference/workflow-commands-for-github-actions#setting-an-error-message + default-severity: error + + # The default value is false. + # If set to true severity-rules regular expressions become case sensitive. + case-sensitive: false + + # Default value is empty list. + # When a list of severity rules are provided, severity information will be added to lint + # issues. 
Severity rules have the same filtering capability as exclude rules, except you
+  # are allowed to specify one matcher per severity rule.
+  # Only affects out formats that support setting severity information.
+  rules:
+    - linters:
+        - dupl
+      severity: info
\ No newline at end of file
diff --git a/correlator/CHANGELOG.md b/correlator/CHANGELOG.md
new file mode 100644
index 0000000..7ac5d9a
--- /dev/null
+++ b/correlator/CHANGELOG.md
@@ -0,0 +1,285 @@
+# Changelog
+All notable changes to this project will be documented in this file.
+
+The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
+and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+
+## [1.1.6] - 2022-11-15
+### Fixed
+- Fixed tests
+
+## [1.1.5] - 2022-10-21
+### Changed
+- Changed a log message [MC-1308](https://jira.iwarma.ru/browse/MC-1308)
+- Added a test for aggregated event fields [MC-824](https://jira.iwarma.ru/browse/MC-824)
+- For syslog-type rules, the "proto" field was renamed to "protocol" [MC-1347](https://jira.iwarma.ru/browse/MC-1347)
+- Added a timeout for the HTTP rule [MC-1436](https://jira.iwarma.ru/browse/MC-1436)
+
+## [1.1.4] - 2022-07-25
+### Added
+
+- Field mapping when creating an index [MC-1061](https://jira.iwarma.ru/browse/MC-1061)
+
+### Fixed
+
+- Fixed tests so that the whole suite can run at once [MC-845](https://jira.iwarma.ru/browse/MC-845)
+
+## [1.1.3] - 2022-07-20
+
+### Fixed
+
+- Fixed adding a tag to an event [MC-166](https://jira.iwarma.ru/browse/MC-166)
+
+## [1.1.2]
+
+### Fixed
+
+- Incident title length increased from 128 to 256 characters [MC-723](https://jira.iwarma.ru/browse/MC-723)
+- Cyclic incident creation [MC-166](https://jira.iwarma.ru/browse/MC-166)
+
+
+## [1.1.1] - 2022-05-31
+
+### Fixed
+
+- Fixed sending aggregated events to elastic [MC-819](https://jira.iwarma.ru/browse/MC-819)
+
+## [1.1.0] - 2022-05-23
+
+### Added
+
+- Added a ``message`` field for suricata messages [MC-97](https://jira.iwarma.ru/browse/MC-97)
+- Added a universal CEF receiver [MC-327](https://jira.iwarma.ru/browse/MC-327)
+
+## [1.0.10] - 2022-05-12
+
+### Changed
+- Changed the event aggregation loop (added a limit on event fetching) [#23](https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/23)
+- Reworked predicate handling [#18](https://gitlab.iwarma.ru/iwa/dev/console/correlator/-/issues/18)
+
+## [1.0.9] - 2021-11-09
+
+### Changed
+- Disable function name in log messages
+- Update bulk requests are now sent inside the rule execution loop
+
+## [1.0.8] - 2021-10-30
+
+### Changed
+- Fix problem in CheckAndCreateIndex when the index already exists
+
+## [1.0.7] - 2021-10-25
+
+### Added
+- Custom aggregation fields
+- Create the aggregated index if we don't have one
+- YAML format for config file
+
+### Changed
+- File config_example.json.
Update elasticsearch section
+
+## [1.0.6] - 2021-09-01
+
+### Fixed
+- If we get an error in RunRulesSync's elastic call, we now report the error and
+disable the rule
+
+## [1.0.5] - 2021-08-11
+
+### Added
+- Query string predicate
+
+## [1.0.4] - 2021-08-03
+
+### Changed
+- All ignore-ssl-errors options are now enabled by default
+
+## [1.0.3] - 2021-07-15
+
+### Changed
+- Add ability to ignore SSL errors in the elasticsearch client
+- Add ability to ignore SSL errors in requests to Console
+
+## [1.0.2] - 2021-06-23
+
+### Fixed
+- For the http action, the content-type header was not processed correctly
+- Fix problem with index creation in main.go
+
+
+## [1.0.1] - 2021-06-08
+
+### Changed
+- Normalized events now show their index
+
+## [1.0.0]
+
+### Changed
+- New elasticsearch connection package
+- Aggregator algorithm
+
+## [0.1.29] - 2021-04-14
+
+### Changed
+- Add option to select log formatter
+- Add ability to encode queries to elasticsearch
+
+## [0.1.28] - 2021-03-19
+
+### Added
+- Ability to set logging level
+- Logging to file
+- Log rotation
+
+### Changed
+- Logging verbosity
+
+## [0.1.27] - 2021-01-04
+
+### Fixed
+- Problem with FirewallRule: the success response was parsed incorrectly
+
+## [0.1.26] - 2020-11-11
+
+### Added
+- FirewallRule action will send an apply request to the firewall
+after all rules are created
+- TestServer to simulate HTTP endpoints
+
+### Changed
+- Correlator bash test
+
+## [0.1.25] - 2020-11-10
+
+### Changed
+- Fix firewall action template render
+
+## [0.1.24] - 2020-11-10
+
+### Changed
+- For the incident action, select multi rule to add
+all events to that incident
+
+## [0.1.23] - 2020-11-06
+
+### Changed
+- Add sensor type to incident and asset actions
+
+## [0.1.22] - 2020-11-02
+
+### Changed
+- Replace API handler functions with closure generators
+- Replace API router with Gorilla
+- Fix error messages in FirewallAction.ParseInterface func
+
+## [0.1.21] - 2020-10-30
+
+### Added
+- Smart mapping
+
+## [0.1.20] - 2020-10-28
+
+### Added
+- Add option CFG_A_CLEAR_NORMALIZED to clear normalized events
+after correlation. This should prevent disk overflow.
+
+## [0.1.19] - 2020-10-28
+
+### Added
+- GetNow function to get the current time according to the CFG_UTC_NOW setting
+
+### Changed
+- Functions where the aggregator and correlator create a time range now
+use GetNow to sync queries to global system time
+
+## [0.1.18] - 2020-10-28
+
+### Added
+- Flags to disable aggregator and correlator
+
+## [0.1.17] - 2020-10-27
+
+### Changed
+- Move aggregator to a separate function
+
+### Added
+- Integration test for the aggregator
+
+## [0.1.16] - 2020-10-22
+
+### Changed
+- Add "Single" action rule. In such a rule, the action will be applied
+to every event that matches the rule predicate
+
+## [0.1.14] - 2020-10-20
+
+### Changed
+- Change incident action title field. It is now limited to 127 characters
+- Change aggregated event hash function; it is now SHA-512/256
+
+## [0.1.13] - 2020-10-05
+
+### Changed
+- Change ARMAIF response parsing code
+
+## [0.1.12] - 2020-10-05
+
+### Changed
+- Move request/response dump code to a separate function
+
+## [0.1.11] - 2020-10-05
+
+### Changed
+- Fix FirewallAction interface argument; it is now a string, not a list
+
+## [0.1.10] - 2020-10-04
+
+### Changed
+- Fix FirewallAction dump requests
+
+## [0.1.9] - 2020-10-04
+
+### Changed
+- Dumped FirewallAction requests now have more informative content
+- Dumped FirewallAction requests now have a more human-readable file name
+
+## [0.1.8] - 2020-10-04
+
+### Added
+- Dump FirewallAction requests
+
+## [0.1.7] - 2020-10-04
+
+### Changed
+- Fix FirewallAction logging
+- Fix FirewallAction ARMAIF response status check. It expected 201 instead of 200.
+
+## [0.1.6] - 2020-10-04
+
+### Changed
+- Fix FirewallAction interface list serialization
+
+## [0.1.5] - 2020-10-04
+
+### Changed
+- The FirewallAction interface is now sent as a list to ARMAIF
+
+## [0.1.4] - 2020-10-04
+
+### Changed
+- Remove FirewallAction description size check. It is now up to Django to validate its length
+
+## [0.1.3] - 2020-10-04
+
+### Changed
+- Remove description template from FirewallAction
+
+## [0.1.2] - 2020-10-04
+
+### Changed
+- FirewallAction url
+
+## [0.1.1] - 2020-10-04
+
+### Added
+- Add FirewallAction ability to send actual requests to ARMAIF
diff --git a/correlator/Dockerfile b/correlator/Dockerfile
new file mode 100644
index 0000000..135d1ee
--- /dev/null
+++ b/correlator/Dockerfile
@@ -0,0 +1,30 @@
+FROM golang:1.14-alpine3.13 as builder
+WORKDIR /go/src/correlator
+
+RUN apk add --no-cache gcc g++ git
+
+COPY go.mod .
+RUN go mod download
+
+RUN apk add --no-cache curl httpie jq
+RUN curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v1.39.0 \
+    && go get -u github.com/jstemmer/go-junit-report \
+    && go get github.com/boumenot/gocover-cobertura
+
+COPY . .
+RUN CGO_ENABLED=0 go install -a -ldflags '-extldflags "-static"' ./cmd/correlator
+
+RUN apk add --no-cache bash
+CMD bash
+
+FROM alpine:3.13
+WORKDIR /
+
+RUN apk add --no-cache tini
+RUN apk add --no-cache bash
+COPY --from=builder /go/bin/* /usr/local/bin/
+COPY docker/sh/* /usr/local/bin/
+COPY docs/examples /etc/correlator
+
+ENTRYPOINT [ "tini", "--", "./docker/sh/entrypoint.sh" ]
+CMD [ "correlator", "-config", "/etc/correlator/config_example.json" ]
\ No newline at end of file
diff --git a/correlator/README.md b/correlator/README.md
new file mode 100644
index 0000000..30582b1
--- /dev/null
+++ b/correlator/README.md
@@ -0,0 +1,33 @@
+# Testing
+
+To run the tests, you must first start elastic:
+```bash
+docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -e ES_JAVA_OPTS="-Xms2g -Xmx2g" registry.iwarma.ru/iwa/dev/console-docker/console-elasticsearch:latest
+```
+
+Alternatively, via docker-compose:
+```bash
+docker-compose -f cicd/docker-compose.yml up -d && docker-compose -f cicd/docker-compose.yml logs -f golang
+docker-compose -f cicd/docker-compose.yml down
+```
+
+# Testing SSL
+
+To test against SSL, follow [this](https://www.elastic.co/guide/en/elasticsearch/reference/current/configuring-tls-docker.html) instruction for
+creating elastic with SSL.
+
+To test the ssl connection, use:
+```bash
+curl -k -u elastic:changeme https://elasticsearch:9200/
+```
+
+*Important!* You need to change the elastic password to `changeme` in the `.env` file.
+
+# Building
+
+To build correlator for running with docker:
+```bash
+CGO_ENABLED=0 go build -a -ldflags '-extldflags "-static"'
+```
+
+This will build a static version of correlator which can be used in an Alpine image (and possibly on macOS).
\ No newline at end of file
diff --git a/correlator/aggregator/aggregator.go b/correlator/aggregator/aggregator.go
new file mode 100644
index 0000000..f713dd9
--- /dev/null
+++ b/correlator/aggregator/aggregator.go
@@ -0,0 +1,56 @@
+package aggregator
+
+import (
+	"github.com/olivere/elastic/v7"
+	log "github.com/sirupsen/logrus"
+	"github.com/spf13/viper"
+	"iwarma.ru/console/correlator/config"
+	"iwarma.ru/console/correlator/events"
+	"time"
+)
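+
+// Aggregator periodically queries the normalized-events index and folds the
+// results into the aggregated event store until the stop channel fires.
+// A minimal usage sketch (mirroring the wiring in cmd/correlator/main.go):
+//
+//	stop := make(chan interface{})
+//	window := viper.GetDuration(config.AggregatorWindow)
+//	store := events.NewAggregatedEventStore(client, events.TimeWindow{
+//		Begin: time.Now().UTC(),
+//		End:   time.Now().UTC().Add(window),
+//	})
+//	go aggregator.Aggregator(store, window, stop)
+//	// ...
+//	stop <- struct{}{} // shut the aggregator down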
+func Aggregator(store *events.AggregatedEventStore, window time.Duration, stop chan interface{}) {
+	cl := log.WithField("part", "aggregator")
+	ticker1 := time.NewTicker(window)
+	ticker2 := time.NewTicker(viper.GetDuration(config.AggregatorIterationDuration))
+
+	// We use a single MatchAll query for all iterations
+	query := elastic.NewMatchAllQuery()
+
+	for {
+		select {
+		case <-stop:
+			cl.Info("Stopping aggregator")
+			ticker1.Stop()
+			ticker2.Stop()
+			store.SendUpdateBulk()
+			return
+
+		case <-ticker1.C:
+			store.UpdateWindow(events.TimeWindow{
+				Begin: time.Now().UTC(),
+				End:   time.Now().UTC().Add(window),
+			})
+			cl.Debugf("Aggregator window change: %v", store.GetWindow())
+
+		case <-ticker2.C:
+			cl.Trace("Start aggregator iteration")
+			raw, errs1 := store.GetClient().Query(events.GetNormalizedIndexName(), query)
+			eventsChan, errs2 := events.ParseEvents(raw, errs1, viper.GetInt(config.Threads))
+
+			store.AddEvents(eventsChan)
+
+			// Log errors if we have some
+			go func() {
+				for err := range errs2 {
+					if err != nil {
+						cl.Errorf("%+v", err)
+					}
+				}
+			}()
+			store.SendUpdateBulk()
+			cl.Trace("Finish aggregator iteration")
+		}
+	}
+}
diff --git a/correlator/aggregator/aggregator_test.go b/correlator/aggregator/aggregator_test.go
new file mode 100644
index 0000000..5d8494f
--- /dev/null
+++ b/correlator/aggregator/aggregator_test.go
@@ -0,0 +1,309 @@
+package aggregator
+
+import (
+	"encoding/json"
+	"fmt"
+	"github.com/olivere/elastic/v7"
+	"github.com/spf13/viper"
+	"iwarma.ru/console/correlator/config"
+	"iwarma.ru/console/correlator/es"
+	"iwarma.ru/console/correlator/events"
+	"iwarma.ru/console/correlator/util"
+	"testing"
+	"time"
+)
+
+func TestAggregator(t *testing.T) {
+	util.SetupTest(t)
+	defer util.TearDownTest(t)
+
+	client, err := es.NewElastic()
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	err = events.ClearIndex(client, viper.GetString(config.ElasticNormalizedIndexName), viper.GetString(config.ElasticAggregatedIndexName))
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	stop := make(chan interface{})
+	duration := time.Second * 10
+
+	eventsStore := events.NewAggregatedEventStore(client, events.TimeWindow{
+		Begin: time.Now().UTC(),
+		End:   time.Now().UTC().Add(duration),
+	})
+
+	go func() {
+		Aggregator(eventsStore, duration, stop)
+	}()
+
+	N := 3
+	for i := 0; i < N; i++ {
+		err = events.FillNormalizedEventsForAggregation(viper.GetString(config.ElasticNormalizedIndexName), N/(i+1), client)
+		if err != nil {
+			t.Errorf("%v", err)
+			return
+		}
+		time.Sleep(duration + time.Second)
+	}
+
+	stop <- struct{}{}
+
+	time.Sleep(time.Second * 5)
+
+	// Let's check what we have
+	eventsResult, errs := client.Query(viper.GetString(config.ElasticNormalizedIndexName), elastic.NewMatchAllQuery())
+	normalizedCount := 0
+	for range eventsResult {
+		normalizedCount++
+	}
+
+	if normalizedCount != 0 {
+		t.Errorf("Not all normalized events were deleted. Still have %v", normalizedCount)
+	}
+
+	for err = range errs {
+		if err != nil {
+			t.Errorf("%v", err)
+		}
+	}
+
+	// And Aggregated
+	eventsResult, errs = client.Query(viper.GetString(config.ElasticAggregatedIndexName), elastic.NewMatchAllQuery())
+	aggregatedCount := 0
+	for range eventsResult {
+		aggregatedCount++
+	}
+
+	if aggregatedCount != N {
+		t.Errorf("Bad aggregated events count. Expect %v, got %v", N, aggregatedCount)
+	}
+
+	for err = range errs {
+		if err != nil {
+			t.Errorf("%v", err)
+		}
+	}
+
+}
+
+func TestAggregatorSeveralNormalizedIndex(t *testing.T) {
+	util.SetupTest(t)
+	defer util.TearDownTest(t)
+
+	index1 := "arma-1"
+	index2 := "arma-2"
+
+	viper.Set(config.ElasticNormalizedIndexName, "arma-*")
+
+	client, err := es.NewElastic()
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	err = events.ClearIndex(client, viper.GetString(config.ElasticNormalizedIndexName), viper.GetString(config.ElasticAggregatedIndexName))
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	stop := make(chan interface{})
+	duration := time.Second * 10
+
+	eventsStore := events.NewAggregatedEventStore(client, events.TimeWindow{
+		Begin: time.Now().UTC(),
+		End:   time.Now().UTC().Add(duration),
+	})
+
+	go func() {
+		Aggregator(eventsStore, duration, stop)
+	}()
+
+	N := 3
+	for i := 0; i < N; i++ {
+		err = events.FillNormalizedEventsForAggregation(index1, N, client)
+		if err != nil {
+			t.Errorf("%v", err)
+			return
+		}
+
+		err = events.FillNormalizedEventsForAggregation(index2, N, client)
+		if err != nil {
+			t.Errorf("%v", err)
+			return
+		}
+	}
+
+	time.Sleep(duration)
+
+	stop <- struct{}{}
+	eventsStore.SendUpdateBulk()
+
+	// Let's check the aggregator stat
+	stat := eventsStore.GetStat()
+
+	if stat.EventsProcessed != uint64(N*N*2) {
+		t.Errorf("Bad event processed count: %v", stat.EventsProcessed)
+	}
+
+	// All normalized events must be aggregated into one aggregated event
+	if stat.EventsAggregated != 1 {
+		t.Errorf("Bad event aggregated count. Expect %v, got %v", 1, stat.EventsAggregated)
+	}
+
+	// Now, check indices
+	count, err := client.CountDocuments(index1, index2)
+	if err != nil {
+		t.Errorf("%v", err)
+	}
+
+	if count != 0 {
+		t.Errorf("Not all normalized events were deleted")
+	}
+
+	count, err = client.CountDocuments(viper.GetString(config.ElasticAggregatedIndexName))
+	if err != nil {
+		t.Errorf("%v", err)
+	}
+
+	if count != 1 {
+		t.Errorf("Bad aggregated events count.
Expect %v, got %v", 1, count) + } + +} + +func TestSendUpdateBulk(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + eventsFields := []string{ + "event_severity", + "event_protocol", + "message", + "device_vendor", + "device_product", + "device_action", + "device_version", + "device_timezone", + "sign_id", + "sign_category", + "sign_subcategory", + "application", + "source_ip", + "source_host", + "source_port", + "source_mac", + "source_timezone", + "source_software", + "source_action", + "destination_ip", + "destination_mac", + "destination_timezone", + "destination_software", + "destination_action", + "destination_host", + "destination_port", + "destination_user", + "cs1", + "cs1Label", + "cs2", + "cs2Label", + "object_type", + "event_last", + "source_user", + "@created", + "event_hash", + "index", + "timestamp", + "aggregated_id", + "event_src_msg", + "type", + "event_count", + "event_hash", + "event_timestamp", + "timestamp", + "event_id", + "event_first", + } + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, viper.GetString(config.ElasticNormalizedIndexName), viper.GetString(config.ElasticAggregatedIndexName)) + if err != nil { + t.Errorf("%v", err) + return + } + + stop := make(chan interface{}) + duration := time.Second * 10 + + eventsStore := events.NewAggregatedEventStore(client, events.TimeWindow{ + Begin: time.Now().UTC(), + End: time.Now().UTC().Add(duration), + }) + + go func() { + Aggregator(eventsStore, duration, stop) + }() + + N := 3 + for i := 0; i < N; i++ { + err = events.FillNormalizedEventsForAggregation(viper.GetString(config.ElasticNormalizedIndexName), N/(i+1), client) + if err != nil { + t.Errorf("%v", err) + return + } + time.Sleep(duration + time.Second) + } + + stop <- struct{}{} + eventsResult, errs := client.Query(viper.GetString(config.ElasticAggregatedIndexName), elastic.NewMatchAllQuery()) + errorsCount := 0 + for item := range eventsResult { + var event events.Event + err := json.Unmarshal(item.Source, &event) + if err != nil { + t.Errorf("%v", err) + } + for key, _ := range event { + if !contains(eventsFields, key) { + errorsCount++ + t.Errorf("Bad field -> %v", key) + } + } + + } + if errorsCount != 0 { + t.Errorf("Bad fields in events") + return + } + for err = range errs { + if err != nil { + t.Errorf("%v", err) + } + } + + // Clear after test + err = events.ClearIndex(client, viper.GetString(config.ElasticNormalizedIndexName), viper.GetString(config.ElasticAggregatedIndexName)) + if err != nil { + t.Errorf("%v", err) + } +} + +func contains(fields []string, item string) bool { + for _, field := range fields { + if field == item { + fmt.Printf("%v - %v \n", field, item) + return true + } + } + return false +} diff --git a/correlator/api/api.go b/correlator/api/api.go new file mode 100644 index 0000000..db12a4c --- /dev/null +++ b/correlator/api/api.go @@ -0,0 +1,111 @@ +package api + +import ( + "bytes" + "encoding/json" + "io" + "net/http" + + "iwarma.ru/console/correlator/events" + + log "github.com/sirupsen/logrus" + "iwarma.ru/console/correlator/rules" +) + +// LogUrlMiddleware Write remote address of handler caller +func LogUrlMiddleware(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + cl := log.WithFields(log.Fields{"url": r.URL.String(), "source": "API", "method": r.Method}) + cl.Debugf("Call from %v", r.RemoteAddr) + next.ServeHTTP(w, r) + }) +} + +// ClearStoreHandler Clear rule store +func 
ClearStoreHandler(store *rules.RuleStore) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + cl := log.WithFields(log.Fields{"url": r.URL, "source": "API", "method": r.Method}) + cl.Trace("Start") + defer cl.Trace("Finish") + + cl.Info("Clear store") + store.ClearStore() + w.WriteHeader(http.StatusOK) + } +} + +// AddRulesHandler Add rules to store +func AddRulesHandler(store *rules.RuleStore) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + cl := log.WithFields(log.Fields{"url": r.URL, "source": "API", "method": r.Method}) + cl.Trace("Start") + defer cl.Trace("Finish") + + found := false + if contentType, ok := r.Header["Content-Type"]; ok { + + for _, cur := range contentType { + if cur == "application/json" { + found = true + break + } + } + + } + + if !found { + cl.Errorf("Bad content type") + w.WriteHeader(http.StatusBadRequest) + return + } + + var buffer bytes.Buffer + tee := io.TeeReader(r.Body, &buffer) + + var newRules []rules.Rule + err := json.NewDecoder(tee).Decode(&newRules) + if err != nil { + cl.Errorf("Can't decode request: %v", err) + cl.Debugf("Incoming message: %v", buffer.String()) + w.WriteHeader(http.StatusBadRequest) + return + } + + cl.Infof("Add new %v rules", len(newRules)) + store.AddRules(newRules...) + w.WriteHeader(http.StatusOK) + } +} + +// StatHandler Get Correlator and aggregator stat +func StatHandler(agr *events.AggregatedEventStore, cor *rules.RuleStore) func(w http.ResponseWriter, r *http.Request) { + return func(w http.ResponseWriter, r *http.Request) { + cl := log.WithFields(log.Fields{"url": r.URL, "source": "API", "method": r.Method}) + cl.Trace("Start") + defer cl.Trace("Finish") + + data, err := json.Marshal(struct { + AggregatorStat *events.Stat `json:"aggregator"` + CorrelatorStat *rules.Stat `json:"correlator"` + }{ + AggregatorStat: agr.GetStat(), + CorrelatorStat: cor.GetStat(), + }) + + if err != nil { + cl.Errorf("Can't serialize stat: %v", err) + w.WriteHeader(http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + count, err := w.Write(data) + if count != len(data) { + cl.Errorf("Bad write count. 
Expect %v, got %v", len(data), count) + } + + if err != nil { + cl.Errorf("%v", err) + } + } +} diff --git a/correlator/api/api_test.go b/correlator/api/api_test.go new file mode 100644 index 0000000..33551b6 --- /dev/null +++ b/correlator/api/api_test.go @@ -0,0 +1,336 @@ +package api + +import ( + "bytes" + "io/ioutil" + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/events" + "iwarma.ru/console/correlator/util" + "net/http" + "net/http/httptest" + "testing" + "time" + + "iwarma.ru/console/correlator/rules" +) + +func TestAddRulesHandler(t *testing.T) { + + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + rulesStr := ` + [ + { + "name":"Test2", + "depth":"600s", + "id":"4", + "predicat":{ + "name":"top_predicat", + "type":"query_string", + "field":"destination_host", + "value":[ + { + "name":"top_predicat_child1", + "type":"match", + "field":"sign_category", + "value":"Firewall", + "parent":"top_predicat" + }, + { + "name":"top_predicat_child2", + "type":"match", + "field":"device_vendor", + "value":"armaif", + "parent":"top_predicat" + } + ], + "parent":"top_predicat" + }, + "actions":[ + { + "type":"firewall", + "quick":true, + "action":"pass", + "armaif":"1", + "sensor":{ + "ip":"1.1.1.1", + "key":"3uIhK/pCIKaLhkv9cmYg4V7DQ1Adt4zZLovThtbfaYzqr1YUdG4DdK6lKuqEjq0vfrDac2KczPbm7dwW", + "scheme":"http", + "secret":"5bYfRxkmDwFFY3WL/AHIooFTVQwHdDYSGT8I/6zvzwiSPAMrc9YNNS1CMRoSerxtUnkXRm2ZTEe4LtWo" + }, + "enabled":true, + "gateway":"", + "protocol":"any", + "sequence":"5000", + "direction":"in", + "interface":"lan", + "ipprotocol":"inet", + "source_net":"any", + "description":"", + "source_port":"", + "destination_net":"any", + "destination_port":"" + } + ], + "multi":false + } + ]` + + body := bytes.NewBuffer([]byte(rulesStr)) + + request := httptest.NewRequest("POST", "/add_many/", body) + request.Header.Add("Content-Type", "application/json") + w := httptest.NewRecorder() + + handler := AddRulesHandler(store) + handler(w, request) + + resp := w.Result() + + if resp.StatusCode != http.StatusOK { + t.Errorf("Bad response status code: %v", resp.StatusCode) + } + + // Let's check what we have + stat := store.GetStat() + + if stat.RuleCount != 1 { + t.Errorf("Bad rule count. 
Expect 1 got %v", stat.RuleCount) + } +} + +func TestAddRulesHandler2(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + rulesStr := ` + [ + { + "name":"Test2", + "depth":"600s", + "id":"4", + "predicat":{ + "name":"top_predicat", + "type":"query_string", + "field":"", + "value":[ + { + + "type":"query_string", + "field":"sign_category", + + "operands":"sign_category:\"Firewall\"" + }, + { + + "type":"query_string", + "field":"", + "operands":"device_vendor:\"armaif\" ", + + } + ], + + }, + "actions":[ + { + "type":"firewall", + "quick":true, + "action":"pass", + "armaif":"1", + "sensor":{ + "ip":"1.1.1.1", + "key":"3uIhK/pCIKaLhkv9cmYg4V7DQ1Adt4zZLovThtbfaYzqr1YUdG4DdK6lKuqEjq0vfrDac2KczPbm7dwW", + "scheme":"http", + "secret":"5bYfRxkmDwFFY3WL/AHIooFTVQwHdDYSGT8I/6zvzwiSPAMrc9YNNS1CMRoSerxtUnkXRm2ZTEe4LtWo" + }, + "enabled":true, + "gateway":"", + "protocol":"any", + "sequence":"5000", + "direction":"in", + "interface":"lan", + "ipprotocol":"inet", + "source_net":"any", + "description":"", + "source_port":"", + "destination_net":"any", + "destination_port":"" + } + ], + "multi":false + } + ]` + + body := bytes.NewBuffer([]byte(rulesStr)) + + request := httptest.NewRequest("POST", "/add_many/", body) + w := httptest.NewRecorder() + + handler := AddRulesHandler(store) + handler(w, request) + + resp := w.Result() + + if resp.StatusCode != http.StatusBadRequest { + t.Errorf("Bad response status code: %v", resp.StatusCode) + } + + // Let's check what we have + stat := store.GetStat() + + if stat.RuleCount != 0 { + t.Errorf("Bad rule count. Expect 0 got %v", stat.RuleCount) + } +} + +func TestClearStoreHandler(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: true, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:TestDevice"), + Actions: actions, + }) + + request := httptest.NewRequest("GET", "/clear/", nil) + w := httptest.NewRecorder() + + handler := ClearStoreHandler(store) + handler(w, request) + + resp := w.Result() + + if resp.StatusCode != http.StatusOK { + t.Errorf("Bad response status code: %v", resp.StatusCode) + } + + // Check what we have + stat := store.GetStat() + if stat.RuleCount != 0 { + t.Errorf("Bad rule count. 
Expect 0, got %v", stat.RuleCount) + } +} + +func TestCorrelatorStatHandler(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: true, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:TestDevice"), + Actions: actions, + }) + + request := httptest.NewRequest("GET", "/stat/", nil) + w := httptest.NewRecorder() + duration := time.Second * 30 + ruleStore := rules.NewRuleStore(client) + eventsStore := events.NewAggregatedEventStore(client, events.TimeWindow{ + Begin: time.Now().UTC(), + End: time.Now().UTC().Add(duration), + }) + handler := StatHandler(eventsStore, ruleStore) + handler(w, request) + + resp := w.Result() + + if resp.StatusCode != http.StatusOK { + t.Errorf("Bad response status code: %v", resp.StatusCode) + } + + // Let's check response + body, err := ioutil.ReadAll(resp.Body) + defer resp.Body.Close() + + if string(body) != `{"aggregator":{"events_processed":0,"events_aggregated":0,"average_iteration":{"value":"0s"}},"correlator":{"rule_count":0,"average_time":{"value":"0s"},"average_rule_time":{},"incident_count":0,"events_count":0,"errors":{}}}` { + t.Errorf("Bad response: %v", string(body)) + } +} diff --git a/correlator/cicd/Dockerfile b/correlator/cicd/Dockerfile new file mode 100644 index 0000000..4319030 --- /dev/null +++ b/correlator/cicd/Dockerfile @@ -0,0 +1,3 @@ +FROM golang:1.16-alpine + +RUN apk update && apk add git curl gcc musl-dev bash diff --git a/correlator/cicd/config.yml b/correlator/cicd/config.yml new file mode 100644 index 0000000..dff6c02 --- /dev/null +++ b/correlator/cicd/config.yml @@ -0,0 +1,30 @@ +repo_name: console_correlator +type: component +projects: + amccorrelator: + variables: + exe_path: 'cmd/correlator' + packages: + amccorrelator: + deb: + files: + - include: + - 'deb/skeleton/DEBIAN' + - 'deb/skeleton/usr' + exclude: + - '.gitkeep' + dst_dir: '' + - include: + - 'cmd/correlator/correlator' + dst_dir: 'usr/local/armaconsole/app/amccorrelator/cmd/correlator' + control: + maintainer: 'InfoWatch ARMA' + section: 'admin' + depends: '' + pre-depends: '' + priority: 'optional' + description: | + ARMA management console (AMC) service + AMC manages sensors and monitors events and incidents + arch: + - amd64 diff --git a/correlator/cicd/docker-compose.yml b/correlator/cicd/docker-compose.yml new file mode 100644 index 0000000..9a49ea4 --- /dev/null +++ b/correlator/cicd/docker-compose.yml @@ -0,0 +1,22 @@ +version: "3.7" +services: + elasticsearch: + container_name: elasticsearch + image: registry.iwarma.ru/iwa/dev/console-docker/console-elasticsearch:latest + ports: + - "9200:9200" + # - "9300:9300" + environment: + ES_JAVA_OPTS: "-Xmx4g -Xms4g" + ELASTIC_PASSWORD: changeme + # Use single node discovery in order to disable production mode and avoid bootstrap checks + # see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html + discovery.type: single-node + + golang: + #image: 
golang:gitlab + build: + context: . + volumes: + - "../:/src:rw" + command: bash -c "cd /src && /bin/bash cicd/test_job.sh" diff --git a/correlator/cicd/test.sh b/correlator/cicd/test.sh new file mode 100644 index 0000000..c4beaae --- /dev/null +++ b/correlator/cicd/test.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -ex # fail on any error + +PKG=$(cat go.mod | grep module | awk '{print $2}') +PKG_LIST=$(go list ${PKG}/... ) + +for CUR_PKG in $PKG_LIST +do + go test -timeout 600s -coverprofile=/tmp/go-code-cover $CUR_PKG +done \ No newline at end of file diff --git a/correlator/cicd/test_job.sh b/correlator/cicd/test_job.sh new file mode 100644 index 0000000..4b986a6 --- /dev/null +++ b/correlator/cicd/test_job.sh @@ -0,0 +1,58 @@ +#!/bin/bash + +trap 'rm_fd' EXIT + +set -x + +log_file_path="amccorrelator_tests.log" + +exec 3> >(tee -a $log_file_path) +exec 1>&3 +exec 2>&3 + + +rm_fd() { + exec 1>&- + exec 2>&- + exec 3>&- + wait +} + + +timeout=10 + +keys="GOPROXY=http://nexus.iwarma.ru/repository/proxy-go/ \ + GOPRIVATE=https://gitlab.iwarma.ru" +echo $keys + +export $keys + +function check { + if [ "$1" == "$timeout" ]; then + echo "timeout. connection to $2 failed" + exit 1 + else + echo "connection to $2 established" + fi +} + + +response=0 +counter=0 + +while [ "$response" != 200 ] && [ "$counter" != "$timeout" ]; do + response=$(curl -s -o /dev/null -w "%{http_code}" -u elastic:changeme -X GET http://elasticsearch:9200/) + sleep 5s + counter=$(( $counter + 1 )) +done + +check $counter "elasticsearch" + + +set -ex # fail on any error + +cp cmd/correlator/config_example.json cmd/correlator/config.json + +echo "tests starting..." + +/bin/bash ./cicd/test.sh diff --git a/correlator/cmd/correlator/.gitignore b/correlator/cmd/correlator/.gitignore new file mode 100644 index 0000000..f105470 --- /dev/null +++ b/correlator/cmd/correlator/.gitignore @@ -0,0 +1 @@ +correlator diff --git a/correlator/cmd/correlator/config_example.yaml b/correlator/cmd/correlator/config_example.yaml new file mode 100644 index 0000000..2e1b86f --- /dev/null +++ b/correlator/cmd/correlator/config_example.yaml @@ -0,0 +1,91 @@ +verbose: false # More information in log, default is false +threads: 10 # Number of working threads, default is 10 + +aggregator: + updater: + workers: 1 # Number of working threads for aggregator bulk update, default is 1 + bulk_count: 100 # Number of bulk requests for aggregated events, before flush will be called, default is 100 + flush: 1m # Time interval, after witch, flush will be called, default is 1m + normalizer: + workers: 1 # Number of working threads for aggregator bulk update normalized events, default is 1 + bulk_count: 100 # Number of bulk requests for normalized events, before flush will be called, default is 100 + flush: 1m # Time interval, after witch, flush will be called, default is 1m + + iteration: 30s # How often aggregator will query for next bunch of normalized events. Default value is 30s + window: 5m # Aggregator inspection window size. Default value is 30s + +correlator: + workers: 1 # Number of working threads for correlator bulk requests, default is 1 + bulk_count: 100 # Number of bulk requests for aggregated events, before flush will be called, default is 100 + flush: 1m # Time interval, after witch, flush will be called, default is 1m + api: + port: 556 # Port for correlator API. Default value is 5566 + +console: + auth: + username: admin # Username to work with AMC (console) web interface. 
No default value
+    password: nimda # Password to work with AMC (console) web interface. No default value
+  url:
+    token: "http://localhost:8000/ru/api/auth/token/" # Url to obtain auth token from web interface. No default value
+    incident: "http://localhost:8000/ru/api/logstash/incident/" # Url to create incident in web interface. No default value
+    asset: "http://localhost:8000/ru/api/logstash/asset/" # Url to create asset in web interface. No default value
+  ignore_ssl_errors: true # If true, all connections to console will ignore SSL errors. Default is true
+
+debug:
+  dump: false # Dump all network requests/responses. Default is false
+  path: "/var/log/armaconsole" # Path where debug dumps are stored
+
+syslog:
+  tag: "correlator" # Tag for syslog action. Default is "correlator"
+
+log:
+  filename: license.log # Where to write the log
+  max_size: 100 # Max size of log file before rotation
+  max_bkup: 10 # How many old log files to keep
+  max_age: 10 # How long we need to keep old log files
+  compress: true # Should we compress old log files
+  level: 4 # Verbosity of logging. Default is 2. Max is 6 (trace)
+  formatter: text # Formatter type. Valid values are "json" and "text"
+  force_colors: true # If formatter is "text", this option enables color output for logging
+
+elastic:
+  aggregated_index: "aggregated-2006.01.02" # Index pattern for aggregator, default is aggregated-2006.01.02
+  normalized_index: "arma-*" # Index pattern for correlator, default is arma-*. Warning! If changed, logstash settings need to change as well
+  url: "http://localhost:9200" # URL for elasticsearch access. No default value
+  username: "elastic" # Username for elasticsearch access. No default value
+  password: "changeme" # Password for elasticsearch access. No default value
+  retry:
+    count: 10 # How many times should we retry to connect to elasticsearch. Default is 10
+    timeout: 20s # How long must we wait before the next connection attempt. Default is 20s
+  log:
+    query: false # Log every elasticsearch query we call. Default is false
+    encode_query: false # Encode elasticsearch query with base64. Default is false
+  ignore_ssl_errors: false # If true, elastic client will ignore HTTPS errors. Default is true
+
+actions:
+  firewall_rule:
+    ignore_ssl_errors: true # If true, firewall rule action will ignore SSL errors.
Default if true + +events_fields: # List of fields for aggregation + - event_severity + - event_protocol + - device_vendor + - device_product + - device_action + - device_version + - sign_id + - sign_category + - sign_subcategory + - sign_name + - source_ip + - source_host + - source_port + - destination_ip + - destination_host + - destination_port + - destination_user + - cs1 + - cs1Label + - cs2 + - cs2Label + - object_type \ No newline at end of file diff --git a/correlator/cmd/correlator/logging.go b/correlator/cmd/correlator/logging.go new file mode 100644 index 0000000..504460b --- /dev/null +++ b/correlator/cmd/correlator/logging.go @@ -0,0 +1,57 @@ +package main + +import ( + "io" + "os" + "time" + + "github.com/natefinch/lumberjack" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" +) + +func getLevel() log.Level { + level := viper.GetInt(config.LogLevel) + + switch level { + case 0: + return log.PanicLevel + case 1: + return log.FatalLevel + case 2: + return log.ErrorLevel + case 3: + return log.WarnLevel + case 4: + return log.InfoLevel + case 5: + return log.DebugLevel + case 6: + return log.TraceLevel + } + return log.InfoLevel +} + +func initLogging() error { + lumberjackLogrotate := &lumberjack.Logger{ + Filename: viper.GetString(config.LogFileName), + MaxSize: viper.GetInt(config.LogMaxSize), + MaxBackups: viper.GetInt(config.LogMaxBkup), + MaxAge: viper.GetInt(config.LogMaxAge), + Compress: viper.GetBool(config.LogCompress), + } + + if viper.GetString(config.LogFormatter) == "json" { + log.SetFormatter(&log.JSONFormatter{TimestampFormat: time.RFC3339}) + } else { + log.SetFormatter(&log.TextFormatter{TimestampFormat: time.RFC3339, ForceColors: viper.GetBool(config.LogForceColors)}) + } + + logMultiWriter := io.MultiWriter(os.Stdout, lumberjackLogrotate) + log.SetOutput(logMultiWriter) + + log.SetLevel(getLevel()) + + return nil +} diff --git a/correlator/cmd/correlator/main.go b/correlator/cmd/correlator/main.go new file mode 100644 index 0000000..0ea20d1 --- /dev/null +++ b/correlator/cmd/correlator/main.go @@ -0,0 +1,231 @@ +package main + +import ( + "flag" + "fmt" + "net/http" + "os" + "runtime" + "runtime/pprof" + "strings" + "sync" + "time" + + "iwarma.ru/console/correlator/aggregator" + "iwarma.ru/console/correlator/api" + "iwarma.ru/console/correlator/config" + "iwarma.ru/console/correlator/correlator" + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/events" + "iwarma.ru/console/correlator/mapping" + "iwarma.ru/console/correlator/rules" + + "github.com/spf13/viper" + + log "github.com/sirupsen/logrus" + + "github.com/gorilla/mux" +) + +var ( + VERSION = "1.1.6" +) + +var ( + configFile = flag.String("config", "config.json", "Path to config file") + cpuprofile = flag.String("cpuprofile", "", "write cpu profile to `file`") + memprofile = flag.String("memprofile", "", "write memory profile to `file`") + noMappingFlag = flag.Bool("no-mapping", false, "Don't apply mapping") + versionFlag = flag.Bool("version", false, "Show version and exit") + noCorrelator = flag.Bool("no-correlator", false, "Disable correlator") + noAggregator = flag.Bool("no-aggregator", false, "Disable aggregator") +) + +func main() { + + cl := log.WithFields(log.Fields{"part": "main", "ver": VERSION}) + + //############################## + // Read and parse config + //############################## + + flag.Parse() + + if *versionFlag == true { + cl.Infof("Correlator version %v", VERSION) + return + } + + // Viper config + err := 
config.ReadCfg(*configFile) + if err != nil { + cl.Fatalf("%v", err) + } + + //############################## + // Init logging + //############################## + err = initLogging() + if err != nil { + cl.Fatalf("%v", err) + } + + //############################## + // Profiling + //############################## + if *cpuprofile != "" { + f, err := os.Create(*cpuprofile) + if err != nil { + cl.Fatal("could not create CPU profile: ", err) + } + defer f.Close() // error handling omitted for example + if err := pprof.StartCPUProfile(f); err != nil { + cl.Fatal("could not start CPU profile: ", err) + } + cl.Infof("Start CPU profile") + defer pprof.StopCPUProfile() + } + + //############################## + // Memory profiling + //############################## + if *memprofile != "" { + f, err := os.Create(*memprofile) + if err != nil { + cl.Fatal("could not create memory profile: ", err) + } + defer f.Close() // error handling omitted for example + runtime.GC() // get up-to-date statistics + if err := pprof.WriteHeapProfile(f); err != nil { + cl.Fatal("could not write memory profile: ", err) + } + + cl.Infof("Start Memory profile") + } + + //############################## + // Connecting to elastic + //############################## + client, err := es.NewElastic() + if err != nil { + cl.Fatalf("Can't conect to elastic: %v", err) + } + + //############################## + // Prepare store + //############################## + + // Correlator's store + ruleStore := rules.NewRuleStore(client) + + // Aggregator's store + window := viper.GetDuration(config.AggregatorWindow) + eventsStore := events.NewAggregatedEventStore(client, events.TimeWindow{ + Begin: time.Now().UTC(), + End: time.Now().UTC().Add(window), + }) + + //############################## + // Api + //############################## + cl.Infof("Starting API on port %v", viper.GetInt(config.ApiPort)) + router := mux.NewRouter() + router.HandleFunc("/add_many/", api.AddRulesHandler(ruleStore)).Methods("POST") + router.HandleFunc("/clear/", api.ClearStoreHandler(ruleStore)).Methods("GET") + router.HandleFunc("/stats/", api.StatHandler(eventsStore, ruleStore)).Methods("GET") + router.Use(api.LogUrlMiddleware) + + // TODO: Add error control + go http.ListenAndServe(fmt.Sprintf(":%v", viper.GetInt(config.ApiPort)), router) + cl.Infof("API server started on port %v", viper.GetInt(config.ApiPort)) + + //############################## + // Prepare elastic + //############################## + cl.Info("Checking indices") + + var normalizedEventIndexName, aggregatedEventIndexName string + if strings.Contains(events.GetNormalizedIndexName(), "*") || strings.Contains(events.GetAggregatedIndexName(), "*") { + // Need to replace * with current date + normalizedEventIndexName = strings.Replace(events.GetNormalizedIndexName(), "*", time.Now().UTC().Format("2006.01.02"), 1) + aggregatedEventIndexName = strings.Replace(events.GetAggregatedIndexName(), "*", time.Now().UTC().Format("2006.01.02"), 1) + } else { + normalizedEventIndexName = events.GetNormalizedIndexName() + aggregatedEventIndexName = events.GetAggregatedIndexName() + } + + err = client.CheckAndCreateIndex(normalizedEventIndexName, aggregatedEventIndexName) + if err != nil { + cl.Fatalf("Can't check index for normalized events: %v", err) + } + + //############################## + // Mapping + //############################## + if *noMappingFlag { + cl.Info("No mapping will be applied") + } else { + + var mappingArray []mapping.Mapping + // See https://github.com/spf13/viper/issues/196 + err 
:= viper.UnmarshalKey("mapping", &mappingArray) + if err != nil { + cl.Errorf("Can't read mapping from config") + } + + err = mapping.UpdateMapping(&mappingArray) + if err != nil { + cl.Errorf("Can't apply mapping: %v", err) + } + } + + cl.Info("Indices are ready") + + //############################## + // Debug dumping prepare + //############################## + if viper.GetBool(config.DebugDumpRequest) { + if _, err := os.Stat(viper.GetString(config.DebugDumpPath)); os.IsNotExist(err) { + log.Fatalf("Dump path does not exist") + } + } + + var wg sync.WaitGroup + + wgCount := 2 + if *noAggregator == true { + wgCount -= 1 + } + + if *noCorrelator == true { + wgCount -= 1 + } + + wg.Add(wgCount) + + //############################## + // Aggregator + //############################## + + aggregatorStop := make(chan interface{}) + if *noAggregator == false { + go func() { + aggregator.Aggregator(eventsStore, window, aggregatorStop) + wg.Done() + }() + } + + //############################## + // Correlator + //############################## + + correlatorStop := make(chan interface{}) + if *noCorrelator == false { + go func() { + correlator.Correlator(ruleStore, correlatorStop) + wg.Done() + }() + } + + wg.Wait() +} diff --git a/correlator/config/config.go b/correlator/config/config.go new file mode 100644 index 0000000..09b474a --- /dev/null +++ b/correlator/config/config.go @@ -0,0 +1,49 @@ +package config + +// Config names +const ( + Verbose = "verbose" // More information in log, default is false + Threads = "threads" // Number of working threads, default is 10 + AggregatorUpdateWorkers = "aggregator.updater.workers" // Number of working threads for aggregator bulk update, default is 1 + AggregatorBulkCount = "aggregator.updater.bulk_count" // Number of bulk requests for aggregated events, before flush will be called, default is 100 + AggregatorBulkFlushInterval = "aggregator.updater.flush" // Time interval, after witch, flush will be called, default is 1m + AggregatorNormalizedWorkers = "aggregator.normalizer.workers" // Number of working threads for aggregator bulk update normalized events, default is 1 + AggregatorNormalizerBulkCount = "aggregator.normalizer.bulk_count" // Number of bulk requests for normalized events, before flush will be called, default is 100 + AggregatorNormalizedBulkFlushInterval = "aggregator.normalizer.flush" // Time interval, after witch, flush will be called, default is 1m + AggregatorIterationDuration = "aggregator.iteration" // How often aggregator will query for next bunch of normalized events. Default value is 30s + AggregatorWindow = "aggregator.window" // Aggregator inspection window size. Default value is 30s + CorrelatorWorkers = "correlator.workers" // Number of working threads for correlator bulk requests, default is 1 + CorrelatorBulkCount = "correlator.bulk_count" // Number of bulk requests for aggregated events, before flush will be called, default is 100 + CorrelatorFlushInterval = "correlator.flush" // Time interval, after witch, flush will be called, default is 1m + ApiPort = "correlator.api.port" // Port for correlator API. Default value is 5566 + ConsoleUsername = "console.auth.username" // Username to work with AMC (console) web interface. No default value + ConsolePassword = "console.auth.password" // Password to work with AMC (console) web interface. No default value + ConsoleUrlToken = "console.url.token" // Url to obtain auth token from web interface. 
No default value
+	ConsoleUrlIncident                    = "console.url.incident"                    // Url to create incident in web interface. No default value
+	ConsoleUrlAsset                       = "console.url.asset"                       // Url to create asset in web interface. No default value
+	ConsoleIgnoreSSLErrors                = "console.ignore_ssl_errors"               // If true, all connections to console will ignore SSL errors. Default is true
+	DebugDumpRequest                      = "debug.dump"                              // Dump all network requests/responses. Default is false
+	DebugDumpPath                         = "debug.path"                              // Path where debug dumps are stored
+	SyslogTag                             = "syslog.tag"                              // Tag for syslog action. Default is "correlator"
+	LogFileName                           = "log.filename"                            // Where to write logs. Default is "correlator.log"
+	LogMaxSize                            = "log.max_size"                            // Max size of log file before rotation. Default is 100
+	LogMaxBkup                            = "log.max_bkup"                            // How many old log files to keep. Default is 10
+	LogMaxAge                             = "log.max_age"                             // How long we need to keep old log files. Default is 10
+	LogCompress                           = "log.compress"                            // Should we compress old log files. Default is true
+	LogLevel                              = "log.level"                               // Verbosity of logging. Default is 2. Max is 6 (trace)
+	LogFormatter                          = "log.formatter"                           // Formatter type. Valid values are "json" and "text". Default is "json"
+	LogForceColors                        = "log.force_colors"                        // If formatter is "text", this option enables color output for logging. Default is false
+	ElasticAggregatedIndexName            = "elastic.aggregated_index"                // Index pattern for aggregator, default is aggregated-2006.01.02
+	ElasticNormalizedIndexName            = "elastic.normalized_index"                // Index pattern for correlator, default is arma-*. Warning! If changed, logstash settings need to change as well
+	ElasticUrl                            = "elastic.url"                             // URL for elasticsearch access. No default value
+	ElasticUsername                       = "elastic.username"                        // Username for elasticsearch access. No default value
+	ElasticPassword                       = "elastic.password"                        // Password for elasticsearch access. No default value
+	ElasticRetryCount                     = "elastic.retry.count"                     // How many times should we retry to connect to elasticsearch. Default is 10
+	ElasticConnectionTimeout              = "elastic.retry.timeout"                   // How long must we wait before the next connection attempt. Default is 20s
+	ElasticLogQuery                       = "elastic.log.query"                       // Log every elasticsearch query we call. Default is false
+	ElasticLogEncodeQuery                 = "elastic.log.encode_query"                // Encode elasticsearch query with base64. Default is false
+	ElasticIgnoreSSLErrors                = "elastic.ignore_ssl_errors"               // If true, elastic client will ignore HTTPS errors. Default is true
+	ActionFirewallRuleIgnoreSSLErrors     = "actions.firewall_rule.ignore_ssl_errors" // If true, firewall rule action will ignore SSL errors.
Default if true + AggregatedFields = "events_fields" // List of fields for aggregation + ScrollSize = "scroll_size" // Size specifies the number of documents Elasticsearch should return from each shard, per page +) diff --git a/correlator/config/default_config.go b/correlator/config/default_config.go new file mode 100644 index 0000000..3766abe --- /dev/null +++ b/correlator/config/default_config.go @@ -0,0 +1,100 @@ +package config + +import ( + "github.com/spf13/viper" + "time" +) + +// ReadCfg read correlator config file and apply default values +func ReadCfg(file string) error { + viper.SetConfigFile(file) + viper.AddConfigPath(".") + viper.AddConfigPath("/etc/armaconsole") + viper.AutomaticEnv() + viper.SetEnvPrefix("CORRELATOR") + + // Set defaults + viper.SetDefault(AggregatorUpdateWorkers, 1) + viper.SetDefault(AggregatorBulkCount, 100) + viper.SetDefault(AggregatorBulkFlushInterval, time.Minute) + + viper.SetDefault(AggregatorNormalizedWorkers, 1) + viper.SetDefault(AggregatorNormalizerBulkCount, 100) + viper.SetDefault(AggregatorNormalizedBulkFlushInterval, time.Minute) + viper.SetDefault(AggregatorIterationDuration, time.Second*30) + + viper.SetDefault(CorrelatorWorkers, 10) + viper.SetDefault(CorrelatorBulkCount, 100) + viper.SetDefault(CorrelatorFlushInterval, time.Minute) + + viper.SetDefault(Threads, 10) + + viper.SetDefault(ElasticAggregatedIndexName, "aggregated-2006.01.02") + viper.SetDefault(ElasticNormalizedIndexName, "arma-*") + viper.SetDefault(ElasticIgnoreSSLErrors, true) + + viper.SetDefault(ActionFirewallRuleIgnoreSSLErrors, true) + + viper.SetDefault(ApiPort, 5566) + + viper.SetDefault(ElasticRetryCount, 10) + viper.SetDefault(ElasticConnectionTimeout, time.Second*20) + viper.SetDefault(AggregatorWindow, time.Second*30) + viper.SetDefault(SyslogTag, "correlator") + + viper.SetDefault(LogFileName, "correlator.log") + viper.SetDefault(LogMaxSize, 100) + viper.SetDefault(LogMaxBkup, 10) + viper.SetDefault(LogMaxAge, 10) + viper.SetDefault(LogCompress, true) + viper.SetDefault(LogLevel, 2) + viper.SetDefault(LogFormatter, "json") + viper.SetDefault(LogForceColors, false) + + viper.SetDefault(ElasticLogEncodeQuery, false) + viper.SetDefault(ElasticLogQuery, false) + + viper.SetDefault(ConsoleIgnoreSSLErrors, true) + viper.SetDefault(AggregatedFields, []string{ + "event_severity", + "event_protocol", + "message", + "device_vendor", + "device_product", + "device_action", + "device_version", + "device_timezone", + "sign_id", + "sign_category", + "sign_subcategory", + "application", + "source_ip", + "source_host", + "source_port", + "source_mac", + "source_timezone", + "source_software", + "source_action", + "destination_ip", + "destination_mac", + "destination_timezone", + "destination_software", + "destination_action", + "destination_host", + "destination_port", + "destination_user", + "cs1", + "cs1Label", + "cs2", + "cs2Label", + "object_type", + }) + viper.SetDefault(ScrollSize, 1000) + + err := viper.ReadInConfig() + if err != nil { + return err + } + + return nil +} diff --git a/correlator/correlator/correlator.go b/correlator/correlator/correlator.go new file mode 100644 index 0000000..60fa049 --- /dev/null +++ b/correlator/correlator/correlator.go @@ -0,0 +1,33 @@ +package correlator + +import ( + "time" + + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + "iwarma.ru/console/correlator/rules" +) + +func Correlator(store *rules.RuleStore, stop chan interface{}) { + cl := log.WithField("part", "Correlator") + tick 
:= time.Tick(time.Second) + errs := make(chan error, viper.GetInt(config.Threads)+5) + + cl.Info("Starting Correlator") + + for { + select { + case <-tick: + cl.Debug("Iteration") + errs <- store.RunRulesSync() + case err := <-errs: + if err != nil { + cl.Errorf("Got error: %v", err) + } + case <-stop: + cl.Info("Stopping Correlator") + return + } + } +} diff --git a/correlator/correlator/correlator_test.go b/correlator/correlator/correlator_test.go new file mode 100644 index 0000000..0872502 --- /dev/null +++ b/correlator/correlator/correlator_test.go @@ -0,0 +1,643 @@ +package correlator + +import ( + "encoding/json" + "fmt" + "github.com/google/uuid" + "github.com/olivere/elastic/v7" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/events" + "iwarma.ru/console/correlator/rules" + "iwarma.ru/console/correlator/util" + "testing" + "time" +) + +// Check that one iteration of correlator works +func TestCorrelator(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Events count + N := 10 + + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), N, client) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:\"TestDevice\""), + Actions: actions, + }) + + rules.PerformCountClear() + rules.EventsProcessedClear() + + // Prepare correlator + stop := make(chan interface{}) + + // Run correlator + go func() { + Correlator(store, stop) + }() + + time.Sleep(time.Second * 2) + + stop <- "" + + // Let's check what we have + goodCount := 1 + if rules.GetPerformCount() != goodCount { + t.Errorf("Bad perform count. Expect %v, got %v", goodCount, rules.GetPerformCount()) + } + + if rules.GetEventsProcessed() != goodCount { + t.Errorf("Bad events processed count. 
Expect %v, got %v", goodCount, rules.GetEventsProcessed()) + } + + stat := store.GetStat() + + if stat == nil { + t.Errorf("No stat") + return + } + + if stat.IncidentCount != uint64(goodCount) { + t.Errorf("Got bad stat, expect incident count eq %v, got %v", goodCount, stat.IncidentCount) + } +} + +// Check that several iterations of correlator work +func TestCorrelatorCycle(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:\"TestDevice\""), + Actions: actions, + }) + + rules.PerformCountClear() + rules.EventsProcessedClear() + + // Prepare correlator + stop := make(chan interface{}) + + // Run correlator + go func() { + Correlator(store, stop) + }() + + // Iteration count and event count in every iteration + N := 10 + + for i := 0; i < N; i++ { + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), N, client) + if err != nil { + t.Errorf("%v", err) + return + } + } + + // Here we need to wait until all events were send to elastic and correlator process them + time.Sleep(time.Second * 4) + stop <- "" + time.Sleep(time.Second * 3) + + // Let's check what we have + // No checks for GetPerformCount or EventsProcessed from action + // Because we can't control how many times correlator will get the events + + stat := store.GetStat() + + if stat == nil { + t.Errorf("No stat") + return + } + + if stat.EventsCount != uint64(N*N) { + t.Errorf("Bad total events count, expect %v, got %v", N*N, stat.EventsCount) + } + + log.Infof("Incident count: %v Events count: %v", rules.GetPerformCount(), rules.GetEventsProcessed()) +} + +// Check how correlator work in cycle with multi rule selected +func TestCorrelatorCycleMultiRule(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: true, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:\"TestDevice\""), + Actions: actions, + }) + + rules.PerformCountClear() + rules.EventsProcessedClear() + + // Prepare correlator + stop := make(chan interface{}) + + // Run correlator + go func() { + Correlator(store, stop) + }() + + // Iteration count and event count in every iteration + N := 10 + + for i := 0; i < N; i++ { + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), N, client) + if err != nil { + 
t.Errorf("%v", err) + return + } + } + + // Here we need to wait until all events were send to elastic and correlator process them + time.Sleep(time.Second * 4) + stop <- "" + time.Sleep(time.Second * 3) + + // Let's check what we have + // No checks for GetPerformCount or EventsProcessed from action + // Because we can't control how many times correlator wil get events + stat := store.GetStat() + + if stat == nil { + t.Errorf("No stat") + return + } + + if stat.EventsCount != uint64(N*N) { + t.Errorf("Bad total events count, expect %v, got %v", N*N, stat.EventsCount) + } + + log.Infof("Incident count: %v Events count: %v", rules.GetPerformCount(), rules.GetEventsProcessed()) +} + +// Check how many incidents will we have +func TestCorrelatorIncidents(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:\"TestDevice\""), + Actions: actions, + }) + + rules.PerformCountClear() + rules.EventsProcessedClear() + + // Prepare correlator + stop := make(chan interface{}) + + // Run correlator + go func() { + Correlator(store, stop) + }() + + // Iteration count and event count in every iteration + N := 10 + EventCount := 3 + + for i := 0; i < EventCount; i++ { + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), N, client) + if err != nil { + t.Errorf("%v", err) + return + } + + time.Sleep(time.Second * 3) + } + + // Wait till correlator process all events + time.Sleep(time.Second * 3) + stop <- "" + time.Sleep(time.Second * 3) // Wait for stop + + // Let's check what we have + if rules.GetPerformCount() != EventCount { + t.Errorf("Bad incident count. 
Expect %v got %v", EventCount, rules.GetPerformCount()) + } + + // Because of Multi = false in rule, we must have events count == incident count + if rules.GetEventsProcessed() != EventCount { + t.Errorf("Bad events count, expect %v got %v", EventCount, rules.GetEventsProcessed()) + } +} + +func TestCorrelatorNoIndex(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:\"TestDevice\""), + Actions: actions, + }) + + rules.PerformCountClear() + rules.EventsProcessedClear() + + // Prepare correlator + stop := make(chan interface{}) + + // Run correlator + go func() { + Correlator(store, stop) + }() + + time.Sleep(time.Second * 5) + stop <- "" + time.Sleep(time.Second * 3) + + // Let's check what we have + stat := store.GetStat() + + if stat.IncidentCount != 0 { + t.Errorf("Bad incident count. Expect 0 got %v", stat.IncidentCount) + } + + if stat.EventsCount != 0 { + t.Errorf("Bad event count. Expect 0 got %v", stat.EventsCount) + } +} + +func TestCorrelatorActionError(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare store + store := rules.NewRuleStore(client) + + actions := make([]rules.Action, 1) + action := rules.TestAction{ + PerformError: true, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(rules.Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: rules.NewPredicate("", "device_vendor:\"TestDevice\""), + Actions: actions, + }) + + rules.PerformCountClear() + rules.EventsProcessedClear() + + // Prepare correlator + stop := make(chan interface{}) + + // Run correlator + go func() { + Correlator(store, stop) + }() + + // Iteration count and event count in every iteration + N := 10 + EventCount := 3 + + for i := 0; i < EventCount; i++ { + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), N, client) + if err != nil { + t.Errorf("%v", err) + return + } + + time.Sleep(time.Second * 3) + } + + // Wait till correlator process all events + //time.Sleep(time.Second*3) + stop <- "" + time.Sleep(time.Second * 3) // Wait for stop + + stat := store.GetStat() + if stat.IncidentCount != 0 { + t.Errorf("Bad incident count #1. Expect 0 got %v", stat.IncidentCount) + } + + goodEventCount := 1 + // Here we don't have events in store stat + // because we count hem if no error occurs + if stat.EventsCount != uint64(0) { + t.Errorf("Bad event count #1. 
Expect %v got %v", 0, stat.EventsCount) + } + + if rules.GetEventsProcessed() != goodEventCount { + t.Errorf("Bad event count #2. Expect %v got %v", goodEventCount, rules.GetEventsProcessed()) + } + + if rules.GetPerformCount() != goodEventCount { + t.Errorf("Bad incident count #2. Expect %v got %v", goodEventCount, rules.GetPerformCount()) + } + + log.Infof("%v", stat.String()) +} + +func TestRequest1(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + viper.Set(config.ElasticLogQuery, true) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName(), events.GetNormalizedIndexName()) + if err != nil { + t.Errorf("%v", err) + return + } + + // Let's prepare input + const total = 150 + const good = 100 + + var goodSid = [...]string{"act=2002910", "act=2001219", "act=2010937", "act=2009582", "act=2000538", "act=2010939", "act=2010935"} + goodCounter := 0 + + bulk := client.NewBulkRequest() + + for i := 0; i < total; i++ { + + srcMsg := "CEF:0|InfoWatch ARMA|ARMAIF|3.0|idspower|IDS power|5|rt=1604793739000 log_from=suricata deviceFacility=28775 gid=1 cs1=429496728 cs1Label=Signature cs2=12 cs2Label=line_number rev=1 classification=null priority=3 proto=TCP ip_src=10.20.30.50 port_src=80 ip_dst=10.20.30.1 port_dst=34568 %v" + if goodCounter < good { + srcMsg = fmt.Sprintf(srcMsg, goodSid[goodCounter%len(goodSid)]) + goodCounter += 1 + } else { + srcMsg = fmt.Sprintf(srcMsg, "act=6000") + } + + event := events.Event{ + events.AggregatedId: uuid.NewString(), + events.FirstEvent: time.Now().UTC(), + events.LastEvent: time.Now().UTC(), + events.EventCount: total, + events.Created: time.Now().UTC(), + events.Tags: make([]string, 0), + events.Hash: uuid.NewString(), + events.CeleryDone: false, + "timestamp": time.Now().UTC(), + "type": "ass", + "index": events.GetAggregatedIndexName(), + "event_timestamp": time.Now().UTC(), + "event_id": uuid.NewString(), + "event_severity": uint8(i % 10), + "event_src_msg": srcMsg, + "event_protocol": "TCP", + "device_vendor": "TestDevice", + "device_product": "TestProduct", + "device_version": "1.0", + "device_action": "Test", + "sign_ad": fmt.Sprintf("%v", i), + "sign_category": "Test", + "sign_subcategory": "Test", + "application": "Test", + "source_ip": "127.0.0.1", + "source_mac": "00:50:56:c0:00:08", + "source_host": "localhost", + "source_port": uint32(i), + "source_user": "root", + "destination_ip": "127.0.0.1", + "destination_host": "localhost", + "destination_port": uint32(i), + "destination_user": "user", + } + + bulk = bulk.Add(elastic.NewBulkIndexRequest().Index(events.GetAggregatedIndexName()).Id(event.GetString(events.EventID)).Doc(event)) + } + + bulkResponse, err := client.ExecuteBulk(bulk) + if err != nil { + t.Errorf("Can't index documents: %v", err) + return + } + + if bulkResponse.Errors { + t.Errorf("Got errors from bulk requset: %v", bulkResponse.Failed()) + return + } + + if len(bulkResponse.Indexed()) != total { + t.Errorf("Bad bulk index count. 
Got %v, expect %v", len(bulkResponse.Indexed()), total) + return + } + + // wait until elastic is ready + + time.Sleep(time.Second) + + // Now we need construct rule + ruleStr := `{ + "name":"Test", + "depth":"600s", + "id":"1", + "predicat":{ + "type":"query_string", + "field":"NULL", + "value":[ + "event_src_msg: \"act=2002910\" OR event_src_msg: \"act=2001219\" OR event_src_msg: \"act=2010937\" OR event_src_msg: \"act=2009582\" OR event_src_msg: \"act=2000538\" OR event_src_msg: \"act=2010939\" OR event_src_msg: \"act=2010935\"" + ] + }, + "actions":[ + { + "host":"localhost", + "port":"514", + "protocol":"udp", + "name":"test", + "template":"{{.event_src_msg}}", + "type":"syslog" + } + ] +}` + var rule rules.Rule + err = json.Unmarshal([]byte(ruleStr), &rule) + if err != nil { + t.Errorf("Can't read rule: %v", err) + return + } + + // Now, let's run it + eventList, err := rule.Do(client) + if err != nil { + t.Errorf("Can't do rule: %v", err) + return + } + + if len(*eventList) != good { + t.Errorf("Bad event's count. Expect %v, got %v", good, len(*eventList)) + } +} diff --git a/correlator/deb/skeleton/DEBIAN/.gitkeep b/correlator/deb/skeleton/DEBIAN/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/correlator/deb/skeleton/usr/lib/systemd/system/amccorrelator.service b/correlator/deb/skeleton/usr/lib/systemd/system/amccorrelator.service new file mode 100644 index 0000000..20ca93d --- /dev/null +++ b/correlator/deb/skeleton/usr/lib/systemd/system/amccorrelator.service @@ -0,0 +1,21 @@ +[Unit] +Description=ARMA management console correlator +Requires=elasticsearch.service +Requires=amcvector.service +After=network.target +After=elasticsearch.service +After=amcvector.service + +[Service] +User=armaconsole +Group=www-data +WorkingDirectory=/usr/local/armaconsole/app/amccorrelator/cmd/correlator +ExecStart=/usr/local/armaconsole/app/amccorrelator/cmd/correlator/correlator -config /etc/armaconsole/correlator.json +Restart=on-failure +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target + diff --git a/correlator/deb/skeleton/usr/local/armaconsole/app/amccorrelator/aggregator/.gitkeep b/correlator/deb/skeleton/usr/local/armaconsole/app/amccorrelator/aggregator/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/correlator/deb/skeleton/usr/local/armaconsole/app/amccorrelator/docs/.gitkeep b/correlator/deb/skeleton/usr/local/armaconsole/app/amccorrelator/docs/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/correlator/docker/sh/entrypoint.sh b/correlator/docker/sh/entrypoint.sh new file mode 100644 index 0000000..27f39d2 --- /dev/null +++ b/correlator/docker/sh/entrypoint.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env sh +elastic_url=${ELASTICSEARCH_URL:-http://elasticsearch:9200} +elastic_user=${ELASTIC_USER:-elastic} +elastic_password=${ELASTIC_PASSWORD:-changeme} +elastic_auth=$elastic_user:$elastic_password + +wait_loops=20 +wait_sleep=3 +count=0 +while ! [ $(curl --write-out %{http_code} --silent --output /dev/null -u $elastic_auth $elastic_url/_cat/health?h=st) = 200 ]; do + count=`expr $count + 1` + if [ $count -ge $wait_loops ]; then + echo "$(date) - still not ready, giving up" + exit 1 + fi + echo "$(date) - waiting to be ready" + sleep $wait_sleep +done + +uri_norm=$elastic_url/_component_template/normalized_component +uri_index=$elastic_url/_index_template/normalized + +if ! 
[ $(curl --write-out %{http_code} --silent --output /dev/null -u $elastic_auth "$uri_norm") = 200 ]; then + curl -X PUT -w "\n" -u $elastic_auth "$uri_norm" -H "Content-Type: application/json" -d"$(cat cmd/correlator/tests/mapping/normalized-component.json)" +else echo "Normalized component already mapped"; fi + +if ! [ $(curl --write-out %{http_code} --silent --output /dev/null -u $elastic_auth "$uri_index") = 200 ]; then + curl -X PUT -w "\n" -u $elastic_auth "$uri_index" -H "Content-Type: application/json" -d"$(cat cmd/correlator/tests/mapping/normalized-index-template.json)" +else echo "Index normalized already mapped"; fi + +exec "$@" diff --git a/correlator/es/es.go b/correlator/es/es.go new file mode 100644 index 0000000..874a314 --- /dev/null +++ b/correlator/es/es.go @@ -0,0 +1,308 @@ +package es + +import ( + "context" + "crypto/tls" + "encoding/base64" + "encoding/json" + "fmt" + "io" + "net/http" + "regexp" + "time" + + "github.com/olivere/elastic/v7" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" +) + +/* + For testing purpose https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html + Prepare docker pull docker.elastic.co/elasticsearch/elasticsearch:7.12.0 + Run docker run -p 9200:9200 -p 9300:9300 -e "discovery.type=single-node" -e ES_JAVA_OPTS="-Xms2g -Xmx2g" docker.elastic.co/elasticsearch/elasticsearch:7.12.0 +*/ + +// Elastic Struct to encapsulate elastic context +type Elastic struct { + log *log.Entry + client *elastic.Client + ctx context.Context +} + +func NewClient() (*elastic.Client, error) { + var elasticClient *elastic.Client + options := make([]elastic.ClientOptionFunc, 2) + options[0] = elastic.SetURL(viper.GetString(config.ElasticUrl)) + options[1] = elastic.SetBasicAuth( + viper.GetString(config.ElasticUsername), + viper.GetString(config.ElasticPassword)) + + if viper.GetBool(config.ElasticIgnoreSSLErrors) { + transport := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + httpClient := &http.Client{Transport: transport} + options = append(options, elastic.SetHttpClient(httpClient)) + log.Info("Disable SSL validation") + } + + gotHttps, err := regexp.MatchString("^https://", viper.GetString(config.ElasticUrl)) + if err != nil { + log.Errorf("Can't parse elasticsearch url: %v", err) + return nil, err + } + + if gotHttps { + log.Debug("Set HTTPS scheme") + options = append(options, elastic.SetScheme("https")) + } + + log.Info("Connecting to elastic") + for i := 0; i < viper.GetInt(config.ElasticRetryCount); i++ { + client, err := elastic.NewClient(options...) + if err == nil { + elasticClient = client + break + } + + log.Infof("No connection. Retry in %v. 
Attempt %v from %v", + viper.GetDuration(config.ElasticConnectionTimeout), + i+1, + viper.GetInt(config.ElasticRetryCount)) + + time.Sleep(viper.GetDuration(config.ElasticConnectionTimeout)) + } // End elastic connection + if elasticClient == nil { + return nil, fmt.Errorf("can't connect to elasticsearch") + } + return elasticClient, nil +} + +// NewElastic Create new Elastic and connect it to server +// This function can be slow due to elastic connection timeout +func NewElastic() (*Elastic, error) { + result := &Elastic{ + log: log.WithField("part", "elastic"), + ctx: context.Background(), + } + client, err := NewClient() + if err != nil { + return nil, fmt.Errorf("can't connect to elasticsearch") + } + + result.client = client + + result.log.Debug("Elastic client is ready") + return result, nil +} + +// CheckAndCreateIndex Check that index exist and create if not +func (el *Elastic) CheckAndCreateIndex(index ...string) error { + el.log.Debugf("Checking index %v", index) + mapping := `{ + "settings":{ + "number_of_shards":1, + "number_of_replicas":0 + }, + "mappings":{ + "properties": { + "sign_name": { + "type": "text", + "fields": { + "keyword": {"type": "keyword"} + } + }, + "source_ip": { + "type": "text", + "fields": { + "keyword": {"type": "keyword"} + } + }, + "destination_ip": { + "type": "text", + "fields": { + "keyword": {"type": "keyword"} + } + } + } +} + } +}` + for _, cur := range index { + exists, err := el.client.IndexExists(cur).Do(el.ctx) + if err != nil { + el.log.Errorf("%+v", err) + return err + } + + if !exists { + createIndex, err := el.client.CreateIndex(cur).BodyString(mapping).ErrorTrace(true).Do(el.ctx) + if err != nil { + el.log.Errorf("%+v", err) + return err + } + + if !createIndex.Acknowledged { + return fmt.Errorf("can't create index %v", cur) + } + } + } + + return nil +} + +// Query Run elasticsearch query +func (el *Elastic) Query(index string, query elastic.Query) (chan *elastic.SearchHit, chan error) { + hits := make(chan *elastic.SearchHit) + errs := make(chan error, 1) + scrollSize := viper.GetInt(config.ScrollSize) + go func() { + defer close(hits) + defer close(errs) + + scroll := el.client.Scroll(index).Query(query) + scroll.Size(scrollSize) + + // We need to call scroll.Do until we got an EOF + for { + res, err := scroll.Do(el.ctx) + if err == io.EOF { + break + } + + if err != nil { + el.log.Errorf("Got error from scroll: %v", err) + errs <- err + break + } + + for _, hit := range res.Hits.Hits { + select { + case hits <- hit: + case <-el.ctx.Done(): + { + errs <- el.ctx.Err() + break + } + } + } + } + + // Need to clear scroll + err := scroll.Clear(el.ctx) + if err != nil { + el.log.Errorf("%v", err) + errs <- err + } + }() + + return hits, errs +} + +func (el *Elastic) DeleteIndex(index ...string) error { + res, err := el.client.DeleteIndex(index...).Do(el.ctx) + if err != nil { + el.log.Errorf("Can't delete index %v: %v", index, err) + return err + } + + if !res.Acknowledged { + el.log.Errorf("Can't delete index %v", index) + return fmt.Errorf("can't delete index") + } + + return nil +} + +func (el *Elastic) CheckIndex(index string) (bool, error) { + res, err := el.client.IndexExists(index).Do(el.ctx) + if err != nil { + el.log.Errorf("Can't check index %v exist: %v", index, err) + return false, err + } + + return res, nil +} + +func (el *Elastic) NewBulkRequest() *elastic.BulkService { + return el.client.Bulk() +} + +func (el *Elastic) ExecuteBulk(bulk *elastic.BulkService) (*elastic.BulkResponse, error) { + return bulk.Do(el.ctx) +} + 
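+// Illustrative usage sketch (not part of this change): draining all hits that
+// Query produces for an index, then checking the error channel:
+//
+//	hits, errs := el.Query("test-normalized", elastic.NewMatchAllQuery())
+//	for hit := range hits {
+//		_ = hit // process *elastic.SearchHit
+//	}
+//	for err := range errs {
+//		log.Errorf("query failed: %v", err)
+//	}
+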
+// NewBulkProcessor Create a bulk processor with the given worker count,
+// actions-per-bulk limit, flush interval and stats flag
+func (el *Elastic) NewBulkProcessor(workers int, actions int, flush time.Duration, stats bool) (*elastic.BulkProcessor, error) {
+ processor := el.client.BulkProcessor()
+ processor.Workers(workers)
+ processor.BulkActions(actions)
+ processor.FlushInterval(flush)
+ processor.Stats(stats)
+
+ return processor.Do(el.ctx)
+}
+
+// DebugQuery Print query to log
+func (el *Elastic) DebugQuery(index string, query elastic.Query) {
+ if viper.GetBool(config.ElasticLogQuery) {
+ go func() {
+ source, err := query.Source()
+ if err != nil {
+ el.log.Errorf("Got error while getting source: %v", err)
+ return
+ }
+
+ bytes, err := json.Marshal(source)
+ if err != nil {
+ el.log.Errorf("Got error while query marshal: %v", err)
+ el.log.Debugf("Query origin: %v", source)
+ return
+ }
+
+ if viper.GetBool(config.ElasticLogEncodeQuery) {
+ encoded := base64.StdEncoding.EncodeToString(bytes)
+ clog := el.log.WithFields(log.Fields{"query": encoded, "index": index})
+ clog.Info("Query ready")
+ } else {
+ clog := el.log.WithFields(log.Fields{"query": string(bytes), "index": index})
+ clog.Info("Query ready")
+ }
+ }()
+ }
+}
+
+func (el *Elastic) RefreshIndex(index ...string) error {
+ res, err := el.client.Refresh(index...).Do(el.ctx)
+ if err != nil {
+ el.log.Errorf("Can't refresh index: %v", err)
+ return err
+ }
+
+ var hasErrors bool
+
+ if res.Shards.Failed > 0 {
+ hasErrors = true
+ for _, cur := range res.Shards.Failures {
+ if cur != nil {
+ reason, err := json.Marshal(cur.Reason)
+ if err != nil {
+ el.log.Errorf("Can't marshal index refresh reason: %v", err)
+ el.log.Debugf("Reason: %v", cur.Reason)
+ continue
+ }
+ el.log.Errorf("Error refreshing index %v. Reason %v, status %v", cur.Index, string(reason), cur.Status)
+ }
+ }
+ }
+
+ if hasErrors {
+ return fmt.Errorf("got errors while refreshing index")
+ }
+
+ return nil
+}
+
+func (el *Elastic) CountDocuments(index ...string) (int64, error) {
+ return el.client.Count(index...).Do(el.ctx)
+}
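+
+// Illustrative sketch (not part of this change): a bulk processor that
+// flushes every 100 actions or once a minute, fed with index requests:
+//
+//	proc, err := el.NewBulkProcessor(2, 100, time.Minute, false)
+//	if err != nil {
+//		return err
+//	}
+//	proc.Add(elastic.NewBulkIndexRequest().Index("test-normalized").Doc(doc))
+//	_ = proc.Flush()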
diff --git a/correlator/es/es_test.go b/correlator/es/es_test.go
new file mode 100644
index 0000000..c03fa8f
--- /dev/null
+++ b/correlator/es/es_test.go
@@ -0,0 +1,394 @@
+package es
+
+import (
+ "encoding/json"
+ "fmt"
+ "github.com/olivere/elastic/v7"
+ log "github.com/sirupsen/logrus"
+ "github.com/spf13/viper"
+ "io"
+ "iwarma.ru/console/correlator/config"
+ "testing"
+ "time"
+)
+
+func prepareElastic() {
+ viper.Set(config.ElasticUrl, "http://elasticsearch:9200")
+ viper.Set(config.ElasticRetryCount, 1)
+ viper.Set(config.ElasticUsername, "elastic")
+ viper.Set(config.ElasticPassword, "changeme")
+ viper.Set(config.Verbose, true)
+ viper.Set(config.ElasticAggregatedIndexName, "test-aggregated")
+ viper.Set(config.ElasticNormalizedIndexName, "test-normalized")
+ viper.Set(config.AggregatorIterationDuration, time.Second*2)
+ viper.Set(config.Threads, 10)
+}
+
+func SetupTest(t *testing.T) {
+ prepareElastic()
+}
+
+func TearDownTest(t *testing.T) {
+ client, err := NewElastic()
+ if err != nil {
+ t.Errorf("%v", err)
+ return
+ }
+ err = clearIndex("*", client)
+ if err != nil {
+ t.Errorf("%v", err)
+ }
+ del, err := client.client.DeleteIndex("*").Do(client.ctx)
+ if err != nil {
+ t.Errorf("%v", err)
+ }
+ if !del.Acknowledged {
+ t.Errorf("Got bad response. Indexes weren't deleted")
+ }
+}
+
+func clearIndex(index string, el *Elastic) error {
+ exists, err := el.client.IndexExists(index).Do(el.ctx)
+ if err != nil {
+ log.Errorf("%+v", err)
+ return err
+ }
+
+ if exists {
+ deleted, err := el.client.DeleteIndex(index).Do(el.ctx)
+ if err != nil {
+ log.Errorf("%+v", err)
+ return err
+ }
+
+ if !deleted.Acknowledged {
+ log.Errorf("Index %v wasn't deleted", index)
+ return fmt.Errorf("index %v wasn't deleted", index)
+ }
+ }
+
+ return nil
+}
+
+// Check that we can connect to local elastic
+func TestNewElastic(t *testing.T) {
+ SetupTest(t)
+ defer TearDownTest(t)
+
+ _, err := NewElastic()
+
+ if err != nil {
+ t.Errorf("%v", err)
+ }
+}
+
+// Check that we get an error if we can't connect to elastic
+func TestNewElasticError(t *testing.T) {
+ SetupTest(t)
+ defer TearDownTest(t)
+ viper.Set(config.ElasticUrl, "http://example.com")
+
+ _, err := NewElastic()
+
+ if err == nil {
+ t.Errorf("Expected a connection error, got nil")
+ }
+ viper.Set(config.ElasticUrl, "http://elasticsearch:9200")
+}
+
+// Check that we can create an index
+func TestCheckAndCreateIndexNoIndex(t *testing.T) {
+ SetupTest(t)
+ defer TearDownTest(t)
+
+ client, err := NewElastic()
+ if err != nil {
+ t.Errorf("%v", err)
+ return
+ }
+
+ indexName := "my_test_index"
+
+ // Check that we don't have such an index
+ exist, err := client.client.IndexExists(indexName).Do(client.ctx)
+ if err != nil {
+ t.Errorf("%v", err)
+ }
+
+ if exist {
+ t.Errorf("Index %v already exists", indexName)
+ return
+ }
+
+ err = client.CheckAndCreateIndex(indexName)
+ if err != nil {
+ t.Errorf("%v", err)
+ return
+ }
+ mapp, err := client.client.GetMapping().Index(indexName).Do(client.ctx)
+ if err != nil {
+ t.Errorf("%v", err)
+ return
+ }
+ jsonMap, err := json.Marshal(mapp)
+ if err != nil {
+ t.Errorf("%v", err)
+ return
+ }
+ mappingStr := "{\"my_test_index\":{\"mappings\":{\"properties\":{\"destination_ip\":{\"fields\":{\"keyword\":{\"type\":\"keyword\"}},\"type\":\"text\"},\"sign_name\":{\"fields\":{\"keyword\":{\"type\":\"keyword\"}},\"type\":\"text\"},\"source_ip\":{\"fields\":{\"keyword\":{\"type\":\"keyword\"}},\"type\":\"text\"}}}}}"
+ if string(jsonMap) != mappingStr {
+ t.Errorf("Bad index mapping. Expected %v. 
Have %v", mappingStr, string(jsonMap)) + return + } + // Check that we have it now + exist, err = client.client.IndexExists(indexName).Do(client.ctx) + if err != nil { + t.Errorf("%v", err) + } + + if !exist { + t.Errorf("Index %v wasn't created", indexName) + return + } + +} + +// Check that if we call CheckAndCreateIndex on existing index it isn't change +func TestCheckAndCreateIndexHaveIndex(t *testing.T) { + SetupTest(t) + defer TearDownTest(t) + + client, err := NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + indexName := "my_test_index" + + // Now we need to check if we already have such index + exist, err := client.client.IndexExists(indexName).Do(client.ctx) + if err != nil { + t.Errorf("%v", err) + return + } + + if !exist { + ret, err := client.client.CreateIndex(indexName).Do(client.ctx) + if err != nil { + t.Errorf("%v", err) + return + } + + if !ret.Acknowledged { + t.Errorf("Index %v wasn't created", indexName) + return + } + } + + // Now, check what function do + err = client.CheckAndCreateIndex(indexName) + if err != nil { + t.Errorf("%v", err) + } + + // Check that index exist + exist, err = client.client.IndexExists(indexName).Do(client.ctx) + if err != nil { + t.Errorf("%v", err) + return + } + + if !exist { + t.Errorf("Index %v doesn't exist", indexName) + } + +} + +func TestElasticQuery(t *testing.T) { + SetupTest(t) + defer TearDownTest(t) + + client, err := NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + indexName := "my_test_index" + + err = clearIndex(indexName, client) + if err != nil { + t.Errorf("%v", err) + return + } + + err = client.CheckAndCreateIndex(indexName) + if err != nil { + t.Errorf("%v", err) + return + } + + // Add some documents + type testStruct struct { + Id int + Value string + } + + N := 10 + + bulk := client.client.Bulk() + + for i := 0; i < N; i++ { + id := fmt.Sprintf("%v", i) + bulk = bulk.Add(elastic.NewBulkIndexRequest().Index(indexName).Id(id).Doc(testStruct{Id: i, Value: id})) + } + + bulkResponse, err := bulk.Do(client.ctx) + if err != nil { + t.Errorf("%v", err) + return + } + + if bulkResponse.Errors { + t.Errorf("%v", bulkResponse.Failed()) + return + } + + if len(bulkResponse.Indexed()) != N { + t.Errorf("Not all documents was added. Expect %v, got %v", N, len(bulkResponse.Indexed())) + return + } + + // Need to wait until elastic is ready + time.Sleep(time.Second) + + // Now, let's query them back + results, errs := client.Query(indexName, elastic.NewMatchAllQuery()) + + // Count results + resultCount := 0 + for range results { + resultCount++ + } + + // Count errors + errorCount := 0 + for err = range errs { + if err != nil { + log.Errorf("%v", err) + errorCount++ + } + } + + if resultCount != N { + t.Errorf("Got bad result count. 
Expect %v, got %v", N, resultCount) + } + + if errorCount != 0 { + t.Errorf("Got some errors in scroll") + } + +} + +func TestElasticQueryScrollLimitParam(t *testing.T) { + SetupTest(t) + defer TearDownTest(t) + + client, err := NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + indexName := "my_test_index" + + err = clearIndex(indexName, client) + if err != nil { + t.Errorf("%v", err) + return + } + + err = client.CheckAndCreateIndex(indexName) + if err != nil { + t.Errorf("%v", err) + return + } + + // Add some documents + type testStruct struct { + Id int + Value string + } + + N := 200 + + bulk := client.client.Bulk() + + for i := 0; i < N; i++ { + id := fmt.Sprintf("%v", i) + bulk = bulk.Add(elastic.NewBulkIndexRequest().Index(indexName).Id(id).Doc(testStruct{Id: i, Value: id})) + } + + bulkResponse, err := bulk.Do(client.ctx) + if err != nil { + t.Errorf("%v", err) + return + } + + if bulkResponse.Errors { + t.Errorf("%v", bulkResponse.Failed()) + return + } + + if len(bulkResponse.Indexed()) != N { + t.Errorf("Not all documents was added. Expect %v, got %v", N, len(bulkResponse.Indexed())) + return + } + + // Need to wait until elastic is ready + time.Sleep(time.Second) + + // Prepare config + viper.Set(config.ScrollSize, 10) + // Get documents from query + query := elastic.NewMatchAllQuery() + hits := make(chan *elastic.SearchHit) + errs := make(chan error, 1) + scrollSize := viper.GetInt(config.ScrollSize) + go func() { + defer close(hits) + defer close(errs) + scroll := client.client.Scroll(indexName).Query(query) + scroll.Size(scrollSize) + + for { + res, err := scroll.Do(client.ctx) + if err == io.EOF { + break + } + + if len(res.Hits.Hits) != 10 { + t.Errorf("Scroll size is not 10") + return + } + + if err != nil { + client.log.Errorf("Got error from scroll: %v", err) + errs <- err + break + } + + for _, hit := range res.Hits.Hits { + select { + case hits <- hit: + case <-client.ctx.Done(): + { + errs <- client.ctx.Err() + break + } + } + } + } + }() + +} diff --git a/correlator/events/events.go b/correlator/events/events.go new file mode 100644 index 0000000..a013d7b --- /dev/null +++ b/correlator/events/events.go @@ -0,0 +1,175 @@ +package events + +import ( + "crypto/sha512" + "encoding/hex" + "encoding/json" + "fmt" + "github.com/olivere/elastic/v7" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + "sync" +) + +type Event map[string]interface{} + +// AddTag Add tags to event +func (event *Event) AddTag(ruleId string) []string { + cl := log.WithField("func", "AddTag") + tags := event.GetValue(Tags) + var result []string + if tags == nil { + cl.Debug("Received nil tags. 
+
+// AddTag Append ruleId to the event's tag list and return the updated list
+func (event *Event) AddTag(ruleId string) []string {
+ cl := log.WithField("func", "AddTag")
+ tags := event.GetValue(Tags)
+ var result []string
+ if tags == nil {
+ cl.Debug("Received nil tags. Creating an empty slice")
+ tags = make([]string, 0)
+ }
+ _, ok := tags.([]string)
+ if !ok {
+ // Tags read back from elasticsearch arrive as []interface{}
+ items := make([]string, 0)
+ for _, tag := range tags.([]interface{}) {
+ items = append(items, tag.(string))
+ }
+ result = append(items, ruleId)
+ event.SetValue(Tags, result)
+ } else {
+ result = append(tags.([]string), ruleId)
+ event.SetValue(Tags, result)
+ }
+ cl.Debugf("Return event tags -> %v", result)
+ return result
+}
+
+func (event *Event) GetValue(name string) interface{} {
+ val, ok := (*event)[name]
+
+ if ok {
+ return val
+ }
+
+ return nil
+}
+
+func (event *Event) GetString(name string) string {
+ val, ok := (*event)[name].(string)
+
+ if ok {
+ return val
+ }
+
+ return ""
+}
+
+func (event *Event) GetInt(name string) int {
+ val, ok := (*event)[name].(int)
+
+ if ok {
+ return val
+ }
+
+ return 0
+}
+
+func (event *Event) SetValue(key string, value interface{}) {
+ (*event)[key] = value
+}
+
+// Frequently used fields are placed in constants
+const (
+ Hash = "event_hash"
+ FirstEvent = "event_first"
+ LastEvent = "event_last"
+ EventCount = "event_count"
+ Created = "@created"
+ Tags = "rule_tags"
+ AggregatedId = "aggregated_id"
+ CeleryDone = "celery_done"
+ EventID = "event_id"
+)
+
+// Hash Calculate the hash used to check that two normalized events belong
+// to the same aggregated event
+func (event *Event) Hash() string {
+ cl := log.WithField("func", "Hash")
+ fieldsList := viper.GetStringSlice(config.AggregatedFields)
+ var fieldString string
+ for _, item := range fieldsList {
+ val := event.GetValue(item)
+ if val != nil {
+ fieldString += fmt.Sprintf("%v", val)
+ } else {
+ cl.Tracef("Bad field name %v", item)
+ }
+ }
+ if len(fieldString) == 0 {
+ cl.Warn("Fields list is empty")
+ }
+ hash := sha512.Sum512_256([]byte(fieldString))
+ return hex.EncodeToString(hash[:])
+}
+
+func (event *Event) HashToString() string {
+ return event.GetString(Hash)
+}
+
+// ParseEvents Read query results from Elastic.Query and convert them to Event
+func ParseEvents(raw chan *elastic.SearchHit, inputErrors chan error, threadCount int) (chan *Event, chan error) {
+
+ cl := log.WithField("func", "ParseEvents")
+ cl.Trace("Start")
+
+ // We need at least one thread
+ if threadCount <= 0 {
+ threadCount = 1
+ }
+
+ results := make(chan *Event, threadCount)
+ errs := make(chan error, 1)
+
+ var wg sync.WaitGroup
+ wg.Add(threadCount)
+
+ for i := 0; i < threadCount; i++ {
+ go func() {
+ defer wg.Done()
+
+ // Process all hits while the raw channel is open
+ for hit := range raw {
+ if hit == nil {
+ continue
+ }
+
+ var item interface{}
+ err := json.Unmarshal(hit.Source, &item)
+ if err != nil {
+ cl.Errorf("Can't read normalized event: %v", err)
+ cl.Debugf("Input value: %v", hit)
+ errs <- err
+ continue
+ }
+ // The type assertion is only safe after Unmarshal succeeded
+ event := Event(item.(map[string]interface{}))
+
+ // Add information about event's index
+ event["index"] = hit.Index
+
+ results <- &event
+ }
+
+ // Send up error if we have one
+ if err := <-inputErrors; err != nil {
+ errs <- err
+ }
+ }()
+ }
+
+ go func() {
+ wg.Wait()
+ close(results)
+ close(errs)
+ cl.Trace("Finish")
+ }()
+
+ return results, errs
+}
diff --git a/correlator/events/events_test.go b/correlator/events/events_test.go
new file mode 100644
index 0000000..5781a20
--- /dev/null
+++ b/correlator/events/events_test.go
@@ -0,0 +1,288 @@
+package events
+
+import (
+ "fmt"
+ "github.com/olivere/elastic/v7"
+ log "github.com/sirupsen/logrus"
+ "iwarma.ru/console/correlator/es"
+ "iwarma.ru/console/correlator/util"
+ 
"reflect" + "testing" +) + +func TestParseNormalizedEvents(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + index := "my_test_index" + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, index) + if err != nil { + t.Errorf("%v", err) + return + } + + // Add documents + goodCount := 10 + err = FillNormalizedEvents(index, goodCount, client) + if err != nil { + t.Errorf("%v", err) + return + } + + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(index, query) + events, errs2 := ParseEvents(raw, errs1, 1) + + // Now let's count + eventCount := 0 + for range events { + eventCount++ + } + + errCount := 0 + for err = range errs2 { + if err != nil { + log.Errorf("Got error: %v", err) + errCount++ + } + } + + if eventCount != goodCount { + t.Errorf("Bad event count. Expect %v, got %v", goodCount, eventCount) + } + + if errCount > 0 { + t.Error("Got some errors") + } + +} + +func TestParseAggregatedEvents(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + index := "my_test_index" + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, index) + if err != nil { + t.Errorf("%v", err) + return + } + + // Add documents + goodCount := 10 + err = FillAggregatedEvents(index, goodCount, client) + if err != nil { + t.Errorf("%v", err) + return + } + + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(index, query) + events, errs2 := ParseEvents(raw, errs1, 1) + + // Now let's count + eventCount := 0 + for range events { + eventCount++ + } + + errCount := 0 + for err = range errs2 { + if err != nil { + log.Errorf("Got error: %v", err) + errCount++ + } + } + + if eventCount != goodCount { + t.Errorf("Bad event count. Expect %v, got %v", goodCount, eventCount) + } + + if errCount > 0 { + t.Error("Got some errors") + } + +} + +func TestParseNormalizedEventsMassive(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + index := "my_test_index" + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, index) + if err != nil { + t.Errorf("%v", err) + return + } + + // Add documents + goodCount := 50000 + err = FillNormalizedEvents(index, goodCount, client) + if err != nil { + t.Errorf("%v", err) + return + } + + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(index, query) + events, errs2 := ParseEvents(raw, errs1, 20) + + // Now let's count + eventCount := 0 + for range events { + eventCount++ + } + + errCount := 0 + for err = range errs2 { + if err != nil { + log.Errorf("Got error: %v", err) + errCount++ + } + } + + if eventCount != goodCount { + t.Errorf("Bad event count. Expect %v, got %v", goodCount, eventCount) + } + + if errCount > 0 { + t.Error("Got some errors") + } + +} + +func TestAddTag(t *testing.T) { + events, err := PrepareEvents(2) + if err != nil { + t.Errorf("%v", err) + } + for event := range events { + // In new events tags is nil -> add tags will increment tags to nill + tags := event.AddTag("1") + exampleTags := []string{"1"} + if !reflect.DeepEqual(tags, exampleTags) { + t.Errorf("Bad event tags! Expect %v , have %v", exampleTags, tags) + } + tags = event.AddTag("2") + exampleDoubleTags := []string{"1", "2"} + if !reflect.DeepEqual(tags, exampleDoubleTags) { + t.Errorf("Bad event tags! 
Expect %v , have %v", exampleDoubleTags, tags) + } + + } + +} + +func TestGetValue(t *testing.T) { + events, err := PrepareEvents(1) + if err != nil { + t.Errorf("%v", err) + } + for event := range events { + typeValue, ok := event.GetValue("type").(string) + if !ok { + t.Errorf("Can`t get event value") + } + severityValue, ok := event.GetValue("event_severity").(float64) + if !ok { + t.Errorf("Can`t get event value") + } + if typeValue != "test" { + t.Errorf("Bad event value! Expect %v , have %v", "test", typeValue) + } + if severityValue != 0 { + t.Errorf("Bad event value! Expect %v , have %v", 0, severityValue) + } + + } + +} + +func TestGetString(t *testing.T) { + events, err := PrepareEvents(1) + if err != nil { + t.Errorf("%v", err) + } + for event := range events { + hostValue := event.GetString("source_host") + if hostValue != "localhost" { + t.Errorf("Bad event value! Expect %v , have %v", "localhost", hostValue) + } + } + +} + +func TestGetInt(t *testing.T) { + events, err := PrepareEvents(1) + if err != nil { + t.Errorf("%v", err) + } + for event := range events { + portValue := event.GetInt("destination_port") + if portValue != 0 { + t.Errorf("Bad event value! Expect %v , have %v", 0, portValue) + } + } + +} + +func PrepareEvents(eventsNeed int) (chan *Event, error) { + util.PrepareElastic() + index := "test-aggregated" + client, err := es.NewElastic() + if err != nil { + + return nil, err + } + err = ClearIndex(client, GetAggregatedIndexName()) + if err != nil { + return nil, err + } + + err = FillAggregatedEvents(GetAggregatedIndexName(), 1, client) + if err != nil { + return nil, err + } + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(index, query) + events, error2 := ParseEvents(raw, errs1, eventsNeed) + + errorsCount := 0 + for err = range error2 { + if err != nil { + fmt.Printf("Got error: %v", err) + errorsCount += 1 + } + } + if errorsCount != 0 { + return nil, fmt.Errorf("Can`t get events from elastic -> %v ", error2) + } else { + return events, nil + } + +} diff --git a/correlator/events/stat.go b/correlator/events/stat.go new file mode 100644 index 0000000..2e55e21 --- /dev/null +++ b/correlator/events/stat.go @@ -0,0 +1,30 @@ +package events + +import ( + "encoding/json" + "fmt" + "iwarma.ru/console/correlator/stat" +) + +// Stat Aggregator statistic +type Stat struct { + EventsProcessed uint64 `json:"events_processed"` + EventsAggregated uint64 `json:"events_aggregated"` + AverageIterationTime stat.AvgTime `json:"average_iteration"` +} + +func (stat *Stat) Update(newStat Stat) { + stat.EventsProcessed += newStat.EventsProcessed + stat.EventsAggregated += newStat.EventsAggregated + + stat.AverageIterationTime.AddStat(&newStat.AverageIterationTime) +} + +func (stat Stat) String() string { + bytes, err := json.Marshal(stat) + if err != nil { + return fmt.Sprintf("Can't marshall stat: %v", err) + } + + return string(bytes) +} diff --git a/correlator/events/stat_test.go b/correlator/events/stat_test.go new file mode 100644 index 0000000..432aa39 --- /dev/null +++ b/correlator/events/stat_test.go @@ -0,0 +1,36 @@ +package events + +import ( + "testing" + "time" +) + +func TestStat_String(t *testing.T) { + var stat Stat + + goodStr := `{"events_processed":0,"events_aggregated":0,"average_iteration":{"value":"0s"}}` + + if stat.String() != goodStr { + t.Errorf("Can't serialize stat. 
Expect %v, got %v", goodStr, stat.String()) + } +} + +func TestStat_Update(t *testing.T) { + var s1, s2 Stat + + s1.EventsProcessed = 10 + s1.EventsAggregated = 10 + s1.AverageIterationTime.Add(time.Second) + + s2.EventsProcessed = 10 + s2.EventsAggregated = 10 + s2.AverageIterationTime.Add(time.Second * 3) + + s1.Update(s2) + + goodStr := `{"events_processed":20,"events_aggregated":20,"average_iteration":{"value":"400ms"}}` + + if s1.String() != goodStr { + t.Errorf("Can't serialize stat. Expect %v, got %v", goodStr, s1.String()) + } +} diff --git a/correlator/events/store.go b/correlator/events/store.go new file mode 100644 index 0000000..b67d8bd --- /dev/null +++ b/correlator/events/store.go @@ -0,0 +1,214 @@ +package events + +import ( + "github.com/olivere/elastic/v7" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + "iwarma.ru/console/correlator/es" + "strconv" + "sync" + "time" +) + +type UpdateType byte + +// Type of aggregated record in store +const ( + typeNone UpdateType = iota + typeNew + typeUpdate +) + +// AggregatedEventItem Aggregated event with mark for update purpose +type AggregatedEventItem struct { + AggregatedEvent Event + Updated UpdateType +} + +// AggregatedEventStore Sync store for aggregated events +type AggregatedEventStore struct { + mapMutex sync.Mutex + eventsMap map[string]*AggregatedEventItem + aggregatedProcessor *elastic.BulkProcessor + normalizedProcessor *elastic.BulkProcessor + aggregatedWindow TimeWindow + client *es.Elastic + stat Stat + cl *log.Entry +} + +func (store *AggregatedEventStore) GetStat() *Stat { + store.mapMutex.Lock() + defer store.mapMutex.Unlock() + + return &store.stat +} + +func (store *AggregatedEventStore) GetClient() *es.Elastic { + return store.client +} + +// NewAggregatedEventStore Create new AggregatedEventStore +func NewAggregatedEventStore(el *es.Elastic, window TimeWindow) *AggregatedEventStore { + result := new(AggregatedEventStore) + result.eventsMap = make(map[string]*AggregatedEventItem) + result.cl = log.WithField("part", "AggregatedEventStore") + + var err error + + // Prepare aggregated events bulk processor + result.aggregatedProcessor, err = el.NewBulkProcessor( + viper.GetInt(config.AggregatorUpdateWorkers), + viper.GetInt(config.AggregatorBulkCount), + viper.GetDuration(config.AggregatorBulkFlushInterval), + viper.GetBool(config.Verbose)) + if err != nil { + result.cl.Errorf("Can't start aggregated processor: %v", err) + panic(err) + } + + result.normalizedProcessor, err = el.NewBulkProcessor( + viper.GetInt(config.AggregatorNormalizedWorkers), + viper.GetInt(config.AggregatorNormalizerBulkCount), + viper.GetDuration(config.AggregatorNormalizedBulkFlushInterval), + viper.GetBool(config.Verbose)) + if err != nil { + result.cl.Errorf("Can't start normalizer processor: %v", err) + panic(err) + } + + result.aggregatedWindow = window + result.client = el + + return result +} + +// UpdateWindow Update aggregated store window and clean internal aggregated map +// Must call this after all bulk update ready, or you loose events +func (store *AggregatedEventStore) UpdateWindow(window TimeWindow) { + // We need to send all updates before we can change window + store.SendUpdateBulk() + + // Now we can update window + store.mapMutex.Lock() + defer store.mapMutex.Unlock() + + store.aggregatedWindow = window + store.eventsMap = make(map[string]*AggregatedEventItem) +} + +// GetWindow Get current aggregation window +func (store *AggregatedEventStore) GetWindow() TimeWindow 
+
+// MapSize Get the number of aggregated events in the store
+func (store *AggregatedEventStore) MapSize() int {
+ store.mapMutex.Lock()
+ defer store.mapMutex.Unlock()
+
+ return len(store.eventsMap)
+}
+
+// TotalEventCount Get the total number of events processed by the store
+func (store *AggregatedEventStore) TotalEventCount() uint64 {
+ store.mapMutex.Lock()
+ defer store.mapMutex.Unlock()
+
+ return store.stat.EventsProcessed
+}
+
+// AddEvents Read the input channel and aggregate normalized events
+func (store *AggregatedEventStore) AddEvents(input chan *Event) {
+ for event := range input {
+ startIter := time.Now()
+
+ hash := event.Hash()
+
+ store.mapMutex.Lock()
+ store.stat.EventsProcessed++
+
+ // Both branches need the event timestamp; fall back to now if it can't be parsed
+ eventTimestamp, err := time.Parse(time.RFC822, event.GetString("event_timestamp"))
+ if err != nil {
+ eventTimestamp = time.Now()
+ }
+
+ item, ok := store.eventsMap[hash]
+ if ok {
+ // Update the existing record
+ item.AggregatedEvent[EventCount] = item.AggregatedEvent.GetInt(EventCount) + 1
+ item.AggregatedEvent[LastEvent] = eventTimestamp
+ if item.Updated != typeNew {
+ item.Updated = typeUpdate
+ }
+ } else {
+ // Create a new record
+ event.SetValue(Hash, hash)
+ event.SetValue(FirstEvent, eventTimestamp)
+ event.SetValue(LastEvent, eventTimestamp)
+ event.SetValue(EventCount, 1)
+ event.SetValue(Created, time.Now().UTC())
+ item = &AggregatedEventItem{
+ AggregatedEvent: *event,
+ Updated: typeNew,
+ }
+ store.eventsMap[hash] = item
+ store.stat.EventsAggregated++
+ }
+ store.stat.AverageIterationTime.Add(time.Since(startIter))
+ store.mapMutex.Unlock()
+
+ // Create a bulk request to delete the processed normalized event
+ store.normalizedProcessor.Add(elastic.NewBulkDeleteRequest().
+ Index(event.GetString("index")).Id(event.GetString(EventID))) + } +} + +// SendUpdateBulk Send aggregated events to server +func (store *AggregatedEventStore) SendUpdateBulk() { + windowHash := strconv.FormatUint(uint64(store.aggregatedWindow.Hash()), 10) + + store.mapMutex.Lock() + + var newCount, updateCount, allCount int32 + for _, value := range store.eventsMap { + value.AggregatedEvent.SetValue(AggregatedId, windowHash+"_"+value.AggregatedEvent.HashToString()) + if value.Updated == typeNew { + request := elastic.NewBulkIndexRequest().Index(GetAggregatedIndexName()).Doc(value.AggregatedEvent).Id(value.AggregatedEvent.GetString(AggregatedId)) + value.Updated = typeNone + store.aggregatedProcessor.Add(request) + newCount++ + } else if value.Updated == typeUpdate { + request := elastic.NewBulkUpdateRequest().Index(GetAggregatedIndexName()).Doc(value.AggregatedEvent).Id(value.AggregatedEvent.GetString(AggregatedId)) + value.Updated = typeNone + store.aggregatedProcessor.Add(request) + updateCount++ + } else { + allCount++ + } + } + + // Need to flush bulk processors before mutex unlock + err := store.normalizedProcessor.Flush() + if err != nil { + store.cl.Errorf("Can't send bulk update to normalized events: %v", err) + } + + err = store.aggregatedProcessor.Flush() + if err != nil { + store.cl.Errorf("Can't send bulk update to aggregated events: %v", err) + } + + store.mapMutex.Unlock() + + if viper.GetBool(config.Verbose) { + cl := log.WithField("func", "SendUpdateBulk") + cl.Debugf("New records: %d, Update records: %d, Total records: %d", newCount, updateCount, allCount+newCount+updateCount) + } +} diff --git a/correlator/events/store_test.go b/correlator/events/store_test.go new file mode 100644 index 0000000..565a17a --- /dev/null +++ b/correlator/events/store_test.go @@ -0,0 +1,634 @@ +package events + +import ( + "crypto/sha512" + "encoding/hex" + "fmt" + "github.com/olivere/elastic/v7" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/util" + "testing" + "time" +) + +func TestAggregatedEventStore_AddEvents(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + normalizedIndexName := "test-normalized" + aggregatedIndexName := "test-aggregated" + eventsFields := []string{ + "event_severity", + "event_protocol", + "message", + "device_vendor", + "device_product", + "device_action", + "device_version", + "device_timezone", + "sign_id", + "sign_category", + "sign_subcategory", + "application", + "source_ip", + "source_host", + "source_port", + "source_mac", + "source_timezone", + "source_software", + "source_action", + "destination_ip", + "destination_mac", + "destination_timezone", + "destination_software", + "destination_action", + "destination_host", + "destination_port", + "destination_user", + "cs1", + "cs1Label", + "cs2", + "cs2Label", + "object_type", + } + + viper.Set(config.ElasticAggregatedIndexName, aggregatedIndexName) + viper.Set(config.ElasticNormalizedIndexName, normalizedIndexName) + viper.Set(config.AggregatedFields, eventsFields) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, normalizedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, aggregatedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + + // Add documents + goodCount := 10 + err = FillNormalizedEventsForAggregation(normalizedIndexName, goodCount, 
client) + if err != nil { + t.Errorf("%v", err) + return + } + + // Prepare query + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(normalizedIndexName, query) + events, errs2 := ParseEvents(raw, errs1, 1) + + // Prepare store + window := TimeWindow{Begin: time.Now().UTC(), End: time.Now().UTC().Add(time.Second * 10)} + store := NewAggregatedEventStore(client, window) + + // Add events + store.AddEvents(events) + + // Check for errors + for err = range errs2 { + if err != nil { + t.Errorf("%v", err) + } + } + + // Now, change window to update all events + store.UpdateWindow(TimeWindow{Begin: time.Now().UTC(), End: time.Now().UTC().Add(time.Second * 10)}) + + // Need to wait for elastic finish + time.Sleep(time.Second) + + // Now, we can check what we have + if store.stat.EventsProcessed != uint64(goodCount) { + t.Errorf("Bad event processed count. Expect %v, got %v", goodCount, store.stat.EventsProcessed) + } + + // We create only one aggregated event from all our normalized events + if store.stat.EventsAggregated != uint64(1) { + t.Errorf("Bad event aggregated count. Expect %v, got %v", 1, store.stat.EventsAggregated) + } + + // Need to check what we have in index + raw, errs1 = client.Query(normalizedIndexName, query) + normalizedCount := 0 + for range raw { + normalizedCount++ + } + + // Check for errors + for err = range errs1 { + if err != nil { + t.Errorf("%v", err) + } + } + + if normalizedCount != 0 { + t.Errorf("Not all normalized events were deleted. We still have %v of them", normalizedCount) + } + + // Now check aggregated events + raw, errs1 = client.Query(aggregatedIndexName, query) + aggregatedCount := 0 + for range raw { + aggregatedCount++ + } + + // Check for errors + for err = range errs1 { + if err != nil { + t.Errorf("%v", err) + } + } + + if aggregatedCount != 1 { + t.Errorf("Got wrong aggregated events count. Expect 1, got %v", aggregatedCount) + } + + // Clear index + err = client.DeleteIndex(normalizedIndexName) + + if err != nil { + t.Errorf("%v", err) + } + + err = client.DeleteIndex(aggregatedIndexName) + if err != nil { + t.Errorf("%v", err) + } +} + +func TestAggregatedEventStore_AddEvents_long(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + normalizedIndexName := "test-normalized" + aggregatedIndexName := "test-aggregated" + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, normalizedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, aggregatedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + + // Add documents + goodCount := 1000 + err = FillNormalizedEventsForAggregation(normalizedIndexName, goodCount, client) + if err != nil { + t.Errorf("%v", err) + return + } + + log.Info("Index is ready") + + // Prepare query + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(normalizedIndexName, query) + events, errs2 := ParseEvents(raw, errs1, 10) + + // Prepare store + window := TimeWindow{Begin: time.Now().UTC(), End: time.Now().UTC().Add(time.Second * 10)} + store := NewAggregatedEventStore(client, window) + + log.Info("Add event start") + + start := time.Now() + // Add events + store.AddEvents(events) + + log.Infof("Add event finish. 
Elapsed %v", time.Since(start)) + + // Check for errors + for err = range errs2 { + if err != nil { + t.Errorf("%v", err) + } + } + + log.Info("Bulk flush start") + start = time.Now() + // Now, change window to update all events + store.UpdateWindow(TimeWindow{Begin: time.Now().UTC(), End: time.Now().UTC().Add(time.Second * 10)}) + log.Infof("Bulk flush finish. Elapsed %v", time.Since(start)) + + // Need to wait for elastic finish + time.Sleep(time.Second * 30) + + // Now, we can check what we have + if store.stat.EventsProcessed != uint64(goodCount) { + t.Errorf("Bad event processed count. Expect %v, got %v", goodCount, store.stat.EventsProcessed) + } + + // We create only one aggregated event from all our normalized events + if store.stat.EventsAggregated != uint64(1) { + t.Errorf("Bad event aggregated count. Expect %v, got %v", 1, store.stat.EventsAggregated) + } + + // Need to check what we have in index + raw, errs1 = client.Query(normalizedIndexName, query) + normalizedCount := 0 + for range raw { + normalizedCount++ + } + + // Check for errors + for err = range errs1 { + if err != nil { + t.Errorf("%v", err) + } + } + + if normalizedCount != 0 { + t.Errorf("Not all normalized events were deleted. We still have %v of them", normalizedCount) + } + + // Now check aggregated events + raw, errs1 = client.Query(aggregatedIndexName, query) + aggregatedCount := 0 + for range raw { + aggregatedCount++ + } + + // Check for errors + for err = range errs1 { + if err != nil { + t.Errorf("%v", err) + } + } + + if aggregatedCount != 1 { + t.Errorf("Got wrong aggregated events count. Expect 1, got %v", aggregatedCount) + } + +} + +//func TestAggregatedEventStore_UpdateWindow(t *testing.T) { +// util.SetupTest(t) +// defer util.TearDownTest(t) +// +// normalizedIndexName := "test-normalized" +// aggregatedIndexName := "test-aggregated" +// +// client, err := es.NewElastic() +// if err != nil { +// t.Errorf("%v", err) +// return +// } +// +// err = ClearIndex(client, normalizedIndexName) +// if err != nil { +// t.Errorf("%v", err) +// return +// } +// +// err = ClearIndex(client, aggregatedIndexName) +// if err != nil { +// t.Errorf("%v", err) +// return +// } +// +// addEvents := func(count int) int { +// err = FillNormalizedEventsForAggregation(normalizedIndexName, count, client) +// if err != nil { +// t.Errorf("%v", err) +// return 0 +// } +// return count +// } +// +// // Add documents +// goodCount := 100 +// totalEvents := 0 +// aggregatedEventCount := 10 +// log.Info("Index is ready") +// +// // Prepare store +// window := TimeWindow{Begin: time.Now().UTC(), End: time.Now().UTC().Add(time.Second * 10)} +// store := NewAggregatedEventStore(client, window) +// +// // Prepare query +// query := elastic.NewMatchAllQuery() +// for i := 0; i < aggregatedEventCount; i++ { +// // Add documents +// totalEvents += addEvents(goodCount) +// +// // Move aggregation window +// log.Info("Bulk flush start") +// start := time.Now() +// // Now, change window to update all events +// store.UpdateWindow(TimeWindow{Begin: time.Now().UTC(), End: time.Now().UTC().Add(time.Second * 10)}) +// log.Infof("Bulk flush finish. Elapsed %v", time.Since(start)) +// +// // Query for new events +// raw, errs1 := client.Query(normalizedIndexName, query) +// events, errs2 := ParseEvents(raw, errs1, 10) +// log.Info("Add event start") +// start = time.Now() +// // Add events +// store.AddEvents(events) +// log.Infof("Add event finish. 
Elapsed %v", time.Since(start)) +// +// for err = range errs2 { +// t.Errorf("%v", err) +// } +// +// // Need to wait for elastic finish +// time.Sleep(time.Second * 20) +// } +// +// // Move aggregation window +// log.Info("Bulk flush start") +// start := time.Now() +// // Now, change window to update all events +// store.UpdateWindow(TimeWindow{Begin: time.Now().UTC(), End: time.Now().UTC().Add(time.Second * 10)}) +// log.Infof("Bulk flush finish. Elapsed %v", time.Since(start)) +// +// // Need to wait for elastic finish +// time.Sleep(time.Second * 20) +// +// // Now, we can check what we have +// if store.stat.EventsProcessed != uint64(totalEvents) { +// t.Errorf("Bad event processed count. Expect %v, got %v", goodCount, store.stat.EventsProcessed) +// } +// +// // We create only one aggregated event from all our normalized events +// if store.stat.EventsAggregated != uint64(aggregatedEventCount) { +// t.Errorf("Bad event aggregated count. Expect %v, got %v", 1, store.stat.EventsAggregated) +// } +// +// // Need to check what we have in index +// raw, errs1 := client.Query(normalizedIndexName, query) +// normalizedCount := 0 +// for range raw { +// normalizedCount++ +// } +// +// // Check for errors +// for err = range errs1 { +// if err != nil { +// t.Errorf("%v", err) +// } +// } +// +// if normalizedCount != 0 { +// t.Errorf("Not all normalized events were deleted. We still have %v of them", normalizedCount) +// } +// +// // Now check aggregated events +// raw, errs1 = client.Query(aggregatedIndexName, query) +// aggregatedCount := 0 +// for range raw { +// aggregatedCount++ +// } +// +// // Check for errors +// for err = range errs1 { +// if err != nil { +// t.Errorf("%v", err) +// } +// } +// +// if aggregatedCount != aggregatedEventCount { +// t.Errorf("Got wrong aggregated events count. 
Expect %v, got %v", aggregatedEventCount, aggregatedCount) +// } +// +//} + +// TestHashFuncWithAllFields The check is performed on all standard event fields +func TestHashFuncWithAllFields(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + normalizedIndexName := "test-normalized" + aggregatedIndexName := "test-aggregated" + eventsFields := []string{ + "event_severity", + "event_protocol", + "message", + "device_vendor", + "device_product", + "device_action", + "message", + "device_version", + "sign_id", + "sign_category", + "sign_subcategory", + "application", + "source_ip", + "source_host", + "source_port", + "destination_ip", + "destination_host", + "destination_port", + "destination_user", + } + + viper.Set(config.ElasticAggregatedIndexName, aggregatedIndexName) + viper.Set(config.ElasticNormalizedIndexName, normalizedIndexName) + viper.Set(config.AggregatedFields, eventsFields) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, normalizedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, aggregatedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + goodCount := 10 + err = FillNormalizedEventsForAggregation(normalizedIndexName, goodCount, client) + if err != nil { + t.Errorf("%v", err) + return + } + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(normalizedIndexName, query) + events, errs2 := ParseEvents(raw, errs1, 10) + + for item := range errs2 { + if item != nil { + t.Errorf("%v", item) + } + } + var eventString string + for item := range events { + + for _, field := range eventsFields { + eventString += fmt.Sprintf("%v", item.GetValue(field)) + } + fieldStr := sha512.Sum512_256([]byte(fmt.Sprintf("%v", eventString))) + // + hashOld := hex.EncodeToString(fieldStr[:]) + hashNew := item.Hash() + if hashOld != hashNew { + t.Errorf("%v[%v->%v]", "The hashes don't match", hashNew, hashOld) + } + eventString = "" + + } + +} + +//TestHashFuncWithPartFields The check is performed with an incomplete list of fields +func TestHashFuncWithPartFields(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + normalizedIndexName := "test-normalized" + aggregatedIndexName := "test-aggregated" + eventsFields := []string{ + "event_severity", + "event_protocol", + "device_vendor", + "device_product", + "device_action", + "device_version", + "sign_id", + "sign_category", + "sign_subcategory", + "application", + "source_ip", + "source_host", + } + + viper.Set(config.ElasticAggregatedIndexName, aggregatedIndexName) + viper.Set(config.ElasticNormalizedIndexName, normalizedIndexName) + viper.Set(config.AggregatedFields, eventsFields) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, normalizedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, aggregatedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + goodCount := 10 + err = FillNormalizedEventsForAggregation(normalizedIndexName, goodCount, client) + if err != nil { + t.Errorf("%v", err) + + } + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(normalizedIndexName, query) + events, errs2 := ParseEvents(raw, errs1, 10) + + for err = range errs2 { + if err != nil { + t.Errorf("%v", err) + } + } + var eventString string + for item := range events { + + for _, field := range eventsFields { + eventString += fmt.Sprintf("%v", item.GetValue(field)) + } + 
fieldStr := sha512.Sum512_256([]byte(fmt.Sprintf("%v", eventString))) + hashOld := hex.EncodeToString(fieldStr[:]) + hashNew := item.Hash() + if hashOld != hashNew { + t.Errorf("%v[%v->%v]", "The hashes don't match", hashNew, hashOld) + } + eventString = "" + } +} + +// TestHashFuncWithEmptyFieldsList The test works completely without aggregation fields. +//In this case, the hash is calculated from an empty string +func TestHashFuncWithEmptyFieldsList(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + normalizedIndexName := "test-normalized" + aggregatedIndexName := "test-aggregated" + viper.Set(config.ElasticAggregatedIndexName, aggregatedIndexName) + viper.Set(config.ElasticNormalizedIndexName, normalizedIndexName) + viper.Set(config.AggregatedFields, make([]string, 0)) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, normalizedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + + err = ClearIndex(client, aggregatedIndexName) + if err != nil { + t.Errorf("%v", err) + return + } + goodCount := 10 + err = FillNormalizedEventsForAggregation(normalizedIndexName, goodCount, client) + if err != nil { + t.Errorf("%v", err) + + } + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(normalizedIndexName, query) + events, errs2 := ParseEvents(raw, errs1, 10) + + for err = range errs2 { + if err != nil { + t.Errorf("%v", err) + } + } + + for item := range events { + fieldStr := sha512.Sum512_256([]byte(fmt.Sprint(""))) + hashOld := hex.EncodeToString(fieldStr[:]) + hashNew := item.Hash() + if hashOld != hashNew { + t.Errorf("%v[%v->%v]", "The hashes don't match", hashNew, hashOld) + } + } +} diff --git a/correlator/events/timewindow.go b/correlator/events/timewindow.go new file mode 100644 index 0000000..9927588 --- /dev/null +++ b/correlator/events/timewindow.go @@ -0,0 +1,40 @@ +package events + +import ( + "encoding/json" + "time" + + "github.com/segmentio/fasthash/fnv1a" +) + +// TimeWindow Time interval +type TimeWindow struct { + Begin time.Time + End time.Time +} + +// NewTimeWindowFromNow Create new time window from now to now+duration +func NewTimeWindowFromNow(duration time.Duration) TimeWindow { + now := time.Now().UTC() + return TimeWindow{ + Begin: now, + End: now.Add(duration), + } +} + +// Hash Calculate time window hash +func (window *TimeWindow) Hash() uint32 { + var hash uint32 + hash = fnv1a.AddString32(hash, window.Begin.Format("2006-01-02T15:04:05")) + hash = fnv1a.AddString32(hash, window.End.Format("2006-01-02T15:04:05")) + return hash +} + +func (window TimeWindow) String() string { + bytes, err := json.Marshal(window) + if err != nil { + return "" + } + + return string(bytes) +} diff --git a/correlator/events/timewindow_test.go b/correlator/events/timewindow_test.go new file mode 100644 index 0000000..29e8b7d --- /dev/null +++ b/correlator/events/timewindow_test.go @@ -0,0 +1,45 @@ +package events + +import ( + "testing" + "time" +) + +func TestNewTimeWindowFromNow(t *testing.T) { + duration := time.Second + window := NewTimeWindowFromNow(duration) + + if window.Begin.Add(duration) != window.End { + t.Errorf("Bad window end. 
Expect %v got %v", window.Begin.Add(duration), window.End) + } +} + +func TestTimeWindowHash(t *testing.T) { + layout := "2006-01-02T15:04:05.000Z" + str1 := "2014-11-12T11:45:26.371Z" + str2 := "2014-12-12T11:45:26.371Z" + + t1, err := time.Parse(layout, str1) + if err != nil { + t.Errorf("%v", err) + return + } + + t2, err := time.Parse(layout, str2) + if err != nil { + t.Errorf("%v", err) + return + } + + window := TimeWindow{ + Begin: t1, + End: t2, + } + + hash := window.Hash() + goodHash := uint32(1686523187) + + if hash != goodHash { + t.Errorf("Bad timewindow hash. Expect %v, got %v", goodHash, hash) + } +} diff --git a/correlator/events/tools.go b/correlator/events/tools.go new file mode 100644 index 0000000..96ba70b --- /dev/null +++ b/correlator/events/tools.go @@ -0,0 +1,17 @@ +package events + +import ( + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + "time" +) + +// GetAggregatedIndexName Get name for aggregated index +func GetAggregatedIndexName() string { + return time.Now().UTC().Format(viper.GetString(config.ElasticAggregatedIndexName)) +} + +// GetNormalizedIndexName Get name for normalized index +func GetNormalizedIndexName() string { + return time.Now().UTC().Format(viper.GetString(config.ElasticNormalizedIndexName)) +} diff --git a/correlator/events/util.go b/correlator/events/util.go new file mode 100644 index 0000000..89de91f --- /dev/null +++ b/correlator/events/util.go @@ -0,0 +1,264 @@ +package events + +import ( + "fmt" + "github.com/google/uuid" + "github.com/olivere/elastic/v7" + log "github.com/sirupsen/logrus" + "iwarma.ru/console/correlator/es" + "time" +) + +//func PrepareElastic() { +// viper.Set(config.ElasticUrl, "http://elasticsearch:9200") +// viper.Set(config.ElasticRetryCount, 1) +// viper.Set(config.ElasticUsername, "elastic") +// viper.Set(config.ElasticPassword, "changeme") +// viper.Set(config.Verbose, true) +// viper.Set(config.ElasticAggregatedIndexName, "test-aggregated") +// viper.Set(config.ElasticNormalizedIndexName, "test-normalized") +// viper.Set(config.AggregatorIterationDuration, time.Second*2) +// viper.Set(config.Threads, 10) +//} + +func ClearIndex(el *es.Elastic, index ...string) error { + for _, cur := range index { + exists, err := el.CheckIndex(cur) + if err != nil { + log.Errorf("%+v", err) + return err + } + + if exists { + err = el.DeleteIndex(cur) + if err != nil { + return err + } + } + } + + return nil +} + +func FillNormalizedEvents(index string, count int, el *es.Elastic) error { + bulk := el.NewBulkRequest() + + for i := 0; i < count; i++ { + event := Event{ + "timestamp": time.Time{}, + "type": "test", + "event_timestamp": time.Time{}, + "event_id": uuid.NewString(), + "event_severity": uint8(i), + "event_src_msg": fmt.Sprintf("Test message %v", i), + "event_protocol": "TCP", + "device_vendor": "TestDevice", + "device_product": "TestProduct", + "device_version": "1.0", + "device_action": "Test", + "device_timezone": "Europe/Moscow", + "message": "Test message", + "sign_id": fmt.Sprintf("%v", i), + "sign_category": "Test", + "sign_subcategory": "Test", + "application": "Test application", + "source_ip": "127.0.0.1", + "source_mac": "00:50:56:c0:00:08", + "source_host": "localhost", + "source_port": uint32(i), + "source_user": "root", + "source_timezone": "Europe/Moscow", + "source_software": "test server", + "source_action": "test_action", + "destination_ip": "127.0.0.1", + "destination_mac": "00:50:56:c0:00:10", + "destination_timezone": "Europe/Moscow", + "destination_software": "Apache server", + 
"destination_host": "localhost", + "destination_action": "connect", + "destination_port": uint32(i), + "destination_user": "user", + "cs1": uint32(i * 5), + "cs1Label": "Signature", + "cs2": uint32(i + 20), + "cs2Label": "line_number", + "object_type": "webserver", + } + + bulk = bulk.Add(elastic.NewBulkIndexRequest().Index(index).Id(event.GetString(EventID)).Doc(event)) + } + + bulkResponse, err := el.ExecuteBulk(bulk) + if err != nil { + log.Errorf("Can't index documents: %v", err) + return err + } + + if bulkResponse.Errors { + log.Errorf("Got errors from bulk requset: %v", bulkResponse.Failed()) + return fmt.Errorf("bulk error") + } + + if len(bulkResponse.Indexed()) != count { + log.Errorf("Bad bulk index count. Got %v, expect %v", len(bulkResponse.Indexed()), count) + return fmt.Errorf("bad bulk count") + } + + // wait until elastic is ready + time.Sleep(time.Second) + + return nil +} + +// FillNormalizedEventsForAggregation All thise event will be aggregated in one +func FillNormalizedEventsForAggregation(index string, count int, el *es.Elastic) error { + bulk := el.NewBulkRequest() + + for i := 0; i < count; i++ { + + event := Event{ + "timestamp": time.Time{}, + "type": "test", + "event_timestamp": time.Time{}, + "event_id": uuid.NewString(), + "event_severity": 5, + "event_src_msg": "Message", + "event_protocol": "TCP", + "device_vendor": "TestDevice", + "device_product": "TestProduct", + "device_version": "1.0", + "device_action": "Test", + "device_timezone": "Europe/Moscow", + "sign_id": "Test", + "sign_category": "Test", + "sign_subcategory": "Test", + "application": "Test application", + "source_ip": "127.0.0.1", + "source_mac": "00:50:56:c0:00:08", + "source_host": "localhost", + "source_port": 5000, + "message": "Suricata message", + "source_user": "root", + "source_timezone": "Europe/Moscow", + "source_software": "test server", + "source_action": "test_action", + "destination_ip": "127.0.0.1", + "destination_mac": "00:50:56:c0:00:10", + "destination_timezone": "Europe/Moscow", + "destination_software": "Apache server", + "destination_host": "localhost", + "destination_action": "connect", + "destination_port": 8000, + "destination_user": "user", + "cs1": 2563, + "cs1Label": "Signature", + "cs2": 12, + "cs2Label": "line_number", + "object_type": "webserver", + } + + bulk = bulk.Add(elastic.NewBulkIndexRequest().Index(index).Id(event.GetString(EventID)).Doc(event)) + } + + bulkResponse, err := el.ExecuteBulk(bulk) + if err != nil { + log.Errorf("Can't index documents: %v", err) + return err + } + + if bulkResponse.Errors { + log.Errorf("Got errors from bulk requset: %v", bulkResponse.Failed()) + return fmt.Errorf("bulk error") + } + + if len(bulkResponse.Indexed()) != count { + log.Errorf("Bad bulk index count. 
Got %v, expect %v", len(bulkResponse.Indexed()), count) + return fmt.Errorf("bad bulk count") + } + + // wait until elastic is ready + time.Sleep(time.Second) + + return nil +} + +// FillAggregatedEvents These events must not bee aggregated +func FillAggregatedEvents(index string, count int, el *es.Elastic) error { + bulk := el.NewBulkRequest() + + for i := 0; i < count; i++ { + hash := uuid.NewString() + event := Event{ + Hash: hash, + FirstEvent: time.Now().UTC().Add(time.Second * (-10)), + LastEvent: time.Now().UTC(), + EventCount: 0, + Created: time.Now().UTC().Add(time.Second * (-10)), + Tags: nil, + AggregatedId: hash, + CeleryDone: false, + "index": index, + "timestamp": time.Time{}, + "type": "test", + "event_timestamp": time.Time{}, + "event_id": uuid.NewString(), + "event_severity": uint8(i), + "event_src_msg": fmt.Sprintf("Test message %v", i), + "event_protocol": "TCP", + "device_vendor": "TestDevice", + "device_product": "TestProduct", + "device_version": "1.0", + "device_action": "Test", + "device_timezone": "Europe/Moscow", + "message": "Test message", + "sign_id": fmt.Sprintf("%v", i), + "sign_category": "Test", + "sign_subcategory": "Test", + "application": "Test application", + "source_ip": "127.0.0.1", + "source_mac": "00:50:56:c0:00:08", + "source_host": "localhost", + "source_port": uint32(i), + "source_user": "root", + "source_timezone": "Europe/Moscow", + "source_software": "test server", + "source_action": "test_action", + "destination_ip": "127.0.0.1", + "destination_mac": "00:50:56:c0:00:10", + "destination_timezone": "Europe/Moscow", + "destination_software": "Apache server", + "destination_host": "localhost", + "destination_action": "connect", + "destination_port": uint32(i), + "destination_user": "user", + "cs1": uint32(i * 5), + "cs1Label": "Signature", + "cs2": uint32(i + 20), + "cs2Label": "line_number", + "object_type": "webserver", + } + + bulk = bulk.Add(elastic.NewBulkIndexRequest().Index(index).Id(event.GetString(AggregatedId)).Doc(event)) + } + + bulkResponse, err := el.ExecuteBulk(bulk) + if err != nil { + log.Errorf("Can't index documents: %v", err) + return err + } + + if bulkResponse.Errors { + log.Errorf("Got errors from bulk requset: %v", bulkResponse.Failed()) + return fmt.Errorf("bulk error") + } + + if len(bulkResponse.Indexed()) != count { + log.Errorf("Bad bulk index count. 
Got %v, expect %v", len(bulkResponse.Indexed()), count) + return fmt.Errorf("bad bulk count") + } + + // wait until elastic is ready + time.Sleep(time.Second) + + return nil +} diff --git a/correlator/go.mod b/correlator/go.mod new file mode 100644 index 0000000..4ebf937 --- /dev/null +++ b/correlator/go.mod @@ -0,0 +1,14 @@ +module iwarma.ru/console/correlator + +go 1.16 + +require ( + github.com/google/uuid v1.2.0 + github.com/gorilla/mux v1.8.0 + github.com/natefinch/lumberjack v2.0.0+incompatible + github.com/olivere/elastic/v7 v7.0.24 + github.com/segmentio/fasthash v1.0.3 + github.com/sirupsen/logrus v1.8.1 + github.com/spf13/viper v1.7.1 + gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect +) diff --git a/correlator/make_release.py b/correlator/make_release.py new file mode 100644 index 0000000..a750654 --- /dev/null +++ b/correlator/make_release.py @@ -0,0 +1,37 @@ +#!/usr/bin/python3 + +import os +import re +import git +from pathlib import Path + + +def main(): + main_branch='master' + + cwd=Path(os.path.abspath(__file__)).parents[0] + repo=git.Repo(cwd) + repo.git.fetch('origin', '+refs/heads/*:refs/remotes/origin/*', '+refs/tags/*:refs/tags/*') + + orig_tag=next((tag for tag in repo.tags if tag.commit == repo.head.commit), None) + if orig_tag is None: + raise Exception(f"no tag on current commit {repo.head.commit}") + + release_tag=re.search('^(\d+.\d+.\d+)-.*',orig_tag.name) + if release_tag is None or release_tag[1] is None: + raise Exception(f"release_tag is None. Error") + + print(f'release candidate tag name is {orig_tag.name}') + print(f'release tag name is {release_tag[1]}') + + repo.git.checkout(main_branch) + repo.git.merge('--no-ff','-X','theirs','-m',f'"release {release_tag[1]}"',orig_tag.name) + repo.create_tag(release_tag[1],ref='HEAD', message=f'release {release_tag[1]}') + repo.git.push() + repo.git.push('--tags') + #git logf --first-parent master -n 2 + + print('finished successfully', flush=True) + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/correlator/mapping/component.go b/correlator/mapping/component.go new file mode 100644 index 0000000..5723d11 --- /dev/null +++ b/correlator/mapping/component.go @@ -0,0 +1,175 @@ +package mapping + +import ( + "bytes" + "encoding/json" + "errors" + "io" + "io/ioutil" + "net/http" + "os" + + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" +) + +type Component struct { + Version int `json:"version"` + Template map[string]interface{} `json:"template"` +} + +type ComponentTemplate struct { + Name string `json:"name"` + Value Component `json:"component_template"` +} + +type ComponentResponse struct { + Templates []ComponentTemplate `json:"component_templates"` +} + +type UpdateResponse struct { + Acknowledged bool `json:"acknowledged"` +} + +func ReadComponent(reader io.Reader) (Component, error) { + byteValue, err := ioutil.ReadAll(reader) + if err != nil { + log.Errorf("Can't read component: %v", err) + return Component{}, err + } + + var result Component + err = json.Unmarshal(byteValue, &result) + if err != nil { + log.Errorf("Can't unmarshall component: %v", err) + return Component{}, err + } + + return result, nil +} + +func ReadComponentFromFile(fileName string) (Component, error) { + file, err := os.Open(fileName) + if err != nil { + log.Errorf("Can't open file: %v", err) + return Component{}, err + } + defer file.Close() + + return ReadComponent(file) +} + +func ReadComponentResponse(reader io.Reader) (ComponentResponse, error) { + 
byteValue, err := ioutil.ReadAll(reader) + if err != nil { + log.Errorf("Can't read component response: %v", err) + return ComponentResponse{}, err + } + + var result ComponentResponse + err = json.Unmarshal(byteValue, &result) + if err != nil { + log.Errorf("Can't unmarshall component response: %v", err) + return ComponentResponse{}, err + } + + return result, nil +} + +func ReadUpdateResponse(reader io.Reader) (UpdateResponse, error) { + byteValue, err := ioutil.ReadAll(reader) + if err != nil { + log.Errorf("Can't read response: %v", err) + return UpdateResponse{}, err + } + + log.Debugf("Got server response: %v", string(byteValue)) + + var result UpdateResponse + err = json.Unmarshal(byteValue, &result) + if err != nil { + log.Errorf("Can't unmarshall update response: %v", err) + return UpdateResponse{}, err + } + + return result, nil +} + +func CheckComponentUpdate(url string, component Component) (bool, error) { + // Get current mapping for component + http_client := &http.Client{} + req, err := http.NewRequest("GET", url, nil) + if err != nil { + log.Errorf("Can't create request to obtain component mapping version from server: %v", err) + return false, err + } + + // Set auth + req.SetBasicAuth(viper.GetString(config.ElasticUsername), viper.GetString(config.ElasticPassword)) + + resp, err := http_client.Do(req) + if err != nil { + log.Errorf("Can't send request, to check component mapping on server: %v", err) + return false, err + } + defer resp.Body.Close() + + responseComponent, err := ReadComponentResponse(resp.Body) + if err != nil { + log.Errorf("Can't parse component mapping response from server: %v", err) + return false, err + } + + // Now, check version + max_version := 0 + for _, cur := range responseComponent.Templates { + if cur.Value.Version > max_version { + max_version = cur.Value.Version + } + } + + log.Debugf("After checking %v versions, found max version %v, component version is %v", len(responseComponent.Templates), max_version, component.Version) + + return component.Version > max_version, nil +} + +func UpdateComponent(url string, component Component) error { + + byteValue, err := json.Marshal(component) + if err != nil { + log.Errorf("Can't marshall component: %v", err) + return err + } + + buffer := bytes.NewBuffer(byteValue) + + req, err := http.NewRequest("PUT", url, buffer) + if err != nil { + log.Errorf("Can't create copmonent update request: %v", err) + return err + } + + req.SetBasicAuth(viper.GetString(config.ElasticUsername), viper.GetString(config.ElasticPassword)) + req.Header.Add("Content-Type", "application/json") + + http_client := &http.Client{} + resp, err := http_client.Do(req) + if err != nil { + log.Errorf("Can't send component update request: %v", err) + return err + } + defer resp.Body.Close() + + response, err := ReadUpdateResponse(resp.Body) + if err != nil { + log.Errorf("Can't read server response: %v", err) + return err + } + + if response.Acknowledged != true { + return errors.New("Server didn't accept component") + } + + return nil +} diff --git a/correlator/mapping/component_test.go b/correlator/mapping/component_test.go new file mode 100644 index 0000000..7d4365f --- /dev/null +++ b/correlator/mapping/component_test.go @@ -0,0 +1,156 @@ +package mapping + +import ( + "bytes" + "io/ioutil" + "os" + "testing" +) + +const ( + goodComponentVersion = 4 + goodComponent = `{ + "template": { + "mappings": { + "properties": { + "event_hash": { + "type": "text" + }, + "event_first": { + "type": "date", + "format": "date_optional_time" + }, + 
"event_last": { + "type": "date", + "format": "date_optional_time" + }, + "event_count": { + "type": "long" + }, + "@created": { + "type": "date" + }, + "rule_tags": { + "type": "keyword" + } + } + } + }, + "version": 4 + }` + goodComponentResponse = ` + { + "component_templates" : [ + { + "name" : "aggregated_component", + "component_template" : { + "template" : { + "mappings" : { + "properties" : { + "@created" : { + "type" : "date" + }, + "rule_tags" : { + "type" : "keyword" + }, + "event_last" : { + "format" : "date_optional_time", + "type" : "date" + }, + "event_hash" : { + "type" : "text" + }, + "event_count" : { + "type" : "long" + }, + "event_first" : { + "format" : "date_optional_time", + "type" : "date" + } + } + } + }, + "version" : 4 + } + } + ] + }` +) + +func TestReadComponentWork(t *testing.T) { + buf := bytes.NewBuffer([]byte(goodComponent)) + + component, err := ReadComponent(buf) + if err != nil { + t.Errorf("Can't read good component: %v", err) + return + } + + if component.Version != goodComponentVersion { + t.Errorf("Bad component version. Expect %v, got %v", goodComponentVersion, component.Version) + } +} + +func TestReadComponentBad(t *testing.T) { + buf := bytes.NewBuffer(make([]byte, 1)) + _, err := ReadComponent(buf) + if err == nil { + t.Error("No error, when we need one") + } +} + +func TestReadComponentFromFileWork(t *testing.T) { + file, err := ioutil.TempFile("", "component.*.json") + if err != nil { + t.Errorf("Can't create temp file: %v", err) + return + } + + n, err := file.Write([]byte(goodComponent)) + if err != nil { + t.Errorf("Can't write temp file: %v", err) + } + + if n != len([]byte(goodComponent)) { + t.Errorf("Bad write count size: %v expect %v", n, len([]byte(goodComponent))) + } + + file.Close() + + defer os.Remove(file.Name()) + + component, err := ReadComponentFromFile(file.Name()) + if err != nil { + t.Errorf("Can't read component from file: %v", err) + return + } + + if component.Version != goodComponentVersion { + t.Errorf("Bad component version. 
Expect %v, got %v", goodComponentVersion, component.Version) + } +} + +func TestReadComponentFromFileBad(t *testing.T) { + _, err := ReadComponentFromFile("/tmp/11111111dddddddddddddd") + if err == nil { + t.Error("No error when we need one") + } +} + +func TestReadComponentResponseWork(t *testing.T) { + buf := bytes.NewBuffer([]byte(goodComponentResponse)) + + response, err := ReadComponentResponse(buf) + if err != nil { + t.Errorf("Can't read component response: %v", err) + return + } + + if len(response.Templates) != 1 { + t.Errorf("Bad response size, expect 1, got %v", len(response.Templates)) + } + + if response.Templates[0].Value.Version != goodComponentVersion { + t.Errorf("Got bad component version, expect %v got %v", response.Templates[0].Value.Version, goodComponentVersion) + } +} diff --git a/correlator/mapping/index.go b/correlator/mapping/index.go new file mode 100644 index 0000000..c2676e6 --- /dev/null +++ b/correlator/mapping/index.go @@ -0,0 +1,160 @@ +package mapping + +import ( + "bytes" + "encoding/json" + "errors" + "io" + "io/ioutil" + "net/http" + "os" + + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" +) + +type Meta struct { + Description string `json:"description"` +} + +type Index struct { + IndexPatterns []string `json:"index_patterns"` + Template map[string]interface{} `json:"template"` + Priority int `json:"priority"` + ComposedOf []string `json:"composed_of"` + Version int `json:"version"` + MetaInfo Meta `json:"_meta,omitempty"` +} + +type IndexResponseItem struct { + Name string `json:"name"` + Template Index `json:"index_template"` +} + +type IndexResponse struct { + Templates []IndexResponseItem `json:"index_templates"` +} + +func ReadIndex(reader io.Reader) (Index, error) { + byteValue, err := ioutil.ReadAll(reader) + if err != nil { + log.Errorf("Can't read index: %v", err) + return Index{}, err + } + + var result Index + err = json.Unmarshal(byteValue, &result) + if err != nil { + log.Errorf("Can't unmarshall index: %v", err) + return Index{}, err + } + + return result, nil +} + +func ReadIndexFromFile(fileName string) (Index, error) { + file, err := os.Open(fileName) + if err != nil { + log.Errorf("Can't open file: %v", err) + return Index{}, err + } + defer file.Close() + + return ReadIndex(file) +} + +func ReadIndexResponse(reader io.Reader) (IndexResponse, error) { + byteValue, err := ioutil.ReadAll(reader) + if err != nil { + log.Errorf("Can't read index response: %v", err) + return IndexResponse{}, err + } + + var result IndexResponse + err = json.Unmarshal(byteValue, &result) + if err != nil { + log.Errorf("Can't unmarshall index response: %v", err) + return IndexResponse{}, err + } + + return result, nil +} + +func CheckIndextUpdate(url string, index Index) (bool, error) { + // Get current mapping for index + http_client := &http.Client{} + req, err := http.NewRequest("GET", url, nil) + if err != nil { + log.Errorf("Can't create get index mapping request: %v", err) + return false, err + } + + // Set auth + req.SetBasicAuth(viper.GetString(config.ElasticUsername), viper.GetString(config.ElasticPassword)) + + resp, err := http_client.Do(req) + if err != nil { + log.Errorf("Can't send index get index mapping request: %v", err) + return false, err + } + defer resp.Body.Close() + + responseComponent, err := ReadIndexResponse(resp.Body) + if err != nil { + log.Errorf("Can't read get mappint for index response: %v", err) + return false, err + } + + // Now, check version + max_version := 0 + for _, 
+    for _, cur := range responseComponent.Templates {
+        if cur.Template.Version > max_version {
+            max_version = cur.Template.Version
+        }
+    }
+
+    log.Debugf("After checking %v versions, found max version %v, index version is %v", len(responseComponent.Templates), max_version, index.Version)
+
+    return index.Version > max_version, nil
+}
+
+func UpdateIndex(url string, index Index) error {
+
+    byteValue, err := json.Marshal(index)
+    if err != nil {
+        log.Errorf("Can't marshal index: %v", err)
+        return err
+    }
+
+    buffer := bytes.NewBuffer(byteValue)
+
+    req, err := http.NewRequest("PUT", url, buffer)
+    if err != nil {
+        log.Errorf("Can't create index update request: %v", err)
+        return err
+    }
+
+    req.SetBasicAuth(viper.GetString(config.ElasticUsername), viper.GetString(config.ElasticPassword))
+    req.Header.Add("Content-Type", "application/json")
+
+    http_client := &http.Client{}
+    resp, err := http_client.Do(req)
+    if err != nil {
+        log.Errorf("Can't execute index update request: %v", err)
+        return err
+    }
+    defer resp.Body.Close()
+
+    response, err := ReadUpdateResponse(resp.Body)
+    if err != nil {
+        log.Errorf("Can't read index update response: %v", err)
+        return err
+    }
+
+    if !response.Acknowledged {
+        return errors.New("server didn't accept index")
+    }
+
+    return nil
+}
diff --git a/correlator/mapping/mapping.go b/correlator/mapping/mapping.go
new file mode 100644
index 0000000..7962c18
--- /dev/null
+++ b/correlator/mapping/mapping.go
@@ -0,0 +1,77 @@
+package mapping
+
+import (
+    "fmt"
+
+    log "github.com/sirupsen/logrus"
+    "github.com/spf13/viper"
+    "iwarma.ru/console/correlator/config"
+)
+
+type Mapping struct {
+    Url  string
+    File string
+    Type string
+}
+
+const (
+    MappingComponent = "component"
+    MappingIndex     = "index"
+)
+
+func UpdateMapping(mapping *[]Mapping) error {
+
+    for _, cur := range *mapping {
+        url := fmt.Sprintf("%v/%v", viper.GetString(config.ElasticUrl), cur.Url)
+
+        cl := log.WithFields(log.Fields{"url": url, "file": cur.File, "type": cur.Type})
+
+        if cur.Type == MappingComponent {
+            component, err := ReadComponentFromFile(cur.File)
+            if err != nil {
+                return err
+            }
+
+            ok, err := CheckComponentUpdate(url, component)
+            if err != nil {
+                return err
+            }
+
+            if ok {
+                cl.Infof("Need to update component")
+                err = UpdateComponent(url, component)
+                if err != nil {
+                    cl.Error(err)
+                    return err
+                }
+            } else {
+                cl.Infof("No need to update component")
+            }
+        } else if cur.Type == MappingIndex {
+            index, err := ReadIndexFromFile(cur.File)
+            if err != nil {
+                return err
+            }
+
+            ok, err := CheckIndextUpdate(url, index)
+            if err != nil {
+                return err
+            }
+
+            if ok {
+                cl.Infof("Need to update index")
+                err = UpdateIndex(url, index)
+                if err != nil {
+                    cl.Error(err)
+                    return err
+                }
+            } else {
+                cl.Infof("No need to update index")
+            }
+        } else {
+            return fmt.Errorf("got unknown mapping type: %v", cur.Type)
+        }
+    }
+
+    return nil
+}
diff --git a/correlator/rules/action.go b/correlator/rules/action.go
new file mode 100644
index 0000000..83affbc
--- /dev/null
+++ b/correlator/rules/action.go
@@ -0,0 +1,33 @@
+package rules
+
+import (
+    "bytes"
+    "encoding/json"
+    "text/template"
+
+    "iwarma.ru/console/correlator/events"
+)
+
+type Action interface {
+    // Perform action for provided events
+    Perform(events *[]*events.Event) error
+
+    // ParseInterface Used for parsing from json
+    ParseInterface(v interface{}) error
+
+    // GetType Get type of action
+    GetType() string
+
+    json.Unmarshaler
+    json.Marshaler
+}
+
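+// renderTemplate executes a parsed template against a single event and returns the rendered string.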
+func renderTemplate(templateSrc *template.Template, event *events.Event) (string, error) {
+    var buf bytes.Buffer
+    err := templateSrc.Execute(&buf, event)
+    if err != nil {
+        return "", err
+    }
+
+    return buf.String(), nil
+}
diff --git a/correlator/rules/action_asset.go b/correlator/rules/action_asset.go
new file mode 100644
index 0000000..89da7b0
--- /dev/null
+++ b/correlator/rules/action_asset.go
@@ -0,0 +1,329 @@
+package rules
+
+import (
+    "bytes"
+    "crypto/tls"
+    "encoding/json"
+    "errors"
+    "fmt"
+    "io/ioutil"
+    "iwarma.ru/console/correlator/events"
+    "net/http"
+    "strconv"
+    "text/template"
+
+    "iwarma.ru/console/correlator/config"
+
+    log "github.com/sirupsen/logrus"
+    "github.com/spf13/viper"
+)
+
+const (
+    AssetActionType = "asset"
+)
+
+type AssetAction struct {
+    Name            string
+    Description     string
+    Manufacturer    string
+    Model           string
+    Ip              string
+    Os              string
+    Ports           string
+    Vulnerabilities []string
+    Group           string
+    AssetType       string
+    Status          string
+
+    templates struct {
+        name        *template.Template
+        description *template.Template
+        model       *template.Template
+        ip          *template.Template
+        ports       *template.Template
+    }
+
+    client *http.Client
+    token  string
+}
+
+func (action *AssetAction) GetType() string {
+    return AssetActionType
+}
+
+func (action AssetAction) ToInterface() (map[string]interface{}, error) {
+    result := make(map[string]interface{})
+
+    result["type"] = AssetActionType
+    result["name"] = action.Name
+    result["description"] = action.Description
+    result["manufacturer"] = action.Manufacturer
+    result["model"] = action.Model
+    result["ip"] = action.Ip
+    result["os"] = action.Os
+    result["ports"] = action.Ports
+
+    if len(action.Vulnerabilities) > 0 {
+        vulnerabilities := make([]interface{}, 0)
+        for _, cur := range action.Vulnerabilities {
+            tmp, err := strconv.Atoi(cur)
+            if err != nil {
+                return nil, err
+            }
+            vulnerabilities = append(vulnerabilities, tmp)
+        }
+        result["vulnerabilities"] = vulnerabilities
+    }
+
+    result["group"] = action.Group
+    result["asset_type"] = action.AssetType
+    result["status"] = action.Status
+
+    return result, nil
+}
+
+func (action AssetAction) MarshalJSON() ([]byte, error) {
+    data, err := action.ToInterface()
+    if err != nil {
+        return nil, err
+    }
+
+    return json.Marshal(data)
+}
+
+func (action *AssetAction) UnmarshalJSON(b []byte) error {
+    var data interface{}
+    err := json.Unmarshal(b, &data)
+    if err != nil {
+        return err
+    }
+
+    return action.ParseInterface(data)
+}
+
+func (action *AssetAction) ParseInterface(v interface{}) error {
+
+    m, ok := v.(map[string]interface{})
+    if !ok {
+        return fmt.Errorf("can't parse %v from %T", v, v)
+    }
+
+    t, ok := m["type"].(string)
+    if !ok {
+        return errors.New("no type")
+    }
+
+    if t != AssetActionType {
+        return fmt.Errorf("bad type, expect %v got %v", AssetActionType, t)
+    }
+
+    action.Name, ok = m["name"].(string)
+    if !ok {
+        return errors.New("no name")
+    }
+
+    action.Ip, ok = m["ip"].(string)
+    if !ok {
+        return errors.New("no IP")
+    }
+
+    // Not required
+    action.Description, _ = m["description"].(string)
+    action.Manufacturer, _ = m["manufacturer"].(string)
+    action.Model, _ = m["model"].(string)
+    action.Os, _ = m["os"].(string)
+    action.Ports, _ = m["ports"].(string)
+    action.Group, _ = m["group"].(string)
+    action.AssetType, _ = m["asset_type"].(string)
+    action.Status, _ = m["status"].(string)
+
+    // For array
+    if w, ok := m["vulnerabilities"]; ok {
+        switch v := w.(type) {
+        case []interface{}:
+            {
+                for _, cur := range v {
+                    switch w := cur.(type) {
+                    case string:
+                        action.Vulnerabilities = append(action.Vulnerabilities, w)
+                    case float64:
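+                        // JSON numbers are decoded as float64, so format them back to strings here.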
+                        action.Vulnerabilities = append(action.Vulnerabilities, fmt.Sprintf("%v", w))
+                    default:
+                        return fmt.Errorf("bad vulnerabilities type: %T with value %v in interface %v", cur, cur, m)
+                    }
+                }
+            }
+        case string:
+            action.Vulnerabilities = append(action.Vulnerabilities, v)
+        case float64:
+            action.Vulnerabilities = append(action.Vulnerabilities, fmt.Sprintf("%v", v))
+        default:
+            return fmt.Errorf("bad vulnerabilities type: %T with value %v in interface %v", v, v, m)
+        }
+    }
+
+    return nil
+}
+
+func (action *AssetAction) Perform(events *[]*events.Event) error {
+    if events == nil || len(*events) == 0 {
+        log.WithField("type", AssetActionType).Error("No events")
+        return nil
+    }
+
+    cl := log.WithFields(log.Fields{"type": AssetActionType, "event_count": len(*events)})
+    cl.Debug("Start action")
+    defer cl.Debug("End action")
+
+    var err error
+
+    // Check if we need to prepare templates
+    if action.templates.name == nil {
+        action.templates.name, err = template.New("Name").Parse(action.Name)
+        if err != nil {
+            cl.Errorf("Can't create name template: %v", err)
+            return err
+        }
+
+        action.templates.description, err = template.New("Description").Parse(action.Description)
+        if err != nil {
+            cl.Errorf("Can't create description template: %v", err)
+            return err
+        }
+
+        action.templates.ip, err = template.New("Ip").Parse(action.Ip)
+        if err != nil {
+            cl.Errorf("Can't create ip template: %v", err)
+            return err
+        }
+
+        action.templates.model, err = template.New("Model").Parse(action.Model)
+        if err != nil {
+            cl.Errorf("Can't create model template: %v", err)
+            return err
+        }
+
+        action.templates.ports, err = template.New("Ports").Parse(action.Ports)
+        if err != nil {
+            cl.Errorf("Can't create ports template: %v", err)
+            return err
+        }
+    }
+
+    // Check if we need to prepare http client
+    if action.client == nil {
+        if viper.GetBool(config.ConsoleIgnoreSSLErrors) {
+            transport := &http.Transport{
+                TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+            }
+            action.client = &http.Client{Transport: transport}
+        } else {
+            action.client = &http.Client{}
+        }
+    }
+
+    // Get auth token
+    if action.token == "" {
+        action.token, err = ObtainAuthToken()
+        if err != nil {
+            cl.Errorf("Can't get auth token: %v", err)
+            return err
+        }
+    }
+
+    // Actual action
+    for _, event := range *events {
+
+        // Prepare body
+        interfaceBody, err := action.ToInterface()
+        if err != nil {
+            return err
+        }
+
+        // Render templates
+        if _, ok := interfaceBody["name"]; ok {
+            interfaceBody["name"], err = renderTemplate(action.templates.name, event)
+            if err != nil {
+                return err
+            }
+        }
+
+        if _, ok := interfaceBody["description"]; ok {
+            interfaceBody["description"], err = renderTemplate(action.templates.description, event)
+            if err != nil {
+                return err
+            }
+        }
+
+        if _, ok := interfaceBody["ip"]; ok {
+            interfaceBody["ip"], err = renderTemplate(action.templates.ip, event)
+            if err != nil {
+                return err
+            }
+        }
+
+        if _, ok := interfaceBody["model"]; ok {
+            interfaceBody["model"], err = renderTemplate(action.templates.model, event)
+            if err != nil {
+                return err
+            }
+        }
+
+        if _, ok := interfaceBody["ports"]; ok {
+            interfaceBody["ports"], err = renderTemplate(action.templates.ports, event)
+            if err != nil {
+                return err
+            }
+        }
+
+        // Add type to connect asset with sensor
+        interfaceBody["sensor"] = event.GetString("type")
+
+        // Send request
+        jsonBody, err := json.Marshal(interfaceBody)
+        if err != nil {
+            cl.Errorf("Can't serialize body: %v", err)
+            return err
+        }
+
+        cl.Debugf("Sending request: %v", string(jsonBody))
+
+        request, err := 
http.NewRequest("POST", viper.GetString(config.ConsoleUrlAsset), bytes.NewBuffer(jsonBody)) + if err != nil { + cl.Errorf("Can't create request: %v", err) + return err + } + + // Set headers + request.Header.Set("Authorization", fmt.Sprintf("Token %v", action.token)) + request.Header.Set("Content-type", "application/json") + + // Do request + response, err := action.client.Do(request) + if err != nil { + cl.Errorf("Can't send request: %v", err) + return err + } + + // Check result + body, err := ioutil.ReadAll(response.Body) + if err != nil { + cl.Errorf("Can't read response body: %v", err) + return err + } + err = response.Body.Close() + if err != nil { + cl.Warningf("%v", err) + } + + cl.Debugf("Got response: %v", string(body)) + + if response.StatusCode != http.StatusCreated { + cl.Errorf("Bad status code: %v", response.Status) + return fmt.Errorf("bad server response code: %v", response.Status) + } + } + + return nil + +} diff --git a/correlator/rules/action_asset_test.go b/correlator/rules/action_asset_test.go new file mode 100644 index 0000000..be97d99 --- /dev/null +++ b/correlator/rules/action_asset_test.go @@ -0,0 +1,202 @@ +package rules + +import ( + "encoding/json" + "testing" +) + +func TestAssetActionType(t *testing.T) { + var action AssetAction + + if action.GetType() != AssetActionType { + t.Errorf("Got bad type: %v", action.GetType()) + } +} + +func TestAssetActionToInterface(t *testing.T) { + action := AssetAction{ + Name: "Test1", + Description: "Test2", + Manufacturer: "Test3", + Model: "Test4", + Ip: "Test5", + Os: "Test6", + Ports: "Test7", + Vulnerabilities: make([]string, 1), + Group: "Test9", + AssetType: "Test10", + Status: "Test11", + } + action.Vulnerabilities[0] = "8" + + m, err := action.ToInterface() + if err != nil { + t.Errorf("Can't create interface from action: %v", err) + } + + testMap(m, "name", "Test1", t) + testMap(m, "description", "Test2", t) + testMap(m, "manufacturer", "Test3", t) + testMap(m, "model", "Test4", t) + testMap(m, "ip", "Test5", t) + testMap(m, "os", "Test6", t) + testMap(m, "ports", "Test7", t) + + v, ok := m["vulnerabilities"].([]interface{}) + if !ok { + t.Errorf("Bad vulnerabilities type: %T", m["vulnerabilities"]) + } + + if len(v) != 1 { + t.Errorf("Bad vulnerabilities len: %v", len(v)) + } + + if v[0].(int) != 8 { + t.Errorf("Bad vulnerabilities value: %v", v[0]) + } + + testMap(m, "group", "Test9", t) + testMap(m, "asset_type", "Test10", t) + testMap(m, "status", "Test11", t) +} + +func TestAssetActionMarshal(t *testing.T) { + action := AssetAction{ + Name: "Test1", + Description: "Test2", + Manufacturer: "Test3", + Model: "Test4", + Ip: "Test5", + Os: "Test6", + Ports: "Test7", + Vulnerabilities: make([]string, 1), + Group: "Test9", + AssetType: "Test10", + Status: "Test11", + } + action.Vulnerabilities[0] = "8" + + data, err := json.Marshal(action) + if err != nil { + t.Errorf("Can't marshall action: %v", err) + } + + if string(data) != `{"asset_type":"Test10","description":"Test2","group":"Test9","ip":"Test5","manufacturer":"Test3","model":"Test4","name":"Test1","os":"Test6","ports":"Test7","status":"Test11","type":"asset","vulnerabilities":[8]}` { + t.Errorf("Got bad json: %v", string(data)) + } +} + +func TestAssetActionUnmarshall(t *testing.T) { + str := `{"asset_type":"Test10","description":"Test2","group":"Test9","ip":"Test5","manufacturer":"Test3","model":"Test4","name":"Test1","os":"Test6","ports":"Test7","status":"Test11","type":"asset","vulnerabilities":[8]}` + var action AssetAction + + err := 
json.Unmarshal([]byte(str), &action) + if err != nil { + t.Errorf("Can't unmarshall action: %v", err) + } + + if action.Name != "Test1" { + t.Errorf("Got bad Name: %v", action.Name) + } + + if action.Description != "Test2" { + t.Errorf("Got bad Description: %v", action.Description) + } + + if action.Manufacturer != "Test3" { + t.Errorf("Got bad Manufacturer: %v", action.Manufacturer) + } + + if action.Model != "Test4" { + t.Errorf("Got bad Model: %v", action.Model) + } + + if action.Ip != "Test5" { + t.Errorf("Got bad Ip: %v", action.Ip) + } + + if action.Os != "Test6" { + t.Errorf("Got bad Os: %v", action.Os) + } + + if action.Ports != "Test7" { + t.Errorf("Got bad Ports: %v", action.Ports) + } + + if len(action.Vulnerabilities) != 1 { + t.Errorf("Got bad vulnerabilities len: %v", len(action.Vulnerabilities)) + } + + if action.Vulnerabilities[0] != "8" { + t.Errorf("Got bad Vulnerabilities: %v", action.Vulnerabilities) + } + + if action.Group != "Test9" { + t.Errorf("Got bad Group: %v", action.Group) + } + + if action.AssetType != "Test10" { + t.Errorf("Got bad AssetType: %v", action.AssetType) + } + + if action.Status != "Test11" { + t.Errorf("Got bad Status: %v", action.Status) + } +} + +func TestAssetActionParseInterfaceBad1(t *testing.T) { + var data interface{} + action := &AssetAction{} + + err := action.ParseInterface(data) + if err == nil { + t.Errorf("Can parse bad interface") + } +} + +func TestAssetActionParseInterfaceBad2(t *testing.T) { + data := make(map[string]interface{}) + action := &AssetAction{} + + err := action.ParseInterface(data) + if err == nil { + t.Errorf("Can parse bad interface") + } +} + +func TestAssetActionParseInterfaceBad3(t *testing.T) { + data := make(map[string]interface{}) + action := &AssetAction{} + + data["type"] = "ass" + + err := action.ParseInterface(data) + if err == nil { + t.Errorf("Can parse bad interface") + } +} + +func TestAssetActionParseInterfaceBad4(t *testing.T) { + data := make(map[string]interface{}) + action := &AssetAction{} + + data["type"] = AssetActionType + + err := action.ParseInterface(data) + if err == nil { + t.Errorf("Can parse bad interface") + } +} + +func TestAssetActionParseInterfaceBad5(t *testing.T) { + data := make(map[string]interface{}) + action := &AssetAction{} + + data["type"] = AssetActionType + data["name"] = "ASS" + + err := action.ParseInterface(data) + if err == nil { + t.Errorf("Can parse bad interface") + } +} diff --git a/correlator/rules/action_bash.go b/correlator/rules/action_bash.go new file mode 100644 index 0000000..9c911c0 --- /dev/null +++ b/correlator/rules/action_bash.go @@ -0,0 +1,198 @@ +package rules + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "iwarma.ru/console/correlator/events" + "os" + "os/exec" + "regexp" + "strings" + "text/template" + + "iwarma.ru/console/correlator/config" + + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" +) + +const ( + BashActionType = "bash" +) + +type BashAction struct { + Body string `json:"body"` + + bodyTemplate *template.Template +} + +func (action *BashAction) GetType() string { + return BashActionType +} + +func (action BashAction) ToInterface() (map[string]interface{}, error) { + result := make(map[string]interface{}) + + result["body"] = action.Body + result["type"] = BashActionType + + return result, nil +} + +func (action BashAction) MarshalJSON() ([]byte, error) { + data, err := action.ToInterface() + if err != nil { + return nil, err + } + + return json.Marshal(data) +} + +func (action *BashAction) 
UnmarshalJSON(b []byte) error {
+    cur := struct {
+        CurType string `json:"type"`
+        Body    string `json:"body"`
+    }{}
+
+    err := json.Unmarshal(b, &cur)
+    if err != nil {
+        return err
+    }
+
+    if cur.CurType != BashActionType {
+        return fmt.Errorf("bad action type, expect %v, got %v", BashActionType, cur.CurType)
+    }
+
+    action.Body = cur.Body
+
+    return nil
+}
+
+func (action *BashAction) ParseInterface(v interface{}) error {
+    m, ok := v.(map[string]interface{})
+    if !ok {
+        return fmt.Errorf("can't parse %v from %T", v, v)
+    }
+
+    body, ok := m["body"].(string)
+    if !ok {
+        return errors.New("no body")
+    }
+
+    t, ok := m["type"].(string)
+    if !ok {
+        return errors.New("no type")
+    }
+
+    if t != BashActionType {
+        return fmt.Errorf("bad type, expect %v got %v", BashActionType, t)
+    }
+
+    action.Body = body
+
+    return nil
+}
+
+// See https://stackoverflow.com/a/52594719
+func removeLBR(text string) string {
+    re := regexp.MustCompile(`\x{000D}\x{000A}|[\x{000A}\x{000B}\x{000C}\x{000D}\x{0085}\x{2028}\x{2029}]`)
+    return re.ReplaceAllString(text, "\n")
+}
+
+func (action *BashAction) Perform(events *[]*events.Event) error {
+    cl := log.WithFields(log.Fields{"type": BashActionType, "event_count": len(*events)})
+    cl.Debug("Start action")
+    defer cl.Debug("End action")
+
+    var err error
+
+    // Check if we need to prepare template
+    if action.bodyTemplate == nil {
+        text := strings.Replace(action.Body, `\r\n`, "\n", -1)
+        text = removeLBR(text)
+
+        action.bodyTemplate, err = template.New("Body").Parse(text)
+        if err != nil {
+            cl.Errorf("Can't create body template: %v", err)
+            return err
+        }
+    }
+
+    // Actual action
+    for _, event := range *events {
+
+        // Render script body
+        body, err := renderTemplate(action.bodyTemplate, event)
+        if err != nil {
+            cl.Errorf("Can't render script body: %v", err)
+            return err
+        }
+
+        // Create temp file
+        file, err := ioutil.TempFile("", "action.*.sh")
+        if err != nil {
+            cl.Errorf("Can't create script file: %v", err)
+            return err
+        }
+
+        // Remove temp script
+        defer func() {
+            err = os.Remove(file.Name())
+            if err != nil {
+                cl.Errorf("Can't remove temp script: %v", err)
+            }
+        }()
+
+        count, err := file.Write([]byte(body))
+        if err != nil {
+            cl.Errorf("Can't write script body: %v", err)
+            return err
+        }
+
+        if count != len([]byte(body)) {
+            cl.Errorf("Wrote bad byte count, expect %v, got %v", len([]byte(body)), count)
+        }
+
+        err = file.Close()
+        if err != nil {
+            cl.Errorf("Can't close temp script: %v", err)
+        }
+
+        err = os.Chmod(file.Name(), 0775)
+        if err != nil {
+            cl.Errorf("Can't set script permissions: %v", err)
+            return err
+        }
+
+        if viper.GetBool(config.DebugDumpRequest) {
+            info, err := os.Stat(file.Name())
+            if err != nil {
+                cl.Errorf("Can't stat script %v: %v", file.Name(), err)
+            } else {
+                cl.Debugf("File mode: %v, size: %v", info.Mode(), info.Size())
+            }
+        }
+
+        var buf bytes.Buffer
+
+        cmd := exec.Command(file.Name())
+        cmd.Stdout = &buf
+        cmd.Stderr = &buf
+
+        // TODO: Maybe set to goroutine
+        err = cmd.Run()
+        if err != nil {
+            cl.Errorf("Script execution error: %v", err)
+            return err
+        }
+
+        if viper.GetBool(config.Verbose) {
+            cl.Infof("Script output:\n%v", buf.String())
+        }
+    }
+
+    return nil
+}
diff --git a/correlator/rules/action_bash_test.go b/correlator/rules/action_bash_test.go
new file mode 100644
index 0000000..216a71e
--- /dev/null
+++ b/correlator/rules/action_bash_test.go
@@ -0,0 +1,212 @@
+package rules
+
+import (
+    "encoding/json"
+    "fmt"
+    "io/ioutil"
+    "iwarma.ru/console/correlator/events"
+    "os"
+    
"testing" + "time" +) + +func TestBashActionType(t *testing.T) { + var action BashAction + + if action.GetType() != BashActionType { + t.Errorf("Bad action type: %v", action.GetType()) + } +} + +func TestBashToInterface(t *testing.T) { + action := BashAction{Body: "ASSS"} + + i, err := action.ToInterface() + if err != nil { + t.Errorf("Can't serialize: %v", err) + } + + m, ok := i["body"] + if !ok { + t.Errorf("No body") + } + + if m != "ASSS" { + t.Errorf("Bad body value: %v", err) + } + +} + +func TestBashActionSerialize(t *testing.T) { + action := BashAction{Body: "ASSS"} + + b, err := json.Marshal(action) + if err != nil { + t.Errorf("Can't serialize action: %v", err) + } + + if string(b) != `{"body":"ASSS","type":"bash"}` { + t.Errorf("Bad serialized action: %v", string(b)) + } +} + +func TestBashActionDeserialize(t *testing.T) { + + var action BashAction + + err := json.Unmarshal([]byte(`{"body":"ASSS","type":"bash"}`), &action) + if err != nil { + t.Errorf("Can't read action: %v", err) + } + + if action.Body != "ASSS" { + t.Errorf("Bad action body: %v", action.Body) + } +} + +func TestBashActionDesirealizeError(t *testing.T) { + var action BashAction + + err := json.Unmarshal([]byte(`{"body":"ASSS","type":"zzzz"}`), &action) + if err == nil { + t.Error("No error with bad action") + } +} + +func TestBashActionParseInterface(t *testing.T) { + i := make(map[string]interface{}) + i["body"] = "ASS" + i["type"] = BashActionType + + action := &BashAction{} + + err := action.ParseInterface(i) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } + + if action.Body != "ASS" { + t.Errorf("Bad action body: %v", action.Body) + } +} + +func TestBashActionParseInterfaceError1(t *testing.T) { + var i interface{} + + action := &BashAction{} + + err := action.ParseInterface(i) + if err == nil { + t.Error("Can parse wrong interface type") + } +} + +func TestBashActionParseInterfaceError2(t *testing.T) { + i := make(map[string]interface{}) + + action := &BashAction{} + + err := action.ParseInterface(i) + if err == nil { + t.Error("Can parse wrong interface without body") + } +} + +func TestBashActionParseInterfaceError3(t *testing.T) { + i := make(map[string]interface{}) + i["body"] = "ASS" + + action := &BashAction{} + + err := action.ParseInterface(i) + if err == nil { + t.Error("Can parse wrong interface without type") + } +} + +func TestBashActionParseInterfaceError4(t *testing.T) { + i := make(map[string]interface{}) + i["body"] = "ASS" + i["type"] = "ASS" + + action := &BashAction{} + + err := action.ParseInterface(i) + if err == nil { + t.Error("Can parse wrong interface with bad type") + } +} + +func TestBashActionPerform(t *testing.T) { + + tmpFile, err := ioutil.TempFile("", "test.*.test") + if err != nil { + t.Errorf("Can't create temp file: %v", err) + return + } + err = tmpFile.Close() + if err != nil { + t.Errorf("Can't close temp file: %v", err) + } + defer os.Remove(tmpFile.Name()) + + action := &BashAction{Body: `#!/bin/bash +NEW_UUID=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) +echo "$NEW_UUID" > {{.app_name}}`} + eventsArray := make([]*events.Event, 1) + eventsArray[0] = &events.Event{ + events.FirstEvent: time.Now(), + events.LastEvent: time.Now(), + events.EventCount: 5, + "event_severity": 5, + "app_name": tmpFile.Name(), + } + + err = action.Perform(&eventsArray) + if err != nil { + t.Errorf("Can't perform action: %v", err) + } + + // Check script result + file, err := os.Open(tmpFile.Name()) + if err != nil { + t.Errorf("Can't open result 
file: %v", err) + } + + buf, err := ioutil.ReadAll(file) + if err != nil { + t.Errorf("Can't open result file: %v", err) + } + + if len(string(buf)) == 0 { + t.Error("Got empty result file") + } + + t.Logf("Got result file content: \"%v\"", string(buf)) +} + +func TestBashActionPerform2(t *testing.T) { + cur := time.Now().Unix() + targetFile := fmt.Sprintf("/tmp/%v.txt", cur) + fileContent := fmt.Sprintf(`#!/bin/bash\r\n\r\n touch /tmp/%v.txt`, cur) + action := &BashAction{Body: fileContent} + + eventsArray := make([]*events.Event, 1) + eventsArray[0] = &events.Event{ + events.FirstEvent: time.Now(), + events.LastEvent: time.Now(), + events.EventCount: 5, + "event_severity": 5, + "app_name": "ASS", + } + + err := action.Perform(&eventsArray) + if err != nil { + t.Errorf("Can't perform action: %v", err) + } + + _, err = os.Stat(targetFile) + if err != nil { + t.Errorf("Can't get target file stat: %v", err) + } +} diff --git a/correlator/rules/action_exec.go b/correlator/rules/action_exec.go new file mode 100644 index 0000000..38a699a --- /dev/null +++ b/correlator/rules/action_exec.go @@ -0,0 +1,234 @@ +package rules + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "iwarma.ru/console/correlator/events" + "os/exec" + "strings" + "text/template" + + log "github.com/sirupsen/logrus" +) + +const ( + ExecActionType = "exec" +) + +type ExecAction struct { + Path string + Args string + Env string + Cwd string + + templates struct { + path *template.Template + args *template.Template + env *template.Template + cwd *template.Template + } +} + +func (action *ExecAction) GetType() string { + return ExecActionType +} + +func (action ExecAction) ToInterface() (map[string]interface{}, error) { + result := make(map[string]interface{}) + + result["type"] = ExecActionType + result["path"] = action.Path + result["args"] = action.Args + result["env"] = action.Env + result["cwd"] = action.Cwd + + return result, nil +} + +func (action ExecAction) MarshalJSON() ([]byte, error) { + data, err := action.ToInterface() + if err != nil { + return nil, err + } + + return json.Marshal(data) +} + +func (action *ExecAction) UnmarshalJSON(b []byte) error { + cur := struct { + CurType string `json:"type"` + Path string `json:"path"` + Args string `json:"args"` + Env string `json:"env"` + Cwd string `json:"cwd"` + }{} + + err := json.Unmarshal(b, &cur) + if err != nil { + return err + } + + if cur.CurType != ExecActionType { + return fmt.Errorf("bad action type, Expect %v, got %v", ExecActionType, cur.CurType) + } + + action.Path = cur.Path + action.Args = cur.Args + action.Env = cur.Env + action.Cwd = cur.Cwd + + return nil +} + +func (action *ExecAction) ParseInterface(v interface{}) error { + m, ok := v.(map[string]interface{}) + if !ok { + return fmt.Errorf("can't parse %v from %T", v, v) + } + + t, ok := m["type"].(string) + if !ok { + return errors.New("no type") + } + + if t != ExecActionType { + return fmt.Errorf("bad type, expect %v got %v", ExecActionType, t) + } + + path, ok := m["path"].(string) + if !ok { + return errors.New("no path") + } + action.Path = path + + // Args isn't not required + args, ok := m["args"].(string) + if ok { + action.Args = args + } + + // Env isn't required + env, ok := m["env"].(string) + if ok { + action.Env = env + } + + // Cwd isn't required + cwd, ok := m["cwd"].(string) + if ok { + action.Cwd = cwd + } + + return nil +} + +func (action *ExecAction) Perform(events *[]*events.Event) error { + cl := log.WithFields(log.Fields{"type": ExecActionType, "event_count": 
len(*events)}) + cl.Debug("Start action") + defer cl.Debug("End action") + + var err error + + // Check if we need to prepare templates + if action.templates.path == nil { + action.templates.path, err = template.New("Path").Parse(action.Path) + if err != nil { + cl.Errorf("Can't create path template: %v", err) + return err + } + + action.templates.args, err = template.New("Args").Parse(action.Args) + if err != nil { + cl.Errorf("Can't create args template: %v", err) + return err + } + + action.templates.env, err = template.New("Env").Parse(action.Env) + if err != nil { + cl.Errorf("Can't create env template: %v", err) + return err + } + + action.templates.cwd, err = template.New("Cwd").Parse(action.Cwd) + if err != nil { + cl.Errorf("Can't create cwd template: %v", err) + return err + } + } + + // Actual action + for _, event := range *events { + + // Render templates + path, err := renderTemplate(action.templates.path, event) + if err != nil { + cl.Errorf("Can't render path: %v", err) + return err + } + + args, err := renderTemplate(action.templates.args, event) + if err != nil { + cl.Errorf("Can't render args: %v", err) + return err + } + + env, err := renderTemplate(action.templates.env, event) + if err != nil { + cl.Errorf("Can't render env: %v", err) + return err + } + + cwd, err := renderTemplate(action.templates.cwd, event) + if err != nil { + cl.Errorf("Can't render cwd: %v", err) + return err + } + + // Prepare command + var buf bytes.Buffer + + // argv[0] is the executable path itself; the rendered args follow it. + fullArgs := append([]string{path}, strings.Split(args, " ")...) + + cmd := &exec.Cmd{ + Path: path, + Args: fullArgs, + Env: strings.Split(env, " "), + Dir: cwd, + Stdout: &buf, + Stderr: &buf, + } + + debLog := log.WithFields(log.Fields{ + "type": ExecActionType, "event": event.GetString("event_id"), + "path": cmd.Path, "args": cmd.Args, + "env": cmd.Env, "cwd": cmd.Dir, + }) + + debLog.Debug("Starting command") + + // TODO: Maybe set to goroutine + err = cmd.Start() + if err != nil { + cl.Errorf("Script execution error: %v", err) + debLog.Debugf("Executable output:\n%v", buf.String()) + return err + } + + err = cmd.Wait() + if err != nil { + cl.Errorf("Error while waiting for script to finish: %v", err) + debLog.Debugf("Executable output:\n%v", buf.String()) + return err + } + + debLog.Debugf("Executable output:\n%v", buf.String()) + } + + return nil +} diff --git a/correlator/rules/action_exec_test.go b/correlator/rules/action_exec_test.go new file mode 100644 index 0000000..6b64cb1 --- /dev/null +++ b/correlator/rules/action_exec_test.go @@ -0,0 +1,307 @@ +package rules + +import ( + "bytes" + "encoding/json" + "io/ioutil" + "iwarma.ru/console/correlator/events" + "os" + "path/filepath" + "strings" + "testing" + "time" + + "github.com/spf13/viper" + + log "github.com/sirupsen/logrus" + "iwarma.ru/console/correlator/config" +) + +func TestExecActionGetType(t *testing.T) { + var action ExecAction + + if action.GetType() != ExecActionType { + t.Errorf("Got bad action type: %v", action.GetType()) + } +} + +func TestExecActionToInterface(t *testing.T) { + action := ExecAction{ + Path: "/usr/bin/bash", + Args: "-c \"go version\"", + Env: "DEBUG=1 VERBOSE=1", + Cwd: "/usr/bin", + } + + m, err := action.ToInterface() + if err != nil { + t.Errorf("Can't create interface: %v", err) + } + + action_type, ok := m["type"].(string) + if !ok { + t.Error("Can't get type") + } + + if action_type != ExecActionType { + t.Errorf("Got bad type: %v", action_type) + } + + action_path, ok := 
m["path"].(string) + if !ok { + t.Error("Can't get path") + } + + if action_path != action.Path { + t.Errorf("Got bad path: %v", action_path) + } + + action_args, ok := m["args"].(string) + if !ok { + t.Error("Can't get args") + } + + if action_args != action.Args { + t.Errorf("Got bad args: %v", action_args) + } + + action_env, ok := m["env"].(string) + if !ok { + t.Error("Can't get env") + } + + if action_env != action.Env { + t.Errorf("Bad action env: %v", action_env) + } + + action_cwd, ok := m["cwd"].(string) + if !ok { + t.Error("Can't get cwd") + } + + if action_cwd != action.Cwd { + t.Errorf("Bad action cwd: %v", action_cwd) + } +} + +func TestExecActionMarshall(t *testing.T) { + action := ExecAction{ + Path: "/usr/bin/bash", + Args: "-c \"go version\"", + Env: "DEBUG=1 VERBOSE=1", + Cwd: "/usr/bin", + } + + b, err := json.Marshal(action) + if err != nil { + t.Errorf("Can't serialize action: %v", err) + } + + if string(b) != `{"args":"-c \"go version\"","cwd":"/usr/bin","env":"DEBUG=1 VERBOSE=1","path":"/usr/bin/bash","type":"exec"}` { + t.Errorf("Bad action serializer result: %v", string(b)) + } +} + +func TestExecActionUnmarshall(t *testing.T) { + str := `{"args":"-c \"go version\"","cwd":"/usr/bin","env":"DEBUG=1 VERBOSE=1","path":"/usr/bin/bash","type":"exec"}` + + var action ExecAction + + err := json.Unmarshal([]byte(str), &action) + if err != nil { + t.Errorf("Can't unmarshall action: %v", err) + } + + if action.Path != "/usr/bin/bash" { + t.Errorf("Got bad path: %v", action.Path) + } + + if action.Args != "-c \"go version\"" { + t.Errorf("Got bad args: %v", action.Args) + } + + if action.Env != "DEBUG=1 VERBOSE=1" { + t.Errorf("Got bad env: %v", action.Env) + } + + if action.Cwd != "/usr/bin" { + t.Errorf("Got bad cwd: %v", action.Cwd) + } +} + +func TestExecActionUnmarshallBad(t *testing.T) { + str := `{"args":"-c \"go version\"","cwd":"/usr/bin","env":"DEBUG=1 VERBOSE=1","path":"/usr/bin/bash","type":"bash"}` + + var action ExecAction + + err := json.Unmarshal([]byte(str), &action) + if err == nil { + t.Error("Can unmarshall bad json") + } +} + +func TestExecParseInterface1(t *testing.T) { + s := "Test" + + action := &ExecAction{} + err := action.ParseInterface(s) + if err == nil { + t.Error("Can process bad interface") + } +} + +func TestExecParseInterface2(t *testing.T) { + m := make(map[string]interface{}) + + m["type"] = 3 + + action := &ExecAction{} + err := action.ParseInterface(m) + if err == nil { + t.Error("Can process bad type") + } +} + +func TestExecParseInterface3(t *testing.T) { + m := make(map[string]interface{}) + + action := &ExecAction{} + err := action.ParseInterface(m) + if err == nil { + t.Error("Can process bad type") + } +} + +func TestExecParseInterface4(t *testing.T) { + m := make(map[string]interface{}) + + m["type"] = "bash" + + action := &ExecAction{} + err := action.ParseInterface(m) + if err == nil { + t.Error("Can process bad type") + } +} + +func TestExecParseInterface5(t *testing.T) { + m := make(map[string]interface{}) + + m["type"] = ExecActionType + + action := &ExecAction{} + err := action.ParseInterface(m) + if err == nil { + t.Error("Can process bad type") + } +} + +func TestExecParseInterface6(t *testing.T) { + m := make(map[string]interface{}) + + m["type"] = ExecActionType + m["path"] = 15 + + action := &ExecAction{} + err := action.ParseInterface(m) + if err == nil { + t.Error("Can process bad type") + } +} + +func TestExecParseInterface7(t *testing.T) { + m := make(map[string]interface{}) + + m["type"] = ExecActionType + 
m["path"] = "/usr/bin" + m["args"] = "version" + m["env"] = "DEBUG" + m["cwd"] = "/usr" + + action := &ExecAction{} + err := action.ParseInterface(m) + if err != nil { + t.Error("Can't process good interface") + } + + if action.Path != m["path"] { + t.Errorf("Got bad path: %v", action.Path) + } + + if action.Args != m["args"] { + t.Errorf("Got bad args: %v", action.Args) + } + + if action.Env != m["env"] { + t.Errorf("Got bad env: %v", action.Env) + } + + if action.Cwd != m["cwd"] { + t.Errorf("Got bad cwd: %v", action.Cwd) + } +} + +func TestExecActionPerform(t *testing.T) { + file, err := ioutil.TempFile("", "test_action.*.sh") + if err != nil { + t.Error(err) + return + } + defer os.Remove(file.Name()) + + script := `#!/bin/bash + echo "$@ $T_1 $(pwd)"` + + _, err = file.Write([]byte(script)) + if err != nil { + t.Error(err) + } + + err = file.Sync() + if err != nil { + t.Error(err) + } + + err = file.Close() + if err != nil { + t.Error(err) + } + + err = os.Chmod(file.Name(), 0775) + if err != nil { + t.Error(err) + } + + path := filepath.Dir(file.Name()) + + action := ExecAction{ + Path: file.Name(), + Args: "AAA BBB", + Env: "T_1=RRR", + Cwd: path, + } + + eventsArray := make([]*events.Event, 1) + eventsArray[0] = &events.Event{ + events.FirstEvent: time.Now(), + events.LastEvent: time.Now(), + events.EventCount: 5, + "event_severity": 5, + "sign_name": file.Name(), + } + + var buf bytes.Buffer + log.SetOutput(&buf) + log.SetLevel(log.DebugLevel) + + viper.Set(config.Verbose, true) + err = action.Perform(&eventsArray) + if err != nil { + t.Error(err) + } + + if !strings.Contains(buf.String(), `Executabe output:\nAAA BBB RRR /tmp\n`) { + t.Errorf("Bad executable output: %v", buf.String()) + } + log.SetLevel(log.ErrorLevel) +} diff --git a/correlator/rules/action_firewall.go b/correlator/rules/action_firewall.go new file mode 100644 index 0000000..b68cedc --- /dev/null +++ b/correlator/rules/action_firewall.go @@ -0,0 +1,568 @@ +package rules + +import ( + "bytes" + "crypto/tls" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "iwarma.ru/console/correlator/events" + "net/http" + "strconv" + "strings" + "text/template" + "unicode" + + "iwarma.ru/console/correlator/config" + + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" +) + +const ( + FirewallActionType = "firewall" +) + +type ArmaBool bool + +func (b ArmaBool) ToInterface() interface{} { + if b == true { + return 1 + } else { + return 0 + } +} + +func (b *ArmaBool) ParseInterface(v interface{}) error { + switch value := v.(type) { + case int: + if value >= 1 { + *b = true + } else { + *b = false + } + case bool: + *b = ArmaBool(value) + case nil: + return errors.New("no value") + default: + return fmt.Errorf("can't parse type: %T with value %v", value, value) + } + + return nil +} + +type FirewallAction struct { + Enabled ArmaBool + Action string + Quick ArmaBool + Interface string + Direction string + IPProtocol string + Protocol string + SourceNet string + SourceNot ArmaBool + SourcePort string + DestinationNet string + DestinationNot ArmaBool + DestinationPort string + Log ArmaBool + Description string + Gateway string + Sequence string + + // Where to send request + url struct { + ip string + scheme string + key string + secret string + } + + templates struct { + sourceNet *template.Template + sourcePort *template.Template + destinationNet *template.Template + destinationPort *template.Template + } + + client *http.Client +} + +const ( + responseSaved = "saved" +) + +func (action *FirewallAction) GetType() 
string { + return FirewallActionType +} + +func (action FirewallAction) ToInterface() (map[string]interface{}, error) { + result := make(map[string]interface{}) + + result["type"] = FirewallActionType + result["enabled"] = action.Enabled.ToInterface() + result["action"] = action.Action + result["quick"] = action.Quick.ToInterface() + result["interface"] = action.Interface + result["interface"] = fmt.Sprintf("[%v]", action.Interface) + result["description"] = action.Description + result["direction"] = action.Direction + result["ipprotocol"] = action.IPProtocol + result["protocol"] = action.Protocol + result["source_net"] = action.SourceNet + result["source_not"] = action.SourceNot.ToInterface() + result["source_port"] = action.SourcePort + result["destination_net"] = action.DestinationNet + result["destination_port"] = action.DestinationPort + result["destination_not"] = action.DestinationNot.ToInterface() + result["log"] = action.Log.ToInterface() + result["gateway"] = action.Gateway + result["sequence"] = action.Sequence + + url := make(map[string]interface{}) + url["ip"] = action.url.ip + url["key"] = action.url.key + url["scheme"] = action.url.scheme + url["secret"] = action.url.secret + + result["sensor"] = url + + return result, nil +} + +func (action *FirewallAction) ParseInterface(v interface{}) error { + m, ok := v.(map[string]interface{}) + if !ok { + return fmt.Errorf("can't parse %v from %T", v, v) + } + + t, ok := m["type"].(string) + if !ok { + return errors.New("no type") + } + + if t != FirewallActionType { + return fmt.Errorf("bad type, expect %v got %v", ExecActionType, t) + } + + err := action.Enabled.ParseInterface(m["enabled"]) + if err != nil { + return fmt.Errorf("can't parse \"enabled\" option: %v", err) + } + + action.Action, ok = m["action"].(string) + if !ok { + return errors.New("no action") + } + + if _, ok = m["quick"]; ok { + err = action.Quick.ParseInterface(m["quick"]) + if err != nil { + return fmt.Errorf("can't parse \"quick\" option: %v", err) + } + } + + if value, ok := m["interface"]; ok { + switch v := value.(type) { + case string: + action.Interface = v + case []interface{}: + arr := make([]string, len(v)) + for i := range v { + arr[i] = fmt.Sprintf("%v", v[i]) + } + + action.Interface = strings.Join(arr, ",") + case []string: + action.Interface = strings.Join(v, ",") + default: + return fmt.Errorf("bad interface type: %T (%v)", v, v) + } + } else { + return errors.New("no interface field") + } + + action.IPProtocol, ok = m["ipprotocol"].(string) + if !ok { + return errors.New("no IPProtocol") + } + + action.Protocol, ok = m["protocol"].(string) + if !ok { + return errors.New("no protocol") + } + + action.Direction, ok = m["direction"].(string) + if !ok { + return errors.New("no direction") + } + + action.SourceNet, ok = m["source_net"].(string) + if !ok { + return errors.New("no source net") + } + + if _, ok = m["source_not"]; ok { + err = action.SourceNot.ParseInterface(m["source_not"]) + if err != nil { + return fmt.Errorf("can't parse \"source_not\" option: %v", err) + } + } + + action.DestinationNet, ok = m["destination_net"].(string) + if !ok { + return errors.New("no destination net") + } + + if _, ok = m["destination_not"]; ok { + err = action.DestinationNot.ParseInterface(m["destination_not"]) + if err != nil { + return fmt.Errorf("can't parse \"destination_not\" option: %v", err) + } + } + + if _, ok = m["log"]; ok { + err = action.Log.ParseInterface(m["log"]) + if err != nil { + return fmt.Errorf("can't parse \"log\" option: %v", err) + } + 
} + + if _, ok = m["sequence"]; ok { + switch v := m["sequence"].(type) { + case string: + action.Sequence = v + case int: + action.Sequence = fmt.Sprintf("%v", v) + default: + return fmt.Errorf("can't parse type %T with value of %v", m["sequence"], m["sequence"]) + } + } + + if urlInterface, ok := m["sensor"]; ok { + url, ok := urlInterface.(map[string]interface{}) + if !ok { + return errors.New("can't convert sensor value") + } + + action.url.ip, ok = url["ip"].(string) + if !ok { + return errors.New("no Firewall target IP set") + } + + action.url.key, ok = url["key"].(string) + if !ok { + return errors.New("no Firewall target key set") + } + + action.url.scheme, ok = url["scheme"].(string) + if !ok { + return errors.New("no Firewall target scheme set") + } + + action.url.secret, ok = url["secret"].(string) + if !ok { + return errors.New("no Firewall target secret set") + } + } else { + return errors.New("no sensor field") + } + + // Not required + action.Description, _ = m["description"].(string) + action.SourcePort, _ = m["source_port"].(string) + action.DestinationPort, _ = m["destination_port"].(string) + action.Gateway, _ = m["gateway"].(string) + + return nil +} + +// Create rule structure for ARMAIF +func (action FirewallAction) toRule() (map[string]interface{}, map[string]interface{}, error) { + res := make(map[string]interface{}) + var err error + + res["enabled"] = action.Enabled.ToInterface() + res["sequence"], err = strconv.Atoi(action.Sequence) + if err != nil { + return nil, nil, err + } + + res["action"] = action.Action + res["quick"] = action.Quick.ToInterface() + res["interface"] = action.Interface + res["direction"] = action.Direction + res["ipprotocol"] = action.IPProtocol + res["protocol"] = action.Protocol + res["source_net"] = action.SourceNet + res["source_port"] = action.SourcePort + res["source_not"] = action.SourceNot.ToInterface() + res["destination_net"] = action.DestinationNet + res["destination_port"] = action.DestinationPort + res["destination_not"] = action.DestinationNot.ToInterface() + res["gateway"] = action.Gateway + res["log"] = action.Log.ToInterface() + res["description"] = action.Description + + rule := make(map[string]interface{}) + rule["rule"] = res + + return rule, res, nil +} + +func (action *FirewallAction) ApplyRule() error { + cl := log.WithFields(log.Fields{"type": FirewallActionType, "part": "ApplyRule"}) + + request, err := http.NewRequest("POST", fmt.Sprintf("%v://%v/api/firewall/filter/apply/", action.url.scheme, action.url.ip), nil) + if err != nil { + cl.Errorf("Can't create request: %v", err) + return err + } + + // Set headers + request.SetBasicAuth(action.url.key, action.url.secret) + + // Do request + response, err := action.client.Do(request) + if err != nil { + cl.Errorf("Can't send request: %v", err) + return err + } + defer response.Body.Close() + + // Check result + body, err := ioutil.ReadAll(response.Body) + if err != nil { + cl.Errorf("Can't read response body: %v", err) + return err + } + + // Dump request and response + if viper.GetBool(config.DebugDumpRequest) { + // No need to wait + go DumpNetwork("firewall", *request.URL, nil, body, response.StatusCode) + } + + // Check status code + if response.StatusCode != http.StatusOK { + cl.Errorf("Bad status code: %v", response.Status) + return fmt.Errorf("bad server response code: %v", response.Status) + } + + // Check response + var resp map[string]interface{} + err = json.Unmarshal(body, &resp) + if err != nil { + cl.Errorf("Can't unmarshall response: %v", err) + return 
fmt.Errorf("can't unmarshall response: %v", err) + } + + statusInt, ok := resp["status"] + if !ok { + cl.Errorf("No status field: %v", resp) + return fmt.Errorf("no status field: %v", resp) + } + + status, ok := statusInt.(string) + if !ok { + cl.Error("Can't convert status to string") + return errors.New("can't convert status to string") + } + + // Trim all \n and so on + status = strings.TrimFunc(status, func(r rune) bool { + return !unicode.IsLetter(r) && !unicode.IsNumber(r) + }) + + if status == "OK" { + return nil + } + + cl.Errorf("Bad status value: %v", status) + return fmt.Errorf("bad status value: %v", status) +} + +func (action *FirewallAction) Perform(events *[]*events.Event) error { + cl := log.WithFields(log.Fields{"type": FirewallActionType, "event_count": len(*events)}) + cl.Debug("Start action") + defer cl.Debug("End action") + + if events == nil || len(*events) == 0 { + cl.Error("No events") + return nil + } + + var err error + + // Check if we need to prepare templates + if action.templates.sourceNet == nil { + action.templates.sourceNet, err = template.New("SourceNet").Parse(action.SourceNet) + if err != nil { + cl.Errorf("Can't create source net template: %v", err) + return err + } + + action.templates.sourcePort, err = template.New("SourcePort").Parse(action.SourcePort) + if err != nil { + cl.Errorf("Can't create source port template: %v", err) + return err + } + + action.templates.destinationNet, err = template.New("DestinationNet").Parse(action.DestinationNet) + if err != nil { + cl.Errorf("Can't create destination net template: %v", err) + return err + } + + action.templates.destinationPort, err = template.New("DestinationPort").Parse(action.DestinationPort) + if err != nil { + cl.Errorf("Can't create destination port template: %v", err) + return err + } + } + + // Actual action + for _, event := range *events { + + // Prepare body + ibody, data, err := action.toRule() + if err != nil { + return err + } + + // Render templates + if _, ok := data["source_net"]; ok { + data["source_net"], err = renderTemplate(action.templates.sourceNet, event) + if err != nil { + return err + } + } + + if _, ok := data["source_port"]; ok { + data["source_port"], err = renderTemplate(action.templates.sourcePort, event) + if err != nil { + return err + } + } + + if _, ok := data["destination_net"]; ok { + data["destination_net"], err = renderTemplate(action.templates.destinationNet, event) + if err != nil { + return err + } + } + + if _, ok := data["destination_port"]; ok { + data["destination_port"], err = renderTemplate(action.templates.destinationPort, event) + if err != nil { + return err + } + } + + // Check if we have a http client + if action.client == nil { + if viper.GetBool(config.ActionFirewallRuleIgnoreSSLErrors) { + transport := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + action.client = &http.Client{Transport: transport} + cl.Debug("Disable SSL validation") + } else { + action.client = &http.Client{} + } + } + + // Prepare body + jsonBody, err := json.Marshal(ibody) + if err != nil { + cl.Errorf("Can't marshall request body: %v", err) + return err + } + + request, err := http.NewRequest("POST", fmt.Sprintf("%v://%v/api/firewall/filter/addRule/", action.url.scheme, action.url.ip), bytes.NewBuffer(jsonBody)) + if err != nil { + cl.Errorf("Can't create request: %v", err) + return err + } + + // Set headers + request.SetBasicAuth(action.url.key, action.url.secret) + request.Header.Set("Content-type", "application/json") + + // Do request + 
response, err := action.client.Do(request) + if err != nil { + cl.Errorf("Can't send request: %v", err) + return err + } + defer response.Body.Close() + + // Check result + body, err := ioutil.ReadAll(response.Body) + if err != nil { + cl.Errorf("Can't read response body: %v", err) + return err + } + + // Dump request and response + if viper.GetBool(config.DebugDumpRequest) { + // No need to wait + go DumpNetwork("firewall", *request.URL, ibody, body, response.StatusCode) + } + + // Check status code + if response.StatusCode != http.StatusOK { + cl.Errorf("Bad status code: %v", response.Status) + return fmt.Errorf("bad server response code: %v", response.Status) + } + + // Check response + var resp map[string]interface{} + err = json.Unmarshal(body, &resp) + if err != nil { + cl.Errorf("Can't unmarshall response: %v", err) + return fmt.Errorf("can't unmarshall response: %v", err) + } + + res, ok := resp["result"] + if !ok { + cl.Errorf("Bad ARMAIF response: %v", string(body)) + return fmt.Errorf("bad ARMAIF response: %v", string(body)) + } + + resVal, ok := res.(string) + if !ok { + cl.Errorf("Bad ARMAIF response type: %T - %v", res, res) + return fmt.Errorf("bad ARMAIF response type: %T - %v", res, res) + } + + if resVal != responseSaved { + cl.Errorf(`Bad ARMAIF response. Expect "%v", got "%v"`, responseSaved, resVal) + cl.Debugf(`Got response: %v`, string(body)) + return fmt.Errorf("bad ARMAIF response. Expect \"%v\", got \"%v\"", responseSaved, resVal) + } + } + + return action.ApplyRule() +} + +func (action FirewallAction) MarshalJSON() ([]byte, error) { + data, err := action.ToInterface() + if err != nil { + return nil, err + } + + return json.Marshal(data) +} + +func (action *FirewallAction) UnmarshalJSON(b []byte) error { + var data interface{} + err := json.Unmarshal(b, &data) + if err != nil { + return err + } + + return action.ParseInterface(data) +} diff --git a/correlator/rules/action_firewall_test.go b/correlator/rules/action_firewall_test.go new file mode 100644 index 0000000..752ff74 --- /dev/null +++ b/correlator/rules/action_firewall_test.go @@ -0,0 +1,859 @@ +package rules + +import ( + "encoding/json" + "testing" +) + +func TestArmaBoolToInterface1(t *testing.T) { + ar := ArmaBool(true) + + v := ar.ToInterface() + + if v != 1 { + t.Errorf("Bad interface value, expect 1, got %v", v) + } +} + +func TestArmaBoolToInterface2(t *testing.T) { + ar := ArmaBool(false) + + v := ar.ToInterface() + + if v != 0 { + t.Errorf("Bad interface value, expect 0, got %v", v) + } +} + +func TestArmaBoolParseInterface1(t *testing.T) { + var v interface{} + var ar ArmaBool + + v = 1 + err := ar.ParseInterface(v) + if err != nil { + t.Errorf("Got parse error: %v", err) + } + + if bool(ar) != true { + t.Errorf("Got bad parse interface result, expect true, got %v", ar) + } +} + +func TestArmaBoolParseInterface2(t *testing.T) { + var v interface{} + var ar ArmaBool + + v = 10 + err := ar.ParseInterface(v) + if err != nil { + t.Errorf("Got parse error: %v", err) + } + + if bool(ar) != true { + t.Errorf("Got bad parse interface result, expect true, got %v", ar) + } +} + +func TestArmaBoolParseInterface3(t *testing.T) { + var v interface{} + var ar ArmaBool + + v = 0 + err := ar.ParseInterface(v) + if err != nil { + t.Errorf("Got parse error: %v", err) + } + + if bool(ar) != false { + t.Errorf("Got bad parse interface result, expect false, got %v", ar) + } +} + +func TestArmaBoolParseInterface4(t *testing.T) { + var v interface{} + var ar ArmaBool + + v = true + err := 
ar.ParseInterface(v) + if err != nil { + t.Errorf("Got parse error: %v", err) + } + + if bool(ar) != true { + t.Errorf("Got bad parse interface result, expect true, got %v", ar) + } +} + +func TestArmaBoolParseInterface5(t *testing.T) { + var v interface{} + var ar ArmaBool + + v = false + err := ar.ParseInterface(v) + if err != nil { + t.Errorf("Got parse error: %v", err) + } + + if bool(ar) != false { + t.Errorf("Got bad parse interface result, expect false, got %v", ar) + } +} + +func TestArmaBoolParseInterface6(t *testing.T) { + var v interface{} + var ar ArmaBool + + v = "true" + err := ar.ParseInterface(v) + if err == nil { + t.Errorf("Can parse bad interface: %v", v) + } +} + +func TestFirewallActionGetType(t *testing.T) { + var action FirewallAction + + if action.GetType() != FirewallActionType { + t.Errorf("Bad type, expect %v, got %v", FirewallActionType, action.GetType()) + } +} + +func TestFirewallActionToInterface(t *testing.T) { + m, err := FirewallAction{ + Enabled: true, + Action: "pass", + Quick: true, + Interface: "lan", + Description: "ddd", + IPProtocol: "inet", + Protocol: "TCP", + SourceNet: "any", + SourceNot: false, + SourcePort: "", + DestinationNet: "any", + DestinationNot: false, + DestinationPort: "", + Log: true, + Gateway: "", + }.ToInterface() + + if err != nil { + t.Errorf("Can't create interface: %v", err) + } + + testMap(m, "type", FirewallActionType, t) + testMap(m, "enabled", 1, t) + testMap(m, "action", "pass", t) + testMap(m, "quick", 1, t) + testMap(m, "interface", "[lan]", t) + testMap(m, "description", "ddd", t) + testMap(m, "ipprotocol", "inet", t) + testMap(m, "protocol", "TCP", t) + testMap(m, "source_net", "any", t) + testMap(m, "source_not", 0, t) + testMap(m, "source_port", "", t) + testMap(m, "destination_net", "any", t) + testMap(m, "destination_port", "", t) + testMap(m, "destination_not", 0, t) + testMap(m, "log", 1, t) + testMap(m, "gateway", "", t) +} + +func TestFirewallActionParseInterface1(t *testing.T) { + var i interface{} + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface2(t *testing.T) { + i := make(map[string]interface{}) + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface3(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = "ass" + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface4(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface5(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface6(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface7(t *testing.T) { + i := make(map[string]interface{}) + 
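+ // "quick" is assigned a struct below; ArmaBool.ParseInterface only accepts ints and bools, so parsing must fail.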
i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = struct{ Name string }{} + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface8(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface9(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface10(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface11(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface12(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = struct{ Name string }{} + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface13(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = 0 + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface14(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = 0 + i["destination_net"] = "" + i["destination_not"] = struct{ Name string }{} + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface15(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = 0 + i["destination_net"] = "" + i["destination_not"] = 0 + i["direction"] = "ass" + + url := make(map[string]interface{}) + 
url["ip"] = "127.0.0.1" + url["key"] = "ass" + url["scheme"] = "http" + url["secret"] = "ass" + + i["sensor"] = url + + var action FirewallAction + + err := action.ParseInterface(i) + if err != nil { + t.Errorf("Got error, when mustn't \"%v\" interface: %v", err, i) + } +} + +func TestFirewallActionParseInterface16(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = 0 + i["destination_net"] = "" + i["destination_not"] = 0 + i["log"] = struct{ Name string }{} + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionParseInterface17(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = 0 + i["destination_net"] = "" + i["destination_not"] = 0 + i["log"] = 1 + i["sequence"] = "a" + i["direction"] = "ass" + + url := make(map[string]interface{}) + url["ip"] = "127.0.0.1" + url["key"] = "ass" + url["scheme"] = "http" + url["secret"] = "ass" + + i["sensor"] = url + + var action FirewallAction + + err := action.ParseInterface(i) + if err != nil { + t.Errorf("Got error, when mustn't (%v) interface: %v", err, i) + } +} + +func TestFirewallActionParseInterface18(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = 0 + i["destination_net"] = "" + i["destination_not"] = 0 + i["log"] = 1 + i["sequence"] = 12 + i["direction"] = "ass" + + url := make(map[string]interface{}) + url["ip"] = "127.0.0.1" + url["key"] = "ass" + url["scheme"] = "http" + url["secret"] = "ass" + + i["sensor"] = url + + var action FirewallAction + + err := action.ParseInterface(i) + if err != nil { + t.Errorf("Got error, when mustn't %v interface: %v", err, i) + } +} + +func TestFirewallActionParseInterface19(t *testing.T) { + i := make(map[string]interface{}) + i["type"] = FirewallActionType + i["enabled"] = 1 + i["action"] = "pass" + i["quick"] = 1 + i["interface"] = "lan" + i["ipprotocol"] = "inet" + i["protocol"] = "TCP" + i["source_net"] = "" + i["source_not"] = 0 + i["destination_net"] = "" + i["destination_not"] = 0 + i["log"] = 1 + i["sequence"] = struct{ Name string }{} + + var action FirewallAction + + err := action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestFirewallActionUnmarshal(t *testing.T) { + s := `{ + "type": "firewall", + "quick": true, + "action": "pass", + "armaif": "2", + "sensor": { + "ip": "192.168.56.104", + "key": "3uIhK/pCIKaLhkv9cmYg4V7DQ1Adt4zZLovThtbfaYzqr1YUdG4DdK6lKuqEjq0vfrDac2KczPbm7dwW", + "scheme": "http", + "secret": "5bYfRxkmDwFFY3WL/AHIooFTVQwHdDYSGT8I/6zvzwiSPAMrc9YNNS1CMRoSerxtUnkXRm2ZTEe4LtWo" + }, + "enabled": true, + "gateway": "", + "protocol": "any", + "sequence": "5000", + "direction": "in", + "interface": "LAN", + "ipprotocol": "inet", + "source_net": "any", + "source_not": true, + "description": "{{.EventSrcMsg}}", + "source_port": "", + "destination_net": "any", + "destination_not": true, + 
"destination_port": "" + }` + + var action FirewallAction + err := json.Unmarshal([]byte(s), &action) + if err != nil { + t.Errorf("Can't parse json: %v", err) + } + + if action.Quick != true { + t.Errorf("Bad quick value: %v", action.Quick) + } + + if action.Action != "pass" { + t.Errorf("Bad action value: %v", action.Action) + } + + if action.url.ip != "192.168.56.104" { + t.Errorf("Bad url ip: %v", action.url.ip) + } + + if action.url.scheme != "http" { + t.Errorf("Bad url scheme: %v", action.url.scheme) + } + + if action.url.key != "3uIhK/pCIKaLhkv9cmYg4V7DQ1Adt4zZLovThtbfaYzqr1YUdG4DdK6lKuqEjq0vfrDac2KczPbm7dwW" { + t.Errorf("Bad url key: %v", action.url.key) + } + + if action.url.secret != "5bYfRxkmDwFFY3WL/AHIooFTVQwHdDYSGT8I/6zvzwiSPAMrc9YNNS1CMRoSerxtUnkXRm2ZTEe4LtWo" { + t.Errorf("Bad secret value: %v", action.url.secret) + } + + if action.Enabled != true { + t.Errorf("Bad enabled value: %v", action.Enabled) + } + + if action.Gateway != "" { + t.Errorf("Bad gateway value: %v", action.Gateway) + } + + if action.Protocol != "any" { + t.Errorf("Bad protocol value: %v", action.Protocol) + } + + if action.Sequence != "5000" { + t.Errorf("Bad sequence number: %v", action.Sequence) + } + + if action.Direction != "in" { + t.Errorf("Bad direction value: %v", action.Direction) + } + + if action.Interface != "LAN" { + t.Errorf("Bad interface value: %v", action.Interface) + } + + if action.IPProtocol != "inet" { + t.Errorf("Bad ipprotocol value: %v", action.IPProtocol) + } + + if action.SourceNet != "any" { + t.Errorf("Bad source net value: %v", action.SourceNet) + } + + if action.Description != "{{.EventSrcMsg}}" { + t.Errorf("Bad description value: %v", action.Description) + } + + if action.SourcePort != "" { + t.Errorf("Bad source port value: %v", action.SourcePort) + } + + if action.SourceNot != true { + t.Errorf("Bad source not value: %v", action.SourceNot) + } + + if action.DestinationNet != "any" { + t.Errorf("Bad destination net value: %v", action.DestinationNet) + } + + if action.DestinationNot != true { + t.Errorf("Bad destination not value: %v", action.DestinationNot) + } + + if action.DestinationPort != "" { + t.Errorf("Bad destination port value: %v", action.DestinationPort) + } +} + +func TestFirewallActionMarshal(t *testing.T) { + action := FirewallAction{ + Quick: true, + Action: "pass", + url: struct { + ip string + scheme string + key string + secret string + }{ + ip: "192.168.56.104", + scheme: "http", + key: "3uIhK/pCIKaLhkv9cmYg4V7DQ1Adt4zZLovThtbfaYzqr1YUdG4DdK6lKuqEjq0vfrDac2KczPbm7dwW", + secret: "5bYfRxkmDwFFY3WL/AHIooFTVQwHdDYSGT8I/6zvzwiSPAMrc9YNNS1CMRoSerxtUnkXRm2ZTEe4LtWo", + }, + Enabled: true, + Protocol: "any", + Sequence: "5000", + Direction: "in", + Interface: "LAN", + IPProtocol: "inet", + SourceNet: "any", + SourceNot: true, + Description: "{{.EventSrcMsg}}", + DestinationNet: "any", + DestinationNot: true, + } + + data, err := json.Marshal(action) + if err != nil { + t.Errorf("Can't marshall: %v", err) + } + + if string(data) != 
`{"action":"pass","description":"{{.EventSrcMsg}}","destination_net":"any","destination_not":1,"destination_port":"","direction":"in","enabled":1,"gateway":"","interface":"[LAN]","ipprotocol":"inet","log":0,"protocol":"any","quick":1,"sensor":{"ip":"192.168.56.104","key":"3uIhK/pCIKaLhkv9cmYg4V7DQ1Adt4zZLovThtbfaYzqr1YUdG4DdK6lKuqEjq0vfrDac2KczPbm7dwW","scheme":"http","secret":"5bYfRxkmDwFFY3WL/AHIooFTVQwHdDYSGT8I/6zvzwiSPAMrc9YNNS1CMRoSerxtUnkXRm2ZTEe4LtWo"},"sequence":"5000","source_net":"any","source_not":1,"source_port":"","type":"firewall"}` { + t.Errorf("Bad result: %v", string(data)) + } +} + +func TestFirewallActionParseInterface20(t *testing.T) { + inputText := `{ + "type":"firewall", + "quick":true, + "action":"pass", + "armaif":"2", + "sensor":{ + "ip":"192.168.56.104", + "key":"4J0FSrWI2cknxRQb38xESVnupeJ8q1nuRMriHTJgiwM9653Da9XNRq9nZ9XUZH5TksORFoS8AwW0i/fc", + "scheme":"http", + "secret":"nxAZPULMyE731eBHAEcH+XMAdJhfZ3zk8eNuTmNjbE8E58pOnwUFzh8Nho/NU+LQ4uEYBOyjfARP63Qv" + }, + "gateway":"", + "protocol":"any", + "sequence":"5000", + "direction":"in", + "interface":"LAN", + "ipprotocol":"inet", + "source_net":"any", + "description":"", + "source_port":"", + "destination_net":"any", + "destination_port":"", + "enabled": false + }` + + var input map[string]interface{} + err := json.Unmarshal([]byte(inputText), &input) + if err != nil { + t.Errorf("Can't unmarshall interface: %v", err) + } + + var action FirewallAction + + err = action.ParseInterface(input) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } +} + +func TestFirewallActionParseInterface21(t *testing.T) { + inputText := `{ + "log":false, + "type":"firewall", + "quick":true, + "action":"pass", + "armaif":"2", + "sensor":{ + "ip":"192.168.56.104", + "key":"4J0FSrWI2cknxRQb38xESVnupeJ8q1nuRMriHTJgiwM9653Da9XNRq9nZ9XUZH5TksORFoS8AwW0i/fc", + "scheme":"http", + "secret":"nxAZPULMyE731eBHAEcH+XMAdJhfZ3zk8eNuTmNjbE8E58pOnwUFzh8Nho/NU+LQ4uEYBOyjfARP63Qv" + }, + "enabled":true, + "gateway":"", + "protocol":"any", + "sequence":"1", + "direction":"in", + "interface":[ + "lan" + ], + "ipprotocol":"inet", + "source_net":"any", + "source_not":false, + "description":"", + "source_port":"", + "destination_net":"any", + "destination_not":false, + "destination_port":"" + }` + + var input map[string]interface{} + err := json.Unmarshal([]byte(inputText), &input) + if err != nil { + t.Errorf("Can't unmarshall interface: %v", err) + } + + var action FirewallAction + + err = action.ParseInterface(input) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } + + if action.Interface != "lan" { + t.Errorf("Bad interface value. 
Expect lan, got %v ", action.Interface) + } +} + +//func TestActionRequest(t *testing.T) { +// +// testServer := util.NewTestServer(":8080") +// testServer.AddUrl("/api/firewall/filter/addRule/", []byte("{\"result\":\"saved\"}"), map[string]string{"Content-Type": "application/json"}) +// testServer.AddUrl("/api/firewall/filter/apply/", []byte("{\"status\":\"OK\"}"), map[string]string{"Content-Type": "application/json"}) +// +// go func() { +// if err := testServer.Start(); err != http.ErrServerClosed { +// t.Errorf("Can't start test server: %v", err) +// } +// }() +// +// // Prepare action +// inputText := `{ +// "log":false, +// "type":"firewall", +// "quick":true, +// "action":"pass", +// "armaif":"2", +// "sensor":{ +// "ip":"localhost:8080", +// "key":"4J0FSrWI2cknxRQb38xESVnupeJ8q1nuRMriHTJgiwM9653Da9XNRq9nZ9XUZH5TksORFoS8AwW0i/fc", +// "scheme":"http", +// "secret":"nxAZPULMyE731eBHAEcH+XMAdJhfZ3zk8eNuTmNjbE8E58pOnwUFzh8Nho/NU+LQ4uEYBOyjfARP63Qv" +// }, +// "enabled":true, +// "gateway":"", +// "protocol":"any", +// "sequence":"1", +// "direction":"in", +// "interface":[ +// "lan" +// ], +// "ipprotocol":"inet", +// "source_net":"{{.source_ip}}", +// "source_not":false, +// "description":"", +// "source_port":"{{.source_port}}", +// "destination_net":"any", +// "destination_not":false, +// "destination_port":"{{.destination_port}}" +// }` +// +// var input map[string]interface{} +// err := json.Unmarshal([]byte(inputText), &input) +// if err != nil { +// t.Errorf("Can't unmarshall interface: %v", err) +// } +// +// var action FirewallAction +// +// err = action.ParseInterface(input) +// if err != nil { +// t.Errorf("Can't parse interface: %v", err) +// } +// +// // Prepare events +// eventsArray := make([]*events.Event, 1) +// eventsArray[0] = &events.Event{ +// events.Hash: "", +// events.FirstEvent: fmt.Sprintf("%v", time.Now().UTC()), +// events.LastEvent: fmt.Sprintf("%v", time.Now().UTC()), +// events.EventCount: 5, +// events.Created: fmt.Sprintf("%v", time.Now().UTC()), +// events.Tags: make([]string, 0), +// events.AggregatedId: "", +// events.CeleryDone: false, +// +// "event_severity": 5, +// "source_ip": "127.0.0.1", +// "source_port": 443, +// "destination_ip": "127.0.0.2", +// "destination_port": 9090, +// } +// +// err = action.Perform(&eventsArray) +// if err != nil { +// t.Errorf("Got error in perform: %v", err) +// } +// +// err = testServer.Stop() +// if err != nil { +// t.Errorf("Can't stop server: %v", err) +// } +// +// if testServer.Urls[0].CallCount != 1 { +// t.Errorf("Bad url call count: %v", testServer.Urls[0].CallCount) +// } +// +// if testServer.Urls[0].Error != nil { +// t.Errorf("Got some error in url: %v", testServer.Urls[0].Error.Error()) +// } +// +// if testServer.Urls[0].GetRequestAsString() != `{"rule":{"action":"pass","description":"","destination_net":"any","destination_not":0,"destination_port":"9090","direction":"in","enabled":1,"gateway":"","interface":"lan","ipprotocol":"inet","log":0,"protocol":"any","quick":1,"sequence":1,"source_net":"127.0.0.1","source_not":0,"source_port":"443"}}` { +// t.Errorf("Got bad action request: %v", testServer.Urls[0].GetRequestAsString()) +// } +//} diff --git a/correlator/rules/action_http.go b/correlator/rules/action_http.go new file mode 100644 index 0000000..a26a58c --- /dev/null +++ b/correlator/rules/action_http.go @@ -0,0 +1,134 @@ +package rules + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "iwarma.ru/console/correlator/events" + "net/http" + "text/template" + "time" + + log 
"github.com/sirupsen/logrus" +) + +const ( + HttpActionType = "http" +) + +// HttpAction Send record via HTTP. Wil use application/json if empty content type set +type HttpAction struct { + Url string + Template string + ContentType string + bodyTemplate *template.Template +} + +func (action HttpAction) GetType() string { + return HttpActionType +} + +func (action *HttpAction) Perform(events *[]*events.Event) error { + contextLogger := log.WithFields(log.Fields{"type": HttpActionType, "event_count": len(*events)}) + contextLogger.Debug("Start action") + defer contextLogger.Debug("End action") + + if events == nil { + contextLogger.Error("Nil events provided") + return errors.New("nil events provided") + } + + // Create template + if action.bodyTemplate == nil { + bodyTemplate, err := template.New("Action").Parse(action.Template) + if err != nil { + return err + } + + action.bodyTemplate = bodyTemplate + } + + for _, cur := range *events { + // Render template + var buf bytes.Buffer + err := action.bodyTemplate.Execute(&buf, *cur) + if err != nil { + return err + } + + reader := bytes.NewReader(buf.Bytes()) + http.DefaultClient.Timeout = time.Second * 30 + _, err = http.Post(action.Url, action.ContentType, reader) + if err != nil { + return err + } + + } + http.DefaultClient.CloseIdleConnections() + return nil +} + +func (action HttpAction) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + CurType string `json:"type"` + Url string `json:"url"` + Template string `json:"template"` + ContentType string `json:"content_type"` + }{ + CurType: HttpActionType, + Url: action.Url, + Template: action.Template, + ContentType: action.ContentType, + }) +} + +func (action *HttpAction) UnmarshalJSON(b []byte) error { + + cur := struct { + CurType string `json:"type"` + Url string `json:"url"` + ContentType string `json:"content_type"` + Template string `json:"template"` + }{} + + err := json.Unmarshal(b, &cur) + if err != nil { + return err + } + + if cur.CurType != HttpActionType { + return fmt.Errorf("bad action type. 
Expect http, got %s", cur.CurType) + } + + action.Url = cur.Url + action.Template = cur.Template + action.ContentType = cur.ContentType + + return nil +} + +func (action *HttpAction) ParseInterface(v interface{}) error { + m, ok := v.(map[string]interface{}) + if !ok { + return fmt.Errorf("can't parse %v from %T", v, v) + } + + action.Url, ok = m["url"].(string) + if !ok { + return fmt.Errorf("can't get URL") + } + + // Template can be in data or template + if action.Template, ok = m["data"].(string); !ok { + if action.Template, ok = m["template"].(string); !ok { + return fmt.Errorf("can't get template") + } + } + + if action.ContentType, ok = m["content_type"].(string); !ok { + return fmt.Errorf("no content type provided") + } + + return nil +} diff --git a/correlator/rules/action_http_test.go b/correlator/rules/action_http_test.go new file mode 100644 index 0000000..987b1db --- /dev/null +++ b/correlator/rules/action_http_test.go @@ -0,0 +1,87 @@ +package rules + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestActionHttpMarshal(t *testing.T) { + action := HttpAction{Url: "http://localhost:543/http", ContentType: "application/json", Template: `{{.event_id}}`} + + bytes, err := json.Marshal(action) + if err != nil { + t.Error(err) + } + + if string(bytes) != `{"type":"http","url":"http://localhost:543/http","template":"{{.event_id}}","content_type":"application/json"}` { + t.Error(fmt.Sprintf("Got result: %v", string(bytes))) + } +} + +func TestActionHttpUnmarshalGood(t *testing.T) { + str := `{"type":"http","url":"http://localhost:543/http","template":"{{.event_id}}"}` + + var action HttpAction + + err := json.Unmarshal([]byte(str), &action) + if err != nil { + t.Error(err) + } + + if action.Url != "http://localhost:543/http" { + t.Error(fmt.Sprintf("Bad url, got %s", action.Url)) + } + + if action.Template != "{{.event_id}}" { + t.Error(fmt.Sprintf("Got bad template: \"%s\"", action.Template)) + } +} + +func TestActionHttpUnmarshalBad(t *testing.T) { + str := `{"type":"syslog","url":"http://localhost:543/http","template":"{{.event_id}}"}` + + var action HttpAction + + err := json.Unmarshal([]byte(str), &action) + if err == nil { + t.Error("Don't get error with bad action type") + } +} + +func TestActionHttpParseInterfaceBad(t *testing.T) { + data := make([]interface{}, 1) + data[0] = 10 + + action := new(HttpAction) + + err := action.ParseInterface(data) + + if err == nil { + t.Error("No error with bad interfce") + } +} + +func TestActionHttpUnmarshalBad2(t *testing.T) { + data := `Some strange test` + + action := new(HttpAction) + + err := action.UnmarshalJSON([]byte(data)) + if err == nil { + t.Error("No error with bad json") + } +} + +func TestActionHttpParseInterfaceBad2(t *testing.T) { + data := make(map[string]interface{}) + data["url"] = "http" + + action := new(HttpAction) + + err := action.ParseInterface(data) + + if err == nil { + t.Error("No error with bad interfce") + } +} diff --git a/correlator/rules/action_incident.go b/correlator/rules/action_incident.go new file mode 100644 index 0000000..5604f08 --- /dev/null +++ b/correlator/rules/action_incident.go @@ -0,0 +1,369 @@ +package rules + +import ( + "bytes" + "crypto/tls" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "iwarma.ru/console/correlator/events" + "net/http" + "strconv" + "text/template" + + "iwarma.ru/console/correlator/config" + + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" +) + +const ( + IncidentActionType = "incident" +) + +type IncidentAction struct { + Title string 
`json:"title"` // template + Comment string `json:"comment"` // template + Effects []string `json:"effects"` + Category string `json:"category"` + Importance string `json:"importance"` + Assigned string `json:"assigned_to"` + Description string `json:"description"` // template + CloseRecommendations []string `json:"close_recommendations"` + + // Private + token string + client *http.Client + + template struct { + title *template.Template + comment *template.Template + description *template.Template + } +} + +func (action IncidentAction) GetType() string { + return IncidentActionType +} + +func (action IncidentAction) ToInterface() (map[string]interface{}, error) { + result := make(map[string]interface{}) + + result["title"] = action.Title + + if action.Comment != "" { + result["comment"] = action.Comment + } + + // Effects + if len(action.Effects) > 0 { + effects := make([]interface{}, 0) + for _, cur := range action.Effects { + tmp, err := strconv.Atoi(cur) + if err != nil { + return nil, err + } + effects = append(effects, tmp) + } + result["effects"] = effects + } + + // Category + if action.Category != "" { + catPk, err := strconv.Atoi(action.Category) + result["category"] = catPk + + if err != nil { + return nil, err + } + } + + // Importance + imp, err := strconv.Atoi(action.Importance) + if err != nil { + return nil, err + } + result["importance"] = imp + + // Assigned + if action.Assigned != "" { + ass, err := strconv.Atoi(action.Assigned) + if err != nil { + return nil, err + } + result["assigned_to"] = ass + } + + // Description + if action.Description != "" { + result["description"] = action.Description + } + + // Close recommendations + if len(action.CloseRecommendations) > 0 { + closeRec := make([]interface{}, 0) + for _, cur := range action.CloseRecommendations { + tmp, err := strconv.Atoi(cur) + if err != nil { + return nil, err + } + + closeRec = append(closeRec, tmp) + } + result["close_recommendations"] = closeRec + } + + return result, nil +} + +func (action IncidentAction) MarshalJSON() ([]byte, error) { + data, err := action.ToInterface() + if err != nil { + return nil, err + } + return json.Marshal(data) +} + +func (action *IncidentAction) UnmarshalJSON(b []byte) error { + var data interface{} + err := json.Unmarshal(b, &data) + if err != nil { + return err + } + + return action.ParseInterface(data) +} + +func (action *IncidentAction) ParseInterface(v interface{}) error { + m, ok := v.(map[string]interface{}) + if !ok { + return fmt.Errorf("can't parse %v from %T", v, v) + } + + t, ok := m["type"].(string) + if !ok { + return errors.New("no type") + } + + if t != IncidentActionType { + return fmt.Errorf("got bad type: %v", t) + } + + action.Title, ok = m["title"].(string) + if !ok { + return errors.New("no title") + } + + // We can process without comment and description + action.Comment, _ = m["comment"].(string) + action.Description, _ = m["description"].(string) + + if w, ok := m["effects"]; ok { + switch v := w.(type) { + case []interface{}: + { + for _, cur := range v { + switch w := cur.(type) { + case string: + action.Effects = append(action.Effects, w) + case float64: + action.Effects = append(action.Effects, fmt.Sprintf("%v", w)) + default: + return fmt.Errorf("bad effect type: %T with value %v in interface %v", cur, cur, m) + } + } + } + case string: + action.Effects = append(action.Effects, v) + case float64: + action.Effects = append(action.Effects, fmt.Sprintf("%v", v)) + default: + return fmt.Errorf("bad effect type: %T with value %v in interface %v", v, 
v, m) + } + } + + action.Category, _ = m["category"].(string) + action.Importance, ok = m["importance"].(string) + if !ok { + return errors.New("no importance") + } + + action.Assigned, _ = m["assigned_to"].(string) + + if w, ok := m["close_recommendations"]; ok { + switch v := w.(type) { + case []interface{}: + { + for _, cur := range v { + switch w := cur.(type) { + case string: + action.CloseRecommendations = append(action.CloseRecommendations, w) + case float64: + action.CloseRecommendations = append(action.CloseRecommendations, fmt.Sprintf("%v", w)) + case nil: + break + default: + return fmt.Errorf("bad recommendations type: %T with value %v in interface %v", cur, cur, m) + } + } + } + case string: + action.CloseRecommendations = append(action.CloseRecommendations, v) + case float64: + action.CloseRecommendations = append(action.CloseRecommendations, fmt.Sprintf("%v", v)) + default: + return fmt.Errorf("bad close recommendations type: %T with value %v in interface %v", v, v, m) + } + } + + return nil +} + +func (action *IncidentAction) Perform(events *[]*events.Event) error { + cl := log.WithFields(log.Fields{"type": IncidentActionType, "event_count": len(*events)}) + cl.Debug("Start action") + defer cl.Debug("End action") + + if events == nil || len(*events) == 0 { + cl.Error("No events") + return nil + } + + var err error + + // Check if we need to prepare templates + if action.template.title == nil { + action.template.title, err = template.New("Title").Parse(action.Title) + if err != nil { + cl.Errorf("Can't create title template: %v", err) + return err + } + + action.template.comment, err = template.New("Comment").Parse(action.Comment) + if err != nil { + cl.Errorf("Can't create comment template: %v", err) + return err + } + + action.template.description, err = template.New("Description").Parse(action.Description) + if err != nil { + cl.Errorf("Can't create description template: %v", err) + return err + } + + } + + // Check if we need to prepare http client + if action.client == nil { + if viper.GetBool(config.ConsoleIgnoreSSLErrors) { + transport := &http.Transport{ + TLSClientConfig: &tls.Config{InsecureSkipVerify: true}, + } + action.client = &http.Client{Transport: transport} + } else { + action.client = &http.Client{} + } + } + + // Get auth token + if action.token == "" { + action.token, err = ObtainAuthToken() + if err != nil { + cl.Errorf("Can't get auth token: %v", err) + return err + } + } + + // For this type of action, multi means that we need to add all events + // to created incident + + // Prepare body + ibody, err := action.ToInterface() + if err != nil { + return err + } + + cur := *events + event := cur[0] + + // Render templates + if _, ok := ibody["title"]; ok { + s, err := renderTemplate(action.template.title, event) + if err != nil { + return err + } + + // Need to truncate title, so serializer will accept it + if len(s) > 256 { + s = s[:255] + } + + ibody["title"] = s + } + + if _, ok := ibody["comment"]; ok { + ibody["comment"], err = renderTemplate(action.template.comment, event) + if err != nil { + return err + } + } + + if _, ok := ibody["description"]; ok { + ibody["description"], err = renderTemplate(action.template.description, event) + if err != nil { + return err + } + } + + // Add type to connect incident with sensor + ibody["sensor"] = event.GetString("type") + + // Insert incidents info + // If we have a single-event rule, len of events will be 1 + ibody["event_count"] = len(*events) + ibody["events"] = events + + jsonBody, err := 
json.Marshal(ibody) + if err != nil { + cl.Errorf("Can't serialize body: %v", err) + return err + } + + request, err := http.NewRequest("POST", viper.GetString(config.ConsoleUrlIncident), bytes.NewBuffer(jsonBody)) + if err != nil { + cl.Errorf("Can't create request: %v", err) + return err + } + + // Set headers + request.Header.Set("Authorization", fmt.Sprintf("Token %v", action.token)) + request.Header.Set("Content-Type", "application/json") + + // Do request + response, err := action.client.Do(request) + if err != nil { + cl.Errorf("Can't send request: %v", err) + return err + } + defer response.Body.Close() + + // Check result + body, err := ioutil.ReadAll(response.Body) + if err != nil { + cl.Errorf("Can't read response body: %v", err) + return err + } + + // Dump request and response + if viper.GetBool(config.DebugDumpRequest) { + go DumpNetwork("incident", *request.URL, ibody, body, response.StatusCode) + } + + if response.StatusCode != http.StatusCreated { + cl.Errorf("Bad status code, expect %v, got %v", http.StatusCreated, response.Status) + return fmt.Errorf("bad status code, expect %v, got %v", http.StatusCreated, response.Status) + } + + return nil +} diff --git a/correlator/rules/action_incident_test.go b/correlator/rules/action_incident_test.go new file mode 100644 index 0000000..5b5312b --- /dev/null +++ b/correlator/rules/action_incident_test.go @@ -0,0 +1,444 @@ +package rules + +import ( + "encoding/json" + "testing" +) + +func TestIncidentActionType(t *testing.T) { + var action IncidentAction + + if action.GetType() != IncidentActionType { + t.Errorf("Bad action type: %v", action.GetType()) + } +} + +func TestMarshall(t *testing.T) { + action := IncidentAction{ + Title: "Test2", + Comment: "Test3", + Effects: make([]string, 2), + Category: "14", + Importance: "90", + Assigned: "3", + Description: "Test7", + CloseRecommendations: make([]string, 2), + } + + action.Effects[0] = "3" + action.Effects[1] = "2" + action.CloseRecommendations[0] = "7" + action.CloseRecommendations[1] = "15" + + bytes, err := json.Marshal(action) + if err != nil { + t.Errorf("Got error: %v", err) + } + + if string(bytes) != `{"assigned_to":3,"category":14,"close_recommendations":[7,15],"comment":"Test3","description":"Test7","effects":[3,2],"importance":90,"title":"Test2"}` { + t.Errorf("Got bad json: %v", string(bytes)) + } +} + +func TestUnmarshall(t *testing.T) { + str := `{"type":"incident","title":"Veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong incident title", + "comment":"Test3","effects":["3","2"],"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":["7","15"]}` + + var action IncidentAction + err := json.Unmarshal([]byte(str), &action) + if err != nil { + t.Errorf("Can't unmarshall json: %v", err) + } + + if action.GetType() != IncidentActionType { + t.Errorf("Bad action type: %v", action.GetType()) + } + + if action.Title != "Veeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeery looooooooooooooooooooooooooooooooooooooooooooooooooooooooooooong incident title" { + t.Errorf("Bad action title: %v", action.Title) + } + if len(action.Title) > 256 { + t.Errorf("Title is bigger than 256") + } + + if len(action.Effects) != 2 { + t.Errorf("Bad effects size: %v", len(action.Effects)) + } + + if action.Effects[0] != "3" { + t.Errorf("Bad effect #0: %v", action.Effects[0]) + } + + if action.Effects[1] != "2" { + t.Errorf("Bad effect #1: 
%v", action.Effects[1]) + } + + if action.Category != "Test4" { + t.Errorf("Bad category: %v", action.Category) + } + + if action.Importance != "Test5" { + t.Errorf("Bad importance: %v", action.Importance) + } + + if action.Assigned != "Test6" { + t.Errorf("Bad assigned value: %v", action.Assigned) + } + + if action.Description != "Test7" { + t.Errorf("Bad description: %v", action.Description) + } + + if len(action.CloseRecommendations) != 2 { + t.Errorf("Bad close reocmendation len: %v", len(action.CloseRecommendations)) + } + + if action.CloseRecommendations[0] != "7" { + t.Errorf("Bad close recomendation #0: %v", action.CloseRecommendations[0]) + } + + if action.CloseRecommendations[1] != "15" { + t.Errorf("Bad close recomendation #1: %v", action.CloseRecommendations[1]) + } +} + +func TestBadUnmarshall(t *testing.T) { + str := `{"type":"test","url":"Test1","title":"Test2","comment":"Test3","effects":["3","2"],"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":["7","15"]}` + + var action IncidentAction + err := json.Unmarshal([]byte(str), &action) + if err == nil { + t.Errorf("Can read bad json: %v", action) + } + +} + +func TestBadUnmarshall2(t *testing.T) { + str := `jhgfghjk` + + var action IncidentAction + err := json.Unmarshal([]byte(str), &action) + if err == nil { + t.Errorf("Can read bad json: %v", action) + } +} + +func TestParseInterface(t *testing.T) { + str := `{"type":"incident","title":"Test2","comment":"Test3","effects":["3","2"],"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":["7","15"]}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } + + if action.GetType() != IncidentActionType { + t.Errorf("Bad action type: %v", action.GetType()) + } + + if action.Title != "Test2" { + t.Errorf("Bad action title: %v", action.Title) + } + + if len(action.Effects) != 2 { + t.Errorf("Bad effects size: %v", len(action.Effects)) + } + + if action.Effects[0] != "3" { + t.Errorf("Bad effect #0: %v", action.Effects[0]) + } + + if action.Effects[1] != "2" { + t.Errorf("Bad effect #1: %v", action.Effects[1]) + } + + if action.Category != "Test4" { + t.Errorf("Bad category: %v", action.Category) + } + + if action.Importance != "Test5" { + t.Errorf("Bad importance: %v", action.Importance) + } + + if action.Assigned != "Test6" { + t.Errorf("Bad assigned value: %v", action.Assigned) + } + + if action.Description != "Test7" { + t.Errorf("Bad description: %v", action.Description) + } + + if len(action.CloseRecommendations) != 2 { + t.Errorf("Bad close reocmendation len: %v", len(action.CloseRecommendations)) + } + + if action.CloseRecommendations[0] != "7" { + t.Errorf("Bad close recomendation #0: %v", action.CloseRecommendations[0]) + } + + if action.CloseRecommendations[1] != "15" { + t.Errorf("Bad close recomendation #1: %v", action.CloseRecommendations[1]) + } +} + +func TestParseInterfaceWithSingleArrayValue(t *testing.T) { + str := `{"type":"incident","title":"Test2","comment":"Test3","effects":"3","category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":"7"}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + 
err = action.ParseInterface(i) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } + + if action.GetType() != IncidentActionType { + t.Errorf("Bad action type: %v", action.GetType()) + } + + if action.Title != "Test2" { + t.Errorf("Bad action title: %v", action.Title) + } + + if len(action.Effects) != 1 { + t.Errorf("Bad effects size: %v", len(action.Effects)) + } + + if action.Effects[0] != "3" { + t.Errorf("Bad effect #0: %v", action.Effects[0]) + } + + if action.Category != "Test4" { + t.Errorf("Bad category: %v", action.Category) + } + + if action.Importance != "Test5" { + t.Errorf("Bad importance: %v", action.Importance) + } + + if action.Assigned != "Test6" { + t.Errorf("Bad assigned value: %v", action.Assigned) + } + + if action.Description != "Test7" { + t.Errorf("Bad description: %v", action.Description) + } + + if len(action.CloseRecommendations) != 1 { + t.Errorf("Bad close recommendation len: %v", len(action.CloseRecommendations)) + } + + if action.CloseRecommendations[0] != "7" { + t.Errorf("Bad close recommendation #0: %v", action.CloseRecommendations[0]) + } +} + +func TestParseInterfaceWithIntArrayValue(t *testing.T) { + str := `{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":3,"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":"7"}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } + + if len(action.Effects) != 1 { + t.Errorf("Bad effects size: %v", len(action.Effects)) + } + + if action.Effects[0] != "3" { + t.Errorf("Bad effects #0 value: %v", action.Effects[0]) + } +} + +func TestParseInterfaceWithIntArrayValue2(t *testing.T) { + str := `{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":3,"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":7}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } + + if len(action.CloseRecommendations) != 1 { + t.Errorf("Bad close recommendations size: %v", len(action.CloseRecommendations)) + } + + if action.CloseRecommendations[0] != "7" { + t.Errorf("Bad close recommendations #0 value: %v", action.CloseRecommendations[0]) + } +} + +func TestParseInterfaceWithIntArrayValue3(t *testing.T) { + str := `{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":3,"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":[7]}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err != nil { + t.Errorf("Can't parse interface: %v", err) + } + + if len(action.CloseRecommendations) != 1 { + t.Errorf("Bad close recommendations size: %v", len(action.CloseRecommendations)) + } + + if action.CloseRecommendations[0] != "7" { + t.Errorf("Bad close recommendations #0 value: %v", action.CloseRecommendations[0]) + } +} + +func TestParseInterfaceBadInput1(t *testing.T) { + str := `[1, 2, 3]` + var action IncidentAction + var i interface{} + + err := 
json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestParseInterfaceBadInput2(t *testing.T) { + str := `{"type":"incident","comment":"Test3","effects":"3","category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":"7"}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestParseInterfaceBadInput3(t *testing.T) { + str := `{"type":"incident","url":"Test1","comment":"Test3","effects":"3","category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":"7"}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestParseInterfaceBadInput4(t *testing.T) { + str := `{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":{"test":"true"},"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":"7"}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestParseInterfaceBadInput5(t *testing.T) { + str := `{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":[1, 2, 3],"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":{"test":"true"}}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestParseInterfaceBadInput6(t *testing.T) { + str := `{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":[{"test":"test"}],"category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":"7"}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestParseInterfaceBadInput7(t *testing.T) { + str := `{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":"7","category":"Test4","importance":"Test5","assigned_to":"Test6","description":"Test7","close_recommendations":[{"test":"test"}]}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} + +func TestParseInterfaceBadInput8(t *testing.T) { + str := 
`{"type":"incident","url":"Test1","title":"Test2","comment":"Test3","effects":"3","category":"Test4","assigned_to":"Test6","description":"Test7","close_recommendations":"7"}` + var action IncidentAction + var i interface{} + + err := json.Unmarshal([]byte(str), &i) + if err != nil { + t.Errorf("Got error parsing json: %v", err) + } + + err = action.ParseInterface(i) + if err == nil { + t.Errorf("Can parse bad interface: %v", i) + } +} diff --git a/correlator/rules/action_syslog.go b/correlator/rules/action_syslog.go new file mode 100644 index 0000000..02ece83 --- /dev/null +++ b/correlator/rules/action_syslog.go @@ -0,0 +1,184 @@ +package rules + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "iwarma.ru/console/correlator/events" + "log/syslog" + "strconv" + "text/template" + + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + + log "github.com/sirupsen/logrus" +) + +const ( + SyslogProtoTcp = "tcp" + SyslogProtoUdp = "udp" + + SyslogActionType = "syslog" +) + +// SyslogAction Send record with syslog to selected host +type SyslogAction struct { + Host string + Port int + Proto string + Name string + Template string + writer *syslog.Writer + bodyTemplate *template.Template +} + +func (action SyslogAction) GetType() string { + return SyslogActionType +} + +func (action SyslogAction) Perform(events *[]*events.Event) error { + contextLogger := log.WithFields(log.Fields{"type": SyslogActionType, "event_count": len(*events)}) + contextLogger.Debug("Start action") + defer contextLogger.Debug("End action") + + if events == nil { + contextLogger.Error("Nil events provided") + return errors.New("nil events provided") + } + + // Create connection to syslog server + if action.writer == nil { + if action.Proto != SyslogProtoTcp && action.Proto != SyslogProtoUdp { + return fmt.Errorf("bad syslog protocol %v", action.Proto) + } + writer, err := syslog.Dial(action.Proto, fmt.Sprintf("%v:%v", action.Host, action.Port), syslog.LOG_LOCAL0, viper.GetString(config.SyslogTag)) + if err != nil { + return err + } + + action.writer = writer + } + + // Create template + if action.bodyTemplate == nil { + bodyTemplate, err := template.New("Action").Parse(action.Template) + if err != nil { + return err + } + + action.bodyTemplate = bodyTemplate + } + + // Send messages + for _, cur := range *events { + var buf bytes.Buffer + err := action.bodyTemplate.Execute(&buf, *cur) + if err != nil { + return err + } + + err = action.writer.Info(buf.String()) + if err != nil { + return err + } + } + return nil +} + +func (action SyslogAction) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + CurType string `json:"type"` + Host string `json:"host"` + Port string `json:"port"` + Proto string `json:"protocol"` + Name string `json:"name"` + Format string `json:"format"` + Template string `json:"template"` + }{ + CurType: SyslogActionType, + Host: action.Host, + Port: strconv.Itoa(action.Port), + Proto: action.Proto, + Name: action.Name, + Format: "", + Template: action.Template, + }) +} + +func (action *SyslogAction) UnmarshalJSON(b []byte) error { + cur := struct { + CurType string `json:"type"` + Host string `json:"host"` + Port string `json:"port"` + Proto string `json:"protocol"` + Name string `json:"name"` + Format string `json:"format"` + Template string `json:"template"` + }{} + + err := json.Unmarshal(b, &cur) + if err != nil { + return err + } + + if cur.CurType != SyslogActionType { + return fmt.Errorf("bad action type. 
Expect syslog, got %s", cur.CurType) + } + + action.Host = cur.Host + action.Port, err = strconv.Atoi(cur.Port) + if err != nil { + return fmt.Errorf("can't parse port: %v", cur.Port) + } + action.Proto = cur.Proto + action.Name = cur.Name + action.Template = cur.Template + + return nil +} + +func (action *SyslogAction) ParseInterface(v interface{}) error { + m, ok := v.(map[string]interface{}) + if !ok { + return fmt.Errorf("can't parse %v from %T", v, v) + } + + action.Host, ok = m["host"].(string) + if !ok { + return errors.New("can't get host") + } + + // Port can be int (float64) or string + if port, ok := m["port"].(float64); ok { + action.Port = int(port) + } else if port, ok := m["port"].(string); ok { + cur, err := strconv.Atoi(port) + if err != nil { + return fmt.Errorf("bad port string: %v", port) + } + action.Port = cur + } else { + return fmt.Errorf("bad port type: %T with value %v", m["port"], m["port"]) + } + + action.Proto, ok = m["protocol"].(string) + if !ok { + return errors.New("can't get protocol") + } + + action.Name, ok = m["name"].(string) + if !ok { + return errors.New("can't get name") + } + + // Template can be in data or template + if action.Template, ok = m["data"].(string); !ok { + if action.Template, ok = m["template"].(string); !ok { + return errors.New("can't get template") + } + } + + return nil +} diff --git a/correlator/rules/action_syslog_test.go b/correlator/rules/action_syslog_test.go new file mode 100644 index 0000000..d14bd41 --- /dev/null +++ b/correlator/rules/action_syslog_test.go @@ -0,0 +1,156 @@ +package rules + +import ( + "encoding/json" + "fmt" + "testing" +) + +func TestActionSyslogMarshal(t *testing.T) { + action := SyslogAction{Host: "localhost", Port: 514, Proto: "udp", Name: "my logger", Template: "{{.EventId}}"} + + bytes, err := json.Marshal(action) + if err != nil { + t.Error(err) + } + + if string(bytes) != `{"type":"syslog","host":"localhost","port":"514","protocol":"udp","name":"my logger","format":"","template":"{{.EventId}}"}` { + t.Error(fmt.Sprintf("Got bad json: %s", string(bytes))) + } +} + +func TestActionSyslogUnmarshalGood(t *testing.T) { + str := `{"type":"syslog","host":"localhost","port":"514","protocol":"udp","name":"my logger","format":"","template":"{{.EventId}}"}` + + var action SyslogAction + + err := json.Unmarshal([]byte(str), &action) + if err != nil { + t.Error(err) + } + + if action.Host != "localhost" { + t.Error(fmt.Sprintf("Got bad host: %v", action.Host)) + } + + if action.Port != 514 { + t.Error(fmt.Sprintf("Got bad port: %v", action.Port)) + } + + if action.Proto != SyslogProtoUdp { + t.Error(fmt.Sprintf("Got bad protocol: %v", action.Proto)) + } + + if action.Name != "my logger" { + t.Error(fmt.Sprintf("Got bad name: %v", action.Name)) + } + + if action.Template != "{{.EventId}}" { + t.Error(fmt.Sprintf("Got bad template: %v", action.Template)) + } +} + +func TestActionSyslogUnmarshalBadType(t *testing.T) { + str := `{"type":"http","url":"http://localhost:543/http","template":"{{.event_id}}"}` + + var action SyslogAction + + err := json.Unmarshal([]byte(str), &action) + if err == nil { + t.Error("Managed to read json with bad action type") + } +} + +func TestActionSyslogUnmarshalBad(t *testing.T) { + str := `Some text` + + action := new(SyslogAction) + + err := action.UnmarshalJSON([]byte(str)) + if err == nil { + t.Error("No error with invalid json") + } +} + +func TestActionSyslogParseInterfaceBad(t *testing.T) { + data := make([]interface{}, 1) + data[0] = 10 + + action := new(SyslogAction) + + err := 
action.ParseInterface(data) + + if err == nil { + t.Error("No error with bad interface") + } +} + +func TestActionSyslogParseInterfaceBadNoHost(t *testing.T) { + data := make(map[string]interface{}) + + action := new(SyslogAction) + + err := action.ParseInterface(data) + + if err == nil { + t.Error("No error when no host set") + } +} + +func TestActionSyslogParseInterfaceBadNoPort(t *testing.T) { + data := make(map[string]interface{}) + data["host"] = "localhost" + + action := new(SyslogAction) + + err := action.ParseInterface(data) + + if err == nil { + t.Error("No error when no port set") + } +} + +func TestActionSyslogParseInterfaceBadNoProto(t *testing.T) { + data := make(map[string]interface{}) + data["host"] = "localhost" + data["port"] = 134.0 + + action := new(SyslogAction) + + err := action.ParseInterface(data) + + if err == nil { + t.Error("No error when no protocol set") + } +} + +func TestActionSyslogParseInterfaceBadNoName(t *testing.T) { + data := make(map[string]interface{}) + data["host"] = "localhost" + data["port"] = 134.0 + data["protocol"] = "TCP" + + action := new(SyslogAction) + + err := action.ParseInterface(data) + + if err == nil { + t.Error("No error when no name set") + } +} + +func TestActionSyslogParseInterfaceBadNoData(t *testing.T) { + data := make(map[string]interface{}) + data["host"] = "localhost" + data["port"] = 134.0 + data["protocol"] = "TCP" + data["name"] = "ass" + + action := new(SyslogAction) + + err := action.ParseInterface(data) + + if err == nil { + t.Error("No error when no template set") + } +} diff --git a/correlator/rules/action_t.go b/correlator/rules/action_t.go new file mode 100644 index 0000000..80899b7 --- /dev/null +++ b/correlator/rules/action_t.go @@ -0,0 +1,98 @@ +package rules + +import ( + "errors" + "iwarma.ru/console/correlator/events" + "sync" +) + +// TestAction is an Action for testing purposes +type TestAction struct { + PerformError bool + ParseInterfaceError bool + MarshalError bool + UnmarshalError bool +} + +var ( + performCount int + performMutex sync.Mutex + eventsProcessed int +) + +func performCountUpdate() { + performMutex.Lock() + defer performMutex.Unlock() + + performCount++ +} + +func PerformCountClear() { + performMutex.Lock() + defer performMutex.Unlock() + + performCount = 0 +} + +func GetPerformCount() int { + performMutex.Lock() + defer performMutex.Unlock() + + return performCount +} + +func EventsProcessedClear() { + performMutex.Lock() + defer performMutex.Unlock() + + eventsProcessed = 0 +} + +func GetEventsProcessed() int { + performMutex.Lock() + defer performMutex.Unlock() + + return eventsProcessed +} + +func (action TestAction) GetType() string { + return "TEST" +} + +func (action TestAction) Perform(events *[]*events.Event) error { + performCountUpdate() + + performMutex.Lock() + defer performMutex.Unlock() + eventsProcessed += len(*events) + + if action.PerformError { + return errors.New("test error") + } else { + return nil + } +} + +func (action *TestAction) ParseInterface(v interface{}) error { + if action.ParseInterfaceError { + return errors.New("test error") + } else { + return nil + } +} + +func (action TestAction) MarshalJSON() ([]byte, error) { + if action.MarshalError { + return nil, errors.New("test error") + } else { + return []byte(`{"type":"test"}`), nil + } +} + +func (action *TestAction) UnmarshalJSON(b []byte) error { + if action.UnmarshalError { + return errors.New("test error") + } else { + return nil + } +} diff --git a/correlator/rules/action_t_test.go 
b/correlator/rules/action_t_test.go new file mode 100644 index 0000000..ab88a51 --- /dev/null +++ b/correlator/rules/action_t_test.go @@ -0,0 +1,68 @@ +package rules + +import ( + "iwarma.ru/console/correlator/events" + "testing" +) + +func TestActionTestErrors(t *testing.T) { + action := TestAction{ + PerformError: true, + ParseInterfaceError: true, + MarshalError: true, + UnmarshalError: true, + } + + eventsArray := make([]*events.Event, 0) + + err := action.Perform(&eventsArray) + if err == nil { + t.Error("No error in Perform") + } + + err = action.ParseInterface("test") + if err == nil { + t.Error("No error in ParseInterface") + } + + bytes, err := action.MarshalJSON() + if err == nil { + t.Error("No error in MarshalJSON") + } + + err = action.UnmarshalJSON(bytes) + if err == nil { + t.Error("No error in UnmarshalJSON") + } +} + +func TestActionTestNoErrors(t *testing.T) { + action := TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + + eventsArray := make([]*events.Event, 0) + + err := action.Perform(&eventsArray) + if err != nil { + t.Error("Got error in Perform") + } + + err = action.ParseInterface("test") + if err != nil { + t.Error("Got error in ParseInterface") + } + + bytes, err := action.MarshalJSON() + if err != nil { + t.Error("Got error in MarshalJSON") + } + + err = action.UnmarshalJSON(bytes) + if err != nil { + t.Error("Got error in UnmarshalJSON") + } +} diff --git a/correlator/rules/debug_action.go b/correlator/rules/debug_action.go new file mode 100644 index 0000000..e031952 --- /dev/null +++ b/correlator/rules/debug_action.go @@ -0,0 +1,40 @@ +package rules + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/url" + "time" + + "github.com/google/uuid" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" +) + +// DumpNetwork Dump network request/response for debug purpose +func DumpNetwork(action string, url url.URL, request map[string]interface{}, response []byte, status int) { + data := make(map[string]interface{}) + + data["url"] = url + data["request"] = request + data["response"] = string(response) + data["status"] = status + + dumpData, err := json.MarshalIndent(data, "", "\t") + if err != nil { + log.Errorf("Can't dump request: %v", err) + log.Debugf("Sending request: %v", request) + log.Debugf("Got response: %v", string(response)) + // Nothing to write if the dump can't be serialized + return + } + + fileName := fmt.Sprintf("%v/correlator_debug-%v_%v_action_%v.json", viper.GetString(config.DebugDumpPath), uuid.New(), action, time.Now().Unix()) + + err = ioutil.WriteFile(fileName, dumpData, 0644) + if err != nil { + log.Errorf("Can't dump request: %v", err) + log.Debugf("Sending request: %v", request) + log.Debugf("Got response: %v", string(response)) + } +} diff --git a/correlator/rules/predicate.go b/correlator/rules/predicate.go new file mode 100644 index 0000000..3c750ee --- /dev/null +++ b/correlator/rules/predicate.go @@ -0,0 +1,123 @@ +package rules + +import ( + "encoding/json" + "errors" + "fmt" + "github.com/olivere/elastic/v7" +) + +// Predicate is a rule predicate +type Predicate struct { + Field string `json:"field"` + Operands []interface{} `json:"operands"` +} + +// NewPredicate Create new QueryString predicate +// TODO: Need to think about fields argument, to search only in limited set of fields. 
See https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#query-string-multi-field +func NewPredicate(field string, operands ...interface{}) Predicate { + return Predicate{Field: field, Operands: operands} +} + +// Create query for Query String +func (predicate Predicate) sourceQueryString() (interface{}, error) { + + if len(predicate.Operands) != 1 { + return nil, fmt.Errorf("bad operands count, expect %v, got %v", 1, len(predicate.Operands)) + } + + value, ok := predicate.Operands[0].(string) + if !ok { + return nil, fmt.Errorf("bad operand type, need string, got %T", predicate.Operands[0]) + } + + if predicate.Field != "" { + return elastic.NewQueryStringQuery(value).DefaultField(predicate.Field).Source() + } + + return elastic.NewQueryStringQuery(value).Source() +} + +// Source returns the JSON-serializable query request. +func (predicate Predicate) Source() (interface{}, error) { + + return predicate.sourceQueryString() + +} + +// Parse predicate recursively +func getPredicate(p interface{}) (*Predicate, error) { + result := new(Predicate) + var value map[string]interface{} + + value, ok := p.(map[string]interface{}) + if !ok { + return nil, fmt.Errorf("bad interface, need map[string]interface{}, got %T", p) + } + + // Field + if _, ok := value["field"]; ok { + if v, ok := value["field"].(string); ok { + result.Field = v + } else { + return nil, fmt.Errorf("bad predicate field: %T, %v", value["field"], value["field"]) + } + } + + // Operands + var operands interface{} + if cur, ok := value["operands"]; ok { + operands = cur + } else if cur, ok := value["value"]; ok { + operands = cur + } else { + return nil, errors.New("no operands found") + } + + // Now, processing operands + switch v := operands.(type) { + case string: + result.Operands = make([]interface{}, 1) + result.Operands[0] = v + case []interface{}: + result.Operands = make([]interface{}, len(v)) + + for i, cur := range v { + switch curOperand := cur.(type) { + case string: + result.Operands[i] = curOperand + case interface{}: + op, err := getPredicate(curOperand) + if err != nil { + return nil, fmt.Errorf("can't parse predicate's operand #%v -> %v (%v)", i, curOperand, err) + } + result.Operands[i] = op + default: + return nil, fmt.Errorf("can't parse operand #%v with type %T and value %v", i, v, v) + + } + } + default: + return nil, fmt.Errorf("can't parse operands with type %T and value %v", v, v) + } + + return result, nil +} + +func (predicate *Predicate) UnmarshalJSON(b []byte) error { + var alias interface{} + err := json.Unmarshal(b, &alias) + if err != nil { + return err + } + + res, err := getPredicate(alias) + if err != nil { + return err + } + + predicate.Field = res.Field + predicate.Operands = res.Operands + + return nil +} diff --git a/correlator/rules/predicate_test.go b/correlator/rules/predicate_test.go new file mode 100644 index 0000000..7b249c3 --- /dev/null +++ b/correlator/rules/predicate_test.go @@ -0,0 +1,172 @@ +package rules + +import ( + "encoding/json" + "fmt" + "strings" + "testing" +) + +func TestNewQueryStringPredicate(t *testing.T) { + goodStr := "event_src_msg: \"act=2002910\"" + predicate := NewPredicate("", goodStr) + + if len(predicate.Operands) != 1 { + t.Errorf("Got bad operands count. Expect 1, got %v", len(predicate.Operands)) + } + + operand, ok := predicate.Operands[0].(string) + if !ok { + t.Errorf("Bad operand type. Expect string, got %T", predicate.Operands[0]) + } + + if operand != goodStr { + t.Errorf("Got bad operand. 
Expect %v, got %v", goodStr, operand) + } +} + +func TestParseQueryStringPredicate(t *testing.T) { + goodStr := "event_src_msg: \\\"act=2002910\\\"" + predicateStr := fmt.Sprintf(`{"type": "%v", "field": "NULL", "operands": [ "%v" ]}`, "query_string", goodStr) + + var predicate Predicate + err := json.Unmarshal([]byte(predicateStr), &predicate) + + if err != nil { + t.Logf("%v", predicateStr) + t.Errorf("%v", err) + return + } + + if len(predicate.Operands) != 1 { + t.Errorf("Got bad operands count. Expect 1, got %v", len(predicate.Operands)) + } + + operand, ok := predicate.Operands[0].(string) + if !ok { + t.Errorf("Bad operand type. Expect string, got %T", predicate.Operands[0]) + } + + if operand != strings.Replace(goodStr, "\\", "", -1) { + t.Errorf("Got bad operand. Expect \"%v\", got \"%v\"", goodStr, operand) + } +} + +func TestQueryStringToQuery(t *testing.T) { + goodStr := "event_src_msg: \"act=2002910\"" + predicate := NewPredicate("", goodStr) + + data, err := json.Marshal(predicate) + if err != nil { + t.Errorf("%v", err) + return + } + + predicateStr := `{"field":"","operands":["event_src_msg: \"act=2002910\""]}` + + if string(data) != predicateStr { + t.Errorf("Got bad predicate str. Expec %v, got %v", predicateStr, string(data)) + } +} + +func TestQueryStringWillUseField(t *testing.T) { + predicateStr := `{ + "type": "query_string", + "field": "", + "operands": "event_protocol: \"TCP\" AND event_severity:>5" + }` + + var predicate Predicate + err := json.Unmarshal([]byte(predicateStr), &predicate) + + if err != nil { + t.Logf("%v", predicateStr) + t.Errorf("%v", err) + return + } + + if len(predicate.Operands) != 1 { + t.Errorf("Got bad operands count. Expect 1, got %v", len(predicate.Operands)) + } + + _, ok := predicate.Operands[0].(string) + if !ok { + t.Errorf("Bad operand type. Expect string, got %T", predicate.Operands[0]) + } + +} + +func TestQueryStringFieldJson(t *testing.T) { + predicate := NewPredicate("event_src_msg", "event_protocol: TCP") + data, err := json.Marshal(predicate) + if err != nil { + t.Errorf("%v", err) + return + } + + predicateStr := `{"field":"event_src_msg","operands":["event_protocol: TCP"]}` + + if string(data) != predicateStr { + t.Errorf("Got bad predicate str. Expec %v, got %v", predicateStr, string(data)) + } +} + +func TestQueryStringFieldJsonEmpty(t *testing.T) { + predicate := NewPredicate("", "event_protocol: TCP") + + data, err := json.Marshal(predicate) + if err != nil { + t.Errorf("%v", err) + return + } + + predicateStr := `{"field":"","operands":["event_protocol: TCP"]}` + + if string(data) != predicateStr { + t.Errorf("Got bad predicate str. Expec %v, got %v", predicateStr, string(data)) + } +} + +func TestQueryStringFieldSource(t *testing.T) { + predicate := NewPredicate("event_src_msg", "event_protocol: TCP") + + src, err := predicate.Source() + if err != nil { + t.Errorf("%v", err) + return + } + + data, err := json.Marshal(src) + if err != nil { + t.Errorf("%v", err) + return + } + + predicateStr := `{"query_string":{"default_field":"event_src_msg","query":"event_protocol: TCP"}}` + + if string(data) != predicateStr { + t.Errorf("Got bad predicate str. 
Expec %v, got %v", predicateStr, string(data)) + } +} + +func TestQueryStringFieldSourceEmpty(t *testing.T) { + predicate := NewPredicate("", "event_protocol: TCP") + + src, err := predicate.Source() + if err != nil { + t.Errorf("%v", err) + return + } + + data, err := json.Marshal(src) + if err != nil { + t.Errorf("%v", err) + return + } + + predicateStr := `{"query_string":{"query":"event_protocol: TCP"}}` + + if string(data) != predicateStr { + t.Errorf("Got bad predicate str. Expec %v, got %v", predicateStr, string(data)) + } +} diff --git a/correlator/rules/rule.go b/correlator/rules/rule.go new file mode 100644 index 0000000..befe92c --- /dev/null +++ b/correlator/rules/rule.go @@ -0,0 +1,222 @@ +package rules + +import ( + "encoding/json" + "errors" + "fmt" + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/events" + "sync" + "time" + + log "github.com/sirupsen/logrus" + + "github.com/olivere/elastic/v7" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" +) + +// Rule is a correlator rule +type Rule struct { + Id string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Multi bool `json:"multi"` + Depth time.Duration `json:"depth"` + Predicate Predicate `json:"predicat"` + Actions []Action `json:"actions"` +} + +// GetRangeQuery Create range query for selected depth +func (rule Rule) GetRangeQuery() (*elastic.BoolQuery, error) { + if int(rule.Depth) == 0 { + return nil, errors.New("have empty duration") + } + return elastic.NewBoolQuery().Should( + elastic.NewRangeQuery("event_first").Gte(time.Now().UTC().Add(-rule.Depth)).Lte(time.Now().UTC()), + elastic.NewRangeQuery("event_last").Gte(time.Now().UTC().Add(-rule.Depth)).Lte(time.Now().UTC())), nil +} + +// Do Apply rule +func (rule Rule) Do(client *es.Elastic) (*[]*events.Event, error) { + cl := log.WithFields(log.Fields{"name": rule.Name, "id": rule.Id}) + cl.Debug("Starting rule") + defer cl.Debug("Done rule") + + if len(rule.Id) == 0 { + return nil, errors.New("need rule ID to be set") + } + + if rule.Depth == 0 { + return nil, errors.New("bad duration value") + } + + // Don't process same event twice + tagsQuery := elastic.NewMatchQuery("rule_tags", rule.Id) + + // Some types of predicates need additional processing + query, err := rule.GetRangeQuery() + if err != nil { + cl.Error(err) + return nil, err + } + query.Must(rule.Predicate).MustNot(tagsQuery) + + // Show query if user want + client.DebugQuery(events.GetAggregatedIndexName(), query) + + aggregatedEvents := make([]*events.Event, 0) + + // Query for events + raw, errs1 := client.Query(events.GetAggregatedIndexName(), query) + aggregated, errs2 := events.ParseEvents(raw, errs1, viper.GetInt(config.Threads)) + + cl.Debug("Start reading results") + for event := range aggregated { + aggregatedEvents = append(aggregatedEvents, event) + } + cl.Debug("Finish reading results") + + for err := range errs2 { + cl.Debug("Got errors") + return nil, err + } + + cl.Debugf("Done getting events. 
Got %v events", len(aggregatedEvents)) + + if len(aggregatedEvents) == 0 { + cl.Debug("No aggregated events suitable for rule") + return nil, nil + } + + cl.Debug("Starting actions") + + var wg sync.WaitGroup + wg.Add(len(rule.Actions)) + actionErrs := make(chan error, len(rule.Actions)) + + // Apply actions + for _, cur := range rule.Actions { + go func(action Action) { + actionLog := cl.WithField("multi", rule.Multi) + defer wg.Done() + actionLog.Tracef("Start action %v", action.GetType()) + defer actionLog.Tracef("Finish action %v", action.GetType()) + + if rule.Multi { + actionLog.Debugf("Sending %v events to action", len(aggregatedEvents)) + err := action.Perform(&aggregatedEvents) + if err != nil { + actionErrs <- err + } + } else { + slice := aggregatedEvents[0:1] + actionLog.Debugf("Sending %v events to action", len(slice)) + err := action.Perform(&slice) + if err != nil { + actionErrs <- err + } + } + }(cur) + } + + wg.Wait() + close(actionErrs) + + for err := range actionErrs { + if err != nil { + cl.Errorf("%v", err) + return nil, err + } + } + + return &aggregatedEvents, nil +} + +//################################################### +// Json serializer/deserializer +//################################################### + +func (rule Rule) MarshalJSON() ([]byte, error) { + return json.Marshal(&struct { + Id string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Depth string `json:"depth"` + Predicat Predicate `json:"predicat"` + Multi bool `json:"multi"` + Actions *[]Action `json:"actions"` + }{ + Id: rule.Id, + Name: rule.Name, + Description: rule.Description, + Depth: rule.Depth.String(), + Predicat: rule.Predicate, + Actions: &rule.Actions, + }) +} + +func (rule *Rule) UnmarshalJSON(b []byte) error { + alias := &struct { + Id string `json:"id"` + Name string `json:"name"` + Description string `json:"description"` + Depth string `json:"depth"` + Predicat Predicate `json:"predicat"` + Multi bool `json:"multi"` + Actions []interface{} `json:"actions"` + }{} + + err := json.Unmarshal(b, alias) + if err != nil { + log.Debugf("Can't unmarshall rule: %v", string(b)) + return err + } + + rule.Depth, err = time.ParseDuration(alias.Depth) + if err != nil { + log.Debugf("Can't parse depth: %v", alias.Depth) + return err + } + + //https://pkg.go.dev/github.com/mitchellh/mapstructure?tab=doc + + rule.Id = alias.Id + rule.Name = alias.Name + rule.Description = alias.Description + rule.Predicate = alias.Predicat + rule.Multi = alias.Multi + for _, cur := range alias.Actions { + var item Action + + switch curType := cur.(map[string]interface{})["type"].(string); curType { + case HttpActionType: + item = &HttpAction{} + case SyslogActionType: + item = &SyslogAction{} + case IncidentActionType: + item = &IncidentAction{} + case BashActionType: + item = &BashAction{} + case ExecActionType: + item = &ExecAction{} + case AssetActionType: + item = &AssetAction{} + case FirewallActionType: + item = &FirewallAction{} + default: + return fmt.Errorf("can't parse action with type %v", curType) + } + + err = item.ParseInterface(cur) + if err != nil { + cl := log.WithFields(log.Fields{"type": cur.(map[string]interface{})["type"].(string)}) + cl.Debugf("Can't parse interface: %v", err) + return err + } + + rule.Actions = append(rule.Actions, item) + } + + return nil +} diff --git a/correlator/rules/rule_store.go b/correlator/rules/rule_store.go new file mode 100644 index 0000000..f5f9d02 --- /dev/null +++ b/correlator/rules/rule_store.go @@ -0,0 +1,195 @@ +package 
rules + +import ( + "fmt" + "sync" + "time" + + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/events" + "iwarma.ru/console/correlator/stat" + + log "github.com/sirupsen/logrus" + + "github.com/olivere/elastic/v7" +) + +// StoreItem store rule and additional info for it +type StoreItem struct { + Enabled bool + Rule +} + +// RuleStore is a store for rules +// It is, in effect, the correlator itself +type RuleStore struct { + rules []*StoreItem + rulesMutex sync.Mutex + stat *Stat + client *es.Elastic + cl *log.Entry + bulk *elastic.BulkService +} + +// NewRuleStore Create new rule store +func NewRuleStore(client *es.Elastic) *RuleStore { + result := new(RuleStore) + result.rules = make([]*StoreItem, 0) + result.client = client + result.cl = log.WithField("part", "RuleStore") + result.bulk = client.NewBulkRequest().Refresh("wait_for") + result.stat = NewStat() + return result +} + +// ClearStore clears the store's rules and statistics +func (store *RuleStore) ClearStore() { + store.rulesMutex.Lock() + defer store.rulesMutex.Unlock() + + store.rules = make([]*StoreItem, 0) + store.stat = NewStat() + store.cl.Debug("Store cleared") +} + +// AddRules adds an array of rules to the store +func (store *RuleStore) AddRules(rules ...Rule) { + store.rulesMutex.Lock() + defer store.rulesMutex.Unlock() + + for _, rule := range rules { + // Reset the flag for every rule; keeping it outside the loop would skip appending any new rule after the first update + found := false + for index, cur := range store.rules { + if cur.Id == rule.Id { + store.rules[index] = &StoreItem{ + Enabled: true, + Rule: rule, + } + store.cl.Debugf("Rule %v updated", rule.Id) + found = true + break + } + } + + if !found { + store.rules = append(store.rules, &StoreItem{ + Enabled: true, + Rule: rule, + }) + store.stat.AverageRuleTime[rule.Id] = new(stat.AvgTime) + store.cl.Debugf("New rule %v added", rule.Id) + } + } + + store.stat.RuleCount = uint64(len(store.rules)) + store.cl.Debugf("Total rules count: %v", store.stat.RuleCount) +} + +// RunRulesSync runs all rules in the store one by one +func (store *RuleStore) RunRulesSync() error { + store.rulesMutex.Lock() + defer store.rulesMutex.Unlock() + + disableRule := func(curRule *StoreItem, err error) { + curRule.Enabled = false + store.stat.Errors[curRule.Id] = err.Error() + store.cl.Infof("Rule %v [%v] was disabled because of an error: %v", curRule.Name, curRule.Id, err) + } + + iterStart := time.Now() + for _, rule := range store.rules { + cl := store.cl.WithFields(log.Fields{"rule_id": rule.Id, "rule_name": rule.Name}) + + if !rule.Enabled { + cl.Debug("Rule is disabled") + continue + } + + ruleStart := time.Now() + resultEvents, err := rule.Do(store.client) + if err != nil { + cl.Debugf("Rule exec error: %v", err) + + // Need to check error. If it is 404 from aggregated index - show error and do nothing + // If it's not 404, 
we need to disable rule + if e, ok := err.(*elastic.Error); ok { + cl.Debugf("Elastic error: %v", err) + + if e.Status == 404 { + err = store.client.CheckAndCreateIndex(events.GetAggregatedIndexName()) + if err != nil { + cl.Errorf("Can't create index: %v", err) + disableRule(rule, fmt.Errorf("can't create index: %v", err)) + } + } else { + disableRule(rule, fmt.Errorf("elastic error: %v", err)) + } + } else { + disableRule(rule, fmt.Errorf("rule execute error: %v", err)) + cl.Errorf("Can't Do rule: %v", err) + return err + } + } + if resultEvents != nil { + cl.Debugf("Got %v events match", len(*resultEvents)) + store.stat.IncidentCount++ + store.stat.EventsCount += uint64(len(*resultEvents)) + + for _, event := range *resultEvents { + + // Check for index in event + if event.GetString("index") == "" { + cl.Errorf("Event with id %v doesn't have an index", event.GetString(events.EventID)) + disableRule(rule, fmt.Errorf("got event %v without index", event.GetString(events.EventID))) + + // Don't stop update cycle, we can find + // more events without index + continue + } + + // Prepare tags + tags := event.AddTag(rule.Id) + updateRequest := elastic.NewBulkUpdateRequest(). + Doc(struct { + Tags []string `json:"rule_tags"` + }{Tags: tags}). + Id(event.GetString(events.AggregatedId)).Index(event.GetString("index")) + store.bulk.Add(updateRequest) + } + } + store.stat.AverageRuleTime[rule.Id].Add(time.Since(ruleStart)) + cl.Debug("Done executing rule") + if store.bulk.NumberOfActions() > 0 { + store.cl.Debug("Start bulk update") + response, err := store.client.ExecuteBulk(store.bulk) + store.cl.Debug("End bulk update") + if err != nil { + store.cl.Errorf("Got error from bulk update: %v", err) + disableRule(rule, fmt.Errorf("can't update rule %v with elastic error: %v", rule.Name, err)) + } + + store.cl.Debugf("Updated count: %v", len(response.Updated())) + store.cl.Debugf("Failed count: %v", len(response.Failed())) + + failed := response.Failed() + if len(failed) > 0 { + disableRule(rule, fmt.Errorf("can't update rule %v with error %v (Id %v Index %v)", rule.Name, failed[0].Error.Reason, failed[0].Id, failed[0].Index)) + } + } else { + store.cl.Debug("No bulk actions") + } + + } + + store.stat.AverageIterationTime.Add(time.Since(iterStart)) + + return nil +} + +func (store *RuleStore) GetStat() *Stat { + store.rulesMutex.Lock() + defer store.rulesMutex.Unlock() + + return store.stat +} diff --git a/correlator/rules/rule_store_test.go b/correlator/rules/rule_store_test.go new file mode 100644 index 0000000..d682bba --- /dev/null +++ b/correlator/rules/rule_store_test.go @@ -0,0 +1,390 @@ +package rules + +import ( + "fmt" + "github.com/olivere/elastic/v7" + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/events" + "iwarma.ru/console/correlator/util" + "testing" + "time" +) + +// Check that NewRuleStore creates and inits a new store +func TestNewRuleStore(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + if len(store.rules) != 0 { + t.Errorf("Got bad rules count: %v", len(store.rules)) + } + + if store.client == nil { + t.Error("Client was not set") + } + + if store.stat.AverageRuleTime == nil { + t.Error("Store stat rules map was not created") + } + + if store.cl == nil { + t.Errorf("Logger wasn't created") + } +} + +// Check that clear store function is working +func TestClearStore(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t)
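+ + // Round trip: add a single rule, then verify that ClearStore leaves the store empty and resets the statistics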
+ + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + var rule Rule + + store.AddRules(rule) + + if len(store.rules) != 1 { + t.Errorf("Bad store size after add: %v", len(store.rules)) + } + + store.ClearStore() + + if len(store.rules) != 0 { + t.Error("Can't clear store") + } +} + +// Check that we can update rule +func TestUpdateRule(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + rule := Rule{Id: "10", Name: "Test"} + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + store.AddRules(rule) + + rule2 := Rule{Id: "10", Name: "Test2222"} + store.AddRules(rule2) + + if len(store.rules) != 1 { + t.Errorf("Bad rules count, need 1, got %v", len(store.rules)) + } + + if store.rules[0].Name != "Test2222" { + t.Error("Rule was not updated") + } +} + +// Check that we can add several rules at a time +func TestAddRules(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + rules := make([]Rule, 10) + for i := 0; i < 10; i++ { + rules[i] = Rule{Id: fmt.Sprintf("%v", i), Name: fmt.Sprintf("Test_%v", i)} + } + + store.AddRules(rules...) + + if len(store.rules) != 10 { + t.Errorf("Bad rules count, need 10, got %v", len(store.rules)) + } +} + +// Check that we can update several rules +func TestUpdateRules(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + rules := make([]Rule, 10) + for i := 0; i < 10; i++ { + rules[i] = Rule{Id: fmt.Sprintf("%v", i), Name: fmt.Sprintf("Test_%v", i)} + } + + store.AddRules(rules...) + + for i := 0; i < len(store.rules); i++ { + rules[i].Name = "DDDD" + } + + store.AddRules(rules...)
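+ + // AddRules matches rules by Id, so the second call must update every stored rule in place rather than append duplicates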
+ + for _, cur := range store.rules { + if cur.Name != "DDDD" { + t.Errorf("Rule %v wasn't updated", cur) + } + } +} + +func TestStoreCanGiveStat(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + stat := store.GetStat() + + if stat.AverageRuleTime == nil { + t.Error("Got nil AverageRuleTime") + } +} + +// Check that Do function works with single action mode +func TestRule_Do(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName()) + if err != nil { + t.Errorf("%v", err) + } + + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), 10, client) + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + actions := make([]Action, 1) + action := TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: NewPredicate("", "device_vendor:TestDevice"), + Actions: actions, + }) + + PerformCountClear() + EventsProcessedClear() + + err = store.RunRulesSync() + if err != nil { + t.Errorf("%v", err) + } + + time.Sleep(time.Second * 2) + + goodCount := 1 + if GetPerformCount() != goodCount { + t.Errorf("Bad perform count. Expect %v, got %v", goodCount, GetPerformCount()) + } + + if GetEventsProcessed() != goodCount { + t.Errorf("Bad events processed count. Expect %v, got %v", goodCount, GetEventsProcessed()) + } +} + +// Check that Do function works with single action mode and multiple rules +func TestMultipleRule_Do(t *testing.T) { + util.PrepareElastic() + index := "test-aggregated" + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName()) + if err != nil { + t.Errorf("%v", err) + } + + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), 2, client) + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + actions := make([]Action, 1) + action := TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(Rule{ + Id: "1", + Name: "TestRule #1", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: NewPredicate("", "device_vendor:TestDevice"), + Actions: actions, + }, + Rule{ + Id: "2", + Name: "TestRule #2", + Description: "Some long description", + Multi: false, + Depth: time.Second * 30, + Predicate: NewPredicate("", "source_ip:127.0.0.1"), + Actions: actions, + }, + ) + + PerformCountClear() + EventsProcessedClear() + + err = store.RunRulesSync() + if err != nil { + t.Errorf("%v", err) + } + + time.Sleep(time.Second * 2) + + goodCount := 2 + if GetPerformCount() != goodCount { + t.Errorf("Bad perform count. 
Expect %v, got %v", goodCount, GetPerformCount()) + } + query := elastic.NewMatchAllQuery() + + raw, errs1 := client.Query(index, query) + events, _ := events.ParseEvents(raw, errs1, 2) + + errorsCount := 0 + for event := range events { + tags, ok := event.GetValue("rule_tag").([]string) + if ok { + if len(tags) != 2 { + errorsCount += 1 + } + } + + } + if errorsCount != 0 { + t.Errorf("Got errors while count rule tags . Expect 0 errors, got %v", errorsCount) + } + +} + +// Check that Do function works with multi action mode +func TestRule_Do_multi(t *testing.T) { + util.SetupTest(t) + defer util.TearDownTest(t) + + client, err := es.NewElastic() + if err != nil { + t.Errorf("%v", err) + return + } + + err = events.ClearIndex(client, events.GetAggregatedIndexName()) + if err != nil { + t.Errorf("%v", err) + } + + // Events count + N := 10 + + err = events.FillAggregatedEvents(events.GetAggregatedIndexName(), N, client) + if err != nil { + t.Errorf("%v", err) + return + } + + store := NewRuleStore(client) + + actions := make([]Action, 1) + action := TestAction{ + PerformError: false, + ParseInterfaceError: false, + MarshalError: false, + UnmarshalError: false, + } + actions[0] = &action + + store.AddRules(Rule{ + Id: "1", + Name: "TestRule", + Description: "Some long description", + Multi: true, + Depth: time.Second * 30, + Predicate: NewPredicate("", "device_vendor:TestDevice"), + Actions: actions, + }) + + PerformCountClear() + EventsProcessedClear() + + err = store.RunRulesSync() + if err != nil { + t.Errorf("%v", err) + } + + time.Sleep(time.Second * 2) + + goodCount := 1 + if GetPerformCount() != goodCount { + t.Errorf("Bad perform count. Expect %v, got %v", goodCount, GetPerformCount()) + } + + if GetEventsProcessed() != N { + t.Errorf("Bad events processed count. 
Expect %v, got %v", N, GetEventsProcessed()) + } +} diff --git a/correlator/rules/rule_test.go b/correlator/rules/rule_test.go new file mode 100644 index 0000000..51b061e --- /dev/null +++ b/correlator/rules/rule_test.go @@ -0,0 +1,596 @@ +package rules + +import ( + "encoding/json" + "fmt" + "github.com/google/uuid" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "iwarma.ru/console/correlator/config" + "iwarma.ru/console/correlator/es" + "iwarma.ru/console/correlator/events" + "iwarma.ru/console/correlator/util" + "testing" + "time" + + "github.com/olivere/elastic/v7" +) + +func FillAggregatedEvents(index string, count int, el *es.Elastic, firstEvent time.Time, lastEvent time.Time) error { + bulk := el.NewBulkRequest() + + for i := 0; i < count; i++ { + event := events.Event{ + events.Hash: fmt.Sprintf("%v", i), + events.FirstEvent: firstEvent, + events.LastEvent: lastEvent, + events.EventCount: 0, + events.Created: firstEvent, + events.Tags: nil, + events.AggregatedId: fmt.Sprintf("%v", i), + events.CeleryDone: false, + "timestamp": time.Time{}, + "type": "test", + "event_timestamp": time.Time{}, + "event_id": uuid.NewString(), + "event_severity": uint8(i), + "event_src_msg": fmt.Sprintf("Test message %v", i), + "event_protocol": "TCP", + "device_vendor": "TestDevice", + "device_product": "TestProduct", + "device_version": "1.0", + "device_action": "Test", + "sign_id": fmt.Sprintf("%v", i), + "app_category": "Test", + "app_subcategory": "Test", + "app_name": "Test", + "source_ip": "127.0.0.1", + "source_mac": "00:50:56:c0:00:08", + "source_host": "localhost", + "source_port": uint32(i), + "source_user": "root", + "destination_ip": "127.0.0.1", + "destination_host": "localhost", + "destination_port": uint32(i), + "destination_user": "user", + } + + bulk = bulk.Add(elastic.NewBulkIndexRequest().Index(index).Id(event.GetString(events.EventID)).Doc(event)) + } + + bulkResponse, err := el.ExecuteBulk(bulk) + if err != nil { + log.Errorf("Can't index documents: %v", err) + return err + } + + if bulkResponse.Errors { + log.Errorf("Got errors from bulk requset: %v", bulkResponse.Failed()) + return fmt.Errorf("bulk error") + } + + if len(bulkResponse.Indexed()) != count { + log.Errorf("Bad bulk index count. 
Got %v, expect %v", len(bulkResponse.Indexed()), count) + return fmt.Errorf("bad bulk count") + } + + // wait until elastic is ready + time.Sleep(time.Second) + + return nil +} + +func TestGetRangeQueryBadDuration(t *testing.T) { + var rule Rule + + query, err := rule.GetRangeQuery() + if err == nil { + t.Error("Get duration from empty field") + } + + if query != nil { + t.Error("Got result where error occurs") + } +} + +func TestGetRangeQuery(t *testing.T) { + rule := Rule{Depth: time.Minute} + + query, err := rule.GetRangeQuery() + if err != nil { + t.Error("Can't parse rule duration") + return + } + + source, err := query.Source() + if err != nil { + t.Error("Can't get query source") + } + + if source == nil { + t.Error("Got nil source for query") + } +} + +func TestRuleMarshall(t *testing.T) { + actions := make([]Action, 2) + actions[0] = &HttpAction{Url: "http://localhost:3456", Template: "{{.EventSrcMsg}}", ContentType: "text/plain"} + actions[1] = &SyslogAction{Host: "localhost", Port: 514, Proto: SyslogProtoUdp, Name: "My Tester", Template: "{{.EventSrcMsg}}"} + + predicate := NewPredicate("", "event_severity:>=6") + + rule := Rule{Actions: actions, Depth: time.Minute * 5, Name: "My test rule", Predicate: predicate, Id: "a716da55-66c5-478b-80fb-2c2a7e517a01"} + + bytes, err := json.Marshal(rule) + if err != nil { + t.Error(err) + } + + if string(bytes) != `{"id":"a716da55-66c5-478b-80fb-2c2a7e517a01","name":"My test rule","description":"","depth":"5m0s","predicat":{"field":"","operands":["event_severity:\u003e=6"]},"multi":false,"actions":[{"type":"http","url":"http://localhost:3456","template":"{{.EventSrcMsg}}","content_type":"text/plain"},{"type":"syslog","host":"localhost","port":"514","protocol":"udp","name":"My Tester","format":"","template":"{{.EventSrcMsg}}"}]}` { + t.Errorf("Got bad json: %v", string(bytes)) + } +} + +func TestRuleMarshall2(t *testing.T) { + actions := make([]Action, 2) + actions[0] = &HttpAction{Url: "http://localhost:3456", Template: "{{.EventSrcMsg}}", ContentType: "application/json"} + actions[1] = &SyslogAction{Host: "localhost", Port: 514, Proto: SyslogProtoUdp, Name: "My Tester", Template: "{{.EventSrcMsg}}"} + + rule := Rule{Actions: actions, Depth: time.Minute * 5, Name: "My test rule", Id: "a716da55-66c5-478b-80fb-2c2a7e517a01"} + + bytes, err := json.Marshal(rule) + if err != nil { + t.Error(err) + } + + if string(bytes) != `{"id":"a716da55-66c5-478b-80fb-2c2a7e517a01","name":"My test rule","description":"","depth":"5m0s","predicat":{"field":"","operands":null},"multi":false,"actions":[{"type":"http","url":"http://localhost:3456","template":"{{.EventSrcMsg}}","content_type":"application/json"},{"type":"syslog","host":"localhost","port":"514","protocol":"udp","name":"My Tester","format":"","template":"{{.EventSrcMsg}}"}]}` { + t.Errorf("Got bad json: %v", string(bytes)) + } +} + +func TestRuleUnmarshall(t *testing.T) { + str := `{ + "id": "2", + "group": "Preset", + "actions": + [{ + "type": "incident", + "title": "{{.SignName}}", + "comment": "", + "category": "", + "importance": "50", + "assigned_to": "", + "description": "{{.EventSrcMsg}}"}], + "name": "Serious event", + "description": "Long description of rule", + "multi": false, + "type": 0, + "status": true, + "depth": "10m", + "created": "2021-11-25 15:35:45", + "updated": "2021-11-25 15:35:46", + "predicat": + { + "type": "query_string", + "field": "", + "operands": "event_severity:>=6" + }, + "rev": 1, + "sid": 2, + "is_active": true + }` + var rule Rule + err := 
json.Unmarshal([]byte(str), &rule)
+	if err != nil {
+		t.Error(err)
+		return
+	}
+
+	if rule.Id != "2" {
+		t.Errorf("Got bad id: %v", rule.Id)
+	}
+
+	if rule.Name != "Serious event" {
+		t.Errorf("Got bad name: %v", rule.Name)
+	}
+
+	if rule.Description != "Long description of rule" {
+		t.Errorf("Got bad description: %v", rule.Description)
+	}
+
+	if rule.Depth != time.Minute*10 {
+		t.Errorf("Got bad depth: %v", rule.Depth)
+	}
+
+	if rule.Predicate.Field != "" {
+		t.Errorf("Got bad predicat field: %v", rule.Predicate.Field)
+	}
+
+	if len(rule.Predicate.Operands) != 1 {
+		t.Errorf("Bad operands length: %v", len(rule.Predicate.Operands))
+	}
+
+	if rule.Predicate.Operands[0].(string) != "event_severity:>=6" {
+		t.Errorf("Got bad operand value: %v of type %T", rule.Predicate.Operands[0], rule.Predicate.Operands[0])
+	}
+
+	// Check actions
+	if len(rule.Actions) != 1 {
+		t.Errorf("Got bad actions size: %v", len(rule.Actions))
+	}
+
+	action1 := rule.Actions[0]
+	if action1.GetType() != "incident" {
+		t.Errorf("Got bad action #0 type: %v", action1.GetType())
+	}
+}
+
+func TestRuleUnmarshallBadNotJson(t *testing.T) {
+	text := "Some text"
+
+	rule := &Rule{}
+
+	err := rule.UnmarshalJSON([]byte(text))
+	if err == nil {
+		t.Error("No error with not json")
+	}
+}
+
+func TestRuleUnmarshallBadDepth(t *testing.T) {
+	text := `{"depth":"ass"}`
+
+	rule := &Rule{}
+
+	err := rule.UnmarshalJSON([]byte(text))
+	if err == nil {
+		t.Error("No error with bad depth")
+	}
+}
+
+func TestRuleUnmarshallBadActionType(t *testing.T) {
+	text := `{"depth":"1m","actions":[{"type":"ass"}]}`
+
+	rule := &Rule{}
+
+	err := rule.UnmarshalJSON([]byte(text))
+	if err == nil {
+		t.Error("No error with bad action type")
+	}
+}
+
+func TestRuleUnmarshallBadActionContent(t *testing.T) {
+	text := `{"depth":"1m","actions":[{"type":"http"}]}`
+
+	rule := &Rule{}
+
+	err := rule.UnmarshalJSON([]byte(text))
+	if err == nil {
+		t.Error("No error with bad action content")
+	}
+}
+
+// Check that we get something from elastic
+func TestRuleDoWorks(t *testing.T) {
+	util.SetupTest(t)
+	defer util.TearDownTest(t)
+
+	client, err := es.NewElastic()
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	err = events.ClearIndex(client, events.GetAggregatedIndexName())
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	depth := time.Minute * 50
+	N := 10
+	err = FillAggregatedEvents(events.GetAggregatedIndexName(), N, client, time.Now().UTC().Add(-depth), time.Now().UTC().Add(depth))
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	actions := make([]Action, 1)
+	actions[0] = &TestAction{}
+
+	// Rule
+	rule := Rule{
+		Id:        "Rule",
+		Name:      "My rule",
+		Depth:     depth,
+		Multi:     true,
+		Predicate: NewPredicate("", "device_vendor:TestDevice"),
+		Actions:   actions,
+	}
+
+	resultEvents, err := rule.Do(client)
+	if err != nil {
+		t.Error(err)
+		return
+	}
+
+	if len(*resultEvents) != N {
+		t.Errorf("Bad events count, got %v need %v", len(*resultEvents), N)
+	}
+
+	err = events.ClearIndex(client, events.GetAggregatedIndexName())
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+}
+
+// Check that if aggregated events fall outside the rule depth we don't get any
+func TestRuleDoWithBadDepth(t *testing.T) {
+	util.SetupTest(t)
+	defer util.TearDownTest(t)
+
+	client, err := es.NewElastic()
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	err = events.ClearIndex(client, events.GetAggregatedIndexName())
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	depth := time.Minute * 5
+	N := 10
+	err =
FillAggregatedEvents(events.GetAggregatedIndexName(), N, client, time.Now().UTC().Add(-depth*2), time.Now().UTC().Add(-depth*2))
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	actions := make([]Action, 1)
+	actions[0] = &TestAction{}
+
+	// Rule
+	rule := Rule{
+		Id:        "Rule",
+		Name:      "My rule",
+		Depth:     time.Duration(0),
+		Multi:     true,
+		Predicate: NewPredicate("", "device_vendor:TestDevice"),
+		Actions:   actions,
+	}
+
+	resultEvents, err := rule.Do(client)
+	if err == nil {
+		t.Error("No error with zero rule depth")
+	}
+
+	if resultEvents != nil {
+		t.Error("Got some events when nil was expected")
+	}
+}
+
+// Check that we can detect errors while fetching aggregated events
+func TestRuleDoWithGetterErrors(t *testing.T) {
+	util.SetupTest(t)
+	defer util.TearDownTest(t)
+
+	client, err := es.NewElastic()
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	// Change index name to something that doesn't exist
+	viper.Set(config.ElasticAggregatedIndexName, "asss450")
+
+	depth := time.Minute * 5
+
+	actions := make([]Action, 1)
+	actions[0] = &TestAction{}
+
+	// Rule
+	rule := Rule{
+		Id:        "Rule",
+		Name:      "My rule",
+		Depth:     depth,
+		Multi:     true,
+		Predicate: NewPredicate("", "device_vendor:TestDevice"),
+		Actions:   actions,
+	}
+
+	resultEvents, err := rule.Do(client)
+	if err == nil {
+		t.Error("No error when one was expected")
+		return
+	}
+
+	if resultEvents != nil {
+		t.Error("Got some events when nil was expected")
+	}
+}
+
+// Check that we can detect action errors
+func TestRuleDoWithActionError(t *testing.T) {
+	util.SetupTest(t)
+	defer util.TearDownTest(t)
+
+	client, err := es.NewElastic()
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	err = events.ClearIndex(client, events.GetAggregatedIndexName())
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	depth := time.Minute * 5
+	N := 10
+	err = FillAggregatedEvents(events.GetAggregatedIndexName(), N, client, time.Now().UTC().Add(-depth), time.Now().UTC().Add(depth))
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	actions := make([]Action, 1)
+	actions[0] = &TestAction{PerformError: true}
+
+	// Rule
+	rule := Rule{
+		Id:        "Rule",
+		Name:      "My rule",
+		Depth:     depth,
+		Multi:     true,
+		Predicate: NewPredicate("", "device_vendor:TestDevice"),
+		Actions:   actions,
+	}
+
+	resultEvents, err := rule.Do(client)
+	if err == nil {
+		t.Error("No error when the action throws one")
+	}
+
+	if resultEvents != nil {
+		t.Error("Got events when nil was expected")
+	}
+}
+
+func TestDjangoRuleUnmarshall(t *testing.T) {
+	message := `{
+		"name":"Test",
+		"depth":"600s",
+		"id":"1",
+		"predicat":{
+			"type":"query_string",
+			"field":"",
+			"operands" : "source_host:localhost"
+
+		},
+		"actions":[
+			{
+				"host":"localhost",
+				"port":"514",
+				"protocol":"udp",
+				"name":"test",
+				"template":"{{.EventSrcMsg}}",
+				"type":"syslog"
+			}
+		]
+	}`
+
+	var rule Rule
+	err := json.Unmarshal([]byte(message), &rule)
+	if err != nil {
+		t.Errorf("Json error: %v", err)
+	}
+
+	if rule.Name != "Test" {
+		t.Errorf("Got bad rule name: %v", rule.Name)
+	}
+
+	if rule.Id != "1" {
+		t.Errorf("Got bad rule id: %v", rule.Id)
+	}
+
+	if rule.Depth != time.Minute*10 {
+		t.Errorf("Got bad rule depth: %v", rule.Depth)
+	}
+
+	predicat := rule.Predicate
+
+	if len(predicat.Operands) != 1 {
+		t.Errorf("Got bad predicat operands length: %v", len(predicat.Operands))
+	}
+
+	value := predicat.Operands[0].(string)
+	if value != "source_host:localhost" {
+		t.Errorf("Got bad predicat value: %v", value)
+	}
+
+	if len(rule.Actions) != 1 {
+		t.Errorf("Got bad actions length: %v",
len(rule.Actions))
+	}
+
+	if rule.Actions[0].GetType() != SyslogActionType {
+		t.Errorf("Got bad action type: %v", rule.Actions[0].GetType())
+	}
+
+	bytes, err := json.Marshal(rule.Actions[0])
+	if err != nil {
+		t.Errorf("Can't marshal action")
+	}
+
+	if string(bytes) != `{"type":"syslog","host":"localhost","port":"514","protocol":"udp","name":"test","format":"","template":"{{.EventSrcMsg}}"}` {
+		t.Errorf("Got bad action: %v", string(bytes))
+	}
+}
+
+func TestDjangoRuleUnmarshall2(t *testing.T) {
+	message := `{
+		"name":"Test",
+		"depth":"600s",
+		"id":"1",
+		"predicat":{
+			"type":"query_string",
+			"field":"",
+			"operands":"source_port:514"
+		},
+		"actions":[
+			{
+				"host":"localhost",
+				"port":"514",
+				"protocol":"udp",
+				"name":"test",
+				"template":"{{.EventSrcMsg}}",
+				"type":"syslog"
+			}
+		]
+	}`
+
+	var rule Rule
+	err := json.Unmarshal([]byte(message), &rule)
+	if err != nil {
+		t.Errorf("Json error: %v", err)
+	}
+
+	if rule.Name != "Test" {
+		t.Errorf("Got bad rule name: %v", rule.Name)
+	}
+
+	if rule.Id != "1" {
+		t.Errorf("Got bad rule id: %v", rule.Id)
+	}
+
+	if rule.Depth != time.Minute*10 {
+		t.Errorf("Got bad rule depth: %v", rule.Depth)
+	}
+
+	predicat := rule.Predicate
+
+	if len(predicat.Operands) != 1 {
+		t.Errorf("Got bad predicat operands length: %v", len(predicat.Operands))
+	}
+
+	if len(rule.Actions) != 1 {
+		t.Errorf("Got bad actions length: %v", len(rule.Actions))
+	}
+
+	if rule.Actions[0].GetType() != SyslogActionType {
+		t.Errorf("Got bad action type: %v", rule.Actions[0].GetType())
+	}
+
+	bytes, err := json.Marshal(rule.Actions[0])
+	if err != nil {
+		t.Errorf("Can't marshal action")
+	}
+
+	if string(bytes) != `{"type":"syslog","host":"localhost","port":"514","protocol":"udp","name":"test","format":"","template":"{{.EventSrcMsg}}"}` {
+		t.Errorf("Got bad action: %v", string(bytes))
+	}
+}
diff --git a/correlator/rules/stat.go b/correlator/rules/stat.go
new file mode 100644
index 0000000..5647d38
--- /dev/null
+++ b/correlator/rules/stat.go
@@ -0,0 +1,36 @@
+package rules
+
+import (
+	"encoding/json"
+	"fmt"
+	"iwarma.ru/console/correlator/stat"
+)
+
+// Stat is a Rule store statistic
+type Stat struct {
+	RuleCount            uint64                   `json:"rule_count"`
+	AverageIterationTime stat.AvgTime             `json:"average_time"`
+	AverageRuleTime      map[string]*stat.AvgTime `json:"average_rule_time"`
+	IncidentCount        uint64                   `json:"incident_count"`
+	EventsCount          uint64                   `json:"events_count"`
+	Errors               map[string]string        `json:"errors"`
+}
+
+// NewStat creates a new Stat structure
+func NewStat() *Stat {
+	return &Stat{
+		AverageRuleTime: make(map[string]*stat.AvgTime),
+		IncidentCount:   0,
+		EventsCount:     0,
+		Errors:          make(map[string]string),
+	}
+}
+
+func (stat Stat) String() string {
+	bytes, err := json.Marshal(stat)
+	if err != nil {
+		return fmt.Sprintf("Can't marshal stat: %v", err)
+	}
+
+	return string(bytes)
+}
diff --git a/correlator/rules/stat_test.go b/correlator/rules/stat_test.go
new file mode 100644
index 0000000..063feb6
--- /dev/null
+++ b/correlator/rules/stat_test.go
@@ -0,0 +1,28 @@
+package rules
+
+import (
+	"errors"
+	"iwarma.ru/console/correlator/es"
+	"iwarma.ru/console/correlator/util"
+	"testing"
+)
+
+func TestStoreStringer(t *testing.T) {
+	util.SetupTest(t)
+	defer util.TearDownTest(t)
+
+	client, err := es.NewElastic()
+	if err != nil {
+		t.Errorf("%v", err)
+		return
+	}
+
+	store := NewRuleStore(client)
+
+	stat := store.GetStat()
+	stat.Errors["12"] = errors.New("test error").Error()
+
+	if stat.String() !=
`{"rule_count":0,"average_time":{"value":"0s"},"average_rule_time":{},"incident_count":0,"events_count":0,"errors":{"12":"test error"}}` {
+		t.Errorf("Got bad string representation: %v", store.GetStat().String())
+	}
+}
diff --git a/correlator/rules/test_tools.go b/correlator/rules/test_tools.go
new file mode 100644
index 0000000..3e93268
--- /dev/null
+++ b/correlator/rules/test_tools.go
@@ -0,0 +1,17 @@
+package rules
+
+import "testing"
+
+// Helper function for testing.
+// Checks that the map has the given key and that the key holds the expected value.
+// Reports a test error if the check fails.
+func testMap(m map[string]interface{}, name string, value interface{}, t *testing.T) {
+	v, ok := m[name]
+	if !ok {
+		t.Errorf("No key with name %v in map %v", name, m)
+	}
+
+	if v != value {
+		t.Errorf("Bad map value, expect %v (%T), got %v (%T)", value, value, v, v)
+	}
+}
diff --git a/correlator/rules/web_tools.go b/correlator/rules/web_tools.go
new file mode 100644
index 0000000..869db93
--- /dev/null
+++ b/correlator/rules/web_tools.go
@@ -0,0 +1,74 @@
+package rules
+
+import (
+	"bytes"
+	"crypto/tls"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"net/http"
+
+	log "github.com/sirupsen/logrus"
+	"github.com/spf13/viper"
+	"iwarma.ru/console/correlator/config"
+)
+
+// ObtainAuthToken gets an auth token from the web interface
+func ObtainAuthToken() (string, error) {
+	body, err := json.Marshal(struct {
+		Username string `json:"username"`
+		Password string `json:"password"`
+	}{
+		Username: viper.GetString(config.ConsoleUsername),
+		Password: viper.GetString(config.ConsolePassword)})
+	if err != nil {
+		return "", err
+	}
+
+	request, err := http.NewRequest("POST", viper.GetString(config.ConsoleUrlToken), bytes.NewBuffer(body))
+	if err != nil {
+		log.Debugf("#0 Request to \"%v\"", viper.GetString(config.ConsoleUrlToken))
+		return "", err
+	}
+
+	request.Header.Set("Content-Type", "application/json")
+
+	var client *http.Client
+
+	if viper.GetBool(config.ConsoleIgnoreSSLErrors) {
+		transport := &http.Transport{
+			TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
+		}
+		client = &http.Client{Transport: transport}
+	} else {
+		client = &http.Client{}
+	}
+
+	resp, err := client.Do(request)
+	if err != nil {
+		log.Debugf("#1 Request to \"%v\"", viper.GetString(config.ConsoleUrlToken))
+		return "", err
+	}
+	defer resp.Body.Close()
+
+	if resp.StatusCode != http.StatusOK {
+		return "", fmt.Errorf("got bad status code: %v", resp.StatusCode)
+	}
+
+	respBody, err := ioutil.ReadAll(resp.Body)
+	if err != nil {
+		return "", err
+	}
+
+	var data map[string]interface{}
+	err = json.Unmarshal(respBody, &data)
+	if err != nil {
+		return "", err
+	}
+
+	if token, ok := data["token"].(string); ok {
+		return token, nil
+	} else {
+		return "", fmt.Errorf("can't read response from %v", data)
+	}
+}
diff --git a/correlator/stat/stat.go b/correlator/stat/stat.go
new file mode 100644
index 0000000..7b15fa3
--- /dev/null
+++ b/correlator/stat/stat.go
@@ -0,0 +1,57 @@
+package stat
+
+import (
+	"encoding/json"
+	"fmt"
+	"time"
+)
+
+// How many values we store to calculate average time
+const avgCount = 10
+
+// AvgTime Structure for average time calculations
+type AvgTime struct {
+	Value    time.Duration `json:"value"`
+	index    uint64
+	duration [avgCount]time.Duration
+}
+
+// Add a new time sample
+func (stat *AvgTime) Add(d time.Duration) {
+
+	if stat.index%avgCount == 0 {
+		stat.Value = 0
+		for _, cur := range stat.duration {
+			stat.Value += cur
+		}
+
+		stat.Value = stat.Value / avgCount
+	}
+
+	stat.duration[stat.index%avgCount] = d
+	stat.index++
+}
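+
+// Usage sketch, mirroring TestTimeAdd in stat_test.go: Add keeps a ring
+// buffer of the last avgCount samples and only recomputes Value once the
+// buffer wraps, so the reported average lags by up to avgCount calls:
+//
+//	var avg AvgTime
+//	for i := 0; i < avgCount; i++ {
+//		avg.Add(time.Second) // fills the buffer; Value is still 0
+//	}
+//	avg.Add(time.Second) // buffer wrapped: Value becomes 1s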
+ +// AddStat sum current average time with other +func (stat *AvgTime) AddStat(newStat *AvgTime) { + for _, cur := range newStat.duration { + stat.Add(cur) + } +} + +func (stat AvgTime) MarshalJSON() ([]byte, error) { + return json.Marshal(struct { + Value string `json:"value"` + }{ + Value: fmt.Sprintf("%v", stat.Value), + }) +} + +func (stat AvgTime) String() string { + bytes, err := json.Marshal(stat) + if err != nil { + return fmt.Sprintf("Can't serialize: %v", err) + } + + return string(bytes) +} diff --git a/correlator/stat/stat_test.go b/correlator/stat/stat_test.go new file mode 100644 index 0000000..4118f7f --- /dev/null +++ b/correlator/stat/stat_test.go @@ -0,0 +1,61 @@ +package stat + +import ( + "testing" + "time" +) + +func TestTimeAdd(t *testing.T) { + var avg AvgTime + + for i := 0; i < avgCount+1; i++ { + avg.Add(time.Second) + } + + if avg.Value != time.Second { + t.Errorf("Got bad average time: %v", avg.Value) + } +} + +func TestTimeAdd2(t *testing.T) { + var avg AvgTime + + for i := 0; i < avgCount*10+1; i++ { + avg.Add(time.Second) + } + + if avg.Value != time.Second { + t.Errorf("Got bad average time: %v", avg.Value) + } +} + +func TestCopy(t *testing.T) { + avg1 := AvgTime{Value: time.Second, index: 2} + + var avg2 AvgTime + + avg2 = avg1 + + if avg2.Value != time.Second { + t.Errorf("Bad value: %v", avg2.Value) + } + + if avg2.index != 2 { + t.Errorf("Bad index: %v", avg2.index) + } +} + +func TestAvgTime_AddStat(t *testing.T) { + var avg1, avg2 AvgTime + + avg1.Add(time.Second) + avg2.Add(time.Second * 2) + + avg1.AddStat(&avg2) + + goodTime := time.Millisecond * 300 + + if avg1.Value != goodTime { + t.Errorf("Bad average time value, expect %v, got %v", goodTime, avg1.Value) + } +} diff --git a/correlator/util/test.go b/correlator/util/test.go new file mode 100644 index 0000000..3d95618 --- /dev/null +++ b/correlator/util/test.go @@ -0,0 +1,186 @@ +package util + +import ( + "context" + log "github.com/sirupsen/logrus" + "github.com/spf13/viper" + "io/ioutil" + "iwarma.ru/console/correlator/config" + "iwarma.ru/console/correlator/es" + "net/http" + "testing" + "time" +) + +func PrepareElastic() { + viper.Set(config.ElasticUrl, "http://elasticsearch:9200") + viper.Set(config.ElasticRetryCount, 1) + viper.Set(config.ElasticUsername, "elastic") + viper.Set(config.ElasticPassword, "changeme") + viper.Set(config.Verbose, true) + viper.Set(config.ElasticAggregatedIndexName, "test-aggregated") + viper.Set(config.ElasticNormalizedIndexName, "test-normalized") + viper.Set(config.AggregatorIterationDuration, time.Second*2) + viper.Set(config.Threads, 10) + eventsFields := []string{ + "event_severity", + "event_protocol", + "message", + "device_vendor", + "device_product", + "device_action", + "device_version", + "device_timezone", + "sign_id", + "sign_category", + "sign_subcategory", + "application", + "source_ip", + "source_host", + "source_port", + "source_mac", + "source_timezone", + "source_software", + "source_action", + "destination_ip", + "destination_mac", + "destination_timezone", + "destination_software", + "destination_action", + "destination_host", + "destination_port", + "destination_user", + "cs1", + "cs1Label", + "cs2", + "cs2Label", + "object_type", + } + viper.Set(config.AggregatedFields, eventsFields) +} + +// TestUrl One server endpoint description +type TestUrl struct { + Url string // Url to handle + Response []byte // Response to send + ResponseHeaders map[string]string // Response headers to send + ResponseStatus int // Status code for response. 
Default is 200 + ReceivedMessage []byte // Received message from client + Error error // Error from handler function + CallCount int // Counter of requests to this endpoint +} + +// TestServer Test HTTP server +type TestServer struct { + ServerAddress string // Address for server + Urls []*TestUrl // Url endpoints + Server *http.Server // Http server object + ServerMux *http.ServeMux // Http multiplexer for server +} + +// GetRequestAsString Get client request as string +func (url TestUrl) GetRequestAsString() string { + return string(url.ReceivedMessage) +} + +// Start test server +func (server *TestServer) Start() error { + return server.Server.ListenAndServe() +} + +// Stop test server +func (server *TestServer) Stop() error { + return server.Server.Shutdown(context.Background()) +} + +// AddUrl Add url to server +func (server *TestServer) AddUrl(url string, response []byte, responseHeaders map[string]string) *TestUrl { + result := new(TestUrl) + result.Url = url + result.Response = response + result.ResponseHeaders = responseHeaders + result.ResponseStatus = http.StatusOK + + handler := func(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + result.ReceivedMessage, result.Error = ioutil.ReadAll(r.Body) + result.CallCount = result.CallCount + 1 + for key, value := range result.ResponseHeaders { + w.Header().Set(key, value) + } + w.Write(result.Response) + } + + server.ServerMux.HandleFunc(result.Url, handler) + server.Urls = append(server.Urls, result) + + return result +} + +func (server *TestServer) AddUrlWithStatus(url string, status int) *TestUrl { + result := new(TestUrl) + result.Url = url + result.ResponseStatus = status + + handler := func(w http.ResponseWriter, r *http.Request) { + defer r.Body.Close() + result.ReceivedMessage, result.Error = ioutil.ReadAll(r.Body) + result.CallCount = result.CallCount + 1 + w.WriteHeader(result.ResponseStatus) + } + + server.ServerMux.HandleFunc(result.Url, handler) + server.Urls = append(server.Urls, result) + + return result +} + +// NewTestServer Create new test server +func NewTestServer(address string) *TestServer { + result := new(TestServer) + result.ServerAddress = address + result.Urls = make([]*TestUrl, 0) + result.ServerMux = http.NewServeMux() + result.Server = &http.Server{Addr: address, Handler: result.ServerMux} + + return result +} + +func SetupTest(t *testing.T) { + PrepareElastic() + +} + +func ClearIndex(el *es.Elastic, index ...string) error { + for _, cur := range index { + exists, err := el.CheckIndex(cur) + if err != nil { + log.Errorf("%+v", err) + return err + } + + if exists { + err = el.DeleteIndex(cur) + if err != nil { + return err + } + } + } + + return nil +} + +func TearDownTest(t *testing.T) { + client, err := es.NewClient() + if err != nil { + t.Error(err) + return + } + del, err := client.DeleteIndex("*").Do(context.Background()) + if err != nil { + t.Errorf("%v", err) + } + if !del.Acknowledged { + t.Errorf("Got bad response. 
Indexes weren't deleted")
+	}
+}
diff --git a/dashboard/__init__.py b/dashboard/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dashboard/admin.py b/dashboard/admin.py
new file mode 100644
index 0000000..9fcd0d3
--- /dev/null
+++ b/dashboard/admin.py
@@ -0,0 +1,12 @@
+from django.contrib import admin
+
+from core.mixins import JsonWidgetMixin
+from dashboard.models import DashboardLayout
+
+
+class DashboardAdminLayout(JsonWidgetMixin, admin.ModelAdmin):
+    list_display = ['user']
+    list_display_links = ['user']
+
+
+admin.site.register(DashboardLayout, DashboardAdminLayout)
diff --git a/dashboard/api.py b/dashboard/api.py
new file mode 100644
index 0000000..36a9847
--- /dev/null
+++ b/dashboard/api.py
@@ -0,0 +1,266 @@
+import logging
+
+import redis
+import requests
+from django.conf import settings
+from django.contrib.auth.decorators import login_required, permission_required
+from django.http import JsonResponse
+from django.template.context_processors import csrf
+from django.utils.translation import gettext_lazy
+from rest_framework.decorators import api_view
+from rest_framework.mixins import CreateModelMixin, UpdateModelMixin, RetrieveModelMixin, ListModelMixin
+from rest_framework.response import Response
+from rest_framework.views import APIView
+from rest_framework.viewsets import GenericViewSet
+
+from assets.models.assets import Asset
+from core.decorators import log_url
+from core.mixins import ApiPermissionCheckMixin
+from correlation.models import Rule
+from dashboard.models import DashboardLayout
+from dashboard.serializers import (AssetsByIncidentsSerializer, IncidentsByCategorySerializer, TopIdsEventsSerializer,
+                                   SystemInfoSerializer, SerivcesWidgetSerializer, OpenedIncidentsSerializer,
+                                   IncidentsByImportanceSerializer)
+from dashboard.serializers import DashboardSerializer
+from dashboard.utils import RedisInstances
+from dashboard.widgets.opened_incs_widget import get_opened_incidents_count
+from dashboard.widgets.services_widget import get_services_list
+from dashboard.widgets.sys_info_widget import (get_machine_name, get_operating_system, get_cpu_model, get_uptime,
+                                               get_cpu_total,
+                                               get_console_version, get_disk_usage, get_mem, get_server_time,
+                                               get_description)
+from dashboard.widgets.top_ids_widget import get_top_ids_events
+from dashboard.widgets.widgets import get_widget_list_from_user
+from incident.models import Incident, IncidentCategory
+from perms.models import Perm
+
+CORRELATOR_URL = getattr(settings, 'CORRELATOR_URL', 'http://localhost:9200')
+redis_instance = redis.StrictRedis(host=getattr(settings, 'REDIS_HOST', 'redis'),
+                                   port=getattr(settings, 'REDIS_PORT', 6379), db=0)
+
+_log = logging.getLogger(__name__)
+
+
+@login_required
+@log_url
+def add_widget_form(request):
+    """ API for rendering the add widget form """
+    user = request.user
+    available_widgets_for_user_count = len(get_widget_list_from_user(request.user))
+    form = AddWidgetForm(user)
+    ctx = {}
+    ctx.update(csrf(request))
+    form = render_crispy_form(form, context=ctx)
+    return JsonResponse({'status': 'ok',
+                         'form_html': form,
+                         'available_widgets_count': available_widgets_for_user_count})
+
+
+class DashboardLayoutViewSet(ApiPermissionCheckMixin,
+                             ListModelMixin,
+                             RetrieveModelMixin,
+                             UpdateModelMixin,
+                             CreateModelMixin,
+                             GenericViewSet):
+    """ ViewSet for handling all operations with dashboard layout """
+    pagination_class = None
+    serializer_class = DashboardSerializer
+    queryset = DashboardLayout.objects.all()
+    console_permissions = {'default':
[Perm.can_add_widgets]} + + +# Incidendts by time widget api +@api_view(['GET']) +@permission_required(Perm.perm_req(Perm.can_add_widgets), raise_exception=True) +def get_current_incidents_by_time_widget_data(request): + response = { + 'day_stats': redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_DAY), + 'week_stats': redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_WEEK), + 'month_stats': redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_MONTH), + 'year_stats': redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_YEAR), + } + return Response(response, status=200) + + +# Events by time widget api +@api_view(['GET']) +@permission_required(Perm.perm_req(Perm.can_add_widgets), raise_exception=True) +def get_current_events_by_time_widget_data(request): + response = { + 'day_stats': redis_instance.get(RedisInstances.WIDGET_EVENTS_BY_TIME_DAY), + 'week_stats': redis_instance.get(RedisInstances.WIDGET_EVENTS_BY_TIME_WEEK), + 'month_stats': redis_instance.get(RedisInstances.WIDGET_EVENTS_BY_TIME_MONTH), + 'year_stats': redis_instance.get(RedisInstances.WIDGET_EVENTS_BY_TIME_YEAR), + } + return Response(response, status=200) + + +class AssetsByIncidentsViewSet(ApiPermissionCheckMixin, ListModelMixin, GenericViewSet): + pagination_class = None + serializer_class = AssetsByIncidentsSerializer + queryset = Asset.objects.filter(incidents__isnull=False).distinct() + console_permissions = [Perm.can_add_widgets] + + +class IncidentsByCategoryViewSet(ApiPermissionCheckMixin, ListModelMixin, GenericViewSet): + pagination_class = None + serializer_class = IncidentsByCategorySerializer + queryset = IncidentCategory.objects.filter(incident__isnull=False).distinct() + console_permissions = [Perm.can_view_incidents] + + +class TopIdsEventsWidget(ApiPermissionCheckMixin, APIView): + console_permissions = [Perm.can_view_events] + + def get(self, request): + context = { + 'top_ids_events': get_top_ids_events(), + } + + serializer = TopIdsEventsSerializer(data=context) + serializer.is_valid() + + return Response(serializer.data) + + +class IncidentByImportanceApi(ApiPermissionCheckMixin, APIView): + """ Data source for incidents by importance widget """ + console_permissions = [Perm.can_view_incidents] + + def filter_incidents_by_importance_levels(self, min_importance, max_importance): + return Incident.objects.filter(importance__range=(min_importance, max_importance)).count() + + def get(self, request): + _log.info("Call for incident by importance stats") + context = { + 'importance_none': self.filter_incidents_by_importance_levels(0, 0), + 'importance_info': self.filter_incidents_by_importance_levels(1, 9), + 'importance_low': self.filter_incidents_by_importance_levels(10, 39), + 'importance_medium': self.filter_incidents_by_importance_levels(40, 69), + 'importance_high': self.filter_incidents_by_importance_levels(70, 89), + 'importance_critical': self.filter_incidents_by_importance_levels(90, 101), + } + + serializer = IncidentsByImportanceSerializer(data=context) + serializer.is_valid() + + return Response(serializer.data) + + +class SysTimeWidgetApi(APIView): + def get(self, request): + return Response({'server_time': get_server_time()}) + + +class SysInfoWidgetApi(ApiPermissionCheckMixin, APIView): + """ Data source for system information widget """ + console_permissions = [Perm.can_view_sys_info] + + def get(self, request): + context = { + 'machine_name': get_machine_name(), + 'operating_system': get_operating_system(), + 'cpu_model': get_cpu_model(), + 'machine_uptime': get_uptime(), + 
'total_cpu': get_cpu_total(), + 'console_version': get_console_version(), + 'server_time': get_server_time(), + 'description': get_description(), + } + + context.update(get_disk_usage()) + context.update(get_mem()) + + serializer = SystemInfoSerializer(data=context) + serializer.is_valid() + + return Response(serializer.data) + + +class ServicesWidgetApi(ApiPermissionCheckMixin, APIView): + """ Data source for services widget """ + console_permissions = [Perm.can_view_sys_info] + + def get(self, request): + serializer = SerivcesWidgetSerializer(data=get_services_list(), many=True) + serializer.is_valid() + + return Response(serializer.data) + + +class OpenedIncidentsWidgetApi(ApiPermissionCheckMixin, APIView): + console_permissions = [Perm.can_view_incidents] + + @staticmethod + def get(request): + context = {'inc_count': get_opened_incidents_count()} + + serializer = OpenedIncidentsSerializer(data=context) + serializer.is_valid() + + return Response(serializer.data) + + +class CorrelatorStat(ApiPermissionCheckMixin, APIView): + console_permissions = [Perm.can_view_correlation_rules_list] + message = 'Cannot access correlator API. Permission denied' + + @staticmethod + def get(request): + # Need to call correlator api + # Then, update it's json + url = CORRELATOR_URL + '/stats/' + + headers = {'Accept': 'application/json', + 'Content-Encoding': 'utf-8'} + + _log.info("Call correlator stat") + try: + response = requests.get(url, headers=headers) + except requests.ConnectionError: + _log.exception("Can't get correlator status") + return JsonResponse({"status": False, "error": gettext_lazy("Correlator not responding")}) + + correlator = response.json() + + # { + # "aggregator": { + # "events_processed": 0, + # "events_aggregated": 0, + # "average_iteration": { + # "value": "0s" + # } + # }, + # "correlator": { + # "rule_count": 3, + # "average_time": { + # "value": "15.487471ms" + # }, + # "average_rule_time": { + # "1": { + # "value": "5.338945ms" + # }, + # "2": { + # "value": "4.323614ms" + # }, + # "35": { + # "value": "3.4368ms" + # } + # }, + # "incident_count": 0 + # } + # } + + correlator["status"] = "true" + rules = Rule.objects.filter(status=True).all() + try: + for key in correlator["correlator"]["average_rule_time"]: + for cur in rules: + if cur.pk == int(key): + correlator["correlator"]["average_rule_time"][key]["title"] = cur.name + break + except TypeError: + correlator["status"] = "false" + _log.error(f"Got bad json: {correlator}") + + return JsonResponse(correlator) diff --git a/dashboard/api_urls.py b/dashboard/api_urls.py new file mode 100644 index 0000000..2d5e00a --- /dev/null +++ b/dashboard/api_urls.py @@ -0,0 +1,23 @@ +from django.urls import path, include +from rest_framework import routers + +from dashboard import api + +router = routers.DefaultRouter() +router.register('widgets/incs_by_cat', api.IncidentsByCategoryViewSet, basename='incidents_by_category') +router.register('widgets/assets_by_incs', api.AssetsByIncidentsViewSet, basename='assets_by_incs') +router.register('layout', api.DashboardLayoutViewSet, basename='layout_viewset') + +urlpatterns = [ + path('widgets/sys_info/', api.SysInfoWidgetApi.as_view(), name='sys_info_widget'), + path('widgets/sys_time/', api.SysTimeWidgetApi.as_view(), name='sys_time_widget'), + path('widgets/incs_by_importance/', api.IncidentByImportanceApi.as_view(), name='incs_by_importance'), + path('widgets/services_info/', api.ServicesWidgetApi.as_view(), name='services_info_widget'), + path('widgets/opened_incs/', 
api.OpenedIncidentsWidgetApi.as_view(), name='opened_incs'), + path('widgets/top_ids_events/', api.TopIdsEventsWidget.as_view(), name='top_ids_events'), + path('redis/incs_by_time/data', api.get_current_incidents_by_time_widget_data, name='incs_by_time_redis_data'), + path('redis/events_by_time/data', api.get_current_events_by_time_widget_data, name='events_by_time_redis_data'), + path('widgets/correlator', api.CorrelatorStat.as_view(), name='correlator_stat'), + path('', include(router.urls)), + path('add_new_widget_form', api.add_widget_form, name='api_add_widget_to_layout') +] diff --git a/dashboard/apps.py b/dashboard/apps.py new file mode 100644 index 0000000..19dbdee --- /dev/null +++ b/dashboard/apps.py @@ -0,0 +1,37 @@ +import json + +import redis +from django.apps import AppConfig +from django.conf import settings + +from dashboard.utils import RedisInstances + +TEST_MODE = getattr(settings, "TEST_MODE", False) + + +class DashboardConfig(AppConfig): + name = 'dashboard' + verbose_name = "Dashboard application" + + def ready(self): + if not TEST_MODE: + redis_instance = redis.StrictRedis(host=getattr(settings, 'REDIS_HOST', 'redis'), + port=getattr(settings, 'REDIS_PORT', 6379), db=0) + # Incidents by time widget REDIS data setup + if not redis_instance.exists(RedisInstances.WIDGET_INCS_BY_TIME_DAY): + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_DAY, json.dumps([0] * 24)) + if not redis_instance.exists(RedisInstances.WIDGET_INCS_BY_TIME_WEEK): + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_WEEK, json.dumps([0] * 7)) + if not redis_instance.exists(RedisInstances.WIDGET_INCS_BY_TIME_MONTH): + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_MONTH, json.dumps([0] * 30)) + if not redis_instance.exists(RedisInstances.WIDGET_INCS_BY_TIME_YEAR): + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_YEAR, json.dumps([0] * 12)) + # Events by time widget REDIS data setup + if not redis_instance.exists(RedisInstances.WIDGET_EVENTS_BY_TIME_DAY): + redis_instance.set(RedisInstances.WIDGET_EVENTS_BY_TIME_DAY, json.dumps([0] * 24)) + if not redis_instance.exists(RedisInstances.WIDGET_EVENTS_BY_TIME_WEEK): + redis_instance.set(RedisInstances.WIDGET_EVENTS_BY_TIME_WEEK, json.dumps([0] * 7)) + if not redis_instance.exists(RedisInstances.WIDGET_EVENTS_BY_TIME_MONTH): + redis_instance.set(RedisInstances.WIDGET_EVENTS_BY_TIME_MONTH, json.dumps([0] * 30)) + if not redis_instance.exists(RedisInstances.WIDGET_EVENTS_BY_TIME_YEAR): + redis_instance.set(RedisInstances.WIDGET_EVENTS_BY_TIME_YEAR, json.dumps([0] * 12)) diff --git a/dashboard/migrations/__init__.py b/dashboard/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dashboard/models.py b/dashboard/models.py new file mode 100644 index 0000000..c3755bc --- /dev/null +++ b/dashboard/models.py @@ -0,0 +1,28 @@ +from django.contrib.auth.models import User +from django.db import models +from django.db.models.signals import post_save +from django.dispatch import receiver +from django.utils.translation import gettext_lazy + + +class DashboardLayout(models.Model): + """ Dashboard widgets layout for one user """ + user = models.ForeignKey(User, + on_delete=models.CASCADE, + verbose_name=gettext_lazy('User'), + help_text=gettext_lazy('Owner of dashboard')) + widgets = models.JSONField(help_text=gettext_lazy('Widgets'), default=dict) + + def __str__(self): + return f'{self.pk} {self.user.username}' + + +@receiver(post_save, sender=User) +def add_dashboard_layout(sender, **kwargs): + """ Add dashboard layout 
to new user """ + user = kwargs['instance'] + layout = DashboardLayout.objects.filter(user=user) + + if not layout: + layout = DashboardLayout(user=user, widgets={}) + layout.save() diff --git a/dashboard/serializers.py b/dashboard/serializers.py new file mode 100644 index 0000000..af61915 --- /dev/null +++ b/dashboard/serializers.py @@ -0,0 +1,82 @@ +from django.utils.translation import gettext_lazy +from rest_framework import serializers + +from assets.models.assets import Asset +from dashboard.models import DashboardLayout +from incident.models import Incident, IncidentCategory +from networkmap.api import DANGER_STATUSES + + +class DashboardSerializer(serializers.ModelSerializer): + class Meta: + model = DashboardLayout + fields = '__all__' + + +class TopIdsEventsSerializer(serializers.Serializer): + top_ids_events = serializers.ListField(label=gettext_lazy("List of top 10 IDS events")) + + +class SystemInfoSerializer(serializers.Serializer): + """ System information widget data serializer """ + machine_name = serializers.CharField(label=gettext_lazy('Machine name')) + operating_system = serializers.CharField(label=gettext_lazy('OS')) + cpu_model = serializers.CharField(label=gettext_lazy('CPU model name')) + total_cpu = serializers.FloatField(label=gettext_lazy('Total CPU usage')) + machine_uptime = serializers.CharField(label=gettext_lazy('Current machine work time')) + server_time = serializers.CharField(label=gettext_lazy('Server time')) + mem_total = serializers.IntegerField(label=gettext_lazy('Total memory (MB)')) + mem_used = serializers.IntegerField(label=gettext_lazy('Used memory (MB)')) + disk_total = serializers.IntegerField(label=gettext_lazy('Disk total space (GB)')) + disk_free = serializers.IntegerField(label=gettext_lazy('Disk free space (GB)')) + disk_used = serializers.IntegerField(label=gettext_lazy('Disk used space (GB)')) + console_version = serializers.CharField(label=gettext_lazy('Console version')) + description = serializers.CharField(label=gettext_lazy('Machine description')) + lc_info = serializers.CharField(label=gettext_lazy('License')) + + +class SerivcesWidgetSerializer(serializers.Serializer): + user = serializers.CharField(label=gettext_lazy('User')) + command = serializers.CharField(label=gettext_lazy('Command')) + cpu = serializers.CharField(label=gettext_lazy('CPU (%)')) + + +class OpenedIncidentsSerializer(serializers.Serializer): + inc_count = serializers.IntegerField() + + +class IncidentsByImportanceSerializer(serializers.Serializer): + importance_none = serializers.IntegerField(label=gettext_lazy('Incidents with None importance')) + importance_info = serializers.IntegerField(label=gettext_lazy('Incidents with Info importance')) + importance_low = serializers.IntegerField(label=gettext_lazy('Incidents with Low importance')) + importance_medium = serializers.IntegerField(label=gettext_lazy('Incidents with Medium importance')) + importance_high = serializers.IntegerField(label=gettext_lazy('Incidents with High importance')) + importance_critical = serializers.IntegerField(label=gettext_lazy('Incidents with Critical importance')) + + +class IncidentsByCategorySerializer(serializers.ModelSerializer): + inc_count = serializers.SerializerMethodField('get_inc_count') + + def get_inc_count(self, obj): + return Incident.objects.filter(category=obj.pk).count() + + class Meta: + model = IncidentCategory + fields = ['name', 'inc_count'] + + +class AssetsByIncidentsSerializer(serializers.ModelSerializer): + """ Serializer for assets by incs widget """ + 
inc_count = serializers.SerializerMethodField('get_inc_count') + + def get_inc_count(self, obj): + """ Method for counting incidents of the asset, excluding incs with RESOLVED status + :param obj: asset object + :return: sum of asset incidents + """ + asset_incs = Asset.objects.get(pk=obj.pk).incidents + return asset_incs.filter(status__in=DANGER_STATUSES).count() + + class Meta: + model = Asset + fields = ['name', 'inc_count'] diff --git a/dashboard/tasks.py b/dashboard/tasks.py new file mode 100644 index 0000000..b31db14 --- /dev/null +++ b/dashboard/tasks.py @@ -0,0 +1,75 @@ +import json +import logging + +import redis +from celery import shared_task +from django.conf import settings + +from core.utils import dtnow +from dashboard.utils import RedisInstances +from incident.models import Incident + +_log = logging.getLogger(__name__) + +redis_instance = redis.StrictRedis(host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=0) + + +def update_incs_by_time_statistics(): + """ Task for updating data for incidents by time widget """ + _log.info('Starting update of the incidents statistics') + # Getting day data from REDIS + day_data = redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_DAY) + week_data = redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_WEEK) + month_data = redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_MONTH) + year_data = redis_instance.get(RedisInstances.WIDGET_INCS_BY_TIME_YEAR) + day_data = json.loads(day_data) + week_data = json.loads(week_data) + month_data = json.loads(month_data) + year_data = json.loads(year_data) + # Getting amount of incidents happened in last 1 minute + now = dtnow() + one_minute_gap = dtnow(minutes=-1) + amount_of_incidents_within_a_minute = Incident.objects.filter(timestamp__range=(one_minute_gap, now)).count() + # Check if hour is changed + if one_minute_gap.hour != now.hour: + day_data.pop(0) + # Appending new number to the list + day_data.append(amount_of_incidents_within_a_minute) + # Updating redis data for the day + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_DAY, json.dumps(day_data)) + else: + day_data[-1] = day_data[-1] + amount_of_incidents_within_a_minute + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_DAY, json.dumps(day_data)) + # Check if day is changed + if one_minute_gap.day != now.day: + # Popping the first element of week and month data and appending sum of last day incidents + week_data.pop(0) + month_data.pop(0) + week_data.append(amount_of_incidents_within_a_minute) + month_data.append(amount_of_incidents_within_a_minute) + # Setting new REDIS value of week and month incidents list + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_WEEK, json.dumps(week_data)) + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_MONTH, json.dumps(month_data)) + else: + week_data[-1] = week_data[-1] + amount_of_incidents_within_a_minute + month_data[-1] = month_data[-1] + amount_of_incidents_within_a_minute + # Setting new REDIS value of week and month incidents list + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_WEEK, json.dumps(week_data)) + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_MONTH, json.dumps(month_data)) + # Check if year is changed + if one_minute_gap.month != now.month: + # Popping first element in year list + year_data.pop(0) + year_data.append(amount_of_incidents_within_a_minute) + # Setting new REDIS value of year incidents list + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_YEAR, json.dumps(year_data)) + else: + year_data[-1] = year_data[-1] + 
amount_of_incidents_within_a_minute + # Setting new REDIS value of year incidents list + redis_instance.set(RedisInstances.WIDGET_INCS_BY_TIME_YEAR, json.dumps(year_data)) + + +@shared_task +def update_statistics_task(): + # update_events_by_time_statistics() + update_incs_by_time_statistics() diff --git a/dashboard/tests/__init__.py b/dashboard/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dashboard/tests/test_layout_add.py b/dashboard/tests/test_layout_add.py new file mode 100644 index 0000000..4be25f4 --- /dev/null +++ b/dashboard/tests/test_layout_add.py @@ -0,0 +1,32 @@ +import pytest + +from dashboard.models import DashboardLayout + + +@pytest.mark.django_db +class TestLayout(object): + + @pytest.mark.unit + def test_add_user(self, add_user_with_permissions): + """ Check that when new user is created, layout for him is created too """ + username = "username" + password = "pro100ton" + + user = add_user_with_permissions(username=username, password=password) + + layout = DashboardLayout.objects.get(user=user) + + @pytest.mark.unit + def test_update_user(self, add_user_with_permissions): + """ Check that when user is updated, no new layout is created """ + username = "username" + password = "pro100ton" + + user = add_user_with_permissions(username=username, password=password) + + assert DashboardLayout.objects.filter(user=user).count() == 1 + + user.email = "name@name.com" + user.save() + + assert DashboardLayout.objects.filter(user=user).count() == 1 diff --git a/dashboard/tests/test_rights.py b/dashboard/tests/test_rights.py new file mode 100644 index 0000000..92c9e95 --- /dev/null +++ b/dashboard/tests/test_rights.py @@ -0,0 +1,40 @@ +import pytest + +from dashboard.widgets import widgets + + +@pytest.mark.django_db +class TestWidgetsPermissions(object): + + @pytest.mark.unit + def test_default_permissions(self, add_user_with_permissions): + """ Check that any user can see default widget """ + user = add_user_with_permissions(username='user', password='ssssddddfff') + widget = widgets.DashboardWidget() + + assert widget.user_can_see_widget(user) + + @pytest.mark.unit + @pytest.mark.parametrize("widget", widgets.all_subclasses(widgets.DashboardWidget)) + def test_user_can_see_widget(self, widget, add_user_with_permissions): + """ Check that user with correct permissions can see widget """ + cur = widget() + user = add_user_with_permissions(username='user', password='ssssddddfff', permissions=cur.perms) + + assert cur.user_can_see_widget(user) + + @pytest.mark.unit + @pytest.mark.parametrize("widget", widgets.all_subclasses(widgets.DashboardWidget)) + def test_user_cant_see_widget(self, widget, add_user_with_permissions): + """ Check that user without correct permissions can't see widget + + Test will skip widgets with no permissions, because every user can see such widgets + """ + cur = widget() + + if len(cur.perms) == 0: + pytest.skip(f"Can't run test for {widget}, because every user can see it") + + user = add_user_with_permissions(username='user', password='ssssddddfff') + + assert not cur.user_can_see_widget(user) diff --git a/dashboard/tests/test_widgets.py b/dashboard/tests/test_widgets.py new file mode 100644 index 0000000..74b6b89 --- /dev/null +++ b/dashboard/tests/test_widgets.py @@ -0,0 +1,305 @@ +import datetime +import json +from secrets import choice +from unittest.mock import patch + +import fakeredis +from unittest import mock +import pytest +from django.contrib.auth import get_user_model +from django.urls import reverse + +from 
assets.models.assets import OperatingSystem, Asset +# noinspection PyUnresolvedReferences +from console.tests.test_utils import add_user_with_permissions, get_url, test_server +from core.utils import dtnow +from dashboard.models import DashboardLayout +# TODO: enable when correlator is ready +# from dashboard.tasks import update_incs_by_time_statistics, update_events_by_time_statistics +from dashboard.tasks import update_incs_by_time_statistics +from dashboard.utils import RedisInstances +from dashboard.views import check_user_widgets +from incident.models import Incident +from networkmap.api import DANGER_STATUSES +from perms.models import Perm +from users.models import UserInfo + +# from dashboard.serializers import DashboardSerializer + +User = get_user_model() + +real_datetime_class = datetime.datetime +USERNAME = 'test_not_superuser' +PASSWORD = 'test_password' + + +def mock_datetime_now(target, dt): + class DatetimeSubclassMeta(type): + @classmethod + def __instancecheck__(mcs, obj): + return isinstance(obj, real_datetime_class) + + class BaseMockedDatetime(real_datetime_class): + @classmethod + def now(cls, tz=None): + return target.replace(tzinfo=tz) + + @classmethod + def utcnow(cls): + return target + + # Python2 & Python3 compatible metaclass + MockedDatetime = DatetimeSubclassMeta('datetime', (BaseMockedDatetime,), {}) + + return mock.patch.object(dt, 'datetime', MockedDatetime) + + +@pytest.mark.django_db +class TestApi(object): + + @pytest.fixture(autouse=True) + def testing_redis(self, django_user_model): + server = fakeredis.FakeServer() + test_redis = fakeredis.FakeStrictRedis(server=server) + test_redis.set(RedisInstances.WIDGET_INCS_BY_TIME_DAY, json.dumps([0] * 24)) + test_redis.set(RedisInstances.WIDGET_INCS_BY_TIME_WEEK, json.dumps([0] * 7)) + test_redis.set(RedisInstances.WIDGET_INCS_BY_TIME_MONTH, json.dumps([0] * 30)) + test_redis.set(RedisInstances.WIDGET_INCS_BY_TIME_YEAR, json.dumps([0] * 12)) + test_redis.set(RedisInstances.WIDGET_EVENTS_BY_TIME_DAY, json.dumps([0] * 24)) + test_redis.set(RedisInstances.WIDGET_EVENTS_BY_TIME_WEEK, json.dumps([0] * 7)) + test_redis.set(RedisInstances.WIDGET_EVENTS_BY_TIME_MONTH, json.dumps([0] * 30)) + test_redis.set(RedisInstances.WIDGET_EVENTS_BY_TIME_YEAR, json.dumps([0] * 12)) + redis_patcher = patch('dashboard.tasks.redis_instance', test_redis) + self.redis = redis_patcher.start() + + user1 = django_user_model.objects.create_superuser(username='admin400', password='nimda') + user2 = django_user_model.objects.create_user(username=USERNAME, password=PASSWORD) + UserInfo(user=user1).save() + UserInfo(user=user2).save() + + return test_redis + + @pytest.mark.unit + def test_inccidents_by_time_hour_task(self, testing_redis): + """ Test for checking the celery task for incidents by time widget past hour incident sum + Steps of the test: + 1. Get REDIS data before invoking task + 2. Create 2 incidents + 3. Run task again + 4. 
Check if value of day stats in REDIS changed + :param testing_redis: fixture for initializing testing REDIS instance + """ + day_data_before = json.loads(testing_redis.get(RedisInstances.WIDGET_INCS_BY_TIME_DAY)) + Incident.objects.create(timestamp=dtnow(), title='test_inc_1', importance=1, status=2, event_count=1, events='') + update_incs_by_time_statistics() + day_data_after = json.loads(testing_redis.get(RedisInstances.WIDGET_INCS_BY_TIME_DAY)) + assert day_data_before != day_data_after + + @pytest.mark.unit + def test_incidents_by_time_week_update(self, testing_redis): + """ Test for checking the data for incidents by time widget (week data), when day changes + Steps of the test: + 1. Create 3 incidents with the last of which occurs the day after the first two + 2. Every time incidents created, call update widget data function + 3. Compare last value of week data with 3 (sum of incidents) + :param testing_redis: + """ + for i in range(3): + incident_datetime = datetime.datetime(2020, 1, 1, 21, 59, 0, 0, None) + datetime.timedelta(hours=i) + + def dtnow_for_widgets(minutes=0): + hours_from_dtnow_call = minutes + return incident_datetime + datetime.timedelta(minutes=1, hours=hours_from_dtnow_call) + + with patch('dashboard.tasks.dtnow', dtnow_for_widgets): + created_inc = Incident.objects.create(timestamp=incident_datetime, + title=f"test_inc{i}", + importance=1, + status=2, + event_count=1, + events='') + update_incs_by_time_statistics() + week_stats = json.loads(testing_redis.get(RedisInstances.WIDGET_INCS_BY_TIME_WEEK)) + assert week_stats[-1] == 1 + + @pytest.mark.unit + def test_incidents_by_time_month_update(self, testing_redis): + """ Test for checking the data for incidents by time widget (month data), when day changes + Steps of the test: + 1. Create 3 incidents with the last of which occurs the day after the first two + 2. Every time incidents created, call update widget data function + 3. Compare last value of week data with 3 (sum of incidents) + :param testing_redis: + """ + for i in range(3): + incident_datetime = datetime.datetime(2020, 1, 1, 21, 59, 0, 0, None) + datetime.timedelta(hours=i) + + def dtnow_for_widgets(minutes=0): + hours_from_dtnow_call = minutes + return incident_datetime + datetime.timedelta(minutes=1, hours=hours_from_dtnow_call) + + with patch('dashboard.tasks.dtnow', dtnow_for_widgets): + created_inc = Incident.objects.create(timestamp=incident_datetime, + title=f"test_inc{i}", + importance=1, + status=2, + event_count=1, + events='') + update_incs_by_time_statistics() + month_stats = json.loads(testing_redis.get(RedisInstances.WIDGET_INCS_BY_TIME_MONTH)) + assert month_stats[-1] == 1 + + @pytest.mark.unit + def test_incidents_by_time_year_update(self, testing_redis): + """ Test for checking the data for incidents by time widget (month data), when day changes + Steps of the test: + 1. Create 3 incidents with the last of which occurs the day after the first two and on next month + 2. Every time incidents created, call update widget data function + 3. 
Compare last value of week data with 3 (sum of incidents) + :param testing_redis: fixture for initializing testing REDIS instance + """ + for i in range(3): + incident_datetime = datetime.datetime(2020, 1, 31, 21, 59, 0, 0, None) + datetime.timedelta(hours=i) + + def dtnow_for_widgets(minutes=0): + hours_from_dtnow_call = minutes + return incident_datetime + datetime.timedelta(minutes=1, hours=hours_from_dtnow_call) + + with patch('dashboard.tasks.dtnow', dtnow_for_widgets): + created_inc = Incident.objects.create(timestamp=incident_datetime, + title=f"test_inc{i}", + importance=1, + status=2, + event_count=1, + events='') + update_incs_by_time_statistics() + year_stats = json.loads(testing_redis.get(RedisInstances.WIDGET_INCS_BY_TIME_YEAR)) + assert year_stats[-1] == 1 + + @pytest.mark.unit + def test_assets_by_incs_widget_counting_right_incidents(self, get_url, client): + """ + Test for checking if widget backend correctly counts right incidents + Steps of the test: + 1. Create asset for testing purposes + 2. Create 3 incidents with statuses within DANGER_STATUSES list and assign them to the asset from step 1 + 3. Run widget backend + 4. Change status of one created incident to RESOLVED + 5. Run widget backend again + 6. Compare results of step 2 and 4 + :return: + PASSED if values are not equal + FAILED if not + """ + test_os = OperatingSystem.objects.create(name='test_OS') + test_asset = Asset.objects.create(name='test_asset', ip='1.1.1.1', os=test_os) + for i in range(3): + test_inc = Incident.objects.create(timestamp=dtnow(), + title=f"test_inc{i}", + importance=1, + status=choice(DANGER_STATUSES), + event_count=1, + events='') + test_asset.incidents.add(test_inc) + url = reverse('assets_by_incs-list') + client.force_login(User.objects.get(username='admin400')) + response = client.get(url) + response_result_before = json.loads(response.content.decode('utf-8')) + inc_count_before = response_result_before[0].get('inc_count') + changing_inc = Incident.objects.get(title='test_inc1') + changing_inc.status = Incident.Status.RESOLVED + changing_inc.save() + response = client.get(url) + response_result_after = json.loads(response.content.decode('utf-8')) + inc_count_after = response_result_after[0].get('inc_count') + assert inc_count_after != inc_count_before + + @pytest.mark.unit + def test_opened_incs_widget_counter(self, client): + """ + Test for checking if widget backend correctly counts opened widgets + Steps of the test: + 1. Create incidents with statuses within DANGER_STATUSES list + 2. Run widget backend + 3. Change status of 1 incident from step 1 to RESOLVED + 4. Run backend again + 5. 
Compare results of step 2 and step 4
+        :param client: django testing client
+        :return:
+            PASSED if values are not equal
+            FAILED otherwise
+        """
+        for i in range(3):
+            Incident.objects.create(timestamp=dtnow(),
+                                    title=f"test_inc{i}",
+                                    importance=1,
+                                    status=choice(DANGER_STATUSES),
+                                    event_count=1,
+                                    events='')
+        url = reverse('opened_incs')
+        client.force_login(User.objects.get(username='admin400'))
+        response = client.get(url)
+        response_result_before = json.loads(response.content.decode('utf-8'))
+        count_before = response_result_before.get('inc_count')
+        changing_inc = Incident.objects.get(title='test_inc1')
+        changing_inc.status = Incident.Status.RESOLVED
+        changing_inc.save()
+        response = client.get(url)
+        response_result_after = json.loads(response.content.decode('utf-8'))
+        count_after = response_result_after.get('inc_count')
+        assert count_before != count_after
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize('user_perms, result_count, widget_name', [
+        ([], 0, ''),
+        ([Perm.can_view_correlation_rules_list], 1, 'correlator-info'),
+        ([Perm.can_view_sys_info], 2, 'sys-info'),
+    ])
+    def test_user_allowed_widgets(self, client, user_perms: list, result_count: int, widget_name: str):
+        """ Check that the system information and services widgets are filtered
+        according to the user's permissions
+        """
+        # Getting testing user
+        test_user = User.objects.get(username=USERNAME)
+        # Getting user dashboard instance
+        widgets_before = DashboardLayout.objects.get(user=test_user)
+        # Adding testing widgets to the user's dashboard
+        widgets_before.widgets = [
+            {
+                "x": "0",
+                "y": "0",
+                "id": "sys-info",
+                "width": "2",
+                "height": "5"
+            },
+            {
+                "x": "2",
+                "y": "5",
+                "id": "services",
+                "width": "2",
+                "height": "5"
+            },
+            {
+                "x": "4",
+                "y": "10",
+                "id": "correlator-info",
+                "width": "2",
+                "height": "5"
+            },
+            {
+                "x": "6",
+                "y": "15",
+                "id": "incs-by-category",
+                "width": "2",
+                "height": "5"
+            },
+        ]
+        widgets_before.save()
+
+        for perm in user_perms:
+            test_user.user_permissions.add(Perm.get_rights(perm))
+        check_user_widgets(test_user)
+        widgets_after = DashboardLayout.objects.get(user=test_user)
+
+        assert len(widgets_after.widgets) == result_count
+        if result_count:
+            assert widgets_after.widgets[0]['id'] == widget_name
diff --git a/dashboard/utils.py b/dashboard/utils.py
new file mode 100644
index 0000000..3df9c76
--- /dev/null
+++ b/dashboard/utils.py
@@ -0,0 +1,12 @@
+# Class for holding REDIS keys
+class RedisInstances:
+    # Dashboard, Incidents by time widget
+    WIDGET_INCS_BY_TIME_DAY = 'widget_incs_by_time_day'
+    WIDGET_INCS_BY_TIME_WEEK = 'widget_incs_by_time_week'
+    WIDGET_INCS_BY_TIME_MONTH = 'widget_incs_by_time_month'
+    WIDGET_INCS_BY_TIME_YEAR = 'widget_incs_by_time_year'
+    # Dashboard, Events by time widget
+    WIDGET_EVENTS_BY_TIME_DAY = 'widget_events_by_time_day'
+    WIDGET_EVENTS_BY_TIME_WEEK = 'widget_events_by_time_week'
+    WIDGET_EVENTS_BY_TIME_MONTH = 'widget_events_by_time_month'
+    WIDGET_EVENTS_BY_TIME_YEAR = 'widget_events_by_time_year'
diff --git a/dashboard/views.py b/dashboard/views.py
new file mode 100644
index 0000000..060a4d5
--- /dev/null
+++ b/dashboard/views.py
@@ -0,0 +1,74 @@
+import logging
+
+from django.contrib.auth.decorators import login_required
+from django.shortcuts import render
+
+from core.decorators import log_url
+from perms.models import Perm
+
+from dashboard.models import DashboardLayout
+from dashboard.widgets.widgets import FEATURES_MAP, get_widget_type_with_perms_map
+from dashboard.widgets.widgets import
diff --git a/dashboard/views.py b/dashboard/views.py
new file mode 100644
index 0000000..060a4d5
--- /dev/null
+++ b/dashboard/views.py
@@ -0,0 +1,74 @@
+import logging
+
+from django.contrib.auth.decorators import login_required
+from django.shortcuts import render
+
+from core.decorators import log_url
+from perms.models import Perm
+
+from dashboard.forms import AddWidgetForm  # import was missing in the original hunk; module path is assumed
+from dashboard.models import DashboardLayout
+from dashboard.widgets.widgets import FEATURES_MAP, get_widget_type_with_perms_map
+from dashboard.widgets.widgets import get_widget_list_from_user
+from license_info.tools import check_features
+
+_log = logging.getLogger()
+
+
+@log_url
+@login_required
+def dashboard(request):
+    """ User's dashboard """
+    # If there is no default layout for this user, create one.
+    layout, _created = DashboardLayout.objects.get_or_create(
+        user=request.user
+    )
+    check_user_widgets(request.user)
+    check_license_widgets()
+    widgets_list = get_widget_list_from_user(request.user)
+    form = AddWidgetForm(request.user)
+    context = {'widgets': widgets_list,
+               'form': form,
+               'layout_id': layout.id}
+    return render(request, 'dashboard/dashboard.html', context)
+
+
+def check_user_widgets(user):
+    """
+    Check user permissions for viewing widgets
+
+    If the user once had the rights to view widgets and already placed them on the panel,
+    and those rights were later revoked, this check removes the widgets from the panel.
+
+    @param user: A user who wants to view widgets
+
+    """
+
+    if user.is_superuser:
+        return
+    # filter().first() instead of get(): the guard below expects None when no layout exists
+    widgets = DashboardLayout.objects.filter(user=user).first()
+    if not widgets:
+        return
+
+    type_perms_map = get_widget_type_with_perms_map()
+    allowed_widgets = []
+    for widget in widgets.widgets:
+        need_perms = type_perms_map[widget['id']]
+        if all([user.has_perm(Perm.perm_req(perm)) for perm in need_perms]):
+            allowed_widgets.append(widget)
+    widgets.widgets = allowed_widgets
+    widgets.save()
+
+
+def check_license_widgets():
+    """Check if we need to hide some widgets due to license restrictions"""
+
+    dashboards = DashboardLayout.objects.all()
+    for cur in dashboards:
+        widgets_ids = [x['id'] for x in cur.widgets]
+
+        for feature, bad_widgets in FEATURES_MAP.items():
+            if not check_features([feature]):
+                if set(bad_widgets) & set(widgets_ids):
+                    allowed_widgets = [wg for wg in cur.widgets if wg["id"] not in bad_widgets]
+                    cur.widgets = allowed_widgets
+                    cur.save()
diff --git a/dashboard/widgets/__init__.py b/dashboard/widgets/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/dashboard/widgets/opened_incs_widget.py b/dashboard/widgets/opened_incs_widget.py
new file mode 100644
index 0000000..f39c1c6
--- /dev/null
+++ b/dashboard/widgets/opened_incs_widget.py
@@ -0,0 +1,10 @@
+import logging
+
+from incident.models import Incident
+from networkmap.api import DANGER_STATUSES
+
+_log = logging.getLogger()
+
+
+def get_opened_incidents_count():
+    return Incident.objects.filter(status__in=DANGER_STATUSES).distinct().count()
diff --git a/dashboard/widgets/services_widget.py b/dashboard/widgets/services_widget.py
new file mode 100644
index 0000000..2320509
--- /dev/null
+++ b/dashboard/widgets/services_widget.py
@@ -0,0 +1,15 @@
+import subprocess
+
+
+def get_services_list():
+    # top five CPU consumers as reported by ps
+    data = subprocess.check_output(['ps', '-Ao', 'user,comm,%cpu', '--sort=-pcpu'])
+    data = data.decode('utf-8')
+
+    result = []
+    for line in data.split('\n')[1:6]:
+        cur = line.split(" ")
+        cur = [x for x in cur if len(x)]
+        if len(cur) == 3:
+            result.append({"user": cur[0], "command": cur[1], 'cpu': cur[2]})
+
+    return result
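The same top-five listing could be built without shelling out to ps, using psutil, which sys_info_widget.py below already depends on. A sketch under that assumption, not a drop-in replacement: the dict keys mirror get_services_list(), and note that psutil's first cpu_percent sample per process is 0.0, so real code would sample twice.

    import psutil

    def get_services_list_psutil():
        procs = []
        for p in psutil.process_iter(['username', 'name', 'cpu_percent']):
            procs.append(p.info)
        # sort by CPU usage, highest first; cpu_percent may be 0.0 on the first sample
        procs.sort(key=lambda info: info['cpu_percent'] or 0, reverse=True)
        return [{'user': info['username'], 'command': info['name'], 'cpu': info['cpu_percent']}
                for info in procs[:5]]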
diff --git a/dashboard/widgets/sys_info_widget.py b/dashboard/widgets/sys_info_widget.py
new file mode 100644
index 0000000..abb9e61
--- /dev/null
+++ b/dashboard/widgets/sys_info_widget.py
@@ -0,0 +1,75 @@
+import logging
+import platform
+import re
+import shutil
+import socket
+import time
+
+import cpuinfo
+import psutil
+from django.conf import settings
+from django.template.defaultfilters import filesizeformat
+from django.utils import timesince
+
+from core.utils import fmt_input, dtnow
+
+_log = logging.getLogger(__name__)
+
+
+def get_console_version():
+    return getattr(settings, 'SITE_INFO')['version']
+
+
+def get_machine_name():
+    pc_name = socket.gethostname()
+    pc_name = re.sub(r'\.local$', '', pc_name)
+    return pc_name
+
+
+def get_description():
+    # TODO: Currently taking the name of the console. Think about what "short description" can be
+    return getattr(settings, 'SITE_INFO')['name']
+
+
+def get_operating_system():
+    return platform.system()
+
+
+def get_cpu_model():
+    return cpuinfo.get_cpu_info()['brand']
+
+
+def get_server_time():
+    return fmt_input(dtnow(local=True).time())
+
+
+def get_uptime():
+    uptime = int(time.time() - psutil.boot_time())
+    # dtnow() is given the uptime as a negative day offset to get back to the boot moment
+    converted_awake_time = timesince.timesince(dtnow(-(uptime / 86400)))
+    return converted_awake_time
+
+
+def get_disk_usage():
+    total, used, free = shutil.disk_usage("/")
+
+    total = filesizeformat(total)
+    free = filesizeformat(free)
+    used = filesizeformat(used)
+    return {'disk_total': total, 'disk_free': free, 'disk_used': used}
+
+
+def get_cpu_total():
+    return psutil.cpu_percent()
+
+
+def get_mem():
+    """
+    Get memory usage
+
+    :return: Dict with mem_total and mem_used
+
+    """
+    gb_used = filesizeformat(psutil.virtual_memory().used)
+    gb_total = filesizeformat(psutil.virtual_memory().total)
+
+    return {'mem_total': gb_total, 'mem_used': gb_used}
diff --git a/dashboard/widgets/top_ids_widget.py b/dashboard/widgets/top_ids_widget.py
new file mode 100644
index 0000000..97db32c
--- /dev/null
+++ b/dashboard/widgets/top_ids_widget.py
@@ -0,0 +1,12 @@
+def get_top_ids_events():
+    # stub data until the correlator supplies real top IDS events
+    stub_data = [dict(name="SURICATA Applayer Protocol detection skipped", amount=150),
+                 dict(name="ET POLICY Incoming Basic Auth Base64 HTTP Password detected unencrypted", amount=120),
+                 dict(name="ET SCAN NMAP -sA (1)", amount=118),
+                 dict(name="SURICATA HTTP unable to match response to request", amount=102),
+                 dict(name="ET VOIP H.323 in Q.931 Call Setup - Inbound", amount=92),
+                 dict(name="ET VOIP H.323 in Q.931 Call Setup ", amount=82),
+                 dict(name="IDS7", amount=64),
+                 dict(name="IDS8", amount=50),
+                 dict(name="IDS9", amount=41),
+                 dict(name="IDS10", amount=21)]
+    return stub_data
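The widgets.py module in the next hunk implements a small plugin registry: a widget is added by subclassing DashboardWidget, and all_subclasses() picks it up for both the dashboard listing and the permission map consumed by check_user_widgets(). A hypothetical sketch; TopTalkersWidget is illustrative and not part of this patch:

    from django.utils.translation import gettext_lazy

    from dashboard.widgets.widgets import DashboardWidget, get_widget_type_with_perms_map
    from perms.models import Perm

    class TopTalkersWidget(DashboardWidget):   # hypothetical widget
        type = 'top-talkers'
        label = gettext_lazy('Top talkers')
        perms = [Perm.can_view_incidents]

    # the new type shows up automatically in the type-to-permissions map
    assert 'top-talkers' in get_widget_type_with_perms_map()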
diff --git a/dashboard/widgets/widgets.py b/dashboard/widgets/widgets.py
new file mode 100644
index 0000000..8bba93b
--- /dev/null
+++ b/dashboard/widgets/widgets.py
@@ -0,0 +1,167 @@
+import logging
+
+from django.conf import settings
+from django.forms.widgets import Widget
+from django.utils.translation import gettext_lazy
+
+from dashboard.models import DashboardLayout
+from license_info.tools import check_features
+from perms.models import Perm
+
+_log = logging.getLogger()
+
+LICENSE_FEATURE_EVENT_PROCESSING = getattr(settings, 'LICENSE_FEATURE_EVENT_PROCESSING')
+
+# Map license features to restricted widgets
+FEATURES_MAP = {
+    LICENSE_FEATURE_EVENT_PROCESSING: ["correlator-info"]
+}
+
+
+def check_widget_license(widget):
+    """Check if the widget is allowed in the current license
+    @return True if the widget is allowed and False if not"""
+    for feature, bad_widgets in FEATURES_MAP.items():
+        if widget.type in bad_widgets:
+            return check_features([feature])
+
+    return True
+
+
+def all_subclasses(cls):
+    """
+    Get all subclasses of a class, recursively
+
+    :see: https://stackoverflow.com/a/3862957
+
+    :param cls: Class to find subclasses of
+    :return: Set of cls subclasses
+    """
+    return set(cls.__subclasses__()).union(
+        [s for c in cls.__subclasses__() for s in all_subclasses(c)])
+
+
+def get_widget_list_from_user(user):
+    """
+    Get the list of widgets available to the user
+    :param user: User who wants to see widgets
+    :return: List of available widgets
+
+    """
+    widgets_list = all_subclasses(DashboardWidget)
+
+    # Without a user there is nothing to filter against; checked first so the
+    # layout lookup below cannot fail on user=None
+    if user is None:
+        return widgets_list
+
+    # Get user widgets that are already on the layout
+    user_widgets = DashboardLayout.objects.get(user=user)
+    widgets_ids = []
+    for layout_widget in user_widgets.widgets:
+        widgets_ids.append(layout_widget['id'])
+
+    # Remove a widget if it is restricted by the license
+    widgets_list = [cur for cur in widgets_list if check_widget_license(cur)]
+
+    # Build a filtered list of widgets, without the widgets already on the user layout
+    filtered_widgets_list = []
+    for w in widgets_list:
+        if w.type not in widgets_ids:
+            filtered_widgets_list.append(w)
+
+    # widgets_list holds classes, not instances, hence the explicit self=cur call
+    return [cur for cur in filtered_widgets_list if cur.user_can_see_widget(self=cur, user=user) is True]
+
+
+class DashboardWidget(Widget):
+    """
+    Base dashboard widget
+    """
+    type = 'base'
+    label = gettext_lazy('Widget')
+    perms = []
+    template_name = ''
+
+    def user_can_see_widget(self, user):
+        """
+        Check if the user can see this widget
+
+        Superusers can see all widgets
+        :param user: User to check
+        :return: True if the user can see the widget and False if not
+        """
+        if user.is_superuser:
+            return True
+
+        for cur in self.perms:
+            if not user.has_perm(Perm.perm_req(cur)):
+                return False
+
+        return True
+
+
+class IncsByImportance(DashboardWidget):
+    type = 'incs-by-importance'
+    label = gettext_lazy('Incidents by importance')
+    perms = [Perm.can_view_incidents]
+
+
+class IncsByCategory(DashboardWidget):
+    type = 'incs-by-category'
+    label = gettext_lazy('Incidents by category')
+    perms = [Perm.can_view_incidents]
+
+
+class SysInfoWidget(DashboardWidget):
+    type = 'sys-info'
+    label = gettext_lazy('System information')
+    perms = [Perm.can_view_sys_info]
+
+
+class ServicesWidget(DashboardWidget):
+    type = 'services'
+    label = gettext_lazy('Services')
+    perms = [Perm.can_view_sys_info]
+
+
+# TODO: Add opened incidents widget when correlator is ready
+# class OpenedIncidentsWidget(DashboardWidget):
+#     type = 'opened-incs'
+#     label = gettext_lazy('Opened Incidents')
+#     perms = [Perm.can_view_incidents]
+
+
+class IncsByTimeWidget(DashboardWidget):
+    type = 'incs-by-time'
+    label = gettext_lazy('Incidents by time')
+    perms = [Perm.can_view_incidents]
+
+
+# TODO: Add events by time widget when correlator is ready
+# class EventsByTimeWidget(DashboardWidget):
+#     type = 'events-by-time'
+#     label = gettext_lazy('Events by time')
+#     perms = [Perm.can_view_events]
+
+
+class AssetsByIncsWidget(DashboardWidget):
+    type = 'assets_by_incs'
+    label = gettext_lazy('Assets by incidents')
+    perms = [Perm.can_view_incidents, Perm.can_view_assets_list]
+
+
+# TODO: Add top IDS events widget when correlator is ready
+# class TopIdsEvents(DashboardWidget):
+#     type = 'top-ids-events'
+#     label = gettext_lazy('Top IDS events')
+#     perms = [Perm.can_view_ids_events]
+
+
+class CorrelatorInfoWidget(DashboardWidget):
+    type = 'correlator-info'
+    label = gettext_lazy('Correlator')
+    perms = [Perm.can_view_correlation_rules_list]
+
+
+def get_widget_type_with_perms_map() -> dict:
+    all_widget = all_subclasses(DashboardWidget)
+    return {widget.type: widget.perms for widget in all_widget}
diff --git a/deb/react.env b/deb/react.env
new file mode 100644
index 0000000..08f66fb
--- /dev/null
+++ b/deb/react.env
@@ -0,0 +1 @@
+PUBLIC_URL=/static/react
\ No newline at end of file
diff --git a/deb/skeleton/usr/lib/systemd/system/amccelery.service b/deb/skeleton/usr/lib/systemd/system/amccelery.service
new file mode 100644
index 0000000..9d97df5
--- /dev/null +++ b/deb/skeleton/usr/lib/systemd/system/amccelery.service @@ -0,0 +1,20 @@ +[Unit] +Description=ARMA management console celery service +After=amcelasticsearch.service +After=amccorrelator.service + +[Service] +Type=simple +User=armaconsole +Group=www-data +Environment=DJANGO_SETTINGS_MODULE=console.settings.prod +WorkingDirectory=/usr/local/armaconsole/app +LogsDirectory=armaconsole +ExecStart=/usr/local/armaconsole/env/bin/celery -A console worker --logfile=/var/log/armaconsole/celeryd.log --loglevel=INFO +Restart=always +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target diff --git a/deb/skeleton/usr/lib/systemd/system/amccelerybeat.service b/deb/skeleton/usr/lib/systemd/system/amccelerybeat.service new file mode 100644 index 0000000..8a64b6b --- /dev/null +++ b/deb/skeleton/usr/lib/systemd/system/amccelerybeat.service @@ -0,0 +1,20 @@ +[Unit] +Description=ARMA management console celery beat service +After=amcelasticsearch.service +After=amccorrelator.service + +[Service] +Type=simple +User=armaconsole +Group=www-data +Environment=DJANGO_SETTINGS_MODULE=console.settings.prod +WorkingDirectory=/usr/local/armaconsole/app +LogsDirectory=armaconsole +ExecStart=/usr/local/armaconsole/env/bin/celery -A console beat --logfile=/var/log/armaconsole/celerybeat.log --loglevel=INFO --scheduler django_celery_beat.schedulers:DatabaseScheduler +Restart=always +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target diff --git a/deb/skeleton/usr/lib/systemd/system/amccore.service b/deb/skeleton/usr/lib/systemd/system/amccore.service new file mode 100644 index 0000000..432fd66 --- /dev/null +++ b/deb/skeleton/usr/lib/systemd/system/amccore.service @@ -0,0 +1,27 @@ +[Unit] +Description=ARMA management console gunicorn daemon +After=network.target +After=postgresql.service +After=redis-server.service +After=elasticsearch.service +After=amccelery.service +After=amccorrelator.service +After=amccelerybeat.service + +[Service] +User=armaconsole +Group=www-data +WorkingDirectory=/usr/local/armaconsole/app +LogsDirectory=armaconsole/gunicorn +Environment=DJANGO_SETTINGS_MODULE=console.settings.prod +ExecStart=/usr/local/armaconsole/env/bin/gunicorn --bind 0.0.0.0:8000 --workers=3 --timeout 300 --access-logfile /var/log/armaconsole/gunicorn/access.log --error-logfile /var/log/armaconsole/gunicorn/error.log --log-file /var/log/armaconsole/gunicorn/gunicorn.log console.wsgi:application +ExecReload=/bin/kill -s HUP $MAINPID +KillMode=mixed +Restart=on-failure +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target + diff --git a/deb/skeleton/usr/local/sbin/amcpsh b/deb/skeleton/usr/local/sbin/amcpsh new file mode 100644 index 0000000..adf5807 --- /dev/null +++ b/deb/skeleton/usr/local/sbin/amcpsh @@ -0,0 +1,3 @@ +#!/bin/sh + +(cd /usr/local/armaconsole/ && . env/bin/activate && cd app && DJANGO_SETTINGS_MODULE=console.settings.prod python manage.py shell) diff --git a/deb/skeleton/var/www/armaconsole/public/media/.gitkeep b/deb/skeleton/var/www/armaconsole/public/media/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/deb_old/skeleton/DEBIAN/config b/deb_old/skeleton/DEBIAN/config new file mode 100644 index 0000000..ecdc107 --- /dev/null +++ b/deb_old/skeleton/DEBIAN/config @@ -0,0 +1,16 @@ +#!/bin/bash + +set -e + +PROJECT=armaconsole + +# shellcheck disable=SC2034 +. 
/usr/share/debconf/confmodule +if [ -f /usr/share/dbconfig-common/dpkg/config.pgsql ]; then + . /usr/share/dbconfig-common/dpkg/config.pgsql + dbc_go "${PROJECT}" "$@" +fi + +DEBUG_INSTALL="${DEBUG_INSTALL:-0}" +[ "$DEBUG_INSTALL" == "1" ] && set -x +[ "$DEBUG_INSTALL" == "1" ] && echo "Calling config $*" diff --git a/deb_old/skeleton/DEBIAN/control.template b/deb_old/skeleton/DEBIAN/control.template new file mode 100644 index 0000000..6d0ed59 --- /dev/null +++ b/deb_old/skeleton/DEBIAN/control.template @@ -0,0 +1,12 @@ +Package: {package} +Version: {version} +Maintainer: arma +Architecture: {architecture} +Section: admin +Pre-Depends: gcc, make, libpq-dev, python3-dev, openssl, bash, postgresql, default-jre, apt-utils, debconf, dbconfig-pgsql | dbconfig-no-thanks | dbconfig-common, postgresql-contrib +Depends: nginx, sudo, python3, python3-pip, redis, redis-server, virtualenv, gettext, elasticsearch (= 7.12.0), golang, vector (= 0.19.1) +Conflicts: {conflicts} +Installed-Size: {installed_size} +Priority: optional +Description: ARMA management console + Manage sensors and monitor events and incidents diff --git a/deb_old/skeleton/DEBIAN/dirs b/deb_old/skeleton/DEBIAN/dirs new file mode 100644 index 0000000..65fc54d --- /dev/null +++ b/deb_old/skeleton/DEBIAN/dirs @@ -0,0 +1,3 @@ +etc/armaconsole +usr/local/armaconsole +var/www/armaconsole diff --git a/deb_old/skeleton/DEBIAN/postinst b/deb_old/skeleton/DEBIAN/postinst new file mode 100644 index 0000000..02937ef --- /dev/null +++ b/deb_old/skeleton/DEBIAN/postinst @@ -0,0 +1,179 @@ +#!/bin/bash + +# summary of how this script can be called: +# * `configure' +# * `abort-upgrade' +# * `abort-remove' `in-favour' +# +# * `abort-remove' +# * `abort-deconfigure' `in-favour' +# `removing' +# +# for details, see https://www.debian.org/doc/debian-policy/ or +# the debian-policy package + +set -e # fail on any error + +PROJECT=armaconsole + +# USERS CONFIG +PROJECT_USER="${PROJECT}" +PROJECT_GROUP="www-data" +PROJECT_CHOWNER="${PROJECT_USER}:${PROJECT_GROUP}" +SUDOERS_FILE_PATH="/etc/sudoers.d/${PROJECT}" + +# SYSTEMCTL CONFIG +SYSCTL_AMC_SERVICES="amcgunicorn amccelery amccelerybeat amccorrelator amclicense amcchecker amcvector" +SYSCTL_ALL_SERVICES="${SYSCTL_AMC_SERVICES} postgresql nginx elasticsearch redis-server" + +# AMC DIRS +AMC_ETC="/etc/${PROJECT}" +AMC_HOME="/usr/local/${PROJECT}" +AMC_WWW="/var/www/${PROJECT}" +AMC_PUBLIC="${AMC_WWW}/public" + +# CERTS PATHES +CORE_CERT_PATH="${AMC_PUBLIC}/media/certificate.crt" +CORE_PRIV_KEY_PATH="${AMC_PUBLIC}/media/certificate.key" +NGINX_SSL_PATH="/etc/nginx/ssl/${PROJECT}" +NGINX_CERT_PATH="${NGINX_SSL_PATH}/nginx-selfsigned.crt" +NGINX_PRIV_KEY_PATH="${NGINX_SSL_PATH}/nginx-selfsigned.key" +NGINX_DHPARAM_PATH="${NGINX_SSL_PATH}/dhparam.pem" +NGINX_CERT_SUBJ="/C=RU/ST=Moscow/L=Moscow/O=ARMA/CN=iwarma.ru" + +# ======[ Trap Errors ]======# +set -E # let shell functions inherit ERR trap +trap err_handler 1 2 3 15 ERR # Trap non-normal exit signals: 1/HUP, 2/INT, 3/QUIT, 15/TERM, ERR +# shellcheck disable=SC2128 +function err_handler() { + local exit_status=${1:-$?} + logger -s -p "syslog.err" -t "${PROJECT}.deb" \ + "${PROJECT}.deb script '$0' error code $exit_status (line $BASH_LINENO: '$BASH_COMMAND')" + exit "$exit_status" +} + +. /usr/share/debconf/confmodule +# shellcheck disable=SC2034 +# shellcheck disable=SC1090 +if [ -f /usr/share/dbconfig-common/dpkg/postinst.pgsql ]; then + . /usr/share/dbconfig-common/dpkg/postinst.pgsql + . 
"${AMC_ETC}/env.prod" + dbc_generate_include_owner="${PROJECT_CHOWNER}" + dbc_generate_include_perms="0640" + dbc_generate_include="sh:${DEBCONF_DBCONF_FPATH}" + dbc_pgsql_createdb_encoding="UTF8" + dbc_go "${PROJECT}" "$@" +fi + +DEBUG_INSTALL="${DEBUG_INSTALL:-0}" +[ "$DEBUG_INSTALL" == "1" ] && set -x +[ "$DEBUG_INSTALL" == "1" ] && echo "Calling postinst $*" + +function backup_config() { + fpath="$1" + if [ -f "${fpath}" ] && [ ! -f "${fpath}.AMCBK" ]; then + mv "${fpath}" "${fpath}.AMCBK" + fi +} + +function backup_and_copy_config() { + fpath="$1" + if [ -f "${fpath}" ] && [ ! -f "${fpath}.AMCBK" ]; then + cp -f "${fpath}" "${fpath}.AMCBK" + fi +} + +function restore_config() { + fpath="$1" + if [ -f "${fpath}.AMCBK" ]; then + rm -f "${fpath}" + mv "${fpath}.AMCBK" "${fpath}" + fi +} + +function backup_and_link_config() { + path_from="$1" + path_to="$2" + restore_config "${path_to}" + backup_config "${path_to}" + rm -f "${path_to}" + ln -sf "${path_from}" "${path_to}" +} + +set -u # treat unset variables as errors +case "$1" in +configure | abort-remove) + # Logstash + # backup_and_link_config "${AMC_ETC}/logstash.yml" "/etc/logstash/logstash.yml" + # ln -sf "/etc/logstash/conf.d/" "${AMC_PUBLIC}/logstash" + # chmod 777 "${AMC_PUBLIC}/logstash" # TODO change folder and owner instead 777 + + # Vector + # backup_and_link_config "${AMC_ETC}/vector.yml" "/etc/vector/vector.yml" + # ln -sf "/etc/vector/conf.d/" "${AMC_PUBLIC}/vector" + mkdir -p "${AMC_PUBLIC}/vector" + mkdir -p "${AMC_WWW}/vector" + + # Elasticsearch + backup_and_link_config "${AMC_ETC}/elasticsearch.yml" "/etc/elasticsearch/elasticsearch.yml" + mkdir -p "/usr/share/elasticsearch/data" + chmod 777 "/usr/share/elasticsearch/data" &>/dev/null + + if [ -f /.dockerenv ]; then + backup_and_copy_config "/etc/elasticsearch/jvm.options" + sed -i 's|^#\?-Djava.io.tmpdir=.*|-Djava.io.tmpdir=/var/log/elasticsearch|' /etc/elasticsearch/jvm.options + sed -i 's|^[# ]*-Xms[0-9]\+g *$|-Xms2g|' /etc/elasticsearch/jvm.options + sed -i 's|^[# ]*-Xmx[0-9]\+g *$|-Xmx2g|' /etc/elasticsearch/jvm.options + + # backup_and_copy_config "/etc/logstash/jvm.options" + # sed -i 's|^#\?-Djava.io.tmpdir=.*|-Djava.io.tmpdir=/var/log/logstash|' /etc/logstash/jvm.options + # sed -i 's|^[# ]*-Xms[0-9]\+g *$|-Xms2g|' /etc/logstash/jvm.options + # sed -i 's|^[# ]*-Xmx[0-9]\+g *$|-Xmx2g|' /etc/logstash/jvm.options + fi + + # Nginx + rm -f "/etc/nginx/sites-enabled/default" + cp -f "${AMC_HOME}/nginx/${PROJECT}_http.nginx" "${AMC_HOME}/nginx/${PROJECT}.nginx" + ln -sf "${AMC_HOME}/nginx/${PROJECT}.nginx" "/etc/nginx/sites-enabled/${PROJECT}.nginx" + # TODO: Generate cer, key, pem from core python module + [ -f "${NGINX_DHPARAM_PATH}" ] || openssl dhparam -out "${NGINX_DHPARAM_PATH}" 2048 &>/dev/null + openssl req -x509 -nodes -days 365 -newkey rsa:2048 \ + -keyout "${CORE_PRIV_KEY_PATH}" \ + -out "${CORE_CERT_PATH}" \ + -subj "${NGINX_CERT_SUBJ}" &>/dev/null + cp -f "${CORE_CERT_PATH}" "${NGINX_CERT_PATH}" + cp -f "${CORE_PRIV_KEY_PATH}" "${NGINX_PRIV_KEY_PATH}" + + # Dirs + chown -R "${PROJECT_CHOWNER}" "${AMC_HOME}" + chown -R "${PROJECT_CHOWNER}" "${AMC_ETC}" + chown -R "${PROJECT_CHOWNER}" "${AMC_WWW}" + chown -R "${PROJECT_CHOWNER}" "${NGINX_SSL_PATH}" + + + if [ -f /lib/systemd/system/vector.service ] || [ -f /usr/lib/systemd/system/vector.service ] + then + #stop default vector.service + systemctl stop vector.service + + #disable default vector.service + systemctl disable vector.service + + #remove default vector.service files + rm -f 
/lib/systemd/system/vector.service /usr/lib/systemd/system/vector.service + fi + + # Services + systemctl daemon-reload + # shellcheck disable=SC2086 + systemctl enable ${SYSCTL_ALL_SERVICES} + # shellcheck disable=SC2086 + systemctl restart ${SYSCTL_ALL_SERVICES} || true + ;; +abort-upgrade | abort-deconfigure) ;; +*) + echo "postinst called with unknown argument \`$1'" >&2 + exit 1 + ;; +esac +exit 0 diff --git a/deb_old/skeleton/DEBIAN/postrm b/deb_old/skeleton/DEBIAN/postrm new file mode 100644 index 0000000..21ee500 --- /dev/null +++ b/deb_old/skeleton/DEBIAN/postrm @@ -0,0 +1,130 @@ +#!/bin/bash + +# summary of how this script can be called: +# * `remove' +# * `purge' +# * `upgrade' +# * `failed-upgrade' +# * `abort-install' +# * `abort-install' +# * `abort-upgrade' +# * `disappear' +# +# for details, see https://www.debian.org/doc/debian-policy/ or +# the debian-policy package + +set -e # fail on any error + +PROJECT=armaconsole + +# USERS CONFIG +PROJECT_USER="${PROJECT}" +SUDOERS_FILE_PATH="/etc/sudoers.d/${PROJECT}" + +# SYSTEMCTL CONFIG +SYSCTL_AMC_SERVICES="amcgunicorn amccelery amccelerybeat amccorrelator amclicense amcchecker amcvector" + +# AMC DIRS +AMC_ETC="/etc/${PROJECT}" +AMC_HOME="/usr/local/${PROJECT}" +AMC_WWW="/var/www/${PROJECT}" +AMC_LOG="/var/log/${PROJECT}" +AMC_PUBLIC="${AMC_WWW}/public" + +# CERTS PATHES +NGINX_SSL_PATH="/etc/nginx/ssl/${PROJECT}" + +# ======[ Trap Errors ]======# +set -E # let shell functions inherit ERR trap +trap err_handler 1 2 3 15 ERR # Trap non-normal exit signals: 1/HUP, 2/INT, 3/QUIT, 15/TERM, ERR +# shellcheck disable=SC2128 +function err_handler() { + local exit_status=${1:-$?} + logger -s -p "syslog.err" -t "${PROJECT}.deb" \ + "${PROJECT}.deb script '$0' error code $exit_status (line $BASH_LINENO: '$BASH_COMMAND')" + exit "$exit_status" +} + +[ -f /usr/share/debconf/confmodule ] && . /usr/share/debconf/confmodule +if [ -f /usr/share/dbconfig-common/dpkg/postrm.pgsql ]; then + . 
/usr/share/dbconfig-common/dpkg/postrm.pgsql
+    dbc_go "${PROJECT}" "$@"
+fi
+[ -f /usr/share/debconf/confmodule ] && db_stop
+
+DEBUG_INSTALL="${DEBUG_INSTALL:-0}"
+[ "$DEBUG_INSTALL" == "1" ] && set -x
+[ "$DEBUG_INSTALL" == "1" ] && echo "Calling postrm $*"
+
+function restore_config() {
+    fpath="$1"
+    if [ -f "${fpath}.AMCBK" ]; then
+        rm -f "${fpath}"
+        mv "${fpath}.AMCBK" "${fpath}"
+    fi
+}
+
+set -u # treat unset variables as errors
+case "$1" in
+disappear)
+    exit 1
+    ;;
+remove | upgrade | abort-install | abort-upgrade | failed-upgrade)
+
+    # shellcheck disable=SC2086
+    systemctl stop $SYSCTL_AMC_SERVICES &>/dev/null || true
+    # shellcheck disable=SC2086
+    systemctl disable $SYSCTL_AMC_SERVICES &>/dev/null || true
+
+    # Nginx: the glob must stay outside the quotes so it actually expands
+    rm -f /etc/nginx/sites-enabled/"${PROJECT}"* &>/dev/null
+    rm -rf "${NGINX_SSL_PATH}"
+
+    # Elasticsearch
+    restore_config "/etc/elasticsearch/elasticsearch.yml"
+    rm -rf "/usr/share/elasticsearch/data" &>/dev/null
+
+    # Logstash
+    # restore_config "/etc/logstash/logstash.yml"
+    # rm -f "${AMC_PUBLIC}/logstash/armaif_"* &>/dev/null
+
+    # if [ -f /.dockerenv ]; then
+    #     restore_config "/etc/elasticsearch/jvm.options"
+    #     restore_config "/etc/logstash/jvm.options"
+    # fi
+
+    # Vector
+    rm -f "${AMC_PUBLIC}/vector/"* &>/dev/null
+
+    # VENV
+
+    # User
+    rm -rf "${SUDOERS_FILE_PATH}"
+    userdel "${PROJECT_USER}" &>/dev/null || true
+
+    # Hosts; a docker container may not permit editing /etc/hosts
+    sed -i '/^127.0.0.1\slicense-client/d' /etc/hosts &>/dev/null || true
+
+    # Dirs
+    rm -rf "${AMC_ETC}"/.[^.]* "${AMC_ETC}"/*
+    rm -rf "${AMC_HOME}"/.[^.]* "${AMC_HOME}"/*
+    rm -rf "${AMC_WWW}"/.[^.]* "${AMC_WWW}"/*
+    rm -rf "${AMC_LOG}"/.[^.]* "${AMC_LOG}"/*
+    ;;
+purge)
+    # Nginx: unquoted glob, as above
+    rm -f /etc/nginx/sites-available/"${PROJECT}"* &>/dev/null
+
+    # DB
+    if which ucf &>/dev/null; then
+        ucf --purge "${AMC_ETC}/debconf_dbconfig"
+        ucfr --purge "${PROJECT}" "${AMC_ETC}/debconf_dbconfig"
+    fi
+
+    ;;
+*)
+    echo "postrm called with unknown argument \`$1'" >&2
+    exit 1
+    ;;
+esac
+exit 0
diff --git a/deb_old/skeleton/DEBIAN/preinst b/deb_old/skeleton/DEBIAN/preinst
new file mode 100644
index 0000000..34749be
--- /dev/null
+++ b/deb_old/skeleton/DEBIAN/preinst
@@ -0,0 +1,68 @@
+#!/bin/bash
+
+# summary of how this script can be called:
+# * <new-preinst> `install'
+# * <new-preinst> `install' <old-version>
+# * <new-preinst> `upgrade' <old-version>
+# * <old-preinst> `abort-upgrade' <new-version>
+# for details, see https://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+set -e # fail on any error
+
+PROJECT=armaconsole
+
+# USERS CONFIG
+PROJECT_USER="${PROJECT}"
+PROJECT_GROUP="www-data"
+SUDOERS_FILE_PATH="/etc/sudoers.d/${PROJECT}"
+
+# SYSTEMCTL CONFIG
+SYSCTL_AMC_SERVICES="amcgunicorn amccelery amccelerybeat amccorrelator amclicense amcchecker amcvector"
+
+# AMC DIRS
+AMC_HOME="/usr/local/${PROJECT}"
+
+# ======[ Trap Errors ]======#
+set -E # let shell functions inherit ERR trap
+trap err_handler 1 2 3 15 ERR # Trap non-normal exit signals: 1/HUP, 2/INT, 3/QUIT, 15/TERM, ERR
+# shellcheck disable=SC2128
+function err_handler() {
+    local exit_status=${1:-$?}
+    logger -s -p "syslog.err" -t "${PROJECT}.deb" \
+        "${PROJECT}.deb script '$0' error code $exit_status (line $BASH_LINENO: '$BASH_COMMAND')"
+    exit "$exit_status"
+}
+
+DEBUG_INSTALL="${DEBUG_INSTALL:-0}"
+[ "$DEBUG_INSTALL" == "1" ] && set -x
+[ "$DEBUG_INSTALL" == "1" ] && echo "Calling preinst $*"
+
+set -u # treat unset variables as errors
+case "$1" in
+upgrade)
+    # shellcheck disable=SC2086
+    systemctl stop $SYSCTL_AMC_SERVICES || true
+    # shellcheck disable=SC2086
+    systemctl disable $SYSCTL_AMC_SERVICES || true
+    ;;
+install)
+    # User
+    # getent(1) checks whether the group exists; `id -g` would look up a user, not a group
+    getent group ${PROJECT_GROUP} &>/dev/null ||
+        groupadd ${PROJECT_GROUP}
+    id -u ${PROJECT_USER} &>/dev/null ||
+        adduser --system --home "${AMC_HOME}" --no-create-home \
+            --shell /bin/bash --ingroup "${PROJECT_GROUP}" --gecos "ARMA management console user" \
+            "${PROJECT_USER}" &>/dev/null
+    echo "${PROJECT_USER} ALL=(ALL) NOPASSWD:ALL" >"${SUDOERS_FILE_PATH}"
+
+    # Hosts
+    echo "127.0.0.1 license-client elasticsearch vector armaconsole" >> /etc/hosts
+    ;;
+abort-upgrade) ;;
+*)
+    echo "preinst called with unknown argument \`$1'" >&2
+    exit 1
+    ;;
+esac
+exit 0
diff --git a/deb_old/skeleton/DEBIAN/prerm b/deb_old/skeleton/DEBIAN/prerm
new file mode 100644
index 0000000..f48afc1
--- /dev/null
+++ b/deb_old/skeleton/DEBIAN/prerm
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+# summary of how this script can be called:
+# * <prerm> `remove'
+# * <old-prerm> `upgrade' <new-version>
+# * <new-prerm> `failed-upgrade' <old-version>
+# * <conflictor's-prerm> `remove' `in-favour' <package> <new-version>
+# * <deconfigured's-prerm> `deconfigure' `in-favour'
+#   <package-being-installed> <version> `removing'
+#   <conflicting-package> <version>
+#
+# for details, see https://www.debian.org/doc/debian-policy/ or
+# the debian-policy package
+
+set -e # fail on any error
+
+PROJECT=armaconsole
+
+# SYSTEMCTL CONFIG
+SYSCTL_AMC_SERVICES="amcgunicorn amccelery amccelerybeat amccorrelator amclicense amcchecker amcvector"
+
+# ======[ Trap Errors ]======#
+set -E # let shell functions inherit ERR trap
+trap err_handler 1 2 3 15 ERR # Trap non-normal exit signals: 1/HUP, 2/INT, 3/QUIT, 15/TERM, ERR
+# shellcheck disable=SC2128
+function err_handler() {
+    local exit_status=${1:-$?}
+    logger -s -p "syslog.err" -t "${PROJECT}.deb" \
+        "${PROJECT}.deb script '$0' error code $exit_status (line $BASH_LINENO: '$BASH_COMMAND')"
+    exit "$exit_status"
+}
+
+. /usr/share/debconf/confmodule
+if [ -f /usr/share/dbconfig-common/dpkg/prerm.pgsql ]; then
+    . /usr/share/dbconfig-common/dpkg/prerm.pgsql
+    dbc_go "${PROJECT}" "$@" || true
+    db_stop
+fi
+
+DEBUG_INSTALL="${DEBUG_INSTALL:-0}"
+[ "$DEBUG_INSTALL" == "1" ] && set -x
+[ "$DEBUG_INSTALL" == "1" ] && echo "Calling prerm $*"
+
+set -u # treat unset variables as errors
+case "$1" in
+remove)
+    # shellcheck disable=SC2086
+    systemctl stop $SYSCTL_AMC_SERVICES || true
+    # shellcheck disable=SC2086
+    systemctl disable $SYSCTL_AMC_SERVICES || true
+    ;;
+upgrade | deconfigure) ;;
+failed-upgrade)
+    exit 1
+    ;;
+*)
+    echo "prerm called with unknown argument \`$1'" >&2
+    exit 1
+    ;;
+esac
+exit 0
diff --git a/deb_old/skeleton/etc/armaconsole/elasticsearch.yml b/deb_old/skeleton/etc/armaconsole/elasticsearch.yml
new file mode 100644
index 0000000..688923c
--- /dev/null
+++ b/deb_old/skeleton/etc/armaconsole/elasticsearch.yml
@@ -0,0 +1,23 @@
+## Default Elasticsearch configuration from Elasticsearch base image.
+### https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml +## +cluster.name: "console-cluster" +# TODO: change _local_ +network.host: 0.0.0.0 +discovery.type: single-node + +### X-Pack settings +### see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html +## +xpack.license.self_generated.type: basic +xpack.security.enabled: true +xpack.monitoring.collection.enabled: false + +http.cors.enabled: true +http.cors.allow-origin: "*" +http.cors.allow-methods: OPTIONS, HEAD, GET, POST, PUT, DELETE +http.cors.allow-headers: Authorization,X-Requested-With,X-Auth-Token,Content-Type,Content-Length +http.cors.allow-credentials: true + +path.logs: /var/log/elasticsearch/ +path.data: /usr/share/elasticsearch/data diff --git a/deb_old/skeleton/etc/armaconsole/env/vector.env b/deb_old/skeleton/etc/armaconsole/env/vector.env new file mode 100644 index 0000000..a78e4e4 --- /dev/null +++ b/deb_old/skeleton/etc/armaconsole/env/vector.env @@ -0,0 +1,3 @@ +VECTOR_CONFIG=/etc/armaconsole/vector.yml +VECTOR_CONFIG_DIR=/var/www/armaconsole/public/vector +VECTOR_WATCH_CONFIG=yes diff --git a/deb_old/skeleton/etc/armaconsole/license.yml b/deb_old/skeleton/etc/armaconsole/license.yml new file mode 100644 index 0000000..ef38011 --- /dev/null +++ b/deb_old/skeleton/etc/armaconsole/license.yml @@ -0,0 +1,17 @@ +license: + product_name: ARMA Console + file_name: /etc/armaconsole/license.bin +api: + port: 8050 +server: + url: https://license.iwarma.ru + ignore_ssl_errors: true +log: + filename: /var/log/armaconsole/license.log + max_size: 100 + max_bkup: 10 + max_age: 10 + compress: true + level: 4 + formatter: text + force_colors: true diff --git a/deb_old/skeleton/etc/armaconsole/logstash.yml b/deb_old/skeleton/etc/armaconsole/logstash.yml new file mode 100644 index 0000000..d922fe5 --- /dev/null +++ b/deb_old/skeleton/etc/armaconsole/logstash.yml @@ -0,0 +1,21 @@ +--- +## Default Logstash configuration from Logstash base image. 
+## https://github.com/elastic/logstash/blob/master/docker/data/logstash/config/logstash-full.yml
+#
+# TODO: change _local_
+http.host: "0.0.0.0"
+
+## X-Pack security credentials
+#
+xpack.monitoring.enabled: false
+xpack.monitoring.elasticsearch.hosts: [ "http://localhost:9200" ]
+xpack.monitoring.elasticsearch.username: elastic
+xpack.monitoring.elasticsearch.password: changeme
+
+# Auto reload configs
+config.reload.automatic: true
+
+# Speedup logstash
+pipeline.batch.size: 10000
+pipeline.batch.delay: 400
+pipeline.workers: 20
diff --git a/deb_old/skeleton/etc/armaconsole/vector.yml b/deb_old/skeleton/etc/armaconsole/vector.yml
new file mode 100644
index 0000000..0500762
--- /dev/null
+++ b/deb_old/skeleton/etc/armaconsole/vector.yml
@@ -0,0 +1,16 @@
+data_dir: "/var/www/armaconsole/vector"
+
+sources:
+  null_socket_source:
+    type: syslog
+    address: 0.0.0.0:0000
+    mode: udp
+
+sinks:
+  null_file_sink:
+    type: file
+    inputs:
+      - null_socket_source
+    compression: none
+    path: /dev/null
+    encoding: text
diff --git a/deb_old/skeleton/etc/cron.d/armaconsole b/deb_old/skeleton/etc/cron.d/armaconsole
new file mode 100644
index 0000000..59b0de9
--- /dev/null
+++ b/deb_old/skeleton/etc/cron.d/armaconsole
@@ -0,0 +1,2 @@
+SHELL=/bin/sh
+PATH=/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin
diff --git a/deb_old/skeleton/etc/nginx/snippets/ssl-params.conf b/deb_old/skeleton/etc/nginx/snippets/ssl-params.conf
new file mode 100644
index 0000000..9d01a54
--- /dev/null
+++ b/deb_old/skeleton/etc/nginx/snippets/ssl-params.conf
@@ -0,0 +1,21 @@
+# from https://cipherli.st/
+# and https://raymii.org/s/tutorials/Strong_SSL_Security_On_nginx.html
+
+ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
+ssl_prefer_server_ciphers on;
+ssl_ciphers "EECDH+AESGCM:EDH+AESGCM:AES256+EECDH:AES256+EDH";
+ssl_ecdh_curve secp384r1;
+ssl_session_cache shared:SSL:10m;
+ssl_session_tickets off;
+ssl_stapling on;
+ssl_stapling_verify on;
+resolver 8.8.8.8 8.8.4.4 valid=300s;
+resolver_timeout 5s;
+# Disable preloading HSTS for now. You can use the commented out header line that includes
+# the "preload" directive if you understand the implications.
+#add_header Strict-Transport-Security "max-age=63072000; includeSubdomains; preload"; +add_header Strict-Transport-Security "max-age=63072000; includeSubdomains"; +add_header X-Frame-Options DENY; +add_header X-Content-Type-Options nosniff; + +ssl_dhparam /etc/ssl/certs/dhparam.pem; diff --git a/deb_old/skeleton/etc/nginx/ssl/armaconsole/.gitkeep b/deb_old/skeleton/etc/nginx/ssl/armaconsole/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/deb_old/skeleton/usr/lib/systemd/system/amccelery.service b/deb_old/skeleton/usr/lib/systemd/system/amccelery.service new file mode 100644 index 0000000..9ec5a21 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amccelery.service @@ -0,0 +1,25 @@ +[Unit] +Description=ARMA management console celery service +Requires=amcstartconfigure.service +Requires=amcsetelkpass.service +After=amcstartconfigure.service +After=amclogstash.service +After=amcelasticsearch.service +After=amcsetelkpass.service +After=amccorrelator.service + +[Service] +Type=simple +User=armaconsole +Group=www-data +Environment=DJANGO_SETTINGS_MODULE=console.settings.prod +WorkingDirectory=/usr/local/armaconsole/app +LogsDirectory=armaconsole +ExecStart=/usr/local/armaconsole/env/bin/celery worker -A console --logfile=/var/log/armaconsole/celeryd.log --loglevel=INFO +Restart=always +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target diff --git a/deb_old/skeleton/usr/lib/systemd/system/amccelerybeat.service b/deb_old/skeleton/usr/lib/systemd/system/amccelerybeat.service new file mode 100644 index 0000000..0ff011f --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amccelerybeat.service @@ -0,0 +1,25 @@ +[Unit] +Description=ARMA management console celery beat service +Requires=amcstartconfigure.service +Requires=amcsetelkpass.service +After=amcstartconfigure.service +After=amclogstash.service +After=amcelasticsearch.service +After=amcsetelkpass.service +After=amccorrelator.service + +[Service] +Type=simple +User=armaconsole +Group=www-data +Environment=DJANGO_SETTINGS_MODULE=console.settings.prod +WorkingDirectory=/usr/local/armaconsole/app +LogsDirectory=armaconsole +ExecStart=/usr/local/armaconsole/env/bin/celery beat -A console --logfile=/var/log/armaconsole/celerybeat.log --loglevel=INFO --scheduler django_celery_beat.schedulers:DatabaseScheduler +Restart=always +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target diff --git a/deb_old/skeleton/usr/lib/systemd/system/amcchecker.service b/deb_old/skeleton/usr/lib/systemd/system/amcchecker.service new file mode 100644 index 0000000..4e72824 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amcchecker.service @@ -0,0 +1,15 @@ +[Unit] +Description=ARMA management console checker service +After=network.target + +[Service] +Type=simple +ExecStart=/usr/local/armaconsole/app/checker/checker +WorkingDirectory=/usr/local/armaconsole/app/checker +Restart=always +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target diff --git a/deb_old/skeleton/usr/lib/systemd/system/amccorrelator.service b/deb_old/skeleton/usr/lib/systemd/system/amccorrelator.service new file mode 100644 index 0000000..642c8e9 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amccorrelator.service @@ -0,0 +1,24 @@ +[Unit] +Description=ARMA management console correlator +Requires=elasticsearch.service +Requires=amcvector.service +Requires=amcstartconfigure.service +Requires=amcsetelkpass.service 
+After=network.target +After=elasticsearch.service +After=amcsetelkpass.service +After=amcvector.service + +[Service] +User=armaconsole +Group=www-data +WorkingDirectory=/usr/local/armaconsole/app/correlator/cmd/correlator +ExecStart=/usr/local/armaconsole/app/correlator/cmd/correlator/correlator -config /etc/armaconsole/correlator.json +Restart=on-failure +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target + diff --git a/deb_old/skeleton/usr/lib/systemd/system/amcgunicorn.service b/deb_old/skeleton/usr/lib/systemd/system/amcgunicorn.service new file mode 100644 index 0000000..4fb48cb --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amcgunicorn.service @@ -0,0 +1,31 @@ +[Unit] +Description=ARMA management console gunicorn daemon +Requires=amcstartconfigure.service +Requires=amcsetelkpass.service +After=network.target +After=postgresql.service +After=redis-server.service +After=amcstartconfigure.service +After=elasticsearch.service +After=amcsetelkpass.service +After=amccelery.service +After=amccorrelator.service +After=amccelerybeat.service + +[Service] +User=armaconsole +Group=www-data +WorkingDirectory=/usr/local/armaconsole/app +LogsDirectory=armaconsole/gunicorn +Environment=DJANGO_SETTINGS_MODULE=console.settings.prod +ExecStart=/usr/local/armaconsole/env/bin/gunicorn --bind 0.0.0.0:8000 --workers=3 --timeout 300 --access-logfile /var/log/armaconsole/gunicorn/access.log --error-logfile /var/log/armaconsole/gunicorn/error.log --log-file /var/log/armaconsole/gunicorn/gunicorn.log console.wsgi:application +ExecReload=/bin/kill -s HUP $MAINPID +KillMode=mixed +Restart=on-failure +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target + diff --git a/deb_old/skeleton/usr/lib/systemd/system/amclicense.service b/deb_old/skeleton/usr/lib/systemd/system/amclicense.service new file mode 100644 index 0000000..ab367e9 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amclicense.service @@ -0,0 +1,20 @@ +[Unit] +Description=AMC license daemon +After=network.target + +[Service] +User=armaconsole +Group=www-data +WorkingDirectory=/var/log/armaconsole +LogsDirectory=armaconsole +RuntimeDirectory=armaconsole +Type=simple +ExecStart=/usr/local/armaconsole/app/license/client --config /etc/armaconsole/license.yml +Restart=on-failure +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target + diff --git a/deb_old/skeleton/usr/lib/systemd/system/amcsetelkpass.service b/deb_old/skeleton/usr/lib/systemd/system/amcsetelkpass.service new file mode 100644 index 0000000..19a8116 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amcsetelkpass.service @@ -0,0 +1,12 @@ +[Unit] +Description=ARMA management console set elk password service +Requires=elasticsearch.service +After=network.target +After=elasticsearch.service + +[Service] +Type=oneshot +User=elasticsearch +Group=elasticsearch +WorkingDirectory=/usr/share/elasticsearch +ExecStart=/usr/local/armaconsole/setElkPassService.sh diff --git a/deb_old/skeleton/usr/lib/systemd/system/amcstartconfigure.service b/deb_old/skeleton/usr/lib/systemd/system/amcstartconfigure.service new file mode 100644 index 0000000..0e8a9be --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amcstartconfigure.service @@ -0,0 +1,12 @@ +[Unit] +Description=ARMA management console preconfiguration service +After=network.target + +[Service] +Type=oneshot +User=armaconsole +Group=www-data 
+WorkingDirectory=/usr/local/armaconsole/app +LogsDirectory=armaconsole +Environment=DJANGO_SETTINGS_MODULE=console.settings.prod +ExecStart=/usr/local/armaconsole/startConfigureService.sh diff --git a/deb_old/skeleton/usr/lib/systemd/system/amcvector.service b/deb_old/skeleton/usr/lib/systemd/system/amcvector.service new file mode 100644 index 0000000..16ca845 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/amcvector.service @@ -0,0 +1,24 @@ +[Unit] +Description=ARMA management console vector service +Requires=elasticsearch.service +Requires=amcsetelkpass.service +After=network.target +After=elasticsearch.service +After=amcsetelkpass.service + +[Service] +User=armaconsole +Group=www-data +ExecStartPre=/usr/bin/vector validate +ExecStart=/usr/bin/vector +ExecReload=/usr/bin/vector validate +ExecReload=/bin/kill -HUP $MAINPID +AmbientCapabilities=CAP_NET_BIND_SERVICE +EnvironmentFile=-/etc/armaconsole/env/vector.env +Restart=on-failure +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 + +[Install] +WantedBy=multi-user.target diff --git a/deb_old/skeleton/usr/lib/systemd/system/elasticsearch.service.d/armaconsole.elasticsearch.conf b/deb_old/skeleton/usr/lib/systemd/system/elasticsearch.service.d/armaconsole.elasticsearch.conf new file mode 100644 index 0000000..8c4def3 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/elasticsearch.service.d/armaconsole.elasticsearch.conf @@ -0,0 +1,6 @@ +[Service] +TimeoutStartSec=5min +Restart=on-failure +RestartSec=5s +StartLimitInterval=1h +StartLimitBurst=0 diff --git a/deb_old/skeleton/usr/lib/systemd/system/logstash.service.d/armaconsole.logstash.conf b/deb_old/skeleton/usr/lib/systemd/system/logstash.service.d/armaconsole.logstash.conf new file mode 100644 index 0000000..3112680 --- /dev/null +++ b/deb_old/skeleton/usr/lib/systemd/system/logstash.service.d/armaconsole.logstash.conf @@ -0,0 +1,7 @@ +[Service] +ExecStart= +ExecStart=/usr/share/logstash/bin/logstash "--config.reload.automatic" "--path.settings" "/etc/logstash" +StartLimitInterval=1h +StartLimitBurst=0 +#StartLimitBurst=2 +#TimeoutStopSec=2m diff --git a/deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_http.nginx b/deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_http.nginx new file mode 100644 index 0000000..57bb33c --- /dev/null +++ b/deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_http.nginx @@ -0,0 +1,71 @@ +upstream armaconsole { + server 127.0.0.1:8000; +} + +upstream elasticsearch { + server 127.0.0.1:9200; +} + +upstream checker { + server 127.0.0.1:9080; +} + +server { + listen 443 http2 ssl; + listen [::]:443 http2 ssl; + server_name _; + + ssl_certificate /etc/nginx/ssl/armaconsole/nginx-selfsigned.crt; + ssl_certificate_key /etc/nginx/ssl/armaconsole/nginx-selfsigned.key; + ssl_dhparam /etc/nginx/ssl/armaconsole/dhparam.pem; + + return 301 http://$host$request_uri; +} + +server { + listen 80 default_server; + listen [::]:80 default_server; + server_name _; + + access_log /var/log/armaconsole/nginx.access.log; + error_log /var/log/armaconsole/nginx.error.log; + + client_max_body_size 500M; + + location = /favicon.ico { access_log off; log_not_found off; } + location /static { + alias /var/www/armaconsole/public/static/; + } + + location /media { + alias /var/www/armaconsole/public/media; + } + + location /delk { + rewrite ^/delk/(.*) /$1 break; + proxy_pass http://elasticsearch; + } + + location / { + proxy_pass http://armaconsole; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host 
$host; + proxy_redirect off; + } + + location ~ ^/ru/api/endpoint/(.+)/keepalive/$ { + proxy_pass http://armaconsole; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $host; + proxy_redirect off; + access_log off; + log_not_found off; + } + + error_page 502 /state/page; + + location /state { + rewrite ^/state/(.*) /$1 break; + proxy_pass http://checker; + } +} diff --git a/deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_https.nginx b/deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_https.nginx new file mode 100644 index 0000000..87d1309 --- /dev/null +++ b/deb_old/skeleton/usr/local/armaconsole/nginx/armaconsole_https.nginx @@ -0,0 +1,72 @@ +upstream armaconsole { + server 127.0.0.1:8000; +} + +upstream elasticsearch { + server 127.0.0.1:9200; +} + +upstream checker { + server 127.0.0.1:9080; +} + + +server { + listen 80 default_server; + listen [::]:80 default_server; + server_name _; + return 301 https://$host$request_uri; +} + + +server { + listen 443 http2 ssl; + listen [::]:443 http2 ssl; + server_name _; + + ssl_certificate /etc/nginx/ssl/armaconsole/nginx-selfsigned.crt; + ssl_certificate_key /etc/nginx/ssl/armaconsole/nginx-selfsigned.key; + ssl_dhparam /etc/nginx/ssl/armaconsole/dhparam.pem; + + access_log /var/log/armaconsole/nginx.access.log; + error_log /var/log/armaconsole/nginx.error.log; + + client_max_body_size 500M; + + location = /favicon.ico { access_log off; log_not_found off; } + location /static { + alias /var/www/armaconsole/public/static/; + } + + location /media { + alias /var/www/armaconsole/public/media/; + } + + location /delk { + rewrite ^/delk/(.*) /$1 break; + proxy_pass http://elasticsearch; + } + + location / { + proxy_pass http://armaconsole; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $host; + proxy_redirect off; + } + + location ~ ^/ru/api/endpoint/(.+)/keepalive/$ { + proxy_pass http://armaconsole; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $host; + proxy_redirect off; + access_log off; + log_not_found off; + } + + error_page 502 /state/page; + + location /state { + rewrite ^/state/(.*) /$1 break; + proxy_pass http://checker; + } +} diff --git a/deb_old/skeleton/usr/local/armaconsole/setElkPassService.sh b/deb_old/skeleton/usr/local/armaconsole/setElkPassService.sh new file mode 100644 index 0000000..f478039 --- /dev/null +++ b/deb_old/skeleton/usr/local/armaconsole/setElkPassService.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +set -e # exit on any error +sleep 15 +printf "y\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\nchangeme\n" | /usr/share/elasticsearch/bin/elasticsearch-setup-passwords interactive || true diff --git a/deb_old/skeleton/usr/local/armaconsole/startConfigureService.sh b/deb_old/skeleton/usr/local/armaconsole/startConfigureService.sh new file mode 100644 index 0000000..ecfb735 --- /dev/null +++ b/deb_old/skeleton/usr/local/armaconsole/startConfigureService.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -e # exit on any error + +export DJANGO_SETTINGS_MODULE=${DJANGO_SETTINGS_MODULE:-console.settings.prod} + +. 
/usr/local/armaconsole/env/bin/activate
+
+# TODO: all from compilemessages to makemigrations is wrong here
+# python manage.py compilemessages --locale=en_US ## remove
+# python manage.py compilemessages --locale=ru_RU ## remove
+ python manage.py collectstatic --noinput ## remove
+ python manage.py makemigrations ## remove
+
+ python manage.py migrate
+ python3 manage.py load_rules
+# python manage.py collectstatic --clear --noinput ## remove
+# python manage.py compilemessages -l ru -l en ## remove
diff --git a/deb_old/skeleton/usr/local/sbin/amcpsh b/deb_old/skeleton/usr/local/sbin/amcpsh
new file mode 100644
index 0000000..adf5807
--- /dev/null
+++ b/deb_old/skeleton/usr/local/sbin/amcpsh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+(cd /usr/local/armaconsole/ && . env/bin/activate && cd app && DJANGO_SETTINGS_MODULE=console.settings.prod python manage.py shell)
diff --git a/deb_old/skeleton/var/log/armaconsole/bad_input.log b/deb_old/skeleton/var/log/armaconsole/bad_input.log
new file mode 100644
index 0000000..e69de29
diff --git a/deb_old/skeleton/var/www/armaconsole/public/.gitkeep b/deb_old/skeleton/var/www/armaconsole/public/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/deb_old/skeleton/var/www/armaconsole/public/media/.gitkeep b/deb_old/skeleton/var/www/armaconsole/public/media/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/devices/__init__.py b/devices/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/admin.py b/devices/admin.py
new file mode 100644
index 0000000..ab38921
--- /dev/null
+++ b/devices/admin.py
@@ -0,0 +1,12 @@
+from django.contrib import admin
+
+from devices.models.device import Device, DeviceGroup
+from devices.models.endpoint_device import EndpointModel
+from devices.models.firewall import ArmaIndustrialFirewall
+from devices.models.sensor import ArmaSensor
+
+admin.site.register(Device)
+admin.site.register(DeviceGroup)
+admin.site.register(ArmaIndustrialFirewall)
+admin.site.register(EndpointModel)
+admin.site.register(ArmaSensor)
diff --git a/devices/apps.py b/devices/apps.py
new file mode 100644
index 0000000..d43cc4b
--- /dev/null
+++ b/devices/apps.py
@@ -0,0 +1,5 @@
+from django.apps import AppConfig
+
+
+class DevicesConfig(AppConfig):
+    name = 'devices'
diff --git a/devices/constants.py b/devices/constants.py
new file mode 100644
index 0000000..7399038
--- /dev/null
+++ b/devices/constants.py
@@ -0,0 +1,7 @@
+from django.conf import settings
+
+# NOTE: vector reuses the legacy LOGSTASH_CONFIG_DIR setting for its config directory
+VECTOR_CONFIG_DIR = getattr(settings, 'LOGSTASH_CONFIG_DIR')
+
+MINIMAL_AIF_VERSION = getattr(settings, 'MINIMAL_COMPATIBLE_AIF_VERSION')
+FIREWALL_TIMEOUT = 30
+CACHE_TIMEOUT = getattr(settings, 'REDIS_CACHE_TIMEOUT', 120)
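devices/enums.py in the next hunk defines Django choices enums for the device models. For reference, a small sketch of how they behave; the queryset is illustrative:

    from devices.enums import DeviceType
    from devices.models.device import Device

    # enum members are str subclasses and compare equal to their stored value
    assert DeviceType.SENSOR == "sensor"
    assert str(DeviceType.SENSOR.label) == "ARMA Sensor"   # the gettext_lazy label

    sensors = Device.objects.filter(type=DeviceType.SENSOR)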
diff --git a/devices/enums.py b/devices/enums.py
new file mode 100644
index 0000000..99b706c
--- /dev/null
+++ b/devices/enums.py
@@ -0,0 +1,31 @@
+from django.db import models
+from django.utils.translation import gettext_lazy, pgettext_lazy
+
+
+class DeviceType(models.TextChoices):
+    FIREWALL = "firewall", gettext_lazy("ARMA IF")
+    ENDPOINT = "endpoint", gettext_lazy("ARMA Endpoint")
+    SENSOR = "sensor", gettext_lazy("ARMA Sensor")
+
+
+class AdjustDatetime(models.IntegerChoices):
+    LOCAL = 1, gettext_lazy("Local")
+    NOCHANGE = 2, gettext_lazy("Without changes")
+
+
+class ArmaIndustrialFirewallStatus(models.TextChoices):
+    online = 'online', gettext_lazy('Online')
+    offline = 'offline', gettext_lazy('Offline')
+    unauthorized = 'unauthorized', gettext_lazy('Unauthorized')
+    error = 'error', gettext_lazy('Error')
+
+
+class EndpointRotationType(models.IntegerChoices):
+    TIME = 1, pgettext_lazy('Rotation type', 'Time')
+    SIZE = 2, pgettext_lazy('Rotation type', 'Size')
+
+
+class EndpointRotationTime(models.IntegerChoices):
+    DAY = 1, pgettext_lazy('Rotation time settings', 'Day')
+    WEEK = 2, pgettext_lazy('Rotation time settings', 'Week')
+    MONTH = 3, pgettext_lazy('Rotation time settings', 'Month')
diff --git a/devices/exceptions.py b/devices/exceptions.py
new file mode 100644
index 0000000..340bae4
--- /dev/null
+++ b/devices/exceptions.py
@@ -0,0 +1,6 @@
+from rest_framework import status
+from rest_framework.exceptions import APIException
+
+
+class EndpointDeviceException(APIException):
+    status_code = status.HTTP_400_BAD_REQUEST
diff --git a/devices/fields.py b/devices/fields.py
new file mode 100644
index 0000000..cd18a2b
--- /dev/null
+++ b/devices/fields.py
@@ -0,0 +1,31 @@
+from rest_framework import serializers
+
+from devices.models.device import Device, DeviceGroup
+
+
+class DeviceRelatedField(serializers.PrimaryKeyRelatedField):
+
+    def to_representation(self, device: Device):
+        return IDNameDeviceSerializer(device).data
+
+
+class IDNameDeviceSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = Device
+        fields = ['id', 'name']
+
+
+class DeviceGroupRelatedField(serializers.PrimaryKeyRelatedField):
+
+    def to_representation(self, group: DeviceGroup):
+        # annotation fixed: this field represents a DeviceGroup, not a Device
+        return IDNameDeviceGroupSerializer(group).data
+
+    def use_pk_only_optimization(self):
+        return False
+
+
+class IDNameDeviceGroupSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = DeviceGroup
+        fields = ['id', 'name']
diff --git a/devices/filters.py b/devices/filters.py
new file mode 100644
index 0000000..1151660
--- /dev/null
+++ b/devices/filters.py
@@ -0,0 +1,10 @@
+from django_filters import rest_framework as filters
+
+from devices.models.device import Device
+
+
+class DeviceFilter(filters.FilterSet):
+
+    class Meta:
+        model = Device
+        fields = '__all__'
diff --git a/devices/migrations/__init__.py b/devices/migrations/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/models/__init__.py b/devices/models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/models/device.py b/devices/models/device.py
new file mode 100644
index 0000000..344c3b1
--- /dev/null
+++ b/devices/models/device.py
@@ -0,0 +1,45 @@
+from django.core.validators import MinValueValidator, MaxValueValidator
+from django.db import models
+from django.utils.translation import gettext_lazy
+
+from console.models import NameDescriptionModel, UniqueNameDescriptionModel
+from devices.enums import DeviceType, AdjustDatetime
+
+
+class Device(NameDescriptionModel):
+    type = models.CharField(choices=DeviceType.choices,
+                            max_length=8)
+    ip = models.GenericIPAddressField(unique=True,
+                                      verbose_name=gettext_lazy('IP'),
+                                      help_text=gettext_lazy('Device IP address'))
+    port = models.IntegerField(verbose_name=gettext_lazy('Port'),
+                               help_text=gettext_lazy("Input port number (UDP)"),
+                               unique=True,
+                               error_messages={
+                                   'unique': gettext_lazy('This port is already in use')
+                               },
+                               validators=(MinValueValidator(1500),
+                                           MaxValueValidator(65535)))
+    adjust_datetime = models.IntegerField(choices=AdjustDatetime.choices,
+                                          default=AdjustDatetime.LOCAL,
+                                          verbose_name=gettext_lazy("Adjust datetime"))
+    updated = models.DateTimeField(
+        gettext_lazy('Updated'), help_text=gettext_lazy('Date and time of the last update'),
+        auto_now=True
+    )
+    group = models.ForeignKey('DeviceGroup', related_name='devices',
on_delete=models.SET_NULL, null=True, blank=True) + + +class DeviceGroup(UniqueNameDescriptionModel): + pass + + +class DeviceConnectedMixin(models.Model): + """ Add connection to device by name. + + Device can be ARMAIF or Endpoint + """ + sensor = models.CharField(null=True, blank=True, max_length=128, verbose_name=gettext_lazy("Sensor name")) # todo rename later + + class Meta: + abstract = True diff --git a/devices/models/endpoint_device.py b/devices/models/endpoint_device.py new file mode 100644 index 0000000..2fe72ca --- /dev/null +++ b/devices/models/endpoint_device.py @@ -0,0 +1,119 @@ +from django.core.validators import MinValueValidator, MaxValueValidator +from django.db import models +from django.utils.translation import gettext_lazy + +from devices.enums import EndpointRotationType, EndpointRotationTime +from devices.models.device import Device + + +def _get_default_white_list(): + """Django raise a warning if this list is set as list""" + return [ + '%HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\SystemRoot%', + '%HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\ProgramFilesDir%' + ] + + +class EndpointModel(Device): + """Endpoint device model""" + + # White List + whitelist_enabled = models.BooleanField(gettext_lazy('Enable white list'), default=False) + whitelist_admin = models.BooleanField(gettext_lazy('Local admin ignores white list'), default=True) + white_list_paths = models.JSONField( + gettext_lazy('White List paths'), + max_length=1024, + help_text=gettext_lazy('Paths for white list'), + default=_get_default_white_list, null=True, blank=True + ) + + # Integrity control + integrity_control_enabled = models.BooleanField(gettext_lazy('Enable integrity control'), default=False) + integrity_control_timeout = models.IntegerField( + gettext_lazy('Event creation timeout'), + help_text=gettext_lazy('How often we can get integrity control events. 
Value in seconds'), + validators=[MinValueValidator(0), MaxValueValidator(60 * 60 * 24)], # 60*60*24 = 1 day + default=3 + ) + scan_paths = models.JSONField( + gettext_lazy('Scan paths for integrity control'), + max_length=1024, + help_text=gettext_lazy('Folder for integrity control'), + null=True, blank=True + ) + + # Antivirus + antivirus_enabled = models.BooleanField( + gettext_lazy('Enable antivirus'), + help_text=gettext_lazy('Detailed Antivirus control should be configured on host machine'), + default=False) + antivirus_remove_infected_files = models.BooleanField(gettext_lazy('Remove infected files'), default=False) + antivirus_paths = models.JSONField( + gettext_lazy('Antivirus paths'), + max_length=1024, + help_text=gettext_lazy('Path for scanning'), + default=list, null=True, blank=True + ) + antivirus_start_scan = models.BooleanField(verbose_name=gettext_lazy('Initiate antivirus scan'), default=False) + antivirus_update_db = models.BooleanField(verbose_name=gettext_lazy("Initiate antivirus db update"), default=True) + + # Access device USB, CD/DVD + device_control_enabled = models.BooleanField(gettext_lazy('Enable device control'), default=False) + prohibit_cd_access = models.BooleanField( + gettext_lazy('Prohibit CD/DVD access'), + help_text=gettext_lazy('To apply this change you need to restart host machine locally'), + default=False) + usb_control_enabled = models.BooleanField( + verbose_name=gettext_lazy('Enable USB control'), + help_text=gettext_lazy('To apply this change you need to restart host machine locally'), + default=False) + + # Service data + settings_changed = models.BooleanField( + gettext_lazy('Flag to mark if Endpoint logs has been changed'), + help_text=gettext_lazy('Check if you want to upload settings to Endpoint'), + default=True, + ) + incorrect_settings = models.BooleanField( + gettext_lazy('Flag to mark if config in current console has errors'), + help_text=gettext_lazy('Checked if config in console for Endpoint contains errors'), + default=False, + ) + config_errors = models.JSONField( + gettext_lazy('Endpoint config errors'), + help_text=gettext_lazy('Full list of Endpoint config errors'), + null=True, + blank=True + ) + request_config = models.BooleanField( + gettext_lazy('Flag to show if user wants to upload config from endpoint'), + help_text=gettext_lazy('If set to True, uploads config from endpoint, saves it'), + default=True, + ) + + is_requested_config_correct = models.BooleanField( + gettext_lazy('Flag to show if config, downloaded from endpoint, has correct format'), + help_text=gettext_lazy('If set to False, means that last attempt to download and set up config from Endpoint ' + 'has failed'), + default=True, + ) + # Rotation fields + event_rotation_type = models.IntegerField(choices=EndpointRotationType.choices, + verbose_name=gettext_lazy("Event rotation type"), + help_text=gettext_lazy("Select the rotation type"), + default=1) + + event_rotation_size = models.IntegerField(verbose_name=gettext_lazy("Event rotation size"), + help_text=gettext_lazy("Select the rotation size in KB"), + default=100, + validators=[MinValueValidator(100)]) + + event_rotation_period = models.IntegerField(choices=EndpointRotationTime.choices, + verbose_name=gettext_lazy("Event rotation period"), + help_text=gettext_lazy("Select the rotation period"), + default=1) + + event_rotation_time = models.CharField(max_length=1024, + verbose_name=gettext_lazy("Event rotation time"), + help_text=gettext_lazy("Select the event rotation time"), + default='00:00:00', null=True, 
diff --git a/devices/models/firewall.py b/devices/models/firewall.py
new file mode 100644
index 0000000..df6e153
--- /dev/null
+++ b/devices/models/firewall.py
@@ -0,0 +1,62 @@
+from django.utils.translation import gettext_lazy
+
+from assets.models.assets import Asset
+from devices.models.device import Device
+from django.db import models
+
+
+class ArmaIndustrialFirewall(Device):
+    key = models.CharField(max_length=256,
+                           blank=False,
+                           default='',
+                           verbose_name=gettext_lazy('Key'),
+                           help_text=gettext_lazy('API key for device'))
+    secret = models.CharField(max_length=256,
+                              blank=False,
+                              default='',
+                              verbose_name=gettext_lazy('Secret'),
+                              help_text=gettext_lazy('Secret value for API key'))
+    comment = models.CharField(max_length=256,
+                               blank=True,
+                               default='',
+                               verbose_name=gettext_lazy('Comment'),
+                               help_text=gettext_lazy('Additional notes about device'))
+    website = models.CharField(max_length=256,
+                               help_text=gettext_lazy('ArmaIndustrialFirewall website'),
+                               default=gettext_lazy('Unknown'))
+    version = models.CharField(max_length=128,
+                               help_text=gettext_lazy('ArmaIndustrialFirewall version'),
+                               default=gettext_lazy('Unknown'))
+    remote_name = models.CharField(max_length=128,
+                                   help_text=gettext_lazy('ArmaIndustrialFirewall name'),
+                                   default=gettext_lazy('Unknown'))
+    identification_number = models.CharField(max_length=128,
+                                             help_text=gettext_lazy('ArmaIndustrialFirewall ID'),
+                                             default=gettext_lazy('Unknown'))
+    hash_number = models.CharField(max_length=256,
+                                   help_text=gettext_lazy('ArmaIndustrialFirewall hash'),
+                                   default=gettext_lazy('Unknown'))
+    flavour = models.CharField(max_length=128,
+                               help_text=gettext_lazy('ArmaIndustrialFirewall flavour'),
+                               default=gettext_lazy('Unknown'))
+    email = models.CharField(max_length=256,
+                             help_text=gettext_lazy('ArmaIndustrialFirewall email'),
+                             default=gettext_lazy('Unknown'))
+    copyright_years = models.CharField(max_length=128,
+                                       help_text=gettext_lazy('ArmaIndustrialFirewall copyright years'),
+                                       default=gettext_lazy('Unknown'))
+    copyright_url = models.CharField(max_length=256,
+                                     help_text=gettext_lazy('ArmaIndustrialFirewall copyright url'),
+                                     default=gettext_lazy('Unknown'))
+    copyright_owner = models.CharField(max_length=128,
+                                       help_text=gettext_lazy('ArmaIndustrialFirewall copyright owner'),
+                                       default=gettext_lazy('Unknown'))
+    architecture = models.CharField(max_length=128,
+                                    help_text=gettext_lazy('ArmaIndustrialFirewall architecture'),
+                                    default=gettext_lazy('Unknown'))
+    abi = models.CharField(max_length=128,
+                           help_text=gettext_lazy('ArmaIndustrialFirewall abi'),
+                           default=gettext_lazy('Unknown'))
+
+    def __str__(self):
+        return f"{self.name} - {self.type}"
diff --git a/devices/models/sensor.py b/devices/models/sensor.py
new file mode 100644
index 0000000..e26a699
--- /dev/null
+++ b/devices/models/sensor.py
@@ -0,0 +1,20 @@
+import uuid
+
+from django.db import models
+from django.utils import timezone
+
+from devices.models.device import Device
+
+
+class ArmaSensor(Device):  # todo need to know all fields
+    uuid = models.UUIDField(default=uuid.uuid4, unique=True)
+    synchronization = models.BooleanField(default=False)
+    authorization_key = models.CharField(max_length=255, blank=True)
+
+    span_interface = models.CharField(max_length=255, blank=True)
+    control_interface = models.JSONField(default=dict)
+    signature_analysis = models.BooleanField(default=False)
+    protocols_analysis = models.BooleanField(default=False)
+
+    def __str__(self):
+        return str(self.uuid)
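A quick usage sketch for the two models above (values invented; `name`, `ip` and `port` come from the `Device` base model defined earlier in the patch):

    from devices.models.firewall import ArmaIndustrialFirewall

    fw = ArmaIndustrialFirewall.objects.create(
        name='aif-lab',
        ip='192.0.2.10',      # TEST-NET address, placeholder only
        port=443,
        key='API-KEY',
        secret='API-SECRET',
    )
    # The read-only product fields keep their 'Unknown' defaults until
    # update_firewall_info() (devices/tasks/firewall.py below) fills them in.
    print(fw)  # "aif-lab - <device type>"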
diff --git a/devices/serializers/__init__.py b/devices/serializers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/serializers/device.py b/devices/serializers/device.py
new file mode 100644
index 0000000..f850294
--- /dev/null
+++ b/devices/serializers/device.py
@@ -0,0 +1,45 @@
+from rest_framework import serializers
+
+from devices.enums import DeviceType
+from devices.fields import DeviceRelatedField, DeviceGroupRelatedField
+from devices.models.device import Device, DeviceGroup
+from devices.services.endpoint.endpoint_get_status import EndpointStatusService
+from devices.services.firewall import FirewallService
+from devices.services.sensor.service import SensorService
+
+
+class DeviceSerializer(serializers.ModelSerializer):
+    status = serializers.SerializerMethodField()
+    group = DeviceGroupRelatedField(queryset=DeviceGroup.objects.all(), default=None)
+
+    class Meta:
+        model = Device
+        fields = ['id', 'name', 'description', 'type', 'ip', 'port', 'updated', 'status', 'group']
+
+    def get_status(self, device: Device):
+        if device.type == DeviceType.FIREWALL:
+            return FirewallService(device).check_status()
+
+        elif device.type == DeviceType.ENDPOINT:
+            return EndpointStatusService(device).get_status()
+
+        elif device.type == DeviceType.SENSOR:
+            return SensorService(device).get_status()
+
+
+class DeviceGroupSerializer(serializers.ModelSerializer):
+    devices = DeviceRelatedField(many=True, queryset=Device.objects.all(), default=[])
+
+    class Meta:
+        model = DeviceGroup
+        fields = ['id', 'name', 'description', 'devices']
+
+
+class ExportToCSVDeviceSerializer(serializers.ModelSerializer):
+    group = serializers.ReadOnlyField(source='group.name', allow_null=True)
+
+    class Meta:
+        model = Device
+        # All fields must be listed explicitly: this list is passed to csv_export
+        # to build the CSV columns
+        fields = ['id', 'type', 'name', 'description', 'ip', 'port', 'group', 'updated']
diff --git a/devices/serializers/endpoint_serializers.py b/devices/serializers/endpoint_serializers.py
new file mode 100644
index 0000000..1c7c066
--- /dev/null
+++ b/devices/serializers/endpoint_serializers.py
@@ -0,0 +1,56 @@
+from rest_framework import serializers
+
+from devices.fields import DeviceGroupRelatedField
+from devices.models.device import DeviceGroup
+from devices.models.endpoint_device import EndpointModel
+from devices.services.endpoint.endpoint_get_status import EndpointStatusService
+
+
+class EndpointDeviceSerializersAll(serializers.ModelSerializer):
+    """Serializer to create/update and display endpoint data."""
+
+    status = serializers.SerializerMethodField()
+    group = DeviceGroupRelatedField(queryset=DeviceGroup.objects.all(), default=None, allow_null=True)
+
+    def get_status(self, endpoint: EndpointModel) -> dict:
+        return EndpointStatusService(endpoint).get_status()
+
+    def validate_scan_paths(self, value):
+        if len(value) != len(set(value)):
+            raise serializers.ValidationError('Scan paths must be unique')
+        return value
+
+    class Meta:
+        model = EndpointModel
+        fields = [
+            'id', 'name', 'description', 'ip', 'port', 'adjust_datetime', 'updated', 'status',
+            'settings_changed', 'incorrect_settings', 'config_errors', 'request_config', 'group',
+            # white list
+            'whitelist_enabled', 'whitelist_admin', 'white_list_paths',
+            # Integrity control
+            'integrity_control_enabled', 'integrity_control_timeout', 'scan_paths',
+            # antivirus
+            'antivirus_enabled', 'antivirus_remove_infected_files', 'antivirus_paths', 'antivirus_start_scan',
+            'antivirus_update_db',
+            # usb/dvd
+            'device_control_enabled', 'prohibit_cd_access', 'usb_control_enabled',
+            # rotation
+            'event_rotation_type', 'event_rotation_size', 'event_rotation_period', 'event_rotation_time',
+
+        ]
+
+
+class EndpointConfigSerializer(serializers.ModelSerializer):
+    """Serializer to update data from the endpoint."""
+
+    class Meta:
+        model = EndpointModel
+        fields = [
+            'device_control_enabled', 'usb_control_enabled',
+            'integrity_control_enabled', 'scan_paths', 'integrity_control_timeout',
+            'whitelist_enabled', 'whitelist_admin', 'white_list_paths',
+            'ip',
+            'antivirus_enabled', 'antivirus_paths', 'antivirus_remove_infected_files', 'antivirus_update_db',
+            "updated",
+            'event_rotation_type', 'event_rotation_size', 'event_rotation_period', 'event_rotation_time',
+        ]
diff --git a/devices/serializers/firewall.py b/devices/serializers/firewall.py
new file mode 100644
index 0000000..976b805
--- /dev/null
+++ b/devices/serializers/firewall.py
@@ -0,0 +1,29 @@
+from rest_framework import serializers
+
+from devices.fields import DeviceGroupRelatedField
+from devices.models.device import DeviceGroup
+from devices.models.firewall import ArmaIndustrialFirewall
+
+
+class FirewallSerializer(serializers.ModelSerializer):
+    group = DeviceGroupRelatedField(queryset=DeviceGroup.objects.all(), default=None, allow_null=True)
+
+    class Meta:
+        model = ArmaIndustrialFirewall
+        exclude = ['type']
+        read_only_fields = ['website', 'version', 'remote_name', 'identification_number', 'hash_number', 'flavour',
+                            'email', 'copyright_years', 'copyright_url', 'copyright_owner', 'architecture', 'abi']
+
+
+class CheckFirewallConnectionSerializer(serializers.Serializer):
+    ip = serializers.IPAddressField()
+    key = serializers.CharField(default='')
+    secret = serializers.CharField(default='')
+
+
+class AifUploadConfigSerializer(serializers.Serializer):
+    conffile = serializers.FileField()
+
+
+class AifUploadIdsRulesetsSerializer(serializers.Serializer):
+    rulesets = serializers.FileField()
diff --git a/devices/serializers/sensor_serializers.py b/devices/serializers/sensor_serializers.py
new file mode 100644
index 0000000..197d52d
--- /dev/null
+++ b/devices/serializers/sensor_serializers.py
@@ -0,0 +1,48 @@
+import logging
+
+from rest_framework import serializers
+from rest_framework.exceptions import ValidationError
+
+from core.validators import ValidateFileExtension
+from devices.fields import DeviceGroupRelatedField
+from devices.models.device import DeviceGroup
+from devices.models.sensor import ArmaSensor
+from devices.services.sensor.service import SensorService
+
+_log = logging.getLogger(__name__)
+
+
+class CreateSensorSerializer(serializers.ModelSerializer):
+    class Meta:
+        model = ArmaSensor
+        fields = ['name', 'ip', 'port']
+
+
+class SensorSerializer(serializers.ModelSerializer):
+    group = DeviceGroupRelatedField(queryset=DeviceGroup.objects.all(), default=None, allow_null=True)
+    status = serializers.SerializerMethodField()
+
+    class Meta:
+        model = ArmaSensor
+        fields = '__all__'
+
+    def get_status(self, sensor: ArmaSensor) -> dict:
+        return SensorService(sensor).get_status()
+
+    def validate_span_interface(self, span_interface):
+        actual_interfaces = SensorService(sensor=self.instance).get_interfaces()
+        interfaces_names = [interface['interface'] for interface in actual_interfaces['data']]
+
+        if span_interface in interfaces_names:
+            return span_interface
+        raise ValidationError('Select an existing SPAN interface')
+
+
+class ZeekProtocolsDisableSerializer(serializers.Serializer):
+    """Serializer validating the uploaded protocol list."""
+    disable_protocols = serializers.ListField(child=serializers.CharField(), default=[])
+
+
+class ZeekSettingsUpdateSerializer(serializers.Serializer):
+    """Serializer validating an uploaded local.zeek file."""
+    file = serializers.FileField(validators=[ValidateFileExtension(allowed_extensions=[".zeek"])])
diff --git a/devices/services/__init__.py b/devices/services/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/services/endpoint/__init__.py b/devices/services/endpoint/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/services/endpoint/endpoint_antivirus.py b/devices/services/endpoint/endpoint_antivirus.py
new file mode 100644
index 0000000..e53572d
--- /dev/null
+++ b/devices/services/endpoint/endpoint_antivirus.py
@@ -0,0 +1,36 @@
+import logging
+import os
+
+from rest_framework.generics import get_object_or_404
+
+from core.utils import httpFileResponse
+from devices.exceptions import EndpointDeviceException
+from devices.models.endpoint_device import EndpointModel
+from storage.models import DataStorage
+
+_log = logging.getLogger()
+
+
+class EndpointAntivirusService:
+    """Service to manage the endpoint antivirus."""
+
+    def __init__(self, pk: int) -> None:
+        self.endpoint = get_object_or_404(EndpointModel, pk=pk)
+
+    def update(self):
+        if not self.endpoint.antivirus_update_db:
+            raise EndpointDeviceException({'status': 'error', 'detail': 'Endpoint antivirus update is not required'})
+        try:
+            storage_file = DataStorage.objects.get(type=DataStorage.Type.CLAMAV)
+        except DataStorage.DoesNotExist:
+            raise EndpointDeviceException({'status': 'error', 'message': 'No database selected'})
+        # Compare against the instance's format field (attribute access via `.Format`
+        # would hit the enum class itself and always compare unequal)
+        is_zip = storage_file.format == DataStorage.Format.ZIP
+        storage_path = storage_file.get_full_path()
+        self.endpoint.antivirus_update_db = False
+        self.endpoint.save()
+        return httpFileResponse(open(storage_path, 'rb'), "antivirus_update.zip", is_zip)
+
+    @staticmethod
+    def all_update(update):
+        EndpointModel.objects.all().update(antivirus_update_db=update)
+        _log.info('Set flag update antivirus for all endpoints')
diff --git a/devices/services/endpoint/endpoint_get_status.py b/devices/services/endpoint/endpoint_get_status.py
new file mode 100644
index 0000000..20782ed
--- /dev/null
+++ b/devices/services/endpoint/endpoint_get_status.py
@@ -0,0 +1,43 @@
+from devices.models.endpoint_device import EndpointModel
+from devices.services.endpoint.endpoint_redis import RedisInterface
+
+
+def get_status(request, pk: int) -> dict:
+    """Function to respond with current Endpoint states.
+    Current response fields are:
+        endpoint_config_errors,
+        is_requested_config_correct
+    :param request: request instance
+    :param pk: Corresponding Endpoint pk
+    :return: JSON response with endpoint configuration errors
+    """
+    try:
+        endpoint = EndpointModel.objects.get(pk=pk)
+    except EndpointModel.DoesNotExist:
+        return {'status': 'error', 'reason': 'no such endpoint record'}
+    return {
+        'endpoint_config_errors': endpoint.config_errors,
+        'is_requested_config_correct': endpoint.is_requested_config_correct
+    }
+
+
+class EndpointStatusService:
+    """Service returning the endpoint connection status."""
+
+    def __init__(self, endpoint: EndpointModel):
+        if isinstance(endpoint, EndpointModel):
+            self.endpoint = endpoint
+        else:
+            self.endpoint = EndpointModel.objects.get(pk=endpoint.pk)
+        self.redis = RedisInterface()
+
+    def get_status(self) -> dict:
+        endpoint_status = {}
+        if self.redis.get_keepalive(self.endpoint.pk):
+            if self.endpoint.config_errors:
+                endpoint_status['status'] = 'config_errors'
+                endpoint_status['config_errors'] = self.endpoint.incorrect_settings
+            else:
+                endpoint_status['status'] = 'online'
+        else:
+            endpoint_status['status'] = 'offline'
+        return endpoint_status
diff --git a/devices/services/endpoint/endpoint_redis.py b/devices/services/endpoint/endpoint_redis.py
new file mode 100644
index 0000000..1a41a37
--- /dev/null
+++ b/devices/services/endpoint/endpoint_redis.py
@@ -0,0 +1,27 @@
+import datetime
+
+import redis
+from django.conf import settings
+
+
+class RedisInterface:
+    """Helper class to set and get keepalive timestamps for the Endpoint model."""
+
+    endpoint_field_name = "ENDPOINT_STATUS_{}"
+    endpoint_timeout = 120
+
+    def __init__(self):
+        self.redis_instance = redis.StrictRedis(host=getattr(settings, 'REDIS_HOST', 'redis'),
+                                                port=getattr(settings, 'REDIS_PORT', 6379))
+
+    def get_keepalive(self, pk):
+        timestamp = self.redis_instance.get(self.endpoint_field_name.format(pk))
+
+        if timestamp is None:
+            return False
+
+        delta = datetime.datetime.now() - datetime.datetime.fromtimestamp(float(timestamp.decode("utf-8")))
+        # total_seconds() is used deliberately: timedelta.seconds wraps around at one day
+        return delta.total_seconds() < self.endpoint_timeout
+
+    def set_keepalive(self, pk):
+        self.redis_instance.set(self.endpoint_field_name.format(pk), str(datetime.datetime.now().timestamp()))
diff --git a/devices/services/endpoint/endpoint_services.py b/devices/services/endpoint/endpoint_services.py
new file mode 100644
index 0000000..b4644f8
--- /dev/null
+++ b/devices/services/endpoint/endpoint_services.py
@@ -0,0 +1,230 @@
+import base64
+import json
+import logging
+
+from django.shortcuts import get_object_or_404
+from rest_framework.exceptions import APIException
+
+from devices.models.endpoint_device import EndpointModel
+from devices.serializers.endpoint_serializers import EndpointConfigSerializer
+from devices.services.endpoint.endpoint_get_status import EndpointStatusService
+from devices.services.endpoint.endpoint_redis import RedisInterface
+from devices.services.vector import VectorService
+
+_log = logging.getLogger(__name__)
+
+
+class EndpointException(APIException):
+    status_code = 400
+    default_detail = 'Cannot create endpoint.'
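RedisInterface and EndpointStatusService together implement a soft keepalive: the agent's periodic request stamps a timestamp in Redis, and a status read only reports 'online' while that stamp is fresher than endpoint_timeout. A minimal round-trip sketch (assumes a saved EndpointModel with pk=1, a reachable Redis, and no stored config errors):

    from devices.models.endpoint_device import EndpointModel
    from devices.services.endpoint.endpoint_get_status import EndpointStatusService
    from devices.services.endpoint.endpoint_redis import RedisInterface

    endpoint = EndpointModel.objects.get(pk=1)

    RedisInterface().set_keepalive(endpoint.pk)           # what the keepalive handler does
    print(EndpointStatusService(endpoint).get_status())   # {'status': 'online'}

    # 120 seconds (endpoint_timeout) after the last keepalive, the same
    # call yields {'status': 'offline'}.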
+
+
+class EndpointManagementService:
+    """Service to manage the endpoint logger config."""
+
+    def __init__(self, instance: EndpointModel) -> None:
+        self.instance = instance
+        self._log_service = VectorService(instance)
+
+    def create(self):
+        _log.debug('Create endpoint logger config')
+        try:
+            self._log_service.update_config()
+        except Exception as err:
+            _log.error(f'Cannot create log file: {err}')
+            raise EndpointException(str(err))
+
+    def update(self):
+        _log.debug('Update endpoint logger config')
+        try:
+            self._log_service.update_config()
+        except Exception as err:
+            _log.error(f'Cannot update logger config: {err}')
+            raise EndpointException(str(err))
+
+    def destroy(self):
+        _log.debug('Destroy endpoint logger config')
+        try:
+            self._log_service.delete_config()
+        except Exception as err:
+            _log.error(f'Cannot delete logger config: {err}')
+            raise EndpointException(str(err))
+
+
+class EndpointKepAliveService:
+
+    def __init__(self, pk: int, data) -> None:
+        self.endpoint = EndpointModel.objects.get(pk=pk)
+        self.data = json.loads(data.decode('utf-8'))
+
+    def get_response(self) -> dict:
+        redis_interface = RedisInterface()
+        redis_interface.set_keepalive(self.endpoint.pk)
+
+        status = self.data['status']
+
+        if status == 'error':
+            self.endpoint.config_errors = self.data['errors']
+            self.endpoint.incorrect_settings = True
+            self.endpoint.save()
+
+        if self.endpoint.settings_changed and status == 'ok':
+            self.endpoint.settings_changed = False
+            self.endpoint.incorrect_settings = False
+            self.endpoint.config_errors = None
+            self.endpoint.save()
+            return {'status': 'ok', 'command': 'setting change'}
+
+        if self.endpoint.request_config:
+            return {'status': 'ok', 'command': 'upload'}
+
+        return {'status': 'ok'}
+
+
+class EndpointDownloadConfigService:
+    def __init__(self, pk: int) -> None:
+        self.endpoint = get_object_or_404(EndpointModel, pk=pk)
+
+    def _dict_to_json(self, config: dict) -> bytes:
+        json_bytes = json.dumps(config, ensure_ascii=False,
+                                indent=4, sort_keys=True).encode("utf-8")
+        return json_bytes
+
+    def setup_endpoint(self) -> None:
+        if self.endpoint.antivirus_start_scan:
+            self.endpoint.antivirus_start_scan = False
+            self.endpoint.save()
+
+    def download(self) -> dict:
+        config = self.save_endpoint_settings_to_dict()
+        # Serialize data to a base64 string
+        json_bytes = self._dict_to_json(config)
+        base64_bytes = base64.b64encode(json_bytes)
+        base64_message = base64_bytes.decode("utf-8")
+
+        response = {
+            'status': 'ok',
+            'config': base64_message
+        }
+        return response
+
+    def save_endpoint_settings_to_dict(self) -> dict:
+        """Convert the endpoint model to a dict."""
+        integrity_control_scan_paths = self.endpoint.scan_paths if self.endpoint.scan_paths else []
+        white_list_paths = self.endpoint.white_list_paths if self.endpoint.white_list_paths else []
+        antivirus_paths = self.endpoint.antivirus_paths if self.endpoint.antivirus_paths else []
+
+        config = {
+            "device_control": {
+                "cd": {
+                    "deny_read": self.endpoint.prohibit_cd_access,
+                    "deny_write": self.endpoint.prohibit_cd_access
+                },
+                "enabled": self.endpoint.device_control_enabled,
+            },
+            "integrity_control": {
+                "control_path": integrity_control_scan_paths,
+                "enabled": self.endpoint.integrity_control_enabled,
+                "timeout": f'{self.endpoint.integrity_control_timeout}s',
+
+            },
+            "white_list": {
+                "enabled": self.endpoint.whitelist_enabled,
+                "local_admin": self.endpoint.whitelist_admin,
+                "path": white_list_paths,
+            },
+            "usb_control": {
+                "enabled": self.endpoint.usb_control_enabled,
+            },
+            "antivirus": {
+                "start_scan": self.endpoint.antivirus_start_scan,
+                "scan_path": antivirus_paths,
+                "enabled": self.endpoint.antivirus_enabled,
+                "remove_infected_files": self.endpoint.antivirus_remove_infected_files,
+            },
+        }
+
+        rotation = {
+            'type': self.endpoint.event_rotation_type,      # Int
+            'size': self.endpoint.event_rotation_size,      # Int
+            'period': self.endpoint.event_rotation_period,  # Int
+            'time': self.endpoint.event_rotation_time,      # String in format: 01:00:00
+        }
+
+        config['rotation'] = rotation
+        return config
+
+    def download_as_file(self) -> tuple:
+        config = self.save_endpoint_settings_to_dict()
+        filename = f'endpoint_config_{self.endpoint.pk}.json'
+        return self._dict_to_json(config), filename
+
+
+class EndpointUploadConfigService:
+    def __init__(self, pk: int, data) -> None:
+        self.endpoint = EndpointModel.objects.get(pk=pk)
+        self.data = json.loads(data.decode('utf-8'))
+        self.prepare_data()
+
+    def prepare_data(self):
+        """Data preparation, mapping endpoint field aliases to DB names."""
+        mapping_fields = {
+            # Endpoint/MC
+            'dc_enabled': 'device_control_enabled',
+
+            'ic_enabled': 'integrity_control_enabled',
+            'scan_folders': 'scan_paths',
+            'ic_timeout': 'integrity_control_timeout',
+
+            'wl_enable': 'whitelist_enabled',
+            'wl_admin': 'whitelist_admin',
+            'white_list': 'white_list_paths',
+
+            'clamav_enabled': 'antivirus_enabled',
+            'clamav_paths': 'antivirus_paths',
+            'clamav_remove_infected_files': 'antivirus_remove_infected_files',
+            'clamav_start_scan': 'antivirus_start_scan',
+        }
+
+        self.data['ip'] = self.endpoint.ip
+        for alias, field in mapping_fields.items():
+            try:
+                self.data[field] = self.data[alias]
+            except KeyError:
+                continue
+
+    def upload(self):
+        serializer = EndpointConfigSerializer(self.endpoint, data=self.data)
+        self.endpoint.request_config = False
+        if not serializer.is_valid():
+            _log.error(serializer.errors)
+            self.endpoint.is_requested_config_correct = False
+            self.endpoint.save()
+            return {'status': 'error', 'error_message': serializer.errors}
+
+        serializer.update(instance=self.endpoint, validated_data=serializer.validated_data)
+        self.endpoint.is_requested_config_correct = True
+        self.endpoint.incorrect_settings = False
+        self.endpoint.config_errors = None
+        self.endpoint.save()
+        _log.info(f'Endpoint [{self.endpoint.pk}] updated from endpoint instance')
+        return {'status': 'ok'}
+
+
+class EndpointUpdateService:
+    """Service for requesting a config upload from the endpoint."""
+
+    def __init__(self, pk: int) -> None:
+        self.endpoint = get_object_or_404(EndpointModel, pk=pk)
+        self.status_service = EndpointStatusService(self.endpoint)
+
+    def _update(self) -> None:
+        self.endpoint.request_config = True
+        self.endpoint.save()
+
+    def update(self) -> dict:
+        status = self.status_service.get_status()['status']
+        if status == 'offline':
+            raise EndpointException('Endpoint is offline')
+        self._update()
+        return {'status': 'ok'}
diff --git a/devices/services/firewall/__init__.py b/devices/services/firewall/__init__.py
new file mode 100644
index 0000000..c81da25
--- /dev/null
+++ b/devices/services/firewall/__init__.py
@@ -0,0 +1,5 @@
+from devices.services.firewall.firewall import FirewallService
+from devices.services.firewall.exception import (
+    ConnectionException, InvalidCredentialException, IncompatibilityVersionException, RemoteException,
+    NoContentDispositionException, APIException, InvalidResponseException, FailedUploadException, InvalidFileException
+)
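The alias table in prepare_data is effectively the contract between the agent's config dump and the console's field names. A toy rendering of the same dictionary-driven renaming (payload values invented):

    payload = {'dc_enabled': True, 'wl_enable': False, 'clamav_paths': ['/tmp']}
    mapping = {
        'dc_enabled': 'device_control_enabled',
        'wl_enable': 'whitelist_enabled',
        'clamav_paths': 'antivirus_paths',
    }
    data = {field: payload[alias] for alias, field in mapping.items() if alias in payload}
    assert data == {'device_control_enabled': True,
                    'whitelist_enabled': False,
                    'antivirus_paths': ['/tmp']}

Unknown aliases are simply skipped, which is why the service tolerates partial config dumps.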
diff --git a/devices/services/firewall/exception.py b/devices/services/firewall/exception.py
new file mode 100644
index 0000000..dd99a95
--- /dev/null
+++ b/devices/services/firewall/exception.py
@@ -0,0 +1,104 @@
+from rest_framework import status
+from rest_framework.exceptions import APIException as _default_APIException
+
+
+class APIException(_default_APIException):
+
+    status_code = status.HTTP_400_BAD_REQUEST
+    default_detail = {
+        'status': 'error',
+        'code': 'unknown',
+        'detail': 'Unknown error occurred',
+    }
+
+
+class ArmaIndustrialFirewallException(APIException):
+
+    status_code = status.HTTP_400_BAD_REQUEST
+    default_detail = {
+        'status': 'error',
+        'code': 'unknown',
+        'detail': 'Unknown error occurred',
+    }
+
+
+class InvalidCredentialException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'unauthorized',
+        'detail': 'Invalid credentials provided to connect to firewall',
+    }
+
+
+class ConnectionException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'connection_error',
+        'detail': 'There was a problem connecting to the firewall',
+    }
+
+
+class IncompatibilityVersionException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'incompatible',
+        'detail': 'The firewall version is incompatible with the current console version'
+    }
+
+
+class InvalidRequestException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'warning',
+        'code': 'request_error',
+        'detail': 'Firewall was reconfigured and caused internal error. Auto FW health check initiated. Please try again'
+    }
+
+
+class RemoteException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'remote_error',
+        'detail': 'Unknown error occurred. Auto FW health check initiated. Please try again'
+    }
+
+
+class NoContentDispositionException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'no_content',
+        'detail': 'No content disposition during downloading file from ARMA IF'
+    }
+
+
+class InvalidFileException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'invalid',
+        'detail': 'Invalid file uploaded'
+    }
+
+
+class FailedUploadException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'failed',
+        'detail': 'Failed to upload file to firewall'
+    }
+
+
+class InvalidResponseException(ArmaIndustrialFirewallException):
+
+    default_detail = {
+        'status': 'error',
+        'code': 'invalid_response',
+        'detail': 'Unable to read firewall response'
+    }
+
diff --git a/devices/services/firewall/firewall.py b/devices/services/firewall/firewall.py
new file mode 100644
index 0000000..31f1a75
--- /dev/null
+++ b/devices/services/firewall/firewall.py
@@ -0,0 +1,290 @@
+import json
+import logging
+import re
+import warnings
+from hashlib import sha256
+from http import HTTPStatus
+from typing import Dict
+
+import requests
+from asgiref.sync import async_to_sync
+from channels.layers import get_channel_layer
+from django.conf import settings
+from django.core.cache import caches
+from django.utils.translation import gettext_lazy
+from packaging import version
+from urllib3.exceptions import InsecureRequestWarning
+
+from core.utils import catch_exception
+from devices.constants import FIREWALL_TIMEOUT, MINIMAL_AIF_VERSION
+from devices.enums import ArmaIndustrialFirewallStatus
+from devices.models.firewall import ArmaIndustrialFirewall
+from devices.services.firewall.exception import (
+    ConnectionException, InvalidCredentialException, IncompatibilityVersionException, RemoteException,
+    NoContentDispositionException, InvalidResponseException, FailedUploadException, InvalidFileException)
+
+_log = logging.getLogger(__name__)
+
+
+class FirewallService:
+    SUCCESS_RESPONSE = {'status': 'ok'}
+
+    def __init__(self, firewall: ArmaIndustrialFirewall = None):
+        self.firewall = firewall
+
+    @catch_exception
+    def check_status(self) -> dict:
+        firewall_status = caches['redis'].get(f'firewall_{self.firewall.pk}_status')
+        return firewall_status
+
+    @catch_exception
+    def download_file(self, type_file):
+        type_url_mapping = {
+            'config': ('/api/core/system/downloadConfig', 'post'),
+            'rulesets': ('/api/core/system/downloadIdsRulesets', 'get')
+        }
+
+        additional_url_path, method = type_url_mapping[type_file]
+        url = self.get_addr(additional_url_path)
+
+        if type_file == 'config':
+            hashed_key = self._get_hashed_key()
+            data = {"encrypt_password": hashed_key,
+                    "encrypt_repeat_password": hashed_key,
+                    "exclude_rrd": "on"}
+        else:
+            data = {}
+
+        response = self.send_request(url, method, data=data)
+        filename_header = response.headers.get('content-disposition', None)
+
+        if filename_header is None:
+            _log.error(f'No content disposition during downloading file from ARMA IF {response.content.decode("utf8")}')
+            raise NoContentDispositionException
+        filename = re.search("filename=(.+)", filename_header).group(1)
+        return response.content, filename
+
+    @catch_exception
+    def upload_file(self, file, type_file):
+        type_url_mapping = {
+            'config': ('/api/core/system/restoreConfig', 'post'),
+            'rulesets': ('/api/ids/service/addUserlocalRulesets', 'post')
+        }
+
+        additional_url_path, method = type_url_mapping[type_file]
+        url = self.get_addr(additional_url_path)
+
+        if type_file == 'config':
+            kwargs = {'data': {'decrypt_password': self._get_hashed_key()},
+                      'files': {'conffile': file}}
+        else:
+            kwargs = {'files': {'uploadRulesetFiles[]': file}}
+
+        response = self.send_request(url, method, **kwargs)
+        response_js = self._get_json(response)
+
+        if response.status_code == requests.codes.ok:
+            if response_js.get('status', '') == 'ok':
+                if type_file == 'config':
+                    return self.SUCCESS_RESPONSE
+                else:
+                    return self._parse_rulesets_response(response_js)
+            elif response_js.get('status', '') == 'invalid':
+                _log.error(f'InvalidFile: {response_js}')
+                raise InvalidFileException
+            elif response_js.get('status', '') == 'failed':
+                _log.error(f'FailedUpload: {response_js}')
+                raise FailedUploadException
+            else:
+                _log.error(f'Unable to read firewall response: {response_js}')
+                raise InvalidResponseException
+        else:
+            _log.error('Error uploading config to aif: ' + str(response.text))
+            raise FailedUploadException
+
+    @catch_exception
+    def check_connection(self, connection_data=None):
+        """
+        API for checking the connection with a firewall before adding it to the database. The request method must be
+        POST, and the request body should contain JSON with the following keys: 'ip', 'key', 'secret'. Otherwise the
+        API will send a JSON response with an error message signalling that the provided data is incorrect.
+        Returns different JSON responses depending on the outcome, as follows:
+            status = 'ok' when the connection has been established
+            status = 'error' with different codes:
+                code = 'unauthorized' if wrong authentication credentials were provided
+                code = 'connection_error' if the IP address provided by the user could not be reached
+        """
+        if not connection_data:
+            connection_data = {
+                'ip': self.firewall.ip,
+                'key': self.firewall.key,
+                'secret': self.firewall.secret,
+            }
+        _log.info(f'Trying to connect to firewall: {connection_data}')
+
+        session = requests.Session()
+        session.auth = (connection_data['key'], connection_data['secret'])
+        session.verify = False
+
+        # Forming the request URL to check the connection with the firewall
+        request_url = f'http://{connection_data["ip"]}/api/core/system/info'
+
+        try:
+            firewall_response = session.get(request_url, timeout=FIREWALL_TIMEOUT)
+
+            if firewall_response.status_code == HTTPStatus.UNAUTHORIZED:
+                _log.error(f'Error: Invalid credential provided to connect to firewall with ip {connection_data["ip"]}')
+                raise InvalidCredentialException
+        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
+            _log.error(f'Error: {str(e)}')
+            raise ConnectionException
+
+        firewall_info = firewall_response.json()
+        firewall_version = firewall_info['items']['product_version']
+
+        if not self.firewall_version_validator(firewall_version):
+            _log.error(
+                f'The firewall version is incompatible with the current console version. IP:{connection_data["ip"]}')
+            raise IncompatibilityVersionException
+
+    @catch_exception
+    def reboot(self):
+        additional_url_path, method = '/api/core/system/reboot', 'get'
+        url = self.get_addr(additional_url_path)
+        response = self.send_request(url, method)
+        response_js = self._get_json(response)
+
+        if response_js.get('status', '') != 'ok':  # Remote error
+            _log.error(f'Unable to reboot firewall. Response: {response_js}')
+            raise RemoteException
+        return self.SUCCESS_RESPONSE
+
+    @catch_exception
+    def rule_fields(self, request):
+        additional_url_path, method = '/api/firewall/filter/getRule/', 'get'
+        url = self.get_addr(additional_url_path)
+        response = self.send_request(url, method, request)
+        response_js = self._get_json(response)
+        return response_js
+
+    def send_request(self, request_url, method_str, request=None, **kwargs):
+        try:
+            session = self.get_session(request)
+            method = getattr(session, method_str)
+            firewall_response = method(request_url, timeout=FIREWALL_TIMEOUT, **kwargs)
+            if firewall_response.status_code == HTTPStatus.UNAUTHORIZED:
+                _log.error('Invalid credentials provided to connect to firewall')
+                raise InvalidCredentialException
+        except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e:
+            _log.error(f'Error: {str(e)}')
+            raise ConnectionException
+        except requests.exceptions.RequestException as e:
+            _log.error(f'Firewall is offline. Error: {str(e)}')
+            raise ConnectionException
+        return firewall_response
+
+    def get_info(self):
+        """
+        Used only in 'devices/tasks/firewall.py'.
+        Returns only a dict without raising an exception so that the correct data is written to redis.
+        """
+        try:
+            session = self.get_session()
+            addr = self.get_addr('api/core/system/info')
+            response = session.get(addr, timeout=FIREWALL_TIMEOUT)
+            if response.status_code == requests.codes.UNAUTHORIZED:
+                return {'status': ArmaIndustrialFirewallStatus.unauthorized}
+            try:
+                response_js = response.json()
+            except ValueError as e:
+                _log.warning(f'Invalid output from firewall: {e}')
+                return {'status': ArmaIndustrialFirewallStatus.error,
+                        'detail': gettext_lazy('Invalid answer from firewall')}
+            if response_js.get('status', '') != 'ok' or response_js.get('items', {}).get('product_id', '') != 'armaif':
+                return {'status': ArmaIndustrialFirewallStatus.error}  # remote error
+            return {'status': ArmaIndustrialFirewallStatus.online, 'data': response_js['items']}  # success
+        except requests.exceptions.RequestException as e:
+            _log.error(f'Invalid output from firewall: {e}')
+            return {'status': ArmaIndustrialFirewallStatus.offline}  # unreachable
+
+    def get_session(self, request=None):
+        # Disable warning spam about certificates when ARMAIF is in HTTPS mode
+        warnings.simplefilter('ignore', InsecureRequestWarning)
+        session = requests.Session()
+        session.auth = (self.firewall.key, self.firewall.secret)
+        session.verify = False
+        if request:
+            if request.LANGUAGE_CODE.lower() != "en":
+                lang = f"{request.LANGUAGE_CODE.lower()}, en;q=0.8"
+            else:
+                lang = request.LANGUAGE_CODE.lower()
+            session.headers.update({"Accept-Language": lang})
+        return session
+
+    def get_addr(self, add_path=''):
+        return 'https://{}/{}'.format(self.firewall.ip, add_path.lstrip('/'))
+
+    @staticmethod
+    def firewall_version_validator(fw_version):
+        """
+        # TODO: Due to the fact that the version method is not completely valid for this purpose, we need to write
+        our own method for comparing AIF versions
+        Function for checking ARMAIF version compatibility.
+        The correct ARMAIF version format for this validator is:
+        '[numerical_version]-[mod_to_version]'; the numerical version must be in the following format: x.y.z, where:
+            x: int, major version
+            y: int, minor version
+            z: int, patch number
+        Also, if the version mod starts with `rc`, which means 'release candidate', True is returned as well.
+        :param fw_version: armaif version to check
+        :return: True if the version is valid, False otherwise
+        """
+        # Remove additions
+        fw_version_pure_version = fw_version.split('-')[0]
+        # Compare with the minimal allowed version
+        return not version.parse(fw_version_pure_version) < version.parse(MINIMAL_AIF_VERSION)
+
+    @staticmethod
+    def _get_hashed_key():
+        hashed_key = sha256(settings.SECRET_KEY.encode()).hexdigest()
+        return hashed_key
+
+    @staticmethod
+    def _get_json(response: requests.Response) -> dict:
+        try:
+            return response.json()
+        except json.decoder.JSONDecodeError:
+            _log.error(f'Unable to read firewall response: {response.text}')
+            raise InvalidResponseException
+
+    @staticmethod
+    def _parse_rulesets_response(response_js):
+        results_json = {
+            'status': 'ok',
+            'success': 0,
+            'failed': 0
+        }
+        loaded_files = response_js.get('files')
+        for f_name, f_info in loaded_files.items():
+            if f_info.get('status') in ['ok', 'warning']:
+                results_json['success'] += 1
+            else:
+                results_json['failed'] += 1
+                msg = f_info.get('msg', f_info.get('trmsg', 'unknown reason'))
+                _log.error(f'File {f_name} failed to upload to IDS with the following message: {msg}')
+        return results_json
+
+
+def get_all_aif_with_status_map() -> Dict[str, Dict[str, str]]:
+    """Return a mapping of firewall ids to statuses."""
+    firewalls = ArmaIndustrialFirewall.objects.all()
+    id_status_map = {str(fw.pk): FirewallService(fw).check_status() for fw in firewalls}
+    return id_status_map
+
+
+def firewalls_status_notification_to_ws():
+    """Notification about firewalls status."""
+    data = get_all_aif_with_status_map()
+    channel_layer = get_channel_layer()
+    async_to_sync(
+        channel_layer.group_send
+    )('notification_incidents', {'type': 'notification', 'data': {'firewalls_status': data}})
diff --git a/devices/services/sensor/__init__.py b/devices/services/sensor/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/services/sensor/enums.py b/devices/services/sensor/enums.py
new file mode 100644
index 0000000..f4382e0
--- /dev/null
+++ b/devices/services/sensor/enums.py
@@ -0,0 +1,33 @@
+import enum
+
+
+class VectorMessage(enum.Enum):
+    status = 'vector_status'
+    start = 'vector_start'
+    stop = 'vector_stop'
+    restart = 'vector_restart'
+    reload = 'vector_reload'
+    service_info = 'vector_service_info'
+
+
+class ZeekMessage(enum.Enum):
+    status = 'zeek_status'
+    start = 'zeek_start'
+    stop = 'zeek_stop'
+    restart = 'zeek_restart'
+    settings_update = 'zeek_settings_update'
+    protocols_disable = 'zeek_protocols_disable'
+
+
+class SuricataMessage(enum.Enum):
+    status = 'suricata_status'
+    start = 'suricata_start'
+    stop = 'suricata_stop'
+    restart = 'suricata_restart'
+
+
+class SystemMessage(enum.Enum):
+    settings_changed = 'settings_changed'
+    synchronization = 'synchronization'
+    ping = 'ping'
+    interfaces = 'interfaces'
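The enum values above travel as the AMQP message's `type` property (see SensorManagement below). The sensor-side agent is not part of this patch; a hypothetical dispatcher on that side might key handlers off the same values (handler bodies invented):

    from devices.services.sensor.enums import SystemMessage, ZeekMessage

    HANDLERS = {
        SystemMessage.ping.value: lambda body: {'status': 'ok'},
        ZeekMessage.status.value: lambda body: {'status': 'running'},  # invented reply
    }

    def dispatch(message_type: str, body: bytes) -> dict:
        handler = HANDLERS.get(message_type)
        if handler is None:
            # mirrors the convention: unknown types come back as an error reply
            return {'status': 'error', 'detail': f'unknown message type {message_type}'}
        return handler(body)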
diff --git a/devices/services/sensor/rabbitmq.py b/devices/services/sensor/rabbitmq.py
new file mode 100644
index 0000000..b4492a2
--- /dev/null
+++ b/devices/services/sensor/rabbitmq.py
@@ -0,0 +1,241 @@
+import json
+import logging
+from functools import partial
+from typing import Optional, Union, Tuple
+
+import pika
+from django.conf import settings
+from pika.adapters.blocking_connection import BlockingChannel
+from pika.exchange_type import ExchangeType
+from rest_framework import status
+from rest_framework.exceptions import APIException
+
+from devices.models.sensor import ArmaSensor
+from devices.services.sensor.enums import SystemMessage, ZeekMessage, VectorMessage, SuricataMessage
+
+_log = logging.getLogger(__name__)
+
+
+class SensorResponseException(APIException):
+    status_code = status.HTTP_400_BAD_REQUEST
+
+
+class SensorManagement:
+    """Service for sending messages and receiving responses via RabbitMQ."""
+
+    # Prefix all sensor queues start with. Each sensor listens on its own queue
+    SENSOR_QUEUE_PREFIX = 'sensor_'
+
+    # Exchange a sensor queue can be bound to.
+    # The console can publish a single message to this exchange and it will be delivered to every bound queue
+    SENSORS_EXCHANGE = 'sensors'
+
+    # When a message expecting a reply is sent, the replies received so far are collected in this attribute
+    _rpc_responses = tuple()
+
+    def __init__(self, proceed=True):
+
+        self.connection, self.channel = self._get_connection_data()
+
+        # The flag means that waiting for and processing sensor replies is still in progress
+        self._proceed = proceed
+
+    def send_message(self,
+                     sensor: ArmaSensor = None,
+                     message_type: Union[
+                         SystemMessage, VectorMessage, ZeekMessage, SuricataMessage] = SystemMessage.ping,
+                     body: Union[dict, bytes, str] = None,
+                     wait_response: bool = False,
+                     decode_response: bool = True,
+                     time_limit: int = 5) -> Optional[dict]:
+        """Method for sending messages.
+
+        If we requested a response from the sensor, then it returns a dict. Otherwise, it will return nothing.
+        Throws an exception if the response from the sensor cannot be decoded.
+
+        :param sensor: ArmaSensor object. Specify if you want to send the message only to its queue.
+            If it is not passed, the message will be sent to all sensors.
+        :param message_type: A string that specifies what type of message will be sent.
+            Depending on this type, different logic will be called on the sensor side.
+            If the sensor does not know how to process some type, it will return an error.
+        :param body: The payload that can be sent along with the message.
+            Used on the sensor side. We only encode and send.
+        :param wait_response: A flag that determines whether we are waiting for a response to the message or not.
+            If False, then we send the message and immediately close the connection.
+            Otherwise, we block the process until we wait for a response.
+        :param decode_response: Whether to decode the message that is returned from the sensor.
+        :param time_limit:
+            If we send a message and a response is required, how long to wait for it. Specified in seconds.
+            If we sent a message to all sensors, but after a while not all of them answered,
+            then we will return only the received responses.
+        :return:
+        """
+
+        # Step 1. Decide where the message goes. If a sensor is given, only to it; otherwise send to all
+        if sensor:
+            queue = f'{self.SENSOR_QUEUE_PREFIX}{sensor.uuid}'  # the sensor's queue, which only it listens on
+            exchange = ''
+            required_responses = 1  # when messaging a single sensor, we expect a single reply
+        else:
+            exchange, queue = self._get_sensors_exchange(), ''
+            required_responses = ArmaSensor.objects.count()  # expect as many replies as there are sensors
+
+        # Step 2. Prepare the message for sending
+        # 2.1 Convert the optional payload to bytes
+        bytes_body = self._convert_to_bytes(body)
+
+        # 2.2 Prepare the meta information (properties) for the message
+        properties = {
+            # Message type; from it the sensor determines what exactly it has to do
+            'type': message_type.value,
+            # How long the sent message is kept in the queue.
+            # If nobody consumes it before the time runs out, it is dropped
+            'expiration': '5000'
+        }
+
+        # 2.3 If we send a message and want a reply to it, we must specify:
+        #  - the queue the sensor should send the reply to
+        #  - the function that will be called when a reply arrives in that queue
+        if wait_response:
+            on_message_callback = partial(self._response_callback, decode_response=decode_response)
+            properties['reply_to'] = self._prepare_callback_queue(on_message_callback)
+
+        # convert the prepared message properties into pika's format
+        properties = pika.BasicProperties(**properties)
+
+        # Step 3. Publish the message, specifying the recipients, the message body and its properties
+        self.channel.basic_publish(exchange=exchange, routing_key=queue, body=bytes_body, properties=properties)
+
+        # Step 4. Collect the sensor replies. If none were requested, the function returns None
+        response = self._get_responses(sensor, wait_response, required_responses, time_limit)
+
+        # Step 5. Close the connection
+        self.connection.close()
+
+        return response
+
+    def _get_responses(self, sensor, wait_response, required_responses, time_limit) -> Union[tuple, dict, None]:
+        """A function that counts replies and converts them to json format."""
+
+        def _stop_proceed_after_timeout():
+            self._proceed = False
+
+        # Schedule the function to run after the given delay.
+        # It turns off the processing flag so the 'while' condition below stops holding.
+        # This call does not block execution
+        self.connection.call_later(time_limit, _stop_proceed_after_timeout)
+
+        # pseudocode:
+        # while we expect a reply, and not all required replies have arrived, and time is not up yet: ...
+        while wait_response and (len(self._rpc_responses) < required_responses) and self._proceed:
+            self.connection.process_data_events()
+
+        # once the loop exits (all replies received or the time ran out) we start
+        # processing the replies
+        if wait_response and sensor:
+            # if we expected a reply from a single sensor and that reply arrived,
+            # check its status and return the json reply
+            if self._rpc_responses:
+                response = self._check_response_status(self._rpc_responses[0])
+                return response
+            # if no reply arrived, raise an exception
+            else:
+                raise SensorResponseException({
+                    'status': 'error',
+                    'detail': 'sensor did not send a response'
+                })
+        elif wait_response:
+            # If replies were expected from all sensors and the processing time ran out, return whatever was received.
+            return self._rpc_responses
+
+    def _check_response_status(self, response):
+        """Check the response status."""
+
+        # Check the reply status and raise if necessary, so the frontend receives a response with a 400 code.
+        # This field is a convention between the console and the sensor:
+        # if the sensor omits the field or sends something else, the reply is treated as correct and error-free
+        if isinstance(response, dict) and response.get('status', None) == 'error':
+            raise SensorResponseException(response)
+        return response
+
+    def _get_sensors_exchange(self) -> str:
+        """Determine the exchange and its type."""
+
+        # 'fanout' means the message is delivered to every queue bound to the exchange
+        self.channel.exchange_declare(self.SENSORS_EXCHANGE, exchange_type=ExchangeType.fanout.value, durable=True)
+        return self.SENSORS_EXCHANGE
+
+    def _prepare_callback_queue(self, callback_func) -> str:
+        """Create the queue in which we expect a reply."""
+
+        # Declare a disposable queue to wait for the reply in; it is removed once the connection closes
+        result = self.channel.queue_declare(queue='', exclusive=True)
+        callback_queue = result.method.queue
+
+        # Register the function that will process messages in the temporary queue
+        self.channel.basic_consume(queue=callback_queue, on_message_callback=callback_func, auto_ack=True)
+
+        return callback_queue
+
+    @staticmethod
+    def _convert_to_bytes(body) -> bytes:
+        """Convert the body to bytes."""
+
+        if body is None:
+            body = b''
+        if isinstance(body, bytes):
+            return body
+        try:
+            str_json = json.dumps(body)
+            bytes_body = str_json.encode()
+            return bytes_body
+        except TypeError:
+            raise SensorResponseException({
+                'status': 'error',
+                'detail': 'unable to encode data to send'
+            })
+
+    def _response_callback(self, channel, method, properties, body, decode_response: bool):
+        """A function that processes messages in the response queue.
+
+        Receives a response, converts it to json and appends it to the attribute self._rpc_responses
+        """
+
+        response = self._prepare_response(body, decode_response)
+        self._rpc_responses += (response, )
+
+    def _prepare_response(self, rpc_response: Optional[bytes], decode_rpc_response: bool) -> Union[dict, bytes]:
+
+        if not rpc_response:
+            response = {
+                'status': 'error',
+                'detail': 'sensor did not return a response'
+            }
+            return response if decode_rpc_response else json.dumps(response)
+
+        if decode_rpc_response:
+            response = self._decode_rpc_response(rpc_response)
+        else:
+            response = rpc_response
+        return response
+
+    @staticmethod
+    def _decode_rpc_response(rpc_response: bytes) -> dict:
+        try:
+            response = json.loads(rpc_response)
+        except json.JSONDecodeError:
+            response = {
+                'status': 'error',
+                'detail': 'failed to decode sensor response to json'
+            }
+        return response
+
+    @staticmethod
+    def _get_connection_data() -> Tuple[pika.BlockingConnection, BlockingChannel]:
+        """Create a connection to RabbitMQ."""
+
+        connection = pika.BlockingConnection(pika.ConnectionParameters(host=settings.RABBIT_HOST,
+                                                                       port=settings.RABBIT_PORT))
+        channel = connection.channel()
+
+        return connection, channel
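A usage sketch for SensorManagement, assuming RabbitMQ is reachable at settings.RABBIT_HOST/RABBIT_PORT and at least one ArmaSensor row exists:

    from devices.models.sensor import ArmaSensor
    from devices.services.sensor.enums import SystemMessage
    from devices.services.sensor.rabbitmq import SensorManagement

    # Broadcast a ping to every sensor queue and collect replies for up to 5 s:
    replies = SensorManagement().send_message(message_type=SystemMessage.ping, wait_response=True)

    # Ask one sensor for its interfaces (this is what SensorService.get_interfaces does):
    sensor = ArmaSensor.objects.first()
    interfaces = SensorManagement().send_message(
        sensor=sensor, message_type=SystemMessage.interfaces, wait_response=True)

Note that send_message closes its connection before returning, so a fresh SensorManagement() is needed per message.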
diff --git a/devices/services/sensor/service.py b/devices/services/sensor/service.py
new file mode 100644
index 0000000..77a46cc
--- /dev/null
+++ b/devices/services/sensor/service.py
@@ -0,0 +1,24 @@
+from typing import Optional
+
+from django.core.cache import caches
+
+from devices.models.sensor import ArmaSensor
+from devices.services.sensor.enums import SystemMessage
+from devices.services.sensor.rabbitmq import SensorManagement
+
+
+class SensorService:
+
+    def __init__(self, sensor: ArmaSensor):
+        self.sensor = sensor
+
+    def get_status(self) -> Optional[dict]:
+        sensor_status = caches['redis'].get(f'sensor_{self.sensor.pk}_status') or {'status': 'offline'}
+        return sensor_status
+
+    def get_interfaces(self):
+        """Get all interfaces from the sensor."""
+
+        actual_interfaces = SensorManagement().send_message(
+            sensor=self.sensor, message_type=SystemMessage.interfaces, wait_response=True)
+        return actual_interfaces
diff --git a/devices/services/sensor/utils.py b/devices/services/sensor/utils.py
new file mode 100644
index 0000000..08a0f7a
--- /dev/null
+++ b/devices/services/sensor/utils.py
@@ -0,0 +1,68 @@
+import base64
+import logging
+import string
+from secrets import choice
+
+import requests
+from django.conf import settings
+
+_log = logging.getLogger(__name__)
+
+
+class RabbitMQUserManagement:
+    def __init__(self, uuid):
+        self.uuid = uuid
+
+    def create_random_rabbitmq_credentials(self):
+        """Create a username and password to connect to RabbitMQ."""
+
+        username = f'sensor_{self.uuid}'
+        password = ''.join(choice(string.ascii_uppercase + string.digits) for _ in range(20))
+
+        source_credentials = username + '@@' + password
+
+        credentials_bytes = source_credentials.encode()
+        b64_credentials = base64.b64encode(credentials_bytes)
+        final_credentials = b64_credentials.decode()
+
+        self._create_rabbitmq_user(username, password)
+        self._set_rabbitmq_permissions(username)
+
+        return final_credentials
+
+    @staticmethod
+    def _create_rabbitmq_user(username, password):
+        data = {
+            'password': password,
+            'tags': ''
+        }
+        response = requests.put(f'http://{settings.RABBIT_HOST}:15672/api/users/{username}/',
+                                auth=('guest', 'guest'),
+                                json=data)
+        if response.ok:
+            _log.info(f'RabbitMQ user {username} created')
+        else:
+            _log.error(f'Error creating RabbitMQ user {username}')
+
+    @staticmethod
+    def _set_rabbitmq_permissions(username):
+        vhost = '%2F'  # the default vhost the RabbitMQ server runs on: just a URL-encoded slash "/"
+
+        # https://www.rabbitmq.com/access-control.html#authorisation
+        data = {
+            # the user may configure (create, delete, modify) only its own queue
+            "configure": username,
+
+            # may write to auto-generated queues or to its own
+            "write": f'^(amq.gen.*|amq.default|{username})$',
+
+            # may read from its own queue or from the one shared by all sensors
+            "read": f"sensors|{username}"
+        }
+        response = requests.put(f'http://{settings.RABBIT_HOST}:15672/api/permissions/{vhost}/{username}/',
+                                auth=('guest', 'guest'),
+                                json=data)
+        if response.ok:
+            _log.info(f'Set permissions for RabbitMQ user {username}')
+        else:
+            _log.error(f'Error setting permissions for RabbitMQ user {username}')
diff --git a/devices/services/vector.py b/devices/services/vector.py
new file mode 100644
index 0000000..29f5502
--- /dev/null
+++ b/devices/services/vector.py
@@ -0,0 +1,78 @@
+import logging
+import os
+
+from django.template.loader import render_to_string
+
+from devices.constants import VECTOR_CONFIG_DIR
+from devices.enums import DeviceType
+from devices.models.device import Device
+from events.constants import ELK_URL, ELK_LOGIN, ELK_PASS
+
+_log = logging.getLogger(__name__)
+
+
+class VectorServiceError(Exception):
+    pass
+
+
+class VectorService:
+
+    def __init__(self, device: Device):
+        self.device = device
+        self.config_type = self._get_config_type()
+        self.config_name = self._get_config_name()
+
+    def update_config(self):
+        self.check_license()
+        config_content = self._create_config_content()
+        self._write_to_file(config_content)
+
+    def delete_config(self):
+        self._delete_config_file()
+
+    def check_license(self):
+        pass  # todo call license service
+
+    def _get_config_name(self) -> str:
+        return f'{self.config_type}_{self.device.pk}.toml'
+
+    def _create_config_content(self) -> str:
+        context = {
+            "pk": self.device.pk,
+            "port": self.device.port,
+            "adjust_datetime": self.device.adjust_datetime,
+            "elastic_url": ELK_URL,
+            "elastic_login": ELK_LOGIN,
+            "elastic_pass": ELK_PASS,
+        }
+
+        config = render_to_string(f"vector/config/{self.config_type}.toml", context)
+        return config
+
+    def _write_to_file(self, config_content: str):
+        # Check that the config directory exists
+        if not os.path.exists(VECTOR_CONFIG_DIR):
+            os.makedirs(VECTOR_CONFIG_DIR)
+        # Write the new config content to the correct file
+        with open(os.path.join(VECTOR_CONFIG_DIR, self.config_name), 'w') as f:
+            f.write(config_content)
+        _log.info(f'Created file [{self.config_name}]')
+
+    def _delete_config_file(self):
+        try:
+            os.remove(os.path.join(VECTOR_CONFIG_DIR, self.config_name))
+            _log.info(f'Removed file [{self.config_name}]')
+        except IOError:
+            _log.warning(
+                f'Tried to remove device with pk {self.device.pk} but file {self.config_name} does not exist')
+
+    def _get_config_type(self) -> str:
+        type_mapping = {
+            DeviceType.FIREWALL: "armaif",
+            DeviceType.ENDPOINT: "endpoint",
+            DeviceType.SENSOR: "sensor",
+        }
+        try:
+            return type_mapping[self.device.type]
+        except KeyError:
+            raise VectorServiceError(f"Can't make config for type {self.device.type}")
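A quick sketch of the config lifecycle VectorService manages (assumes a saved Device whose type has a template under devices/templates/vector/config/ and that the ELK_* settings are configured):

    from devices.models.device import Device
    from devices.services.vector import VectorService

    device = Device.objects.get(pk=1)   # pk assumed
    service = VectorService(device)
    service.update_config()             # renders e.g. armaif_1.toml into VECTOR_CONFIG_DIR
    service.delete_config()             # removes it again

EndpointManagementService earlier in this patch drives exactly these two calls from its create/update/destroy hooks.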
diff --git a/devices/tasks/__init__.py b/devices/tasks/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/tasks/firewall.py b/devices/tasks/firewall.py
new file mode 100644
index 0000000..a77e2a7
--- /dev/null
+++ b/devices/tasks/firewall.py
@@ -0,0 +1,93 @@
+import logging
+import os
+
+from celery import shared_task
+from django.conf import settings
+from django.contrib.auth.models import User
+from django.core.cache import caches
+
+from devices.constants import CACHE_TIMEOUT
+from devices.enums import ArmaIndustrialFirewallStatus
+from devices.models.firewall import ArmaIndustrialFirewall
+from devices.services.firewall import FirewallService
+from devices.services.firewall.firewall import firewalls_status_notification_to_ws
+from storage.models import DataStorage, get_storage_path
+
+_log = logging.getLogger(__name__)
+MEDIA_ROOT = getattr(settings, 'MEDIA_ROOT')
+
+
+@shared_task
+def update_firewall_info_task():
+    update_firewall_info()
+
+
+def update_firewall_info():
+    for firewall in ArmaIndustrialFirewall.objects.all():
+        info = FirewallService(firewall).get_info()
+        ret, status = {'data': info.get('data', info)}, info.get('status', ArmaIndustrialFirewallStatus.error)
+
+        def set_firewall_status(firewall_status, firewall_pk):
+            response_json = {
+                'status': firewall_status
+            }
+            caches['redis'].set(f'firewall_{firewall_pk}_status', response_json, CACHE_TIMEOUT)
+
+        if status == ArmaIndustrialFirewallStatus.online:
+            set_firewall_status('online', firewall.pk)
+            firewall.website = ret['data']['product_website']
+            firewall.version = ret['data']['product_version']
+            firewall.remote_name = ret['data']['product_name']
+            firewall.identification_number = ret['data']['product_id']
+            firewall.hash_number = ret['data']['product_hash']
+            firewall.flavour = ret['data']['product_flavour']
+            firewall.email = ret['data']['product_email']
+            firewall.copyright_years = ret['data']['product_copyright_years']
+            firewall.copyright_url = ret['data']['product_copyright_url']
+            firewall.copyright_owner = ret['data']['product_copyright_owner']
+            firewall.architecture = ret['data']['product_arch']
+            firewall.abi = ret['data']['product_abi']
+            firewall.save()
+        elif status == ArmaIndustrialFirewallStatus.offline:
+            set_firewall_status('offline', firewall.pk)
+        elif status == ArmaIndustrialFirewallStatus.unauthorized:
+            set_firewall_status('unauthorized', firewall.pk)
+        else:
+            set_firewall_status('error', firewall.pk)
+    firewalls_status_notification_to_ws()
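update_firewall_info_task (and ping_sensors in devices/tasks/sensor.py below) only pay off when run periodically. The beat wiring is not part of this hunk; a hypothetical schedule, with invented intervals, could look like:

    # settings sketch: intervals are invented, the task paths follow from the module layout
    CELERY_BEAT_SCHEDULE = {
        'update-firewall-info': {
            'task': 'devices.tasks.firewall.update_firewall_info_task',
            'schedule': 60.0,   # seconds
        },
        'ping-sensors': {
            'task': 'devices.tasks.sensor.ping_sensors',
            'schedule': 30.0,
        },
    }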
+
+
+@shared_task
+def download_files_from_firewall_task(firewall_pk: int, user_pk: int, type_file: str):
+    firewall = ArmaIndustrialFirewall.objects.get(pk=firewall_pk)
+    user = User.objects.get(pk=user_pk)
+    _log.debug(f'[TASK] Start download firewall-{firewall.pk} {type_file}. User: {user}')
+    return download_files_from_firewall(firewall, user, type_file)
+
+
+def download_files_from_firewall(firewall: ArmaIndustrialFirewall, user: 'User', type_file: str) -> int:
+    """Download a file from the firewall and save it to the local Storage."""
+    _log.debug('[download_files_from_firewall] start')
+    format_map = {'rulesets': DataStorage.Format.TAR, 'config': DataStorage.Format.XML}
+
+    store = DataStorage(type=DataStorage.Type.FIREWALL, format=format_map.get(type_file, DataStorage.Format.UNKNOWN),
+                        user=user,
+                        description=f'{type_file} from {firewall.name} firewall',
+                        file='firewall', size=0)
+    store.save()
+    _log.debug(f'[download_files_from_firewall] create storage [{store.pk}]')
+
+    file_byte, name = FirewallService(firewall).download_file(type_file)
+    _log.debug(f'[download_files_from_firewall] get file {name}')
+    file_name = get_storage_path(store, name)
+    file_path = os.path.join(MEDIA_ROOT, file_name)
+    os.makedirs(os.path.dirname(file_path), exist_ok=True)
+    _log.debug(f'[download_files_from_firewall] create file {file_path}')
+    with open(file_path, 'wb') as file:
+        file.write(file_byte)
+    store.file = file_name
+    store.size = os.path.getsize(file_path)
+    store.save()
+
+    _log.debug('[download_files_from_firewall] finish.')
+    return store.pk
diff --git a/devices/tasks/sensor.py b/devices/tasks/sensor.py
new file mode 100644
index 0000000..84d1f90
--- /dev/null
+++ b/devices/tasks/sensor.py
@@ -0,0 +1,42 @@
+import logging
+
+from celery import shared_task
+from django.core.cache import caches
+
+from devices.constants import CACHE_TIMEOUT
+from devices.models.sensor import ArmaSensor
+from devices.serializers.sensor_serializers import SensorSerializer
+from devices.services.sensor.enums import SystemMessage
+from devices.services.sensor.rabbitmq import SensorManagement
+
+_log = logging.getLogger(__name__)
+
+
+@shared_task
+def ping_sensors():
+
+    # Mark all sensors offline first
+    all_sensors_id = list(ArmaSensor.objects.values_list('id', flat=True))
+    redis_data = {f'sensor_{sensor_id}_status': {'status': 'offline'} for sensor_id in all_sensors_id}
+    caches['redis'].set_many(redis_data, CACHE_TIMEOUT)
+
+    # Set the new sensor statuses from the ping replies
+    sensors_responses = SensorManagement().send_message(message_type=SystemMessage.ping, wait_response=True)
+    for response in sensors_responses:
+        _log.debug(f'Sensor response: {response}')
+        sensor = ArmaSensor.objects.get(uuid=response['uuid'])
+        if not response['synchronization'] or not sensor.synchronization:
+            data = SensorSerializer(instance=sensor).data
+            sync_response = SensorManagement().send_message(
+                sensor=sensor,
+                message_type=SystemMessage.synchronization,
+                body=data,
+                wait_response=True
+            )
+
+            if sync_response.get('synchronization', None):
+                sensor.synchronization = True
+                sensor.save()
+            else:
+                _log.warning(f'Sensor {response["uuid"]} not synchronized: {sync_response}')
+        caches['redis'].set(f'sensor_{sensor.pk}_status', {'status': 'online'}, CACHE_TIMEOUT)
diff --git a/devices/templates/vector/config/armaif.toml b/devices/templates/vector/config/armaif.toml
new file mode 100644
index 0000000..144c8ae
--- /dev/null
+++ b/devices/templates/vector/config/armaif.toml
@@ -0,0 +1,620 @@
+
+# ───────────────────────────────────────────────────────────────────────────────────────────────────
+# ─██████████████─████████████████───██████──────────██████─██████████████─██████████─██████████████─
+# ─██░░░░░░░░░░██─██░░░░░░░░░░░░██───██░░██████████████░░██─██░░░░░░░░░░██─██░░░░░░██─██░░░░░░░░░░██─
─██░░██████░░██─██░░████████░░██───██░░░░░░░░░░░░░░░░░░██─██░░██████░░██─████░░████─██░░██████████─ +# ─██░░██──██░░██─██░░██────██░░██───██░░██████░░██████░░██─██░░██──██░░██───██░░██───██░░██───────── +# ─██░░██████░░██─██░░████████░░██───██░░██──██░░██──██░░██─██░░██████░░██───██░░██───██░░██████████─ +# ─██░░░░░░░░░░██─██░░░░░░░░░░░░██───██░░██──██░░██──██░░██─██░░░░░░░░░░██───██░░██───██░░░░░░░░░░██─ +# ─██░░██████░░██─██░░██████░░████───██░░██──██████──██░░██─██░░██████░░██───██░░██───██░░██████████─ +# ─██░░██──██░░██─██░░██──██░░██─────██░░██──────────██░░██─██░░██──██░░██───██░░██───██░░██───────── +# ─██░░██──██░░██─██░░██──██░░██████─██░░██──────────██░░██─██░░██──██░░██─████░░████─██░░██───────── +# ─██░░██──██░░██─██░░██──██░░░░░░██─██░░██──────────██░░██─██░░██──██░░██─██░░░░░░██─██░░██───────── +# ─██████──██████─██████──██████████─██████──────────██████─██████──██████─██████████─██████───────── +# ─────────────────────────────────────────────────────────────────────────────────────────────────── + +[sources.if_{{pk}}_socket_source] + type = "syslog" + address = "0.0.0.0:{{port}}" + mode = "udp" + + + +# 1. Check is message CEF +[transforms.if_{{pk}}_cef_filter] + type="filter" + inputs=["if_{{pk}}_socket_source"] + condition = ''' match(string!(.message),r'^.*CEF:.*')''' + + +# Parse data from socket +[transforms.if_{{pk}}_parse_logs] + type = "remap" + inputs = ["if_{{pk}}_cef_filter"] + source = ''' + . |= parse_regex!(.message,r'CEF:\d+\|(?P([^\|]*))\|(?P([^\|]*))\|(?P([^\|]*))\|(?P([^\|]*))\|(?P([^\|]*))\|(?P([^\|]*))\|(?P.*)') + .aggregated = "false" + .orig_message = .message + .event_src_msg = .orig_message + .format = "CEF" + .type="armaif_1" + .extension =strip_whitespace(.extension) + .extension = strip_ansi_escape_codes(.extension) + .source_ip= .host + .destination_ip = "127.0.0.1" + .@timestamp = now() + + ''' + + + +#Check device product +[transforms.if_{{pk}}_check_device_product] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_logs"] + hooks.process =""" + function(event,emit) + if event.log["device_product"] == "ARMAIF" then + event.log.type="armaif_{{ pk }}" + event.log.device_product = "Industrial Firerwall" + event.log.message = "Empty message" + event.log.source_host = "0.0.0.0" + event.log.destination_host = "localhost" + emit(event) + end + end + """ + + +#Parse key value +[transforms.if_{{pk}}_parse_key_value] + type="lua" + version="2" + inputs=["if_{{pk}}_check_device_product"] + hooks.process=""" + function(event,emit) + str=event.log["extension"] + for name, value in str:gmatch"%s*([^=]*)=([^=]*)%f[%s%z]" do + event.log[name]=value + end + emit(event) + end + """ + + +{% if adjust_datetime == 1 %} +[transforms.if_{{pk}}_made_timestamp] + type="remap" + inputs=["if_{{pk}}_parse_key_value"] + source =""" + .event_timestamp= now() + .event_timestamp = format_timestamp!(.event_timestamp, format: "%+") + + """ + +{%else%} +[transforms.if_{{pk}}_made_timestamp] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_key_value"] + hooks.process = """ + function(event,emit) + local date_time = tonumber(event.log["rt"]) + m_date_time = date_time/1000 + event.log.event_timestamp = os.date("!%Y-%m-%dT%H:%M:%SZ",m_date_time) + emit(event) + end + """ +{%endif%} + + +#Parse input ARPWATCH +[transforms.if_{{pk}}_parse_arpwatch] + type="lua" + inputs =["if_{{pk}}_made_timestamp"] + version= "2" + hooks.process =""" + function(event,emit) + if event.log["name"] == "Arpwatch alert" then + if event.log["act"] ~= nil then + event.log.device_action = 
event.log["act"] + event.log.sign_id = event.log["act"] + + end + event.log.event_severity = event.log["severity"] + event.log.sign_subcategory = event.log["signature"] + event.log.sign_category = "ARPWATCH" + event.log.source_ip = event.log["src"] + event.log.sign_name = "New device "..event.log["src"] + + + end + emit(event) + end + + """ + +# Parse input firewall +[transforms.if_{{pk}}_parse_firewall] + type="lua" + version = "2" + inputs = ["if_{{pk}}_parse_arpwatch"] + hooks.process = """ + function (event,emit) + if event.log["name"] == "PF rule alert" then + if event.log["act"] ~= nil then + event.log.device_action = event.log["act"] + end + event.log.id = "armaif_1_firewall_match" + event.log.event_severity = event.log["severity"] + event.log.event_src_msg = event.log["message"] + event.log.event_protocol = event.log["proto"] + event.log.sign_id = event.log["cs1"] + event.log.sign_name = "Firewall Rule" + event.log.sign_category = "PF" + event.log.source_ip = event.log["src"] + event.log.source_port = event.log["spt"] + event.log.destination_ip = event.log["dst"] + event.log.destination_port = event.log["dpt"] + event.log.celery_done = "false" + end + emit(event) + end + + + """ + +# Parse input NTP +[transforms.if_{{pk}}_parse_ntp] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_firewall"] + hooks.process = """ + function (event,emit) + if event.log["signature"] == "ntppower" and event.log["name"] == "Ntp power" then + event.log.id = "armaif_{{ pk }}_ntp_match" + event.log.event_severity = event.log["severity"] + event.log.event_src_msg = event.log["message"] + event.log.device_product = event.log["deviceFacility"] + event.log.device_action = event.log["act"] + event.log.sign_id = event.log["dvcpid"] + event.log.sign_name = event.log["signature"] + event.log.sign_category = "NTP" + event.log.celery_done = "false" + event.log.source_ip= event.log["host"] + end + emit(event) + end + + """ + +#Parse input Suricata 1 +[transforms.if_{{pk}}_parse_suricata_1] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_ntp"] + hooks.process = """ + function (event,emit) + if event.log["signature"] == "idspower" then + if event.log["classification"] ~= nill then + event.log.sign_subcategory = event.log["classification"] + end + event.log.id = "armaif_1_suricata_match" + event.log.event_severity = event.log["severity"] + if event.log["msg"] ~= nil then + event.log.message = event.log["msg"] + end + + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["device_product"] + event.log.device_version = event.log["device_version"] + event.log.event_protocol = event.log["proto"] + event.log.device_action = event.log["act"] + event.log.sign_id = event.log["cs1"] + event.log.sign_name = event.log["name"] + event.log.sign_category = "IDS" + if event.log["ip_src"] ~= nil then + event.log.source_ip = event.log["ip_src"] + end + event.log.source_port = event.log["port_src"] + if event.log["ip_dst"] ~=nil then + event.log.destination_ip = event.log["ip_dst"] + end + event.log.destination_port = event.log["port_dst"] + event.log.celery_done = "false" + end + emit(event) + end + """ + +# Parse input Suricatqa 2 +[transforms.if_{{pk}}_parse_suricata_2] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_suricata_1"] + hooks.process = """ + function (event,emit) + if (event.log["device_product"] == "Suricata" and event.log["signature"] ~= "Turn off") then + event.log.id = "armaif_1_suricata_match_2" + event.log.event_severity = event.log["severity"] + if 
event.log["msg"] ~= nil then + event.log.message = event.log["msg"] + end + event.log.event_protocol = event.log["proto"] + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["device_product"] + event.log.device_version = event.log["device_version"] + event.log.sign_id = event.log["signature"] + event.log.sign_name = event.log["name"] + event.log.sign_category = "IDS" + event.log.source_ip = event.log["ip_src"] + event.log.source_port = event.log["port_src"] + event.log.destination_ip = event.log["ip_dst"] + event.log.destination_port = event.log["port_dst"] + event.log.celery_done = "false" + end + emit(event) + end + """ + + +#Parse input Suricata 3 +[transforms.if_{{pk}}_parse_suricata_3] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_suricata_2"] + hooks.process = """ + function(event,emit) + if event.log["signature"] == "idsalert" then + if event.log["classification"] ~= nil then + event.log.sign_subcategory = event.log["classification"] + end + event.log.id = "armaif_{{ pk }}_suricata_match_3" + event.log.event_severity = event.log["severity"] + if event.log["msg"] ~= nil then + event.log.message = event.log["msg"] + end + event.log.event_protocol = event.log["proto"] + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["device_product"] + event.log.device_version = event.log["device_version"] + event.log.sign_id = event.log["act"] + event.log.sign_name = event.log["signature"] + event.log.sign_category = "IDS" + if event.log["src"] ~=nil then + event.log.source_ip = event.log["src"] + end + event.log.source_port = event.log["spt"] + if event.log["dst"] ~= nil then + event.log.destination_ip = event.log["dst"] + end + event.log.destination_port = event.log["dpt"] + event.log.celery_done = "false" + end + emit(event) + end + """ + +#Parse input Suricata 4 +[transforms.if_{{pk}}_parse_suricata_4] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_suricata_3"] + hooks.process= """ + function(event,emit) + if (event.log["device_product"] == "Suricata" and event.log["signature"] == "Turn off") then + event.log.id = "armaif_{{ pk }}_suricata_match_4" + event.log.event_severity = event.log["severity"] + if event.log["msg"] ~= nil then + event.log.message = event.log["msg"] + end + event.log.event_protocol = "NULL" + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["device_product"] + event.log.device_version = event.log["device_version"] + event.log.device_action = event.log["act"] + event.log.sign_id = event.log["signature"] + event.log.sign_name = event.log["name"] + event.log.sign_category = "IDS" + event.log.source_ip = "127.0.0.1" + event.log.source_port = "0" + event.log.destination_ip = "127.0.0.1" + event.log.destination_port = "0" + event.log.celery_done = "false" + end + emit(event) + end + """ + +#Parse input Suricata 5 +[transforms.if_{{pk}}_parse_suricata_5] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_suricata_4"] + hooks.process= """ + function(event,emit) + if event.log["signature"] == "integrityalert" then + event.log.id = "armaif_{{pk}}_suricata_match_5" + event.log.event_severity = event.log["severity"] + if event.log["msg"] ~= nil then + event.log.message = event.log["msg"] + end + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["device_product"] + event.log.device_version = event.log["device_version"] + event.log.sign_id = "integrityalert" + event.log.sign_name = event.log["signature"] 
+ event.log.sign_category = "Integrity" + event.log.celery_done = "false" + end + emit(event) + end + """ + +# Parse input Web access +[transforms.if_{{pk}}_parse_web_access] + type = "lua" + version="2" + inputs=["if_{{pk}}_parse_suricata_5"] + hooks.process= """ + function(event,emit) + if event.log["signature"] == "accessalert" then + event.log.id = "armaif_{{ pk }}_web_access_match" + event.log.event_severity = event.log["severity"] + event.log.event_src_msg = event.log["message"] + if event.log["msg"] ~= nil then + event.log.message = event.log["msg"] + end + event.log.event_protocol = event.log["app"] + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["device_product"] + event.log.device_version = event.log["device_version"] + event.log.device_action = event.log["act"] + event.log.sign_id = event.log["signature"] + event.log.sign_category = "HTTP" + event.log.sign_subcategory = "Access" + event.log.sign_name = event.log["name"] + event.log.source_ip = event.log["src"] + event.log.destination_ip = event.log["dst"] + event.log.celery_done = "false" + end + emit(event) + end + """ +# Parse input Web Auth +[transforms.if_{{pk}}_parse_web_auth] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_web_access"] + hooks.process = """ + function(event,emit) + if ( event.log["signature"] == "webauth" and event.log["name"] == "Web authentication") then + event.log.id = "armaif_{{ pk }}_web_auth_match" + event.log.event_severity = event.log["severity"] + event.log.event_src_msg = event.log["message"] + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["name"] + event.log.device_version = event.log["device_version"] + event.log.device_action = "Auth" + event.log.sign_id = event.log["signature"] + event.log.sign_name = event.log["name"] + event.log.sign_category = "HTTP" + event.log.sign_subcategory = "Auth" + event.log.source_ip = event.log["src"] + event.log.source_user = event.log["suser"] + event.log.celery_done = "false" + end + emit(event) + end + """ + +# Parse input lighttpdaccess +[transforms.if_{{pk}}_parse_lighttpdaccess] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_web_auth"] + hooks.process= """ + function(event,emit) + if event.log["signature"] == "lighttpdaccess" then + event.log.id = "armaif_{{ pk }}_lhttp_match" + event.log.event_severity = event.log["severity"] + event.log.event_src_msg = event.log["message"] + event.log.device_vendor = event.log["device_vendor"] + event.log.device_product = event.log["device_product"] + event.log.device_version = event.log["device_version"] + event.log.sign_id = event.log["signature"] + event.log.sign_name = event.log["name"] + event.log.sign_category = "HTTP" + event.log.sign_subcategory = "Auth" + event.log.source_ip = event.log["src"] + event.log.destination_ip = event.log["dst"] + event.log.celery_done = "false" + end + emit(event) + end + """ + +# Parse ClamAV +[transforms.if_{{pk}}_parse_clam] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_lighttpdaccess"] + hooks.process= """ + function(event,emit) + if event.log["signature"] == "clamav_alert" then + event.log.id = "armaif_{{ pk }}_clam_match" + event.log.event_severity = event.log["severity"] + event.log.event_src_msg = event.log["message"] + event.log.request_url = event.log["cs1"] + event.log.sign_name = 'CLAMAV alert' + event.log.sign_category = 'HTTP' + event.log.virus_name = event.log["cs2"] + event.log.device_action = event.log["act"] + event.log.source_ip = event.log["src"] + event.log.celery_done = "false" + end + emit(event) + end + """ + + +# Parse NTP sync +[transforms.if_{{pk}}_parse_ntpsync] + type="lua" + version="2" + inputs=["if_{{pk}}_parse_clam"] + hooks.process= """ + function(event,emit) + if event.log["signature"] == "ntpmanualsync" then + event.log.id = "armaif_{{ pk }}_ntpsync_match" + event.log.event_severity = event.log["severity"] + event.log.event_src_msg = event.log["message"] + event.log.device_product = event.log["deviceFacility"] + event.log.message = event.log["msg"] + event.log.sign_name = event.log["name"] + event.log.sign_category = 'NTP' + event.log.attempts_count = event.log["cs1"] + event.log.celery_done = "false" + end + emit(event) + end + """ + + +# Create UUID +[transforms.if_{{pk}}_create_uuid] + type = "remap" + inputs=["if_{{pk}}_parse_ntpsync"] + source = """ + .event_id = uuid_v4() + + """ + + +# Replace NUL bytes left over in the original message +[transforms.if_{{pk}}_replace] + type="lua" + version="2" + inputs=["if_{{pk}}_create_uuid"] + hooks.process = """ + function(event,emit) + event.log.event_src_msg = event.log["orig_message"]:gsub("\0","") + event.log.orig_message = event.log["message"] + emit(event) + end + """ + +# Delete bad fields +[transforms.if_{{pk}}_prune_fields] + type="lua" + version="2" + inputs=["if_{{pk}}_replace"] + source =""" + function check_field(field) + local fields_list = {'destination_ip', + 'source_user', + 'event_severity', + 'event_protocol', + 'device_vendor', + 'event_src_msg', + 'sign_name', + 'sign_subcategory', + 'source_ip', + 'event_id', + 'device_version', + 'destination_port', + 'device_product', + 'device_action', + 'sign_id', + 'message', + 'source_port', + 'sign_category', + 'event_timestamp', + '@timestamp', + 'format', + 'type', + 'source_host', + 'destination_host', + 'request_url', + 'virus_name', + + } + for key,value in pairs(fields_list) do + if value == field then + return true + end + end + return false + + end + function process (event,emit) + for f, v in pairs(event.log) do + if check_field(f) ~= true then + event.log[f] = nil + end + end + + emit(event) + end + """ + hooks.process="process" + +# Cast variables to the right types +[transforms.if_{{pk}}_cast_types] + type="remap" + inputs = ["if_{{pk}}_prune_fields"] + source = ''' + .event_uuid = .event_id + .source_port = to_int!(.source_port) + .destination_port = to_int!(.destination_port) + .aggregated = to_bool!(.aggregated) + .event_severity = to_int!(.event_severity) + .event_timestamp = to_timestamp!(.event_timestamp) + .@timestamp = to_timestamp!(.@timestamp) + ''' + + +[transforms.if_{{pk}}_delete_null_values] + type="lua" + version="2" + inputs = ["if_{{pk}}_cast_types"] + hooks.process = """ + function(event,emit) + if event.log["source_port"] == 0 or event.log["source_port"] == "0" then + event.log.source_port = nil + + end + if event.log["destination_port"] == 0 or event.log["destination_port"] == "0" then + event.log.destination_port = nil + + end + + emit(event) + end + """ + + + +### Print parsed logs to stdout +#[sinks.print_{{pk}}] +# type = "console" +# inputs=["if_{{pk}}_delete_null_values"] +# encoding.codec ="json" +# + + + +[sinks.if_{{pk}}_elasticsearch_vector] + type = "elasticsearch" + inputs = ["if_{{pk}}_delete_null_values"] + compression = "none" + healthcheck = true + auth.strategy= "basic" + auth.user = "{{ elastic_login }}" + auth.password = "{{ elastic_pass }}" + endpoint = "{{ elastic_url }}" + normal.index = "arma-%Y.%m.%d" + id_key = "event_uuid" + + diff --git a/devices/templates/vector/config/endpoint.toml b/devices/templates/vector/config/endpoint.toml new file mode 100644 index 0000000..17173a7 --- /dev/null +++ b/devices/templates/vector/config/endpoint.toml @@ -0,0 +1,332 @@ + +# ─────────────────────────────────────────────────────────────────────────────────────────────────── +# ─██████████████─████████████████───██████──────────██████─██████████████─██████████─██████████████─ +# ─██░░░░░░░░░░██─██░░░░░░░░░░░░██───██░░██████████████░░██─██░░░░░░░░░░██─██░░░░░░██─██░░░░░░░░░░██─ +# ─██░░██████░░██─██░░████████░░██───██░░░░░░░░░░░░░░░░░░██─██░░██████░░██─████░░████─██░░██████████─ +# ─██░░██──██░░██─██░░██────██░░██───██░░██████░░██████░░██─██░░██──██░░██───██░░██───██░░██───────── +# ─██░░██████░░██─██░░████████░░██───██░░██──██░░██──██░░██─██░░██████░░██───██░░██───██░░██████████─ +# ─██░░░░░░░░░░██─██░░░░░░░░░░░░██───██░░██──██░░██──██░░██─██░░░░░░░░░░██───██░░██───██░░░░░░░░░░██─ +# ─██░░██████░░██─██░░██████░░████───██░░██──██████──██░░██─██░░██████░░██───██░░██───██░░██████████─ +# ─██░░██──██░░██─██░░██──██░░██─────██░░██──────────██░░██─██░░██──██░░██───██░░██───██░░██───────── +# ─██░░██──██░░██─██░░██──██░░██████─██░░██──────────██░░██─██░░██──██░░██─████░░████─██░░██████████─ +# ─██░░██──██░░██─██░░██──██░░░░░░██─██░░██──────────██░░██─██░░██──██░░██─██░░░░░░██─██░░░░░░░░░░██─ +# ─██████──██████─██████──██████████─██████──────────██████─██████──██████─██████████─██████████████─ +# ─────────────────────────────────────────────────────────────────────────────────────────────────── + + + + +[sources.ie_{{pk}}_socket_source] + type = "syslog" + address = '0.0.0.0:{{port}}' + mode = "udp" + + +# 1. Check if the message is CEF +[transforms.ie_{{pk}}_cef_filter] + type="filter" + inputs=["ie_{{pk}}_socket_source"] + condition = ''' match(string!(.message),r'^.*CEF:.*')''' + + +# Parse data from socket +[transforms.ie_{{pk}}_parse_logs] + type = "remap" + inputs = ["ie_{{pk}}_cef_filter"] + source = ''' + . |= parse_regex!(.message,r'CEF:\d+\|(?P<device_vendor>[^\|]*)\|(?P<device_product>[^\|]*)\|(?P<device_version>[^\|]*)\|(?P<signature>[^\|]*)\|(?P<name>[^\|]*)\|(?P<severity>[^\|]*)\|(?P<extension>.*)') + .@timestamp = now() + .@timestamp = format_timestamp!(to_timestamp(.@timestamp), format: "%+") + .aggregated = "false" + .orig_message = .message + .device_vendor = "Infowatch ARMA" + .format = "CEF" + .source_ip = .host + .source_host = .host + .event_severity = .severity + .sign_category = .signature + .event_src_msg = .message + .destination_ip = "127.0.0.1" + .extension = strip_whitespace(.extension) + .extension = strip_ansi_escape_codes(.extension) + ''' + + + +# Check device product +[transforms.ie_{{pk}}_check_device_product] + type="lua" + version="2" + inputs=["ie_{{pk}}_parse_logs"] + hooks.process =""" + function(event,emit) + if event.log["device_product"] == "ARMAIE" then + event.log.type="endpoint_{{ pk }}" + event.log.device_product = "Industrial Endpoint" + emit(event) + end + end + """ + + + +# Parse key value +[transforms.ie_{{pk}}_parse_key_value] + type="lua" + version="2" + inputs=["ie_{{pk}}_check_device_product"] + hooks.process=""" + function(event,emit) + local str = event.log["extension"] + for name, value in str:gmatch"%s*([^=]*)=([^=]*)%f[%s%z]" do + event.log[name]=value + end + emit(event) + end + """ + +{% if adjust_datetime == 1 %} +[transforms.ie_{{pk}}_made_timestamp] + type="remap" + inputs=["ie_{{pk}}_parse_key_value"] + source =""" + .event_timestamp = now() + .event_timestamp = format_timestamp!(.event_timestamp, format: "%+") + + """ + +{%else%} +[transforms.ie_{{pk}}_made_timestamp] + type="lua" + version="2" + inputs=["ie_{{pk}}_parse_key_value"] + hooks.process = """ + function(event,emit) + local date_time = tonumber(event.log["rt"]) + event.log.event_timestamp = os.date("%Y-%m-%dT%H:%M:%SZ",date_time) + local date_str = event.log["event_timestamp"]:match("^+") + if date_str ~= nil then + local m_date_time = date_time/1000 + event.log.event_timestamp = os.date("!%Y-%m-%dT%H:%M:%SZ",m_date_time) + end + + emit(event) + end + """ +{%endif%} + +# Parse White list +[transforms.ie_{{pk}}_parse_white_list] + type="lua" + version="2" + inputs = ["ie_{{pk}}_made_timestamp"] + hooks.process = """ + function (event,emit) + if event.log["name"] == "White list" then + event.log.sign_category = "Whitelist" + event.log.sign_name = event.log["act"]..":"..event.log["filePath"].."->"..event.log["cat"] + event.log.device_action = "BLOCK" + event.log.sign_subcategory = "ACCESS DENIED" + + end + emit(event) + end + """ + +# Parse Integrity control +[transforms.ie_{{pk}}_parse_integrity_control] + type="lua" + version="2" + inputs=["ie_{{pk}}_parse_white_list"] + hooks.process=""" + function(event,emit) + if event.log["name"] == "Integrity control" then + event.log.sign_name = event.log["act"]..":"..event.log["filePath"]..","..event.log["fname"] + event.log.sign_category = "Integrity control" + end + emit(event) + end + """ + +# Parse USB +[transforms.ie_{{pk}}_parse_usb] + type="lua" + version="2" + inputs=["ie_{{pk}}_parse_integrity_control"] + hooks.process = """ + function(event,emit) + if event.log["name"] == "USB" then + event.log.sign_name = "USB status="..event.log["act"] + event.log.sign_category = "Usb devices" + event.log.device_action = "Usb action" + event.log.sign_subcategory = event.log["msg"] + + end + emit(event) + end + """ + +# Parse antivirus +[transforms.ie_{{pk}}_parse_antivirus] + type="lua" + version="2" + inputs=["ie_{{pk}}_parse_usb"] + hooks.process = """ + function (event,emit) + if event.log["name"] == "Antivirus" then + local cs2 = event.log["cs2"] or "" -- local with a default avoids nil concatenation and state leaking between events + event.log.sign_category = "Antivirus" + event.log.device_action = event.log["act"] + event.log.sign_name = event.log["act"] + event.log.sign_subcategory = event.log["act"] + if event.log["cs1"] ~= nil then + local cs1 = event.log["cs1"] + event.log.sign_name = event.log["act"]..":"..event.log["filePath"].." "..cs1.." "..cs2 + end + + end + emit(event) + end + """ + + + + +# Check event type +[transforms.ie_{{pk}}_check_event_type] + type="lua" + version="2" + inputs=["ie_{{pk}}_parse_antivirus"] + hooks.process=""" + function(event,emit) + if event.log["type"]:match("^endpoint_*") ~= nil then + emit(event) + end + end + """ + + + + + +# Create UUID +[transforms.ie_{{pk}}_create_uuid] + type = "remap" + inputs=["ie_{{pk}}_check_event_type"] + source = """ + .event_id = uuid_v4() + + """ + +# Replace NUL bytes and keep the original message +[transforms.ie_{{pk}}_replace] + type="lua" + version="2" + inputs=["ie_{{pk}}_create_uuid"] + hooks.process = """ + function(event,emit) + event.log.event_src_msg = event.log["orig_message"]:gsub("\0","") + event.log.message = event.log["event_src_msg"] + event.log.orig_message = event.log["message"] + emit(event) + end + """ + + +# Delete bad fields +[transforms.ie_{{pk}}_prune_fields] + type="lua" + version="2" + inputs=["ie_{{pk}}_replace"] + source =""" + function check_field(field) + local fields_list = {'event_src_msg', + 'event_severity', + 'event_timestamp', + 'event_id', + 'device_vendor', + 'sign_name', + 'sign_subcategory', + 'device_version', + 'device_product', + 'device_action', + 'sign_id', + 'message', + 'sign_category', + '@timestamp', + 'source_host', + 'source_ip', + 'destination_ip', + 'format', + 'type' + } + + + + + for key,value in pairs(fields_list) do + if value == field then + return true + end + end + return false + + end + function process (event,emit) + for f, v in pairs(event.log) do + if check_field(f) ~= true then + --- print("delete-> "..f) + event.log[f] = nil + end + end + + emit(event) + end + """ + hooks.process="process" + +# Cast variables to the right types +[transforms.ie_{{pk}}_cast_types] + type="remap" + inputs = ["ie_{{pk}}_prune_fields"] + source = ''' + .event_uuid = .event_id + #.source_port = to_int!(.source_port) + #.destination_port = to_int!(.destination_port) + .aggregated = to_bool!(.aggregated) + #.celery_done = to_bool!(.celery_done) + .event_severity = to_int!(.event_severity) + .event_timestamp = to_timestamp!(.event_timestamp) + .@timestamp = to_timestamp!(.@timestamp) + ''' + + +## Print parsed logs to stdout +#[sinks.ie_print] +# type = "console" +# inputs=["ie_cast_types"] +# encoding.codec ="json" + + + + +[sinks.ie_{{pk}}_elasticsearch_vector] + type = "elasticsearch" + inputs = ["ie_{{pk}}_cast_types"] + compression = "none" + healthcheck = true + auth.strategy= "basic" + auth.user = "{{ elastic_login }}" + auth.password = "{{ elastic_pass }}" + endpoint = "{{ elastic_url }}" + normal.index = "arma-%Y.%m.%d" + id_key = "event_uuid" + + + diff --git a/devices/templates/vector/config/sensor.toml b/devices/templates/vector/config/sensor.toml new file mode 100644 index 0000000..58493e5 --- /dev/null +++ b/devices/templates/vector/config/sensor.toml @@ -0,0 +1,9 @@ +[sources.sensor_{{pk}}_vector] + type = "vector" + address = "0.0.0.0:{{port}}" + version = "1" + +[sinks.sensor_{{pk}}_logs_to_console] + type = "console" + inputs = ["sensor_{{pk}}_vector"] + encoding.codec = "json" diff --git
a/devices/tests/__init__.py b/devices/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/devices/tests/devices_utils.py b/devices/tests/devices_utils.py new file mode 100644 index 0000000..eb23cad --- /dev/null +++ b/devices/tests/devices_utils.py @@ -0,0 +1,12 @@ +class CeleryResult: + def get(self): + return 98765 # DataStorage id + + +def mock_export_csv_task(*args, **kwargs): + class CeleryTask: + + def apply_async(self, *args, **kwargs): + return CeleryResult() + + return CeleryTask() diff --git a/devices/tests/endpoint_utils.py b/devices/tests/endpoint_utils.py new file mode 100644 index 0000000..c35f3a1 --- /dev/null +++ b/devices/tests/endpoint_utils.py @@ -0,0 +1,27 @@ +def mock_vector_service_rise_exception(*args, **kwargs): + """Mock of the Vector service whose update_config and delete_config methods raise exceptions.""" + class VectorServiceMock: + + def update_config(self): + raise Exception('Update config test mock exception') + + def delete_config(self): + raise Exception('Delete config test mock exception') + + return VectorServiceMock() + + +def mock_redis_return_online(*args, **kwargs): + """Redis interface mock: get_keepalive always returns True (online).""" + class RedisInterfaceMock: + def get_keepalive(self, pk) -> bool: + return True + return RedisInterfaceMock() + + +def mock_endpoint_status_service_offline(*args, **kwargs): + """Endpoint status service mock that always reports status offline.""" + class StatusServiceMock: + def get_status(self) -> dict: + return {'status': 'offline'} + return StatusServiceMock() diff --git a/devices/tests/test_devices.py b/devices/tests/test_devices.py new file mode 100644 index 0000000..7039df8 --- /dev/null +++ b/devices/tests/test_devices.py @@ -0,0 +1,28 @@ +import pytest +from django.db import IntegrityError + +from devices.models.device import Device +from devices.models.firewall import ArmaIndustrialFirewall + +@pytest.mark.unit +@pytest.mark.django_db +class TestDevices: + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + self.admin_user = django_user_model.objects.get(username='admin') + Device.objects.create(ip='1.1.1.1', port='1500', type='firewall') + + def test_creating_device_and_firewall_with_same_port(self): + device = Device.objects.create(ip='2.2.2.2', port='2500', type='endpoint') + FIREWALL_DATA = { + "name": "ADD IFTEST", + "ip": "192.168.56.103", + "key": "nWM0Pnj4w3DJHbkfIRQ2CbUdqIc0TMUYIHohRCSqWJ5TycVfLo3JlIyurmOXN7MaRMQv/hlUIPbD89Ng", + "secret": "veREg8dbHC/V4hSCi6LBzuQ0NF5eeS/50d7K7Ahut6X0N/77peVQE5ucIJ/fyKhp0RNlbCHEcen2Rk8U", + "port": "2500", + "type": 'firewall' + } + with pytest.raises(IntegrityError) as e: + ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA) + assert 'Key (port)=(2500) already exists.' in str(e.value) diff --git a/devices/tests/test_devices_api.py b/devices/tests/test_devices_api.py new file mode 100644 index 0000000..32444d0 --- /dev/null +++ b/devices/tests/test_devices_api.py @@ -0,0 +1,122 @@ +from unittest.mock import patch + +import pytest +from rest_framework import status +from rest_framework.reverse import reverse + +from devices.enums import DeviceType +from devices.models.device import Device +from devices.models.endpoint_device import EndpointModel +from devices.tests.devices_utils import mock_export_csv_task +from devices.tests.endpoint_utils import mock_redis_return_online + +TMP_DIR_VECTOR = '/tmp/vector' + + +@patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) +@pytest.mark.integration +@pytest.mark.django_db +class TestDevicesAPI: + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + self.admin_user = django_user_model.objects.get(username='admin') + Device.objects.create(ip='1.1.1.1', port='1500', type='firewall') + + def test_getting_list_of_device(self, api_client): + api_client.force_authenticate(self.admin_user) + response = api_client.get(reverse('device-list')) + assert response.data['count'] == 1 + assert response.data['results'][0]['ip'] == '1.1.1.1' + assert response.data['results'][0]['port'] == 1500 + + def test_getting_device_with_search(self, api_client): + api_client.force_authenticate(self.admin_user) + Device.objects.create(ip='2.2.2.2', port='2500', type='firewall', name='Good device') + Device.objects.create(ip='3.3.3.3', port='9999', type='firewall', name='Bad device') + response = api_client.get(reverse('device-list')) + assert response.data['count'] == 3 + + response = api_client.get(reverse('device-list'), {'search': 'good'}) + assert response.data['count'] == 1 + assert response.data['results'][0]['ip'] == '2.2.2.2' + + def test_getting_device(self, api_client): + api_client.force_authenticate(self.admin_user) + device = Device.objects.create(ip='2.2.2.2', port='2500', type='firewall') + response = api_client.get(reverse('device-detail', kwargs={'pk': device.pk})) + assert response.data['ip'] == '2.2.2.2' + assert response.data['port'] == 2500 + + def test_updating_device_with_valid_data(self, api_client): + api_client.force_authenticate(self.admin_user) + device = Device.objects.create(ip='2.2.2.2', port='2500', type='firewall') + response = api_client.patch( + reverse('device-detail', kwargs={'pk': device.pk}), + data={'port': 60000, 'ip': '3.3.3.3'} + ) + assert response.status_code == status.HTTP_200_OK + assert response.data['ip'] == '3.3.3.3' + assert response.data['port'] == 60000 + + device = Device.objects.get(pk=device.pk) + assert device.ip == '3.3.3.3' + assert device.port == 60000 + + def test_updating_device_with_invalid_data(self, api_client): + api_client.force_authenticate(self.admin_user) + device = Device.objects.create(ip='2.2.2.2', port='2500', type='firewall') + response = api_client.patch( + reverse('device-detail', kwargs={'pk': device.pk}), + data={'port': 66666} # invalid port + ) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'port' in response.data + assert response.json()['port'] == ['Ensure this value is less than or equal to 65535.'] + + # device doesn't update + new_device = Device.objects.get(pk=device.pk) + assert new_device.ip == '2.2.2.2' + assert new_device.port == 2500 + + def test_deleting_device(self, api_client): + api_client.force_authenticate(self.admin_user) + device = Device.objects.create(ip='2.2.2.2', port='2500', type='firewall') + + response = api_client.delete( + reverse('device-detail', kwargs={'pk': device.pk}), + ) + assert response.status_code == status.HTTP_204_NO_CONTENT + + response = api_client.get( + reverse('device-detail', kwargs={'pk': device.pk}), + ) + assert response.status_code == status.HTTP_404_NOT_FOUND + + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + def test_endpoint_status_in_device_list(self, _, api_client): + """Check that an endpoint appears in the device list API with its online status.""" + Device.objects.all().delete() + data = { + 'name': 'test_endpoint', + 'type': DeviceType.ENDPOINT, + 'ip': '127.0.0.1', + 'port': '5555', + 'settings_changed': False, + 'request_config': False, + } + endpoint = EndpointModel.objects.create(**data) + api_client.force_authenticate(self.admin_user) + response = api_client.get(reverse('device-list')) + assert response.data['count'] == 1 + endpoint_data = response.data['results'][0] + assert endpoint_data['id'] == endpoint.pk + assert endpoint_data['status'] == {'status': 'online'} + + @patch('core.mixins.export_task', mock_export_csv_task()) + def test_export_devices_to_csv(self, api_client): + api_client.force_authenticate(self.admin_user) + response = api_client.get(reverse('device-csv-export')) + + assert response.status_code == status.HTTP_302_FOUND + assert response.url == reverse('store-download', kwargs={'pk': 98765}) # pk defined in mock function diff --git a/devices/tests/test_endpoint_api.py b/devices/tests/test_endpoint_api.py new file mode 100644 index 0000000..3da57e5 --- /dev/null +++ b/devices/tests/test_endpoint_api.py @@ -0,0 +1,325 @@ +import glob +import os +from unittest.mock import patch + +import pytest +from django.conf import settings +from django.urls import reverse + +from devices.enums import DeviceType +from devices.models.device import DeviceGroup +from devices.models.endpoint_device import EndpointModel +from devices.tests.endpoint_utils import mock_vector_service_rise_exception, mock_redis_return_online, \ + mock_endpoint_status_service_offline +from rest_framework import status + +TMP_DIR_VECTOR = '/tmp/endpoint/vector' +TEST_FILES = os.path.join(settings.BASE_DIR, 'devices/tests/test_files/') + + +@pytest.mark.django_db +class TestEndpointAPI: + """Endpoint api tests.""" + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + DeviceGroup.objects.bulk_create({ + DeviceGroup(id=1, name='Group 1'), + DeviceGroup(id=2, name='Group 2') + }) + self.data = { + 'name': 'test_endpoint', + 'type': DeviceType.ENDPOINT, + 'ip': '127.0.0.1', + 'port': '5555', + 'settings_changed': False, + 'request_config': False, + 'group': DeviceGroup.objects.first() + } + self.user = django_user_model.objects.get(username='admin') + os.makedirs(TMP_DIR_VECTOR, exist_ok=True) + yield + files = glob.glob(f'{TMP_DIR_VECTOR}/*') + for file in files: + os.remove(os.path.join(TMP_DIR_VECTOR, file)) + + @pytest.fixture + def create_antivirus_database(self, api_client): + api_client.force_authenticate(self.user) + file_path = os.path.join(TEST_FILES, 'antivirus_update.zip') + file = open(file_path, 'rb') + url = reverse('store-antivirus') + data = {'file': file} + api_client.post(url, data) + + def test_list_api(self, api_client): + """Test to check the list api url.""" + EndpointModel.objects.create(**self.data) + endpoint_count = EndpointModel.objects.count() + api_client.force_authenticate(self.user) + response = api_client.get(reverse('endpoint_api-list')) + assert response.status_code == 200 + assert response.json()['count'] == endpoint_count + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + def test_api_create_valid(self, _, api_client): + """Test creating an endpoint via the api url.""" + assert not EndpointModel.objects.exists() + api_client.force_authenticate(self.user) + data = self.data.copy() + data['group'] = 1 + data.pop('settings_changed') + response = api_client.post(reverse('endpoint_api-list'), data=data, format='json') + assert response.status_code == 201 + response_data = response.json() + assert response_data['status'] == {'status': 'online'} + assert EndpointModel.objects.count() == 1 + endpoint = EndpointModel.objects.last() + assert endpoint.settings_changed + assert endpoint.antivirus_update_db + + @pytest.mark.skip('temporarily disabled to deal with transactions') + @patch('devices.services.endpoint.endpoint_services.VectorService', side_effect=mock_vector_service_rise_exception) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + def test_api_create_raise_err(self, vector_mock, redis_mock, api_client): + """Test atomic endpoint creation: after the exception no endpoint is created.""" + assert not EndpointModel.objects.exists() + api_client.force_authenticate(self.user) + data = self.data.copy() + data['group'] = 1 + response = api_client.post(reverse('endpoint_api-list'), data=data, format='json') + assert response.status_code == 400 + response_data = response.json() + assert response_data['detail'] == 'Update config test mock exception' + assert not EndpointModel.objects.exists() + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + def test_api_update_valid(self, _, api_client): + """Test updating an endpoint via the api url.""" + assert not EndpointModel.objects.exists() + data = self.data.copy() + endpoint = EndpointModel.objects.create(**data) + api_client.force_authenticate(self.user) + data = self.data.copy() + data['port'] = '7777' + data['group'] = 1 + response = api_client.patch(reverse('endpoint_api-detail', args=[endpoint.pk]), data=data, format='json') + assert response.status_code == 200 + response_data = response.json() + assert response_data['status'] == {'status': 'online'} + assert EndpointModel.objects.count() == 1 + endpoint_after = EndpointModel.objects.get(pk=endpoint.pk) + assert endpoint_after.port == 7777 + assert endpoint_after.settings_changed + + @pytest.mark.skip('temporarily disabled to deal with transactions') + @patch('devices.services.endpoint.endpoint_services.VectorService', side_effect=mock_vector_service_rise_exception) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + def test_api_update_raise_err(self, vector_mock, redis_mock, api_client): + """Test atomic endpoint update: after the exception the endpoint is not updated.""" + assert not EndpointModel.objects.exists() + endpoint = EndpointModel.objects.create(**self.data) + api_client.force_authenticate(self.user) + data = self.data.copy() + data['group'] = 1 + data['port'] = '7777' + response = api_client.patch(reverse('endpoint_api-detail', args=[endpoint.pk]), data=data, format='json') + assert response.status_code == 400 + response_data = response.json() + assert response_data['detail'] == 'Update config test mock exception' + assert EndpointModel.objects.count() == 1 + endpoint_after = EndpointModel.objects.get(pk=endpoint.pk) + assert endpoint_after.port == 5555 + assert not endpoint_after.settings_changed + + @pytest.mark.unit + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + def test_updating_ie_null_group(self, _, api_client): + api_client.force_authenticate(self.user) + device_group = DeviceGroup.objects.first() + data = self.data.copy() + data['group'] = device_group + endpoint = EndpointModel.objects.create(**data) + url = reverse('endpoint_api-detail', args=[endpoint.id]) + data['group'] = '' + response = api_client.patch(url, data=data) + assert response.status_code == status.HTTP_200_OK + assert response.data['group'] is None + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + @pytest.mark.unit + def test_api_scan_paths_duplicates(self, _, api_client): + """Test updating endpoint scan paths duplicate check.""" + assert not EndpointModel.objects.exists() + data = self.data.copy() + endpoint = EndpointModel.objects.create(**data) + api_client.force_authenticate(self.user) + data['group'] = DeviceGroup.objects.first().pk + data['scan_paths'] = ['C:\\1\\1.txt', 'C:\\1\\1.txt'] + response = api_client.patch(reverse('endpoint_api-detail', args=[endpoint.pk]), data=data, format='json') + assert response.status_code == 400 + assert 'scan_paths' in response.data + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) + def test_api_destroy_valid(self, api_client): + """Test deleting an endpoint via the api.""" + assert not EndpointModel.objects.exists() + endpoint = EndpointModel.objects.create(**self.data) + api_client.force_authenticate(self.user) + response = api_client.delete(reverse('endpoint_api-detail', args=[endpoint.pk])) + assert response.status_code == 204 + + @patch('devices.services.endpoint.endpoint_services.RedisInterface') + def test_keepalive_api(self, _, api_client): + """Test for returning data in keepalive api. 
Test error and ok statuses.""" + endpoint = EndpointModel.objects.create(**self.data) + url = reverse('endpoint_api-keepalive', args=[endpoint.pk]) + not_valid_url = reverse('endpoint_api-keepalive', args=[0]) + + api_client.force_authenticate(self.user) + response = api_client.get(not_valid_url, data={}) + assert response.status_code == 200 + assert response.json() == {'status': 'error', 'reason': 'no such endpoint record'} + + response = api_client.get(url, data={}) + assert response.status_code == 200 + assert response.json() == {'status': 'error', 'error_message': 'json decode error'} + + response = api_client.post(url, data={'status': 'ok'}, format='json') + assert response.status_code == 200 + assert response.json() == {'status': 'ok'} + + def test_download_config_format_json(self, api_client): + """Test download config data api format=json. And check status `OK`""" + endpoint = EndpointModel.objects.create(**self.data) + url = reverse('endpoint_api-download', args=[endpoint.pk]) + api_client.force_authenticate(self.user) + response = api_client.get(url) + assert response.status_code == 200 + data = response.json() + assert 'status' in data + assert data['status'] == 'ok' + assert 'config' in data + + def test_download_config_format_api(self, api_client): + """Test download config file with format=api.""" + endpoint = EndpointModel.objects.create(**self.data) + url = reverse('endpoint_api-download', args=[endpoint.pk]) + '?format=api' + api_client.force_authenticate(self.user) + response = api_client.get(url) + assert response.status_code == 200 + assert response.headers['Content-Type'] == 'application/file' + assert response.headers['Content-Disposition'] == f'attachment; filename="endpoint_config_{endpoint.pk}.json"' + assert isinstance(response.content, bytes) + + def test_download_config_404(self, api_client): + """Test download endpoint config with no-exists id.""" + url = reverse('endpoint_api-download', args=[0]) + api_client.force_authenticate(self.user) + response = api_client.get(url) + assert response.status_code == 404 + + def test_upload_valid_config(self, api_client): + """Test upload valid config data from endpoint.""" + endpoint = EndpointModel.objects.create(**self.data) + url = reverse('endpoint_api-upload', args=[endpoint.pk]) + api_client.force_authenticate(self.user) + data = { + 'wl_enable': True, + 'antivirus_enabled': True, + 'scan_paths': ['path1', 'path2'], + 'integrity_control_timeout': '123' + } + response = api_client.post(url, data=data, format='json') + assert response.status_code == 200 + assert response.json() == {'status': 'ok'} + + def test_upload_not_valid_config(self, api_client): + """Test upload not valid endpoint config and return error status""" + endpoint = EndpointModel.objects.create(**self.data) + url = reverse('endpoint_api-upload', args=[endpoint.pk]) + api_client.force_authenticate(self.user) + response = api_client.post(url, {}) + assert response.status_code == 200 + assert response.json() == {'status': 'error', 'error_message': 'json decode error'} + + def test_upload_config_to_not_exist_endpoint(self, api_client): + """Test return status with non exist endpoint ID.""" + url = reverse('endpoint_api-upload', args=[0]) + api_client.force_authenticate(self.user) + response = api_client.post(url, data={}) + assert response.status_code == 200 + assert response.json() == {'status': 'error', 'reason': 'no such endpoint record'} + + @patch('devices.services.endpoint.endpoint_services.EndpointStatusService') + def 
test_endpoint_config_request_valid(self, _, api_client): + """Test set request config in True when endpoint status `online`.""" + endpoint = EndpointModel.objects.create(**self.data) + url = reverse('endpoint_api-config-request', args=[endpoint.pk]) + api_client.force_authenticate(self.user) + response = api_client.get(url) + assert response.status_code == 200 + assert response.json() == {'status': 'ok'} + endpoint_after = EndpointModel.objects.get(pk=endpoint.pk) + assert endpoint_after.request_config + + def test_endpoint_config_request_404(self, api_client): + """Test raise status 404 if endpoint not-exists""" + url = reverse('endpoint_api-config-request', args=[0]) + api_client.force_authenticate(self.user) + response = api_client.get(url) + assert response.status_code == 404 + + @patch('devices.services.endpoint.endpoint_services.EndpointStatusService', + side_effect=mock_endpoint_status_service_offline) + def test_endpoint_config_request_not_valid(self, _, api_client): + """Test raise 400 error if endpoint is offline""" + endpoint = EndpointModel.objects.create(**self.data) + url = reverse('endpoint_api-config-request', args=[endpoint.pk]) + api_client.force_authenticate(self.user) + response = api_client.get(url) + assert response.status_code == 400 + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + def test_api_change_group(self, _, api_client): + """Test change endpoint group api url.""" + data = self.data.copy() + endpoint = EndpointModel.objects.create(**data) + api_client.force_authenticate(self.user) + data['group'] = DeviceGroup.objects.last().pk + response = api_client.patch(reverse('endpoint_api-detail', args=[endpoint.pk]), data=data, format='json') + assert response.status_code == 200 + endpoint = EndpointModel.objects.last() + assert endpoint.group == DeviceGroup.objects.last() + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR) + @patch('devices.services.endpoint.endpoint_get_status.RedisInterface', side_effect=mock_redis_return_online) + @pytest.mark.unit + def test_api_antivirus_get_update_if_available(self, _, api_client, create_antivirus_database): + """Test Endpoint antivirus update required""" + assert not EndpointModel.objects.exists() + data = self.data.copy() + data['antivirus_update_db'] = 'True' + endpoint = EndpointModel.objects.create(**data) + api_client.force_authenticate(self.user) + url = reverse('endpoint_api-antivirus-update', kwargs={'pk': endpoint.pk}) + response = api_client.get(url) + assert response.status_code == 200 + assert EndpointModel.objects.get(pk=endpoint.pk).antivirus_update_db is False + + @pytest.mark.unit + def test_api_antivirus_get_update_if_not_available(self, api_client): + """Test Endpoint antivirus no update required""" + assert not EndpointModel.objects.exists() + data = self.data.copy() + data['antivirus_update_db'] = 'False' + endpoint = EndpointModel.objects.create(**data) + api_client.force_authenticate(self.user) + url = reverse('endpoint_api-antivirus-update', kwargs={'pk': endpoint.pk}) + response = api_client.get(url) + assert response.status_code == 400 diff --git a/devices/tests/test_endpoint_device_service.py b/devices/tests/test_endpoint_device_service.py new file mode 100644 index 0000000..587ae8a --- /dev/null +++ b/devices/tests/test_endpoint_device_service.py @@ -0,0 +1,201 @@ +import glob +import os +import tempfile +from unittest.mock import patch + +import 
pytest + +from devices.enums import DeviceType, EndpointRotationType +from devices.models.endpoint_device import EndpointModel +from devices.services.endpoint.endpoint_services import EndpointManagementService, EndpointDownloadConfigService, \ + EndpointUploadConfigService + +TMP_DIR_VECTOR = tempfile.TemporaryDirectory() + + +@pytest.mark.django_db +@pytest.mark.unit +class TestEndpointManagementService: + """Test EndpointManagementService methods.""" + + @pytest.fixture(autouse=True) + def setup_test(self): + os.makedirs(TMP_DIR_VECTOR.name, exist_ok=True) + self.endpoint = EndpointModel.objects.create( + type=DeviceType.ENDPOINT, ip='127.0.0.1', port=5555, + ) + yield + files = glob.glob(f'{TMP_DIR_VECTOR.name}/*') + for file in files: + os.remove(os.path.join(TMP_DIR_VECTOR.name, file)) + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR.name) + def test_create(self): + """Test that creating an Endpoint with valid data creates the vector config.""" + assert not len(os.listdir(TMP_DIR_VECTOR.name)) + service = EndpointManagementService(self.endpoint) + service.create() + assert len(os.listdir(TMP_DIR_VECTOR.name)) == 1 + file_name = os.path.join(TMP_DIR_VECTOR.name, f'endpoint_{self.endpoint.pk}.toml') + assert os.path.exists(file_name) + with open(file_name, 'r') as file: + data = file.read() + assert f'0.0.0.0:{self.endpoint.port}' in data + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR.name) + def test_update(self): + """Test that updating an Endpoint with valid data updates the vector config.""" + assert not len(os.listdir(TMP_DIR_VECTOR.name)) + service = EndpointManagementService(self.endpoint) + service.create() + assert len(os.listdir(TMP_DIR_VECTOR.name)) == 1 + self.endpoint.port = 7777 + self.endpoint.save() + service.update() + assert len(os.listdir(TMP_DIR_VECTOR.name)) == 1 + file_name = os.path.join(TMP_DIR_VECTOR.name, f'endpoint_{self.endpoint.pk}.toml') + assert os.path.exists(file_name) + with open(file_name, 'r') as file: + data = file.read() + assert '0.0.0.0:7777' in data + + @patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR.name) + def test_destroy(self): + """Test that destroying an endpoint deletes its vector config.""" + assert not len(os.listdir(TMP_DIR_VECTOR.name)) + service = EndpointManagementService(self.endpoint) + service.create() + assert len(os.listdir(TMP_DIR_VECTOR.name)) == 1 + service.destroy() + assert not len(os.listdir(TMP_DIR_VECTOR.name)) + + +@pytest.mark.django_db +@pytest.mark.unit +class TestEndpointDownloadConfigService: + @pytest.fixture(autouse=True) + def setup_test(self): + self.endpoint = EndpointModel.objects.create( + type=DeviceType.ENDPOINT, + ip='127.0.0.1', + port=5555, + event_rotation_type=EndpointRotationType.SIZE, + ) + + def test_convert_endpoint_settings_to_dict(self): + service = EndpointDownloadConfigService(self.endpoint.pk) + + data_dict = service.save_endpoint_settings_to_dict() + assert isinstance(data_dict, dict) + assert 'rotation' in data_dict + assert 'device_control' in data_dict + assert 'integrity_control' in data_dict + assert 'white_list' in data_dict + assert 'usb_control' in data_dict + assert 'antivirus' in data_dict + + assert data_dict['rotation']['type'] == EndpointRotationType.SIZE.value + assert data_dict['rotation']['size'] == 100 + + +@pytest.mark.django_db +@pytest.mark.unit +class TestEndpointUploadConfigService: + @pytest.fixture(autouse=True) + def setup_test(self): + self.endpoint = EndpointModel.objects.create( + type=DeviceType.ENDPOINT, + 
ip='127.0.0.1', + port=5555, + event_rotation_type=EndpointRotationType.SIZE, + + device_control_enabled=True, + + integrity_control_enabled=True, + scan_paths=[], + integrity_control_timeout=10, + + whitelist_enabled=True, + whitelist_admin=True, + white_list_paths=[], + + antivirus_enabled=True, + antivirus_paths=[], + antivirus_remove_infected_files=True, + ) + + self.raw_data = b'{"dc_enabled":false,"dc_apply":true,"prohibit_floppy_read":false,' \ + b'"prohibit_floppy_write":false,"prohibit_cd_enabled":false,"prohibit_removable_read":false,' \ + b'"prohibit_removable_write":false,"prohibit_tape_read":false,"prohibit_tape_write":false,' \ + b'"prohibit_wpd_read":false,"prohibit_wpd_write":false,"ic_enabled":false,"scan_folders":[],' \ + b'"ic_timeout":30,"rescan_enabled":false,"rescan_timeout":1000,"wl_enable":false,' \ + b'"wl_appy":true,"wl_admin":false,"white_list":[' \ + b'"%HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows ' \ + b'NT\\\\CurrentVersion\\\\SystemRoot%",' \ + b'"%HKEY_LOCAL_MACHINE\\\\SOFTWARE\\\\Microsoft\\\\Windows\\\\CurrentVersion' \ + b'\\\\ProgramFilesDir%"],"gui_enabled":true,"gui_port":4509,"updated":"","ip":"",' \ + b'"usb_control_enabled":false,"usb_allowed_storage":[],"usb_allowed_classes":[],' \ + b'"usb_hid_allow_other_subclasses":false,"usb_hid_allowed_subclasses":[],' \ + b'"usb_hid_deny_subclasses":[2,4,6],"usb_connected":[],"clamav_enabled":false,' \ + b'"clamav_scan_on_add":false,"clamav_live_scan":false,"clamav_stop_all_tasks":false,' \ + b'"clamav_paths":null,"clamav_remove_infected_files":false,"clamav_last_update":"09-01-2022",' \ + b'"event_rotation_type":1,"event_rotation_size":100,"event_rotation_period":1,' \ + b'"event_rotation_time":"12:57:01"}' + + def test_preparation_data(self): + service = EndpointUploadConfigService(self.endpoint.pk, self.raw_data) + + assert 'device_control_enabled' in service.data + assert not service.data['device_control_enabled'] + + assert 'integrity_control_enabled' in service.data + assert not service.data['integrity_control_enabled'] + + assert 'scan_paths' in service.data + assert service.data['scan_paths'] == [] + + assert 'integrity_control_timeout' in service.data + assert service.data['integrity_control_timeout'] == 30 + + assert 'whitelist_enabled' in service.data + assert not service.data['whitelist_enabled'] + + assert 'whitelist_admin' in service.data + assert not service.data['whitelist_admin'] + + assert 'white_list_paths' in service.data + 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\SystemRoot%' + assert service.data['white_list_paths'] == [ + "%HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\SystemRoot%", + "%HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\ProgramFilesDir%"] + + assert 'antivirus_enabled' in service.data + assert not service.data['antivirus_enabled'] + + assert 'antivirus_paths' in service.data + assert service.data['antivirus_paths'] is None + + assert 'antivirus_remove_infected_files' in service.data + assert not service.data['antivirus_remove_infected_files'] + + def test_valid_update_data(self): + service = EndpointUploadConfigService(self.endpoint.pk, self.raw_data) + result = service.upload() + assert result['status'] == 'ok' + endpoint = EndpointModel.objects.get(pk=self.endpoint.pk) + + assert not endpoint.device_control_enabled + + assert not endpoint.integrity_control_enabled + assert endpoint.scan_paths == [] + assert endpoint.integrity_control_timeout == 30 + + assert not endpoint.whitelist_enabled + 
assert not endpoint.whitelist_admin + assert endpoint.white_list_paths == [ + "%HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\SystemRoot%", + "%HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\ProgramFilesDir%"] + + assert not endpoint.antivirus_enabled + assert endpoint.antivirus_paths is None + assert not endpoint.antivirus_remove_infected_files diff --git a/devices/tests/test_files/__init__.py b/devices/tests/test_files/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/devices/tests/test_files/bad_local.zeek b/devices/tests/test_files/bad_local.zeek new file mode 100644 index 0000000..e9845ed --- /dev/null +++ b/devices/tests/test_files/bad_local.zeek @@ -0,0 +1,116 @@ +##! Local site policy. Customize as appropriate. +##! +##! This file will not be overwritten when upgrading or reinstalling! + +# Installation-wide salt value that is used in some digest hashes, e.g., for +# the creation of file IDs. Please change this to a hard to guess value. +redef digest_salt = "Please change this value."; + +# This script logs which scripts were loaded during each run. +@load misc/loaded-scripts + +# Apply the default tuning scripts for common tuning settings. +@load tuning/defaults + +# Estimate and log capture loss. +@load misc/capture-loss + +# Enable logging of memory, packet and lag statistics. +@load misc/stats + +# Load the scan detection script. It's disabled by default because +# it often causes performance issues. +#@load misc/scan + +# Detect traceroute being run on the network. This could possibly cause +# performance trouble when there are a lot of traceroutes on your network. +# Enable cautiously. +#@load misc/detect-traceroute + +# Generate notices when vulnerable versions of software are discovered. +# The default is to only monitor software found in the address space defined +# as "local". Refer to the software framework's documentation for more +# information. +@load frameworks/software/vulnerable + +# Detect software changing (e.g. attacker installing hacked SSHD). +@load frameworks/software/version-changes + +# This adds signatures to detect cleartext forward and reverse windows shells. +@load-sigs frameworks/signatures/detect-windows-shells + +# Load all of the scripts that detect software in various protocols. +@load protocols/ftp/software +@load protocols/smtp/software +@load protocols/ssh/software +@load protocols/http/software +# The detect-webapps script could possibly cause performance trouble when +# running on live traffic. Enable it cautiously. +#@load protocols/http/detect-webapps + +# This script detects DNS results pointing toward your Site::local_nets +# where the name is not part of your local DNS zone and is being hosted +# externally. Requires that the Site::local_zones variable is defined. +@load protocols/dns/detect-external-names + +# Script to detect various activity in FTP sessions. +@load protocols/ftp/detect + +# Scripts that do asset tracking. +@load protocols/conn/known-hosts +@load protocols/conn/known-services +@load protocols/ssl/known-certs + +# This script enables SSL/TLS certificate validation. +@load protocols/ssl/validate-certs + +# This script prevents the logging of SSL CA certificates in x509.log +@load protocols/ssl/log-hostcerts-only + +# If you have GeoIP support built in, do some geographic detections and +# logging for SSH traffic. +@load protocols/ssh/geo-data +# Detect hosts doing SSH bruteforce attacks. 
+@load protocols/ssh/detect-bruteforcing
+# Detect logins using "interesting" hostnames.
+@load protocols/ssh/interesting-hostnames
+
+# Detect SQL injection attacks.
+@load protocols/http/detect-sqli
+
+#### Network File Handling ####
+
+# Enable MD5 and SHA1 hashing for all files.
+@load frameworks/files/hash-all-files
+
+# Detect SHA1 sums in Team Cymru's Malware Hash Registry.
+@load frameworks/files/detect-MHR
+
+# Extend email alerting to include hostnames
+@load policy/frameworks/notice/extend-email/hostnames
+
+# Enable logging of telemetry data into telemetry.log and
+# telemetry_histogram.log.
+@load frameworks/telemetry/log
+
+# Uncomment the following line to enable detection of the heartbleed attack. Enabling
+# this might impact performance a bit.
+# @load policy/protocols/ssl/heartbleed
+
+# Uncomment the following line to enable logging of connection VLANs. Enabling
+# this adds two VLAN fields to the conn.log file.
+# @load policy/protocols/conn/vlan-logging
+
+# Uncomment the following line to enable logging of link-layer addresses. Enabling
+# this adds the link-layer address for each connection endpoint to the conn.log file.
+# @load policy/protocols/conn/mac-logging
+
+# Uncomment this to source zkg's package state
+# @load packages
+
+
+wrong_event zeek_init() { <--- ERROR EVENT COMMAND
+    Analyzer::disable_analyzer(Analyzer::ANALYZER_DNS);
+    Analyzer::disable_analyzer(Analyzer::ANALYZER_HTTP);
+    Analyzer::disable_analyzer(Analyzer::ANALYZER_SSH);
+}
diff --git a/devices/tests/test_files/config.xml b/devices/tests/test_files/config.xml
new file mode 100644
index 0000000..d80b296
--- /dev/null
+++ b/devices/tests/test_files/config.xml
@@ -0,0 +1 @@
+this text is required for proper AIF testing
\ No newline at end of file
diff --git a/devices/tests/test_files/good_local.zeek b/devices/tests/test_files/good_local.zeek
new file mode 100644
index 0000000..1dc5744
--- /dev/null
+++ b/devices/tests/test_files/good_local.zeek
@@ -0,0 +1,115 @@
+##! Local site policy. Customize as appropriate.
+##!
+##! This file will not be overwritten when upgrading or reinstalling!
+
+# Installation-wide salt value that is used in some digest hashes, e.g., for
+# the creation of file IDs. Please change this to a hard to guess value.
+redef digest_salt = "Please change this value.";
+
+# This script logs which scripts were loaded during each run.
+@load misc/loaded-scripts
+
+# Apply the default tuning scripts for common tuning settings.
+@load tuning/defaults
+
+# Estimate and log capture loss.
+@load misc/capture-loss
+
+# Enable logging of memory, packet and lag statistics.
+@load misc/stats
+
+# Load the scan detection script. It's disabled by default because
+# it often causes performance issues.
+#@load misc/scan
+
+# Detect traceroute being run on the network. This could possibly cause
+# performance trouble when there are a lot of traceroutes on your network.
+# Enable cautiously.
+#@load misc/detect-traceroute
+
+# Generate notices when vulnerable versions of software are discovered.
+# The default is to only monitor software found in the address space defined
+# as "local". Refer to the software framework's documentation for more
+# information.
+@load frameworks/software/vulnerable
+
+# Detect software changing (e.g. attacker installing hacked SSHD).
+@load frameworks/software/version-changes
+
+# This adds signatures to detect cleartext forward and reverse windows shells.
+@load-sigs frameworks/signatures/detect-windows-shells
+
+# Load all of the scripts that detect software in various protocols.
+@load protocols/ftp/software
+@load protocols/smtp/software
+@load protocols/ssh/software
+@load protocols/http/software
+# The detect-webapps script could possibly cause performance trouble when
+# running on live traffic. Enable it cautiously.
+#@load protocols/http/detect-webapps
+
+# This script detects DNS results pointing toward your Site::local_nets
+# where the name is not part of your local DNS zone and is being hosted
+# externally. Requires that the Site::local_zones variable is defined.
+@load protocols/dns/detect-external-names
+
+# Script to detect various activity in FTP sessions.
+@load protocols/ftp/detect
+
+# Scripts that do asset tracking.
+@load protocols/conn/known-hosts
+@load protocols/conn/known-services
+@load protocols/ssl/known-certs
+
+# This script enables SSL/TLS certificate validation.
+@load protocols/ssl/validate-certs
+
+# This script prevents the logging of SSL CA certificates in x509.log
+@load protocols/ssl/log-hostcerts-only
+
+# If you have GeoIP support built in, do some geographic detections and
+# logging for SSH traffic.
+@load protocols/ssh/geo-data
+# Detect hosts doing SSH bruteforce attacks.
+@load protocols/ssh/detect-bruteforcing
+# Detect logins using "interesting" hostnames.
+@load protocols/ssh/interesting-hostnames
+
+# Detect SQL injection attacks.
+@load protocols/http/detect-sqli
+
+#### Network File Handling ####
+
+# Enable MD5 and SHA1 hashing for all files.
+@load frameworks/files/hash-all-files
+
+# Detect SHA1 sums in Team Cymru's Malware Hash Registry.
+@load frameworks/files/detect-MHR
+
+# Extend email alerting to include hostnames
+@load policy/frameworks/notice/extend-email/hostnames
+
+# Enable logging of telemetry data into telemetry.log and
+# telemetry_histogram.log.
+@load frameworks/telemetry/log
+
+# Uncomment the following line to enable detection of the heartbleed attack. Enabling
+# this might impact performance a bit.
+# @load policy/protocols/ssl/heartbleed
+
+# Uncomment the following line to enable logging of connection VLANs. Enabling
+# this adds two VLAN fields to the conn.log file.
+# @load policy/protocols/conn/vlan-logging
+
+# Uncomment the following line to enable logging of link-layer addresses. Enabling
+# this adds the link-layer address for each connection endpoint to the conn.log file.
+# @load policy/protocols/conn/mac-logging
+
+# Uncomment this to source zkg's package state
+# @load packages
+
+
+event zeek_init() {
+    Analyzer::disable_analyzer(Analyzer::ANALYZER_DNS);
+    Analyzer::disable_analyzer(Analyzer::ANALYZER_HTTP);
+}
diff --git a/devices/tests/test_files/live_if_config.xml b/devices/tests/test_files/live_if_config.xml
new file mode 100644
index 0000000..090709c
--- /dev/null
+++ b/devices/tests/test_files/live_if_config.xml
@@ -0,0 +1,1182 @@
+---- BEGIN config.xml ----
+Version: InfoWatch ARMA Industrial Firewall 3.7.2-dev.29
+Cipher: AES-256-CBC
+Hash: MD5
+
+U2FsdGVkX18WQO8VZAOZPmcrqXVRysUB8w1AzwiGY3Q5SElxt6NOiRg2YCiRdYgJpZbdPayQRga8
+[... base64-encoded AES-256-CBC ciphertext continues for the remainder of the 1,182-line file ...]
+VdCLY+kbO6x0+pfh7+uj+1PQASnSfbQgfHx7hzvZQT4bxwcQeb1zeXwiozuo+kLPC+2HRBB6P6Yp +iNrsZ3lv0S3vYxci6hhLTwRMmvGTPx/U65kwBWYvj+hXsrrC64jVh7iIrKqM+bqniPjtMLIRhdc2 +TOUgZURlc1sZRvsHDPL4tTIA6UHn0aw6aMwn2wocQRbUbr8uixpuoSBkoN76CSV32UsVUZMaSyNT +X/iY5RS00HcYTbZVzwZq7fB1mOQh5EKerxvlLZpjkzZ5JKQdQDIibDXMCgiFgLwAdOt1xd65bzZD +2zRZ4b5fR3FRiquqIoQ8j2KKU2tXXVuSDJDWhuWTU9GQmyfK0Tyr2H8xpnBDrxQUgw9+GITId1CG +Ww6NXzyUqdJqN4A0EUhPtLGhln6Ul7K8PA4U4IthZwWP+dbliynx7vc03PC/nmeK6d95rsDWIDNu +L9gUegTw8MoMHW5tgehK/naDO4rTDKbXTkVvhRfvxND72daSYnSZNLvInMhFnZheir88DAFErvzf +5Aaa/5t4T0tHHYjhQyWIR3x/Uz9yiyWA7mHBoK04LkQpNz4fpywcN4swIngbMg9lTwQskvL45lFm ++3JJiV5niz+QD2s6h4xTetmtO2PwVZrSXcJb6bNrA7N8fZz3IW34mnZGX4ypq4rqpqtaxrnRXX0a +amS5xNdf3r+8P2w3BOcLRthC/FDx9yo9fWzMQccMzQYZtYNF/QVPFGnekOwCLFM/jK6MJpAwdYDb +4XmY0pF5sQoLHMHZQwd+GC7ngHszVrucpbrRm+kqDHyArkJ/vx7coHSkOGDanNH/QLnbcGAnXj8r +elmTf9sbOLIWRGkXwSCUz1pOYs8t+fMDKJp2eFf9D+mLY7Kl7l+MpxzYSJPiOK4xgoK1S5j4Cwsi +rpVm/pv4oGX2zT9C2b/sbwWJu9oncU3VhEcBuSTYh9GlNX53WReLBY9o8V+7kN06guipnqWkMwX0 +XmNVf3ibgEG5aXRimfsFMHddXq8MpupAGJY/95YVOOr4o3IixeRY2dOJ6K6RG9WUIkMzJ0I/jGl5 +i00ZPE0JKASHx+h5PcsXwXkldKi9TBIjxBwAo908lrhh6Ce3B33T2jbQTgxooXINeZPTsxalZCQO +uzxNhBwNP2+Zo2lfrRzZrjWuraN8koPjBSCYiKe8+5FK4GwST6o0BO3TFBZ7TzwmW7E8+zvwkpLN +9HZ2NwyvO/5fYIsLZ6/1jzKyMXbOEFkRpGMtP5rrqVAiLKYPs7ajmuRNuyJ5qMkGAb7iRmADJtZE +3vJd9px1kGwIiNEciv3N2PoTdy01KyMmmyoVBg8Dm6KELd/rEcf/sYa4patYQPtUWlaXJ63JeqGA +Kcn39REpPhHj1e4N3v8hPWb+iW4nQwGEka4s5RTGu6YxpBJl+WsrVMqpTdEOL8QfaEFFn+YWGZOf +DCiiPNM9sBMut4OfZnQwyVpGPybD4xkp6Q/VtyK43CamROP2cz/Oj2fWkNkjKCzYLCfyWq3V3BWf +hwoGjKg2UBvyIUOtTHr7Bf+j60HJ0yggiGiNleZRvLiZKkCFA2i8Tg3MWEiRQ8SFR8zL3q8O2wub +QVi2UVpCFouDixlwsbPbhIBxnsALTGUUx/vYc8LYVk6GvduLXJJndXVpgFKR/2qKfCeawB9j4zeb +8XkhSrAwAzKuu7cJIleNf6E7UFUGH9gOVELQ4/2lR1M2jS4p6LTtLb5cTeOQc4dtNQpU90lNsByf +UmrmpWmBLSfoix1uADN/nHDhHSaCF9tKy/1cPQg0GKSdHi5fOmIK1MNP+t/nbU5PsdoO9FEB3QAq +SqfqM7abpN4W6nOeWw3KabqoYuvUmstIO4dcALsq5E/8qYb9StfjnB/QxkB/0zOm3xxW16NS1IlJ +UCyGF8EJmF5ZHgtc4W3nqVdTeNMKf3MeKkA1VDVXq+cbhEGo3TM3IXGb2gqhhRtUZtqqk4ksQEhM ++7voq4vXpKZc3qqPf5bix/EGdlHTkofoLEQfnETJjJrXHcQF3kHmWalpAu23+mCYhCl/aj70VPIj +/kBKEH4P5zEyuSKK0JzfkxTiXKWTs8i8nbSWmgEyTlJ867M+URmjKtj9LMDrr+1kMuIeKLaEO/vC +8SfTCoTP0BNn9mgk5Y5ofgTDvxia95TSvpvSXr1vMhIiejXKfnV4mvgKrNqXa6z5/Ncq4g3J4hpL +KdnpdXRjsB5+0GQVu0UE+xBiaKC7+bKZT7A797zr8c0h8mIj/y3iHT0FxJ68lVFz02iLy9IuVBBf +8DH6sDJgjbAs/Fj7gih566QrzXORnRy79kqBsE4xl1EZ4WaA/TeNcvxfijTjYU+sZCFfaM24T9Xb +qnvITC3q8ieK7dbT1oTFyt7Zq5V5T1sFEKu9PAISqZTVf9mjMFMn3LQnIkmPZNvTKn7oQzCTc1Ig +8kP/IVyMIissSWug3Ui+DvQF42PHMyqUJcpVqPNyG8rc2A7ww1nyeYil0Wk01mf1G6zK8QYAC6xf +AL8xy+1qdP5ybmXFLpC/hWO9dMlCpBOR91qCXmLnaXmlSU4ypmpzW1lwBnMwcxxwLF/I/13CbhpL +hf6+8WIODdzRkfBzgWoj0i4ZpgaH153HB3IGNEDh23BosyhzmltEDeEkYyZMyZf4oNZpfKgNRx8j +bTgOWq29vv0ZaVbqJQFdD2JEahow47pIOOfXO5thS1QVdGn6Ygj7jX/H36NM6khaCw+jjgCzsg25 +Y0YtIS1dS/dxw9AEp3kCDnf2fGOBaPG7g5Ijfz/r8NER3a1RDOpLAMDipes2Rq4asUUsdccBsYmS +aK+zgXfwkEkGCkqMxLfLjp6uZuB0VQIjjSv8ZgUK1QFErt7XTDacqFHynNSmw52andzIUGOVEodM +rKnb6ykOLhTk8ngC1pip0hW+IC4qlTc1CypoM7+Ff/y79netAr5TLt95xIXYav4C8XJSj2h+nHeG +mncrRN18FK9Ek+gjZ1bFcfzvx1v2JFax/0LXN6vCH3f/N2ABFx7wTzvx7Cl4FOhbzin8PBbISWjI +vXd09t8184QVcqJA93CgRrQeRD9Gh+s7u1jM4jh5IX+AnIwSfU7rWvNP3YX123AuYNlfGYp5GiVF +Wi7EytgL5mQ4PktkhNGd2NLalIMBxwfRvRE1T9b/0CqMQhPqnMCgeFlBX1KaX27FORz12HqyyS6d +IGEJWXUdsbEtRLcjhQHB+TYTCTSTNotQe9EN+3OyMHUgFg0exiq8gTg1QKyu9LdlYEeo7ASEy0FN +tf+yta8pFt0Ee7oPtSSL/97pQHkF5S/OqJ4i/8tlHDVPvFArNopRnW2IA9qk60Z9v3P2rL6/bpK6 +0NCuf0qprROayINEGxEkllFhUg1VGIkj5S2PshtS0DkM+vMZNZq0Qhv6O0fTdg0O7sk/gniJvQ1y 
+W+oLjKf0EnRhDqyxzPc2vL6sCRq6nBg8EdctZLHURmbI3uwBe+eH7pSOpFkWyGyG/aMhyiNfSkg9 +EodGR0PrVDw0Yc3XaEBk/D9d+feiOQXjnqr3SaQuXpex9eI64cGAkFmj76a6puLGKGPNUmyy6RuW +1lBRmWpOXNbXJ90V48GEFV/tmiBVVoy71YTz4qMiPG+ALA418tRbKmjYA+8Di4+t1ZkTYUkRX8ph +QXIBPHDw6KUDbYa93iHONAlU3O/2vH7HYbk48So20to/h0hnABit0mz4YIWiP7ArYPm4D++qZQ9V +YrCF6qJpK/D/CkSfzgNiFELhEvGCqe60+gO3kSFu4HKvUD10MQMXTtFiR9NJlEFIqK/OkJ88hhR5 +JPX2tV2fZcPVIS0sDqfdWuqZ8RsZ9/ySzNBomggD0Y8P0sSQDz1e6Zsg8vADlW7aFjT4kJqV8aYl +y6zgr3jz4DQuCo6D9d7KZPNpLL3XVPhzQ9cRsFbtnITcHOyv4ZiITORP8qf4v8gfM0Inv/Zr775D +AxYnUTgNFyvFrFp8WEkUd5iXyj+GjlCEJRiXU0oJryX2h2v1xhkvWDX+c6l5xQHMWup2JFT5sez2 +GN4W4TYxe0QLICr4VbEjG/O18QfEwgIOHfVMRS2xJ75XQvOLfzbJogTD5zHIxwviuo+dapOy3vMe +LC2Y5gGCQzUrPUKi0xPb3QipzDUmrUpaJfM8mhuYOS7n9K9EUgVXgq8TqvjHenDAqGA7+wdJauna +Uir59am2yxJJdA108uQIj/A0SuJuYcDPA5zIcLSqAemUA1rgDSYjX2atzjQJ24tsHbwIYqaFbTHN +jTkoxBCAdZYaNroX6zbrJahnS9hJ7BuyBwmnwwmoxUDJjZXUMpX4mfsjlyZY2XZTwpOPBK+hsqYb +gwhy3DfU/Wsg2pAsznI1fHAagmwjR8UUYzHhsIuFOphjZ8SF4notCH6KeyhRHbzSMPjSIkmixlFh +a3S6dHS7BnYcrb79CdWbD0kVjLCDd7ZIjVaUd3LXFqDxIthfQ39qqZPD6MuqecL+zFUQ9qTxOGgO +EIUsyKO39KRQkcZDkCwok9daW3H6ZLkX+xahXFT/dqZMlordju8WsCCY7nOQU16/excUQuJNRxh/ +ESZcL8KriNpWAgwWi9JKVYtmA6xVLKzWDoATMqiVmeUDXKMBNFAunw4XerIrQOAq5Wvb3JgRT/jv +WIJEprDCQpq6iOuIhUgss0ABf6ZwFJ4t+dTxCSE2k7hMGFi7kiZ07dRJSZ/jJngVbCss4W21jus1 +JdgH7qM/BQYG8tDx7qnGEPZoai37SmyFjRqSSxGAiFeP3s7HP4FeDVCcLY81E7DjrZnHvCrlXZdP +YT24higBgGiHhj9ESGNwWKpROiX3tTX5xez53frNOnUW4Zt1wvfZ923/D/XhD4LPTMYMLHPhynqa +dx83GpEyVnEVG0+vC+MrRQfU0kmfVIm5dsm2tUbag3rvBfSawbpPMoPktRy+Ls3zqHVRaMGBSYJD +LtsY610RKgHItg2BjwpNULIH1e66Obep7nBLypLH7MD25REgur50cu5+X1iPUwqR5UgsZVu4RKjQ +d2TydWx0KZjZd/jCZHzWQwmave1u63Pc9lNDwnSS9+B4FntVUho1l5HN8Yn6pxh3UspaVF8l0w/b +nHFJLFrfhNWkQizTBqmwmorqXTccpb8i3deVSgGFGoCb8tpzj54JxkNZRusqPqLQilvvSzDvrHWG +NZPml00YP4FvdEDa7NLL8OoJ6gFDJ99rEVZdGiRqM+20TcHIqEo10TNcS71RPMps/s3dFRoFkmjB +eYlmjlhF+Y6+ZluwpKmuT3aual5z1nGcO0GiODJLIm5jme9fUL817Woy242zyIuX+Ua4+IybyLmX +FDW7L3EkcntIS+up4sdKVH5gUJ8djZo4WiPQOiCXJ4fwl1kx3FHx4IBjsQekha1yLNtOp6PN5oKz +1siO5vOlGmHzKUaMukYFIvGjslJileEZaAV8Wf1+4ZPi2NULY7nrGJ+M5yzMe579drSJOoBrenKM +CZHsCeIpljTrSTqRrMnnwjJ7I+1CTVKXdrm4g+Pn2ASG3p0Sq3voWK7ehVQN+s4+O3YT5udkS2KP +v3GIAVJ4bIpPTT2as9rziuTe18lZ60eqPeH5pd6uWmNGPDVrC3LLG0vNjUmRiPwNolKc90aJz5Wn +YQT/GADmRYyVeWLdV2fpm43Zm1a9gGCsfuPBpxDwqvF9d1iz98JmNvlcdeyBFXgVFL6gSlM7fWDx +bHupV9TJ2e/SNNHhfBi85PR3naqNvk2mvbZjTlDAGG991PH61VYlHkdzy/CUhUf7732U0s0N4nU1 +dGG1olbRsnWZ1dyvhyoJehE46afWKeBrfmUlie+gs9XHM6V47/IiOxGhxu9SJpV4FfJ73PIKV5uP +OmpUF+fjir6195aJO9xjZq8u5FzBXx3zp3I5XYi8jV4536N/bbv4sW5vG0mDGXsQtw34A4aV1qep +qhPfKn6cMdA2XEoDBGWm7UqnEDb+ZzJuGgofWoY7FNhpDy0lCnrdf6+HOzYH4TvvEeAC2am1bsBO +rchknCvTh6yPQZ7AJrXbw5XL1tzMe7DaUUrDf+3M9gm3aqKhnlEFNJ/ppxx3e202udH4Eq6ivtrl +cwasvxq7y7GazdTCqimB87a9VTmALWg3glnLKJaYRC2UAcTGz4KoBffepeHNwrx4USusLSPnIPLB +9z0TcnxRc2IWpJg18947ZAmTSEAK59PzelpH3fUAm3vkmCh9UoWXm6Atu3p5qHrJo5EBz84zPhnu +wXzXSWKJKBJxuMQbP5NHyt8AeRQd6dSt/v2sIeMhcp5gFYEoJeSt0z/Q+kCC6m9xdyU159MxGka3 +/ybpanAGKCpjmjQpap6RicWv8NEr1vEK/94n2Fc7uN6fYNwu/kyIYCMd1hzLtQ8q/iyMn3Zk7iNS +33AvhytkbBQuCUblI9MtpOTLZe5p26ZeCq0aYHWFAcpGJUjAVc9qNZcnhgfeSm6K42O1/dmWvEg4 +tIwvREJni7ppRB+cChyV+chEbXvdhsd3hJGK1PtVjGDRCYrs3LJq0i3eD4OC9R+JwjG00oBEcCwC +2wJNh4gVE/6jYVujfnnwfwp4mFV/3ToxiF3nPSA2041Qgil3JZKU4YVkvkXaeV1L8dZYIcyOqg5u +qtlWwtzhU/O8imVzjymdkU9lQGtdFbhnY3lP+oqrI05kAxAxGyDpZtVpyo1zicQEDaOdHOUfQmwU +BKSSV8bvsBMfDSboL8Vx+NYZ/Wca74qAJffRhpK5/ZEvqleJTMT4Oltf9Ppo7JbjynbN5R07082G +M9RsCpQ2bNmnKb1T6DGuiyucor2Pi0mttBFxvRYALg09jfo+vK8yFiT90AY0RJF8nt9EFxJpYYfS 
+m/UWze0Ns8YGlVwVLcwuTHhws3LpIM5IxpmnAKQ2XNbrIfNaOiilT3ov5HKf/WbqeiDTVHcf+y2P +BFOQxzQBxszcW6IPTBUX8PFM3XQua7//g+VbEH60lun7Sf9Usi8cVJh1nx6gC6pelYGhy/Zv/e3C +KMkA8rVdIMq0eGPYhMuFMheZZvp3YFEhmbxXWthNk4o0J465ab8lPKtI8k5tAa6n1lv8p3ZGv/er +FRfXV3N1otNTQiVe1JqQBQ14BPQFhe7nIWRFWqCTplKpuC2Ar094XVMY0T+WWpPvqnR8Ezyg625M +e2Sme8gxj65V4hFYUBDRxIknwoIEqWJBv0596+mfmUM+J4xBDfpuaimqmuc0/ScVOfXduRW0HQlR +cxlAS9YTsM3sUZI98h2a8cb+StWGDMth6l/O4+YboWkLcniTLjpVTtRgrJnZQdggIRLyIYN7naGV +GWxEdjWpzAoPBPsDeSbVYnL6mPCfFAn2IpW0Izx9LQ1J7ABkuJS8dYUCUCI8u9YA+Hoo4QbaR89u +n6yB/a7GuSuj2fN45zm4lNsdwxDRxOOFd7ZPBS+V5VLsXCOZUCD70YtNgCTtFFrdzeXdYFySJRJf +FoSM52pt760OdCc89owin0fqVDHZaXsSCMc13GsUqKXYyC5InAPVwW9ucOQVPcxfzqI+kKW4erDw +FPIK/SF2xMgA1q5B6mWO6wfJbmb8J/QeaPCrhQmiR22uVIw7CwRPzLsmdv91l5gMu3MdzBoIDNw4 +IZWHWpUx3FMipbVClJJzjUDpT9m17RMcYr9CbtJh+Qn5ItTzkSeU3hTBhb7KX35CfeZHKdHB/6Wv +TjmYG/TimQbnigIjcUqL7M/3fmyPNxK6vsO5D7pNooBAB+eHQpatzxmSG8P+OphhSqdfCrLw4ZiB +fvpMEiAkhNmP55jHzWt41dEtMqzT3pd21Nc0s20twAS5mMAytwOfKfOeDVQokPig11VMZSLwXorO +J5wQkn6y9hHUJ14xoVYgCm8Jt/XpRNUq3P1o+LCu5hJ+vZ0yFujTEVHAKD9hW4V7brmf6k2KKpkV +CPWF0IUCZIKwzdlF5NKIiPazj91fOUMFVUJSIht6a/uOAsTILRfWlAQ/nclumnRkQE9GwDL4s0NA +9v5mhWfXtcDkwHC6AkfQGQdV6Pe33ym0NmOY5Gx6/ghKt2cT06Vd+W1E1bqu1r8Ebibb78W+vdpK +Rl8aSYWjU1a/AvwkI0y3bEuXSAWCUgWHHbcDrNY3YZiKp/HfhYSaeWNJMrkgC4WEKDddTYNiT76x +Kwoyv2Mh8ohE0Dm70EHlmcZEB+NEV3ho7qsVa60zV+PT4REPcHO4p2UvpJW1a01sILFQgZQmn5sA +yYeRgl/csYMWy8WfTVfGq6xhJlM3higSKgRQn9iV174MjpAXIHgp0xYO6szM44bFz7CLlFCPfGky +iXMV6KpgDE9gVl0pMJASRlkItH3SHCY+5ROkRuwT8vLgw/2sU9OYYRTsUSu7Hex3AVFxOcsbr3h8 +6qF+fQKbnC6XrprU6HQZDJHCBY5PR+C1EHU9NJao1zy0yKgNNufcHTiDnvIlmnZcty8o/RSxUaGr +UrOyXzcaMvl+gk2uz5oCswAjB9lugthOHhuGKzAi8Q80a9jFUc1H07jDJh3zSsw0Vrdr0pykFOsj +IyNDtDyNlJsSJJZkRMYDVuKOENFasbMqznvv7ZC3E5YXJJxLCiAVbOE5JgYMDGNGSfiaazpCMZHF +2XWWU4Knxi/GG4JYhk9JKXUikbcaSfeuLr0tZdG2CGohwMww9OicQO2dCQ6nfzgwVehG/IOTAmVf +f5EC/IdrLNth0N2mMnIsNcLSgMS5/G+Sy+mjywhLuMKKhZLJtRfRB1B7yqK0LDXiUh6gbBaaliOr +hB2u5gILdva7OhGTqC/jOazCddL0m/m/z5CRaRyn+w2UxM5bPp5FJsNUplF4uhOdeZMnB4y1iN8z +EsTFQ1aXgubnUd3Dy0hoGyIkCnKwi5tIcdMroZAZH2DT2Rw97XzdYB0oT1bqKEzKqppAQToRuPoT +p+hyJY5CImrqktmePzoJifVh0tfHLTGdKnUKjxQ3B/dmqD9xmYdjMniUC4xxxHkk1bAqBfqJIUv8 +ggLMli9VFWZH0uU0uok97izgptZkrCIegzvpAmyC2BrX3hJREWwCu+wn7Ggga6uT02uhw8wmoWUh +uCHLWOAMH+zmPN9/yZcN288P9h+Om9+DS/A1F0lHx8fvkMW7d/CfmTa3Ke3DouEOYtPVdgl1wxRm +zgE2Jzvh7JtYj2i2oJMVrLHOlZFNolZngxUCmVltUWq6oCii/9jJ3d8uNEgHlneKCn7cqXDyZuC/ +ar4hafOf6mEQ53FXLmupwTBD6z369BBuEF1tqQan3w75Fz8SmPQJFbNhHrsdOUm/c7xXrd0GbDW+ +BrzP4tkGizYYg2mUNfqEf4fcmaIjLvYGdZ54TxiTdSKD3jFMFnwDfPbK9ldrSVW7WLG5RJFb2kc0 +VzljsWrwYEBf92+5v04eap2V6DcPwyPKPGFSlTGwTNQwj+Jz7qw55iUXkT+eXviyLT8IncX6gSb5 +WZG9cKgpQB+o6jL5wZlQ63MT8dIBDwtpltteafTSTP+F/FI3tqivERXVT8WTV2mvaWjS0Wi5TfJQ +GtL9AT32oh+Wn2tYXsQabtS88q9O98aFVb40AZxsRrqIsPzxyGoySdD/cALJwBgqBfYO7h2Q/oDn +jnByARQa5Q1Gvku540ps68S/rXrp6NLI+8eD2969DfkxbtLDyiowSIMgvy7lp9fC6i+G5Yi5OmAw +W5wivECQDiytJjKVXaKa5c94PPhyivTTiAjizMF42UHYCqDNl+7UrITkbZOShrH2mSQnnPFan8qF +a38E3qJEl62EFho9CZwtZmDd4IEeXTg0ZDL+fEh33qLZepiBfhAa9fIoFKgxpnnMKyp4+ucA6xXt +xzxHLsTL1TN+AkvHzifYK4BpdR1BZKUC7KnA51uuQZVKm9GnD4R4M+ITNtNIt1azjqUckrYuEQql +RjT0EM38fjwsMbHugobltaKKRW+fpT5T1TUlXkTFCgjUQ7xQGYQeW3NM3zNdAepgThkIamjOOJIn +VGiI0waMr1aDZ4I3fUYSExd1j5gS8JAZnpOIMlST77xTow9PT5SE5NnBxlcRLnAjDz9OcL49VRu8 +hQbp6ASL4L0yELiB5f1/QcikQv9sCwn5rKv+YednRdOZuRyoDLMGe8hNn/+3mD6wsWKp7UPWRLHa +2NupLYyfp7M0LwD7r9CX3fyM2Jmfn0nEoHCvLOQdAoyMnxD89srTquvrf3oHB49VV17w4Tn92Q2u +z74HnYzXYeBIXZ0xniTJmBq6RHLDBXTnC5QoRKpaMuFtLrzfVzIriimR10k4v5npxS4c8hlG+dxS 
+/k7DgqU2HoDbLCeadcMsSeBmFjA3wR6bteu3uHhlUZOvNJyKrAyUoUmnFPjS0wgKQ96gLSF1KNY4 +g/07I2x9kPzTpmmDePfEYcdeZS4owGlsD2Tj1P83oR+Rd8/vSx33KPQhmojt8lVGlQ1Yb5hpZuXt +JWyADit28p/ZiRj7Tei11BZ9wnsNy9Uxp6W1mrV9HfRLZWhPcUJFfY6Dxh0Eokr5z0RZBX2928f9 +fjZq4vsxvRNyl8dI284wG7P39c5hLz9NYS/ajlY3AJ1T+vBo0HlKXBoxHCjApIUCAxIpqx+8fFou +Q656VGTq589S8nUkJ8OCgwn93rSW7zihmmJccwttIvBMjEMuT7/1xbQqUgYrHQhbKzmpZU+Q2H1q +zRo4DTMJwohL6ye3Z87XTuW2Et3nRttBcv/2sqK2jSZO94WDrDIXkvKAnRHfbiBUUcUYxHxbDACh +miUiza7BfXQh9cIhrByGUQg6UwC3xiYz0PFLDLZwmGBDV8DNOhswF7CtcIsqgale8EY1HYGPO/mt +dzyafWGejyCZe7CyKpjw4CXtQyIcVVN/OUU9lGs0PJUWlkZzk3KLeEvUrhHOI0HabMsXulQf450v +wqsPgA4qV+0oCTeHorjBROb8iS2tkWSiAkTFlzDIOY71PHSMpPwjQhPyYUHw3baE73xzTZe7qgTY +GRiO3KMl2BiTdqYZD9lsRx4ICzUMwNI2NdhBCUOrMoMbPONcXYCHTLCbADFvnv+i23qygD81KL83 +b63Cv8A6hyKRFYF86iwr0WUjXqChQZBSGWdAj85+84/a62bFNYIAbNqK8J7g6XPsow97PiQ0FnXR +obd6+sQTiCINqfCDwwKfWCtxcLZJdwiVPgteLI98oL8VMk03Q7A+H+fG6XSsFQMmgZhdUBCWK96n +JtuZnUnskygB5OdDss+cIdzejpEtdolRF20rx7hHjuC0nRu0X5TFQaPpac1PPzvsevTBa/utYYyA +jiRLv+1pm/kBdaGQbeIca72HovdIheTtSUZCByPg2aMw4PKHrry6NSa63s5oBztpuGiT7w8sNFJ8 +druFYt700ed5thb+uvglKwhuEwFZSK7XUycjcpuaNjK+xB8gU05vjsWM62djfYOBo45r9q7VeW8J +qKmaOygpyzaWvjn+ZvI3ZrZdST1M19keCFr5uIfctd5MePdCpa2uoFX9mPzkZwV2BbXt4iS2yn84 +/jICwj7D0H9fCBiErfDRfzw7H++PK7TkowL0jc30Mtlp1sQzaFT8OcR4vBHQ/EKX77jZL3kmvjar +/Z5QatMXjTgEp1yfX2mw2qkcFA7Ty2vFq0OnXHD1Uih4xC9Tvy5BGsphhzgKiaEjNOZF0i1/W6ID +qP87do7DpcjUQDphvaupEhR8XStPvCs/+zJ9uOa97pEwI/WKa719+vzFJq/OzzN+IsNNFuDn3Toh +zDU4yLwqoI9OPG0tzgZaXo9qS4spT56NJCXdWaQyFfuneOsM/whnqyGehRDkSdQCSdbWE9Qy5szZ +vcGQ/AqYyWWnPSB0N3MHD7yuZMDOSfBGvbh/Qf6oTgezVstpBa0hjNBk6g+z/Ngv8CeNBdU0HUZg +IJ43+1KHeoA7wLuQGmERkZQ3hNtWA5Osk5EmQR9agDNyqa1KQusgjOjt5dgHdmstc8YDK5RU4+YV +JqXb/H6Dm6sHwWdCH9HVci73gNi1/SxILDWW7alUgZQg/vDakSNkgTKS12FPs1XbmMcPagXiKp1B +f5nWmU+wkJmRDopnhFURwqfV4jKWJtxhLwkdfNuSe/SNZERJhozBZlODUfj03QQeaC1QuC9WEjVk +YLv0ZPc7ajC46FWe5A0cxjgEXsYEw/GtYZ1DULUIP5xRObDwOu7WRrDHse2hCz4VeGnHIUefyzAr +lzBvGDqzKbAT+fUDKaKvk4Tz20JkaWL/+1ISvYUVKQM6srHjvwS2nIDLhcFglVxeD+usVOseGmIG +kyftYekNpOWeuvohSOm5OeKgQ3j88mpmwj1m5wQpaaAgSI6ZsnfvrAjIkcgXCfHhQAc46q2t/PjE +FUDRYD17KpGFRAtSRCBtLU0+4bA8NxgMQ056G8TXsyoqCvMomIs2HQXNJzlI8hD9w8isF7vNgsAr +CPIiAMqPmvTm4FpN5BmKDdyeVSua7+fNOsAj44uxFwEA1yMtJkmRNTUTcK3NRCBy72DC0dEfKIBO +IIJywv0su+UXRV/QRUgT2+k6MpNtEhMjpUfP6gpWWTxNxpBI9WdjgnpSSKFRu12YRuJ0tkfBMtPG +AEjmcbhQloKQxC+qt+j31ksNXjKdrfAMN5GJpOjM5lCb3IoRdP/MqjMP/o9JCZf4COZWVOpLZWFL +94HFaprP0jLlm010J2BK5SHwb+4hhlyEUlcMLFTO20kGWwUpUXzlezHOvjvhdz5iflKXjWLViwsb ++X7YM0I4M0hxzq7l0CTw8L5oRU5eyQIRePA2/uoY4LtOYp2GQH9mlabwOjgah6/lVpKpWoKEO01+ +JevGZ19v6aljTdV2QyWPolHahpO8zgrkI5nUWxV5Xcvl1/b5vQBhJVFr7gAvZMBpMAU004IatHaL +7dNN1frxlypjEMkbv4tp+nmAN4H2LDkQ4E/2iEL0Z2oofA5k7Ks+9iVtF6QNhFjP6nInGWfcYTzI +Q+25hfHY5yeJy9gemzI4XnXSS0b/Z06uXzVXvykRWt33kXDyDzZ8ozp48Kpg+vZxMuppiJu+rSUl +uBXEX8y5PjqO6MfIdssxjUPNDDMenasYQIqMz6bBcY3MhPB/BDhsEgujZlF57WC6ZmOm2vLMWl/Z +oj/BmXa/Qyx4W17eDf5i0cr5x9TarAkhBvMv56uwoFobSMxIrkAFHGtmHklIFzU4vaScbBKrafo4 +owo+8ldbLhee+Wq5q4qmaZBy2C5DvgxP9g/5NxlnvB44BPsKknubwnw+jm7hjhmYFbMIpHpci11z +8Dn4dPWQb657F/H2pJGNRY9HXTiIN0X4juX6Hc09K0+o8HUVYpiD05BRmhlDVuYxhLzAn9jY+WdH +U4YCOFYrT4nraSS3ThIlnA/Yy7ss3oCIBkJfftk9mlfkMnK9hxHD8gzlPwir0Jv4c9vCKMCAKOGE +jHbkNp+Zrh0OnIeruoABfvdmm+2CkbrOhl0MP2MUlIHP30aa49hi1IL9TFejfGrXsRixWi/BAjgJ +GT7VA0k6dB3BGH/NBlXsIt/fuZM5AM/Ed54ksk0B4gHAuqPXtIZ2SEJf3Ivq3UzVpoZaD83vnPS4 +g5wu72NgOd07tvuXAH7M58rfEqPg5Egx2AqFXCtcU++szLomrzGFqbsfMPB+/iLSQgk9ovCR2PLJ +DkDXUKlOrDovfIPfBqNqKqDJEs3W9DDUEAryyodZ2tBt6cUP99Pi0uitid0MeA7F5kbXzRZ/JC/9 
+KfKN48mGs1CiurjUttZpPJYcuYpNcTMe3FnVfl64qR294l81fI3vvqizCiFgmmZ5gE2bz9ab9zW8 +2jdqqofjVKPlHp26asIx/lZesLd5kh8IuZ8qm0zD8BOLLMNeDzmFks4cVZbjBDzrKVYzn60Hzr2M +rZB9uoU8IBoo95RMLPSXfB71wNCn2FVJTFbHpmM9j2lOO1twyqaRNmdnaSFMmFJTT7afJyT2LUEJ +Ahrhs74hZGq3hJmSi+2OmraAmiWsX/LPyzX0S1KXFfBZV7QiR6gVaiuGgd6nK5P2IWeQuHiGKIMi +a1J38s/RDz+ey7hWyG2SClbmEJHzVm6gVUQAI8HHVUxau+hO6sCHEL/9qyqU+BlnmmCjgfEbzGdi +QD3YUDxHDskuJaax8yj8TWttxe5nA1kqulWjzd14gZpE9RF2Xvy/QFCOMrTTqCEKN7YmBBWT+oIr +jznY6YUG0Ao8osbkcd044MtC+HLA+o4Falvg0rhqkkbbEsnyO0OZky/+Rc7c8agSk7dpBjK8bvDB +vpKVcif7ETOaZiP2jVdVAZMkR1qq/CSZkqBgefV0b6C+hGD9QRFYEOc8uYh893rn5y8KGXj/bGmn +CkCTe+zLgQX/vaFuWznQGJimIkryqJVsbl0az3u5qyfvgt5mouF79XZDnz8fr/jmy0ywwzZxv92+ +xBFCe9gHuqxIOUX/rl/Pk0Rzecdp9YkY7EPEEm8/5hsZoZMHrVTsOtMnCYrSnXh4L61ioAZaINwz +bWXbUeI7Nkpy6LUGomY84sylXQ0dc7Z2w8aUFvp4WU9BXHrMuEOi4fUE5SNuj6s2d8NkZ6swjHZ2 +Mb3RJJBWoGBj7SEK84C66Sd/ZGPvJFuBEIdCzqgCfNZ+CZfrY0hadNcC23UTHHOXWq7jgcsK5dbu +hE9xfXQ8UpjRGNSFrhMqkYGpKiipQQlFPmhnCkAcS2PNhIELBusriEYvFz/08Tg1varw54NBvo56 +O9Llb+cu84d1B0/+zxI6wuRSPR9UkCfnc8H+fuQM2mWTH8bJzxgcWyF5KJZd3xlVeFzaokHi9EVX +C6v7l9bWWW50haWUsRUIMwytgBSvsGV8l10bCySgWG/LdR5P6WmAjgaQeqpE5Bs8Qgu/FHDUgUto +Qig61J/B+AR+8YVTluyTW4tG9HkQVej5vh0r9hAo3TFx68CWlJz+6uGQSZJa6Xb0z4p6D/2BS1RY +wPSGOXMfMG6A2gJ+4+nC5AkXnD0GW8Ida7AvLio9cxC1GPOEjUywymsSkofnU9EdSJR2IT182d1w +L7Z9T1BeP3wVLfiYnIvuB35d6IGGOdEnxilFqKowTPUASIOGKzqjtydm5/dugQB8X3kzqAMR5wC6 +8mJT53zCnCGTVXW7eob64ahAQO83pJBjh0cGNW+sKQUWNgLcMBa6r1VugmwWC/bgv3BSkiCbVmG2 +goRVqHM9lF1EUsd8UoupyFJOhEErwdEQb1msMzfh1foccL2GWyA5jXkaq+9bcFOm55qNT7vr1cQc +3X1stz71wgIohxbNIUNQcDD+jQKCFhg9GwFhS8wnwRDd3h/vevZWqLOXd1oqWKqLL6G7UOSimRJC +y7w8vYtKpuDRF7CAvHfuelFmFEC3dRE3JR+yIFxlm+QR30mOpdj+0jbKSQ1KatXL0Pg5hXfDw9qm +3W4MpPkjccCglR0z+6lo2UwsWXdriXdary/967B+hQOQYHcJBT9+MwaKOL+jZOtEmlTxq+DiyCtN +HbzWPbCFApWcJG/gblWMPaxuh5rCRORjVlzt9k8dj90jnNTeFNE0V05RM6jogak4D5dgg62Rj+pz +5dKPlSG3r+ArPkW5ci4geHzs+O27GV0roImqfsnlOeYIqC/4ImiinbciBZ7aHsbOoVz76aRjBNdv ++xSdyDXsA2Gw1dKB4OjyGGTIlYu0Z6FSNtqiJ0JMYJLiGMcNtWClfPWiYAZ9cTThGsyAR7xyDH1l +Chg9OZEsBafD+Ezv07k7W2hIWoRJBf9rFwJWVwENZg9eUMojXiobsMmcu72xMPdAWBQCIZnItPhB +tNUf7j43nthmHfQTMFCPP8BTuMdu+2y47CDgwHtDzCdcmmoiTm9XezbTJzrf6a2aRT02ElYwtpvj +7tmhC3yli3p8UBRrlCRtkB8MQYDxlP3M19uF33SqoAdlF9KaeCCEKxcE/pmFUaltj3MLFhQg3gGq +Bswsj9og3N6orqJHD6IFrLxR1/5HEf96/B8LGF5PNTltebbk5CYpCUdrGti65HrizdMRZcBd3ONz +DujIIefbKs/73l/soGLdrENkwptosJ7uuAO9FUUTYLb190Frjy9lCgs8HgFfavbYrAgM1RQyjcks +0YJjmf0fDKPqGZ/T0TUjNi27hrukm/xlwJ2lHMN1Nf1mQw3bG8A+3nYYqro7jE/cuW93LMGJzOSl +6nQtxyM8ddEcsRCi6vo+JQ2J8s12dWT8uCLgJWMZSLLQfBVNGtvyW34vU0r72JIv/pmilmzLrrt/ +w8pa0z5qJRPyfkU/xp4gaswh7CcCc3obriFmwot2sCPLTqJmUE2qfDuVUwk+DYA0Ks1Bs3m9IrmR +CD3p6IonMup60V4+IY6aQm7gLH9hFKPSm+VcCuhC2p/GXik1fkt8pAqUOkoPa9cMLrbt62mvRrdo +zPB3/QuYj677xthb0WF/XUjYvxSdhwzv9WRWdj/31kE5hrtSFgYy5eMgsmoDLK1+heZRjkPNFVZE +2RRlQGwIanoFcifyZv5lP/B/sRkct6zfoJL+22sB0Nc7aucjI3IkuLEmzc1NzrhgqOkFaJevVxyl +QM7sW12PaJ/B1qOdWXJs7gy5ZVMqrtDmNcAg+zzNeGSUAduO//3ddzvEs5u/1/wNgIU4yJl5XHll +z9qYGKOH+53I4S7DF0HuWQ73ByiJL0bxVuUaNYtFKom4JloYSWUGHV5/8Lsds76umjOrscDVnBi4 +6xKAKq3r38G8F96+3EUFn31DhNcRZdU7vjw4RWAXbRrlXrZGAY7BjmccQSXzBkRZSAff3e8K+ooM +IvdixskCSoDhCuLP2wyjr66zhQyTv24F1yO52dNMGD7cA37YhnX1qnZc9TgPYLk+4chgGxMTbh+X +QUtXLBOmb4/zNpbKFgr7Qdi7ASozd7RyJceZQNcBdyS9IFRMzKIFLP5hRJ5zXFhw7h7FVRYCRfxB +MfcX22k6WbJ/ueTS1lPIHOtgChwMbT2cIhQOvPXQ0jOcct/WaT6Ymlqp8ER6x5qeJZ8uThr1aegj +NPL1gOh8qRMs5zi6vYdAvx2OPnRMh43iF1W2PVuskL7Fc5Cw9j5cyoWDsV1LbMI5f71uRE9NAcX7 +JB3SFB4DtX5LE/GQNyzIi/vUSNbWIST7jK5MFfQJ0E4B0eeMLIgwtAvYA2VJmO8xbewuc65scNiW 
+wtlqtT/pSF/icfp2dsYp2xUtzXQs3rpfdvhi7PVXDbVTr0tQJAKMJaWk6U88EvXslzVSvyTEonVz +WCfrA2CpImEpSeog5mOLL2iQOt1IbnSh4iikKpJnx6IRAB4g0T/x07BV3VWtzivS+C3PuEQdLt9B +BeMa3S9GeboUmU3EZqtESpr5qEFQ44c3vhB/a5IUJAPPwLkf8rM8n49VGdpXyqVQoGGTB0/8wNvc +iGxk7MnXa65yx+lfGIIlgRFfMoIwz5NS5pIdDVKuXGgBxNtmvXO5/0HoT5MiKIW+NQc1bH9aW2Qp +PMwXns/zAs/wjtlc9hlFbpkMmJM/LauswACO03CyATnrqayoSeplKkD38WPshj5hmFvArw3bVCOj +vHQlypJDzgjRUc3nT61xrSTU/svDjgbH+6QxNFqiCdnaZ5CtVSr5DbJG4d3mcJzrSM2+k3w9Q28g +92Ylp26+1WDFzBxolfkKLSkbnHmXXcxBIQVExtqovftWKfTCCHuvktXkY1mBd1GvT3BTzPtu8xk4 +gvLq4AYZQrvNFkrE4uNWGp/Mwqn88peuXKQaPYyOVURjYKhkKtPItP6+B/UJsd/xB7krI7nLhQAd +5zYVoFWIrr3yrVCaB1SVcCpv139UMv3TUI6cSMDJpCAOhvzeYqc7xzV1mSr6beo1/sKsbklvHcLY +rSjIyAqbkOYhl80n2H1vnNqOR+BuV9aX2ZXqk0/WXg9/MJX2xjQQ4+eFWsEpSL0i4nso9dEhN82R +zUw5FinpbmcoKlJ2K+IcG/+FdAe5ivOAKun5lz4ZT7/lwH7RodY9AuyMZR216dx1aWDjtaN0JeON +YexbzBV3m/UbWxHzP33PIjlL4yjZGvZwriUjkAhfzGISi0nEQiC+cIYskqM7VbXbi/U542kp4awQ +VemYCeOLlPcSoqUDs0zpJECMqVhG+RS05G1qVIuKEpaUc1f7iRwUp/Ppg/WHnaToaY0l9K6EGkNA +7lDAtkh1tJwzyoLrTzVL7ucq0ZLOF38+U8RtOo6+Ta/3/33gqwzovbO7q51mMeKwd4AG71mFPz48 +6IxIsKFdYO9ags2qD5sVdBXUCP04LrdPYrK3vULLS8XnrsMhrGUBVvJCGhDGYNfrRt9MNhY7/7mZ +4xYQpRdog20b2Hdh0sW+CKGuiT/z5EYUB2Yb6qzdWa8gqZ6QAyf8doOO6VBgYEC0BT25oHKcL3ln +TSciTp+zXd3GXAusrycS4BsBqhdEROKsRZiT+8VQotc7h5eqzn+daOPxIVBdaAFYT4bVYnzNCqFy +i5raYxWVj5wz5jdW5xO6TZUmfwTdO3mLJBvRGuJ39w+iQwY3r0kTeZ5LcdKrWpPGg8gQKVedJ8jo +UKONG9WpJTGss1In85p91+jB76liCBYXgQat2yXKSEAcKIlvL5krebRDVttZLBMj4542Qwtp3xte +Df0nBzKEIWRauamqkbBN8M60Ii/HdLDWar4OVbwHD8uR38W57bYYj2Jpb66yDLeZq5E1DdnHL2fB +sHaqgb4CdpXXynXMp+8w5Gg7mGAJhsSwYSzOjyqO988hx7Tw8Rr+1LItsWdwyrG4AtSQyQvxbtK2 +IFv8UIdtQx4Xm3zbnMWeKz8BCV+v3efLphDqCDqQIn1sMMrX0NTfWretUfcwywO8v4rSSNnBqabN +9AdvGxbmfqWtYVFU936WGI3tePiOAUU0qLIpulj7QZ3fdenCR94R8TMEJXkm3DUa8wmL4SizYzP7 +V0YtuPyVqyjlWcQz2N5lAd3gv9Or+kker/50kkugmsbnVQBQ0krA7o4ieMgnbj3nn+KwNEi0teWI +U+TK+7lROf/JBXXAx0bi/9d8nOrIo8PbOKzPMF+x/PDDT+y7J6bFlFiVeGbKsjpdKT+hr4VmT5p6 +FYopNX0nVKEsD1O2UYgRhaBTb06YTzkgv3bt7z525ke5NGgL0eF73FRS84sPbEzk1JhPo55MuMU/ +3GFZK4H3zAn6wlpr1VQXRmfmkvNTZRFGdbfaqtQBQDtjA6yuypQoiRgldvqHL6Vj7m1IxNL1kmka +E0+LdktJgSaCjWrN6QfI8LvgzYn1+8N71T4TETPNgiB0FISscdeT5hgVo+tSAumETjM/Anmb3hK1 +3JUZh1mDhLzX2fXTGJaGgoeA8mKbfK7kZXHkTpI57o2YmEZ/kT7FMBCcPkVVN3uIiT+1NlfrWxSU +ICSm9jCi77swTyMcI67g0im3ojHabMEgbZM0s/uFO0OuSbPpk7rnhpOp0jOU6Z4fEvrKemWA3U9J +ltsEYf4MjOkDkcGRhii0+9X3RG5gY4X/pMenZY5wYv5Z1WnsuMGWMX64eQ6Gg00W5u837M9qe3XH +hyEVnEs1lC07g7yHq6YpD1TRVSL1fVB2kYksYuqE4ky7jxMakqUYxtNy1Z4LKQJfOziIdwmUFcVF +ZFtUiuU0KMIBvTvKJMynNktAUVrlOj0XYMdzfS2JcVtSulQTaYTquqZXkARk/8pPBPy6uthiEO7V +4chY20qW2FXBi1M2npRYcFWsZHL0CIjMnhXp5hWDR4T1quOK+pu5tFLGPD6X8QQ/EIkSzZCWeQYS +oTImmnB1zEQq18c+zOcRV6qkvU3G89362ttP6kHJOG6W9CDf0G5fLEPLq0E49vcp/rkLaRiwTFYf +QAyjalrloawEFCbEXNgx1rmpsb9Vj4xLvVXnAfgD/FC/KQUEPcQ/iYEg91VOrXFpLh8O+hZz5iY5 +Ok7vR85sRpYuvJoJyppmxIOBotEDeDwtAkrOVYeccv4nRF3aC96igZJPqytEMhdb4OjxDroIJjp3 +9wHJQaG1r9wEBTmS1/SNWt2LFoAWDPK9lTqoLRt7AOg0VBpAZXTeW0q1gaI2fZwBB4hhTQuo+4KZ +rB5H3FyA56g4Fi2kWvt81DXsSrgTcPweIVIMml0QwVlxvD3hj+rnH/iUvas1C/F0ZazrBTreagUn +eirU0EJh69Y2GcvHAFqdxo4dSI/0s+2OJyyrae6K9eU8fINfzqESwMOgo0D7OVqMy4n8xwCmdTEB +QJk0egPXBq8M33EBc3eeIB2Ix+ZGP8sBKQeypdCdnSIlEmHum5qKA0hcG3Jabp0lb+YtDo6pJNRM +i7VkJ2PKjNhAvt5DCsFQF51NzBGMmXRa7XYByK+fJCYf41Q6TbhsA6/7oD2wZvDAfEwYYJ+A0nEm +7Rf+08NmkXux1HOcmF58CBjtMG9otd/fny/qDmCwEiOeKIFe8JooUS+doZQrd341zApDeWDM+FHQ +eEELgYAD8n8sIGjPMYlh6wUwDiXTjjC2CmarT442+8Q/yJ2NHlv3j7+qKjhKb4ajo7JRNEmqfnV4 +oPKXxYUPxXM4Wq89P+kI9HIl4kmGZpwjDmUgG2P2l2VeP1BUu+9H04VLy3tVfGqSHD8Vn3t/UtdH 
+HvCjMm3/vjE2zvEBqkg6e0Tsf8ziDb3VyQ5EpjV+3U1dqMLuy7gvd4i5jRKX5JF7ggrXcX8m4c2Z +Sp3Rj7iuUsmdwBJ9fX2lPP1HVarVQRLkyj95to5tD+cHk53olRKC0iCvlNsUJFjo+gcTE6n7IldA +Xll8/JRAg95NPJonuw8O0jC+bSFaZwOU42omrStkum6ucivupMAkFo/QgqXisf3+OFmk6sz91tJ8 +GG5RpP7rcp4ktYXbg52PSi0tS1hNNkwfKzg0mgwkPasQaVhksv+aEd7snoLKcTPxT/CVro5+P+z8 +pJ0dp+T6i+ImFSybb1ZuFmeQq+fkMc6XSpWpEN0SCEkzbRB6c1vwCKQOXAzmZGXxXLk5nZ1abmL/ +hi8S1SR2oakZTX0exQ/lNqEtpF77ZtBfEc5B5qo8xT94AurHoeyDiedeSbuaTyXYi2EG78J0TXwP +E1NrgPpWanmhf/vPlh+mUnhaZwlCywSvyVd5n7lNGsaZl+cVAkPbpBpFLyshwPxStLR0ccUwIjZT +ya6kdSlIco2wL0AXqOnr6hfoSzXSdDtZcK67cf4dyerFcoT/Nn5DK/BPkxDvJVLSmcFECZhpLZZp +DxbO5YWk1zdopbnRl1h5gV8j8rDMwJHNnL+C9MH+Xp+8M402rLh/ytzO+UGSdpjqqMUyueO90gMY +H2wi6NZ0o7wwUV5zwEHwxzRyjRaodVMCHb0TK5ASL5Rf6CnA6BrbtE2YKDgPsfDg7/tR8IszA6W7 +Dn4Uu3ICFebnoblVkUuuZ7wQWJgXpv7PW3YMavmw1eB6mR6jl27RYhYsswc5P6k3e1nYkVzyEFWK +C57fWIOk5PHma1ZYQGkukUYPcer5yFhQ3qNQ/nJxCD5FnjpjQNndiwPPg7sySylZwGYYPvL6J1QF +/GfWmYpInLLSmzBh//2eLHYMSJrtz0PqqvjaWLtXX4CrVU9GQkyaqGkdjd6vfAHJyFcdgYGPpfiH +c2sjQQYr/gFTIpBJH68dqOfnqTIStQHPDvMlZ+WslR56gdOZwUA5i3Z1dP105Dmvb0lO/8LN/gdV +baCpjEswDm/r1OEivVnGEprtxx7rnHtOpRvY8B/+gp/oY5IDP/b8EC5vTWwmeSRdhTVGZUHKq6a1 +9awkUSJkr4VeFbLQeAKnvHp4zNUz/S27Mxd1sfKsJSK9ze/fDBXHt6an25RVKtdlYUHJ41GYgcpP +mED7kQ4+AyniINo5Y625YuQjLegSoL/JaGEbH5oW8d/ifOE6Djk5FdPQJF/PHROvSMrCUhmiwX3y +KnKsr7ciyEM7tZtoAjSdv5R2cUBY7QeOpcOLl/icNezzdaRb5ahUq69oSZriqUAGSJ5RpKzvDO3l +zualkQa/gA7Nw5Lr8eID3m2oNVxz5CR87YcgrSe3kVzmeKtBBDJkS2agcuNis6RExuQwOaUOJxm0 +dCiV9SbXinqcpOsZxmRW5Up32/NozMD3Txc9Yzr5P5qcIuD3xkrMygf00tsoNntspT3pgjzZ9mDz +eYqQIDKROq25GBKtgG945ypYsB6e4P5XbgwbSxHqErewqSsvZMqKtzoYVpXb97KB4RBACXUPLI3a +u72hl30NR/raej5OkXFAsyMcHuU6nwPV92InMQmaq8WuT6i5JyXU7fSolXk8j5jo1rxV6QkT5qMw +xDrryUIE/EwRbBhdhuHmEU/ggErTT7lsGmjiH45Q17hyNeuXi/d19+4LXCVcw2tCafau1eCPYy+g +sGZTJ6LtNGJRNdY0gqq1iQbCmVq2y3QwjNlns2+6KlW7wy00LakHhhSkwSOiIA0MA+Rjd5rTgnZO +jhnhS11mYdcpVO2NFaloGMFNBVclavD4kGsMUC58SmIn+9coY5fDvnSLXl7sXEZTb2inEfOvhahj +98R2HMtfyuAJY/Yis/6Wb+yMDKjflJ+xXe7l1ytehGQpgxO8mkA26vQjlPVvsfvhrDmKnhEHATRu +G25w/8qa4q+9lLAiAsW3gUTIwRjHZto9WM8lGHU4KHDbqgsRmVafiKlhh66BcCqZNtfEvEWEDlM4 +Y6Yq5cYJNwuXRQiHOp76x3fq0rb2rowsBb0uThq6BpQ0o48wNlM9HdfikdnIgw8AGZwjHHgAUyoI +Fi+hqMgCu1I4mceKbJKH2ksdVWRbT++OXRXAxJw4ZfrznHPeKtLLKoBQemxXF41qQzdKfU4WFr4A +JwCNUirRWK+3zG57lggdMMrr3px5pHHldPXgOvrvSy9uRnB40OjCMcb4u2eU686rWiRFLDS/HkmV +dyivqQH1srOBN6NhYLvZKjVgFGddE16qCta/UMgJA2/WxBlTBGdjMjrzcxKnUj3Wy+uBqYmHwD9Y +UHN3AD/3eHX6RYeKVuT3Ft/G1tzgwxx8ksQ0Jitwx7ZGlWtPusfiUd7U4EOBgMePRCZlZpIHdjO4 +8jbc+RM/23RhunZH6mTrIeJCU/51UDubLeComBp99NwXyft8U69UWrO0TCSotLMYKakQ90CKx89K +lEj+zYJs3DYI+cHHM+2aW74Riw5mjnoE9GplVF1eSJJqAG3OC85DzOJ4pEJshbEKF+APT2/prJ+0 +0Tie3N095Ds1SBIBReZS//H3/rN2Sa57S4Ix7KjQmbl9MX+31FXPF32N6ikfctRa3jDWpcR23ubH +tWN+7j69u/GahTja3w5QRSXELR7MlY8/Jn13QIqeBljX/ezvJHxvi7TzlLngM2R2MXrla85yNLDd +26qFbYZR3O38be9C3v4HHeyndjQAVNW3UC4GnX8ySC0XA2ilgjhoLgM/bqIqAmPI5u9Qi+XjcA6A +GSBbzldlXWSX6pJ9JmzegEy6QSIFk0XAnjR3dAvAXlx/6SWonS/KiakcRJmcxtN5TXasm4uoyGUy +K3jKhqE45fOzy6mQEf+UrDrrzDtWNfVXOExQZ9g9ZLDUZyMyz/X84kmY+I1O40pqtAeuw/k2wT9y +6elYAkhwSEMfVlcdmoUxJAuesPkx0XYR0lzMN+lx5zl7FkwLpqkxHxFSch55ulj0vvO3+pHU/k6n +Ou3OSF4xdYiV9ZpMXKRV+9Q+HUue7OEJlyDz9sVOwex05MYwmIy7S4xZAPdP84Vb0x2XvEDpMwzD +Z/6a3rZJzwB1fqwzV9s5tKQrxzhGO24l/sg4J+rp5UGP49x173HVtcSfBTItC43IF5LLOv/S0tNU +tq8UzPWgCXQOVW54Gk0YKUwL+U9VxNqnCwNFCX97iHT+yFrCBie9AEfga7o2tBGvBYrADJURIm52 +LoMiHT2KhldgWuL8oRwyu87S2QDCDciZSGbfLHhlrggYrLK3ImcudpVCOa7cJjvXrdEyGpKUzrxi +9LkZt7DsWkZLbKj3NKOIk68pVgoG/FWwop5GJfaHI3Es8Cuexd2K79FJo5u3jVB0GuuRTUYfiUbM 
+l+1U98Ssnk7Gi1bAKckH8LHfrp+y9xCI/goxjvDD6B4NOEa+a3HsFwAx5HhOY98EEGo9q81oVHgv +jamPU2aitwJVbXGGCJ7JSbpy9A24Sw3NXZj1eOAvZy1SDdCUnGmWw2ZlR0wvjkV15U4RGfZyBaUj +3hqCHQxmTq65VGr40Y14R7+schZZmGsgfLQtiZuTX8msToR6pBTLBMN9sULYWTCh9DDF6yJHuL+S +LEJPbNeQiTPKWzQyi245V23Sz7jv/vYSiIo348+PpVbIJ2m21IhHqnJVnewp9t0sI70fpCz35kw/ +X3bj1RFsJkfvkHfeGN72NiWy5fLfutPmx8HYjDWAxFgFl7HjG5nekH0i1Dfp3+hcPt1WakEoI+4W +l0VDKuPEUT43RWXF6pzhD/X4OoqRCMfFWLfbAFEeJ1dh39RnUJFRItjcy+CHIR/R9lZ2ociVkxGS +BPbCqL7UC/jiYFhkDmldDsgaw4ONCnZa6WVhYD7bXWUq0AKcD55RY4u5jmrCid4vyG3R1BZGJytP +S8JfGkqRP4GeTB6fyVHz99kqzr8NhmCzXPDe4GEh7yLSgWj8C6B9lIcPj58CcguWy5qxDeggf4RA +r52eUsF1Z3/L4GY1kNhfhyBn2lF6ul1MRlEK1U4ud3FSieYWqIbDozG85QpB2FVKJOr5e5N020ID +4j0FcAtHuQp70ZpNhHIpfylpelmUrq34pIuAzm2kH1VBVSjTT2yMDfPr8rx508VA90H7EaP5jr3K +9XDLKcLVSTtjijRKatx+DJmZ0OZ3TdvCpbkednzjeQxM18EuK/fVd8Gd0GbxWQ4yhET72isN/Q2a +jOrcOg7t6lnPT6cMFOzXmyN36uwMSPb1aBy/D4tnfiMRN+mRs8RhSUFqSnga20+rJrUwCAvEaHa0 +dtBkn2rxyC8Vw63J9NRGm1gdcsIDJBvWWyUX77/LOcbPI/h/s5QLqLboaLmoPa+7wntnW6Kq6PI4 +25noiDRUiMiNxGFq5Hfv4NGXXoiFk/e3/tJbV8i+NDn7yeeK2ZgUkbmyIs26kCHrRsw4YOjAX/Ni +5AvfXqNDHfDLcrRLWxXBMiZPC2CeZb7i4LHWczTGImRovs8sJbLUQdxl8WdJ5bEwZeZ7EOT3BRHC +4nx4PPrb3x62uNzMKAhm7qzcXzyRVKXMObZ0p0geXHbQ9FJeQSoHkHsw3jn78fQgxFqOMhRrZ7fD +w7kn0b+2fwtCeraaAnyPObBBx7l1KzQrI5wZ1iKWlEMGtElqJc4v572q4rxZEh+4puKHvMFmk13/ +VH5MZGNqs8Nv+7E5hILyH5I65gvEIcZXSitd2MCs/uV+TFUum9c07L9ht5Q5hMUDgaDzr6dDto2F +Hb+I6IRG3uMvW0vhV6Q4vs/WV7tH+k9mg8TlGJv1Jg4kP5xbYm0iSaTAWn13onZyd3QAsHrV4JGv +vYmiDLWQsIhRqzm515TOZ44cdZbBffQSozj3dazA9SnM4naTaF9tNvdY9dIEigmOm2QiVImTwB2s +W1iSRyQhMwfMluUIFqJCvjZq/F2JoP3EYzTzA5/IDp/F/Bk70uydIoS42/GmBZMvFYmm3jKrpF7L +SqEMixpxyrY2pHoNSOkENy/9yNSfy4+yKhGSm5DySMi5LRcTpAk2Mig1f/E73ls2SUD6IutMZxUL +iylTjC7fNi/4/QTcSP5s5AiUzo6RQnHIbJZxASrRQfrwzU5ox+g4qWbpQeWaI+hN0V1181e80OWO +a+ihfyQmzoFroVQi5RAvApF536zJ4jlKBRUW/GY71su+2ROQvXPKXZ5TXdoxHuOCIScDAvEaslOx +G1s4B3UlmbRpAKq+bGG0Z2Dqlvr3CxnTrTvt4sDKWILOU9WD9icRBFzIhctwqc9w/HFZt7D+sxw/ +PvrZFtN0BVUhXAoE9RkP/mGgYnGyQX+/eWHzlwVRCwFn//pxR4N4kjwoo0O6YDtptnuvG7/YA1vm +CZwLE8igNp3rGVqhWRUWleq+O58icFWK4/x41jteTrASkfi93GN4R0N3gzLpEPhQ5F5Njyi47j32 +CFnZX216vlxsPm+iSLdRx0KIZsyP60WM5j3l4y9XD/bgTJ8E3aFPFqYWwiR0JKvh9b/OJo9XGHtI +BfAabVcS+jlMSJveqeRDNBkjBCNs/5XTKVnVVCLXj6boyDwZNecf4mmUQJIZ8vao9GT0VeOZDxY+ +AOVAh5ACeGfT/mQ3qc2y6yz8S1xw3au/G1fzMPp/wQTI+sstDPE5BwBI8VYEocDVJFiQsk5+8gwr +H8l31LnRgRz4O4FO0X69XWVtsy/adv5lJQP7i0yfc2gWhX5higP7l9B1e2HP7k8tgMKRyO1h8J4U +YiR9g7SfdUTajxXL/uYmidMjBSrL1Hx9PbFZtJaizNvLbIbT+CH0HzLoE8ayEB2lIDyoR3LayiTS +z9gy3dbHLoAUvaXYd+VZ9WfxUqZrw3+qtEWolE/SJOXHKohj7gs365ovMIdrgUIkNx4xv+Oe4WDo +AZNmP6SE4n63rsPp6gXDRs9kLuicHshIEY07AhPZcRyyVZvq9HVq4bUATo19WX2952f8xd7GfjGE +1NZS5rYTgB7OU14n5AT8w3Kl/v/2glqao57KA9SSgKVAsJqTJh+eqbfSot4qQjABKSGefai3xHai +1lORBaR6lEy9G3YU1oPZwf6sJqV9JxzoTc3EMZSwNidcUvpd27+0eJnE4blRO9YDSXIGKkpH1qjQ +YZAUNBm8P4dUM8mkJYx8L0RG47kTcUF88+N7WyO3houuhUN0e2fsBwdzEBHCXOkSNfFBQxe2l4LT +RXQUlGLUFQXoAXMvnlF92AXyHGrX1Hk8v9bbRLpgoW+bcafMtYAILRWg8knBY/UMe/+o8WnhI0Ro +y6K6lYm6fE6YR2YhUnqgkchQagv/i66R8oko1zWtlUAa1RyFgaFGrSVLAfHFwKVkHJRcUgWA7us/ +zO7g2eVCT8GWZL1icdQm9hS19HqYMc9VoJ2D1y3Qo9FW98NSNvM/8zRwGcjKoM4JPPPdjZ+MKBuu +43d59ayrdUp37zn5DBRvfjj2Xaw64qvgBwgMoUkRdVV/hXZOuCSnUMihnXI7Uii7K5x9uEag0TAW +6UyndZuXwKoEI08BH/3qgn11iJq14Xmfyjodu482C5NsGXdE4SC4YCxit98VSVuBO3KZ8n5LGBnm +K3gto7tNXg5vjvO9jctCEB6ZnDjo6CpBJ38Yb8/m4GuwLPNEnsnTYyRPQ2oCS5dr7BUqEj14oCnT +MYczIVxAK8/Zx+dgogLNpuaJ3cyVr9FheBS0BlxRLUtK2yps2hXVlVkEvqf6Ah/F2vAbZ6KMUh9p +zsbl4Ah0JthMJUB4ASYteOeJMQNq6Xcx4kCoEBdggIg+CHD13Cryn6k4X5D75PnuUXib48XEEZiD 
+JJjhqWj9QyUiCiX+h/gXLR1SNNHp0sROBD2tYK7XUvUQmS1vbvFcvsNYd7ZnM9yWE4v0bXLQZjjO +oqP9pZjztK62ZkkEzS9tM8ZUPQxqD+45RN5zxhzHJxeFBXLVKiYXrj+opcu/hCG/g+JJEXI06gAU +rNdRLjoQyYgG+KZtkCiWGLhxLZ0K5uUObvcEEGbkRvX1EKXTCptIqiPzxpiLCKnRup0M/+U8kf0J +hEjl+/M90Jj28jWqlCy6/qTC1ovXfoyol7YSDB4PfCuDqPwD2uPrWfMbWobyFspBfzhedy5eb/tf +wZHWtKwY2+l647hOogxiDuMBpUvMvntKQg1hk3CXjjw+cZkWsiO13mbnQcbUdM60lPJoABtgFTPv +rUG633zt5MRcPg/zLCN8Ts/SvU5qkerWmGjhUuTS1RUGJ9OJYlv+JmuUnFak3n5QNGJe5SIwtYqY +oBwH/BaE+SrihDvAOXOk5iUERE+Ml/WyekgIZPXwpDyjk3SgKrRYt2kQ6hArxl35ibrsunJzVUeB +h2jURh1wQgV1RQtfnfB42Upu9837h83TnCktszgvxnQiRdaTK23OAgYt28AqhYBbpsxyhIR7Oxe8 +FA+6WXHEpzf4BV1PdLqLpZNUS5Tc25CY3qXt31Y9zK0MZaaEXDJxJI+ULlXyjZ2j7kAsxCxB/aCF +p2vO4khUAxIsTAZDs74DfSE1FpuZWkY6wha2+uOPo1EJvSkFcofBHTA3FfjD9dLCVoSbDKcEZ0vK +/o0eXuGpsYuZT40mgzaJ3mcHUdYM5dPVBtIWQvfBWeRhS/owYj6hk+MVC8fey6Pb2uRXsRHOVDF9 +7SRE1N494l1NS9tNzOaYziV24ow6y/MwmS/DD2B383uuq//FwPdVFxGXKsErH4Xo1lditNEuC9jd +iTMEg54KBUmiSx++srd00EQxYSMSeW2awxvjDxiPoXNDCiiNTVg8cbgEIGHRRaKR/sl3UlOZX9EA +eCkalgLbanvzmtCUIQL4lxQxddvL2TsJaOT1U8C5SGVPMWSFVwjStzYmnjpNTlHHV47NHY3a8d9E +im3VdxX/sZkcrb5GBxx++OrgLupuJcaxfn5sjcwJV6ZUbb6FP5Qa2NgVcf42itQydvu6u88FXhZY +yyRnd/wvpusO9pjnEcJy7q40HRaUbwd8AZ7GycSEBozL2x+s93s1UHBU90x0nQMAZD9iQ62P0h24 +EeUcS1BMnMK+2FkNn/kHTLiCl1c68WHlj3Z5N0AjL8LsV0dbBrjexaK6ENOTGWKgMnXB4PawaMNt +U+RYVLNCHXgnCb5yddqtqAyPE9f5jLxglyaeekTE16uRisn6gdzy20kZbsRJyBI6byFVe05Euidi +bC1EoLhM+B125LkIq0rkEmFMWxSNHCQ2Cfqi+PjhSGF+gKhTDiveYk18RcwsnriDupX+NCF9ajQP +mO4AHZJiLKmKJUGKPZCxAvXKfhWzY3v7EJ2BtaJ1miOsk4MMPJ1X2YtyXxrVEW5hOlE856yBygZ+ +T7tbHszgOUezX4ii6q0jn6xLduETvTCPwwcxOL/lYB8gWF8PegqFRpbSXisJRsyGmX7DFLytIvg9 +pCzb5qlU2AN+6FVs226gwYs+YuXmrBX4+okLhJkQyOS/7GOMhUOjtc+qFodB/o+R35Ulzl1cbMN0 +GSTwGCvi9aydmURcMWnGssShHWAG/57aEsqW804tONP6pS03+iR/Rs7ZUs08Fjkx3crkfvpei3SW +CU2WiuxjwzVZeYnz3W8lwmY2HUSdildyM/VBIzqpyeHVJVES6vPMSIlS3xYRVd8VtPRqkLzQMsrQ +o8BM12GQ+n9Bc1wfK9YWeLMGyn4/ghJLhNLm3p692BNk9OxrfvnG6Qfc1tU/lxZtRCUm8bebF8Es +WkNEaQ2pFSnzsprOa+sxMW6Cs3td8m5EG4h/OUY+Vk9KABIWtThTchMqZ4KFvmW85qBMEcwqFvBy +9BUzYJBf7euVPUS7qwUtgoe0aRY+ZllUxRi3/3Bzzhd1iZdKNQkP6jtgg7iVxLvfiXtuaOTqlg/P +zGGvL6mZg+lV43JbayksZ0v/px1IsdB4KnjgYuDjoHN2FEzhPXaIJsc6321CYoblPjA2etAzed2V +eeCxcpH8dCIrWmFy32AESc++lezFyPjI67596ps+G+Ri5jwyoWm+V6cxSSU89FZkJSiWx4BBSYgI +p+IVDvAs88oc1dIuKPp6Eu20qSHU7Wjv+DZnwkg+X1zDhkiNnCulEhdwjF6yPtlZD1w0XSTMlVL6 +J1NMufHXxTcWuGsT+WhKV+LoNV2m31KZKFcZlJC9ivnFrnhS945gu8Y5FEyoOOPj8DKdFkIg7fLP +69BjThGglazcmFIx1Pf9Rr/QxfBp9XiffE+7/zHklQv0NndYdTLYywVG5W87bmddZ+EnHGy5dwD+ +p/g3is1zZi88aJ121T+rmX1y65YDhiAT7/WJe8jssa7QXbt02ngVmKizTE+PrYCVEAGMPNA/9Ox7 +/YwayRyG3EgVfQBQNa0/HuwruET9q912cc9UvOlMlQugXKAuCmXEhZDtLa4goZksOkOj9ay7dlfG +/+CLfdwabDZ2R698KQJX4N3KlKw3nFfPg1+pLoCpZJpbrPIn3FQvOCdqoj8fsa0GKz14zTyN20f9 +3uzU1z6F6ef1lEzitrlZWZxnCnZoXJTid9yzRhjGmLHhnOyA+IxOCQau1xj63qTdDGtnHcfbTRlf +Pu5/6B1avhVd/vbbQ/H0oYh5G7yxkBDHWyvUjNUt1UUpV9AWYmyED2bL7VEfEa4z4U81F0Js2cSZ +A5zHgfWq+02PvR4Qjmn1Hno4lanwEa/N4otoXZMJuu1vQHVbf8lyLBFkmENp6rH+dJlD9NIM9HYl +70qe83xPHeXDL2hvIIG0A5OldUj1ExikUsGrMcbVVEF9kCN0imcP58f+aotjjvjjdsEC9KNz+0/4 +jegRNGTqsP9ym4Ikg9LOyGGGIYRSxV0Muh2cFtd0lZ5GjqKGBuOboiMrJ+cyBooGnDAhYgkbUlOL +nqTJjmBmRU+VH9ihLSU/j3eeMrKG+2nsXWXHWecpzKFEd5Yrw9oyPDjtGeGzHBKBzu2g2ARxK0FC +ywJdoBvy5f5NTUqTFNx3oUPotDgTZ62dw4rD1uFIjK4xbFPfkkWUK4gfSy5CzdImjmFEVbkY3isv +TWG9SzQvzeFlIMDrvoSw6YYHQzLVPh3e4d7rTsEBvx29vJfNfuYKtYgen3H1aJdBi7p+dHXnKcMQ +qwevZFw91+xQLu1FhQSzdeyUAp8n3ONcK7uLAiF+Ybjl/uMRYxCBcpWxqMTHjOg957FfPEiJro3P +GXkHA6KgNip9r+fwPbKhgcG9h2xnFY4g2U2jyZfJHFPlelZmgm1pbQtFmOy8SX1jZlblWnVyXBo2 
+9M6R0QQfmhfCxTiV18ouUFDUQtv3mweqdalV5eNK5Bw8R8t2XPBVBgGIpgF1ji4FG61EQvPsUaEu +UztrtkhAqktA44QE8YWvZkPDTmZOSxpCx9U+fnIx05m7PZyGWiJMV+pP0QtprrZWHZmymmtBek1w +Yr7mvJA4yzGuwaBuku0P+tep71Nc1fH/zYhL/a9jLPiUM+RO/fZdEQqL9wt4TYLTVV0Ca2JByFDJ +2DcI5Ey0DjvJWdhbe1NEk1eFKgPAH8g0qdfoxFymfkc6o8xMx3UKug4yTbUXAk7BUKZQuYadpWmH +rJUJMgQG+IEDZqmOa8U3nanxrRaqsEjPjAdUp9lSu9bfNyY2VnANo9vN4RT+MFU4aWXbxNSbMpEM +VGOGfZ+jxbRBmqQ3NJT5PL5LtKX4Z4Ug9AeNL9Phhk6rELzcRhsnQnSdHkdNnkYFo+XYxmpMcC48 +oJPcVuv9K1hhrGzNyE/WrhBm5VnI9b24kaPxoIky4B/ZJlw/xT/r1ROVY3dxqdrdohTCvoi/DUkT +ijXBsv3da0Da4yFOslmfWf0QDiU0h91FplyYDghtvFRECukzuIfyCTkf70lsaIOfSNtjgZRsvRB2 +S18j8gI34zhsaF4LyJzvh4DZd7fmzD9fW9QL/m5MRbdBJW1qcgUn0y4SUhEYS6B7NbGdL4v9l2zr +lODzyjQbMAA5tX5m3MjYP6irx66U6XW5r0nDIh/5aYUihXgEf0qxU8v2rYI8+ouz+U9g8yQ0ypbK +cRUler0g9ioDSUs/hzer+4EQafQEqox6ZHwRW+TK7RTJdJgQQQdxXuCYDgp11CNDuzuGubv33htC +pieVIN8BNsXFlwjzSLeeXwjBnej/A0sb+zVQcHWKJfNAoxEe2unUWYeV1UQMgc+2Nt4J00N8L/Az +GPCFQOupDbHas4PMKpAcn1srFmHtCm00ClReGvWv+51S0WMJSbxDuOiJ77cl87rvhnHIkS9tEuIO +gg== +---- END config.xml ---- diff --git a/devices/tests/test_files/live_if_rulesets.tar b/devices/tests/test_files/live_if_rulesets.tar new file mode 100644 index 0000000000000000000000000000000000000000..9dfca7ae27477589bc931c3f3384255169048b9f GIT binary patch literal 28160 zcmeHQYjfMi74>KTimjcQ+O7mH-W1ieqe@P#iS1}ir86Dd!vTpUi3kJ`JZ!6R{(H~j zwM&r_AEF?|fkv`O3Rv8`yZ7AZUS|o5{cz;?ih3|Oif7K?^h}8}pCylW_C+bR41+wP zRJBbi|HfbNrBRirhGyBer5mXhq{VDV=?zS;`g_3&XO4f7 z8=m-}{`A!GS)39tB2I9b|NQ+Q^~-0oWYT+b^zO|OIXxp;;!N0+Ly}HomP|w6?UD2{ zVq_Sm(>;>L≫+F9&Xt?vYWL1u0SYNWw;8;3mYPha~abo^Dc#??V!^Pd)YU`QryQ z8-G4xvReKPyO95%Pj(FekcSoWuS@c8a{1S6qm}C%7?3_}r#1R_OmVJb;n!BnfvKdQlvv;VAU?N8xPd1n#~IN5N&aiJ+>j ziE!JsUFUuw|Nr*VX*S%k>d#47CI7l*O7hS3ziDY!YyYpaez(1*(Eo0b)K33${~!82 zV55{P{WK(#e6P%@eQFWQCe{H#0LSVQ+a#7os7@?=kBQ|HO1TwK=zbjmfYKgN@p9{k z!uohZZ3@ey3_9F)(!Qblh5VnyVKfZC+yMZrl7E9t^RLhmu)BiZn0P90YE3V zU4By_fkAAIi@r@c67Uz@>+!(62Ry(tTRaf*p9lXdvurT)l9`i^rn`WHmGZA^^ZH*? 
zsb#kMzX<@C_x~mH-=rFOHul5MJ%$X)(D#xla}P;0idpYT=lzHNhvegL+V9W)`Iir0 zDCIxTJKhlxA}^$7f`pybXi>@`Ga-{#oFSoZP5@UBRz7 z3z3Y#qcKj*#w=#Ri1o7A-*Y|K9F9LkGC_gM&`HD4PddM5-sr-OozIQI;l0iFpK0@sP%{c3|3j)saaN)IX^AW`gvv6o)*%tRBjfB(-A@;}Tx-}QohcZxY= zpAgJRb|L&=ah3d=s_g&U1PMXSuv__WVt17OKVS6P16Te2O(3jJ6z%)YC5wxl)8peQ^@W4-LWBv|3jk=EW@Fb>a{2u67G$Pz764pU zlUZN5C7B|2`3Fp93i;QKGXA&o0)UnBugmfOGXK}ge-q;WXi5BULHzTue_yHq5W#*M z-aTA?$+cJEW{Xh&)}DP?D*)}hhytsVmG?%hz=B3r@AeOoGxmlNbgtFt&)Gs zv?cpLmw#Pt<-eM~JN;eCf09hESO10l_g~#U{$Eu9b(cu>S|n&S4Y*v%jrmV3=DEn8 zHh^CcIq5CJiGn=Hacy+0MVNX$VXCqdk~B`z&i8_lf1}j@=gbdSx=RRHA^$3!^Z&X0 z+thCT|DCV(vH>Oi|J|#f-n<;VeR)naeKGl;gFvx2>aCuK2hwmb@;$hVdvk$;T$XH` z>PtkOXB*dlT7;b8=P-5=5%@L3GvIFA^x!f=p}eT>Q(k=6@odHhGES{wgqV$VG#Z-40X@D0hsHzW`IkvxR@`03-P zq2*3Wa{+ry2_qJG!8kn(Yy;pw3i~$1f0XY( zJp51BNnmhhx5xif&S`9l|9sLG@P8|!H^l$jQr+TzBa(PO4_U3C2$cB%{9D$nq_l)WxyYG3vYnW-c@a| zIrX2Jb3sre6JO?;coI3WGh-Eqsk8?@b(XB@)-vjZjq&1v#g0XKQLs_+=8JfyPhHKei&+#211_xXs-nR zt7ZR()8jYIp8r&}x$~cS{}+gT8~<-&FO>-7jb=B5Px9O^-xuTzSxX>l9@GbT)``Zs zRxoQcf~b4Jz=#xP@P@>^98E*S@FV{cq@ZdW$4W8SJjUmhXKPM zju!v7U)A;REaV&QAKx3Mq8gT>T82V9nz6^<>xQCP#oO+Iq8VNM*(~0x2fCu!il%jo z_XnC{>1ZNqi@Qxz(P(vf7sDN>3P#LFSM>wM#E`nU+`xdee6LyqMv|rSt z7MgJArAZI1y7pe4D=$BJ{^F-4C#Ppc=_sa2B)8224S0+sgu@f$J!d>dxfgQ>DExA> zQA&=2%e~Tx7$Ee<<8%2(Xbj?qmowDSk~0>2c@_h_IFoALz=}5soHUEsAVHZLKc`FH zIKPB3*oqlH={G>77_S{1sMwLM2=bP)Kjszv<&!pI`F?doKnNi^$map=v2q>uWSE>c0H2C zya|!$O+^A}|9^nnqO=n*E22bQm2`E{n#ih;F9ODz;Z!oWH{-O3 ze9f~W9mTu{RwPq=Gghkbm|zBmpmx8^lmV!3x=Xdde_bo^|6SAlpH)j2_5TJMhvogB zTl@dcFkc?Hu>Y^Le-u`%u6O4Xz*VFeyM$wC)e{c(Bb!C394=mV+NqZAS&}AGCz*Z^ zC|(Y-WfFlKt)hp#d~zv|7H^;R`X3pl@9gywM7Y}jgB1S*2yf3-VZn`% zeUpG!3DvpozDsCW-h`Bs(EsmSAWU@txUlE&LruP{#sDEoSQZL8Ntn> z34cYUR&Xm9Qx)9oA07WFaa5kAUf^gAO}7I+mSbu~{Pn9c&%=sUIcjQEBqZHvk44U< qA3E?B;WLu&3B~fb{B>Slu)2fQ4%VUVReNWM*ACpyKsy7~4E!J4f@d25 literal 0 HcmV?d00001 diff --git a/devices/tests/test_filters.py b/devices/tests/test_filters.py new file mode 100644 index 0000000..9b4417d --- /dev/null +++ b/devices/tests/test_filters.py @@ -0,0 +1,30 @@ +import pytest + +from devices.filters import DeviceFilter +from devices.models.device import Device + +device_filter_query_parameters = [ + [{'name': 'FIREWALL'}, 1], + [{'name': 'test'}, 2], + [{'name': 'device'}, 0], + [{'ip': '1.1.1.1'}, 1], + [{'type': 'firewall'}, 3], +] + + +@pytest.mark.unit +@pytest.mark.django_db +class TestDevices: + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + self.admin_user = django_user_model.objects.get(username='admin') + Device.objects.create(ip='1.1.1.1', port='1500', type='firewall', name='test') + Device.objects.create(ip='1.1.1.0', port='11111', type='firewall', name='test') + Device.objects.create(ip='2.2.2.2', port='2500', type='firewall', name='FIREWALL') + Device.objects.create(ip='3.3.3.3', port='9999', type='endpoint', name='ENDPOINT') + + @pytest.mark.parametrize('query', device_filter_query_parameters) + def test_filter_device_with_parameters(self, query): + queryset = DeviceFilter(query[0], Device.objects.all()).qs + assert queryset.count() == query[1] diff --git a/devices/tests/test_firewall_api.py b/devices/tests/test_firewall_api.py new file mode 100644 index 0000000..a884aac --- /dev/null +++ b/devices/tests/test_firewall_api.py @@ -0,0 +1,236 @@ +import os +from unittest import mock + +import pytest +import requests +from django.urls import reverse +from rest_framework import status + +from devices.models.device import Device, DeviceGroup +from devices.models.firewall import ArmaIndustrialFirewall +from 
devices.services.firewall import InvalidCredentialException, IncompatibilityVersionException, ConnectionException, \
+    InvalidResponseException, FailedUploadException, InvalidFileException
+
+FIREWALL_DATA = {
+    "name": "ADD IFTEST",
+    "ip": "192.168.56.103",
+    "key": "nWM0Pnj4w3DJHbkfIRQ2CbUdqIc0TMUYIHohRCSqWJ5TycVfLo3JlIyurmOXN7MaRMQv/hlUIPbD89Ng",
+    "secret": "veREg8dbHC/V4hSCi6LBzuQ0NF5eeS/50d7K7Ahut6X0N/77peVQE5ucIJ/fyKhp0RNlbCHEcen2Rk8U",
+    "port": 5000,
+    "type": 'firewall'
+}
+
+TEST_REQUEST_RESPONSE_LIST = [
+    requests.exceptions.ConnectTimeout,
+    requests.exceptions.ConnectionError,
+    requests.exceptions.Timeout
+]
+
+check_connection_exceptions = [InvalidCredentialException, IncompatibilityVersionException, ConnectionException]
+upload_file_exceptions = [InvalidFileException, FailedUploadException, InvalidResponseException]
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+TEST_FILES = os.path.join(BASE_DIR, "tests", "test_files")
+
+
+def _get_json_mock(value):
+    return value
+
+
+@mock.patch('devices.services.firewall.firewall.FIREWALL_TIMEOUT', 1)
+@pytest.mark.django_db
+class TestFirewallAPI:
+
+    @pytest.fixture(autouse=True)
+    def setup_tests(self, api_client, django_user_model, add_user_with_permissions):
+        self.user = add_user_with_permissions(username='test_admin', password='test_admin_pass', is_superuser=True)
+
+    @pytest.mark.unit
+    def test_getting_list_of_firewalls(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+        url = reverse('firewall-list')
+        response = api_client.get(url)
+        assert response.json()['count'] == 1
+        assert response.json()['results'][0]['id'] == firewall.id
+
+    @pytest.mark.unit
+    def test_getting_firewall(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+        url = reverse('firewall-detail', args=[firewall.pk])
+        response = api_client.get(url)
+        assert response.status_code == status.HTTP_200_OK
+        assert response.json()['id'] == firewall.id
+
+    @pytest.mark.unit
+    def test_getting_firewall_with_invalid_id(self, api_client):
+        api_client.force_authenticate(self.user)
+        url = reverse('firewall-detail', args=[56789])
+        response = api_client.get(url)
+        assert response.status_code == status.HTTP_404_NOT_FOUND
+
+    @pytest.mark.unit
+    @mock.patch('devices.services.firewall.firewall.FirewallService.check_connection')
+    def test_updating_firewall_with_valid_data(self, mock_check, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+        url = reverse('firewall-detail', args=[firewall.id])
+        new_data = FIREWALL_DATA.copy()
+        new_data['port'] = 4545
+        new_data['name'] = 'new_firewall'
+        response = api_client.patch(url, data=new_data)
+        assert response.status_code == status.HTTP_200_OK
+        assert response.json()['id'] == firewall.id
+        assert response.json()['port'] == new_data['port']
+        assert response.json()['name'] == new_data['name']
+
+    @pytest.mark.unit
+    def test_updating_firewall_with_invalid_port(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+        url = reverse('firewall-detail', args=[firewall.id])
+        new_data = {
+            'name': 'new_firewall',
+            'port': 99999
+        }
+        response = api_client.patch(url, data=new_data)
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+        assert 'port' in response.json()
+        assert response.json()['port'] == ['Ensure this value is less than or equal to 65535.']
+
+    @pytest.mark.unit
+    def test_updating_firewall_with_connection_error(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+        firewall_id = firewall.id
+        url = reverse('firewall-detail', args=[firewall_id])
+        new_data = FIREWALL_DATA.copy()
+        new_data.update({'name': 'new_firewall', 'port': 9999})
+        with mock.patch('requests.Session.get', side_effect=ConnectionException):
+            response = api_client.patch(url, data=new_data)
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+        assert response.json()['detail'] == 'There was a problem connecting to the firewall'
+
+        firewall = ArmaIndustrialFirewall.objects.get(pk=firewall_id)
+        assert firewall.name == FIREWALL_DATA['name']
+        assert firewall.port == FIREWALL_DATA['port']
+
+    @pytest.mark.unit
+    @mock.patch('devices.services.firewall.firewall.FirewallService.check_connection')
+    def test_updating_firewall_null_group(self, mock_check, api_client):
+        api_client.force_authenticate(self.user)
+        device_group = DeviceGroup.objects.create(name='IF test group')
+        data = FIREWALL_DATA.copy()
+        data['group'] = device_group
+        firewall = ArmaIndustrialFirewall.objects.create(**data)
+        url = reverse('firewall-detail', args=[firewall.id])
+        data['group'] = ''
+        response = api_client.patch(url, data=data)
+        assert response.status_code == status.HTTP_200_OK
+        assert response.data['group'] is None
+
+    @pytest.mark.unit
+    def test_creating_firewall_without_ip(self, api_client):
+        api_client.force_authenticate(self.user)
+        url = reverse('firewall-list')
+        data = FIREWALL_DATA.copy()
+        del data['ip']
+        response = api_client.post(url, data=data)
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+        assert 'ip' in response.json()
+        assert response.json()['ip'] == ['This field is required.']
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize('req_res', TEST_REQUEST_RESPONSE_LIST)
+    def test_create_firewall_with_network_error(self, req_res, api_client):
+        api_client.force_authenticate(self.user)
+        with mock.patch('requests.Session.get', side_effect=req_res):
+            url = reverse('firewall-list')
+            response = api_client.post(url, data=FIREWALL_DATA)
+        assert response.json()['detail'] == 'There was a problem connecting to the firewall'
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize('exc', check_connection_exceptions)
+    def test_create_firewall_with_error_check_connection(self, exc, api_client):
+        api_client.force_authenticate(self.user)
+        with mock.patch('devices.services.firewall.FirewallService.check_connection', side_effect=exc):
+            url = reverse('firewall-list')
+            data = FIREWALL_DATA.copy()
+            data['key'] = 'invalid'
+            response = api_client.post(url, data=data)
+        assert response.json()['detail'] == exc.default_detail['detail']
+
+    @pytest.mark.unit
+    def test_delete_firewall(self, api_client):
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+        assert ArmaIndustrialFirewall.objects.filter(id=firewall.id).exists()
+        assert Device.objects.filter(id=firewall.id).exists()
+
+        api_client.force_authenticate(self.user)
+        url = reverse('firewall-detail', args=[firewall.id])
+        response = api_client.delete(url)
+        assert response.status_code == status.HTTP_204_NO_CONTENT
+        assert not ArmaIndustrialFirewall.objects.filter(id=firewall.id).exists()
+        assert not Device.objects.filter(id=firewall.id).exists()
+
+    @pytest.mark.unit
+    @pytest.mark.parametrize('exc', upload_file_exceptions)
+    def test_upload_firewall_config_error(self, exc, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+
+        file_path = os.path.join(TEST_FILES, 'config.xml')
+        file = open(file_path, 'r')
+        url = reverse('firewall-upload-config', args=[firewall.id])
+        data = {'conffile': file}
+        with mock.patch('devices.services.firewall.FirewallService.upload_file', side_effect=exc):
+            response = api_client.post(url, data)
+        assert response.json()['detail'] == exc.default_detail['detail']
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+
+    @pytest.mark.unit
+    def test_upload_firewall_config_without_file(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+
+        url = reverse('firewall-upload-config', args=[firewall.id])
+        response = api_client.post(url)
+        assert 'conffile' in response.json()
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+
+    @pytest.mark.unit
+    @mock.patch('devices.services.firewall.FirewallService.upload_file', lambda *args: {'status': 'ok'})
+    def test_upload_firewall_config_success(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+
+        file_path = os.path.join(TEST_FILES, 'config.xml')
+        file = open(file_path, 'r')
+        url = reverse('firewall-upload-config', args=[firewall.id])
+        data = {'conffile': file}
+
+        response = api_client.post(url, data)
+        assert response.json()['status'] == 'ok'
+        assert response.status_code == status.HTTP_200_OK
+
+    @pytest.mark.unit
+    @mock.patch('devices.services.firewall.FirewallService.reboot', lambda *args: {'status': 'ok'})
+    def test_reboot_firewall_success(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+
+        url = reverse('firewall-reboot', args=[firewall.id])
+        response = api_client.post(url)
+        assert response.json()['status'] == 'ok'
+        assert response.status_code == status.HTTP_200_OK
+
+    # Without IF parallel execution
+    @pytest.mark.integration
+    def test_get_firewall_status(self, api_client):
+        api_client.force_authenticate(self.user)
+        firewall = ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)
+        url = reverse('firewall-status', args=[firewall.id])
+        response = api_client.get(url)
+        assert response.status_code == status.HTTP_200_OK
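Note: every test in TestFirewallAPI above re-authenticates the client and re-creates the same firewall row by hand. One possible consolidation is a shared fixture; a minimal sketch reusing the suite's own api_client and add_user_with_permissions fixtures (the authed_firewall name is hypothetical, not part of this patch):

    import pytest

    @pytest.fixture
    def authed_firewall(api_client, add_user_with_permissions):
        # The steps each test currently repeats: superuser, authenticated
        # client, one ArmaIndustrialFirewall built from FIREWALL_DATA.
        user = add_user_with_permissions(username='test_admin', password='test_admin_pass', is_superuser=True)
        api_client.force_authenticate(user)
        return api_client, ArmaIndustrialFirewall.objects.create(**FIREWALL_DATA)

A test would then accept authed_firewall instead of opening with the same two setup lines.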
diff --git a/devices/tests/test_firewall_live.py b/devices/tests/test_firewall_live.py
new file mode 100644
index 0000000..926aff9
--- /dev/null
+++ b/devices/tests/test_firewall_live.py
@@ -0,0 +1,78 @@
+import os
+
+from unittest import mock
+import pytest
+from django.urls import reverse
+from rest_framework import status
+
+from devices.enums import ArmaIndustrialFirewallStatus
+from devices.models.firewall import ArmaIndustrialFirewall
+from devices.services.firewall import ConnectionException
+from devices.services.firewall import FirewallService
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+TEST_FILES = os.path.join(BASE_DIR, "tests", "test_files")
+
+LIVE_FIREWALL_DATA = {
+    "name": "LIVE IF TEST",
+    "ip": os.getenv('LIVE_TEST_FIREWALL_IP', ''),
+    "key": os.getenv('LIVE_TEST_FIREWALL_KEY', ''),
+    "secret": os.getenv('LIVE_TEST_FIREWALL_SECRET', ''),
+    "port": 5500,
+    "type": 'firewall'
+}
+
+TEST_FW_GET = [
+    'firewall-download-config',
+    'firewall-download-rulesets'
+]
+
+TEST_FW_SET = [
+    ('live_if_rulesets.tar', 'firewall-upload-ids-rulesets', 'rulesets'),
+    ('live_if_config.xml', 'firewall-upload-config', 'conffile'),
+]
+
+
+@pytest.mark.django_db
+class TestFirewallAPI:
+
+    @pytest.fixture(autouse=True)
+    def setup_tests(self, api_client, django_user_model, add_user_with_permissions):
+        self.user = add_user_with_permissions(username='test_admin', password='test_admin_pass', is_superuser=True)
+        api_client.force_authenticate(self.user)
+        pytest.firewall = ArmaIndustrialFirewall.objects.create(**LIVE_FIREWALL_DATA)
+        info = FirewallService(pytest.firewall).get_info()
+        fw_status = info.get('status', ArmaIndustrialFirewallStatus.error)
+        if fw_status != ArmaIndustrialFirewallStatus.online:
+            assert False
+
+    @pytest.mark.live_firewall
+    def test_live_get_firewall_status_online(self, api_client):
+        assert True
+
+    @pytest.mark.parametrize('url_name', TEST_FW_GET)
+    @pytest.mark.live_firewall
+    def test_live_get_firewall_config(self, api_client, url_name: str):
+        try:
+            url = reverse(url_name, args=[pytest.firewall.id])
+            response = api_client.get(url)
+            assert response.reason_phrase == 'OK'
+            assert response.status_code == status.HTTP_200_OK
+            assert response.content is not None
+        except ConnectionException:
+            assert False
+
+    @mock.patch('devices.constants.FIREWALL_TIMEOUT', 30)
+    @pytest.mark.parametrize('file_name, url_name, data_name', TEST_FW_SET)
+    @pytest.mark.live_firewall
+    def test_live_set_firewall_suricata(self, api_client, file_name: str, url_name: str, data_name: str):
+        try:
+            file_path = os.path.join(TEST_FILES, file_name)
+            file = open(file_path, 'r')
+            url = reverse(url_name, args=[pytest.firewall.id])
+            data = {data_name: file}
+            response = api_client.post(url, data)
+            assert response.json()['status'] == 'ok'
+            assert response.status_code == status.HTTP_200_OK
+        except ConnectionException:
+            assert False
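Note: the live tests above hard-fail in setup_tests when the firewall is unreachable, and LIVE_FIREWALL_DATA silently falls back to empty strings when the LIVE_TEST_FIREWALL_* variables are unset. If skipping is preferred over failing in environments without a live ARMA IF, a sketch of a reusable marker (an assumption about the desired behaviour, not something this patch does):

    import os
    import pytest

    # Apply to live tests so an unconfigured environment skips them instead of failing.
    requires_live_firewall = pytest.mark.skipif(
        not os.getenv('LIVE_TEST_FIREWALL_IP'),
        reason='LIVE_TEST_FIREWALL_IP is not set; no live ARMA IF to test against',
    )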
diff --git a/devices/tests/test_firewall_service.py b/devices/tests/test_firewall_service.py
new file mode 100644
index 0000000..5dc17c3
--- /dev/null
+++ b/devices/tests/test_firewall_service.py
@@ -0,0 +1,144 @@
+import os
+import tempfile
+from unittest import mock
+from unittest.mock import patch
+
+import pytest
+import requests
+from rest_framework import status
+
+from devices.models.firewall import ArmaIndustrialFirewall
+from devices.services.firewall import FirewallService, ConnectionException, IncompatibilityVersionException, \
+    InvalidCredentialException, InvalidResponseException, FailedUploadException, InvalidFileException
+from devices.tasks.firewall import download_files_from_firewall
+from storage.models import DataStorage
+
+TEST_REQUEST_RESPONSE_LIST = [
+    requests.exceptions.ConnectTimeout,
+    requests.exceptions.ConnectionError,
+    requests.exceptions.Timeout
+]
+
+check_connection_exceptions = [InvalidCredentialException, IncompatibilityVersionException, ConnectionException]
+upload_file_exception = [
+    [InvalidFileException, {'status': 'invalid'}],
+    [FailedUploadException, {'status': 'failed'}],
+    [InvalidResponseException, {'status': 'blah-blah'}]
+]
+
+TEST_ARMAIF_VERSIONS = [
+    ['3.6', True],
+    ['3.5.1', False],
+    ['3.6-rc1', True],
+    ['3.6-rc2', True],
+    ['3.6-rc3', True],
+    ['3.6-rc0', True],
+    ['3.6-rc1234', True],
+    ['3.6-rс-41', True],
+    ['3.6-rс13232', True],
+    ['3.6-rc3123123', True],
+    ['3.8', True],
+    ['1238.13-123.fda213', True],
+    ['3.9-rc1', True],
+    ['111111111111-22222222', True],
+    ['3.6-kek', True],
+    ['2.6', False],
+    ['3.9req', False],
+    ['1234123.rqr123-e12', False],
+    ['f89y48fqyhiuyhf8o71y-82y8f82y73f8y', False],
+    ['1-2-3-4-5', False],
+    ['3.5', False],
+    ['3.5-dev18723', False],
+]
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+TEST_FILES = os.path.join(BASE_DIR, "tests", "test_files")
+
+get_addr = [
+    ['1.1.1.1', 'api/test'],
+    ['5.1.1.1', '/543'],
+    ['1.56.90.255', ''],
+]
+
+
+@pytest.mark.unit
+@pytest.mark.django_db
+class TestFirewallService:
+
+    @pytest.fixture(autouse=True)
+    def setup_tests(self, api_client, django_user_model, add_user_with_permissions):
+        username = 'foo'
+        password = 'bar'
+        self.user = add_user_with_permissions(username=username, password=password, is_superuser=True)
+        self.firewall = ArmaIndustrialFirewall(
+            name='IF', ip='1.1.1.1', key='key', secret='secret', port=1500, type='firewall')
+
+    @pytest.mark.parametrize('req_res', TEST_REQUEST_RESPONSE_LIST)
+    def test_check_connection_to_firewall_exception_responses(self, req_res):
+        data_request = {"ip": "1.2.3.4", "key": "123", "secret": "321"}
+
+        with mock.patch('requests.Session.get', side_effect=req_res):
+            with pytest.raises(ConnectionException) as e:
+                FirewallService().check_connection(data_request)
+        assert e.value.detail['detail'] == 'There was a problem connecting to the firewall'
+
+    def test_check_connection_to_firewall_invalid_credentials(self):
+        data_request = {"ip": "1.2.3.4", "key": "123", "secret": "321"}
+
+        with mock.patch('requests.Session.get') as mock_get:
+            mock_get.return_value.status_code = status.HTTP_401_UNAUTHORIZED
+            with pytest.raises(InvalidCredentialException) as e:
+                FirewallService().check_connection(data_request)
+        assert e.value.detail['detail'] == 'Invalid credentials provided to connect to firewall'
+
+    def test_check_connection_to_firewall_incompatible_version(self):
+        data_request = {"ip": "1.2.3.4", "key": "123", "secret": "321"}
+        with mock.patch('requests.Session.get') as mock_get:
+            mock_get.return_value.status_code = 200
+            mock_get.return_value.json.return_value = {'status': 'ok', 'items': {'product_version': '3.1'}}
+            with pytest.raises(IncompatibilityVersionException) as e:
+                FirewallService().check_connection(data_request)
+        assert e.value.detail['detail'] == 'The firewall version is incompatible with the current console version'
+
+    @pytest.mark.parametrize('exc', upload_file_exception)
+    def test_upload_firewall_config_with_error(self, exc, api_client):
+        api_client.force_authenticate(self.user)
+        file_path = os.path.join(TEST_FILES, 'config.xml')
+        file = open(file_path, 'r')
+        with mock.patch('requests.Session.post') as mock_post:
+            mock_post.return_value.status_code = 200
+            mock_post.return_value.json.return_value = exc[1]
+            with pytest.raises(exc[0]) as e:
+                FirewallService(self.firewall).upload_file(file, 'config')
+        assert e.value.detail['detail'] == exc[0].default_detail['detail']
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+TEST_FILES = os.path.join(BASE_DIR, "tests", "test_files")
+
+get_addr = [
+    ['1.1.1.1', 'api/test'],
+    ['5.1.1.1', '/543'],
+    ['1.56.90.255', ''],
+]
+
+
+@pytest.mark.unit
+@pytest.mark.django_db
+class TestFirewallService:
+
+    @pytest.fixture(autouse=True)
+    def setup_tests(self, api_client, django_user_model, add_user_with_permissions):
+        username = 'foo'
+        password = 'bar'
+        self.user = add_user_with_permissions(username=username, password=password, is_superuser=True)
+        self.firewall = ArmaIndustrialFirewall(
+            name='IF', ip='1.1.1.1', key='key', secret='secret', port=1500, type='firewall')
+
+    @pytest.mark.parametrize('req_res', TEST_REQUEST_RESPONSE_LIST)
+    def test_check_connection_to_firewall_exception_responses(self, req_res):
+        data_request = {"ip": "1.2.3.4", "key": "123", "secret": "321"}
+
+        with mock.patch('requests.Session.get', side_effect=req_res):
+            with pytest.raises(ConnectionException) as e:
+                FirewallService().check_connection(data_request)
+            assert e.value.detail['detail'] == 'There was a problem connecting to the firewall'
+
+    def test_check_connection_to_firewall_invalid_credentials(self):
+        data_request = {"ip": "1.2.3.4", "key": "123", "secret": "321"}
+
+        with mock.patch('requests.Session.get') as mock_get:
+            mock_get.return_value.status_code = status.HTTP_401_UNAUTHORIZED
+            with pytest.raises(InvalidCredentialException) as e:
+                FirewallService().check_connection(data_request)
+            assert e.value.detail['detail'] == 'Invalid credentials provided to connect to firewall'
+
+    def test_check_connection_to_firewall_incompatible_version(self):
+        data_request = {"ip": "1.2.3.4", "key": "123", "secret": "321"}
+        with mock.patch('requests.Session.get') as mock_get:
+            mock_get.return_value.status_code = 200
+            mock_get.return_value.json.return_value = {'status': 'ok', 'items': {'product_version': '3.1'}}
+            with pytest.raises(IncompatibilityVersionException) as e:
+                FirewallService().check_connection(data_request)
+            assert e.value.detail['detail'] == 'The firewall version is incompatible with the current console version'
+
+    @pytest.mark.parametrize('exc', upload_file_exception)
+    def test_upload_firewall_config_with_error(self, exc, api_client):
+        api_client.force_authenticate(self.user)
+        file_path = os.path.join(TEST_FILES, 'config.xml')
+        with open(file_path, 'r') as file:
+            with mock.patch('requests.Session.post') as mock_post:
+                mock_post.return_value.status_code = 200
+                mock_post.return_value.json.return_value = exc[1]
+                with pytest.raises(exc[0]) as e:
+                    FirewallService(self.firewall).upload_file(file, 'config')
+                assert e.value.detail['detail'] == exc[0].default_detail['detail']
+
+    @pytest.mark.parametrize('armaif_version', TEST_ARMAIF_VERSIONS)
+    def test_check_armaif_version_validator(self, armaif_version):
+        assert FirewallService.firewall_version_validator(armaif_version[0]) == armaif_version[1]
+
+    @pytest.mark.parametrize('addr', get_addr)
+    def test_get_addr_firewall(self, addr):
+        firewall = ArmaIndustrialFirewall(
+            name='IF', ip=addr[0], key='key', secret='secret', port=1500, type='firewall')
+        assert FirewallService(firewall).get_addr(addr[1]) == 'https://{}/{}'.format(firewall.ip, addr[1].lstrip('/'))
+
+    def test_firewall_download_file_task(self):
+        """Test downloading a file from the (mocked) firewall and adding it to storage."""
+        file_name = 'test_abc.tar'
+        # Keep a reference to the TemporaryDirectory object; otherwise it is
+        # garbage-collected immediately and the directory is removed.
+        tmp_dir = tempfile.TemporaryDirectory()
+        tmp_mediaroot = tmp_dir.name
+
storage_file_name = os.path.join(tmp_mediaroot, file_name) + with patch('devices.services.firewall.firewall.FirewallService.download_file', lambda *args: (b'firewall__data', file_name)): + with patch('devices.tasks.firewall.MEDIA_ROOT', tmp_mediaroot): + with patch('devices.tasks.firewall.get_storage_path', lambda *args: file_name): + pk = download_files_from_firewall(self.firewall, self.user, 'config') + + assert isinstance(pk, int) + + storage = DataStorage.objects.get(pk=pk) + assert storage.file == file_name + + assert os.path.exists(storage_file_name) + with open(storage_file_name, 'br') as file: + data = file.read() + assert data == b'firewall__data' + assert storage.format == DataStorage.Format.XML diff --git a/devices/tests/test_group_devices_api.py b/devices/tests/test_group_devices_api.py new file mode 100644 index 0000000..fb45fa2 --- /dev/null +++ b/devices/tests/test_group_devices_api.py @@ -0,0 +1,133 @@ +from unittest.mock import patch + +import pytest +from rest_framework import status +from rest_framework.reverse import reverse + +from devices.enums import DeviceType +from devices.models.device import Device, DeviceGroup +from devices.models.endpoint_device import EndpointModel +from devices.tests.endpoint_utils import mock_redis_return_online + + +@pytest.mark.unit +@pytest.mark.django_db +class TestDeviceGroupAPI: + + @pytest.fixture(autouse=True) + def setup_tests(self, django_user_model): + self.admin_user = django_user_model.objects.get(username='admin') + self.device1 = Device.objects.create(ip='1.1.1.1', port='1500', type='firewall') + self.device2 = Device.objects.create(ip='2.2.2.2', port='5555', type='firewall') + DeviceGroup.objects.create(name='group1') + DeviceGroup.objects.create(name='group2', description='description2') + + def test_getting_list_of_device_group(self, api_client): + api_client.force_authenticate(self.admin_user) + response = api_client.get(reverse('device-groups-list')) + assert response.data['count'] == 2 + assert response.data['results'][0]['name'] == 'group1' + assert response.data['results'][0]['description'] is None + + assert response.data['results'][1]['name'] == 'group2' + assert response.data['results'][1]['description'] == 'description2' + + def test_getting_device_group(self, api_client): + api_client.force_authenticate(self.admin_user) + group = DeviceGroup.objects.create(name='group') + response = api_client.get(reverse('device-groups-detail', kwargs={'pk': group.pk})) + assert response.status_code == status.HTTP_200_OK + assert response.data['name'] == 'group' + + def test_updating_device_group_with_valid_data(self, api_client): + api_client.force_authenticate(self.admin_user) + group = DeviceGroup.objects.create(name='group') + response = api_client.patch( + reverse('device-groups-detail', kwargs={'pk': group.pk}), + data={'name': 'new_name_group', 'description': 'description'} + ) + assert response.status_code == status.HTTP_200_OK + assert response.data['name'] == 'new_name_group' + assert response.data['description'] == 'description' + + group = DeviceGroup.objects.get(pk=group.pk) + assert group.name == 'new_name_group' + assert group.description == 'description' + + def test_updating_device_with_invalid_data(self, api_client): + api_client.force_authenticate(self.admin_user) + group = DeviceGroup.objects.create(name='group') + group_id = group.pk + response = api_client.patch( + reverse('device-groups-detail', kwargs={'pk': group_id}), + data={'name': '', 'description': 'test'} # invalid name + ) + assert 
response.status_code == status.HTTP_400_BAD_REQUEST
+        assert 'name' in response.data
+
+        # the group must not be updated
+        group = DeviceGroup.objects.get(pk=group_id)
+        assert group.name == 'group'
+        assert group.description is None
+
+    def test_deleting_device_group(self, api_client):
+        api_client.force_authenticate(self.admin_user)
+        group = DeviceGroup.objects.create(name='group')
+        group_pk = group.pk
+        response = api_client.delete(
+            reverse('device-groups-detail', kwargs={'pk': group_pk}),
+        )
+        assert response.status_code == status.HTTP_204_NO_CONTENT
+
+        response = api_client.get(
+            reverse('device-groups-detail', kwargs={'pk': group_pk}),
+        )
+        assert response.status_code == status.HTTP_404_NOT_FOUND
+
+    def test_manage_device_in_group(self, api_client):
+        api_client.force_authenticate(self.admin_user)
+
+        group = DeviceGroup.objects.create(name='group')
+        assert group.devices.count() == 0
+
+        self.device1.group = group
+        self.device1.save()
+
+        response = api_client.get(reverse('device-groups-detail', kwargs={'pk': group.pk}))
+        assert response.status_code == status.HTTP_200_OK
+        assert response.data['devices'] == [{'id': self.device1.pk, 'name': self.device1.name}]
+
+        # add device to group
+        response = api_client.patch(
+            reverse('device-groups-detail', kwargs={'pk': group.pk}),
+            data={'devices': [self.device1.pk, self.device2.pk]}
+        )
+        assert response.status_code == status.HTTP_200_OK
+        assert response.data['devices'] == [
+            {'id': self.device1.pk, 'name': self.device1.name},
+            {'id': self.device2.pk, 'name': self.device2.name},
+        ]
+
+        # remove one device
+        response = api_client.patch(
+            reverse('device-groups-detail', kwargs={'pk': group.pk}),
+            data={'devices': [self.device2.pk]}
+        )
+        assert response.status_code == status.HTTP_200_OK
+        assert response.data['devices'] == [{'id': self.device2.pk, 'name': self.device2.name}]
+
+        # remove all devices
+        response = api_client.patch(
+            reverse('device-groups-detail', kwargs={'pk': group.pk}),
+            data={'devices': []},
+            format='json'
+        )
+        assert response.status_code == status.HTTP_200_OK
+        assert response.data['devices'] == []
+
+    def test_create_device_group_without_devices(self, api_client):
+        api_client.force_authenticate(self.admin_user)
+
+        response = api_client.post(reverse('device-groups-list'), data={'name': 'group', 'description': 'group'})
+        assert response.status_code == status.HTTP_201_CREATED
+        assert response.data['devices'] == []
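Editorial aside (not part of the patch hunk shown here): the tests above pin down the DeviceGroupSerializer contract, where `devices` is written as a list of primary keys and read back as `{'id', 'name'}` pairs. A hypothetical serializer consistent with that contract:

from rest_framework import serializers
from devices.models.device import Device, DeviceGroup

class DeviceGroupSerializer(serializers.ModelSerializer):
    # Writable as pks; the nested read shape is produced in to_representation.
    devices = serializers.PrimaryKeyRelatedField(
        queryset=Device.objects.all(), many=True, required=False)

    class Meta:
        model = DeviceGroup
        fields = ['id', 'name', 'description', 'devices']

    def to_representation(self, instance):
        data = super().to_representation(instance)
        data['devices'] = [{'id': d.pk, 'name': d.name} for d in instance.devices.all()]
        return data
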
diff --git a/devices/tests/test_sensor_api.py b/devices/tests/test_sensor_api.py
new file mode 100644
index 0000000..5259919
--- /dev/null
+++ b/devices/tests/test_sensor_api.py
@@ -0,0 +1,221 @@
+import os
+from unittest import mock
+
+import pytest
+from django.core.files.uploadedfile import SimpleUploadedFile
+from django.urls import reverse
+from rest_framework import status
+
+from devices.models.device import Device
+from devices.models.sensor import ArmaSensor
+from devices.services.sensor.rabbitmq import SensorResponseException
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+SENSOR_DATA = {
+    "name": "SENSOR_TEST",
+    "ip": "192.168.56.103",
+    "port": 5000,
+    "type": 'firewall'
+}
+
+VECTOR_URL_NAMES = ['status', 'start', 'stop', 'restart', 'reload', 'service_info']
+ZEEK_URL_NAMES = ['status', 'start', 'stop', 'restart']
+ZEEK_URL_NAMES_POST = ['protocols_disable', 'settings_update']
+
+
+def mock_sensor_management(response=None):
+    class MockSensorManagement:
+
+        def _get_connection_data(self):
+            return 1, 2
+
+        def send_message(*args, **kwargs):
+            if response:
+                return response
+            raise SensorResponseException({'status': 'error'})
+
+    return MockSensorManagement
+
+
+def mock_sensor_redis():
+    class MockSensorRedis:
+        def __init__(self, *args, **kwargs):
+            pass
+
+        def get_status(self):
+            return {'status': 'online'}
+
+    return MockSensorRedis
+
+
+@pytest.mark.django_db
+class TestSensorAPI:
+
+    @pytest.fixture(autouse=True)
+    def setup_tests(self, api_client, django_user_model, add_user_with_permissions):
+        self.user = add_user_with_permissions(username='test_admin', password='test_admin_pass', is_superuser=True)
+
+    @pytest.mark.unit
+    @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management(response='ok'))
+    @mock.patch('devices.serializers.sensor_serializers.SensorService', mock_sensor_redis())
+    def test_getting_list_of_sensors(self, api_client):
+        api_client.force_authenticate(self.user)
+        sensor = ArmaSensor.objects.create(**SENSOR_DATA)
+        url = reverse('sensor-list')
+        response = api_client.get(url)
+        assert response.json()['count'] == 1
+        assert response.json()['results'][0]['id'] == sensor.id
+
+    @pytest.mark.unit
+    @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management(response='ok'))
+    @mock.patch('devices.serializers.sensor_serializers.SensorService', mock_sensor_redis())
+    def test_getting_sensor(self, api_client):
+        api_client.force_authenticate(self.user)
+        sensor = ArmaSensor.objects.create(**SENSOR_DATA)
+        url = reverse('sensor-detail', args=[sensor.pk])
+        response = api_client.get(url)
+        assert response.status_code == status.HTTP_200_OK
+        assert response.json()['id'] == sensor.id
+
+    @pytest.mark.unit
+    @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management(response='ok'))
+    @mock.patch('devices.views.sensor.SensorService', mock_sensor_redis())
+    def test_getting_sensor_with_invalid_id(self, api_client):
+        api_client.force_authenticate(self.user)
+        url = reverse('sensor-detail', args=[56789])
+        response = api_client.get(url)
+        assert response.status_code == status.HTTP_404_NOT_FOUND
+
+    @pytest.mark.unit
+    @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management(response='ok'))
+    @mock.patch('devices.views.sensor.SensorService', mock_sensor_redis())
+    @mock.patch('devices.serializers.sensor_serializers.SensorService', mock_sensor_redis())
+    def test_updating_sensor_with_valid_data(self, api_client):
+        api_client.force_authenticate(self.user)
+        sensor = ArmaSensor.objects.create(**SENSOR_DATA)
+        url = reverse('sensor-detail', args=[sensor.id])
+        new_data = SENSOR_DATA.copy()
+        new_data['port'] = 4545
+        new_data['name'] = 'new_sensor'
+        response = api_client.patch(url, data=new_data)
+        assert response.status_code == status.HTTP_200_OK
+        assert response.json()['id'] == sensor.id
+        assert response.json()['port'] == new_data['port']
+        assert response.json()['name'] == new_data['name']
+
+    @pytest.mark.unit
+    def test_updating_sensor_with_invalid_port(self, api_client):
+        api_client.force_authenticate(self.user)
+        sensor = ArmaSensor.objects.create(**SENSOR_DATA)
+        url = reverse('sensor-detail', args=[sensor.id])
+        new_data = {
+            'name': 'new_sensor',
+            'port': 99999
+        }
+        response = api_client.patch(url, data=new_data)
+        assert response.status_code == status.HTTP_400_BAD_REQUEST
+        assert 'port' in response.json()
+        assert response.json()['port'] == ['Ensure this value is less than or equal to 65535.']
+
+    @pytest.mark.unit
+    def test_creating_sensor_without_ip(self, api_client):
+        api_client.force_authenticate(self.user)
+        url =
reverse('sensor-list') + data = SENSOR_DATA.copy() + del data['ip'] + response = api_client.post(url, data=data) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert 'ip' in response.json() + assert response.json()['ip'] == ['This field is required.'] + + @pytest.mark.unit + def test_delete_sensor(self, api_client): + firewall = ArmaSensor.objects.create(**SENSOR_DATA) + assert ArmaSensor.objects.filter(id=firewall.id).exists() + assert Device.objects.filter(id=firewall.id).exists() + + api_client.force_authenticate(self.user) + url = reverse('sensor-detail', args=[firewall.id]) + response = api_client.delete(url) + assert response.status_code == status.HTTP_204_NO_CONTENT + assert not ArmaSensor.objects.filter(id=firewall.id).exists() + assert not Device.objects.filter(id=firewall.id).exists() + + +@pytest.mark.unit +@pytest.mark.django_db +class TestSensorServicesAPI: + + @pytest.fixture(autouse=True) + def setup_tests(self, api_client, django_user_model, add_user_with_permissions): + self.user = add_user_with_permissions(username='test_admin', password='test_admin_pass', is_superuser=True) + self.sensor = ArmaSensor.objects.create(**SENSOR_DATA) + + @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management(response={'status': 'ok'})) + def test_sensor_interfaces(self, api_client): + api_client.force_authenticate(self.user) + url = reverse(f'sensor-system-message', args=[self.sensor.id, 'interfaces']) + response = api_client.get(url) + assert response.status_code == status.HTTP_200_OK + assert response.json() == {'status': 'ok'} + + @pytest.mark.parametrize('url_name', VECTOR_URL_NAMES) + @mock.patch('devices.views.sensor.SensorManagement', + mock_sensor_management(response={'status': 'ok'})) + def test_sensor_vector(self, url_name, api_client): + api_client.force_authenticate(self.user) + url = reverse(f'sensor-vector-message', args=[self.sensor.id, url_name]) + print(url) + response = api_client.get(url) + assert response.status_code == status.HTTP_200_OK + assert response.json() == {'status': 'ok'} + + @pytest.mark.parametrize('url_name', VECTOR_URL_NAMES) + @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management()) + def test_sensor_vector_with_error(self, url_name, api_client): + api_client.force_authenticate(self.user) + url = reverse(f'sensor-vector-message', args=[self.sensor.id, url_name]) + response = api_client.get(url) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == {'status': 'error'} + + @pytest.mark.parametrize('url_name', ZEEK_URL_NAMES) + @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management(response={'status': 'ok'})) + def test_sensor_zeek(self, url_name, api_client): + api_client.force_authenticate(self.user) + url = reverse(f'sensor-zeek-message', args=[self.sensor.id, url_name]) + response = api_client.get(url) + assert response.status_code == status.HTTP_200_OK + assert response.json() == {'status': 'ok'} + + @pytest.mark.parametrize('url_name', ZEEK_URL_NAMES) + @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management()) + def test_sensor_zeek_with_error(self, url_name, api_client): + api_client.force_authenticate(self.user) + url = reverse(f'sensor-zeek-message', args=[self.sensor.id, url_name]) + response = api_client.get(url) + assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.json() == {'status': 'error'} + + @mock.patch('devices.views.sensor.SensorManagement', 
mock_sensor_management(response={'status': 'ok'})) + def test_sensor_zeek_update_settings(self, api_client): + test_file = os.path.join(BASE_DIR, 'tests/test_files/good_local.zeek') + file = SimpleUploadedFile(name='good_local.zeek', content=open(test_file, 'rb').read()) + api_client.force_authenticate(self.user) + url = reverse(f'sensor-zeek-settings-update', args=[self.sensor.id]) + data = {'file': file} + response = api_client.post(url, data=data) + assert response.status_code == status.HTTP_200_OK + assert response.json() == {'status': 'ok'} + + @mock.patch('devices.views.sensor.SensorManagement', mock_sensor_management(response={'status': 'ok'})) + def test_sensor_zeek_protocols_disable(self, api_client): + api_client.force_authenticate(self.user) + url = reverse(f'sensor-zeek-protocols-disable', args=[self.sensor.id]) + data = { + 'disable_protocols': ['test', 'aaa', '10101001'] + } + response = api_client.post(url, data=data) + assert response.status_code == status.HTTP_200_OK + assert response.json() == {'status': 'ok'} diff --git a/devices/tests/test_sensor_send_message.py b/devices/tests/test_sensor_send_message.py new file mode 100644 index 0000000..9adcee0 --- /dev/null +++ b/devices/tests/test_sensor_send_message.py @@ -0,0 +1,168 @@ +from unittest import mock + +import pytest + +from devices.models.sensor import ArmaSensor +from devices.services.sensor.enums import SystemMessage +from devices.services.sensor.rabbitmq import SensorManagement, SensorResponseException + +SENSOR_DATA = { + "name": "SENSOR_TEST", + "ip": "192.168.56.103", + "port": 5000, + "type": 'firewall' +} + +# each case contains the source message and the expected response +test_send_message_with_decode_success = [ + ('{"status": "ok"}', {'status': 'ok'}), + ('["1","2"]', ["1", "2"]), + ('{}', {}) +] + +test_send_message_with_decode_error = [ + ('', {'status': 'error', + 'detail': 'sensor doesnt return response'}), + (b'', {'status': 'error', + 'detail': 'sensor doesnt return response'}), + ([], {'status': 'error', + 'detail': 'sensor doesnt return response'}), + ('body_from_sensor', {'status': 'error', + 'detail': 'failed to decode sensor response to json'}), + (b'body_from_sensor', {'status': 'error', + 'detail': 'failed to decode sensor response to json'}), +] + +test_send_message_without_decode = [ + ('', '{"status": "error", "detail": "sensor doesnt return response"}'), + (b'', '{"status": "error", "detail": "sensor doesnt return response"}'), + ([], '{"status": "error", "detail": "sensor doesnt return response"}'), + ('body_from_sensor', 'body_from_sensor'), + (b'bytes_from_sensor', b'bytes_from_sensor'), +] + + +def mock_pika(*args, **kwargs): + class MockBlockingChannel: + def basic_consume(self, *args, **kwargs): pass + + def basic_publish(self, *args, **kwargs): pass + + def exchange_declare(self, *args, **kwargs): pass + + def queue_declare(self, *args, **kwargs): + class Queue: + queue = 'callback_queue' + + class Method: + method = Queue() + + return Method() + + class MockBlockingConnection: + + def __init__(self, *args, **kwargs): pass + + def close(self): pass + + def process_data_events(self, *args, **kwargs): pass + + def call_later(self, *args, **kwargs): pass + + def channel(self): + return MockBlockingChannel() + + return MockBlockingConnection + + +@pytest.mark.unit +@pytest.mark.django_db +class TestSensorManagement: + + @pytest.fixture(autouse=True) + def setup_tests(self): + self.sensor = ArmaSensor.objects.create(**SENSOR_DATA) + + @mock.patch('pika.BlockingConnection', mock_pika()) 
+    def test_send_message_to_all_without_response(self):
+        service = SensorManagement()
+        response = service.send_message()
+        assert response is None
+
+    @mock.patch('pika.BlockingConnection', mock_pika())
+    def test_send_message_to_all_but_no_one_answer(self):
+        service = SensorManagement(proceed=False)
+        response = service.send_message(message_type=SystemMessage.ping, wait_response=True)
+        assert response == ()
+
+    @pytest.mark.parametrize('test_data', test_send_message_with_decode_error)
+    @mock.patch('pika.BlockingConnection', mock_pika())
+    def test_send_message_to_all_with_decode_error(self, test_data):
+        service = SensorManagement(proceed=False)
+        body = test_data[0]
+        service._response_callback(None, None, None, body, True)
+        response = service.send_message(message_type=SystemMessage.ping, wait_response=True)
+
+        assert response[0] == test_data[1]
+        assert len(response) == 1
+
+    @pytest.mark.parametrize('test_data', test_send_message_with_decode_error)
+    @mock.patch('pika.BlockingConnection', mock_pika())
+    def test_send_message_to_sensor_with_decode_error(self, test_data: tuple):
+        service = SensorManagement(proceed=False)
+
+        body = test_data[0]
+        service._response_callback(None, None, None, body, True)
+
+        with pytest.raises(SensorResponseException) as exc:
+            service.send_message(sensor=self.sensor, message_type=SystemMessage.ping, wait_response=True)
+        assert exc.value.detail['status'] == test_data[1]['status']
+        assert exc.value.detail['detail'] == test_data[1]['detail']
+
+    @pytest.mark.parametrize('test_data', test_send_message_without_decode)
+    @mock.patch('pika.BlockingConnection', mock_pika())
+    def test_send_message_to_all_without_decode(self, test_data):
+        service = SensorManagement(proceed=False)
+        body = test_data[0]
+        service._response_callback(None, None, None, body, False)
+        response = service.send_message(message_type=SystemMessage.ping, wait_response=True)
+
+        assert response[0] == test_data[1]
+        assert len(response) == 1
+
+    @pytest.mark.parametrize('test_data', test_send_message_without_decode)
+    @mock.patch('pika.BlockingConnection', mock_pika())
+    def test_send_message_to_sensor_without_decode(self, test_data: tuple):
+        service = SensorManagement(proceed=False)
+
+        body = test_data[0]
+        service._response_callback(None, None, None, body, False)
+
+        response = service.send_message(sensor=self.sensor, message_type=SystemMessage.ping, wait_response=True)
+
+        assert response == test_data[1]
+
+    @pytest.mark.parametrize('test_data', test_send_message_with_decode_success)
+    @mock.patch('pika.BlockingConnection', mock_pika())
+    def test_send_message_to_all_with_decode_success(self, test_data):
+        service = SensorManagement(proceed=False)
+
+        body = test_data[0]
+        service._response_callback(None, None, None, body, True)
+
+        response = service.send_message(message_type=SystemMessage.ping, wait_response=True)
+
+        assert response[0] == test_data[1]
+        assert len(response) == 1
+
+    @pytest.mark.parametrize('test_data', test_send_message_with_decode_success)
+    @mock.patch('pika.BlockingConnection', mock_pika())
+    def test_send_message_to_sensor_with_decode_success(self, test_data):
+        service = SensorManagement(proceed=False)
+        body = test_data[0]
+        service._response_callback(None, None, None, body, True)
+
+        response = service.send_message(sensor=self.sensor, message_type=SystemMessage.ping, wait_response=True)
+
+        assert response == test_data[1]
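Editorial aside (not part of the patch): the three tables at the top of this file fix the decode contract that SensorManagement applies to sensor responses. A standalone sketch of that contract (the function name is hypothetical):

import json

def decode_sensor_body(body, decode: bool):
    # Empty body: report the error, as a dict when decoding, otherwise as JSON text.
    if not body:
        payload = {'status': 'error', 'detail': 'sensor doesnt return response'}
        return payload if decode else json.dumps(payload)
    # Without decoding, keep the raw body (str or bytes) untouched.
    if not decode:
        return body
    try:
        return json.loads(body)
    except (json.JSONDecodeError, TypeError):
        return {'status': 'error', 'detail': 'failed to decode sensor response to json'}
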
diff --git a/devices/tests/test_vector_service.py b/devices/tests/test_vector_service.py
new file mode 100644
index 0000000..b92201d
--- /dev/null
+++ b/devices/tests/test_vector_service.py
@@ -0,0 +1,32 @@
+import os
+import tempfile
+from unittest.mock import patch
+
+import pytest
+
+from devices.models.device import Device
+from devices.services.vector import VectorService
+
+TMP_DIR_VECTOR = tempfile.TemporaryDirectory()
+
+
+@patch('devices.services.vector.VECTOR_CONFIG_DIR', TMP_DIR_VECTOR.name)
+@pytest.mark.unit
+@pytest.mark.django_db
+class TestVectorService:
+
+    def test_create_vector_config(self):
+        device = Device.objects.create(ip='2.2.2.2', port='9999', type='firewall')
+        assert f'armaif_{device.pk}.toml' not in os.listdir(TMP_DIR_VECTOR.name)
+        vector = VectorService(device)
+        vector.update_config()
+        assert f'armaif_{device.pk}.toml' in os.listdir(TMP_DIR_VECTOR.name)
+
+    def test_delete_vector_config(self, remove_files_after_test_vector_config):
+        device = Device.objects.create(ip='2.2.2.2', port='9999', type='firewall')
+        with open(f'{TMP_DIR_VECTOR.name}/armaif_{device.pk}.toml', 'w+') as test_file:
+            test_file.write('123')
+        assert f'armaif_{device.pk}.toml' in os.listdir(TMP_DIR_VECTOR.name)
+        vector = VectorService(device)
+        vector.delete_config()
+        assert f'armaif_{device.pk}.toml' not in os.listdir(TMP_DIR_VECTOR.name)
diff --git a/devices/urls.py b/devices/urls.py
new file mode 100644
index 0000000..e5e05b8
--- /dev/null
+++ b/devices/urls.py
@@ -0,0 +1,18 @@
+from django.urls import path, include
+from rest_framework.routers import SimpleRouter
+
+from devices.views.arma_firewall import ArmaIndustrialFirewallViewSet
+from devices.views.device import DeviceViewSet, DeviceGroupViewSet
+from devices.views.sensor import ArmaSensorViewSet
+
+router = SimpleRouter()
+router.register('firewall', ArmaIndustrialFirewallViewSet, basename='firewall')
+router.register('sensor', ArmaSensorViewSet, basename='sensor')
+router.register('groups', DeviceGroupViewSet, basename='device-groups')
+router.register('', DeviceViewSet)
+
+urlpatterns = [
+    path('sensor/<int:pk>/zeek/protocols_disable/', ArmaSensorViewSet.as_view({'post': 'zeek_protocols_disable'})),
+    path('sensor/<int:pk>/zeek/settings_update/', ArmaSensorViewSet.as_view({'post': 'zeek_settings_update'})),
+    path('', include(router.urls)),
+]
diff --git a/devices/urls_endpoint.py b/devices/urls_endpoint.py
new file mode 100644
index 0000000..5073429
--- /dev/null
+++ b/devices/urls_endpoint.py
@@ -0,0 +1,11 @@
+from django.urls import path, include
+from rest_framework.routers import SimpleRouter
+
+from devices.views.endpoint_views import EndpointDeviceApiViewSet
+
+router = SimpleRouter()
+router.register('', EndpointDeviceApiViewSet, basename='endpoint_api')
+
+urlpatterns = [
+    path('', include(router.urls)),
+]
diff --git a/devices/views/__init__.py b/devices/views/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/devices/views/arma_firewall.py b/devices/views/arma_firewall.py
new file mode 100644
index 0000000..c3dda29
--- /dev/null
+++ b/devices/views/arma_firewall.py
@@ -0,0 +1,111 @@
+from django.http import HttpResponse
+from rest_framework.decorators import action
+from rest_framework.generics import get_object_or_404
+from rest_framework.response import Response
+from rest_framework.viewsets import ModelViewSet
+
+from core.mixins import ApiPermissionCheckMixin
+from devices.enums import DeviceType
+from devices.models.firewall import ArmaIndustrialFirewall
+from devices.serializers.firewall import FirewallSerializer, CheckFirewallConnectionSerializer, \
+    AifUploadIdsRulesetsSerializer, AifUploadConfigSerializer
+from
devices.services.firewall import FirewallService +from devices.services.vector import VectorService +from devices.tasks.firewall import download_files_from_firewall_task + + +class ArmaIndustrialFirewallViewSet(ApiPermissionCheckMixin, ModelViewSet): + queryset = ArmaIndustrialFirewall.objects.all() + serializer_class = FirewallSerializer + + def perform_create(self, serializer): + check_connection_serializer = CheckFirewallConnectionSerializer(data=self.request.data) + check_connection_serializer.is_valid(raise_exception=True) + + fs = FirewallService() + fs.check_connection(check_connection_serializer.validated_data) + firewall = serializer.save(type=DeviceType.FIREWALL) + + fs.firewall = firewall + + vector = VectorService(firewall) + vector.update_config() + + def perform_update(self, serializer): + check_connection_serializer = CheckFirewallConnectionSerializer(data=self.request.data) + check_connection_serializer.is_valid(raise_exception=True) + fs = FirewallService() + fs.check_connection(check_connection_serializer.validated_data) + + firewall = serializer.save() + vector = VectorService(firewall) + vector.update_config() + + def perform_destroy(self, device): + vector = VectorService(device) + vector.delete_config() + device.delete() + + @action(detail=True, methods=['GET']) + def status(self, request, *args, **kwargs): + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + response = FirewallService(firewall).check_status() + return Response(response) + + @action(detail=True, methods=['POST']) + def upload_ids_rulesets(self, request, *args, **kwargs): + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + serializer = AifUploadIdsRulesetsSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + rulesets_file = serializer.validated_data['rulesets'] + response = FirewallService(firewall).upload_file(rulesets_file, 'rulesets') + return Response(response) + + @action(detail=True, methods=['POST']) + def upload_config(self, request, *args, **kwargs): + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + serializer = AifUploadConfigSerializer(data=request.data) + serializer.is_valid(raise_exception=True) + config_file = serializer.validated_data['conffile'] + response = FirewallService(firewall).upload_file(config_file, 'config') + return Response(response) + + @action(detail=True, methods=['GET']) + def download_rulesets(self, request, *args, **kwargs): # Deprecated + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + file_byte, name = FirewallService(firewall).download_file('rulesets') + response = HttpResponse(file_byte, content_type="application/file") + response['Content-Disposition'] = f'attachment; filename="{name}"' + return response + + @action(detail=True, methods=['GET']) + def download_config(self, request, *args, **kwargs): # Deprecated + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + file_byte, name = FirewallService(firewall).download_file('config') + response = HttpResponse(file_byte, content_type="application/file") + response['Content-Disposition'] = f'attachment; filename="{name}"' + return response + + @action(detail=True, methods=['GET']) + def download_config_new(self, request, *args, **kwargs): # todo remove suffix _new + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + task = download_files_from_firewall_task.apply_async(args=(firewall.pk, request.user.pk, 'config')) + return Response({'task_id': task.id}) + + 
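Editorial aside (not part of the patch): download_config_new above only returns a Celery task id; per test_firewall_download_file_task, the task resolves to a DataStorage pk once the file has been stored. A caller could poll it roughly like this (the helper name is hypothetical):

from celery.result import AsyncResult

def wait_for_firewall_download(task_id: str, timeout: int = 60) -> int:
    # Block until the download task finishes and return the DataStorage pk;
    # raises on task failure or timeout.
    result = AsyncResult(task_id)
    return result.get(timeout=timeout)
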
@action(detail=True, methods=['GET']) + def download_rulesets_new(self, request, *args, **kwargs): # todo remove suffix _new + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + task = download_files_from_firewall_task.apply_async(args=(firewall.pk, request.user.pk, 'rulesets')) + return Response({'task_id': task.id}) + + @action(detail=True, methods=['POST']) + def reboot(self, request, *args, **kwargs): + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + response = FirewallService(firewall).reboot() + return Response(response) + + @action(detail=True, methods=['GET']) + def rule_fields(self, request, *args, **kwargs): + firewall = get_object_or_404(ArmaIndustrialFirewall, pk=kwargs['pk']) + response = FirewallService(firewall).rule_fields(request) + return Response(response) diff --git a/devices/views/device.py b/devices/views/device.py new file mode 100644 index 0000000..234c383 --- /dev/null +++ b/devices/views/device.py @@ -0,0 +1,35 @@ +from rest_framework.mixins import RetrieveModelMixin, UpdateModelMixin, ListModelMixin, DestroyModelMixin +from rest_framework.viewsets import GenericViewSet, ModelViewSet + +from core.mixins import ApiPermissionCheckMixin, ExportToCsvMixin +from devices.filters import DeviceFilter +from devices.models.device import Device, DeviceGroup +from devices.serializers.device import DeviceSerializer, DeviceGroupSerializer +from devices.services.vector import VectorService + + +class DeviceViewSet(ApiPermissionCheckMixin, + ExportToCsvMixin, + RetrieveModelMixin, + UpdateModelMixin, + ListModelMixin, + DestroyModelMixin, + GenericViewSet): + queryset = Device.objects.all() + serializer_class = DeviceSerializer + filterset_class = DeviceFilter + + def perform_update(self, serializer): + device = serializer.save() + vector = VectorService(device) + vector.update_config() + + def perform_destroy(self, device): + vector = VectorService(device) + vector.delete_config() + device.delete() + + +class DeviceGroupViewSet(ApiPermissionCheckMixin, ModelViewSet): + queryset = DeviceGroup.objects.all() + serializer_class = DeviceGroupSerializer diff --git a/devices/views/endpoint_views.py b/devices/views/endpoint_views.py new file mode 100644 index 0000000..bb51fb0 --- /dev/null +++ b/devices/views/endpoint_views.py @@ -0,0 +1,125 @@ +import json +import logging +from typing import Union + +from django.http import HttpResponse +from rest_framework.decorators import action +from rest_framework.mixins import RetrieveModelMixin, ListModelMixin, CreateModelMixin, UpdateModelMixin, \ + DestroyModelMixin +from rest_framework.response import Response +from rest_framework.viewsets import GenericViewSet + +from core.utils import httpFileResponse +from devices.enums import DeviceType +from devices.models.endpoint_device import EndpointModel +from devices.serializers.endpoint_serializers import EndpointDeviceSerializersAll +from devices.services.endpoint.endpoint_antivirus import EndpointAntivirusService +from devices.services.endpoint.endpoint_get_status import get_status +from devices.services.endpoint.endpoint_services import EndpointManagementService, EndpointKepAliveService, \ + EndpointDownloadConfigService, EndpointUploadConfigService, EndpointUpdateService + +_log = logging.getLogger(__name__) + + +class EndpointDeviceApiViewSet(ListModelMixin, + CreateModelMixin, + RetrieveModelMixin, + UpdateModelMixin, + DestroyModelMixin, + GenericViewSet): + """ViewSet for working with endpoint device.""" + + queryset = 
EndpointModel.objects.all() + serializer_class = EndpointDeviceSerializersAll + + def create(self, request, *args, **kwargs) -> Response: + serializer = self.get_serializer(data=request.data) + if not serializer.is_valid(): + return Response(serializer.errors, 400) + instance = serializer.save(type=DeviceType.ENDPOINT) + EndpointManagementService(instance).create() + return Response(self.get_serializer(instance).data, 201) + + def update(self, request, *args, **kwargs) -> Response: + partial = kwargs.pop('partial', False) + instance = self.get_object() + serializer = self.get_serializer(instance, data=request.data, partial=partial) + if not serializer.is_valid(): + return Response(serializer.errors, 400) + instance = serializer.save(settings_changed=True, ) + instance.save() + + if getattr(instance, '_prefetched_objects_cache', None): + instance._prefetched_objects_cache = {} + + EndpointManagementService(instance).update() + return Response(self.get_serializer(instance).data) + + def destroy(self, request, *args, **kwargs) -> Response: + instance = self.get_object() + EndpointManagementService(instance).destroy() + self.perform_destroy(instance) + return Response({}, 204) + + @action(detail=True, methods=['GET', 'POST'], name="Keepalive") + def keepalive(self, request, pk=None) -> Response: + try: + service = EndpointKepAliveService(int(pk), request.body) + except EndpointModel.DoesNotExist: + return Response({'status': 'error', 'reason': 'no such endpoint record'}) + except json.JSONDecodeError: + return Response({'status': 'error', 'error_message': 'json decode error'}) + else: + response = service.get_response() + + return Response(response) + + @action(detail=True, methods=['GET'], name='Status') + def status(self, request, pk=None) -> Response: + """ Function to respond with current Endpoint states. 
Current response states are:
+        config_errors,
+        request_config
+        """
+        return Response(get_status(request, pk))
+
+    @action(detail=True, methods=['GET'], name='Download')
+    def download(self, request, pk=None) -> Union[Response, HttpResponse]:
+        """API to download the endpoint config from MC to the endpoint."""
+        render_format = request.accepted_renderer.format
+        service = EndpointDownloadConfigService(int(pk))
+        if render_format == 'api':
+            data, filename = service.download_as_file()
+            response = HttpResponse(data, content_type='application/file')
+            response['Content-Disposition'] = f'attachment; filename="{filename}"'
+            return response
+        data = service.download()
+        service.setup_endpoint()
+        return Response(data, status=200)
+
+    @action(detail=True, methods=['POST'], name='Upload')
+    def upload(self, request, pk=None):
+        """API to upload the endpoint config from the endpoint to MC."""
+        try:
+            service = EndpointUploadConfigService(int(pk), request.body)
+        except EndpointModel.DoesNotExist:
+            return Response({'status': 'error', 'reason': 'no such endpoint record'})
+        except json.JSONDecodeError:
+            return Response({'status': 'error', 'error_message': 'json decode error'})
+        else:
+            response = service.upload()
+        return Response(response)
+
+    @action(detail=True, methods=["GET"], name="Endpoint_config_request")
+    def config_request(self, request, pk=None) -> Response:
+        """API to set the Endpoint update flag to True so that MC refreshes its Endpoint model data from the Endpoint instance."""
+        service = EndpointUpdateService(int(pk))
+        data = service.update()
+        _log.info(f'Request update config [{pk}] from Endpoint')
+        return Response(data)
+
+    @action(detail=True, methods=["GET"], name="Antivirus_update")
+    def antivirus_update(self, request, pk=None) -> Union[Response, httpFileResponse]:
+        """API to update the endpoint antivirus database."""
+        _log.info(f'Request antivirus update [{pk}] from Endpoint')
+        service = EndpointAntivirusService(int(pk))
+        return service.update()
diff --git a/devices/views/sensor.py b/devices/views/sensor.py
new file mode 100644
index 0000000..03be3d8
--- /dev/null
+++ b/devices/views/sensor.py
@@ -0,0 +1,119 @@
+import logging
+
+from django.http import Http404
+from rest_framework import status
+from rest_framework.decorators import action
+from rest_framework.exceptions import APIException
+from rest_framework.response import Response
+from rest_framework.viewsets import ModelViewSet
+
+from devices.enums import DeviceType
+from devices.models.sensor import ArmaSensor
+from devices.serializers.sensor_serializers import SensorSerializer, ZeekSettingsUpdateSerializer, \
+    ZeekProtocolsDisableSerializer, CreateSensorSerializer
+from devices.services.sensor.enums import VectorMessage, SystemMessage, ZeekMessage, SuricataMessage
+from devices.services.sensor.rabbitmq import SensorManagement
+from devices.services.sensor.service import SensorService
+from devices.services.sensor.utils import RabbitMQUserManagement
+from devices.services.vector import VectorService
+
+_log = logging.getLogger(__name__)
+
+
+class ArmaSensorViewSet(ModelViewSet):
+    queryset = ArmaSensor.objects.all()
+
+    def get_serializer_class(self):
+        if self.action == 'create':
+            return CreateSensorSerializer
+        return SensorSerializer
+
+    def perform_update(self, serializer):
+        sensor = serializer.save()
+        body = serializer.data
+        SensorManagement().send_message(sensor=sensor, message_type=SystemMessage.settings_changed, body=body)
+        VectorService(sensor).update_config()
+
+    def create(self, request, *args, **kwargs):
+        serializer = self.get_serializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+        sensor = serializer.save(type=DeviceType.SENSOR)
+
+        authorization_key = RabbitMQUserManagement(sensor.uuid).create_random_rabbitmq_credentials()
+        sensor.authorization_key = authorization_key
+        sensor.save()
+
+        VectorService(sensor).update_config()
+
+        return Response(SensorSerializer(instance=sensor).data, status=status.HTTP_201_CREATED)
+
+    def perform_destroy(self, sensor):
+        # Delete the vector config first: delete() clears the sensor pk,
+        # which the config file name is derived from.
+        VectorService(sensor).delete_config()
+        sensor.delete()
+
+    @action(methods=['GET'], detail=True, url_path='system/(?P<system_action>[a-z]+)')
+    def system_message(self, request, system_action, *args, **kwargs):
+        sensor = self.get_object()
+        try:
+            action = SystemMessage[system_action]
+        except KeyError:
+            raise Http404
+        response = SensorManagement().send_message(sensor=sensor, message_type=action, wait_response=True)
+        return Response(response)
+
+    @action(methods=['GET'], detail=True, url_path='vector/(?P<vector_action>[a-z_]+)')
+    def vector_message(self, request, vector_action, *args, **kwargs):
+        sensor = self.get_object()
+        try:
+            action = VectorMessage[vector_action]
+        except KeyError:
+            raise Http404
+        response = SensorManagement().send_message(sensor=sensor, message_type=action, wait_response=True)
+        return Response(response)
+
+    @action(methods=['GET'], detail=True, url_path='zeek/(?P<zeek_action>[a-z_]+)')
+    def zeek_message(self, request, zeek_action, *args, **kwargs):
+        sensor = self.get_object()
+        try:
+            action = ZeekMessage[zeek_action]
+        except KeyError:
+            raise Http404
+        response = SensorManagement().send_message(sensor=sensor, message_type=action, wait_response=True)
+
+        return Response(response)
+
+    @action(methods=['GET'], detail=True, url_path='suricata/(?P<suricata_action>[a-z_]+)')
+    def suricata_message(self, request, suricata_action, *args, **kwargs):
+        sensor = self.get_object()
+        try:
+            action = SuricataMessage[suricata_action]
+        except KeyError:
+            raise Http404
+        response = SensorManagement().send_message(sensor=sensor, message_type=action, wait_response=True)
+
+        return Response(response)
+
+    @action(methods=['POST'], detail=True, url_path='zeek/protocols_disable')
+    def zeek_protocols_disable(self, request, *args, **kwargs):
+        serializer = ZeekProtocolsDisableSerializer(data=request.data)
+        sensor = self.get_object()
+        serializer.is_valid(raise_exception=True)
+        protocols = serializer.validated_data['disable_protocols']
+        response = SensorManagement().send_message(sensor=sensor,
+                                                   message_type=ZeekMessage.protocols_disable,
+                                                   wait_response=True,
+                                                   body=protocols)
+        return Response(response)
+
+    @action(methods=['POST'], detail=True, url_path='zeek/settings_update')
+    def zeek_settings_update(self, request, *args, **kwargs):
+        sensor = self.get_object()
+        serializer = ZeekSettingsUpdateSerializer(data=request.data)
+        serializer.is_valid(raise_exception=True)
+        file = serializer.validated_data['file'].read()
+        response = SensorManagement().send_message(sensor=sensor,
+                                                   message_type=ZeekMessage.settings_update,
+                                                   wait_response=True,
+                                                   body=file)
+        return Response(response)
diff --git a/docker/compose/config/correlator/Dockerfile b/docker/compose/config/correlator/Dockerfile
new file mode 100644
index 0000000..64092a3
--- /dev/null
+++ b/docker/compose/config/correlator/Dockerfile
@@ -0,0 +1,6 @@
+FROM golang:latest
+RUN mkdir /app
+ADD ./correlator/ /app/
+WORKDIR /app/cmd/correlator
+#RUN go build
+CMD ["./correlator", "-config", "config.json"]
diff --git a/docker/compose/config/elk/elasticsearch/Dockerfile b/docker/compose/config/elk/elasticsearch/Dockerfile
new
file mode 100644 index 0000000..e2157fe --- /dev/null +++ b/docker/compose/config/elk/elasticsearch/Dockerfile @@ -0,0 +1,11 @@ +ARG ELK_VERSION + +# https://www.docker.elastic.co/ +FROM registry.iwarma.ru/iwa/dev/console-docker/console-elasticsearch:latest + +# Add your elasticsearch plugins setup here +# Example: RUN elasticsearch-plugin install analysis-icu + +# See https://elk-docker.readthedocs.io/ +ADD ./mapping /mapping +ADD ./elk-post-hooks.sh /usr/local/bin/elk-post-hooks.sh \ No newline at end of file diff --git a/docker/compose/config/elk/elasticsearch/config/elasticsearch.yml b/docker/compose/config/elk/elasticsearch/config/elasticsearch.yml new file mode 100644 index 0000000..aff78b8 --- /dev/null +++ b/docker/compose/config/elk/elasticsearch/config/elasticsearch.yml @@ -0,0 +1,18 @@ +--- +## Default Elasticsearch configuration from Elasticsearch base image. +## https://github.com/elastic/elasticsearch/blob/master/distribution/docker/src/docker/config/elasticsearch.yml +# +cluster.name: "docker-cluster" +network.host: 0.0.0.0 + +## X-Pack settings +## see https://www.elastic.co/guide/en/elasticsearch/reference/current/setup-xpack.html +# +xpack.license.self_generated.type: basic +xpack.security.enabled: true +xpack.monitoring.collection.enabled: true +http.cors.enabled: true +http.cors.allow-origin: "*" +http.cors.allow-methods: OPTIONS, HEAD, GET, POST, PUT, DELETE +http.cors.allow-headers: Authorization,X-Requested-With,X-Auth-Token,Content-Type,Content-Length +http.cors.allow-credentials: true \ No newline at end of file diff --git a/docker/compose/config/elk/elasticsearch/elk-post-hooks.sh b/docker/compose/config/elk/elasticsearch/elk-post-hooks.sh new file mode 100644 index 0000000..6ed694f --- /dev/null +++ b/docker/compose/config/elk/elasticsearch/elk-post-hooks.sh @@ -0,0 +1,6 @@ +#!/bin/bash + +echo "Apply elasticsearch index templates" +curl -w "\n" --user elastic:changeme -X PUT "localhost:9200/_component_template/normalized_component" -H 'Content-Type: application/json' -d"$(< /mapping/normalized-component.json)" +curl -w "\n" --user elastic:changeme -X PUT "localhost:9200/_index_template/normalized" -H 'Content-Type: application/json' -d"$(< /mapping/normalizer-index.template.json)" +# curl -X PUT "localhost:9200/_index_template/arma-*_template?pretty" -H 'Content-Type: application/json' -d"$(< /mapping/normalized-event.json)" diff --git a/docker/compose/config/elk/kibana/Dockerfile b/docker/compose/config/elk/kibana/Dockerfile new file mode 100644 index 0000000..7bdaa8c --- /dev/null +++ b/docker/compose/config/elk/kibana/Dockerfile @@ -0,0 +1,7 @@ +ARG ELK_VERSION + +# https://www.docker.elastic.co/ +FROM registry.iwarma.ru/iwa/dev/console-docker/console-kibana:latest + +# Add your kibana plugins setup here +# Example: RUN kibana-plugin install diff --git a/docker/compose/config/elk/kibana/config/kibana.yml b/docker/compose/config/elk/kibana/config/kibana.yml new file mode 100644 index 0000000..5c09ec2 --- /dev/null +++ b/docker/compose/config/elk/kibana/config/kibana.yml @@ -0,0 +1,13 @@ +--- +## Default Kibana configuration from Kibana base image. 
+## https://github.com/elastic/kibana/blob/master/src/dev/build/tasks/os_packages/docker_generator/templates/kibana_yml.template.js +# +server.name: kibana +server.host: 0.0.0.0 +elasticsearch.hosts: [ "http://elasticsearch:9200" ] +monitoring.ui.container.elasticsearch.enabled: true + +## X-Pack security credentials +# +elasticsearch.username: elastic +elasticsearch.password: changeme diff --git a/docker/compose/config/elk/vector/.gitignore b/docker/compose/config/elk/vector/.gitignore new file mode 100644 index 0000000..ee67534 --- /dev/null +++ b/docker/compose/config/elk/vector/.gitignore @@ -0,0 +1,2 @@ +pipeline/*.toml +!pipeline/default.toml \ No newline at end of file diff --git a/docker/compose/config/elk/vector/Dockerfile b/docker/compose/config/elk/vector/Dockerfile new file mode 100644 index 0000000..4be8573 --- /dev/null +++ b/docker/compose/config/elk/vector/Dockerfile @@ -0,0 +1,3 @@ + + + FROM timberio/vector:0.17.0-alpine \ No newline at end of file diff --git a/docker/compose/config/elk/vector/config/vector.yml b/docker/compose/config/elk/vector/config/vector.yml new file mode 100644 index 0000000..88355a7 --- /dev/null +++ b/docker/compose/config/elk/vector/config/vector.yml @@ -0,0 +1,4 @@ +http.host: "0.0.0.0" +# Auto reload configs +config.reload.automatic: true +xpack.monitoring.elasticsearch.hosts: [ "http://elasticsearch:9200" ] \ No newline at end of file diff --git a/docker/compose/config/elk/vector/pipeline/default.toml b/docker/compose/config/elk/vector/pipeline/default.toml new file mode 100644 index 0000000..b86ff9e --- /dev/null +++ b/docker/compose/config/elk/vector/pipeline/default.toml @@ -0,0 +1,22 @@ + + +# ░█████╗░██████╗░███╗░░░███╗░█████╗░ +# ██╔══██╗██╔══██╗████╗░████║██╔══██╗ +# ███████║██████╔╝██╔████╔██║███████║ +# ██╔══██║██╔══██╗██║╚██╔╝██║██╔══██║ +# ██║░░██║██║░░██║██║░╚═╝░██║██║░░██║ +# ╚═╝░░╚═╝╚═╝░░╚═╝╚═╝░░░░░╚═╝╚═╝░░╚═╝ + + +[sources.null_socket_source] + type = "syslog" + address = "0.0.0.0:0000" + mode = "udp" + + +[sinks.null_file_sink] + type="file" + inputs = [ "null_socket_source" ] + compression = "none" + path = "/dev/null" + encoding="text" diff --git a/docker/compose/config/license/Dockerfile b/docker/compose/config/license/Dockerfile new file mode 100644 index 0000000..7321b60 --- /dev/null +++ b/docker/compose/config/license/Dockerfile @@ -0,0 +1,6 @@ +FROM golang:1.16-buster +RUN mkdir /app +ADD ./license/ /app/ +WORKDIR /app/ +#RUN go build +CMD ["./client -config config.json"] \ No newline at end of file diff --git a/docker/compose/config/nginx/default.conf b/docker/compose/config/nginx/default.conf new file mode 100644 index 0000000..766e784 --- /dev/null +++ b/docker/compose/config/nginx/default.conf @@ -0,0 +1,56 @@ +upstream console { + server djangoapp:8000; +} + +server { + + # this is the internal Docker DNS, cache only for 30s + + listen 80; + access_log /dev/stdout; + error_log stderr error; + + client_max_body_size 500M; + + location /delk { + rewrite ^/delk/(.*) /$1 break; + proxy_pass http://elasticsearch:9200; + } + + + location ~ ^/(en|ru)/license/ { + proxy_pass http://console; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $host; + proxy_redirect off; + access_log off; + log_not_found off; + } + + location ~ ^/ru/api/endpoint/(.+)/keepalive/$ { + proxy_pass http://console; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header Host $host; + proxy_redirect off; + access_log off; + log_not_found off; + } + + location /ws/notifications/ { + 
proxy_pass http://console; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "Upgrade"; + } + + location / { + proxy_pass http://console; + } + location /static { + alias /usr/src/app/public/static; + } + location /media { + alias /usr/src/app/public/media; + } + +} diff --git a/docker/compose/config/nginx/nginx.docker b/docker/compose/config/nginx/nginx.docker new file mode 100644 index 0000000..701abef --- /dev/null +++ b/docker/compose/config/nginx/nginx.docker @@ -0,0 +1,4 @@ +FROM nginx + +RUN rm /etc/nginx/conf.d/default.conf +COPY ./default.conf /etc/nginx/conf.d diff --git a/docker/compose/config/python/pip.conf b/docker/compose/config/python/pip.conf new file mode 100644 index 0000000..d957039 --- /dev/null +++ b/docker/compose/config/python/pip.conf @@ -0,0 +1,4 @@ +[global] +index-url = http://nexus.iwarma.ru/repository/proxy-pypi/pypi +extra-index-url = http://nexus.iwarma.ru/repository/proxy-pypi/simple +trusted-host = nexus.iwarma.ru diff --git a/docker/compose/config/python/python.docker b/docker/compose/config/python/python.docker new file mode 100644 index 0000000..2ea90a4 --- /dev/null +++ b/docker/compose/config/python/python.docker @@ -0,0 +1,16 @@ +FROM debian:11-slim + +WORKDIR /usr/src/app + +RUN echo 'debconf debconf/frontend select Noninteractive' | debconf-set-selections + +RUN mkdir /tmp || true; chmod 1777 /tmp || true; apt update && apt install -y python3 gettext python3-pip libpq-dev + +ADD requirements.txt /usr/src/app/ +ADD requirements_test.txt /usr/src/app/ +ADD requirements.txt /usr/src/app/ +ADD docker/compose/config/python/pip.conf /etc/ + +RUN python3 -m pip install --upgrade pip && python3 -m pip install -r requirements_test.txt + +ADD . /usr/src/app/ diff --git a/docker/compose/config/react/Dockerfile b/docker/compose/config/react/Dockerfile new file mode 100644 index 0000000..7e74318 --- /dev/null +++ b/docker/compose/config/react/Dockerfile @@ -0,0 +1,5 @@ +# pull official base image +FROM node:16.14.2-alpine + +# set working directory +WORKDIR /app/ diff --git a/docker/compose/config/selenium/selenium.docker b/docker/compose/config/selenium/selenium.docker new file mode 100644 index 0000000..e69de29 diff --git a/docker/compose/correlator.yml b/docker/compose/correlator.yml new file mode 100644 index 0000000..e714606 --- /dev/null +++ b/docker/compose/correlator.yml @@ -0,0 +1,18 @@ +version: '3.2' +services: + correlator: + build: + context: ../../ + dockerfile: docker/compose/config/correlator/Dockerfile + volumes: + - ../../correlator:/app + tty: true + stdin_open: true + container_name: correlator + command: ./correlator -config config.json + depends_on: + - elasticsearch + ports: + - "5566:5566" + environment: + AGGREGATOR_VERBOSE: "true" \ No newline at end of file diff --git a/docker/compose/django.yml b/docker/compose/django.yml new file mode 100644 index 0000000..8f30a25 --- /dev/null +++ b/docker/compose/django.yml @@ -0,0 +1,126 @@ +version: '3.2' +services: + # Core django application + djangoapp: + env_file: + - ../../.env.dev + container_name: ${DOCKER_DJANGO_NAME:-djangoapp} + build: + context: ../../ + dockerfile: docker/compose/config/python/python.docker + hostname: djangoapp + volumes: + - public:/usr/src/app/public + - ../../dockerlogs/gunicorn:/var/log/ + - ../../:/usr/src/app/ + - ./config/elk/vector/pipeline:/usr/src/app/public/vector + - ../../public/test_coverage:/usr/src/app/public/test_coverage + - ../../frontend:/build_logs + - react-volume:/react-build + command: > + 
/bin/bash -c "/bin/bash /usr/src/app/docker/django_entrypoint.sh" + tty: true + stdin_open: true + depends_on: + - db + - redis + ports: + - "9091:8081" + - "9041:9041" + - ${WEB_PDB_PORT:-7755}:${WEB_PDB_PORT:-7755} + # NGINX server + pnginxserver: + env_file: + - ../../.env.dev + container_name: ${DOCKER_NGINX_NAME:-pnginxserver} + build: + context: ./config/nginx + dockerfile: nginx.docker + hostname: nginxserver + volumes: + - public:/usr/src/app/public + - ../../dockerlogs/nginx:/var/log/nginx + ports: + - "${WEB_UI_PORT:-9090}:80" + depends_on: + - djangoapp + tty: true + #React + react: + env_file: + - ../../.env.dev + container_name: react + build: + context: ../../ + dockerfile: docker/compose/config/react/Dockerfile + volumes: + - '../../frontend:/app' + #- '/frontend/node_modules' + - react-volume:/app/build + environment: + - CHOKIDAR_USEPOLLING=true + command: > + /bin/ash -c "echo "start" > /app/build.log; \ + npm ci; \ + chown -R node:node /app; \ + npm run build | tee /app/build.log;" + # Postgresql + db: + env_file: + - ../../.env.dev + container_name: ${DOCKER_DB_NAME:-db} + image: postgres:12.0-alpine + # Redis + redis: + env_file: + - ../../.env.dev + container_name: ${DOCKER_REDIS_NAME:-redis} + image: "redis:alpine" + # Celery + celery: + container_name: ${DOCKER_CELERY_NAME:-celerycontainer} + env_file: + - ../../.env.dev + build: + context: ../../ + dockerfile: docker/compose/config/python/python.docker + command: bash -c "py3clean . && celery -A console worker -l info" + volumes: + - public:/usr/src/app/public + - ../../:/usr/src/app/ + depends_on: + - db + - redis + - djangoapp + tty: true + # Celery beat + celery-beat: + env_file: + - ../../.env.dev + container_name: ${DOCKER_CELERY_BEAT_NAME:-celerybeatcontainer} + build: + context: ../../ + dockerfile: docker/compose/config/python/python.docker + command: bash -c "py3clean . && celery -A console beat -l info --scheduler django_celery_beat.schedulers:DatabaseScheduler" + volumes: + - ../../:/usr/src/app/ + - public:/usr/src/app/public + depends_on: + - db + - redis + - celery + - djangoapp + tty: true + restart: always + + rabbitmq-management: + image: rabbitmq:3.7.8-management + ports: + - "15672:15672" + - "5672:5672" + volumes: + - rabbitmq:/var/lib/rabbitmq +volumes: + public: # web app generated data (static, media, etc.) 
+ react-volume: + rabbitmq: \ No newline at end of file diff --git a/docker/compose/el.yml b/docker/compose/el.yml new file mode 100644 index 0000000..0632b44 --- /dev/null +++ b/docker/compose/el.yml @@ -0,0 +1,57 @@ +version: '3.2' +services: + elasticsearch: + env_file: + - ../../.env.dev + container_name: ${DOCKER_ELASTIC_NAME:-elastic} + build: + context: ./config/elk/elasticsearch/ + args: + ELK_VERSION: $ELK_VERSION + volumes: + - type: bind + source: ./config/elk/elasticsearch/config/elasticsearch.yml + target: /usr/share/elasticsearch/config/elasticsearch.yml + read_only: true + #- type: volume + #source: elasticsearch + #target: /usr/share/elasticsearch/data + ports: + - "9200:9200" + - "9300:9300" + environment: + ES_JAVA_OPTS: "-Xmx512m -Xms512m" + #ES_JAVA_OPTS: "-Xmx256m -Xms256m" + ELASTIC_PASSWORD: changeme + # Use single node discovery in order to disable production mode and avoid bootstrap checks + # see https://www.elastic.co/guide/en/elasticsearch/reference/current/bootstrap-checks.html + discovery.type: single-node + vector: + env_file: + - ../../.env.dev + container_name: ${DOCKER_VECTOR_NAME:-vector} + build: + context: ./config/elk/vector/ + command: "--watch-config /usr/share/vector/pipeline/ --config-dir /usr/share/vector/pipeline/" + volumes: + - type: bind + source: ./config/elk/vector/config/vector.yml + target: /usr/share/vector/config/vector.yml + read_only: true + - type: bind + source: ./config/elk/vector/pipeline + target: /usr/share/vector/pipeline + read_only: true + - ../../dockerlogs/:/var/log + ports: + - "5100:5000/tcp" + - "5100:5000/udp" + - "9600:9600" + - "5500:5500/udp" + - "5600:5600/udp" + environment: + LS_JAVA_OPTS: "-Xmx4096m -Xms4096m" + #LS_JAVA_OPTS: "-Xmx256m -Xms256m" + depends_on: + - elasticsearch + diff --git a/docker/compose/flower.yml b/docker/compose/flower.yml new file mode 100644 index 0000000..04877ab --- /dev/null +++ b/docker/compose/flower.yml @@ -0,0 +1,14 @@ +version: '3.2' +services: + flower: + env_file: + - ../../.env.dev + container_name: ${DOCKER_FLOWER_NAME:-flower} + image: mher/flower + command: ["celery", "--broker=redis://${REDIS_HOST:-redis}:${REDIS_PORT:-6379}", "flower", "--port=5555"] + ports: + - "5555:5555" + expose: + - 5555 + depends_on: + - redis diff --git a/docker/compose/kibana.yml b/docker/compose/kibana.yml new file mode 100644 index 0000000..1081db8 --- /dev/null +++ b/docker/compose/kibana.yml @@ -0,0 +1,19 @@ +version: '3.2' +services: + kibana: + env_file: + - ../../.env.dev + container_name: ${DOCKER_KIBANA_NAME:-kibana} + build: + context: config/elk/kibana/ + args: + ELK_VERSION: $ELK_VERSION + volumes: + - type: bind + source: ./config/elk/kibana/config/kibana.yml + target: /usr/share/kibana/config/kibana.yml + read_only: true + ports: + - "5601:5601" + depends_on: + - elasticsearch diff --git a/docker/compose/license.yml b/docker/compose/license.yml new file mode 100644 index 0000000..1134465 --- /dev/null +++ b/docker/compose/license.yml @@ -0,0 +1,18 @@ +version: '3.2' +services: + license: + build: + context: ../../ + dockerfile: docker/compose/config/license/Dockerfile + volumes: + - ../../license:/app + tty: true + stdin_open: true + container_name: license + command: ./client -config config.json + ports: + - "8050:8050" + networks: + default: + aliases: + - license-client diff --git a/docker/compose/pgadmin.yml b/docker/compose/pgadmin.yml new file mode 100644 index 0000000..26e9d71 --- /dev/null +++ b/docker/compose/pgadmin.yml @@ -0,0 +1,13 @@ +version: '3.2' +services: + pgadmin: 
+ env_file: + - ../../.env.dev + container_name: ${DOCKER_PGADMIN_SERVER:-pgadminserver} + image: dpage/pgadmin4 + volumes: + - ../../dockerlogs/pgadmin:/root/.pgadmin + ports: + - "5050:80" + links: + - db diff --git a/docker/compose/selenium.yml b/docker/compose/selenium.yml new file mode 100644 index 0000000..fe883cd --- /dev/null +++ b/docker/compose/selenium.yml @@ -0,0 +1,40 @@ +version: '3.2' +services: + seleniumhub: + env_file: + - ../../.env.dev + container_name: ${DOCKER_SELENIUM_NAME:-seleniumhub} + image: selenium/hub:4.0.0-20211013 + ports: + - 4444:4444 + tty: true + firefoxnode: + env_file: + - ../../.env.dev + image: selenium/node-firefox:4.0.0-20211013 + ports: + - 4577 + depends_on: + - seleniumhub + environment: + - SE_EVENT_BUS_HOST=seleniumhub + - SE_EVENT_BUS_PUBLISH_PORT=4442 + - SE_EVENT_BUS_SUBSCRIBE_PORT=4443 + - SE_NODE_HOST=firefoxnode + links: + - seleniumhub:hub + chromenode: + env_file: + - ../../.env.dev + image: selenium/node-chrome:4.0.0-20211013 + ports: + - 4578 + depends_on: + - seleniumhub + environment: + - SE_EVENT_BUS_HOST=seleniumhub + - SE_EVENT_BUS_PUBLISH_PORT=4442 + - SE_EVENT_BUS_SUBSCRIBE_PORT=4443 + - SE_NODE_HOST=chromenode + links: + - seleniumhub:hub diff --git a/docker/django_entrypoint.sh b/docker/django_entrypoint.sh new file mode 100644 index 0000000..acf97fd --- /dev/null +++ b/docker/django_entrypoint.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +counter=0 +timeout=55 #times + +sleep 5s + +while [[ $counter -lt $timeout ]] && ! grep "The build folder is ready to be deployed." /build_logs/build.log #-qs + do + echo "waiting for react container to be ready: $(( $counter * 5 ))s" + sleep 5s + counter=$(( $counter + 1 )) + done + +if [[ $counter -eq $timeout ]]; then echo "timeout: react build failed."; fi + +cp -f /react-build/index.html /usr/src/app/console/templates/console/index.html +rm -f /react-build/index.html +mkdir -p /usr/src/app/console/static/react +cp -rf /react-build/* /usr/src/app/console/static/react +/bin/bash ./start_dev_django_app.sh ${SERVE:-gunicorn} \ No newline at end of file diff --git a/docker/generator.py b/docker/generator.py new file mode 100644 index 0000000..007ff2d --- /dev/null +++ b/docker/generator.py @@ -0,0 +1,223 @@ +#!/usr/bin/python3 + +import argparse +import datetime +import logging +import re +from logging.handlers import SysLogHandler, SYSLOG_UDP_PORT + +FIREWALL_LOG_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|8|cs1=63 cs2=deviceInboundInterface=lo0 act=разрешение (pass) src=127.0.0.1 deviceDirection=in proto=icmp dst=127.0.0.1 spt=46084 dpt=53 rt=1604793739000 log_from=filterlog cid=None\n' +FIREWALL_LOG2_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|5|cs1=61 cs2=deviceInboundInterface=igb0 act=block deviceDirection=in proto=icmp src=127.0.0.1 dst=127.0.0.2 rt=1604793739000 deviceFacility=vfilterlog cid=None\n' +FIREWALL_LOG3_TEMPLATE = '<1>CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|0|cs1=80 cs2=deviceInboundInterface=lo0 act=pass deviceDirection=in class=0x00 flowlabel=0x00000 src=127.0.0.1 dst=127.0.0.2 hlim=1 proto=udp payload-length=76 rt=1604793739000 deviceFacility=filterlog cid=None __line=Oct 27 17:14:09 arma.localdomain filterlog: 80,,,0,lo0,match,pass,in,6,0x00,0x00000,1,udp,17,76,fe80::20c:29ff:fe69:de4d,ff02::1:2,546,547,76' +FIREWALL_LOG4_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|0|cs1=73 deviceInboundInterface=em0 act=pass deviceDirection=0 proto=tcp seq=273959436 rt=1611148012000 deviceFacility=filterlog
src=192.168.56.1 dst=192.168.56.104 spt=53738 dpt=80 cs1Label=RuleNumber' +SURICATA_LOG_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|idspower|IDS power|5|rt=1604793739000 log_from=suricata deviceFacility=28775 gid=1 cs1=429496728 cs1Label=Signature cs2=12 cs2Label=line_number rev=1 classification=null priority=3 proto=TCP ip_src=10.20.30.50 port_src=80 ip_dst=10.20.30.1 port_dst=34568 act=start' +SURICATA_LOG_SEVERITY = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|idspower|IDS power|8|rt=1604793739000 log_from=suricata deviceFacility=28775 gid=1 cs1=429496728 cs1Label=Signature cs2=12 cs2Label=line_number rev=1 classification=null priority=3 proto=TCP ip_src=10.20.30.50 port_src=80 ip_dst=10.20.30.1 port_dst=34568 act=start' +SURICATA_LOG_MMS = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|idspower|IDS power|5|rt=1604793739000 log_from=suricata deviceFacility=28775 gid=1 cs1=429496728 cs1Label=Signature cs2=12 cs2Label=line_number rev=1 priority=3 proto=TCP ip_src=10.20.30.50 port_src=80 ip_dst=10.20.30.1 port_dst=34568 act=start' +NTP_LOG_TEMPLATE = "CEF:0|InfoWatch ARMA|ARMAIF|3.5|ntppower|Ntp power|4|rt=1611238449000 deviceFacility=ntpd dvcpid=61184 cs1=4.2.8p14@1.3728-o msg=Starting cs1Label=ntpd version act=start" +NTP_LOG2_TEMPLATE = "CEF:0|InfoWatch ARMA|ARMAIF|3.8.0-dev.19|ntpmanualsync|NTP manual sync|3|rt=1664869933000 deviceFacility=ntp msg=Successfully synced time after 1 attempts cs1=1 cs1Label=attemptsCount" +WEB_LOGIN_LOG_TEMPLATE = "CEF:0|InfoWatch ARMA|ARMAIF|3.5|webauth|Web authentication|0|rt=1604793739000 log_from=armaif cid=None url=/index.php msg=Successful login suser=root src=10.20.30.1 outcome=failure reason=no_idea\n" +WEB_LOGIN_LOG2_TEMPLATE = "CEF:0|InfoWatch ARMA|ARMAIF|3.5|webauth|Web authentication|0|rt=1604793739000 log_from=armaif cid=None url=/index.php msg=Successful login suser=root src=192.168.2.106 __line=Feb 21 11:31:37 arma armaif: /index.php: Successful login for user 'root' from: 192.168.2.106 outcome=success" +WEB_LOGIN_LOG3_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|webauth|Web authentication|0|rt=1611148011000 deviceFacility=armaif request=/index.php msg=Successful login suser=root src=192.168.56.1 outcome=success' +WEB_LOGIN_LOG4_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|webauth|Web authentication|0|rt=1611148011000 deviceFacility=armaif request=/index.php msg=Successful login suser=root src=192.168.56.1 outcome=success' +WEB_ACCESS_LOG_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|lighttpdaccess|Lighttpd Access|5|rt=1604793739000 deviceFacility=lighttpd dvcpid=29727 src=10.20.30.1 dst=10.20.30.50 requestMethod=GET url_relative=/api/core/menu/search/?_\\=1569482291550 app=HTTP/1.1 cs1=200 cs2=65734 cs1Label=responseCode cs2Label=bodyLength request=http://10.20.30.50/ui/netsnmp/general/index requestClientApplication=Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0 mechanic=Lighttpd" \n' +WEB_ACCESS_LOG2_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|lighttpdaccess|Lighttpd Access|5|rt=1604793739000 deviceFacility=lighttpd dvcpid=79894 src=192.168.2.106 dst=192.168.2.1 requestMethod=GET request=/widgets/api/get.php?load\=system%2Cgateway%2Cinterfaces&_\=1582284700985 app=HTTP/1.1 cs1=200 cs2=2425 cs1Label=responseCode cs2Label=bodyLength requestContext=http://192.168.2.1/index.php requestClientApplication=Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0 __line=Feb 21 11:34:33 arma lighttpd[79894]: 192.168.2.106 192.168.2.1 - [21/Feb/2020:11:34:33 +0000] "GET 
/widgets/api/get.php?load\=system%2Cgateway%2Cinterfaces&_\=1582284700985 HTTP/1.1" 200 2425 "http://192.168.2.1/index.php" "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0"' +WEB_ACCESS_LOG3_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|lighttpdaccess|Lighttpd Access|8|rt=1604793739000 deviceFacility=lighttpd dvcpid=79894 src=192.168.2.106 dst=192.168.2.1 requestMethod=GET request=/widgets/api/get.php?load\=system%2Cgateway%2Cinterfaces&_\=1582284700985 app=HTTP/1.1 cs1=200 cs2=2425 cs1Label=responseCode cs2Label=bodyLength requestContext=http://192.168.2.1/index.php requestClientApplication=Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0 __line=Feb 21 11:34:33 arma lighttpd[79894]: 192.168.2.106 192.168.2.1 - [21/Feb/2020:11:34:33 +0000] "GET /widgets/api/get.php?load\=system%2Cgateway%2Cinterfaces&_\=1582284700985 HTTP/1.1" 200 2425 "http://192.168.2.1/index.php" "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0"' +WEB_ACCESS_LOG4_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|lighttpdaccess|Lighttpd Access|5|rt=1605610654000 deviceFacility=lighttpd dvcpid=44121 src=10.20.30.1 dst=10.20.30.54 requestMethod=GET request=/api/core/menu/search/?_\=1574012278012 app=HTTP/1.1 cs1=302 cs2=66085 requestContext=http://10.20.30.54/ui/captiveportal requestClientApplication=Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:70.0) Gecko/20100101 Firefox/70.0 cs1Label=responseCode cs2Label=bodyLength' +WEB_ACCESS_LOG5_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6-rc2|accessalert|Acess alert|1|rt=1620805384000 deviceFacility=lighttpd dvcpid=94479 src=192.168.1.201 dst=192.168.1.101 requestMethod=GET request=/ui/arpwatcher/general app=HTTP/1.1 cs1=200 cs2=118524 requestContext=https://192.168.1.101/ui/arpwatcher/index requestClientApplication=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36 cs1Label=responseCode cs2Label=bodyLength' +ARPWATCH_LOG_TEMPLATE = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|arpwatchalert|Arpwatch alert|6|rt=1604793739000 deviceFacility=arpwatch cid=None message=new station src=10.0.3.2 src_old=None mac_src=52:54:0:12:35:2 mac_src_old=None mechanic=Arpwatch act=Destroy_all_humanity __line=Jan 30 08:41:33 arma arpwatch: new station 10.0.3.2 52:54:0:12:35:2#012 description=Было выявлено несанкционированное подключение устройства IP: 10.0.3.2, MAC: 52:54:0:12:35:2\n' +ARPWATCH_LOG_TEMPLATE2 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|arpwatchalert|Arpwatch alert|5|rt=1604793739000 deviceFacility=arpwatch cid=None message=new station src=192.168.1.100 src_old=None mac_src=0:c:29:e6:74:14 mac_src_old=None mechanic=Arpwatch __line=May 15 14:08:36 arma arpwatch: new station 192.168.1.100 0:c:29:e6:74:14 description=Было выявлено несанкционированное подключение устройства IP: 192.168.1.100, MAC: 0:c:29:e6:74:14\n' +ARPWATCH_LOG_TEMPLATE3 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|arpwatchalert|Arpwatch alert|5|rt=1604793739000 deviceFacility=arpwatch cid=None message=new station src=192.168.1.100 src_old=None act=Destroy_all_humanity mac_src=0:c:29:e6:74:14 mac_src_old=None mechanic=Arpwatch __line=May 15 14:08:36 arma arpwatch: new station 192.168.1.100 0:c:29:e6:74:14 description=Было выявлено несанкционированное подключение устройства IP: 192.168.1.100, MAC: 0:c:29:e6:74:14\n' +ARPWATCH_LOG_TEMPLATE4 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6-rc2|arpwatchalert|Arpwatch alert|7|rt=1620805293000 deviceFacility=arpwatch act=new station src=192.168.1.101 smac=00:50:56:bd:86:c5 
cs1Label=src_old cs2Label=smac_old' + +# These logs are for networkmap +ARPWATCH_CONNECT_LOG_1 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5.2_7|arpwatchalert|Arpwatch alert|5|rt=1613559551000 deviceFacility=arpwatch act=new station src=192.168.1.20 smac=0a:00:27:00:00:00 cs1Label=src_old cs2Label=smac_old' +ARPWATCH_CONNECT_LOG_2 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5.2_7|arpwatchalert|Arpwatch alert|5|rt=1613559551000 deviceFacility=arpwatch act=new station src=192.168.1.21 smac=0a:00:28:00:00:00 cs1Label=src_old cs2Label=smac_old' +ARPWATCH_CONNECT_LOG_3 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5.2_7|arpwatchalert|Arpwatch alert|5|rt=1613559551000 deviceFacility=arpwatch act=new station src=10.10.1.11 smac=0a:00:29:00:00:00 cs1Label=src_old cs2Label=smac_old' +ARPWATCH_CONNECT_LOG_4 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5.2_7|arpwatchalert|Arpwatch alert|5|rt=1613559551000 deviceFacility=arpwatch act=new station src=10.10.1.21 smac=0a:00:30:00:00:00 cs1Label=src_old cs2Label=smac_old' +ARPWATCH_CONNECT_LOG_5 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5.2_7|arpwatchalert|Arpwatch alert|5|rt=1613559551000 deviceFacility=arpwatch act=new station src=17.234.12.34 smac=0a:00:31:00:00:00 cs1Label=src_old cs2Label=smac_old' +SURICATA_CONNECT_LOG_1 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|idspower|IDS power|5|rt=1604793739000 log_from=suricata deviceFacility=28775 gid=1 cs1=429496728 cs1Label=Signature cs2=12 cs2Label=line_number msg=Test message 1 rev=1 priority=3 proto=TCP ip_src=192.168.1.20 port_src=80 ip_dst=192.168.1.21 port_dst=34568 act=start\n' +SURICATA_CONNECT_LOG_2 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|idspower|IDS power|5|rt=1604793739000 log_from=suricata deviceFacility=28775 gid=1 cs1=429496728 cs1Label=Signature cs2=12 cs2Label=line_number rev=1 msg=Test message 2 priority=3 proto=TCP ip_src=17.234.12.34 port_src=80 ip_dst=192.168.1.21 port_dst=34568 act=start\n' +SURICATA_CONNECT_LOG_3 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.0|idspower|IDS power|5|rt=1604793739000 log_from=suricata deviceFacility=28775 gid=1 cs1=429496728 cs1Label=Signature cs2=12 cs2Label=line_number rev=1 msg=Test message 3 priority=3 proto=TCP ip_src=192.168.1.21 port_src=80 ip_dst=10.10.1.11 port_dst=34568 act=start\n' +SURICATA_CONNECT_LOG_4 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6|idsalert|IDS alert|5|rt=1604793739000 deviceFacility=suricata dvcpid=7814 cs1=1 act=429496728 cs2=1 msg=ARMA_S7Comm_PLC_Stop proto=TCP src=192.168.1.1 spt=49238 dst=192.168.2.2 dpt=102 cs1Label=gid cs2Label=rev' +SURICATA_CONNECT_LOG_5 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6.rc.36|idsalert|IDS Alert|5|rt=1639485803000 deviceFacility=suricata dvcpid=81078 cs1=1 act=2002752 cs2=4 msg=ET POLICY Reserved Internal IP Traffic proto=UDP src=192.168.244.1 spt=68 dst=192.168.244.254 dpt=67 cs1Label=gid cs2Label=rev' +SURICATA_CONNECT_LOG_6 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6.rc.36|idspower|IDS power|3|rt=1639485268000 deviceFacility=suricata msg=Test message 5 dvcpid=36147 act=shutdown' +SURICATA_CONNECT_LOG_7 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6.rc.36|idsalert|IDS rule alert|5|rt=1639485222000 deviceFacility=suricata dvcpid=36147 msg=Suricate message cs1=alert modbus any any -> any any (msg:\"SURICATA Modbus invalid Length\"; app-layer-event:modbus.invalid_length; classtype:protocol-command-decode; sid:2250003; rev:2;) filePath=/usr/local/etc/suricata/opnsense.rules/modbus-events.rules cs2=6 reason=parse signature error cs1Label=signature cs2Label=lineNumber' +SURICATA_CONNECT_LOG_8 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6.rc.36|idsalert|IDS Alert|5|rt=1639471522000 deviceFacility=suricata dvcpid=51009 cs1=1 
act=2210056 cs2=1 msg=SURICATA STREAM bad window update proto=TCP src=192.168.0.1 spt=51784 dst=192.168.0.3 dpt=51033 cs1Label=gid cs2Label=rev' +SURICATA_CONNECT_LOG_9 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6.rc.36|idsalert|IDS Alert|5|rt=1639485548000 deviceFacility=suricata dvcpid=81078 cs1=1 act=2002752 cs2=4 msg=ET POLICY Reserved Internal IP Traffic proto=TCP src=192.168.0.204 spt=443 dst=192.168.0.23 dpt=52659 cs1Label=gid cs2Label=rev' +SURICATA_CONNECT_LOG_10 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6.rc.36|idsalert|IDS rule alert|5|rt=1639470621000 deviceFacility=suricata dvcpid=51009 msg=Suricata message 3 cs1=alert http any any -> any any (msg:\"Black list checksum match and extract SHA256\"; filesha256:fileextraction-chksum.list; filestore; sid:30; rev:1;) filePath=/usr/local/etc/suricata/opnsense.rules/files.rules cs2=52 reason=parse signature error cs1Label=signature cs2Label=lineNumber' +SURICATA_CONNECT_LOG_11 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6-rc12|idsalert|IDS rule alert|5|rt=1634546072000 deviceFacility=suricata dvcpid=77565 msg=Suricata message 6 cs1=alert modbus any any -> any any (msg:"SURICATA Modbus invalid Unit Identifier"; app-layer-event:modbus.invalid_unit_identifier; classtype:protocol-command-decode; sid:2250004; rev:2;) filePath=/usr/local/etc/suricata/opnsense.rules/modbus-events.rules cs2=8 reason=parse signature error cs1Label=signature cs2Label=lineNumber' +SURICATA_CONNECT_LOG_12 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6-rc.55|integrityalert|Integrity alert|0|rt=1647277200000 msg=Test message 5 outcome=success deviceFacility=integrity' + +FIREWALL_CONNECT_LOG_1 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|0|cs1=73 deviceInboundInterface=em0 act=pass deviceDirection=0 proto=tcp seq=273959436 rt=1611148012000 deviceFacility=filterlog src=192.168.1.21 dst=10.10.1.21 spt=53738 dpt=80 cs1Label=RuleNumber' +FIREWALL_CONNECT_LOG_2 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|0|cs1=73 deviceInboundInterface=em0 act=pass deviceDirection=0 proto=tcp seq=273959436 rt=1611148012000 deviceFacility=filterlog src=10.10.1.11 dst=10.10.1.21 spt=53738 dpt=80 cs1Label=RuleNumber' +FIREWALL_CONNECT_LOG_3 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|0|cs1=73 deviceInboundInterface=em0 act=pass deviceDirection=0 proto=tcp seq=273959436 rt=1611148012000 deviceFacility=filterlog src=192.168.1.20 dst=10.10.1.11 spt=53738 dpt=80 cs1Label=RuleNumber' +FIREWALL_CONNECT_LOG_4 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5|pfalert|PF rule alert|0|cs1=73 deviceInboundInterface=em0 act=pass deviceDirection=0 proto=tcp seq=273959436 rt=1611148012000 deviceFacility=filterlog src=1.1.1.1 dst=2.2.2.2 spt=53738 dpt=80 cs1Label=RuleNumber' +FIREWALL_CONNECT_LOG_5 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.6.rc.36|pfalert|PF rule alert|0|cs1=79 deviceInboundInterface=le0 act=pass deviceDirection=1 proto=udp rt=1639065415000 deviceFacility=filterlog src=192.168.157.132 dst=192.168.157.2 spt=58642 dpt=53 cs1Label=RuleNumber' +ARPWATCH_CONNECT_LOG_6 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5.2_7|arpwatchalert|Arpwatch alert|5|rt=1613559551000 deviceFacility=arpwatch act=new station src=1.1.1.1 smac=0a:00:27:00:00:00 cs1Label=src_old cs2Label=smac_old' +ARPWATCH_CONNECT_LOG_7 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.5.2_7|arpwatchalert|Arpwatch alert|5|rt=1613559551000 deviceFacility=arpwatch act=new station src=2.2.2.2 smac=0a:00:27:00:00:00 cs1Label=src_old cs2Label=smac_old' + +FIREWALL_PF_LOG = '<134>Mar 2 09:57:56 arma.localdomain filterlog: CEF:0|InfoWatch ARMA|ARMAIF|3.6-rc.55|pfalert|PF rule alert|0|cs1=63 
deviceInboundInterface=vmx1 act=pass deviceDirection=1 class=0x00 flowlabel=0x00000 hlim=1 proto=udp payload-length=76 rt=1646215076000 deviceFacility=filterlog src=fe80::250:56ff:febd:4716 dst=ff02::1:2 spt=546 dpt=547 cs1Label=RuleNumber' + +# Endpoint +ENDPOINT_TEMPLATE_1 = 'CEF:0|InfoWatch ARMA|ARMAIE|2.3.4|white_list|White list|6|rt=1639592220 act=DENIED cat=not whitelisted fname=Firefox Setup 95.0.msi filePath=\\\\Device\\\\HarddiskVolume4\\\\Firefox Setup 95.0.msi\n' +ENDPOINT_TEMPLATE_2 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|integrity_control|Integrity control|5|rt=1613559558000 act=CREATE fname=test.bat filePath=C:\\temp\\test.bat ' +ENDPOINT_TEMPLATE_3 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|integrity_control|Integrity control|5|rt=1613559555000 act=WRITE fname=Hello — копия (2).txt filePath=C:\\temp\\file.exe' +ENDPOINT_TEMPLATE_4 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|integrity_control|Integrity control|5|rt=1613559581000 act=REMOVE fname=Hello — копия (2).txt filePath=C:\\temp\\Hello — копия (2).txt' +ENDPOINT_TEMPLATE_5 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|integrity_control|Integrity control|5|rt=1639592258 act=RENAME fname=C:\temp\Hello.txt filePath=C:\\temp\\Goodbye.txt' +ENDPOINT_TEMPLATE_6 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|integrity_control|Integrity control|5|rt=1604793739000 act=MOVE fname=13245 — копия (3).txt filePath=C:\\temp\\Test_dir\\13245 — копия (3).txt' +ENDPOINT_TEMPLATE_7 = 'CEF:0|InfoWatch ARMA|ARMAIE|2.3.4|white_list|White list|6|rt=1639592258 act=DENIED cat=not whitelisted fname=vc_runtimeAdditional_x86.msi filePath=\\\\Device\\\\HarddiskVolume4\\\\ProgramData\\\\Package Cache\\\\{572DCD10-CF2E-43D1-8151-8BD9AC9086D0}v14.28.29913\\\\packages\\\\vcRuntimeAdditional_x86\\\\vc_runtimeAdditional_x86.msi\n' +ENDPOINT_TEMPLATE_8 = 'CEF:0|InfoWatch ARMA|ARMAIE|2.3.4|usb|USB|6|rt=1639592452 act=DENIED cs1Label=pid cs1=1000 cs2Label=vid cs2=8564 cs3Label=serial_number cs3=JKPQMZ1G msg=class:8 subclass:6;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0\n' +ENDPOINT_TEMPLATE_9 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|usb|USB|5|rt=1604793739000 act=DENIED cs1Label=pid cs1=1000 cs1Labe2=vid cs2=90c cs3Label=serial_number cs3=0376119070023321 msg=[class:c1 subclass:s1;class:s2 subclass:s2]' +ENDPOINT_TEMPLATE_10 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|usb|USB|5|rt=1604793739000 act=ALLOWED ccs1Label=pid cs1=1000 cs1Labe2=vid cs2=90c cs3Label=serial_number cs3=0376119070023852 msg=[class:8 subclass:6]' +ENDPOINT_TEMPLATE_11 = 'CEF:0|InfoWatch ARMA|ARMAIE|2.3.4|usb|USB|6|rt=1639592541 act=DENIED cs1Label=pid cs1=810 cs2Label=vid cs2=45e cs3Label=serial_number cs3=Љ msg=class:239 subclass:2;class:14 subclass:1;class:14 subclass:2;class:1 subclass:1;class:1 subclass:2;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0;class:0 subclass:0\n' +ENDPOINT_TEMPLATE_12 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|antivirus|Antivirus|5|rt=1613559551000 act=scan_start fname=file_1.exe filePath=C:\\temp\\file_1.exe' +ENDPOINT_TEMPLATE_13 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|antivirus|Antivirus|5|rt=1604793739000 act=scan_stop fname=file_2.exe filePath=C:\\dir_2\\file_2.exe' +ENDPOINT_TEMPLATE_14 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|antivirus|Antivirus|5|rt=1605610654000 act=remove_scan_tasc fname=file_3.exe filePath=C:\\dir_2\\file_2.exe' +ENDPOINT_TEMPLATE_15 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|antivirus|Antivirus|5|rt=1604793739000 
act=find_virus fname=file_bad.exe filePath=C:\\Documents\\file_bad.exe cs1Label=virus_name cs1=drakula' +ENDPOINT_TEMPLATE_16 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|antivirus|Antivirus|5|rt=1639485222000 act=file_deleted fname=file_2.bat filePath=C:\\windows\\file_2.bat' +ENDPOINT_TEMPLATE_17 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|antivirus|Antivirus|5|rt=1604793739000 act=scan_error fname=file_3.exe filePath=C:\\windows\\file_3.exe cs1Label=errfor_name cs1=File not found' +ENDPOINT_TEMPLATE_18 = 'CEF:0|InfoWatch ARMA|ARMAIE|3.5.2_7|antivirus|Antivirus|5|rt=1639592258 act=scan_failed fname=file_4.bat filePath=C:\\system32\\file_4.bat cs1Label=errfor_name cs1=Scan error' +# ClamAv +ANTIVIRUS_1 = 'CEF:0|InfoWatch ARMA|ARMAIF|3.7.2-dev.3|clamav_alert|CLAMAV alert|5|cs1=https://secure.eicar.org/eicar.com cs2=Win.Test.EICAR_HDB-1 src=192.168.1.100 rt=1657668458000 act=blocked deviceFacility=clamav cs1Label=Request URL cs2Label=Virus Name' + +LOG_TYPE = { + 'suricata': [SURICATA_LOG_TEMPLATE, SURICATA_LOG_MMS, SURICATA_LOG_SEVERITY, SURICATA_CONNECT_LOG_1, + SURICATA_CONNECT_LOG_2, SURICATA_CONNECT_LOG_3, SURICATA_CONNECT_LOG_4, SURICATA_CONNECT_LOG_5, + SURICATA_CONNECT_LOG_6, SURICATA_CONNECT_LOG_7, SURICATA_CONNECT_LOG_8, SURICATA_CONNECT_LOG_9, + SURICATA_CONNECT_LOG_10, SURICATA_CONNECT_LOG_11, SURICATA_CONNECT_LOG_12], + 'pf': [FIREWALL_LOG_TEMPLATE, FIREWALL_LOG2_TEMPLATE, FIREWALL_LOG3_TEMPLATE, FIREWALL_LOG4_TEMPLATE, + FIREWALL_PF_LOG], + 'ntp': [NTP_LOG_TEMPLATE, NTP_LOG2_TEMPLATE], + 'web_login': [WEB_LOGIN_LOG_TEMPLATE, WEB_LOGIN_LOG2_TEMPLATE, WEB_LOGIN_LOG3_TEMPLATE, WEB_LOGIN_LOG4_TEMPLATE], + 'web_access': [WEB_ACCESS_LOG_TEMPLATE, WEB_ACCESS_LOG2_TEMPLATE, WEB_ACCESS_LOG3_TEMPLATE, + WEB_ACCESS_LOG4_TEMPLATE], + 'arp': [ARPWATCH_LOG_TEMPLATE, ARPWATCH_LOG_TEMPLATE2, ARPWATCH_LOG_TEMPLATE3, ARPWATCH_LOG_TEMPLATE4], + 'endpoint': [ENDPOINT_TEMPLATE_1, ENDPOINT_TEMPLATE_2, ENDPOINT_TEMPLATE_3, + ENDPOINT_TEMPLATE_4, ENDPOINT_TEMPLATE_5, ENDPOINT_TEMPLATE_6, + ENDPOINT_TEMPLATE_7, ENDPOINT_TEMPLATE_8, ENDPOINT_TEMPLATE_9, + ENDPOINT_TEMPLATE_10, ENDPOINT_TEMPLATE_11, ENDPOINT_TEMPLATE_12, + ENDPOINT_TEMPLATE_13, ENDPOINT_TEMPLATE_14, ENDPOINT_TEMPLATE_15, + ENDPOINT_TEMPLATE_16, ENDPOINT_TEMPLATE_17, ENDPOINT_TEMPLATE_18 + ], + 'antivirus': [ANTIVIRUS_1] +} + +CONNECT_LOG_TEST = { + 'arp': [ARPWATCH_CONNECT_LOG_1, + ARPWATCH_CONNECT_LOG_2, + ARPWATCH_CONNECT_LOG_3, + ARPWATCH_CONNECT_LOG_4, + ARPWATCH_CONNECT_LOG_5], + 'suricata': [SURICATA_CONNECT_LOG_1, SURICATA_CONNECT_LOG_2, SURICATA_CONNECT_LOG_3], + 'pf': [FIREWALL_CONNECT_LOG_1, FIREWALL_CONNECT_LOG_2, FIREWALL_CONNECT_LOG_3, FIREWALL_CONNECT_LOG_4, + FIREWALL_CONNECT_LOG_5] +} + +parser = argparse.ArgumentParser(description='Log spammer', epilog="For Endpoint, select simple format") +parser.add_argument('-d', '--destination', help='Host to send logs', type=str, default='localhost') +parser.add_argument('-p', '--port', help='Port to send logs', type=int, default=SYSLOG_UDP_PORT) +parser.add_argument('-n', '--number', help='Number of logs to send', type=int, default=1) +parser.add_argument('-t', + '--type', + help='Type of logs to send', + type=str, + choices=['all', 'suricata', 'pf', 'ntp', 'web_login', 'web_access', 'arp', 'endpoint', 'antivirus'], + default='all') +parser.add_argument('-f', '--format', help='Format of log', type=str, choices=['simple', 'syslog'], default='simple') +parser.add_argument('--replace-time', + dest='replace_time', + action='store_true', + help='Replace timestamp in log template with current
timestamp') +parser.add_argument('--simulate', dest='simulate', action='store_true', help='Instead of sending logs, just print them') +parser.add_argument('--timezone', + dest='timezone', + type=int, + help="How many hours we differ from UTC. Default is 3, which is +03:00 (Moscow)") +parser.add_argument('--timezone-negative', dest="timezone_negative", action='store_true', help="Timezone sign") +parser.set_defaults(timezone_negative=False) +parser.set_defaults(timezone=3) +parser.add_argument('--netmap-test', + dest='netmap_test', + action='store_true', + help='Send logs to test network map rendering') +parser.set_defaults(replace_time_utc=False) +parser.set_defaults(replace_time=False) +parser.set_defaults(simulate=False) +parser.set_defaults(netmap_test=False) +args = parser.parse_args() + +if args.type == "all" or args.type == "endpoint": + if args.format != "simple": + print("Error! Bad arguments combination") + print("For 'endpoint' type, use 'simple' format") + exit(1) + + +def init_syslog_logging2(addr, port): + logger = logging.getLogger('cef_syslog_export') + logger.setLevel(logging.DEBUG) + + shandler = SysLogHandler(address=(addr, port)) + shandler.setLevel(logging.DEBUG) + + if args.format == 'syslog': + formatter = logging.Formatter('%(asctime)s: %(message)s', '%b %d %H:%M:%S') + shandler.setFormatter(formatter) + else: + fmt = logging.Formatter('%(message)s') + shandler.setFormatter(fmt) + + logger.addHandler(shandler) + + return logger + + +def send_logs(type, number, logger): + logs = [] + logs_source = CONNECT_LOG_TEST if args.netmap_test else LOG_TYPE + if type == 'all': + print(f"Since type \"{type}\" was selected, endpoint logs will be ignored") + for key in logs_source: + if key != "endpoint": + logs.append(key) + else: + logs.append(type) + + counter = 0 + + for i in range(number): + for cur in logs: + for template in logs_source[cur]: + if args.replace_time: + # For endpoint + if '2021-04-14T09:35:25+03:00' in template: + delta = datetime.timedelta(hours=args.timezone) + now = datetime.datetime.now(datetime.timezone.utc) + if args.timezone_negative: + now = now - delta + sign = "-" + else: + now = now + delta + sign = "+" + template = template.replace("2021-04-14T09:35:25+03:00", now.replace(microsecond=0).isoformat()) + template = template.replace("+00:00", "{0}{1:02}:00".format(sign, args.timezone)) + else: + now = datetime.datetime.now(datetime.timezone.utc).timestamp() + if args.timezone_negative: + now = now - args.timezone * 60 * 60 + else: + now = now + args.timezone * 60 * 60 + template = re.sub(r'unixdate=\d+\.?\d+?', f'unixdate={round(now * 1000)}', template) + template = re.sub(r'rt=\d+\.?\d+?', f'rt={round(now * 1000)}', template) + if not args.simulate: + logger.info(template.strip()) + else: + print(template) + counter += 1 + + print(f'Total records sent: {counter}') + + +logger = init_syslog_logging2(args.destination, args.port) + +send_logs(args.type, args.number, logger) diff --git a/docker/print_dump.py b/docker/print_dump.py new file mode 100644 index 0000000..33b333e --- /dev/null +++ b/docker/print_dump.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python3 + +import argparse +import json + +parser = argparse.ArgumentParser() +parser.add_argument('dump', help='Dump you want to print') +args = parser.parse_args() + +data = None +with open(args.dump, 'r', encoding='utf8') as f: + data = json.load(f) + +print(json.dumps(data, indent=2).encode().decode('unicode_escape')) diff --git a/docker/scripts/set_mapping.py b/docker/scripts/set_mapping.py new file mode
100644 index 0000000..32b54ea --- /dev/null +++ b/docker/scripts/set_mapping.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python3 + +import time + +import requests + +# Wait +# time.sleep(90) + +auth = requests.auth.HTTPBasicAuth("elastic", "changeme") + + +def check_elastic(): + response = None + try: + response = requests.get("http://localhost:9200/_cluster/health", auth=auth) + except requests.ConnectionError: + return False + + return response.status_code == 200 + + +def set_index(url: str, file: str): + with open(file, 'r') as f: + data = f.read() + headers = {'Content-Type': 'application/json'} + response = requests.put(url, headers=headers, auth=auth, data=data) + + return response.status_code == 200 and response.json().get('acknowledged') is True + + +# Wait for elastic ready +counter = 0 +while counter < 100: + if not check_elastic(): + time.sleep(1) + counter += 1 + else: + break + +print("Installing index") + +data = [ + {"url": "http://localhost:9200/_component_template/normalized_component", + "file": "../compose/config/elk/elasticsearch/mapping/normalized-component.json"}, + {"url": "http://localhost:9200/_index_template/normalized", + "file": "../compose/config/elk/elasticsearch/mapping/normalized-index-template.json"}, + {"url": "http://localhost:9200/_component_template/aggregated_component", + "file": "../compose/config/elk/elasticsearch/mapping/aggregated-component.json"}, + {"url": "http://localhost:9200/_index_template/aggregated", + "file": "../compose/config/elk/elasticsearch/mapping/aggregated-index-teplate.json"}, +] + +for cur in data: + if not set_index(cur['url'], cur['file']): + print(f"Can't set index {cur['file']}") + exit(1) + +print("All done") +exit(0) diff --git a/docker/start.py b/docker/start.py new file mode 100644 index 0000000..0b92517 --- /dev/null +++ b/docker/start.py @@ -0,0 +1,398 @@ +#!/usr/bin/env python3 + +import argparse +import glob +import os +import subprocess +import sys +import threading + +from contextlib import contextmanager + +CONFIG = {} +DC_ENV = {} + + +def read_proc_callback(proc, output_handler=lambda line: line): + for line in iter(proc.stdout.readline, b''): + output_handler(line) + + +def read_config(fpath='../.env.dev'): + with open(fpath, 'r') as f: + content = f.read() + config = {} + for line in content.splitlines(): + line = line.strip() + if not line or line.startswith('#'): + continue + key, val = line.split('=', 1) + config[key] = val + return config + + +def is_container_running(container): + return bool(subprocess.check_output(['docker', 'ps', '-q', '--filter', f'name={container}'], env=DC_ENV)) + + +def get_containers_ids(*additional_docker_ps_args): + names = subprocess.check_output(['docker', 'ps', '-q', *additional_docker_ps_args], env=DC_ENV) + return names.decode('utf-8').split() + + +def stop(containers=None): + """ Stop passed containers or all containers if no names passed + :param containers: list|tuple names or string name or None, meaning all + :return: list of stopped containers names""" + if containers is None: + containers = get_containers_ids() + if not isinstance(containers, (list, tuple)): + containers = [containers] + if containers: + arguments = ['docker', 'stop', *containers] + print(f'Stopping containers {containers}') + proc = subprocess.run(arguments, env=DC_ENV) + return containers + + +def down(): + """ Stop and remove containers """ + arguments = ['docker-compose', '-f', 'compose/django.yml', '-f', 'compose/selenium.yml', + '-f', 'compose/flower.yml', '-f', 'compose/pgadmin.yml', + '-f', 'compose/el.yml', '-f',
'compose/correlator.yml', + '--env-file=../.env.dev', '-f', 'compose/kibana.yml', + '-f', 'compose/license.yml', 'down'] + print('Docker-compose down') + subprocess.run(arguments, env=DC_ENV) + + +def exec_django_container_cmd(*cmds): + global CONFIG + subprocess.run(['docker', 'exec', '-ti', CONFIG.get('DOCKER_DJANGO_NAME'), *cmds], env=DC_ENV) + + +def wait_process_stop(proc, timeout=None): + """ Wait for process which supports interruption, i.e. it can stop itself on interruption """ + try: + try: + return proc.wait(timeout=timeout) + except KeyboardInterrupt: + print('Stopping process gracefully, repeat interruption to stop immediately') + proc.wait() + except: + proc.kill() + proc.wait() + raise + + +def wait_till_output_bytes_line(proc, + bytes_lines_to_wait=[b'Starting Gnuicorn server', b'Starting Django dev server'], + print_output=True, + reraise=False): + if not isinstance(bytes_lines_to_wait, (tuple, list)): + bytes_lines_to_wait = [bytes_lines_to_wait] + try: + print('Waiting for docker ready') + for line in iter(proc.stdout.readline, b''): + if print_output: + print(line.decode('utf-8')[:-1]) + if any((l in line for l in bytes_lines_to_wait)): + return True + return False + except KeyboardInterrupt: + print('Stopping process gracefully, repeat interruption to stop immediately') + return False + + +@contextmanager +def docker_on(use_gunicorn=False, use_selenium=False, use_el=False, use_correlator=False, use_kibana=False, + use_flower=False, use_pgadmin=False, wait_till_loaded=True): + global CONFIG + arguments = ['docker-compose', '-f', 'compose/django.yml', '-f', 'compose/license.yml'] + was_running = [] + + needed_conts = ('DOCKER_DJANGO_NAME',) + was_running.extend([is_container_running(CONFIG.get(cont, 'None')) for cont in needed_conts]) + + if use_selenium: + needed_conts = ('DOCKER_FIREFOX_NODE_NAME', 'DOCKER_CHROME_NODE_NAME', 'DOCKER_SELENIUM_NAME') + was_running.extend([is_container_running(CONFIG.get(cont, 'None')) for cont in needed_conts]) + arguments.append('-f') + arguments.append('compose/selenium.yml') + + if use_flower: + needed_conts = ('DOCKER_FLOWER_NAME',) + was_running.extend([is_container_running(CONFIG.get(cont, 'None')) for cont in needed_conts]) + arguments.append('-f') + arguments.append('compose/flower.yml') + + if use_pgadmin: + needed_conts = ('DOCKER_PGADMIN_SERVER',) + was_running.extend([is_container_running(CONFIG.get(cont, 'None')) for cont in needed_conts]) + arguments.append('-f') + arguments.append('compose/pgadmin.yml') + + if use_el: + needed_conts = ('DOCKER_ELASTIC_NAME', 'DOCKER_VECTOR_NAME') + was_running.extend([is_container_running(CONFIG.get(cont, 'None')) for cont in needed_conts]) + arguments.append('-f') + arguments.append('compose/el.yml') + arguments.append('--env-file=../.env.dev') + + if use_correlator: + arguments.append('-f') + arguments.append('compose/correlator.yml') + + if use_kibana: + needed_conts = ('DOCKER_KIBANA_NAME',) + was_running.extend([is_container_running(CONFIG.get(cont, 'None')) for cont in needed_conts]) + arguments.append('-f') + arguments.append('compose/kibana.yml') + + if not use_gunicorn: + DC_ENV['SERVE'] = 'django' + + was_running = all([was_running, *was_running]) + arguments.append('up') + + proc = None + if not was_running: + print(f'Starting containers') + kwargs = {} + if wait_till_loaded: + kwargs = dict(stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + proc = subprocess.Popen(arguments, env=DC_ENV, **kwargs) + if wait_till_loaded: + if not wait_till_output_bytes_line(proc): + stop() + 
raise KeyboardInterrupt + try: + if proc and wait_till_loaded: # prevent process queue overflow + def drop_lines(line): pass + + threading.Thread(target=read_proc_callback, args=(proc, drop_lines)).start() + yield proc + except: + if proc: + proc.kill() + proc.wait() + raise + finally: + if not was_running: + stop() + if proc: + wait_process_stop(proc) + + +def test(enable_warning=False, add_args=[]): + """ Call process as subprocess.call, but on interruption wait for termination of process """ + global CONFIG + test_args = ['docker-compose', '-f', 'compose/django.yml', '-f', 'compose/license.yml', '-f', + 'compose/selenium.yml', 'exec', CONFIG.get('DOCKER_DJANGO_NAME'), 'pytest'] + + if not enable_warning: + test_args.append('--disable-warnings') + + test_args.extend(add_args) + + with docker_on(use_selenium=True, wait_till_loaded=True): + print('Starting tests') + test_proc = subprocess.Popen( + test_args, env=DC_ENV, stderr=subprocess.STDOUT) + wait_process_stop(test_proc) + sys.exit(test_proc.returncode) + + +def ci_test(add_args): + """ Used to run tests in CI """ + global CONFIG + test_args = ['docker-compose', '-f', 'compose/django.yml', '-f', 'compose/license.yml', '-f', + 'compose/selenium.yml', 'exec', '-T', CONFIG.get('DOCKER_DJANGO_NAME'), 'pytest'] + test_args.extend(add_args) + + with docker_on(use_selenium=True, wait_till_loaded=True): + print('Starting tests') + test_proc = subprocess.Popen(test_args, env=DC_ENV) + wait_process_stop(test_proc) + + print("Exit code:", test_proc.returncode) + sys.exit(test_proc.returncode) + + +def ci_test_coverage(add_args): + """ Used to check test coverage""" + global CONFIG + DC_ENV["COVERAGE_FILE"] = "public/.coverage" + + test_args = ['docker-compose', + '-f', + 'compose/django.yml', + '-f', + 'compose/license.yml', + '-f', + 'compose/selenium.yml', + 'exec', + '-T', + CONFIG.get('DOCKER_DJANGO_NAME'), + 'coverage', + "run", + "-m", + "pytest", + "-m", + "merge or selenium"] + report_args = ['docker-compose', '-f', 'compose/django.yml', '-f', 'compose/license.yml', 'exec', '-T', + CONFIG.get('DOCKER_DJANGO_NAME'), 'coverage', "html", "-d", "public/test_coverage/"] + + test_args.extend(add_args) + + result = 0 + + with docker_on(use_selenium=True, wait_till_loaded=True): + print(f"Starting tests {test_args}") + test_proc = subprocess.Popen(test_args, env=DC_ENV) + wait_process_stop(test_proc) + + result = test_proc.returncode + + with docker_on(use_selenium=True, wait_till_loaded=True): + print(f"Processing report {report_args}") + test_report = subprocess.Popen(report_args, env=DC_ENV) + wait_process_stop(test_report) + + sys.exit(result) + + +def parse_args(argv=sys.argv[1:]): + parser = argparse.ArgumentParser( + description=f'Project starter. To open web ui http://localhost:{CONFIG.get("WEB_UI_PORT", 9090)}/ with admin:nimda') + sub_parsers = parser.add_subparsers(title='Commands', dest='cmd', required=True) + + run_parser = sub_parsers.add_parser(name='run', help='Run custom configuration') + run_parser.add_argument('--with-selenium', dest='use_selenium', action='store_true', help='Add selenium') + run_parser.add_argument('--with-flower', dest='use_flower', action='store_true', + help='Add flower. Use http://localhost:5555') + run_parser.add_argument('--with-pgadmin', dest='use_pgadmin', action='store_true', + help='Add Pgadmin. 
Use http://localhost:5050 with pgadmin4@pgadmin.org : admin') + run_parser.add_argument('--with-ev', dest='use_ev', action='store_true', help='Add Vector and Elasticsearch') + run_parser.add_argument('--with-cev', dest='use_cev', action='store_true', + help='Add Vector, Elasticsearch and correlator') + run_parser.add_argument('--with-kibana', dest='use_kibana', action='store_true', + help='Add Kibana. Use http://localhost:5601 with elastic:changeme') + run_parser.add_argument('--with-gunicorn', dest='use_gunicorn', action='store_true', + help='Use gunicorn server instead of django') + run_parser.add_argument('--with-evk', dest='use_evk', action='store_true', + help='Add Vector, Elasticsearch and Kibana') + run_parser.add_argument('--with-cevk', dest='use_cevk', action='store_true', + help='Add Vector, Elasticsearch, correlator and Kibana') + + test_parser = sub_parsers.add_parser(name='test', aliases=['tests'], help='Run tests') + test_parser.add_argument('--enable-warning', dest='enable_warning', action='store_true', + help='Enable warning for tests') + + ci_test_parser = sub_parsers.add_parser(name="ci_test", + help="Run tests in CI. Differs from the test function in the way of running docker compose. Use for CI only.") + + ci_test_coverage = sub_parsers.add_parser(name="ci_test_coverage", + help="Run test coverage in CI. Differs from the test function in the way of running docker compose. Use for CI only.") + + build_parser = sub_parsers.add_parser(name='build', aliases=['rebuild'], help='Rebuild containers') + build_parser.add_argument('--no-cache', dest='no_cache', action='store_true', help='Rebuild without cache') + + stop_parser = sub_parsers.add_parser(name='stop', help='Stop containers') + down_parser = sub_parsers.add_parser(name='down', help='Stop containers and remove containers') + + django_restart_db_parser = sub_parsers.add_parser(name='restart-django', aliases=['reload-django'], + help='Restart django container') + sh_parser = sub_parsers.add_parser(name='sh', help='Run shell in django container') + psh_parser = sub_parsers.add_parser(name='psh', help='Run python django shell in django container') + psql_parser = sub_parsers.add_parser(name='psql', help='Run postgresql shell') + clear_db_parser = sub_parsers.add_parser(name='clear-db', aliases=['db-clear'], + help="Remove migrations which are not in git and drop container's data and tables from database") + clear_db_parser.add_argument('-n', '--dry-run', dest='dry_run', action='store_true', + help='Show actions without actual deletion of info') + webui_parser = sub_parsers.add_parser(name='ui', help='Open web UI') + webpdb_parser = sub_parsers.add_parser(name='wpdb', help='Open web PDB debugger') + + args, unknownargs = parser.parse_known_args(argv) + return args, unknownargs + +def main(): + global CONFIG, DC_ENV + CONFIG.update(read_config()) + DC_ENV = {**os.environ.copy(), **CONFIG} + + args, unknownargs = parse_args() + + if args.cmd == 'run': + if args.use_cevk: + args.use_evk, args.use_cev = True, True + if args.use_cev: + args.use_ev = True + if args.use_evk: + args.use_ev, args.use_kibana = True, True + with docker_on(args.use_gunicorn, args.use_selenium, args.use_ev, args.use_cev, args.use_kibana, + args.use_flower, args.use_pgadmin, wait_till_loaded=False) as proc: + if proc: + wait_process_stop(proc) + + elif args.cmd in ('test', 'tests'): + test(args.enable_warning, unknownargs) + + elif args.cmd == 'ci_test': + ci_test(unknownargs) + elif args.cmd == "ci_test_coverage": + ci_test_coverage(unknownargs) + 
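+    # The remaining subcommands are thin wrappers around docker/docker-compose; each call runs with DC_ENV so values from ../.env.dev are visible to compose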
+ elif args.cmd in ('build', 'rebuild'): + stop() + arguments = ['docker-compose', '-f', 'compose/django.yml', '-f', 'compose/license.yml', '-f', + 'compose/selenium.yml', '-f', + 'compose/pgadmin.yml', '--env-file=../.env.dev', '-f', 'compose/el.yml', '-f', + 'compose/correlator.yml', '-f', 'compose/kibana.yml', 'build'] + + if args.no_cache: + arguments.append('--no-cache') + + proc = subprocess.run(arguments, env=DC_ENV) + + elif args.cmd == 'stop': + stop() + + elif args.cmd == 'down': + down() + + elif args.cmd in ('restart-django', 'reload-django'): + subprocess.run(['docker', 'restart', CONFIG.get('DOCKER_DJANGO_NAME')], env=DC_ENV) + + elif args.cmd in ('sh',): + with docker_on(wait_till_loaded=True) as proc: + exec_django_container_cmd('bash', *unknownargs) + elif args.cmd in ('psh',): + with docker_on(wait_till_loaded=True) as proc: + exec_django_container_cmd(*['bash', '-c', 'python3 manage.py shell'], *unknownargs) + elif args.cmd in ('psql',): + with docker_on(wait_till_loaded=True) as proc: + arguments = ['psql', '-U', CONFIG.get('POSTGRES_USER'), '-d', CONFIG.get("POSTGRES_DB")] + subprocess.run(['docker', 'exec', '-ti', CONFIG.get('DOCKER_DB_NAME'), *arguments], env=DC_ENV) + elif args.cmd in ('clear-db', 'db-clear'): + print('Clearing migrations') + git_clean_opts = '-xf' + (args.dry_run and 'n' or '') + subprocess.run(['sudo', 'git', 'clean', git_clean_opts, *glob.glob("../*/migrations/")], env=DC_ENV) + subprocess.run(['sudo', 'git', 'clean', git_clean_opts, 'compose/config/elk/vector/pipeline/armaif_*.toml'], + env=DC_ENV) + subprocess.run(['sudo', 'git', 'clean', git_clean_opts, 'compose/config/elk/vector/pipeline/endpoint_*.toml'], + env=DC_ENV) + print("Applying docker-compose down to all containers") + if not args.dry_run: + down() + elif args.cmd == 'ui': + import webbrowser + webbrowser.open(f'http://localhost:{CONFIG.get("WEB_UI_PORT")}') + elif args.cmd == 'wpdb': + import webbrowser + webbrowser.open(f'http://localhost:{CONFIG.get("WEB_PDB_PORT")}') + + +if __name__ == '__main__': + main() diff --git a/events/__init__.py b/events/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/admin.py b/events/admin.py new file mode 100644 index 0000000..e69de29 diff --git a/events/api_urls.py b/events/api_urls.py new file mode 100644 index 0000000..1317862 --- /dev/null +++ b/events/api_urls.py @@ -0,0 +1,8 @@ +from django.urls import path + +from events.views.elk_string_query_search_api import ELKStingSearchApiView, ELKIndexListApiView + +urlpatterns = [ + path('elastic/query/', ELKStingSearchApiView.as_view(), name='elk_query_events'), + path('elastic/all-indexes/', ELKIndexListApiView.as_view(), name='elk_all_indexes'), +] diff --git a/events/apps.py b/events/apps.py new file mode 100644 index 0000000..3854644 --- /dev/null +++ b/events/apps.py @@ -0,0 +1,5 @@ +from django.apps import AppConfig + + +class EventsConfig(AppConfig): + name = 'events' diff --git a/events/constants.py b/events/constants.py new file mode 100644 index 0000000..7aa4b3d --- /dev/null +++ b/events/constants.py @@ -0,0 +1,13 @@ +from django.conf import settings + + +ELK_LOGIN = getattr(settings, 'ELK_LOGIN', 'elastic') +ELK_PASS = getattr(settings, 'ELK_PASS', 'changeme') +ELK_HOST = getattr(settings, 'ELK_HOST', 'elasticsearch') +ELK_PORT = getattr(settings, 'ELK_PORT', 9200) +ELK_URL = getattr(settings, 'ELASTIC_URL', 'localhost:9200') +ELK_AGGREGATED_INDEX = getattr(settings, 'ELK_AGGREGATED_INDEX', 'aggregated-*') +REDIS_ELK_INSTANCE_KEY = 
'redis_elasticsearch_instance' +CACHE_TIMEOUT = getattr(settings, 'REDIS_CACHE_TIMEOUT', 120) +ERROR_STATUS = 'err' +ELK_CONNECT_ERROR_JSON = {'status': ERROR_STATUS, 'error_message': 'Cannot establish the connection to elasticsearch'} diff --git a/events/migrations/__init__.py b/events/migrations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/models/__init__.py b/events/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/serializers/__init__.py b/events/serializers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/services/__init__.py b/events/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/services/elk_string_search.py b/events/services/elk_string_search.py new file mode 100644 index 0000000..257d07b --- /dev/null +++ b/events/services/elk_string_search.py @@ -0,0 +1,148 @@ +import logging +from typing import Tuple, Dict, Any + +from elasticsearch import ElasticsearchException, Elasticsearch + +from events.constants import ELK_HOST, ELK_PORT, ELK_LOGIN, ELK_PASS, ELK_CONNECT_ERROR_JSON, ERROR_STATUS + +_log = logging.getLogger(__name__) + + +def connect_to_elasticsearch_instance(elk_host, elk_port, elk_login, elk_password): + """ Function for connecting to an elasticsearch instance with the provided credentials + :param elk_host: elasticsearch host name + :param elk_port: elasticsearch port number + :param elk_login: elasticsearch login + :param elk_password: elasticsearch password + :return: an Elasticsearch instance if the connection is established; otherwise the exception is logged + and None is returned, and callers build the JSON response with err status and the error message + """ + try: + es = Elasticsearch([{'host': elk_host, 'port': elk_port}], http_auth=(elk_login, elk_password)) + return es + except ElasticsearchException as err: + _log.exception(f'The following error occurred while working with elasticsearch: {err}') + + +class ELKStringQuerySearchService: + """Service for searching in ELK by index""" + + def __init__(self, index: str, query_params: Dict) -> None: + _log.debug(f'Start search by index: {index}; and query: {query_params}') + self.index = index + self.query_params = query_params + self.es = connect_to_elasticsearch_instance(ELK_HOST, ELK_PORT, ELK_LOGIN, ELK_PASS) + + def _pagination(self) -> tuple: + page = int(self.query_params.get('page', 1)) + size = int(self.query_params.get('page_size', 10)) + if page <= 0: + page = 1 + if size <= 0: + size = 10 + _from = (page - 1) * size + return _from, size + + def _sorting_data(self) -> list: + """Mapping DRF ordering to ELK.""" + + default_sort_field = {'event_timestamp', '@created', 'event_severity'} + ordering_field = self.query_params['ordering'] + asc_desc_map = {'-': 'desc'} + order = asc_desc_map.get(ordering_field[0], 'asc') + if order == 'desc': + ordering_field = ordering_field[1:] + if ordering_field not in default_sort_field: + ordering_field = f'{ordering_field}.keyword' + ordering = [{ordering_field: order}] + return ordering + + def create_search_body(self) -> Dict: + search_body = {} + try: + _from, size = self._pagination() + except ValueError: + _from, size = 0, 10 + search_body.update({'from': _from, 'size': size, 'track_total_hits': 'true'}) + if 'ordering' in self.query_params and self.query_params['ordering'] != '': + sort_data = self._sorting_data() + search_body['sort'] = sort_data + else: + search_body['sort'] = [{'@created': 'desc'}] + if 'q' in self.query_params: 
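+            # If the free-text query already uses one of the known field prefixes listed below, default_field is left unset; otherwise it is pinned to '' at the end of this method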
event_filters_array = ['event_first:', 'event_last:', 'event_count:', + 'event_timestamp:', 'event_severity:', 'event_src_msg:', 'event_protocol:', + 'device_vendor:', 'device_product:', 'device_version:', 'device_action:', + 'sign_id:', 'sign_category:', 'sign_subcategory:', 'sign_name:', 'source_ip:', + 'source_mac:', 'source_host:', 'source_port:', 'source_user:', + 'destination_host:', 'destination_ip:', 'destination_port:', 'destination_user:'] + default_field = True + text_request = self.query_params['q'].replace(' ', '') + for event_filter in event_filters_array: + if event_filter in text_request: + default_field = False + break + + search_body['query'] = { + 'query_string': { + 'query': self.query_params['q'] + } + } + if default_field: + search_body['query']['query_string']['default_field'] = '' + + return search_body + + def data(self) -> Tuple[Dict[str, Any], int]: + """Return elastic data and response status: 200 if ok, 400 if bad""" + if not self.es: + return ELK_CONNECT_ERROR_JSON, 400 + + search_body = self.create_search_body() + if not search_body: + return {'status': ERROR_STATUS, 'error_message': 'Invalid search request body'}, 400 + + try: + _log.info(f'Sending query: {search_body}') + return self.es.search(index=self.index, body=search_body), 200 + except ElasticsearchException as err: + error = ''.join(map(str, err.args)) + if 'No mapping found for [@created] in order to sort on' in error: + return {"result": "No data found"}, 200 + return {'status': ERROR_STATUS, 'error_message': err.args}, 400 + + +class ELKIndexListService: + """Service for getting all indexes, or the indexes matching a pattern""" + + def __init__(self, query_params: dict): + _log.debug('Start searching indexes') + self.query_params = query_params + self.es = connect_to_elasticsearch_instance(ELK_HOST, ELK_PORT, ELK_LOGIN, ELK_PASS) + self._pattern = self._get_pattern() + + def _get_pattern(self) -> str: + pattern = '' + if self.query_params: + pattern = self.query_params.get('index', '*') + if not pattern.endswith('*'): + pattern += '*' + return pattern + + def _data(self, indexes: list) -> list: + """Return data mapping by pattern""" + data = [{'value': index, 'label': str(index).replace(self._pattern[:-1], '')} for index in indexes] + return data + + def data(self) -> tuple: + if not self.es: + _log.error(f'Get indexes: {ELK_CONNECT_ERROR_JSON}') + return ELK_CONNECT_ERROR_JSON, 400 + try: + indexes = self.es.indices.get(self._pattern).keys() + except ElasticsearchException as err: + _log.error(f'Get indexes: {err}') + return {'status': ERROR_STATUS, 'error_message': err}, 400 + data = self._data(indexes) + return data, 200 diff --git a/events/services/inputs.py b/events/services/inputs.py new file mode 100644 index 0000000..578002e --- /dev/null +++ b/events/services/inputs.py @@ -0,0 +1,3 @@ +def create_input(): pass + +def update_input(): pass diff --git a/events/tests/__init__.py b/events/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/events/tests/test_api.py b/events/tests/test_api.py new file mode 100644 index 0000000..31f0e9b --- /dev/null +++ b/events/tests/test_api.py @@ -0,0 +1,68 @@ +from unittest.mock import patch + +import pytest +from django.urls import reverse +from rest_framework.test import APIClient + +from events.tests.utils import mocked_elk_query_raise_elk_exception, mocked_elk_query_valid, mock_elk_indexes_valid + +mocked_target_elk = 'events.services.elk_string_search.connect_to_elasticsearch_instance' + + +class TestEventsApi: + """Test events API.""" + + user = 
None + + @pytest.fixture(autouse=True) + def setup_test(self, add_user_with_permissions) -> None: + self.user = add_user_with_permissions(username='testuser0', password='Passwd123', is_superuser=True) + self.client = APIClient() + self.client.force_authenticate(user=self.user) + + @pytest.mark.unit + @patch(mocked_target_elk, side_effect=mocked_elk_query_valid) + def test_elk_query_events_api_valid(self, *args): + url = reverse('elk_query_events', args=['1']) + '?from=0&size=1&sort=desc&track_total_hits=true&q=qweqweqwe: q ' + response = self.client.get(url) + data = response.json() + assert response.status_code == 200 + assert 'status' not in data + assert 'error_message' not in data + assert 'hits' in data + assert len(data['hits']['hits']) == 1 + assert data['hits']['total']['value'] == 31 + + @pytest.mark.unit + @patch(mocked_target_elk, side_effect=lambda *args, **kwargs: None) + def test_elk_not_connect_to_elk(self, *args): + url = reverse('elk_query_events', args=['1']) + '?from=0&size=1&sort=desc&track_total_hits=true' + response = self.client.get(url) + assert response.status_code == 400 + data = response.json() + assert 'status' in data + assert 'error_message' in data + assert data['error_message'] == 'Cannot establish the connection to elasticsearch' + assert data['status'] == 'err' + + @pytest.mark.unit + @patch(mocked_target_elk, side_effect=mocked_elk_query_raise_elk_exception) + def test_elk_raise_elk_exception(self, *args): + url = reverse('elk_query_events', args=['3']) + '?from=0&size=1&sort=desc&track_total_hits=true' + response = self.client.get(url) + assert response.status_code == 400 + data = response.json() + assert 'status' in data + assert 'error_message' in data + assert data['error_message'] == ['Test exception'] + assert data['status'] == 'err' + + @pytest.mark.unit + @patch(mocked_target_elk, side_effect=mock_elk_indexes_valid) + def test_get_valid_indexes_elk(self, *args): + url = reverse('elk_all_indexes') + response = self.client.get(url) + assert response.status_code == 200 + data = response.json() + assert data == [{'value': 'aggregated-2022.05.20', 'label': 'aggregated-2022.05.20'}, + {'value': 'aggregated-2022.05.21', 'label': 'aggregated-2022.05.21'}] diff --git a/events/tests/test_services.py b/events/tests/test_services.py new file mode 100644 index 0000000..f3fbdeb --- /dev/null +++ b/events/tests/test_services.py @@ -0,0 +1,111 @@ +from unittest.mock import patch + +import pytest + +from events.constants import ELK_CONNECT_ERROR_JSON +from events.services.elk_string_search import ELKStringQuerySearchService, ELKIndexListService +from events.tests.utils import mocked_elk_query_raise_elk_exception + +mocked_target_elk = 'events.services.elk_string_search.connect_to_elasticsearch_instance' + + +@pytest.mark.unit +class TestELKStringQuerySearchService: + """Test service `ELKStringQuerySearchService`""" + + index = 'aggregated-2022.04.04' + + @pytest.mark.unit + @patch(mocked_target_elk, side_effect=lambda *args, **kwargs: None) + def test_create_elk_is_none(self, *args): + """Test the case when creating the ELK instance returns None""" + service = ELKStringQuerySearchService(index=self.index, query_params={}) + data, status = service.data() + assert status == 400 + assert isinstance(data, dict) + assert data == ELK_CONNECT_ERROR_JSON + + @pytest.mark.unit + @patch(mocked_target_elk) + @pytest.mark.parametrize('page_size, page, check_data', ( + (10, 1, 0), + (10, 2, 10), + (10, 3, 20), + (9, 1, 0), + (9, 2, 9), + (9, 3, 18), + (1, 1, 0), + (1, 2, 1), + (1, 3, 2), + 
+    ))
+    def test_create_pagination(self, mock, page_size: int, page: int, check_data: int):
+        query_params = {'page': page, 'page_size': page_size}
+        service = ELKStringQuerySearchService(index=self.index, query_params=query_params)
+        _from, size = service._pagination()
+        assert _from == check_data
+        assert size == page_size
+
+    @pytest.mark.parametrize('ordering, check_data', (
+        ('event_timestamp', [{'event_timestamp': 'asc'}]),
+        ('-event_timestamp', [{'event_timestamp': 'desc'}]),
+        ('@created', [{'@created': 'asc'}]),
+        ('-@created', [{'@created': 'desc'}]),
+        ('event_severity', [{'event_severity': 'asc'}]),
+        ('-event_severity', [{'event_severity': 'desc'}]),
+        ('event_src_msg', [{'event_src_msg.keyword': 'asc'}]),
+        ('-event_src_msg', [{'event_src_msg.keyword': 'desc'}]),
+        ('sign_name', [{'sign_name.keyword': 'asc'}]),
+        ('-sign_name', [{'sign_name.keyword': 'desc'}]),
+        ('sign_category', [{'sign_category.keyword': 'asc'}]),
+        ('-sign_category', [{'sign_category.keyword': 'desc'}]),
+        ('source_ip', [{'source_ip.keyword': 'asc'}]),
+        ('-source_ip', [{'source_ip.keyword': 'desc'}]),
+        ('destination_ip', [{'destination_ip.keyword': 'asc'}]),
+        ('-destination_ip', [{'destination_ip.keyword': 'desc'}]),
+    ))
+    @patch(mocked_target_elk)
+    def test_drf_ordering_to_sort_elk(self, _, ordering: str, check_data: list):
+        """Text fields are sorted via their `.keyword` sub-field; date and numeric fields as-is."""
+        query_params = {'page': 1, 'page_size': 10, 'ordering': ordering}
+        service = ELKStringQuerySearchService(index=self.index, query_params=query_params)
+        sort_data = service._sorting_data()
+        assert sort_data == check_data
+
+
+class TestELKIndexListService:
+    """Tests for the `ELKIndexListService` service."""
+
+    pattern = 'aggregated-*'
+
+    @pytest.mark.unit
+    @patch(mocked_target_elk)
+    @pytest.mark.parametrize('index, pattern', (
+        ('', '*'),
+        (pattern, pattern),
+        ('aggregated-', pattern),
+        ('*', '*'),
+    ))
+    def test_get_pattern(self, mock, index: str, pattern: str):
+        query_params = {'index': index}
+        service = ELKIndexListService(query_params)
+        data = service._get_pattern()
+        assert data == pattern
+
+    @pytest.mark.unit
+    @patch(mocked_target_elk)
+    @pytest.mark.parametrize('pattern, indexes, check_data', (
+        (pattern, ['aggregated-2022.05.20', 'aggregated-2022.05.21'],
+         [{'value': 'aggregated-2022.05.20', 'label': '2022.05.20'},
+          {'value': 'aggregated-2022.05.21', 'label': '2022.05.21'}]),
+        ('', ['aggregated-2022.05.20', 'aggregated-2022.05.21'],
+         [{'value': 'aggregated-2022.05.20', 'label': 'aggregated-2022.05.20'},
+          {'value': 'aggregated-2022.05.21', 'label': 'aggregated-2022.05.21'}]),
+        (pattern, [], []),
+    ))
+    def test_get_data_mapping(self, mock, pattern: str, indexes: list, check_data: list):
+        query_params = {'index': pattern}
+        service = ELKIndexListService(query_params)
+        data = service._data(indexes)
+        assert data == check_data
diff --git a/events/tests/utils.py b/events/tests/utils.py
new file mode 100644
index 0000000..05eadef
--- /dev/null
+++ b/events/tests/utils.py
@@ -0,0 +1,83 @@
+from elasticsearch import ElasticsearchException
+
+
+def mocked_elk_query_valid(*args, **kwargs):
+    """Mock ELK class with valid data for an ELK search."""
+    class MockELK:
+
+        def search(self, *args, **kwargs) -> dict:
+            return {
+                'took': 0,
+                'timed_out': False,
+                '_shards': {'total': 1, 'successful': 1, 'skipped': 0, 'failed': 0},
+                'hits': {
+                    'total': {'value': 31, 'relation': 'eq'},
+                    'max_score': 1.0,
+                    'hits': [{
+                        '_index': 'aggregated-2022.03.30',
+                        '_type': '_doc',
+                        '_id': '3658582492_8e24a7cb344bcb40451894acef8145a25d56016d18beb35948b58cbb5f84b12b',
+                        '_score': 1.0,
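+                        # Full `_source` payload of the single mocked hit,
+                        # mirroring an aggregated CEF event document.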
+                        '_source': {
+                            'device_version': '3.5',
+                            'sign_subcategory': 'Auth',
+                            'device_product': 'Industrial Firerwall',
+                            'event_src_msg': '<14>CEF:0|InfoWatch ARMA|ARMAIF|3.5|lighttpdaccess|Lighttpd Access|8|rt=1648644419531 deviceFacility=lighttpd dvcpid=79894 src=192.168.2.106 dst=192.168.2.1 requestMethod=GET request=/widgets/api/get.php?load\\=system%2Cgateway%2Cinterfaces&_\\=1582284700985 app=HTTP/1.1 cs1=200 cs2=2425 cs1Label=responseCode cs2Label=bodyLength requestContext=http://192.168.2.1/index.php requestClientApplication=Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0 __line=Feb 21 11:34:33 arma lighttpd[79894]: 192.168.2.106 192.168.2.1 - [21/Feb/2020:11:34:33 +0000] "GET /widgets/api/get.php?load\\=system%2Cgateway%2Cinterfaces&_\\=1582284700985 HTTP/1.1" 200 2425 "http://192.168.2.1/index.php" "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:69.0) Gecko/20100101 Firefox/69.0"',
+                            'device_action': '',
+                            'destination_port': 0,
+                            'destination_user': '',
+                            'event_count': 1,
+                            'event_severity': 8,
+                            'aggregated_id': '3658582492_8e24a7cb344bcb40451894acef8145a25d56016d18beb35948b58cbb5f84b12b',
+                            'Index': 'arma-2022.03.30', 'type': 'armaif_2',
+                            'event_timestamp': '2022-03-30T09:46:59.54129517Z',
+                            'source_ip': '192.168.2.106',
+                            'sign_name': 'Lighttpd Access',
+                            'destination_ip': '192.168.2.1',
+                            'source_port': 0,
+                            'event_first': '2022-03-30T09:46:59.54129517Z',
+                            'source_user': '',
+                            'source_host': '',
+                            'Updated': 0,
+                            'sign_id': 'lighttpdaccess',
+                            'event_protocol': '',
+                            'destination_host': '',
+                            'source_mac': '',
+                            '@created': '2022-03-30T09:47:52.414485952Z',
+                            'rule_tags': ['2'],
+                            'event_last': '2022-03-30T09:46:59.54129517Z',
+                            '@timestamp': '2022-03-30T09:46:59.53439731Z',
+                            'event_id': '6b1c34f6-bb8e-43d5-8bf9-39fcbfd45e09',
+                            'sign_category': 'HTTP',
+                            'event_hash': '8e24a7cb344bcb40451894acef8145a25d56016d18beb35948b58cbb5f84b12b',
+                            'celery_done': True,
+                            'device_vendor': 'InfoWatch ARMA'}
+                    }]
+                }
+            }
+    return MockELK()
+
+
+def mocked_elk_query_raise_elk_exception(*args, **kwargs):
+    """Mock ELK that raises an `ElasticsearchException` on search."""
+    class MockELK:
+
+        def search(self, *args, **kwargs):
+            raise ElasticsearchException('Test exception')
+    return MockELK()
+
+
+def mock_elk_indexes_valid(*args, **kwargs):
+    """Mock ELK exposing a stubbed `indices.get` method."""
+
+    class MockELKIndexes:
+        def get(self, *args) -> dict:
+            return {'aggregated-2022.05.20': 'aggregated-2022.05.20',
+                    'aggregated-2022.05.21': 'aggregated-2022.05.21'}
+
+    class MockELK:
+        indices = MockELKIndexes()
+    return MockELK()
diff --git a/events/views/__init__.py b/events/views/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/events/views/elk_string_query_search_api.py b/events/views/elk_string_query_search_api.py
new file mode 100644
index 0000000..8579061
--- /dev/null
+++ b/events/views/elk_string_query_search_api.py
@@ -0,0 +1,28 @@
+import json
+import logging
+
+from rest_framework.response import Response
+from rest_framework.views import APIView
+
+from events.services.elk_string_search import ELKStringQuerySearchService, ELKIndexListService
+
+_log = logging.getLogger(__name__)
+
+
+class ELKStingSearchApiView(APIView):
+    """API for getting data from ELK by index."""
+
+    def get(self, request, index: str, *args, **kwargs) -> Response:
+        # Copy the query params out of the immutable QueryDict.
+        query_params = json.loads(json.dumps(request.GET))
+        _log.debug('Start elastic request.')
+        data, status = ELKStringQuerySearchService(index=index, query_params=query_params).data()
+        return Response(data,
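+                        # `status` comes from the service tuple: 200 on success,
+                        # 400 on connection or query errors.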
status) + + +class ELKIndexListApiView(APIView): + """Api for getting index list from ELK by pattern or all""" + + def get(self, request) -> Response: + query_params = json.loads(json.dumps(request.GET)) + data, status = ELKIndexListService(query_params).data() + return Response(data, status) diff --git a/finalschemaAPI.yaml b/finalschemaAPI.yaml new file mode 100644 index 0000000..2d1728b --- /dev/null +++ b/finalschemaAPI.yaml @@ -0,0 +1,2082 @@ +openapi: 3.0.3 +info: + title: ARMA Management Console API + version: 1.4.0 + description: ARMA Management Console API + +externalDocs: + description: Дополнительные соглашения используемые при разработке спецификации + url: https://iwarma.atlassian.net/wiki/spaces/ARMA/pages/25362443/ARMA+AMC+API + +servers: + - url: http://localhost:9090/ + +security: + - token_auth: [] + +tags: + - name: assets + description: "All for assets" + +paths: + /api/incidents/: + get: + description: |- + Показать список инцидентов + + Требуемое права: `can_view_incidents_list` + tags: + - incident + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + - $ref: "#/components/parameters/assets" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/Incident" + 403: + $ref: "#/components/responses/403" + /api/incidents/{uuid}: + parameters: + - $ref: "#/components/parameters/uuidParam" + get: + description: |- + Подробная информация о инциденте + + Требуемые права: `can_view_incidents` + tags: + - incident + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/Incident" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + patch: + description: |- + Обновление данных инцидента + + Требуемые права: `can_view_incidents` + tags: + - incident + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/IncidentEdit" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/Incident" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/incidents/csv_export/: + get: + description: |- + Экспорт данных инцидентов в CSV формате + Требуемые права: `can_export_incidents_list` + tags: + - incident + responses: + 200: + $ref: "#/components/responses/200_celery" + 403: + $ref: "#/components/responses/403" + /api/incident/category/: + get: + description: |- + Получить список категорий инцидентов + + Требуемые права: `can_view_incidents_list` + tags: + - incident + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + $ref: "#/components/responses/403" + /en/api/incident/effects/: + get: + description: |- + Показать список эффектов инцидента + tags: + - incident + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - 
properties: + results: + items: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + $ref: "#/components/responses/403" + post: + description: Добавить эффект для инцидента + tags: + - incident + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + /en/api/incident/effects/{id}: + get: + description: "Получить эффект для инцидента по его ID" + tags: + - incident + parameters: + - $ref: '#/components/parameters/idParam' + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + description: Forbidden + content: + application/json: + schema: + properties: + detail: + type: string + example: + detail: "Учетные данные не были предоставлены." + 404: + $ref: "#/components/responses/404" + patch: + description: Редактировать эффект инцидента + tags: + - incident + parameters: + - $ref: "#/components/parameters/idParam" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + delete: + description: Удалить эффект инцидента + tags: + - incident + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "Успешное удаление" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /en/api/incident/recommendations/: + get: + description: "Получить список рекоммендаций для инцидентов" + tags: + - incident + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/IncidentRecommendations" + 403: + description: Forbidden + content: + application/json: + schema: + properties: + detail: + type: string + example: + detail: "Учетные данные не были предоставлены." 
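+      # The POST below accepts a NameDescriptionModel body and returns the
+      # created recommendation in the same shape.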
+ post: + description: Добавить рекоммендацию для инцидента + tags: + - incident + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + /en/api/incident/recommendations/{id}: + get: + description: "Получить рекоммендацию для инцидента по его ID" + tags: + - incident + parameters: + - $ref: '#/components/parameters/idParam' + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + description: Forbidden + content: + application/json: + schema: + properties: + detail: + type: string + example: + detail: "Учетные данные не были предоставлены." + 404: + $ref: "#/components/responses/404" + patch: + description: Редактировать рекоммендацию инцидента + tags: + - incident + parameters: + - $ref: "#/components/parameters/idParam" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + delete: + description: Удалить рекоммендацию инцидента + tags: + - incident + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "Успешное удаление" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/groups: + get: + description: Получить список групп ассетов + tags: + - asset groups + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/AssetGroup" + 403: + $ref: "#/components/responses/403" + 405: + $ref: "#/components/responses/405" + 400: + description: "" + content: + application/json: + examples: + invalid_page: + $ref: "#/components/examples/invalid_page" + invalid_query_parameter: + $ref: "#/components/examples/invalid_query_parameter" + post: + description: Добавить производителя + tags: + - asset groups + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/AssetGroup" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/AssetGroup" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/groups/{id}/: + get: + description: Получить производителя по его ID + tags: + - asset groups + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/AssetGroup" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + patch: + description: Редактировать производителя по 
его ID + tags: + - asset groups + parameters: + - $ref: "#/components/parameters/idParam" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/AssetGroup" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/AssetGroup" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + delete: + description: Удалить производителя по его ID + tags: + - asset groups + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/manufacturers: + get: + description: Получить список доступных производителей + tags: + - asset manufacturer + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + $ref: "#/components/responses/403" + 405: + $ref: "#/components/responses/405" + 400: + description: "" + content: + application/json: + examples: + invalid_page: + $ref: "#/components/examples/invalid_page" + invalid_query_parameter: + $ref: "#/components/examples/invalid_query_parameter" + post: + description: Доавить производителя + tags: + - asset manufacturer + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/manufacturer/{id}/: + get: + description: Получить производителя по его ID + tags: + - asset manufacturer + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + patch: + description: Редактировать производителя по его ID + tags: + - asset manufacturer + parameters: + - $ref: "#/components/parameters/idParam" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + delete: + description: Удалить производителя по его ID + tags: + - asset manufacturer + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: 
"#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/os: + get: + description: Получить список доступных операционных систем + tags: + - os + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + $ref: "#/components/responses/403" + 405: + $ref: "#/components/responses/405" + 400: + description: "" + content: + application/json: + examples: + invalid_page: + $ref: "#/components/examples/invalid_page" + invalid_query_parameter: + $ref: "#/components/examples/invalid_query_parameter" + post: + description: Доавить ОС + tags: + - os + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/os/{id}/: + get: + description: Получить Операционную систему по ее ID + tags: + - os + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + patch: + description: Редактировать ОС по ее ID + tags: + - os + parameters: + - $ref: "#/components/parameters/idParam" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/NameDescriptionModel" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + delete: + description: Удалить ОС по ее ID + tags: + - os + parameters: + - $ref: "#/components/parameters/idParam" + responses: + 200: + description: "" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/elements/: + get: + description: |- + Show Active List. + Permisions requred: + - "Can view list active"; + tags: + - assets + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + - $ref: "#/components/parameters/incidents" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/AssetList" + 403: + $ref: "#/components/responses/403" + 405: + $ref: "#/components/responses/405" + 400: + description: "" + content: + application/json: + examples: + invalid_page: + $ref: "#/components/examples/invalid_page" + invalid_query_parameter: + $ref: "#/components/examples/invalid_query_parameter" + /api/assets/elements/{id}/: + get: + description: |- + Получение полной информации о запрашиваемом активе. 
В том числе: + - Связные инциденты + Связные модели возвращаются ввиде полной информации об объекте + + + Permisions requred: + - 'Can view active' + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - assets + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/AssetDetail" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + patch: + description: |- + Patch Asset. + + Permisions requred: + - 'Can view active' + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - assets + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/AssetPatch" + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/AssetBase" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + delete: + description: |- + Delete Asset. + + Permisions requred: + - 'Can delete active' + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - assets + responses: + 200: + $ref: "#/components/responses/200_no_content" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/assets/elements/authorize_assets/: + post: + tags: + - assets + requestBody: + content: + application/json: + schema: + properties: + selected_assets: + type: array + items: + type: integer + minimum: 1 + responses: + 200: + description: "" + content: + application/json: + example: + status: ok + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + /api/users/: + get: + description: "Получить список не удаленных пользователей (пользователей, у которых в имени не присутсвует префикс deleted_*" + tags: + - users + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/UserInfo" + 403: + $ref: "#/components/responses/403" + 405: + $ref: "#/components/responses/405" + 400: + description: "" + content: + application/json: + examples: + invalid_page: + $ref: "#/components/examples/invalid_page" + invalid_query_parameter: + $ref: "#/components/examples/invalid_query_parameter" + post: + description: "Создание нового пользователя" + tags: + - users + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/UserInfo' + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/UserInfo" + 403: + $ref: "#/components/responses/403" + 405: + $ref: "#/components/responses/405" + 400: + description: "" + content: + application/json: + examples: + invalid_page: + $ref: "#/components/examples/invalid_page" + invalid_query_parameter: + $ref: "#/components/examples/invalid_query_parameter" + /api/users/{id}: + get: + description: "Получение информации о пользователе по его ID" + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - users + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/UserInfo" + 403: + $ref: 
"#/components/responses/403" + 404: + $ref: "#/components/responses/404" + patch: + description: "Редактирование информации о пользователе по его ID" + parameters: + - $ref: "#/components/parameters/idParam" + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/UserInfo' + tags: + - users + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/UserInfo" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + 400: + description: "" + content: + application/json: + examples: + invalid_body: + $ref: "#/components/examples/invalid_body" + delete: + description: "Удалить пользователя (Переименовывание логина пользователя + изменение состояния is_active -> False) по его ID." + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - users + responses: + 200: + $ref: "#/components/responses/200_no_content" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/store/: + get: + description: "Список файлов в хранилище" + tags: + - store + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: "" + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: "#/components/schemas/StorageList" + 403: + $ref: "#/components/responses/403" + 400: + description: "" + content: + application/json: + examples: + invalid_page: + $ref: "#/components/examples/invalid_page" + invalid_query_parameter: + $ref: "#/components/examples/invalid_query_parameter" + /api/store/{id}/: + get: + description: |- + Получения полной информации об одном файле + + Требуемые права: + - 'Can view storage' + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - store + responses: + 200: + description: "" + content: + application/json: + schema: + $ref: "#/components/schemas/Storage" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + delete: + description: 'Удаления файла' + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - store + responses: + 200: + $ref: "#/components/responses/200_no_content" + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /api/store/{id}/download/: + get: + description: 'Скачивание файла' + parameters: + - $ref: "#/components/parameters/idParam" + tags: + - store + responses: + 200: + description: Возвращает файл + 403: + $ref: "#/components/responses/403" + 404: + $ref: "#/components/responses/404" + /en/api/settings/events/set/: + post: + operationId: api_settings_events_set_create + description: | + Permisions requred: + - can_change_rotation_settings + parameters: + - in: query + name: format + schema: + type: string + enum: + - datatables + - json + tags: + - settings + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RotationSettings' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/RotationSettings' + multipart/form-data: + schema: + $ref: '#/components/schemas/RotationSettings' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/RotationSettings' + description: '' + /en/api/settings/incidents/set/: + post: + operationId: api_settings_incidents_set_create + description: | + Permisions requred: + - can_change_rotation_settings + parameters: + - in: query + 
name: format + schema: + type: string + enum: + - datatables + - json + tags: + - settings + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/RotationSettings' + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/RotationSettings' + multipart/form-data: + schema: + $ref: '#/components/schemas/RotationSettings' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/RotationSettings' + description: '' + /api/incident_export/receivers/opcua/: + get: + tags: + - export + description: Список получателей (по протоколу OPCUA) сообщений об инцидентах + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: '' + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: '#/components/schemas/OPCUAReceiver' + 403: + $ref: '#/components/responses/403' + post: + tags: + - export + description: Создание получателя событий об инцидентах по протоколу OPCUA + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OPCUAReceiver' + responses: + 201: + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/OPCUAReceiver' + /api/incident_export/receivers/opcua/{id}/: + parameters: + - $ref: '#/components/parameters/idParam' + get: + tags: + - export + responses: + 200: + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/OPCUAReceiver' + 404: + $ref: '#/components/responses/404' + 403: + $ref: '#/components/responses/403' + patch: + tags: + - export + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/OPCUAReceiver' + responses: + 200: + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/OPCUAReceiver' + 404: + $ref: '#/components/responses/404' + 403: + $ref: '#/components/responses/403' + delete: + tags: + - export + responses: + 404: + $ref: '#/components/responses/404' + 403: + $ref: '#/components/responses/403' + /api/incident_export/receivers/syslog/: + get: + tags: + - export + description: Список получателей (по протоколу syslog) сообщений об инцидентах + parameters: + - $ref: "#/components/parameters/page" + - $ref: "#/components/parameters/page_size" + responses: + 200: + description: '' + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Pagination" + - properties: + results: + items: + $ref: '#/components/schemas/SyslogReceiver' + post: + tags: + - export + description: Создание получателя событий об инцидентах по протоколу syslog + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/SyslogReceiver' + responses: + 201: + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/SyslogReceiver' + 403: + $ref: '#/components/responses/403' + /api/incident_export/receivers/syslog/{id}/: + parameters: + - $ref: '#/components/parameters/idParam' + get: + tags: + - export + responses: + 200: + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/SyslogReceiver' + 404: + $ref: '#/components/responses/404' + patch: + tags: + - export + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/SyslogReceiver' + responses: + 200: + description: '' + content: + application/json: + schema: + $ref: '#/components/schemas/SyslogReceiver' 
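+          # A successful PATCH echoes the updated SyslogReceiver; the error
+          # cases below reuse the shared 403/404 component responses.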
+ 404: + $ref: '#/components/responses/404' + 403: + $ref: '#/components/responses/403' + delete: + tags: + - export + responses: + 404: + $ref: '#/components/responses/404' + 403: + $ref: '#/components/responses/403' + +components: + securitySchemes: + token_auth: + type: apiKey + in: header + name: Authorization + description: Для использования API необходимо в header запроса положить токен авторизации пользователя в виде "Token ХХХХХХХ" + responses: + 200_celery: + description: Задача celery успешно запущена + content: + application/json: + schema: + $ref: "#/components/schemas/Celery" + 200_no_content: + description: No content + 403: + description: Forbidden + content: + application/json: + schema: + properties: + detail: + type: string + example: + detail: "Учетные данные не были предоставлены." + 404: + description: Not found + content: + application/json: + schema: + properties: + detail: + type: string + example: + detail: "Не найдено." + 405: + description: Method not allowed + content: + application/json: + schema: + properties: + detail: + type: string + example: + detail: "Метод 'PATCH' не разрешен." + examples: + invalid_page: + summary: Ошибка при указании номера страницы + value: + detail: "Неправильная страница" + invalid_query_parameter: + description: Ключ в ответе обозначает параметр, в котором допущена ошибка. Значение это список строк описывающих ошибки + summary: Ошибка при указании query параметра + value: + incidents: + - "Введите правильный UUID." + invalid_body: + description: Каждый ключ в ответе обозначает поле, в котором допущена ошибка. Значение ключа это список строк описывающих ошибки + summary: Ошибка при валидации данных + value: + field: + - "Недопустимый первичный ключ - объект не существует." + parameters: + idParam: + name: id + in: path + description: 'Идентификатор "Primary key" инстанса модели. >= 1' + required: true + schema: + type: integer + minimum: 1 + example: 17654 + uuidParam: + name: uuid + in: path + description: "Instance uuid Primary Key" + required: true + schema: + type: string + page: + name: page + in: query + description: "Page number" + schema: + type: integer + page_size: + name: page_size + in: query + description: "Maximum page size" + schema: + type: integer + incidents: + name: incidents + in: query + description: "Фильтрация списка по переданному uuid инцидента" + schema: + type: string + assets: + name: assets + in: query + description: Фильтрация списка инцидентов по переданному `id` актива как по одному `1` так и списком `1,2,3` + schema: + type: string + example: 1,2,3 + schemas: + Pagination: + properties: + count: + type: integer + example: 123 + next: + type: string + nullable: true + previous: + type: string + nullable: true + results: + type: array + items: + type: object + AssetType: + type: string + enum: + - user + - arma_industrial_firewall + - plc + - pc + - server + - network_device + nullable: True + AssetStatus: + type: integer + enum: + - 0 + - 1 + nullable: true + description: "Статус актива: 1 - разрешенный, 0 - недоверенный" + NameDescriptionModel: + properties: + id: + type: integer + readOnly: true + name: + type: string + maxLength: 128 + description: + type: string + nullable: true + AssetBase: + properties: + id: + type: integer + readOnly: true + name: + type: string + maxLength: 128 + asset_type: + $ref: "#/components/schemas/AssetType" + status: + $ref: "#/components/schemas/AssetStatus" + ip: + type: string + description: Asset's IPv4 or IPv6 address. 
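+          # `ip` stays a plain string so both IPv4 and IPv6 values validate.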
+ updated: + type: string + format: date-time + readOnly: true + AssetGroup: + allOf: + - $ref: "#/components/schemas/NameDescriptionModel" + - properties: + collapsed: + type: boolean + description: Схлопнута ли данная группа на карте сети + default: false + Asset: + allOf: + - $ref: "#/components/schemas/AssetBase" + - properties: + description: + type: string + nullable: true + sensor: + type: string + maxLength: 128 + nullable: true + model: + description: Asset model + type: string + nullable: true + mac: + type: string + nullable: true + description: Asset's MAC address + maxLength: 17 + ports: + type: array + nullable: true + items: + type: integer + description: List of open ports + AssetList: + allOf: + - $ref: "#/components/schemas/AssetBase" + - properties: + incidents: + type: integer + readOnly: true + description: Count of incidents + AssetDetail: + allOf: + - $ref: "#/components/schemas/Asset" + - properties: + os: + $ref: "#/components/schemas/Os" + group: + $ref: "#/components/schemas/AssetGroup" + manufacturer: + $ref: "#/components/schemas/AssetManufacturer" + incidents: + type: array + items: + $ref: "#/components/schemas/Incident" + AssetPatch: + allOf: + - $ref: "#/components/schemas/Asset" + - properties: + os: + type: integer + nullable: true + minimum: 1 + description: Operating system primary key + group: + type: integer + nullable: true + minimum: 1 + description: Asset group primary key + manufacturer: + type: integer + nullable: true + minimum: 1 + description: Asset manufacturer primary key + AssetManufacturer: + properties: + id: + type: integer + readOnly: true + name: + type: string + maxLength: 128 + description: + type: string + nullable: true + description: Description + required: + - id + - name + nullable: true + Os: + properties: + id: + type: integer + readOnly: true + description: + type: string + nullable: true + description: Description + name: + type: string + maxLength: 128 + required: + - id + - name + nullable: true + example: + id: 2345 + description: MS Windows + name: Windows + Incident: + type: object + properties: + user_friendly_id: + type: integer + maximum: 2147483647 + minimum: 0 + nullable: true + readOnly: true + timestamp: + type: string + format: date-time + description: Date and time, when incident occurs + incident_id: + type: string + format: uuid + title: ID + description: ID of incident + title: + type: string + description: Title + maxLength: 128 + category: + $ref: "#/components/schemas/IncidentCategory" + importance: + type: integer + maximum: 100 + minimum: 0 + description: Hazard level of incident + status: + $ref: "#/components/schemas/IncidentStatusEnum" + assigned_to: + type: string + readOnly: true + event_count: + type: integer + maximum: 2147483647 + minimum: 1 + title: Event's number + description: Amount of events in the incident + created: + type: string + format: date-time + readOnly: true + description: Date and time, when incident was created + updated: + type: string + format: date-time + readOnly: true + description: Date and time, when incident was updated + events: + type: array + items: + $ref: "#/components/schemas/Event" + deadline: + type: string + format: date-time + readOnly: true + description: Deadline. 
Deadline when incident must be resolved + comment: + type: string + readOnly: true + description: Incident comment + nullable: true + required: + - assigned_to + - category + - created + - event_count + - events + - importance + - title + - updated + IncidentCategory: + type: object + properties: + name: + type: string + maxLength: 128 + description: + type: string + nullable: true + description: Description + id: + type: integer + readOnly: true + required: + - id + - name + IncidentStatusEnum: + enum: + - 0 + - 1 + - 2 + - 3 + - 4 + description: + 0 - Not assigned (Не назначен) + 1 - Assigned (Назначен) + 2 - Delayed (Отложен) + 3 - Resolved (Решен) + 4 - False_alarm (Ложное срабатывание) + type: integer + IncidentEdit: + type: object + properties: + status: + $ref: "#/components/schemas/IncidentStatusEnum" + deadline: + type: string + format: date-time + description: Deadline. Метка времени, до которого инцидент должен быть решен + comment: + type: string + description: Комментарии к инциденту + nullable: true + category: + type: integer + assigned_to: + type: integer + minimum: 0 + Celery: + type: object + properties: + task_id: + type: string + format: uuid + readOnly: true + finished: + type: boolean + result: + type: integer + minimum: 0 + readOnly: true + IncidentEffect: + type: object + properties: + name: + type: string + maxLength: 128 + description: + type: string + nullable: true + description: Description + id: + type: integer + readOnly: true + required: + - id + - name + IncidentRecommendations: + type: object + properties: + name: + type: string + maxLength: 128 + description: + type: string + nullable: true + description: Description + id: + type: integer + readOnly: true + required: + - id + - name + Event: + type: object + properties: + type: + type: string + Index: + type: string + sign_id: + type: string + "@created": + type: string + event_id: + type: string + rule_tags: + type: string + format: nullable + sign_name: + type: string + source_ip: + type: string + "@timestamp": + type: string + event_hash: + type: string + event_last: + type: string + source_mac: + type: string + celery_done: + type: boolean + event_count: + type: integer + format: int32 + event_first: + type: string + source_host: + type: string + source_port: + type: integer + format: int32 + source_user: + type: string + aggregated_id: + type: string + device_action: + type: string + device_vendor: + type: string + event_src_msg: + type: string + sign_category: + type: string + destination_ip: + type: string + device_product: + type: string + device_version: + type: string + event_protocol: + type: string + event_severity: + type: integer + format: int32 + event_timestamp: + type: string + destination_host: + type: string + destination_port: + type: integer + format: int32 + destination_user: + type: string + sign_subcategory: + type: string + UserInfo: + properties: + user: + $ref: '#/components/schemas/User' + comment: + type: string + example: asdasd + timezone: + type: string + example: "Europe/Moscow" + expire_date: + type: string + format: nullable + modified: + readOnly: true + type: string + example: "2022-04-13T14:49:41.810932+03:00" + User: + type: object + properties: + id: + readOnly: true + type: integer + format: int32 + example: 3 + username: + type: string + example: new_user_2 + password: + description: "В случае если не нужно менять пароль - данное поле не стоит включать в body" + writeOnly: true + type: string + first_name: + type: string + example: asdasd + is_active: + type: 
boolean + email: + type: string + example: asdf@mail.ru + StorageTypeEnum: + type: integer + enum: [0, 1, 2, 3, 4] + description: Тип файла в хранилище. + 0 - Unknown + 1 - DB dump + 2 - CSV export + 3 - JSON export + 4 - ClamAV updates + default: 0 + StorageFormatEnum: + type: string + enum: ['Unknown','Plain','JSON','CSV','ZIP'] + description: Формат файла в хранилище. + default: 'Unknown' + StorageList: + properties: + id: + type: integer + minimum: 1 + format: + $ref: '#/components/schemas/StorageFormatEnum' + size: + type: integer + description: + type: string + created: + type: string + format: date-time + Storage: + allOf: + - $ref: '#/components/schemas/StorageList' + - properties: + last_access: + type: string + format: date-time + type: + $ref: '#/components/schemas/StorageTypeEnum' + crc: + type: object + properties: + crc: + type: string + type: + type: string + enum: ['sha256sum', 'sha512sum'] + RotationSettings: + type: object + properties: + rotation_type: + $ref: '#/components/schemas/RotationTypeEnum' + size_rotation: + type: integer + schedule: + $ref: '#/components/schemas/Schedule' + required: + - rotation_type + - schedule + - size_rotation + RotationTypeEnum: + enum: + - 0 + - 1 + - 2 + type: integer + Schedule: + type: object + properties: + period: + $ref: '#/components/schemas/PeriodEnum' + time: + type: string + format: time + week_day: + type: array + items: + $ref: '#/components/schemas/WeekDayEnum' + month: + type: array + items: + $ref: '#/components/schemas/MonthEnum' + required: + - period + PeriodEnum: + enum: + - day + - week + - month + type: string + WeekDayEnum: + enum: + - sunday + - monday + - tuesday + - wednesday + - thursday + - friday + - saturday + type: string + MonthEnum: + enum: + - january + - february + - march + - april + - may + - june + - july + - august + - september + - october + - november + - december + type: string + SyslogProtocol: + type: string + enum: + - UDP + - TCP + default: UDP + TypeReceiver: + type: string + enum: + - OPC_UA + - syslog + SeverityLevel: + type: integer + enum: + - 0 + - 10 + - 40 + - 70 + - 90 + - 100 + description: | + Уровень критичности инцидента. 
+ + 0 - Minimal severity, + 10 - Info severity, + 40 - Low severity, + 70 - Medium severity, + 90 - High severity, + 100 - Critical severity + default: 0 + BaseReceiver: + properties: + id: + type: integer + readOnly: true + type: + $ref: '#/components/schemas/TypeReceiver' + cef_format: + type: boolean + default: true + message_filter: + type: string + default: '' + message_min_severity: + $ref: '#/components/schemas/SeverityLevel' + export_status: + type: boolean + default: true + OPCUAReceiver: + properties: + id: + type: integer + readOnly: true + host: + type: string + port: + type: integer + node_number: + type: integer + receiver: + $ref: '#/components/schemas/BaseReceiver' + SyslogReceiver: + properties: + id: + type: integer + readOnly: true + host: + type: string + port: + type: integer + protocol: + $ref: '#/components/schemas/SyslogProtocol' + receiver: + $ref: '#/components/schemas/BaseReceiver' diff --git a/frontend/.eslintignore b/frontend/.eslintignore new file mode 100644 index 0000000..dace482 --- /dev/null +++ b/frontend/.eslintignore @@ -0,0 +1,4 @@ +/public +/src/react-app-env.d.ts +.eslintrc.js +.prettierrc.js diff --git a/frontend/.eslintrc.js b/frontend/.eslintrc.js new file mode 100644 index 0000000..50d6769 --- /dev/null +++ b/frontend/.eslintrc.js @@ -0,0 +1 @@ +module.exports = require('./eslint.config'); \ No newline at end of file diff --git a/frontend/.gitignore b/frontend/.gitignore new file mode 100644 index 0000000..10c60d5 --- /dev/null +++ b/frontend/.gitignore @@ -0,0 +1,6 @@ +node_modules +yarn.lock +.idea +yarn-error.log +/build +/coverage diff --git a/frontend/.npmrc b/frontend/.npmrc new file mode 100644 index 0000000..e827a55 --- /dev/null +++ b/frontend/.npmrc @@ -0,0 +1 @@ +registry=http://nexus.iwarma.ru/repository/proxy-npm/ diff --git a/frontend/.prettierignore b/frontend/.prettierignore new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/frontend/.prettierignore @@ -0,0 +1 @@ + diff --git a/frontend/.prettierrc.js b/frontend/.prettierrc.js new file mode 100644 index 0000000..7d66049 --- /dev/null +++ b/frontend/.prettierrc.js @@ -0,0 +1 @@ +module.exports = require('./prettier.config'); \ No newline at end of file diff --git a/frontend/README.md b/frontend/README.md new file mode 100644 index 0000000..98f963d --- /dev/null +++ b/frontend/README.md @@ -0,0 +1,25 @@ +## Доступные скрипты в проекте: + +### `Запуск приложения` + +`npm start` Запускает приложение в режиме разработки.\ +Откройте [http://localhost:3000](http://localhost:3000), чтобы просмотреть его в браузере. + +Страница перезагрузится, если вы внесете изменения.\ +Вы также увидите любые ошибки lint в консоли. + +### `Запуск тестов` + +Для запуска тестов используется команда `npm test`, \ +после запуска команды нажать "а", это запустит все тесты + +### `Сборка приложения` + +Команда `npm build` Создает приложение для деплоя в папке `build`.\ +Он правильно объединяет React, и оптимизирует сборку для достижения наилучшей производительности. + +Сборка минимизирована, а имена файлов включают хэши.\ +Ваше приложение готово к развертыванию! 
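+
+To smoke-test the production bundle locally, one option (assuming the
+third-party `serve` package, which is not part of this project) is to serve
+the `build` folder as a static site:
+
+    npx serve -s build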
+ +## Генерация типов для TS из схемы .yaml: + npx swagger-typescript-api -p ./finalschemaAPI.yaml -o ./src -n myApi.ts \ No newline at end of file diff --git a/frontend/config/env.js b/frontend/config/env.js new file mode 100644 index 0000000..ffa7e49 --- /dev/null +++ b/frontend/config/env.js @@ -0,0 +1,104 @@ +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const paths = require('./paths'); + +// Make sure that including paths.js after env.js will read .env variables. +delete require.cache[require.resolve('./paths')]; + +const NODE_ENV = process.env.NODE_ENV; +if (!NODE_ENV) { + throw new Error( + 'The NODE_ENV environment variable is required but was not specified.' + ); +} + +// https://github.com/bkeepers/dotenv#what-other-env-files-can-i-use +const dotenvFiles = [ + `${paths.dotenv}.${NODE_ENV}.local`, + // Don't include `.env.local` for `test` environment + // since normally you expect tests to produce the same + // results for everyone + NODE_ENV !== 'test' && `${paths.dotenv}.local`, + `${paths.dotenv}.${NODE_ENV}`, + paths.dotenv, +].filter(Boolean); + +// Load environment variables from .env* files. Suppress warnings using silent +// if this file is missing. dotenv will never modify any environment variables +// that have already been set. Variable expansion is supported in .env files. +// https://github.com/motdotla/dotenv +// https://github.com/motdotla/dotenv-expand +dotenvFiles.forEach(dotenvFile => { + if (fs.existsSync(dotenvFile)) { + require('dotenv-expand')( + require('dotenv').config({ + path: dotenvFile, + }) + ); + } +}); + +// We support resolving modules according to `NODE_PATH`. +// This lets you use absolute paths in imports inside large monorepos: +// https://github.com/facebook/create-react-app/issues/253. +// It works similar to `NODE_PATH` in Node itself: +// https://nodejs.org/api/modules.html#modules_loading_from_the_global_folders +// Note that unlike in Node, only *relative* paths from `NODE_PATH` are honored. +// Otherwise, we risk importing Node.js core modules into an app instead of webpack shims. +// https://github.com/facebook/create-react-app/issues/1023#issuecomment-265344421 +// We also resolve them to make sure all tools using them work consistently. +const appDirectory = fs.realpathSync(process.cwd()); +process.env.NODE_PATH = (process.env.NODE_PATH || '') + .split(path.delimiter) + .filter(folder => folder && !path.isAbsolute(folder)) + .map(folder => path.resolve(appDirectory, folder)) + .join(path.delimiter); + +// Grab NODE_ENV and REACT_APP_* environment variables and prepare them to be +// injected into the application via DefinePlugin in webpack configuration. +const REACT_APP = /^REACT_APP_/i; + +function getClientEnvironment(publicUrl) { + const raw = Object.keys(process.env) + .filter(key => REACT_APP.test(key)) + .reduce( + (env, key) => { + env[key] = process.env[key]; + return env; + }, + { + // Useful for determining whether we’re running in production mode. + // Most importantly, it switches React into the correct mode. + NODE_ENV: process.env.NODE_ENV || 'development', + // Useful for resolving the correct path to static assets in `public`. + // For example, . + // This should only be used as an escape hatch. Normally you would put + // images into the `src` and `import` them in code to get their paths. + PUBLIC_URL: publicUrl, + // We support configuring the sockjs pathname during development. + // These settings let a developer run multiple simultaneous projects. 
+ // They are used as the connection `hostname`, `pathname` and `port` + // in webpackHotDevClient. They are used as the `sockHost`, `sockPath` + // and `sockPort` options in webpack-dev-server. + WDS_SOCKET_HOST: process.env.WDS_SOCKET_HOST, + WDS_SOCKET_PATH: process.env.WDS_SOCKET_PATH, + WDS_SOCKET_PORT: process.env.WDS_SOCKET_PORT, + // Whether or not react-refresh is enabled. + // It is defined here so it is available in the webpackHotDevClient. + FAST_REFRESH: process.env.FAST_REFRESH !== 'false', + } + ); + // Stringify all values so we can feed into webpack DefinePlugin + const stringified = { + 'process.env': Object.keys(raw).reduce((env, key) => { + env[key] = JSON.stringify(raw[key]); + return env; + }, {}), + }; + + return { raw, stringified }; +} + +module.exports = getClientEnvironment; diff --git a/frontend/config/getHttpsConfig.js b/frontend/config/getHttpsConfig.js new file mode 100644 index 0000000..013d493 --- /dev/null +++ b/frontend/config/getHttpsConfig.js @@ -0,0 +1,66 @@ +'use strict'; + +const fs = require('fs'); +const path = require('path'); +const crypto = require('crypto'); +const chalk = require('react-dev-utils/chalk'); +const paths = require('./paths'); + +// Ensure the certificate and key provided are valid and if not +// throw an easy to debug error +function validateKeyAndCerts({ cert, key, keyFile, crtFile }) { + let encrypted; + try { + // publicEncrypt will throw an error with an invalid cert + encrypted = crypto.publicEncrypt(cert, Buffer.from('test')); + } catch (err) { + throw new Error( + `The certificate "${chalk.yellow(crtFile)}" is invalid.\n${err.message}` + ); + } + + try { + // privateDecrypt will throw an error with an invalid key + crypto.privateDecrypt(key, encrypted); + } catch (err) { + throw new Error( + `The certificate key "${chalk.yellow(keyFile)}" is invalid.\n${ + err.message + }` + ); + } +} + +// Read file and throw an error if it doesn't exist +function readEnvFile(file, type) { + if (!fs.existsSync(file)) { + throw new Error( + `You specified ${chalk.cyan( + type + )} in your env, but the file "${chalk.yellow(file)}" can't be found.` + ); + } + return fs.readFileSync(file); +} + +// Get the https config +// Return cert files if provided in env, otherwise just true or false +function getHttpsConfig() { + const { SSL_CRT_FILE, SSL_KEY_FILE, HTTPS } = process.env; + const isHttps = HTTPS === 'true'; + + if (isHttps && SSL_CRT_FILE && SSL_KEY_FILE) { + const crtFile = path.resolve(paths.appPath, SSL_CRT_FILE); + const keyFile = path.resolve(paths.appPath, SSL_KEY_FILE); + const config = { + cert: readEnvFile(crtFile, 'SSL_CRT_FILE'), + key: readEnvFile(keyFile, 'SSL_KEY_FILE'), + }; + + validateKeyAndCerts({ ...config, keyFile, crtFile }); + return config; + } + return isHttps; +} + +module.exports = getHttpsConfig; diff --git a/frontend/config/jest/babelTransform.js b/frontend/config/jest/babelTransform.js new file mode 100644 index 0000000..5b391e4 --- /dev/null +++ b/frontend/config/jest/babelTransform.js @@ -0,0 +1,29 @@ +'use strict'; + +const babelJest = require('babel-jest').default; + +const hasJsxRuntime = (() => { + if (process.env.DISABLE_NEW_JSX_TRANSFORM === 'true') { + return false; + } + + try { + require.resolve('react/jsx-runtime'); + return true; + } catch (e) { + return false; + } +})(); + +module.exports = babelJest.createTransformer({ + presets: [ + [ + require.resolve('babel-preset-react-app'), + { + runtime: hasJsxRuntime ? 
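+          // Use the automatic JSX runtime when react/jsx-runtime resolves,
+          // otherwise fall back to the classic transform.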
diff --git a/frontend/config/jest/babelTransform.js b/frontend/config/jest/babelTransform.js
new file mode 100644
index 0000000..5b391e4
--- /dev/null
+++ b/frontend/config/jest/babelTransform.js
@@ -0,0 +1,29 @@
+'use strict';
+
+const babelJest = require('babel-jest').default;
+
+const hasJsxRuntime = (() => {
+  if (process.env.DISABLE_NEW_JSX_TRANSFORM === 'true') {
+    return false;
+  }
+
+  try {
+    require.resolve('react/jsx-runtime');
+    return true;
+  } catch (e) {
+    return false;
+  }
+})();
+
+module.exports = babelJest.createTransformer({
+  presets: [
+    [
+      require.resolve('babel-preset-react-app'),
+      {
+        runtime: hasJsxRuntime ? 'automatic' : 'classic',
+      },
+    ],
+  ],
+  babelrc: false,
+  configFile: false,
+});
diff --git a/frontend/config/jest/cssTransform.js b/frontend/config/jest/cssTransform.js
new file mode 100644
index 0000000..8f65114
--- /dev/null
+++ b/frontend/config/jest/cssTransform.js
@@ -0,0 +1,14 @@
+'use strict';
+
+// This is a custom Jest transformer turning style imports into empty objects.
+// http://facebook.github.io/jest/docs/en/webpack.html
+
+module.exports = {
+  process() {
+    return 'module.exports = {};';
+  },
+  getCacheKey() {
+    // The output is always the same.
+    return 'cssTransform';
+  },
+};
diff --git a/frontend/config/jest/fileTransform.js b/frontend/config/jest/fileTransform.js
new file mode 100644
index 0000000..68cae25
--- /dev/null
+++ b/frontend/config/jest/fileTransform.js
@@ -0,0 +1,42 @@
+'use strict';
+
+const path = require('path');
+const camelcase = require('camelcase');
+
+// This is a custom Jest transformer turning file imports into filenames.
+// http://facebook.github.io/jest/docs/en/webpack.html
+
+module.exports = {
+  process(src, filename) {
+    const assetFilename = JSON.stringify(path.basename(filename));
+
+    if (filename.match(/\.svg$/)) {
+      // Based on how SVGR generates a component name:
+      // https://github.com/smooth-code/svgr/blob/01b194cf967347d43d4cbe6b434404731b87cf27/packages/core/src/state.js#L6
+      const pascalCaseFilename = camelcase(path.parse(filename).name, {
+        pascalCase: true,
+      });
+      const componentName = `Svg${pascalCaseFilename}`;
+      return {
+        code: `const React = require('react');
+      module.exports = {
+        __esModule: true,
+        default: ${assetFilename},
+        ReactComponent: React.forwardRef(function ${componentName}(props, ref) {
+          return {
+            $$typeof: Symbol.for('react.element'),
+            type: 'svg',
+            ref: ref,
+            key: null,
+            props: Object.assign({}, props, {
+              children: ${assetFilename}
+            })
+          };
+        }),
+      };`,
+      };
+    }
+
+    return { code: `module.exports = ${assetFilename};` };
+  },
+};
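These three transformers only take effect once they are mapped to file patterns in the Jest configuration. A sketch of the usual CRA-style `transform` mapping, assuming it lives in a `jest.config.js` (not shown in this patch):

```js
// Hypothetical jest.config.js fragment (illustration only).
module.exports = {
  transform: {
    // Scripts go through Babel with babel-preset-react-app.
    '^.+\\.(js|jsx|mjs|cjs|ts|tsx)$': '<rootDir>/config/jest/babelTransform.js',
    // Style imports become empty objects.
    '^.+\\.css$': '<rootDir>/config/jest/cssTransform.js',
    // Everything else becomes its filename; SVGs also get a stub component.
    '^(?!.*\\.(js|jsx|mjs|cjs|ts|tsx|css|json)$)':
      '<rootDir>/config/jest/fileTransform.js',
  },
};
```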
diff --git a/frontend/config/modules.js b/frontend/config/modules.js
new file mode 100644
index 0000000..d63e41d
--- /dev/null
+++ b/frontend/config/modules.js
@@ -0,0 +1,134 @@
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+const paths = require('./paths');
+const chalk = require('react-dev-utils/chalk');
+const resolve = require('resolve');
+
+/**
+ * Get additional module paths based on the baseUrl of a compilerOptions object.
+ *
+ * @param {Object} options
+ */
+function getAdditionalModulePaths(options = {}) {
+  const baseUrl = options.baseUrl;
+
+  if (!baseUrl) {
+    return '';
+  }
+
+  const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
+
+  // We don't need to do anything if `baseUrl` is set to `node_modules`. This is
+  // the default behavior.
+  if (path.relative(paths.appNodeModules, baseUrlResolved) === '') {
+    return null;
+  }
+
+  // Allow the user to set the `baseUrl` to `appSrc`.
+  if (path.relative(paths.appSrc, baseUrlResolved) === '') {
+    return [paths.appSrc];
+  }
+
+  // If the path is equal to the root directory, we ignore it here.
+  // We don't want to allow importing from the root directly, as source files
+  // are not transpiled outside of `src`. We do allow importing them with the
+  // absolute path (e.g. `src/Components/Button.js`) but we set that up with
+  // an alias.
+  if (path.relative(paths.appPath, baseUrlResolved) === '') {
+    return null;
+  }
+
+  // Otherwise, throw an error.
+  throw new Error(
+    chalk.red.bold(
+      "Your project's `baseUrl` can only be set to `src` or `node_modules`." +
+        ' Create React App does not support other values at this time.'
+    )
+  );
+}
+
+/**
+ * Get webpack aliases based on the baseUrl of a compilerOptions object.
+ *
+ * @param {*} options
+ */
+function getWebpackAliases(options = {}) {
+  const baseUrl = options.baseUrl;
+
+  if (!baseUrl) {
+    return {};
+  }
+
+  const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
+
+  if (path.relative(paths.appPath, baseUrlResolved) === '') {
+    return {
+      src: paths.appSrc,
+    };
+  }
+}
+
+/**
+ * Get jest aliases based on the baseUrl of a compilerOptions object.
+ *
+ * @param {*} options
+ */
+function getJestAliases(options = {}) {
+  const baseUrl = options.baseUrl;
+
+  if (!baseUrl) {
+    return {};
+  }
+
+  const baseUrlResolved = path.resolve(paths.appPath, baseUrl);
+
+  if (path.relative(paths.appPath, baseUrlResolved) === '') {
+    return {
+      '^src/(.*)$': '<rootDir>/src/$1',
+    };
+  }
+}
+
+function getModules() {
+  // Check if TypeScript is set up
+  const hasTsConfig = fs.existsSync(paths.appTsConfig);
+  const hasJsConfig = fs.existsSync(paths.appJsConfig);
+
+  if (hasTsConfig && hasJsConfig) {
+    throw new Error(
+      'You have both a tsconfig.json and a jsconfig.json. If you are using TypeScript please remove your jsconfig.json file.'
+    );
+  }
+
+  let config;
+
+  // If there's a tsconfig.json we assume it's a
+  // TypeScript project and set up the config
+  // based on tsconfig.json
+  if (hasTsConfig) {
+    const ts = require(resolve.sync('typescript', {
+      basedir: paths.appNodeModules,
+    }));
+    config = ts.readConfigFile(paths.appTsConfig, ts.sys.readFile).config;
+    // Otherwise we'll check if there is a jsconfig.json
+    // for non-TS projects.
+  } else if (hasJsConfig) {
+    config = require(paths.appJsConfig);
+  }
+
+  config = config || {};
+  const options = config.compilerOptions || {};
+
+  const additionalModulePaths = getAdditionalModulePaths(options);
+
+  return {
+    additionalModulePaths: additionalModulePaths,
+    webpackAliases: getWebpackAliases(options),
+    jestAliases: getJestAliases(options),
+    hasTsConfig,
+  };
+}
+
+module.exports = getModules();
diff --git a/frontend/config/paths.js b/frontend/config/paths.js
new file mode 100644
index 0000000..66fafd9
--- /dev/null
+++ b/frontend/config/paths.js
@@ -0,0 +1,74 @@
+'use strict';
+
+const path = require('path');
+const fs = require('fs');
+const getPublicUrlOrPath = require('react-dev-utils/getPublicUrlOrPath');
+
+// Make sure any symlinks in the project folder are resolved:
+// https://github.com/facebook/create-react-app/issues/637
+const appDirectory = fs.realpathSync(process.cwd());
+const resolveApp = (relativePath) => path.resolve(appDirectory, relativePath);
+
+// We use `PUBLIC_URL` environment variable or "homepage" field to infer
+// "public path" at which the app is served.
+// webpack needs to know it to put the right