From ea36b8385c3ecf8134e1cbc820975921fb669e2e Mon Sep 17 00:00:00 2001 From: Jade Cho Date: Tue, 19 Nov 2024 20:55:40 +0900 Subject: [PATCH 01/62] [GPU] Add support for i16, u16, and u32 element types in remote tensors (#27573) ### Details: - *Removed host memory data converting for user input/output tensors with data types i16, u16, or u32.* - *User tensors can now be directly used as plugin tensors without additional data conversion overhead.* ### Tickets: - *156709* --- .../include/intel_gpu/plugin/common_utils.hpp | 3 - .../kernels/reorder/reorder_kernel.cpp | 6 ++ .../intel_gpu/src/plugin/common_utils.cpp | 1 + .../intel_gpu/src/plugin/ops/parameter.cpp | 3 +- .../intel_gpu/src/plugin/ops/result.cpp | 3 +- .../src/plugin/sync_infer_request.cpp | 45 ++++++++-- .../gpu_remote_tensor_tests.cpp | 90 +++++++++++++++++++ 7 files changed, 139 insertions(+), 12 deletions(-) diff --git a/src/plugins/intel_gpu/include/intel_gpu/plugin/common_utils.hpp b/src/plugins/intel_gpu/include/intel_gpu/plugin/common_utils.hpp index 3c14895befb101..792745193ed550 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/plugin/common_utils.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/plugin/common_utils.hpp @@ -64,11 +64,8 @@ inline cldnn::layout make_layout(const ov::element::Type type, const ov::Shape& inline ov::element::Type convert_to_supported_device_type(ov::element::Type et) { switch (et) { case ov::element::f64: - case ov::element::i16: - case ov::element::u16: return ov::element::f32; case ov::element::u64: - case ov::element::u32: return ov::element::i32; default: return et; } diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/reorder/reorder_kernel.cpp b/src/plugins/intel_gpu/src/kernel_selector/kernels/reorder/reorder_kernel.cpp index 8f4fdbf1f5c992..08bf9d3fb81794 100644 --- a/src/plugins/intel_gpu/src/kernel_selector/kernels/reorder/reorder_kernel.cpp +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/reorder/reorder_kernel.cpp @@ -10,7 +10,10 @@ ParamsKey ReorderKernelRef::GetSupportedKey() const { ParamsKey k; k.EnableInputDataType(Datatype::BF16); k.EnableInputDataType(Datatype::UINT8); + k.EnableInputDataType(Datatype::UINT16); + k.EnableInputDataType(Datatype::UINT32); k.EnableInputDataType(Datatype::INT8); + k.EnableInputDataType(Datatype::INT16); k.EnableInputDataType(Datatype::INT32); k.EnableInputDataType(Datatype::INT64); k.EnableInputDataType(Datatype::F16); @@ -18,9 +21,12 @@ ParamsKey ReorderKernelRef::GetSupportedKey() const { k.EnableOutputDataType(Datatype::F16); k.EnableOutputDataType(Datatype::F32); k.EnableOutputDataType(Datatype::INT8); + k.EnableOutputDataType(Datatype::INT16); k.EnableOutputDataType(Datatype::INT32); k.EnableOutputDataType(Datatype::INT64); k.EnableOutputDataType(Datatype::UINT8); + k.EnableOutputDataType(Datatype::UINT16); + k.EnableOutputDataType(Datatype::UINT32); k.EnableOutputDataType(Datatype::BF16); k.EnableSurfaceInputSupport(); k.EnableDifferentTypes(); diff --git a/src/plugins/intel_gpu/src/plugin/common_utils.cpp b/src/plugins/intel_gpu/src/plugin/common_utils.cpp index 8a5e47279d10a0..ce1e85f7d454b9 100644 --- a/src/plugins/intel_gpu/src/plugin/common_utils.cpp +++ b/src/plugins/intel_gpu/src/plugin/common_utils.cpp @@ -236,6 +236,7 @@ void convert_and_copy(const ov::ITensor* src, ov::ITensor* dst, const cldnn::str tmp_tensor = ov::Tensor(dst_et, src->get_shape()); ::convert_and_copy(src_ptr, src_et, tmp_tensor.data(), dst_et, size, cldnn::layout({}, ov::element::undefined, cldnn::format::bfyx, cldnn::padding())); 
remote->copy_from(get_tensor_impl(tmp_tensor)._ptr); + return; } else { dst_ptr = dst->data(); } diff --git a/src/plugins/intel_gpu/src/plugin/ops/parameter.cpp b/src/plugins/intel_gpu/src/plugin/ops/parameter.cpp index 7f5c4b73223326..0b9874ffe694e1 100644 --- a/src/plugins/intel_gpu/src/plugin/ops/parameter.cpp +++ b/src/plugins/intel_gpu/src/plugin/ops/parameter.cpp @@ -29,7 +29,8 @@ static void CreateParameterOp(ProgramBuilder& p, const std::shared_ptrget_output_element_type(0))); + auto element_type = convert_to_supported_device_type(op->get_output_element_type(0)); + element_type = element_type == ov::element::boolean ? ov::element::u8 : element_type; // look at the expected color format of this input auto input_name = layer_type_name_ID(op); diff --git a/src/plugins/intel_gpu/src/plugin/ops/result.cpp b/src/plugins/intel_gpu/src/plugin/ops/result.cpp index 4172f56e483af3..eb6df76c39b108 100644 --- a/src/plugins/intel_gpu/src/plugin/ops/result.cpp +++ b/src/plugins/intel_gpu/src/plugin/ops/result.cpp @@ -30,7 +30,8 @@ static void CreateResultOp(ProgramBuilder& p, const std::shared_ptrget_input_element_type(0))); + auto out_data_type = convert_to_supported_device_type(op->get_input_element_type(0)); + out_data_type = out_data_type == ov::element::boolean ? ov::element::u8 : out_data_type; auto reorder_primitive = cldnn::reorder(out_primitive_name, inputs[0], diff --git a/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp b/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp index 6d48849102765e..cc4681d2ac3387 100644 --- a/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp +++ b/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp @@ -82,6 +82,18 @@ inline bool all_host_tensors(const std::vector>& tensors) }); } +cldnn::data_types data_type_for_remote_tensor(ov::element::Type t) { + switch (t) { + case ov::element::Type_t::f64: + return cldnn::data_types::f32; + case ov::element::Type_t::u64: + return cldnn::data_types::i32; + case ov::element::Type_t::boolean: + return cldnn::data_types::u8; + default: return t; + } +} + } // namespace namespace ov { @@ -446,6 +458,21 @@ void SyncInferRequest::wait() { iremote_tensor_ptr->copy_from(plugin_tensor.ptr); } } + } else if (!is_dynamic && is_remote_tensor_impl && output_memory) { + auto& stream = m_graph->get_network()->get_stream(); + auto user_mem = remote_tensor_impl_ptr->get_original_memory(); + if (user_mem->get_allocation_type() == cldnn::allocation_type::cl_mem + && output_memory->get_allocation_type() != cldnn::allocation_type::cl_mem) { + auto plugin_tensor = m_plugin_outputs.at(port_idx); + if (is_convert_required(plugin_tensor.ptr->get_element_type(), iremote_tensor_ptr->get_element_type())) { + auto& stream = m_graph->get_network()->get_stream(); + convert_and_copy(plugin_tensor.ptr.get(), iremote_tensor_ptr.get(), stream); + } else { + iremote_tensor_ptr->copy_from(plugin_tensor.ptr); + } + } else { + copy_events.push_back(output_memory->copy_to(stream, *user_mem, false)); + } } else if (is_remote_tensor_impl && is_dynamic) { auto& stream = m_graph->get_network()->get_stream(); auto user_mem = remote_tensor_impl_ptr->get_original_memory(); @@ -522,7 +549,7 @@ std::shared_ptr SyncInferRequest::create_device_tensor(const ov::Pa return std::make_shared(m_context, get_tensor_shape(port_shape), - cldnn::element_type_to_data_type(element_type), + ::data_type_for_remote_tensor(element_type), tensor_type); } @@ -553,7 +580,7 @@ TensorWrapper SyncInferRequest::create_or_share_device_tensor(const TensorWrappe } else if 
(usm_host_raw_ptr && can_share) { return { std::make_shared(m_context, user_tensor->get_shape(), - cldnn::element_type_to_data_type(element_type), + ::data_type_for_remote_tensor(element_type), TensorType::BT_USM_SHARED, user_tensor->data()), TensorOwner::USER }; } @@ -785,16 +812,16 @@ std::vector SyncInferRequest::prepare_input(const std::string if (is_remote_tensor_impl) { if (convert_needed) { m_plugin_inputs[input_idx] = { create_device_tensor(pshape, - cldnn::element_type_to_data_type(element_type), + ::data_type_for_remote_tensor(element_type), false), TensorOwner::PLUGIN }; } else { m_plugin_inputs[input_idx] = user_tensor_wrapper; } } else if (is_usm_host_tensor && !convert_needed && can_use_usm_host(engine)) { - if (element_type != cldnn::element_type_to_data_type(element_type)) { + if (element_type != ::data_type_for_remote_tensor(element_type)) { m_plugin_inputs[input_idx] = { std::make_shared(m_context, user_tensor->get_shape(), - cldnn::element_type_to_data_type(element_type), + ::data_type_for_remote_tensor(element_type), TensorType::BT_USM_SHARED, user_tensor->data()), TensorOwner::USER }; } else { @@ -953,8 +980,12 @@ std::vector SyncInferRequest::prepare_output(size_t output_id is_generic_remote || (m_plugin_outputs[output_idx].owner == TensorOwner::USER && !is_remote_tensor_impl); if (update_device_tensor) { - m_plugin_outputs[output_idx] = - create_or_share_device_tensor(user_tensor_wrapper, internal_name, pshape, device_tensor_et, need_lockable_mem || convert_needed); + if (!is_remote_tensor_impl) { + m_plugin_outputs[output_idx] = + create_or_share_device_tensor(user_tensor_wrapper, internal_name, pshape, device_tensor_et, need_lockable_mem || convert_needed); + } else { + m_plugin_outputs[output_idx] = { create_device_tensor(pshape, device_tensor_et, need_lockable_mem || convert_needed), TensorOwner::PLUGIN }; + } } } diff --git a/src/plugins/intel_gpu/tests/functional/remote_tensor_tests/gpu_remote_tensor_tests.cpp b/src/plugins/intel_gpu/tests/functional/remote_tensor_tests/gpu_remote_tensor_tests.cpp index baad7361425cca..11c2b034d20821 100644 --- a/src/plugins/intel_gpu/tests/functional/remote_tensor_tests/gpu_remote_tensor_tests.cpp +++ b/src/plugins/intel_gpu/tests/functional/remote_tensor_tests/gpu_remote_tensor_tests.cpp @@ -2873,3 +2873,93 @@ TEST(RemoteTensor, smoke_CanSetRoiRemoteTensor) { compare_tensors(output_tensor_copy_0, output_tensor_copy_1); } + + +using RemoteTensorDataTypesOptionsParams = std::tuple; +class OVRemoteTensorDataType_Test : public OVRemoteTensor_Test, + public testing::WithParamInterface { +protected: + std::shared_ptr fn_ptr; + std::string deviceName; + ov::AnyMap config; + ov::element::Type_t element_type; + +public: + void SetUp() override { + deviceName = ov::test::utils::DEVICE_GPU; + std::tie(element_type) = this->GetParam(); + config = {ov::hint::inference_precision(ov::element::f16), + ov::hint::model_priority(ov::hint::Priority::HIGH), + ov::hint::execution_mode(ov::hint::ExecutionMode::PERFORMANCE), + ov::hint::performance_mode(ov::hint::PerformanceMode::LATENCY)}; + + auto input1 = std::make_shared(element_type, ov::Shape{1, 2, 10, 10}); + auto constant = ov::op::v0::Constant::create(element_type, ov::Shape{1, 2, 10, 10}, {1}); + auto add = std::make_shared(input1, constant); + fn_ptr = std::make_shared(ov::NodeVector{add}, ov::ParameterVector{input1}); + } + static std::string getTestCaseName(const testing::TestParamInfo& obj) { + ov::element::Type_t elem_type; + std::tie(elem_type) = obj.param; + + std::ostringstream 
result; + result << "OVRemoteTensorTest_" << elem_type; + return result.str(); + } +}; + +TEST_P(OVRemoteTensorDataType_Test, smoke_RemoteTensorDataType) { +#if defined(ANDROID) + GTEST_SKIP(); +#endif + auto ppp = ov::preprocess::PrePostProcessor(fn_ptr); + ppp.output(0).tensor().set_element_type(element_type); + auto ov_model = ppp.build(); + + auto core = ov::Core(); + ov::CompiledModel compiled_model = core.compile_model(ov_model, deviceName, config); + + // regular inference + auto inf_req = compiled_model.create_infer_request(); + auto input_element_type = inf_req.get_input_tensor(0).get_element_type(); + auto input_shape = inf_req.get_input_tensor(0).get_shape(); + auto output_element_type = inf_req.get_output_tensor(0).get_element_type(); + auto output_shape = inf_req.get_output_tensor(0).get_shape(); + + ASSERT_EQ(input_element_type, element_type); + ASSERT_EQ(output_element_type, element_type); + + auto remote_context = compiled_model.get_context().as(); + auto input_tensor = ov::test::utils::create_and_fill_tensor(input_element_type, input_shape); + auto output_tensor = ov::test::utils::create_and_fill_tensor(output_element_type, output_shape); + + auto input_cl_tensor = remote_context.create_tensor(input_element_type, input_shape); + auto output_cl_tensor = remote_context.create_tensor(output_element_type, output_shape); + + input_cl_tensor.copy_from(input_tensor); + + inf_req.set_input_tensor(0, input_tensor); + inf_req.set_output_tensor(0, output_tensor); + inf_req.infer(); + + inf_req.set_input_tensor(0, input_cl_tensor); + inf_req.set_output_tensor(0, output_cl_tensor); + inf_req.infer(); + + auto tmp_tensor = ov::Tensor(output_element_type, output_shape); + output_cl_tensor.copy_to(tmp_tensor); + + if (element_type == ov::element::i16) { + compare_data::value_type>(output_tensor, tmp_tensor); + } else if (element_type == ov::element::u16) { + compare_data::value_type>(output_tensor, tmp_tensor); + } else if (element_type == ov::element::u32) { + compare_data::value_type>(output_tensor, tmp_tensor); + } +} + +INSTANTIATE_TEST_SUITE_P(smoke_RemoteTensorDataType, OVRemoteTensorDataType_Test, + ::testing::Combine(::testing::Values(ov::element::Type_t::i16, + ov::element::Type_t::u16, + ov::element::Type_t::u32)), + OVRemoteTensorDataType_Test::getTestCaseName); From 46b0852046d1cb709291b439bc46c194c1002c34 Mon Sep 17 00:00:00 2001 From: Alexey Smirnov Date: Tue, 19 Nov 2024 11:58:43 +0000 Subject: [PATCH 02/62] [NPUW] Extend ConvToMatmul pattern (#27561) Support case where Const has no Convert after it --- .../intel_npu/src/plugin/npuw/partitioning/patterns/opt.cpp | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/plugins/intel_npu/src/plugin/npuw/partitioning/patterns/opt.cpp b/src/plugins/intel_npu/src/plugin/npuw/partitioning/patterns/opt.cpp index 3470739c848dac..da3962feba66f3 100644 --- a/src/plugins/intel_npu/src/plugin/npuw/partitioning/patterns/opt.cpp +++ b/src/plugins/intel_npu/src/plugin/npuw/partitioning/patterns/opt.cpp @@ -1501,6 +1501,7 @@ ConvToMatmul::ConvToMatmul(Context::Ref ctx) { auto matched_node_transpose_in = node_to_output.at(transpose_in).get_node_shared_ptr(); auto matched_node_transpose_out = node_to_output.at(transpose_out).get_node_shared_ptr(); auto matched_node_multiply = node_to_output.at(multiply).get_node_shared_ptr(); + const auto& cvt2_or_multiply = uat::_(node_to_output).at_or_at(convert2, multiply); const auto& shape = matched_node_param->get_shape(); const auto& shape2 = matched_node_param2->get_shape(); @@ 
-1536,10 +1537,10 @@ ConvToMatmul::ConvToMatmul(Context::Ref ctx) { auto new_reshape2 = std::make_shared(matched_node_param2, new_const2, false); // Connect to Reshape - if (ov::op::util::is_parameter(matched_node_param2)) { + if (cvt2_or_multiply == matched_node_multiply) { // param -> multiply matched_node_multiply->input(1).replace_source_output(new_reshape2); matched_node_multiply->validate_and_infer_types(); - } else { // constant -> convert -> multiply + } else { // constant -> (convert) -> multiply node_to_output.at(convert2).get_node_shared_ptr()->input(0).replace_source_output(new_reshape2); node_to_output.at(convert2).get_node_shared_ptr()->validate_and_infer_types(); matched_node_multiply->validate_and_infer_types(); From fe65df322493cadc5813193e6d9b7746e1768086 Mon Sep 17 00:00:00 2001 From: Roman Kazantsev Date: Tue, 19 Nov 2024 16:15:56 +0400 Subject: [PATCH 03/62] [GHA][MO][openvino-dev] Remove tests for legacy openvino-dev from GHA (#27495) **Details:** We are removing openvino-dev in 2025.0 so all legacy tests are no longer needed in GHA. **Ticket:** 157072 --------- Signed-off-by: Kazantsev, Roman Co-authored-by: Anastasia Kuporosova --- .github/CODEOWNERS | 2 - .github/dependabot.yml | 2 +- .github/labeler.yml | 6 - .github/workflows/coverage.yml | 9 - .github/workflows/job_onnx_models_tests.yml | 9 +- .github/workflows/job_python_unit_tests.yml | 87 ++------ .github/workflows/mo.yml | 58 ------ .github/workflows/windows_vs2019_release.yml | 47 +---- tests/CMakeLists.txt | 4 +- .../common/mo_convert_test_class.py | 3 +- .../layer_tests/common/utils/common_utils.py | 32 +-- .../test_complex_params.py | 61 ------ .../ovc_python_api_tests/test_pytorch.py | 190 ------------------ tests/requirements_onnx | 3 + 14 files changed, 43 insertions(+), 470 deletions(-) delete mode 100644 .github/workflows/mo.yml create mode 100644 tests/requirements_onnx diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8e7eb099540439..3598e32166a809 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -108,8 +108,6 @@ /tools/ @openvinotoolkit/openvino-tools-maintainers /tools/benchmark_tool/ @openvinotoolkit/openvino-ie-python-api-maintainers /tools/legacy/ @openvinotoolkit/openvino-samples-maintainers -/tools/openvino_dev/ @openvinotoolkit/openvino-tools-maintainers @openvinotoolkit/openvino-ie-python-api-maintainers -/tools/mo/ @openvinotoolkit/openvino-mo-maintainers /tools/ovc/ @openvinotoolkit/openvino-ovc-maintainers /thirdparty/open_model_zoo/ @openvinotoolkit/omz-maintainers diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 1511e6a2c30170..359ff683c9b22a 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -41,7 +41,7 @@ updates: - "rkazants" versioning-strategy: increase-if-necessary - # Model Optimizer, openvino_dev and Benchmark tool + # ovc and Benchmark tools - package-ecosystem: pip directory: "/tools" schedule: diff --git a/.github/labeler.yml b/.github/labeler.yml index daa5375b175bd3..e9b2acb26c9072 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -100,10 +100,6 @@ 'category: LP transformations': - 'src/common/low_precision_transformations/**/*' -'category: MO': -- 'tools/mo/**/*' -- 'tests/layer_tests/mo_python_api_tests/**/*' - 'category: OVC': - 'tools/ovc/**/*' - 'tests/layer_tests/ovc_python_api_tests/**/*' @@ -119,7 +115,6 @@ - any: ['src/bindings/js/node/CMakeLists.txt', 'src/bindings/js/node/package.json', 'src/bindings/js/node/package-lock.json'] -- 'tools/openvino_dev/**/*' 'category: PDPD FE': - 
'src/frontends/paddle/**/*' @@ -183,7 +178,6 @@ 'category: tools': - any: ['tools/**', - '!tools/mo/**/*', '!tools/ovc/**/*'] 'category: transformations': diff --git a/.github/workflows/coverage.yml b/.github/workflows/coverage.yml index 6cb0b2c5b6233c..cde1b9cf67e2fc 100644 --- a/.github/workflows/coverage.yml +++ b/.github/workflows/coverage.yml @@ -50,12 +50,6 @@ jobs: python3 -m pip install -r ${{ github.workspace }}/src/frontends/onnx/tests/requirements.txt # For running TensorFlow frontend unit tests python3 -m pip install -r ${{ github.workspace }}/src/frontends/tensorflow/tests/requirements.txt - # For MO unit tests - python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_caffe.txt - python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_kaldi.txt - python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_onnx.txt - python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_tf2.txt - python3 -m pip install -r ${{ github.workspace }}/tools/mo/requirements_dev.txt - name: Build OpenVINO with CMake uses: ashutoshvarma/action-cmake-build@ade188313bc7eaa6f14349569a64d8bc716342ff # master @@ -84,9 +78,6 @@ jobs: - name: Install wheel packages run: cmake -DCOMPONENT=python_wheels -DCMAKE_INSTALL_PREFIX=${{ github.workspace }}/install_pkg -P '${{ github.workspace }}/build/cmake_install.cmake' - - name: Install python wheels - run: python3 -m pip install openvino-dev --find-links=${{ github.workspace }}/install_pkg/tools - - name: List binaries run: ls -la ${{ github.workspace }}/bin/intel64/${{ env.CMAKE_BUILD_TYPE }} diff --git a/.github/workflows/job_onnx_models_tests.yml b/.github/workflows/job_onnx_models_tests.yml index c879f0cb6a1efc..39a4b467e74fc1 100644 --- a/.github/workflows/job_onnx_models_tests.yml +++ b/.github/workflows/job_onnx_models_tests.yml @@ -86,14 +86,11 @@ jobs: run: | # Install the core OV wheel python3 -m pip install ./openvino-*.whl - - extras_to_install="onnx" - - # Find and install OV dev wheel - ov_dev_wheel_name=$(find . -name 'openvino_dev*.whl') - python3 -m pip install $ov_dev_wheel_name[$extras_to_install] working-directory: ${{ env.INSTALL_WHEELS_DIR }} + - name: Install ONNX Models tests requirements + run: python3 -m pip install -r ${INSTALL_TEST_DIR}/requirements_onnx + - name: Install Python tests dependencies run: | # To enable pytest parallel features diff --git a/.github/workflows/job_python_unit_tests.yml b/.github/workflows/job_python_unit_tests.yml index 64be9ef4bbcc44..8075f3299fe063 100644 --- a/.github/workflows/job_python_unit_tests.yml +++ b/.github/workflows/job_python_unit_tests.yml @@ -91,20 +91,11 @@ jobs: should-setup-pip-paths: ${{ runner.os == 'Linux' }} self-hosted-runner: ${{ runner.os == 'Linux' }} - # - # Tests - # - - name: Install OpenVINO Python wheels run: | # Install the core OV wheel python3 -m pip install ./openvino-*.whl - extras_to_install="caffe,kaldi,onnx,tensorflow2,pytorch" - - # Find and install OV dev wheel - ov_dev_wheel_name=$(find . 
-name 'openvino_dev*.whl') - python3 -m pip install $ov_dev_wheel_name[$extras_to_install] working-directory: ${{ env.INSTALL_WHEELS_DIR }} - name: Install Python API tests dependencies @@ -112,7 +103,19 @@ jobs: # To enable pytest parallel features python3 -m pip install pytest-xdist[psutil] python3 -m pip install -r ${INSTALL_TEST_DIR}/bindings/python/requirements_test.txt - python3 -m pip install -r ${INSTALL_TEST_DIR}/mo/requirements_dev.txt + + - name: Install Python Layer tests dependencies and for OVC unit tests + run: | + # For torchvision to OpenVINO preprocessing converter + python3 -m pip install -r ${INSTALL_TEST_DIR}/python/preprocess/torchvision/requirements.txt + + # layer test requirements + python3 -m pip install -r ${LAYER_TESTS_INSTALL_DIR}/requirements.txt + + - name: Install ONNX tests dependencies + run: | + # ONNX tests requirements + python3 -m pip install -r ${INSTALL_TEST_DIR}/requirements_onnx # # Tests @@ -127,18 +130,6 @@ jobs: --junitxml=${INSTALL_TEST_DIR}/TEST-Pyngraph.xml \ --ignore=${INSTALL_TEST_DIR}/pyopenvino/tests/test_utils/test_utils.py - - name: Model Optimizer unit tests - if: fromJSON(inputs.affected-components).MO.test - run: | - if [[ "${{ runner.os }}" == "Linux" ]] && [[ "${{ runner.arch }}" != "ARM64" ]]; then - # required for MxNet - apt-get install -y libgomp1 libquadmath0 - fi - - # Skips under tickets: 133405, 122666 - python3 -m pytest -s ${INSTALL_TEST_DIR}/mo/unit_tests \ - --junitxml=${INSTALL_TEST_DIR}/TEST-ModelOptimizer.xml - - name: Python ONNX operators tests if: (fromJSON(inputs.affected-components).Python_API.test || fromJSON(inputs.affected-components).ONNX_FE.test) && @@ -153,32 +144,6 @@ jobs: if: fromJSON(inputs.affected-components).MO.test run: python3 -m pytest -s ${INSTALL_TEST_DIR}/ovc/unit_tests --junitxml=${INSTALL_TEST_DIR}/TEST-OpenVinoConversion.xml - - name: Install Python Layer tests dependencies - run: | - # For torchvision to OpenVINO preprocessing converter - python3 -m pip install -r ${INSTALL_TEST_DIR}/python/preprocess/torchvision/requirements.txt - - # layer test requirements - python3 -m pip install -r ${LAYER_TESTS_INSTALL_DIR}/requirements.txt - - - name: MO Python API Tests - if: fromJSON(inputs.affected-components).MO.test - run: | - # Import 'test_utils' installed in '/tests/python/openvino' - export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH - export PYTHONPATH=${INSTALL_TEST_DIR}/python - - if [[ "${{ runner.os }}" == "Linux" ]] && [[ "${{ runner.arch }}" == "ARM64" ]]; then - # Find gomp lib - GOMP_LIB=$(find "${PIP_INSTALL_PATH}/torch/lib/../../torch.libs/" -name '*libgomp-*so*') - export LD_PRELOAD=${GOMP_LIB} - fi - - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/mo_python_api_tests -n logical --junitxml=${INSTALL_TEST_DIR}/TEST-test_mo_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - name: OVC Python API Tests if: fromJSON(inputs.affected-components).MO.test run: | @@ -205,16 +170,6 @@ jobs: export LD_LIBRARY_PATH=${PIP_INSTALL_PATH}/openvino/libs:$LD_LIBRARY_PATH python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/py_frontend_tests --junitxml=${INSTALL_TEST_DIR}/TEST-test_py_fontend.xml - - name: ONNX Layer Tests - if: ${{ fromJSON(inputs.affected-components).ONNX_FE.test }} - run: | - # requires 'unit_tests' from 'tools/mo' - export PYTHONPATH=${INSTALL_TEST_DIR}/mo:$PYTHONPATH - python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/onnx_tests -m "not launch_only_if_manually_specified and precommit" --junitxml=${INSTALL_TEST_DIR}/TEST-onnx.xml - env: - TEST_DEVICE: CPU 
- TEST_PRECISION: FP16 - - name: JAX Layer Tests - JAX FE if: ${{ fromJSON(inputs.affected-components).JAX_FE.test && runner.arch != 'ARM64' && runner.os != 'macOS' }} run: python3 -m pytest ${LAYER_TESTS_INSTALL_DIR}/jax_tests/ -m precommit_jax_fe --junitxml=${INSTALL_TEST_DIR}/TEST-jax_fe.xml @@ -230,22 +185,6 @@ jobs: TEST_DEVICE: CPU TEST_PRECISION: FP16 - - name: TensorFlow 1 Layer Tests - Legacy FE - if: fromJSON(inputs.affected-components).TF_FE.test - run: python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow_tests/test_tf_Roll.py --use_legacy_frontend --ir_version=10 --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf_Roll.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - - name: TensorFlow 2 Layer Tests - Legacy FE - # no longer workable since TF 2.17 - # will be removed in 2024.5 - if: ${{ 'false' }} - run: python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/tensorflow2_keras_tests/test_tf2_keras_activation.py --use_legacy_frontend --ir_version=11 -k "sigmoid" --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-tf2_Activation.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - name: Clone API snippets if: runner.os != 'macOS' uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 diff --git a/.github/workflows/mo.yml b/.github/workflows/mo.yml deleted file mode 100644 index f48986d4a0d304..00000000000000 --- a/.github/workflows/mo.yml +++ /dev/null @@ -1,58 +0,0 @@ -name: MO -on: - push: - paths: - - 'tools/mo/**' - - '.github/workflows/mo.yml' - branches: - - 'master' - - 'releases/**' - pull_request: - paths: - - 'tools/mo/**' - - '.github/workflows/mo.yml' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -permissions: read-all - -jobs: - Pylint-UT: - runs-on: ubuntu-22.04 - if: ${{ github.repository_owner == 'openvinotoolkit' }} - steps: - - name: Clone OpenVINO - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - - - name: Setup Python - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 - with: - python-version: '3.10' - - - name: Cache pip - uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2 - with: - path: ~/.cache/pip - key: ${{ runner.os }}-pip-${{ hashFiles('tools/mo/requirements*.txt') }} - restore-keys: | - ${{ runner.os }}-pip- - ${{ runner.os }}- - - - name: Install dependencies - run: | - python -m pip install --upgrade pip setuptools - # For UT - pip install unittest-xml-reporting==3.0.2 - # MO requirements - pip install -r requirements_caffe.txt - pip install -r requirements_kaldi.txt - pip install -r requirements_onnx.txt - pip install -r requirements_tf2.txt - pip install -r requirements_dev.txt - working-directory: tools/mo - - - name: Pylint-MO - run: pylint -d C,R,W openvino/tools/mo - working-directory: tools/mo diff --git a/.github/workflows/windows_vs2019_release.yml b/.github/workflows/windows_vs2019_release.yml index a416f577cdb3e1..1b218cdf7d430b 100644 --- a/.github/workflows/windows_vs2019_release.yml +++ b/.github/workflows/windows_vs2019_release.yml @@ -296,9 +296,6 @@ jobs: $ovCoreWheelPath=Get-ChildItem -Path . -Filter openvino-*.whl | % { $_.FullName } python3 -m pip install "$ovCoreWheelPath" - # Find and install the dev OV wheel - $ovDevWheelPath=Get-ChildItem -Path . 
-Filter openvino_dev*.whl | % { $_.FullName } - python3 -m pip install "$ovDevWheelPath[caffe,kaldi,onnx,tensorflow2,pytorch]" working-directory: ${{ env.INSTALL_WHEELS_DIR }} - name: Install Python API tests dependencies @@ -309,8 +306,11 @@ jobs: # For torchvision to OpenVINO preprocessing converter python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/python/preprocess/torchvision/requirements.txt - # TODO: replace with Python API tests requirements - python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/mo/requirements_dev.txt + # For validation of Python API + python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/bindings/python/requirements_test.txt + + # ONNX tests requirements + python3 -m pip install -r ${{ env.INSTALL_TEST_DIR }}/requirements_onnx # For getting rid of SSL issues during model downloading for unit tests python3 -m pip install certifi @@ -318,34 +318,17 @@ jobs: - name: Set SSL_CERT_FILE for model downloading for unit tests run: echo SSL_CERT_FILE=$(python3 -m certifi) >> $env:GITHUB_ENV - - name: Python API Tests - #if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test # Ticket: 127101 - shell: cmd - run: | - set PYTHONPATH=${{ env.LAYER_TESTS_INSTALL_DIR }};%PYTHONPATH% - python3 -m pytest -sv ${{ env.INSTALL_TEST_DIR }}/pyopenvino ${{ env.PYTHON_STATIC_ARGS }} --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-Pyngraph.xml --ignore=${{ env.INSTALL_TEST_DIR }}/pyopenvino/tests/test_utils/test_utils.py - - - name: Model Optimizer UT - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - shell: cmd - run: | - python3 -m pytest -s ${{ env.INSTALL_TEST_DIR }}/mo/unit_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-ModelOptimizer.xml - - name: Install Python Layer tests dependencies run: | # layer test requirements python3 -m pip install -r ${{ env.LAYER_TESTS_INSTALL_DIR }}/requirements.txt - - name: ONNX Layer Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).ONNX_FE.test + - name: Python API Tests + #if: fromJSON(needs.smart_ci.outputs.affected_components).Python_API.test # Ticket: 127101 shell: cmd run: | - :: requires 'unit_tests' from 'tools/mo' - set PYTHONPATH=${{ env.INSTALL_TEST_DIR }}\mo;%PYTHONPATH% - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/onnx_tests -n logical -m "not launch_only_if_manually_specified and precommit" --junitxml=${INSTALL_TEST_DIR}/TEST-onnx.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 + set PYTHONPATH=${{ env.LAYER_TESTS_INSTALL_DIR }};%PYTHONPATH% + python3 -m pytest -sv ${{ env.INSTALL_TEST_DIR }}/pyopenvino ${{ env.PYTHON_STATIC_ARGS }} --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-Pyngraph.xml --ignore=${{ env.INSTALL_TEST_DIR }}/pyopenvino/tests/test_utils/test_utils.py - name: TensorFlow Lite Layer Tests - TFL FE if: fromJSON(needs.smart_ci.outputs.affected_components).TFL_FE.test @@ -366,18 +349,6 @@ jobs: --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-onnx_frontend.xml ^ --ignore=${{ env.INSTALL_TEST_DIR }}/onnx/test_python/test_zoo_models.py - - name: MO Python API Tests - if: fromJSON(needs.smart_ci.outputs.affected_components).MO.test - shell: cmd - run: | - :: Used for 'test_utils' installed in '\python\openvino\test_utils' - set PYTHONPATH=${{ env.INSTALL_TEST_DIR }}\python\openvino\test_utils;${{ env.INSTALL_TEST_DIR }}\python;%PYTHONPATH% - - python3 -m pytest ${{ env.LAYER_TESTS_INSTALL_DIR }}/mo_python_api_tests --junitxml=${{ env.INSTALL_TEST_DIR }}/TEST-test_mo_convert.xml - env: - TEST_DEVICE: CPU - TEST_PRECISION: FP16 - - name: OVC Python API Tests if: 
fromJSON(needs.smart_ci.outputs.affected_components).MO.test shell: cmd diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index e4b5fcd5d1089f..08b4308479ef03 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -7,5 +7,5 @@ add_subdirectory(model_hub_tests) add_subdirectory(samples_tests) add_subdirectory(e2e_tests) -install(FILES requirements_pytorch DESTINATION tests COMPONENT tests EXCLUDE_FROM_ALL) -install(FILES requirements_tensorflow DESTINATION tests COMPONENT tests EXCLUDE_FROM_ALL) +install(FILES requirements_pytorch requirements_tensorflow requirements_onnx + DESTINATION tests COMPONENT tests EXCLUDE_FROM_ALL) diff --git a/tests/layer_tests/common/mo_convert_test_class.py b/tests/layer_tests/common/mo_convert_test_class.py index 6a57339cedf111..7eff4f7fee9e8a 100644 --- a/tests/layer_tests/common/mo_convert_test_class.py +++ b/tests/layer_tests/common/mo_convert_test_class.py @@ -63,7 +63,8 @@ def _test(self, temp_dir, test_params, ref_params): core = Core() test_params.update({"model_name": 'model_test', "output_dir": temp_dir}) - ref_params.update({"model_name": 'model_ref', "output_dir": temp_dir}) + ref_output_path = Path(temp_dir, 'model_ref.xml').absolute().as_posix() + ref_params.update({"output_model": ref_output_path}) self.generate_ir_python_api(**test_params) diff --git a/tests/layer_tests/common/utils/common_utils.py b/tests/layer_tests/common/utils/common_utils.py index 0c8ad494c5cec2..620f2fee9de260 100644 --- a/tests/layer_tests/common/utils/common_utils.py +++ b/tests/layer_tests/common/utils/common_utils.py @@ -14,36 +14,24 @@ def generate_ir(coverage=False, **kwargs): - from openvino.tools.mo import mo - mo_path = Path(mo.__file__).parent - mo_runner = mo_path.joinpath('main.py').as_posix() + from openvino.tools.ovc import ovc + # Get OVC file directory + ovc_path = Path(ovc.__file__).parent + + ovc_runner = ovc_path.joinpath('main.py').as_posix() if coverage: - params = [sys.executable, '-m', 'coverage', 'run', '-p', '--source={}'.format(mo_path.parent), - '--omit=*_test.py', mo_runner] + params = [sys.executable, '-m', 'coverage', 'run', '-p', '--source={}'.format(ovc_runner.parent), + '--omit=*_test.py', ovc_runner] else: - params = [sys.executable, mo_runner] + params = [sys.executable, ovc_runner] for key, value in kwargs.items(): - if key == "batch": - params.extend(("-b", str(value))) - elif key == "k": - params.extend(("-k", str(value))) - # for FP32 set explicitly compress_to_fp16=False, - # if we omit this argument for FP32, it will be set implicitly to True as the default + if key == 'input_model': + params.append((str(value))) elif key == 'compress_to_fp16': params.append("--{}={}".format(key, value)) - elif isinstance(value, bool) and value: - params.append("--{}".format(key)) - elif isinstance(value, bool) and not value: - continue - elif (isinstance(value, tuple) and value) or (isinstance(value, str)): - params.extend(("--{}".format(key), str('"{}"'.format(value)))) - elif key == "mean_values" and (' ' in value or '(' in value): - params.extend(("--{}".format(key), str('"{}"'.format(value)))) else: params.extend(("--{}".format(key), str(value))) exit_code, stdout, stderr = shell(params) - logger.info("Model Optimizer out:\n{}".format(stdout)) - logger.error(stderr) return exit_code, stderr diff --git a/tests/layer_tests/ovc_python_api_tests/test_complex_params.py b/tests/layer_tests/ovc_python_api_tests/test_complex_params.py index 3d6df0c95f31ae..57c98db8c45e61 100644 --- 
a/tests/layer_tests/ovc_python_api_tests/test_complex_params.py +++ b/tests/layer_tests/ovc_python_api_tests/test_complex_params.py @@ -71,77 +71,16 @@ def create_tf_model_single_input_output(tmp_dir): return save_to_pb(tf_net, tmp_dir) test_data = [ - {'params_test': {'output': ["Sigmoid_0:0", "Sigmoid_2:0"]}, - 'params_ref': {'output': "Sigmoid_0,Sigmoid_2"}}, - {'params_test': {'output': ["Sigmoid_0:0"]}, - 'params_ref': {'output': "Sigmoid_0"}}, - {'params_test': {'input': [PartialShape([2, 3, 4]), [2, 3, 4], [Dimension(2), Dimension(3), Dimension(4)]]}, - 'params_ref': {'input_shape': "[2,3,4],[2,3,4],[2,3,4]", 'input': 'Input1:0,Input2:0,Input3:0'}}, - {'params_test': {'input': [PartialShape([1, 3, -1, -1]), [1, 3, -1, -1]]}, - 'params_ref': {'input_shape': "[1,3,?,?],[1,3,?,?]", 'input': 'Input1:0,Input2:0'}}, - {'params_test': {'input': [(2, 3, 4), [2, 3, 4], (Dimension(2), Dimension(3), Dimension(4))]}, - 'params_ref': {'input_shape': "[2,3,4],[2,3,4],[2,3,4]", 'input': 'Input1:0,Input2:0,Input3:0'}}, - {'params_test': {'input': {"Input1:0": PartialShape([2, 3, 4]), "Input2:0": [2, 3, 4], - "Input3:0": [Dimension(2), Dimension(3), Dimension(4)]}}, - 'params_ref': {'input_shape': "[2,3,4],[2,3,4],[2,3,4]", 'input': 'Input1:0,Input2:0,Input3:0'}}, - {'params_test': {'input': {"Input2:0": [1, -1, -1, -1], - "Input3:0": [Dimension(1), Dimension(-1), Dimension(-1), Dimension(-1)]}}, - 'params_ref': {'input_shape': "[1,?,?,?],[1,?,?,?]", 'input': 'Input2:0,Input3:0'}}, - {'params_test': {'input': [np.int32, Type(np.int32), np.int32]}, - 'params_ref': {'input': 'Input1:0{i32},Input2:0{i32},Input3:0{i32}'}}, - {'params_test': {'input': [ov.Type.f32, ov.Type.f32]}, - 'params_ref': {'input': 'Input1:0{f32},Input2:0{f32}'}}, - {'params_test': {'input': [([1, 3, -1, -1], ov.Type.i32), ov.Type.i32, ov.Type.i32]}, - 'params_ref': {'input': 'Input1:0[1,3,?,?]{i32},Input2:0{i32},Input3:0{i32}'}}, - {'params_test': {'input': (PartialShape([2, 3, 4]), [2, 3, 4], [Dimension(2), Dimension(3), Dimension(4)])}, - 'params_ref': {'input_shape': "[2,3,4],[2,3,4],[2,3,4]", 'input': 'Input1:0,Input2:0,Input3:0'}}, - {'params_test': {'input': (PartialShape([1, 3, -1, -1]), [1, 3, -1, -1])}, - 'params_ref': {'input_shape': "[1,3,?,?],[1,3,?,?]", 'input': 'Input1:0,Input2:0'}}, - {'params_test': {'input': ((2, 3, 4), [2, 3, 4], (Dimension(2), Dimension(3), Dimension(4)))}, - 'params_ref': {'input_shape': "[2,3,4],[2,3,4],[2,3,4]", 'input': 'Input1:0,Input2:0,Input3:0'}}, - {'params_test': {'input': (np.int32, Type(np.int32), np.int32)}, - 'params_ref': {'input': 'Input1:0{i32},Input2:0{i32},Input3:0{i32}'}}, - {'params_test': {'input': (ov.Type.f32, ov.Type.f32)}, - 'params_ref': {'input': 'Input1:0{f32},Input2:0{f32}'}}, - {'params_test': {'input': (([1, 3, -1, -1], ov.Type.i32), ov.Type.i32, ov.Type.i32)}, - 'params_ref': {'input': 'Input1:0[1,3,?,?]{i32},Input2:0{i32},Input3:0{i32}'}} - ] - - @pytest.mark.parametrize("params", test_data) - @pytest.mark.nightly - def test_mo_convert_tf_model(self, params, ie_device, precision, ir_version, - temp_dir, use_legacy_frontend): - tf_net_path = self.create_tf_model(temp_dir) - - test_params = params['params_test'] - ref_params = params['params_ref'] - test_params.update({'input_model': tf_net_path}) - ref_params.update({'input_model': tf_net_path}) - self._test(temp_dir, test_params, ref_params) - - test_data = [ - {'params_test': {'input': {"Input:0": ([3, 2], ov.Type.i32)}}, - 'params_ref': {'input': "Input:0[3,2]{i32}"}}, - {'params_test': {'input': {"Input:0": 
ov.Type.i32}}, - 'params_ref': {'input': "Input:0{i32}"}}, {'params_test': {'input': {"Input:0": [3, 2]}}, 'params_ref': {'input': "Input:0[3,2]"}}, {'params_test': {'input': (3, 2)}, 'params_ref': {'input': "Input:0[3,2]"}}, {'params_test': {'input': (3, Dimension(2))}, 'params_ref': {'input': "Input:0[3,2]"}}, - {'params_test': {'input': [3, 2]}, - 'params_ref': {'input': "Input:0[3 2]"}}, - {'params_test': {'input': [Dimension(3, 10), 2]}, - 'params_ref': {'input': "Input:0[3..10 2]"}}, {'params_test': {'input': (-1, 10)}, 'params_ref': {'input': "Input:0[?,10]"}}, {'params_test': {'input': PartialShape([-1, 10])}, 'params_ref': {'input': "Input:0[?,10]"}}, - {'params_test': {'input': np.int32}, - 'params_ref': {'input': "Input:0{i32}"}}, - {'params_test': {'input': (np.int32, [1, 2, 3])}, - 'params_ref': {'input': "Input:0[1,2,3]{i32}"}}, {'params_test': {'input': [Dimension(3, 10), 10, -1]}, 'params_ref': {'input': 'Input:0[3..10,10,?]'}}, ] diff --git a/tests/layer_tests/ovc_python_api_tests/test_pytorch.py b/tests/layer_tests/ovc_python_api_tests/test_pytorch.py index 1a49a989c11df2..02b4d569927909 100644 --- a/tests/layer_tests/ovc_python_api_tests/test_pytorch.py +++ b/tests/layer_tests/ovc_python_api_tests/test_pytorch.py @@ -366,188 +366,6 @@ def scripted_fn(x: torch.Tensor, y: torch.Tensor): return scripted_fn, ref_model, {'input': [(inp_shape, Type.f32), (inp_shape, Type.f32)]} -def create_pytorch_nn_module_layout_list(tmp_dir): - from openvino.runtime import Layout - pt_model = make_pt_model_two_inputs() - shape = [1, 3, 10, 10] - - shape = PartialShape(shape) - ref_model = make_ref_pt_model_two_inputs(shape) - ref_model.inputs[0].node.layout = Layout('nchw') - ref_model.inputs[1].node.layout = Layout('nhwc') - - return pt_model, ref_model, { - 'input': [(shape, np.float32), (shape, np.float32)], 'layout': ['nchw', Layout('nhwc')], - 'use_convert_model_from_mo': True - } - - -def create_pytorch_nn_module_layout_list_case2(tmp_dir): - from openvino.runtime import Layout - pt_model = make_pt_model_two_inputs() - shape = [1, 3, 10, 10] - - shape = PartialShape(shape) - ref_model = make_ref_pt_model_two_inputs(shape) - ref_model.inputs[0].node.layout = Layout('nchw') - ref_model.inputs[1].node.layout = Layout('nhwc') - - return pt_model, ref_model, { - 'input': [(shape, np.float32), (shape, np.float32)], 'layout': ('nchw', Layout('nhwc')), - 'use_convert_model_from_mo': True} - - -def create_pytorch_nn_module_mean_list_compression_disabled(tmp_dir): - pt_model = make_pt_model_two_inputs() - shape = [1, 10, 10, 3] - - shape = PartialShape(shape) - param1 = ov.opset8.parameter(shape) - param2 = ov.opset8.parameter(shape) - const1 = ov.opset8.constant([[[[-0.0, -0.0, -0.0]]]], dtype=np.float32) - const2 = ov.opset8.constant([[[[-0.0, -0.0, -0.0]]]], dtype=np.float32) - add1 = ov.opset8.add(param1, const1) - add2 = ov.opset8.add(param2, const2) - mul = ov.opset8.multiply(add1, add2) - relu = ov.opset8.relu(mul) - sigm = ov.opset8.sigmoid(relu) - - parameter_list = [param1, param2] - ref_model = Model([sigm], parameter_list, "test") - - return pt_model, ref_model, {'input': [(shape, np.float32), (shape, np.float32)], - 'mean_values': [[0, 0, 0], [0, 0, 0]], - 'compress_to_fp16': False, 'use_convert_model_from_mo': True} - - -def create_pytorch_nn_module_mean_list_compression_default(tmp_dir): - # when 'use_convert_model_from_mo': True by default compression in convert_model is disabled - # therefore decompression Converts will not be present - pt_model = make_pt_model_two_inputs() - 
shape = [1, 10, 10, 3] - - shape = PartialShape(shape) - param1 = ov.opset8.parameter(shape) - param2 = ov.opset8.parameter(shape) - const1 = ov.opset8.constant([[[[-0.0, -0.0, -0.0]]]], dtype=np.float32) - const2 = ov.opset8.constant([[[[-0.0, -0.0, -0.0]]]], dtype=np.float32) - add1 = ov.opset8.add(param1, const1) - add2 = ov.opset8.add(param2, const2) - mul = ov.opset8.multiply(add1, add2) - relu = ov.opset8.relu(mul) - sigm = ov.opset8.sigmoid(relu) - - parameter_list = [param1, param2] - ref_model = Model([sigm], parameter_list, "test") - - return pt_model, ref_model, {'input': [(shape, np.float32), (shape, np.float32)], - 'mean_values': [[0, 0, 0], [0, 0, 0]], - 'use_convert_model_from_mo': True} - - -def create_pytorch_nn_module_mean_list_compression_enabled(tmp_dir): - pt_model = make_pt_model_two_inputs() - shape = [1, 10, 10, 3] - - shape = PartialShape(shape) - param1 = ov.opset8.parameter(shape) - param2 = ov.opset8.parameter(shape) - const1 = ov.opset8.constant([[[[-0.0, -0.0, -0.0]]]], dtype=np.float16) - const2 = ov.opset8.constant([[[[-0.0, -0.0, -0.0]]]], dtype=np.float16) - const1_decompressed = ov.opset8.convert( - const1, destination_type=np.float32) - const2_decompressed = ov.opset8.convert( - const2, destination_type=np.float32) - - add1 = ov.opset8.add(param1, const1_decompressed) - add2 = ov.opset8.add(param2, const2_decompressed) - mul = ov.opset8.multiply(add1, add2) - relu = ov.opset8.relu(mul) - sigm = ov.opset8.sigmoid(relu) - - parameter_list = [param1, param2] - ref_model = Model([sigm], parameter_list, "test") - - return pt_model, ref_model, { - 'input': [(shape, np.float32), (shape, np.float32)], 'mean_values': [[0, 0, 0], [0, 0, 0]], - 'compress_to_fp16': True, 'use_convert_model_from_mo': True} - - -def create_pytorch_nn_module_scale_list_compression_disabled(tmp_dir): - pt_model = make_pt_model_two_inputs() - shape = [1, 10, 10, 3] - - shape = PartialShape(shape) - param1 = ov.opset8.parameter(shape) - param2 = ov.opset8.parameter(shape) - const1 = ov.opset8.constant([[[[1, 1, 1]]]], dtype=np.float32) - const2 = ov.opset8.constant([[[[1, 1, 1]]]], dtype=np.float32) - sub1 = ov.opset8.multiply(param1, const1) - sub2 = ov.opset8.multiply(param2, const2) - mul = ov.opset8.multiply(sub1, sub2) - relu = ov.opset8.relu(mul) - sigm = ov.opset8.sigmoid(relu) - - parameter_list = [param1, param2] - ref_model = Model([sigm], parameter_list, "test") - - return pt_model, ref_model, {'input': [(shape, np.float32), (shape, np.float32)], - 'scale_values': [[1, 1, 1], [1, 1, 1]], - 'compress_to_fp16': False, 'use_convert_model_from_mo': True} - - -def create_pytorch_nn_module_scale_list_compression_default(tmp_dir): - # when 'use_convert_model_from_mo': True by default compression in convert_model is disabled - # therefore decompression Converts will not be present - pt_model = make_pt_model_two_inputs() - shape = [1, 10, 10, 3] - - shape = PartialShape(shape) - param1 = ov.opset8.parameter(shape) - param2 = ov.opset8.parameter(shape) - const1 = ov.opset8.constant([[[[1, 1, 1]]]], dtype=np.float32) - const2 = ov.opset8.constant([[[[1, 1, 1]]]], dtype=np.float32) - sub1 = ov.opset8.multiply(param1, const1) - sub2 = ov.opset8.multiply(param2, const2) - mul = ov.opset8.multiply(sub1, sub2) - relu = ov.opset8.relu(mul) - sigm = ov.opset8.sigmoid(relu) - - parameter_list = [param1, param2] - ref_model = Model([sigm], parameter_list, "test") - - return pt_model, ref_model, {'input': [(shape, np.float32), (shape, np.float32)], - 'scale_values': [[1, 1, 1], [1, 1, 1]], - 
'use_convert_model_from_mo': True} - - -def create_pytorch_nn_module_scale_list_compression_enabled(tmp_dir): - pt_model = make_pt_model_two_inputs() - shape = [1, 10, 10, 3] - - shape = PartialShape(shape) - param1 = ov.opset8.parameter(shape) - param2 = ov.opset8.parameter(shape) - const1 = ov.opset8.constant([[[[1, 1, 1]]]], dtype=np.float16) - const1_decompressed = ov.opset8.convert( - const1, destination_type=np.float32) - const2 = ov.opset8.constant([[[[1, 1, 1]]]], dtype=np.float16) - const2_decompressed = ov.opset8.convert( - const2, destination_type=np.float32) - mul1 = ov.opset8.multiply(param1, const1_decompressed) - mul2 = ov.opset8.multiply(param2, const2_decompressed) - mul3 = ov.opset8.multiply(mul1, mul2) - relu = ov.opset8.relu(mul3) - sigm = ov.opset8.sigmoid(relu) - - parameter_list = [param1, param2] - ref_model = Model([sigm], parameter_list, "test") - - return pt_model, ref_model, {'input': [(shape, np.float32), (shape, np.float32)], - 'scale_values': [[1, 1, 1], [1, 1, 1]], - 'compress_to_fp16': True, 'use_convert_model_from_mo': True} - - def create_pytorch_nn_module_with_compressed_constants(tmp_dir): import torch @@ -1208,14 +1026,6 @@ class TestMoConvertPyTorch(CommonMOConvertTest): 'create_pytorch_nn_module_sample_input_int32_two_inputs', 'create_pytorch_jit_script_module', 'create_pytorch_jit_script_function', - 'create_pytorch_nn_module_layout_list', - 'create_pytorch_nn_module_layout_list_case2', - 'create_pytorch_nn_module_mean_list_compression_default', - 'create_pytorch_nn_module_mean_list_compression_disabled', - 'create_pytorch_nn_module_mean_list_compression_enabled', - 'create_pytorch_nn_module_scale_list_compression_default', - 'create_pytorch_nn_module_scale_list_compression_disabled', - 'create_pytorch_nn_module_scale_list_compression_enabled', 'create_pytorch_nn_module_with_compressed_constants', 'create_pytorch_nn_module_shapes_list_static', 'create_pytorch_nn_module_shapes_list_static_via_input', diff --git a/tests/requirements_onnx b/tests/requirements_onnx new file mode 100644 index 00000000000000..1dfc0077b5d075 --- /dev/null +++ b/tests/requirements_onnx @@ -0,0 +1,3 @@ +numpy>=1.16.6,<1.27 +onnx>=1.8.1,<=1.17.0 +protobuf>=3.18.1,<4.0.0 From b3f4427c7e4133d3b24e58e58f6057299988ff2c Mon Sep 17 00:00:00 2001 From: Denis Orlov Date: Tue, 19 Nov 2024 12:23:00 +0000 Subject: [PATCH 04/62] [GHA] Increase timeout for C++ unit tests in precommit (#27590) ### Tickets: - 156790 --- .github/workflows/job_cxx_unit_tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/job_cxx_unit_tests.yml b/.github/workflows/job_cxx_unit_tests.yml index 8fab17043b7465..52a2b3f4d287c8 100644 --- a/.github/workflows/job_cxx_unit_tests.yml +++ b/.github/workflows/job_cxx_unit_tests.yml @@ -29,7 +29,7 @@ on: description: 'Timeout in minutes for the job' type: number required: false - default: 35 + default: 45 permissions: read-all From a59e5a0d998135450708bffcf929e5261d963c91 Mon Sep 17 00:00:00 2001 From: Aleksandr Voron Date: Tue, 19 Nov 2024 13:31:18 +0100 Subject: [PATCH 05/62] [CPU] Test changes for e2e (#27409) ### Details: - Potential fix for e2e test ### Tickets: - CVS-155043 --------- Co-authored-by: Daria Ilina --- src/plugins/intel_cpu/src/graph_optimizer.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/plugins/intel_cpu/src/graph_optimizer.cpp b/src/plugins/intel_cpu/src/graph_optimizer.cpp index ab7eb223ba17ce..61590b8691f4b2 100644 --- a/src/plugins/intel_cpu/src/graph_optimizer.cpp +++ 
b/src/plugins/intel_cpu/src/graph_optimizer.cpp @@ -2231,7 +2231,9 @@ void GraphOptimizer::ShareReorders(Graph& graph) { void GraphOptimizer::DropDoubleReorders(Graph &graph) { std::set processed; - for (const auto& node : graph.GetNodes()) { + auto& nodes = graph.GetNodes(); + for (size_t i = 0; i < nodes.size(); i++) { + auto node = nodes[i]; if (processed.find(node) == processed.end() && node->getType() == Type::Reorder && node->getChildEdges().size() == 1 && node->getChildEdgeAt(0)->getChild()->getType() == Type::Reorder ) { From b4b39531b302563916fc050a9ce18ee153d49950 Mon Sep 17 00:00:00 2001 From: Septimiu Neaga <111509085+SeptimiuIoachimNeagaIntel@users.noreply.github.com> Date: Tue, 19 Nov 2024 15:38:42 +0200 Subject: [PATCH 06/62] Support for model reshape in protopipe (#27398) ### Details: Support for model reshape in protopipe. Added a new OpenVINO parameter `reshape` to config.yaml that allows reshaping input layers. For ONNX models, dynamic dimensions are set to `1`, which is consistent with the behavior of onnxruntime_perf_test. --- src/plugins/intel_npu/tools/protopipe/README.md | 1 + .../intel_npu/tools/protopipe/src/parser/config.cpp | 4 ++++ .../tools/protopipe/src/scenario/inference.hpp | 1 + .../protopipe/src/simulation/ov_layers_reader.cpp | 12 ++++++++++++ .../tools/protopipe/src/simulation/simulation.cpp | 5 +++++ 5 files changed, 23 insertions(+) diff --git a/src/plugins/intel_npu/tools/protopipe/README.md b/src/plugins/intel_npu/tools/protopipe/README.md index 00849ad8bddc9a..807bd744851b70 100644 --- a/src/plugins/intel_npu/tools/protopipe/README.md +++ b/src/plugins/intel_npu/tools/protopipe/README.md @@ -60,6 +60,7 @@ log_level: INFO - `ol` - **Optional**. Output layer layout. - `iml` - **Optional**. Input model layout. - `oml` - **Optional**. Output model layout. +- `reshape` - **Optional**. Set shape for input layers. For example, "input1: [1,3,224,224], input2: [1,4]" or "[1,3,224,224]" in case of one input layer.
Examples: ``` diff --git a/src/plugins/intel_npu/tools/protopipe/src/parser/config.cpp b/src/plugins/intel_npu/tools/protopipe/src/parser/config.cpp index c2a1bd6415d595..b9f03a97ba3f69 100644 --- a/src/plugins/intel_npu/tools/protopipe/src/parser/config.cpp +++ b/src/plugins/intel_npu/tools/protopipe/src/parser/config.cpp @@ -345,6 +345,10 @@ struct convert { params.output_model_layout = node["oml"].as>(); } + if (node["reshape"]) { + params.reshape = node["reshape"].as>> (); + } + if (node["config"]) { params.config = node["config"].as>(); } diff --git a/src/plugins/intel_npu/tools/protopipe/src/scenario/inference.hpp b/src/plugins/intel_npu/tools/protopipe/src/scenario/inference.hpp index e4568c671438bc..f9c8877b05c53e 100644 --- a/src/plugins/intel_npu/tools/protopipe/src/scenario/inference.hpp +++ b/src/plugins/intel_npu/tools/protopipe/src/scenario/inference.hpp @@ -91,6 +91,7 @@ struct OpenVINOParams { LayerVariantAttr output_layout; LayerVariantAttr input_model_layout; LayerVariantAttr output_model_layout; + LayerVariantAttr> reshape; std::map config; size_t nireq = 1u; }; diff --git a/src/plugins/intel_npu/tools/protopipe/src/simulation/ov_layers_reader.cpp b/src/plugins/intel_npu/tools/protopipe/src/simulation/ov_layers_reader.cpp index 57527cef0cc4aa..33e01e36404570 100644 --- a/src/plugins/intel_npu/tools/protopipe/src/simulation/ov_layers_reader.cpp +++ b/src/plugins/intel_npu/tools/protopipe/src/simulation/ov_layers_reader.cpp @@ -128,6 +128,15 @@ static void cfgOutputPostproc(ov::preprocess::PrePostProcessor& ppp, const std:: } } +static void cfgReshape(const std::shared_ptr& model, + const AttrMap> reshape_map) { + std::map partial_shapes; + for (const auto& [layer_name, shape] : reshape_map) { + partial_shapes.emplace(layer_name, shape); + } + model->reshape(partial_shapes); +} + static std::vector extractLayerNames(const std::vector>& nodes) { std::vector names; std::transform(nodes.begin(), nodes.end(), std::back_inserter(names), [](const auto& node) { @@ -148,6 +157,9 @@ InOutLayers OpenVINOLayersReader::Impl::readFromModel(const std::string& model_p const auto iml_map = unpackLayerAttr(params.input_model_layout, input_names, "input model layout"); cfgInputPreproc(ppp, model, ip_map, il_map, iml_map); + const auto reshape_map = unpackLayerAttr(params.reshape, input_names, "reshape"); + cfgReshape(model, reshape_map); + const auto& output_names = extractLayerNames(model->outputs()); const auto op_map = unpackLayerAttr(params.output_precision, output_names, "output precision"); const auto ol_map = unpackLayerAttr(params.output_layout, output_names, "output layout"); diff --git a/src/plugins/intel_npu/tools/protopipe/src/simulation/simulation.cpp b/src/plugins/intel_npu/tools/protopipe/src/simulation/simulation.cpp index 5b1743651b6ef1..11336c77a477e9 100644 --- a/src/plugins/intel_npu/tools/protopipe/src/simulation/simulation.cpp +++ b/src/plugins/intel_npu/tools/protopipe/src/simulation/simulation.cpp @@ -22,6 +22,11 @@ static cv::gapi::GNetPackage getNetPackage(const std::string& tag, const OpenVIN const auto& blob_path = std::get(params.path); network = std::make_unique

(tag, blob_path.blob, params.device); } + if (std::holds_alternative>>(params.reshape)) { + network->cfgReshape(std::get>>(params.reshape)); + } else { + network->cfgReshape(std::get>(params.reshape)); + } network->cfgPluginConfig(params.config); network->cfgNumRequests(params.nireq); From 59f29698f3f18e85e7b39154538d3082a292049c Mon Sep 17 00:00:00 2001 From: Karol Blaszczak Date: Tue, 19 Nov 2024 14:42:31 +0100 Subject: [PATCH 07/62] [DOCS] benchmark data 24.5 (#27610) port: https://github.com/openvinotoolkit/openvino/pull/27608 --- .../about-openvino/performance-benchmarks.rst | 8 +- .../model-accuracy-int8-fp32.rst | 178 +- .../performance-benchmarks-faq.rst | 41 +- .../OV-2024.4-Performance-Data.xlsx | Bin 335026 -> 0 bytes .../OV-2024.4-platform_list.pdf | Bin 240925 -> 0 bytes .../OV-2024.4-system-info-detailed.xlsx | Bin 83937 -> 0 bytes .../OV-2024.5-Performance-Data.xlsx | Bin 0 -> 249213 bytes .../OV-2024.5-platform_list.pdf | Bin 0 -> 191948 bytes .../OV-2024.5-system-info-detailed.xlsx | Bin 0 -> 74177 bytes .../benchmarks_files/data/graph-data-ov.json | 19560 ++++++---------- .../benchmarks_files/llm_models_7-155H.csv | 312 +- .../benchmarks_files/llm_models_7-258V.csv | 364 +- 12 files changed, 7039 insertions(+), 13424 deletions(-) delete mode 100644 docs/sphinx_setup/_static/benchmarks_files/OV-2024.4-Performance-Data.xlsx delete mode 100644 docs/sphinx_setup/_static/benchmarks_files/OV-2024.4-platform_list.pdf delete mode 100644 docs/sphinx_setup/_static/benchmarks_files/OV-2024.4-system-info-detailed.xlsx create mode 100644 docs/sphinx_setup/_static/benchmarks_files/OV-2024.5-Performance-Data.xlsx create mode 100644 docs/sphinx_setup/_static/benchmarks_files/OV-2024.5-platform_list.pdf create mode 100644 docs/sphinx_setup/_static/benchmarks_files/OV-2024.5-system-info-detailed.xlsx diff --git a/docs/articles_en/about-openvino/performance-benchmarks.rst b/docs/articles_en/about-openvino/performance-benchmarks.rst index 75c7ba90db7e76..d874d1808f7aaf 100644 --- a/docs/articles_en/about-openvino/performance-benchmarks.rst +++ b/docs/articles_en/about-openvino/performance-benchmarks.rst @@ -64,7 +64,7 @@ implemented in your solutions. Click the buttons below to see the chosen benchma :outline: :expand: - :material-regular:`bar_chart;1.4em` OVMS for GenAI + :material-regular:`bar_chart;1.4em` OVMS for GenAI (coming soon) @@ -132,21 +132,21 @@ For a listing of all platforms and configurations used for testing, refer to the .. grid-item:: - .. button-link:: ../_static/benchmarks_files/OV-2024.4-platform_list.pdf + .. button-link:: ../_static/benchmarks_files/OV-2024.5-platform_list.pdf :color: primary :outline: :expand: :material-regular:`download;1.5em` Click for Hardware Platforms [PDF] - .. button-link:: ../_static/benchmarks_files/OV-2024.4-system-info-detailed.xlsx + .. button-link:: ../_static/benchmarks_files/OV-2024.5-system-info-detailed.xlsx :color: primary :outline: :expand: :material-regular:`download;1.5em` Click for Configuration Details [XLSX] - .. button-link:: ../_static/benchmarks_files/OV-2024.4-Performance-Data.xlsx + .. 
button-link:: ../_static/benchmarks_files/OV-2024.5-Performance-Data.xlsx :color: primary :outline: :expand: diff --git a/docs/articles_en/about-openvino/performance-benchmarks/model-accuracy-int8-fp32.rst b/docs/articles_en/about-openvino/performance-benchmarks/model-accuracy-int8-fp32.rst index 3162bae7254704..e87733a1445356 100644 --- a/docs/articles_en/about-openvino/performance-benchmarks/model-accuracy-int8-fp32.rst +++ b/docs/articles_en/about-openvino/performance-benchmarks/model-accuracy-int8-fp32.rst @@ -1,9 +1,6 @@ Model Accuracy ============== - - -The following two tables present the absolute accuracy drop calculated as the accuracy difference between OV-accuracy and the original framework accuracy for FP32, and the same for INT8, BF16, and FP16 representations of a model on three platform architectures. The third table presents the GenAI model accuracies as absolute accuracy values. Refer to notes below the table for more @@ -11,7 +8,7 @@ information. * A - Intel® Core™ i9-9000K (AVX2), INT8 and FP32 * B - Intel® Xeon® 6338, (VNNI), INT8 and FP32 -* C - Intel® Xeon 8480+ (VNNI, AMX), INT8, BF16, FP32 +* C - Intel® Xeon 8580 (VNNI, AMX), INT8, BF16, FP32 * D - Intel® Flex-170, INT8 and FP16 @@ -28,73 +25,52 @@ information. * - bert-base-cased - SST-2_bert_cased_padded - spearman@cosine - - 3.33% - - 3.22% - - 3.05% - - 2.88% - * - bert-large-uncased-whole-word-masking-squad-0001 - - SQUAD_v1_1_bert_msl384_mql64_ds128_lowercase - - F1 - - 0.12% - - 0.03% - - 0.03% - - 0.28% + - 3.06% + - 2.89% + - 2.71% + - 2.71% * - efficientdet-d0 - COCO2017_detection_91cl - coco_precision - - 0.00% - - -0.52% - - -0.54% - - -0.60% + - -0.84% + - -0.59% + - -0.59% + - -0.55% * - mask_rcnn_resnet50_atrous_coco - COCO2017_detection_91cl_bkgr - coco_orig_precision - - 0.05% - - 0.03% - - 0.08% - - -0.09% + - -0.10% + - -0.04% + - 0.07% + - -0.01% * - mobilenet-v2 - ImageNet2012 - accuracy @ top1 - - - -0.87% - - -0.88% - - -0.88% + - -0.97% + - -0.98% + - -0.95% * - resnet-50 - ImageNet2012 - accuracy @ top1 - - -0.17% - - -0.18% - - -0.18% - - -0.16% + - 0.74% + - 0.76% + - 0.74% + - 0.82% * - ssd-resnet34-1200 - COCO2017_detection_80cl_bkgr - map - - -0.03% - - -0.02% - - -0.03% - - 0.02% + - -0.06% + - -0.08% + - -0.07% + - -0.06% * - ssd-mobilenet-v1-coco - COCO2017_detection_80cl_bkgr - coco-precision - - -2.74% - - -0.11% - - -0.13% - - -0.12% - * - unet-camvid-onnx-0001 - - CamVid_12cl - - mean_iou @ mean - - -6.28% - - 6.45% - - 6.46% - - 6.43% - * - yolo_v5m - - COCO2017_detection_80cl - - map - - -0.40% - - -0.32% - - -0.32% - - -0.31% + - -2.94% + - -0.28% + - -0.28% + - -0.26% * - yolo_v8n - COCO2017_detection_80cl - map @@ -121,30 +97,22 @@ information. - 0.00% - 0.00% - -0.01% - - 0.01% - * - bert-large-uncased-whole-word-masking-squad-0001 - - SQUAD_v1_1_bert_msl384_mql64_ds128_lowercase - - F1 - - 0.04% - - 0.04% - - 0.06% - - 0.06% - - 0.04% + - 0.02% * - efficientdet-d0 - COCO2017_detection_91cl - coco_precision - 0.01% - - -0.02% - 0.01% + - 0.01% + - 0.00% - 0.00% - - -0.02% * - mask_rcnn_resnet50_atrous_coco - COCO2017_detection_91cl_bkgr - coco_orig_precision - -0.01% - -0.01% - -0.01% - - -0.05% + - 0.05% - 0.00% * - mobilenet-v2 - ImageNet2012 @@ -160,40 +128,24 @@ information. 
- 0.00% - 0.00% - 0.00% - - -0.01% - - -0.01% + - 0.01% + - 0.01% * - ssd-resnet34-1200 - COCO2017_detection_80cl_bkgr - map - 0.02% - - 0.00% - - 0.00% - - -0.02% - - 0.04% + - 0.02% + - 0.02% + - -0.01% + - 0.02% * - ssd-mobilenet-v1-coco - COCO2017_detection_80cl_bkgr - coco-precision - - -0.08% - - 0.01% + - 0.04% - 0.01% + - 0.04% - 0.08% - 0.01% - * - unet-camvid-onnx-0001 - - CamVid_12cl - - mean_iou @ mean - - 0.00% - - 0.00% - - 0.00% - - -0.03% - - -0.03% - * - yolo_v5m - - COCO2017_detection_80cl - - map - - 0.00% - - 0.05% - - 0.05% - - 0.07% - - 0.07% * - yolo_v8n - COCO2017_detection_80cl - map @@ -213,46 +165,60 @@ information. - B, VNNI-INT4 - C, FAMX-FP16 - D, MTL-INT4 - * - chatGLM2-6b + * - chatGLM4 - Wikiset - ppl - - 5.24 - - 6.03 - - 5.24 - - 6.03 - * - Falcon-7b-instruct + - + - + - + - + * - Gemma-2-9B - Wikitext - ppl - - 1.65 - - 1.76 - - 1.65 - - 1.76 + - + - 1.57 + - 1.57 + - * - Llama-2-7b-chat - Wikiset - ppl - - 1.58 - - 1.59 - - 1.91 + - + - - 1.59 + - * - Llama-3-8b - Wikiset - ppl - - 1.54 - - 1.56 + - 1.45 + - 1.48 + - 1.45 + - + * - Llama-3.2-3b-instruct + - Wikiset + - ppl + - 1.60 + - 1.62 - 1.17 - - 1.57 + - * - Mistral-7b - Wikitext - ppl - 1.48 - 1.49 - - 1.39 - - 1.49 + - 1.48 + - * - Phi3-mini-4k-instruct - Wikitext - ppl - 1.52 + - 1.55 + - 1.52 - 1.56 + * - Qwen-2-7B + - Wikitext + - ppl + - 1.52 + - 1.53 - 1.52 - 1.56 diff --git a/docs/articles_en/about-openvino/performance-benchmarks/performance-benchmarks-faq.rst b/docs/articles_en/about-openvino/performance-benchmarks/performance-benchmarks-faq.rst index 4bf0b3a0acb19a..0f70c93e9c8b96 100644 --- a/docs/articles_en/about-openvino/performance-benchmarks/performance-benchmarks-faq.rst +++ b/docs/articles_en/about-openvino/performance-benchmarks/performance-benchmarks-faq.rst @@ -31,10 +31,13 @@ Performance Information F.A.Q. .. dropdown:: How can I run the benchmark results on my own? - All of the performance benchmarks are generated using the + All of the performance benchmarks on traditional network models are generated using the open-source tool within the Intel® Distribution of OpenVINO™ toolkit called :doc:`benchmark_app <../../learn-openvino/openvino-samples/benchmark-tool>`. + For diffusers (Stable-Diffusion) and foundational models (aka LLMs) please use the OpenVINO GenAI + opensource repo `OpenVINO GenAI tools/llm_bench `__ + For a simple instruction on testing performance, see the :doc:`Getting Performance Numbers Guide `. .. dropdown:: Where can I find a more detailed description of the workloads used for benchmarking? @@ -50,23 +53,27 @@ Performance Information F.A.Q. - Public Network - Task - Input Size - * - `chatGLM2-6B `__ + * - `chatGLM4-9B `__ - THUDM - Transformer - - 32K - * - `Falcon-7b-instruct `__ + - 128K + * - `Gemma-2-9B `__ - Hugginface - - Causal Decoder-only - - 2048 + - Text-To-Text Decoder-only + - 8K * - `Llama-2-7b-chat `__ - Meta AI - Auto regressive language - - 4096 + - 4K * - `Llama-3-8b `__ - Meta AI - Auto regressive language - - 8192 - * - `Mistral-7b `__ + - 8K + * - `Llama-3.2-3B `__ + - Meta AI + - Auto regressive language + - 128K + * - `Mistral-7b-V0.1 `__ - Mistral AI - Auto regressive language - 4096 @@ -74,6 +81,10 @@ Performance Information F.A.Q. - Huggingface - Auto regressive language - 4096 + * - `Qwen-2-7B `__ + - Huggingface + - Auto regressive language + - 128K * - `Stable-Diffusion-V1-5 `__ - Hugginface - Latent Diffusion Model @@ -82,10 +93,6 @@ Performance Information F.A.Q. 
- BERT - question / answer - 128 - * - `bert-large-uncased-whole-word-masking-squad-int8-0001 `__ - - BERT-large - - question / answer - - 384 * - `efficientdet-d0 `__ - Efficientdet - classification @@ -110,14 +117,6 @@ Performance Information F.A.Q. - ssd-resnet34 onnx model - object detection - 1200x1200 - * - `unet-camvid-onnx-0001 `__ - - U-Net - - semantic segmentation - - 368x480 - * - `yolo-v5m `__ - - YOLO V5 Medium - - object detection - - 640x640 * - `yolov8n `__ - Yolov8nano - object detection diff --git a/docs/sphinx_setup/_static/benchmarks_files/OV-2024.4-Performance-Data.xlsx b/docs/sphinx_setup/_static/benchmarks_files/OV-2024.4-Performance-Data.xlsx deleted file mode 100644 index 9b53d90e0862dbdb9a1e614cf9ae1950ccc6c4f2..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 335026 zcmeFY^;=x+maUDuyGwA_;4Z=4-Q6X)y9IYAxVyW%I|aerf;)WVeRrSTr+e@72YlW2 z!-6UnbwNFA-gB%m<|8i!3Wf#*0R#mE1VjvUgi>m82MPovzykz?3IqkBC2VKwY+~!I zr|fQT;-o|GW@AnC6AXkh7YO9@{QrIY5B>xuGIZ?L7|}v5DbD$G*JbrlnyNA+J!fs^ z3T(?4E+AXN{loOdnqKZM55c$;7LpU3qOvj+`5d!%9|mM~x%oIf)l~wy^JGf*SS7@E zjfsq_A{3{JM>kIi`*)4?X;Z+f6Vc}P6jJxjZr|DyPKPy9{kb*ME$V8T-p5i$F%1VN z#TSLz>`W!ATfW=fWiImQUAOA5x_J#al2yjKm}g*(9*?p!?KUUjd->WERxuvCP>qS)lCgXa#8tVsl)ZN3uUP-BrT|O zWZ@_CMs9}ckM%ekLsNyNgFvfinxKP>Wvxceix`?vdRV5Gy|woPnq`-J`C8Ay6n05E zF+0d(Y8jeY^FgVn#0T8h`{9sAhSEck^!rAC?t{2H(ijQ3>&KM@9=^|S^RxG^S}EM| z9!O&tct=U_AIRdmqDn0ELKYerj4W}iM)$~sP5h+jmph>9}t}pBwS?_@Od1`N2RJ5sx4ct4V@n7Lj`e zj>Xwa-2U;)9z0h|&D>EG&PFlSgh4WugaQ6lIn7Lk!f!{YUml+YR~DX$u}U1G`0@gm zk^72H!t`^UZyAY$F;T!MxCurQ0SI6eCd!vs13V#)F$Cn`&}Jr3A5fNgDfP@nfnbxR z@!XHZynFSnd8wudtTfWe%!yf2M$eo`nY1R9Vktun&O{s9lbkbp@b+?qvO1OV)3J$ zzz|d9v5SCxU1A=_f1Key(wMbjrrQCZg1^K;!K8x$B}+#P5ujUi&nE3kWgzX!4!wQB zh~y02sYB3~$XHaK8DyV?1=SYxi(X1c66@}GnkdGXU}5r_nqRMFG9;VuU^-`~OGUDo z^ERcXV=K8H)`z4l$FT?~+*&7{+E)6QExoCox14 z3Y!?B6{fhk77W&`O$wRS`WbXwFr}`3z01etG5;s%K0ZK!f`1V2b#en#L<#_1Ve;}wrG$(! 
zrp)i^s!c@2y)#!3fnCpnfD{q<{OxJuXD#dTr zvvtL)wp635x3AXMA{b5_y>+rykN>!XVB?w zT>+09!m^OrhE-bFhtEP0gKUW@A3N~T%$5^S5hOo^XpNaNm?PEo�Oe>pf_fEvidk z8iRHPCJ_auN=CxlwW(Zsbj%iPCyGMauw`M!3?+-8?x8icO36wZsO>?0tnIco~&s*9JCqImW$QtKN5o3(mKe!Jq)n={GlMa9iI{YRY?i3 znOOOO0ReTv0s$d@evWR|46b&LR)%(VR)54-g{qC*E(g-P&)gfNUQh$!#oR#n&0JXB z7nyoB=oJ_kFYUAe8`^6{P6V{Pw@Y@}O(DQOk}C>nW0w0tTH3Htj*kuor-$Ls0~O90 z`{@NFngr_oeXraNS)+|*c{+=6Oi79I^#7Km_C8jLdH6@bi0ZESF zU({vEW~y1bO4!XsuaR2ek&G^{;I`mFClvg~AT>Bm6DULwXwjf4mbGYj~>%sehf>g3aP;vH~ z=_wHUkpKzOWbYiAj;(49HY?8tvKzSdFhz9!aGur40Ke;<)9~48YD@n%lxN(bD8CAk zCC#S8z4oPIUWmE-^mu~@fw>D|Ms$d7lhKZ_U=Co>GkXj!-Xf!Hq`b8oJI71_8&CzB z5YMMwuERBh*IXS?gEsGfkK<9me=Ah+634P2aTe;qF(7h@%8k=@I8Y~MFg|l6Z=C@p zZ?joq4a-koSTaYhPzmIbY{^1>h{1vB??^U8rJCpRgMKCR;HWHqM#U1$20^q#)^QAW zMgdCHUZE(hB;CEqoD@gamfJMddE(4VAk+AlX`=~?+d6=LyF3Fm(BT^SKPV%K=JW`IWSC`iyj5O zi3teEz6=Hjnu+ZRD7}!GfFL{&D9-RgRcdWku?d#cutZIE8A=`bkxf>wMnR8|26nnI zF6YfgOmLVc)nPMel(kXlJAXUXc`-^$5xo>!zh4g%%EHlInyT_n6heo3o`(CdKUV=*Y#!XX2S$z)DH@k!*)UH zD>WSal4T*Hdq;^$hQk6Bf(#1k%{t4A2)kGls7;66F<&j6xA+&UJ-2*wG_I2cd9~WD zkCxb;(U2^Cr(q>p7V16anLT9N5TZz=v4V&^Wl*nnW8q;$8C=SEtYNNRZzV+Glp|R* zKy+yS_DHy^vC3n;oGzqSexs=}8^}eWKpQ^f33jzvb3*;%fTpY6g-Y-D1%d;b(+9o| z|0AdS-k9BXL?4chwoKioQXZTh>fV#pvA&hW{WHRU+5ge=^zR%#`#gl$odaC0Q`B=_<8RC>_{0WJK6fgxzv39omHHTu;D}Nwe6ffr(_fNuxA6#%ES9_pD~nT%}GL zxbWEAAa!>qR|ZeOjm|!($tscI)jyex3Z2eb4of7>NfD%B*Q(E=Yzl7j=-g-)buoHE zEx7jYJ0P@l7NptSht9|!-kOJ(T)T7tXD&$rYWZEbGZ&Ju?^b67Qb321l3IS_>M24# znrVm~hC|hn$oV6d17V4%Xb8!3Lhr{F4+Ne9m==;U)@-Ed8pipi1#Ml7WJV8MYG z>Mv4x-0~kxi~MZxI76%j>{EJPGxZ$7ueaz0B(;ZuD5z*7`0N*nq$Gy<#6QwSR9T2u z!BjvYAyvnpJLg_}0;cOnrb#?<-?+b=*l87zmWqmVN-mugCMI1leW#KfVhm^eEz{Pl(DPn*q5 z07L*7kwPwEy&>~G#rVk1(mA9MPF?IIkDEHx_8e$`2GUSbNN#qL6pvy!qGDLbie+Es z%)X}c;U`7i>t`{xeL1EiMmD#_$zw-EGoM-GJm!kdfdBGc?#qe z1a?k6=f0Ve$o0$L(AL;vrez+dJat&EJ+G>{C!JVABAskBh?<<06IId^vA>~mtA^RD zSm1?aAi$X5)HUm9Be>|1mt*kEWqEkLgaFx!IG7(^9)(J`%etY~ny&WJNRyEOpNq*o!Y?G3zx6+~Lj)ZjHX|nm8e@<-dG07S5p_AH>i*f%2?l*i=XLW(Imx+m8&1+W8 zC^|;;cry2_@}BE`Ea{C0Ukv7|Av)&j4Ye*Ug@yLv?v+!gbYxrVD{=ktRzfwK5KA9s zkv&9puPs$xy#WFiw$zI`|B^=d)l3WXHiukU(#wW-jv0Ev%DJifpcXM*5J1%ERn zX=S!zbdKl?(JO`MmF~}#{P_6mP)9W-E<@)aK`XJ){JGBm@v?HPj$Oc~SMeb^4=@q7 zpK1w%j;CemH;Dalw4oqbilMo-vs|smlx}l)(jhk`_RFp|I|P^fWF(mIhj)s$3J;>e2QE8u>RN*4gVk#;+Kb6+P7oy02Sp*+2Ws6 zb>B0y5yrg_Oiz940252muFJB^30H;8kNWt z!Mt;xIEDpSbeEmT*OHJ}HbG23+sfkwWR;7bvC#XzxuG_4)2Cg9 z@5YEjk{g(;FcgUb49;{fGJ?Kd)d7%_6}#S(+#AAza{9-+J8$hzf*F-Iz<7c}cCc$) z?0ZofvjID73!t~D77YsP+vTf_+*n(8EfU|YxzY-x9aiUAh}j<)o#(}Y3o*FCJGq77 zoY(Vw0Aszn=924yO6%6H!bA-?NVUmX&c(=*;V4fxMav=8+-EOXgW9qAy;czqXNsn; zK1vc21`UfpVB;pH((M4#>O(Cp@wPABu&9F*J-W}pUAaYe320<$OSj-`o+PxLeXp0y zPqItkZ#Rj`Dy&jvL^KH+NW+Vy>O&UTwD~G+)b)-}%2n@kgI>3zn8Gtrztd=^UQBj0|(Bj+n>P6b?{#i*is9?p35a)Rr~vymzp_O7jqKA^rLqv|AS%GxJ?s za`vKodS(H1HR+M7Yo>g5gR(9pQ z7&J#j!uMjv_LZXwQllLspB6fSRF;sBG%+meyXM<-4z|e;Ki_}WPhvtO%%A$v6TJs& zbC)uL5oP!y#RvjVq4TTj18rp{u!tz?{B^U{3b1fzUlvlf;QjFXS%`9bRclc*^PYnp z6d~<*Hg(m+`zzPt(gGcQAz`>8Zakj2QWzr&0Re5*rAhYjmhMT<7Nq+?sFPP>)s_UP z*V&E}K@3vSZ1s)=OD_$TJD=f-HvZhiW5glF$8b*6J_RyfZj{ke5}Z3E<60(=MbBod zSJH1UEE`)O3sdk2H@5;7j+-pESLcVlFU$lGk z=L1sZ{Ff{yfa^OuG&f1FH4w&R41*nCu`v7cz`7@WKQZzTxL7OB+Ga4Ky`c|S?SUJC zx~NbiLoR)@Nv4%7zMBMx6o)Drl8`8Rdy!5Smrl>QXl#&fi$+6&l8QiGg-Irt&3z}FsmY*D42KLUue%~EQU?(QPZ=>kkP zT}ft656>z+!=$XPhNnN$<&dTI#8~`}#?)PS4NKXae;x4-;66}fKoka8x))Y|$zMe# zz(Oj=syk`K7Z}QG3Ls%=AYvwP%5W$r zFLry7R{%l09&{&gUigA^|I36hV4^0@a{5%+udXZc4Ppfdfe&Jg@~1h9K9oKRsvnbx z{>}~+QKBIl2_jk0o7yTNWj=7;rDrc16BI}&Hci(qNCYZ``k9~+3(-`fF4S;;E>3nN zX4Q3_ZKb5O^z~Fq4`rgeLwTb$y^hh7jwRg#yE0TZ^dbq%@V9hdS8(#`Y*(_|yxIv& 
zw8pMNk|foR1yID67}r;-#f$4(#J{iU|LS9b1*V@Fd`g1Ve-Ag7|1Jx(_y%31Gs#NX zo%0gsb{oHp*M^y3ORSP6c6o}8k!wM*8uZ?;B){9e$9=004Ls^JD`HI%4*p3bLV#Oh z{a8~v6LU$^Es)J-Y%b0KshUSJe`N6fzG2OQP}z7xcq5dOY=N7y#jqv83FqO1I$=v$f=&w+2s?*I zFNJ?lN%|<*S?5z3P!2}$I<@;(mps}e{~QfQ@9v9d@2i8IpiiN8?I@kOESZY*z-D;m zf~N4{a;JSY$qM#SJWSw;PKosT`WAS=vP_|u2st^zhYIZZd$LI@;J4S3{ff6%ZLzvm zo~SZk(S7&)>z;M>hx$Dell!uw?fC))Bf2J2mtwW{O+aW!qw#V^ zwc|(Jm5kX|L4hEQ=xP;ZhS4NzunU;{)~mv5AgG&10Kf=*Lj-~T!riD1rZ9m>CZBa; z2m1O*7MIE=>)RKr{hvWP_L5-?V;l(aqzDN|At=e~RK(d%2Z9R5?#g?yPLmB94q79= zH^M)nL9T-1Nd=QP*bt~Pk#;!WDhpOB*Yy4q6chFAK5;PEJ%;anG~V;12aR0Bs!=F0 zzf`($kHvX_QV}3y6I!+Dv+N%yZ!>otFtNX~eE4dOqD9mO@5&<+iBcznm3uALD<f<+2&8b6hEfG<0xH0}v zKyZ4;L$9K)o+_63Ua()FYCv%P8~Z|a(@#EQ8LR35>6Lbd2&!rebua!*IsF%lvvzXo zY!3{H75`?yR4=-l>50CC5Pa@tmhuatV9qYk>Me+|?r98x0e|+O7WFM$u^mU_4lE}f zmuz;fD^bJP{=2@L)m^u}O(jXIJ_Q>fQ?dT+G-l=|s>ndc@etZt%0frk>}j(;O(*up zGD2ORP|joqZjO`$m(jM}Yz8Q7=ad2U=Z< zcZRlFZXSh?KF6?LPA%GK_}~j%t6Y3_o}RsXn#sZ~#|LmPot5lV=uK>*_%&&39g8&Q z;Y%63al#r(6CpKVsx{#A6x*%uzwc30O;57d?Jwtq6l$uTUeK(<%c+VWsw9P)9pRHm z>DV%)b5*ceM&58d`}jh&j_lOeot;eH_+}9En9WaWLDhwkf#sE`Z4{{-h39@M;0O=Q(Nr#2pLj8=yk7Wa%G%qQ`mNM_c5g( zKSBa^R%)$i-p#6eF`JD*@~o0uM~PAg6Tz$pJ{yVxi5(vA=S8a!FasSfb?sT_S!qjY zW@Xu`%xk{dEB5UY^^&O7EiH=;6<63kKWlyi7A1;pBAPO2EB9?R+y9dFWN88(;?am{ zza&AkaK`+UHt9kIlG)UfmREA{<11`&ftbyuM{lau>!&gB-{S?vU|b28Pb^b>+6({Y zZjJRHSdRWnbJE=5({4I3#?lHBm$O2Cw<{|fwi-^=2j5+AkuAI5;AWF>DHI2olj*}9 ze3*tjVe=E1MBZ!T=Cz?DFb;*eN~`?TN_jktF^M%Yrah-#D~nEf9Dw)9^m=9FqwHjY zKg0meV0);wU>2HoZh@3^j3td<&-Dkf+(xlxoQJiw1S&IeKH3>cBUv|L*?TaRue;($w#FoN z^^ds?`e`P->2=OTRCc;YowMcm2P6HA^F?FHR*A4CG5h^ux1Ez?;#c*v`F2NFUiQkr z>3@335f37kraM#(PqDp>mU4BJ*)t~Zc{x(%cQ^jz)2BAnu<3t^g!Z;`zS{_fZJC4gIjQ^7Xae-%6m zB40xKl(eWrrPWsX_pyq6zAS|IP@RD`%9`!d9FQ7*4VR%j>sl**))q$V2K3Cg%cxsFk```(Mj2KSoO`PC0a}e2$?zF5@bPIEmn1UDD zCM1$dlQtzZd6uQlBcyjv3Qdcim>vBMT&yV8!8`7w6ZvnDc*bW`6FM51_;v{=l}Z$i zFm-<5ay6Q-V0chBIK8WABbVA(z?oE!5NRUZg+rIh9OSTZJCXa}RDHU^v;4;g<4QY4 zVPFJYxk_DfBvOp3>#m@a+^Yy@9(r58<(}HV<`su{8Oqf+QEcYA_B4*|+1jZa-F2U= zsfRktB5qhSOHa-bc7tr1aUhfhaBlV##V!S_q`&cIj6$!v*)rgQwORA6_hy8wo8GuU zItT!8XBvbNyW{!)*@lf3wB!JcXz!n-R#@VY^~)_%j4xWBt78kiVoMupgb%mD3r&`J zxpexnB5>o*xf%DS<5jdqDb<|3g!={i+;n~#Q~;+as-{CHE}6WhO}rBd9Lav}ihJ(X zwus>Dob5fl%<7F!z5eBfFF~W#x*H>&!a(jRmtAnZET@UcHY z1vRwkS$@m$gUC&Osq7x#LDJF^B8Yf}s=0-Iiy6KHy|EL*R!3RWFgPB1VN6{(=q;8Q zi)S(9EZF!XetY&$ffTR-)i zo`i5qOzKh-8xW#h!5fO}lL6s!YT;Wz{m#%zcM{D8`8Y2Zobx*N726 zEN0G}1Rgzw`(uEq(qTgN7${2}gGditmsa}^q-+@)g!yKC&xl2`bg@C|#%Y}?hAD(+ z%X5wLs>1cP9~7}jnp{@cjYU^bLA+WuP2lp(AnhO6xit`^U5wSsX76LGxXP*(~ruy8G(a+(nF^;N})uo$*x4A zi~!1p1)5a|I6zCL9Fg4`eb#Gnv&XD-c*HDo6F6TOtXh4 zOAE7rmVo5O!sBM)k7X!^LvH^|$0>PSljE^DLkZISJM+h8VDZsJ;o49I zAYh8G-Q*^_9w)zaD%v{~n`xkAKXbEUm5=UpjPzU6 zV^a3MWfXrOaf_0pNTAMPuzh>_PC`Hs&(gm`8IVg}_=Z-?mvJ%d+?X`3K0V{EX{=#4 z&pRhXUKYm|Fk%<;@?d<#6<`GBe`f8vJ(Ja5!;W+R9(=%%o~}3|#_o5C<4h_+2;xK; z0hON6hip4(F#od|4gU8pR)kJgX@m|RoM&C0-?gq=2P}gQ5y(TAS?hM6n9f4(52_H9 zSe|O=ZofnW+tq7SX*)H9x=3m)dlt%!4@k!dMyg2CTBop2)BKgW_6@X|o|>*!XcWrQ zq=LBbeGI7~8%CH-s4mCY;LtgL!f2AJG5^o-z<-z2b(zg&L7$lB`1g{U?Qcw5ME(gt zx-p*#gSv*By!O6K)k&~9TTm4qnu!und`aGy91}hEy;N_-00GqKdwQ8?yAIu0-qxt0 zuLjgAE_0d)ia;+2VbG`EruD|^_GMqQsBfYH5{s1wWL9Nb zg&3F*!k{(#+vW~26tYEdd$EqsutB(OCn%#2DJ;OFr+no_2{|ALp_ii4FB<$Ncpi-e zitei3tus25ZSj|nTfr_PX1+*dNKA8#K=A3~_8*#5!s{uoJY*MY$9DFT46$&IKR&Qa zO2Xt<((Cx`{Z{{^bf7(wqXY5Ay)R}+VV-}bHKd#9pdC%P38t!7FqvJ@cZQmK*R<_# z7@Thjw>dk4T&b?637n?|tMj=xSaN1rQMDGV5}pZ^8SjO=aS#7Aa+{ZIb#S7^f(MS9 z@liodWC>U8vzsoxr{tiyUv{!NHoOAnWb(@FZE#p5H6Q33bepYmYRgK{gJ48XN&1+q zw_Dzg=Hwh;U+;z9fxq`f&Wyj*AFX>5NHOH@WsG?*wj&ay(1haB!p9!Su+So~)PU$j 
zT?@`7GdKssb)b*|Hp0mFJPBRv?8`HYI*G>TY`aqwzOXG>+?_u9XU)Zgb8zQ`T_ujh z-bsQ?4O8fMP~W|55Zoxj*4WYmuJsD_&9VcoL?qN>NgWbvuwc6q0%&-_cUzM`_Cwx> z-~MTbCMqj@-XbHt)386}$!(F6o`*mwtFUkT`d zU1Ux`5nXhMY9Ab)Y97600KR&$1ea_T*Zinq^1@UdvLBw}ryasha$nkW?Q^G0#iZ(? zNRK))W=%6~7-bo2M^m$b@d`mp%^ejJ9;!3>RM^2@7XC53e-!pvo!xgwq4JS)nx1FK zuLPSiA+F2B!XtMBw=dd;DCl!>f!KuIPAe1fha9df6Yc;d z=}p-wDeV-7A@}RzDs0Y?sNU_V+XEX1VV6EU)z>DT@l5TwDJ9D}nioA(vS)Pe`H^hd z-dtgG*L}9e5#ubo*|93FJUM&kjdo8g`?JlEkE5nE@y0=Cq}z&xS1_Xo-#k#Dda@nA z!*(=_EfjwM_?#{GduZ32*!w>_^dFJ^f57cAnyHb}pwsCxU#WXcwl(J|g0AYmoz*;LO&TS2$J*0lZP25AwMBD1($mZb_+;kkNQIH=(^=hKdgM;~RemMlUq!vK_e0wHfcNP}Vdbb9EJlAR0 zU0TsJ)ZD;4R8olpyaL?=<6dX9wtTmAJa~SORvnjMsx$+cR&xPSwxx!4{sx%Ly31eU zi*mwH7lll9@86=f;*SuXkgN-I)-h|+xPAqsssgvue}QAuj)0;r{V}wDX8fB=u5d_; z{>YBG+qEs}xz->kb(G>?k~^F<=-&O4+*ok`)(m0)7Xlj#ij>kgiBj~R0vl^}ioCq+ z@>i|Xie#Kiev%*X*+<6H`PJ(-<;zyM70H4;G0Gt27O2t>tS0p5r+kEBO2%Xa`18dc zn%fC@5A{zsgrBmL1^&Ik2K711-V2Y9yDu4Jz$$i~i%h|>*I+JX>`$V^~`VGZ4 z+i&osj%rfo*1C91^=DaipWxR0xqV$$^ydU=wVY&8Hh8P}L4&3UitPwAhZfBQV;zS1 z4$7TwbEXIHYZb_us8`se8putZc62C)gCBkyuF;^A02n$64YP~xzs!(#Peko<+r+h2 ze=$m87p1Go^|fYdV{135ht9Vmgq+qD7mO)U3={p{JAqZ6>Q;jpFQ5p;P{XoNuitP5 z_MuQ7p#i)CW|0AMGR0e!n zVQ8;ON|*gzXn2n0K~O4etcrzY8)bldCdVk~rGUvb1i;%1L8vy7V&MUJ+SSV8MAZ2} zvQSGDXEOP>?f{M0Fh85rI*y1#uy+xqq1{(H)MVIw3}ZA|@pOAR9gmBEPbAxB33iBI z5(ZN>l&0-~WUn-#VCdgH-n?%xOPa@#+dPw2fYMSMJ`&?loK!M0`XcVmx~0#s$x!mE zx*bt(Evxms0i0Qly||bt>4l-q@ON>Yi5@!m**K4K`Pm7kD!qPyKQS(Rh@Po~>oq3e z6|y7=lhakFRs`1`^_?9F0a@8>sbm~V)~^fER5~*yPJLx7VqIPjX+thck_?qryhJe3 z>U8aeE}9%X)u%yYurkd)=ikcJXD~QEIjQQFE)&+2wM0h)hSk;Yd_}-&bdTJ&h1heY ziUOKK^9m9AnW?$!OOKb3G~%G&8uvrbP7rfTau&6*S-cm-{zA5FLyY;B>C`Lg8<;Rp zX2o|x>;bM;V%6!p;hTn2P>{n+3N;OvJ>2?C!une(&m`?~p^N*F?=G)u1-PfNQ^#c8 zGX7JV&*y4eEQl0>N=%1JYszzNHKUOnPQ)ESG@!9*9^*j!`beuVh9jKDWvR6k{DYyIO4(cY7GpJjKzQcf%`>fbV@yc?3y)mBk#A@SY7(8? z_?IzEN9+Qo=kLwoSi@Hi$%})A?)wb%S`JPL*;pY7U;!b@_l;q9tJXos&w^Do*^tHa z%+rv7_c#n@mC6QaxYvsoU)K;@lvHq}k*;%H?cV?$jz?7#3O(^x8pq!`^{)y_O3?17 z+CDLY{H17u`u8Gf0#3?YW03XBE=SlV^{Y^W!ic*}*GSjJ1O(5*c9{9BSh5bulQUB6 z^Sx$&bT(>#khOp)`xN%a@Q4ahRQI6jG2f?kU0I=Whr5%QxZq3~pr60)(iJepF{~vI zu-tNqmz1iIXZF{`whF7sVGYueD1BgE7CH{EV7vAL2HiH_4qksD$>H2v2Si2ycrE9L20toGj3v8OB_xdv*O#QFY z1PDKGT%;}NUtk6WgLsydG;Vx!@-8=%eWH16#Pt$n?<<7Fie~#i_s)#!mi@5BW+6cG z(8*g$&4Uw`(fYJQ_IfQh1PVjHZ23o`xYVg^yePO4iiLWDkH!PtacqLi?pyy? 
zqKJzM#L2VorR9T+L7b;HXY=N2V2DTEwA#mcx zdDD?f7HNKAl*#w(7LMr?_f4w1=gO@%voIeq=87dafpG7cVG@P)4s7T*4 zkgW*GN%(TJ7IG$9*;rI}J}@};GYGZH{k_0eU4jme!Pd6x!`q7%8IgFoQz_rsE^`dN zeRrJ9R_*~gW43O}NTQtZ)|7AC=eyLoaH3`!s+jdpS={XwPaav>RuMZ`0T5~jPGJtm z#$hAZ8nm(L9++Y#*@#VVWP_sAS_XzktsAwM1Yb1<8LxXO0}&xCeF4aez*CUDc8`s*)8LEm`r&iTrPj1XkQp}&2`|2p!K4XnCu z^8e;D=YQoh(gUB7o#7{+IZHg1RCsk!qm0muw23HGydMx;;&rIzY#ZXwdviZBupiTS z2uuS1M2l-6Vm@KWc4^e2?Nq;~32E$t;%Xb{Eb=vVw@}$19iFfH_i}HS7BaQKf|k%) znwrJ~dur09EWH-5FjT2Oqs8Cl?Ec5B#U>2#GDx8Y7~<5LJV=h-h{7sK`d)b}ve!!p zNRzmhc|J;!Q`+Y78-WSRcCJ~&iDy8y7(Z%?2_oW*pXQ5H(yMSk?P}z)8HIk-{tWH8 z+Z*<@0)0(v6%E3XUinWWWLF>k~`1=dh=Llw~{ej82^+lWso(D@^ru*)GW`O|)KdHtwq8vr1T$d%7>7HIm5XAF59xXVgcFfWj;cB|jo)GAN0@=*$%*0#2UWeVU8@Y$)6 z{@UZm5Z|Z6ZI8=)e?=szg_(-PFC)R`A=wM0;HA;-bG7k^0drq~Ie-B3T8RQvPw94+88jbZUPjhLJ zGzH5JMIB0bH6i=U@E{^cpxyUxgIecILlIbak8GKsw+l`hQk8ZBez4Ypfvo9xbAK{m zk}jQ?6Lw+9grucLX+dqoTvSz0^$wcVXO%c6mZGwwS!YH6jZMAvfVi4Ft^PHNMhHNX zgS*uRx% zIPY19e`Jyo&8o(!gqLAzs@P5bweIngP^O zpyZKfkJTa$sFb!R@01hB9(?&`o>4{wa{V-RjXJOV<)d3>oYz{bW_R!*Ky& z!hX5>faQr!7MjKqz(q77Qkkt!WX1_#vdL>z7ooo+NW`_8#d$evyMhbv`5|4;F6ulw zIONmg3da@C8YDn?^CjT9qb8{Oxy51@Qj$E7#1qexnBw+rfm6t6Q=-EsLK_WHmVDc6 z`GUQ@FHyR-NP8idl!j)F_Y8oLS?6tCa8d^qKw%M8aUX`PjkNI-C?KgJ5CD8yx<46i zfmXBorBA=VFf2EI&3s~#0nw+tbjEEQ{LKNwS+ZHt&hlFrw)ejr`>?m{WK{V?^YZ63 z9`@&w{Hq~-#=qJ8SH%C<4s@c;Kqe$1dGRDl!P80B?^>ScpAo-)ly$KL>v{L(mY1_) z0b3auUX6Q>Q}mn1vwdvk!&oD784VJ&(B4&asiboT$lJThGxl{1 zwO)~DVRO;;x`)7#v_Fp33!YIw4t*)K@-%m9X;Sh+U-2*fc`I2Oig>!{KE}cL2!t%C z)~?XJ6HUf0Wors|GdZM5txr17hyD6Lbncu^JOi%9sH_qD3=@r?c8iqKt3Ja-0hFU}ET{R~0zi$B+mnGU#|^u7GGwb-Z#FxWx1w1@J#u{-X7p`y{9 z*Rjf3c*n)e80za44*>JPl8#jyCS!+Nhs-4+sAFCf;&by*ge!)c5JsfG5H-mFA#;c= zl1TkVvR9i`E7_GvKBD1+(wENt2g4B{D6%_OFI?)n#er^1af~B%&eC;=i8F~xpHQyX zXO*0bah8vn7wl+wl+9bAo8kYfI{!NSUn=>3IsE@u4(t4x>-aN)S3JWZo9Le|DX~^3 zsA*mgY&XJMbsoPm9_F5(5NSc zm(hPBx%+}9&?7N!C%(~|=26qlUhJ#jUg`@@Kuku4ZZH4E&zBcV z{I9u=GO+ZVBody~`YN$$juFSR-F2x$gR@}1h)y`~X>2LYqI~57lQqj z7y67K-DXn@-wV}!jQ-wouAVR&r)o@4`>V44t4yZVPE&d91On3b1O2xw{(Hw%qLtO> zYK3$o-V;Xs2@GAsv?SYW`c}r;#?1zkjT8XP@pGiOFPGf$45LtB)viEy0anT8Lkb%Y4MREl#et`(Pv;}!4j61!%iC`7Eo3Sa_SMBaJtE~#fUtc zUmH|-MP|||XBcRfM&p+Dob2I|8QN{*Zi+w$LKda*;n6f?y1dq#v}0x9zZ2REIGz%1 z?e;DcHZx5#xLB&wx9k-=g%p z*TvRE@6(84$BT&6cezuXFnRt_bk9P$8d_o8Dm1&>j#fk%f~0_tpOvc#OK3|0IFqkt zOq7n~5{J7Azt*&oO6qgTRu@lbHhl}MwJc=(@db7pZjuMD{KxQU5!Y<=?9y*=uzp*8 zmDba-r{zp-F$*qZMtgR_f>1yS9T{^)+50%7kcKQ4prXzD<%rqA#D$5#5&J%MZg`Kw5zWFyVMBNAwl@U6V1Q#kmKv~a4|la!Cv6)jlQ!K^goihoxUj0R@S}{kvs+T;5VaO>1 z=Q&0r&8P)wmuK7v6Np;r5Z%*nZr$&-Y|HLpQ>@d^SZ^&n2e56{7wlZy$8Y3YEt{GE z8v;e;A^=J(bIAIcur1Q@jABMtCS!S5CIZ^xLZKdC?^cek&c{{gDgW#7J>>fb zV#>9vTDPam-I=S}^1koeTVzhq_NMpa^+n3ZYfSlROwKQzw`sjDFAt|hUWW7oK>4N5 zW5V{n^ALU-M?oJbb=CT+J_b`@G&xcYAr=>>iePcYeISkB^t< zbbCFW-;852_`E*e-yHASUGlx2zlFvWvtQ~Z9@hC>9pv!6-;LkcKMsEW=kuvhs@N6 z*BvtX`>XApyzd)a5Ulp3>%)7$-aDYal{+7?j_+C!X2mPEW7Qlchv-=_nd0W=`>m5} z*~BIL^UEzThOpL&ScC^n zERF7XWgt~Il{^w#s`Y+_BBsUcWD`?9wH8kMl<4sV)eG_j)o(l`^S#yjJYmV&Wp{dH zGT3$t5SX&*Ob|T{9}6P-_^&wZ8jUqmt$V#(^~+s;TrbPnk)!Y*3%}kRGW2YAsdu&U ze+)eQei|hFSay0+lv}*n^l`rt?W*MexPHG$nYZJAPc-EnI+-sf1|F`kgQwDyLo)FO zWWn^eoI)`=QvmvYfH)se$+=u~WdU*i!ffE0q$N3inbT}dBTu}4E3LU1a-NGgocBkc zCRXIZ9cxw}73rU`C66O*%C3p-m-y|rJ&@UJGa2=D`uFg0a#HQ?dRG+ptoaJTcCeq3 zuJeJCJFj3OraHv|OMA2r1D=iTtt&plw23SAWSBd*pkoK6+~MF6l4V2l^yH6q6HNt6 zxZ;rxbHG%iW16Ohg9%s7j-;m_jl>y-TKpDW)sYqYn&<|ia5f(;6vu@RptG7H%*9u4 zq4*c$4VMs_uwIU3{Z6MsnB$gm+NNFcrW07niIT>G_v%+~t!#h~qpL%Tg^*{rIj;3- zlDEa#+4#ah*=aFodDSI?vo}in;+@2A)#X!wv(6^ru?O(5j@}xBZ-DtS;^b)6M>UDS z*Tmu}`Y?2+?_Qo|lj-Fr~fi;Pa$W*RGP*S1dIQFh5dC!&QdmAXV`< 
z36CC0UCMV8wko5Sn~%ACIOeWE*zoP(x1fLGPbXSq7R+AWmNoj_xqbzk>cnNSr5K0? z*GAh-`E}wk?{k&LW(wO<+BO2hb};aHMJ?XU`>RZk*YRjln9&A)zVG4Z`(`h*pLlc$ znDON5!K_I?sQ$px5`oX!3#+S=bt&nLCbpSAG{Ht9lJr+HqjxIztDsXKDegwWo!D|n zu@c6fsd>gp?@Irem~fh9Q-zx$p@FgAJH`_aY!?6jh2QA-Ky{r7+jBBq(Lr@Vu7k@V8sf^7NVZme8ZFujpJUPw$(^Ty(n=wREB7^<PZu#wSyu z=&$GBJ_#ond8yl*HPV)8cK5LQ?QPq+ncVqc66sTc^0PIL-5w0*|I>6gsb{PT1^ zuIl2WxWd%ma;f`k9GEwyT}_B$b=GCIR(_eaLR|3+G4iw8ddB=;wY@fXxKuCk-7j8O z`kX`^4Eu1;1RL)T)vj+831|6zta`2n!WeouKc`CSm?z!bgfw;^=%$h)V@Ejsu+xV+ zvNHU1(}$3k8!2DOV^}i7MuF0=WJ?GxucWELKXM^ze}^(G`@OUlH!r*XZ(YZkkP!j$ zJ31YYVfMXw2YMT)c@H@F!JX5y60B$3=fQj|(3_4(=Ua|Qc8gQO!BY2FOQf|5q0@pP z`m8(fm$kY^mNUplPxI0{la=4zFlS;+0b1i7O_&sM0VJ|N0F*4ZB302(Z}(| zo?bemx8ewPKxSH~7Z7yU9;xq8`2G6JboN=|tqunF|Z-dEdGM6UNER@teNKAzF2fk^4 z5=(BmLwo-5#gj8(!P&<=qTSpr@@Bhtjo6U9e}$ zFNtR&tTL!h*c!iP76@%le(wVj%cEQe@5-3R^6?|?94|9KNG#Q18Pi>-;ASJ?YRyTA zeVspblc@*Fh)#(vv*RHCL1iuTC)Hn!$CJU$w-US-Tf%9dLebM&)7#YX08q!l>h`;{ z8F(AW$CC4TK;??bF_QP7KR;N1un;JOx9fq)k0yUUaLtm0VjoFTk&kz@iPyS_D`VP{ zjrwc(3|a{&>T-2yuWjy(eapa2oLnAWEMS%A+Y*p%Mp2{8H>VryK3&hkhw*4CkKY_j zr%|wkaPg<%t)Z4dx(Kf;Vo%QJ63n8}w_vE`Hnw%ry`C~J5_UyLsv4IOY+$3`h>IlB z`s-4%AHaJ(>00i|7*$F9gEbOnhJ5SkKt$@HD!Qy4G^6>Av5yr#6^cLi z1n(>y;TmYg({Pd%cp~yyIS1G|aq>Oqfvu9TC{51`*`^F&9{xT!5Njs^4nNSSA5#%q zM-SztolsK{UAeL=F2Mr591rf(v_B1E#CtzkahBF+5PQ@6`a<`!Dd83fapo_e(PTqj zv3p$*%mrXr+4NXttWE?a7%#tKm6DmY=#BGneX#dO~W7+oB4^G{85^c91C28s+JLeOF9DrYE^p9CWuNmH4W40 zR2Sh$U4M~p;D{=hIwO}}0UTZ-eNmVj-nJi40R%+*>2ism$+sS-^Jz3o*2mlUlc-K;M;7i?T>6 zGiF2$W>OJ{cujY#S8%EL--B+#`6)PUDzH?_#vjCZ>6MuCy$v?^+%hkB0p$v$dp~U5 zFil>uxhsA8?r^^doG&LRAVbht4y@S%KwqW%+_ZvJ<3wzQQK!nn$07#dR$-b z9c-faQj+)Gitb_hXH)s{izvR(=$y5E3m&jOF8&N0;5o!Al zLhan^RvO>!rb*8e-UAB+3vSD}Y-zf#uA*}#R!(;`KnyB~fb%5nswL68S>HyAY=2Q- zn~0Z)zd6|jn+fWtnQxl?$6R+oqc_~{;1sr)-x@hpiR-JK*;H>HrP!rNm!{JM&PY^C zc>CWn^BN+8XY~f<@Rs?QR^dMX3}GFSXyP)IR}axfIFt+Du|*HjhbBm^_0io+x8>4a zr2a;q9!+6KZ5r3)a+Fl&x3%&i(+tlf)|L!wYqFoQC6~cmUTyfLv)#4r!n4GMdz~L$ z>9q@?z#=7?%;D`~I?^ks*d#tlhsNvS6(6qMpb%9%pogz&)}VX5E^kTeV4}cSWhdve zpO0AOGX@)F2)b9t>s-;%YPqUnjH`_hAKciK>1W}48d~0uhjFuZltaTA8ZAfN3iwxf z_jyt7b~0eetSpw1SMkMHOlW{0muvaHqHvcGYgn z|7u5WE!&2O*VK zh(HpCS#o4ZF@4G$9LcPGKc2;Mp4}6&UnlRl&hG`3Et6fzINg*sY6}I(Z17F+UiX zCg)|AT$s|2Dp&MjU1ek*{C#KOwC55p-t``gZDJ*SFc9w7cq0>YxL;uTm&q ztl7+GhS&z;LoDBv!k@#NFATe6s&R)cOA?Js1X*FCtX{|isv_KAi5Fy$Rp&oE_VcCG z^$Mlb)5)dci6UO#Zo7~KyXm`9U@7&m=r(%683lr|D7xYl7O-@Q?brLs|y(2C8v zQ*`?a9vU}#^-W7xXeH%MgrN`Gn}+6(%ls5g^f^F=p^AHdYQ%O0PtnY@y{oHdFC{*; z`5YhL-Vbs#Hjfd`GTpdkLqg0(I-?b*ciP;HZ==X~a|eq-p@Pmcp^Lxv`o&qKUa5ea zml+)EPZOyfPhy^q7JCa}{r#w&ssaF3I;zLY8td<&;i1()f^uEjDjXz#38Rggmc4p% zrbV1%Zm?&xDtpDbPVi%I= zpo&{<;>jBOs*1{GfGnPvRk6q_xlj#cMFhLH=pW+6m~888d|)tfv}Nx>1>?S7m>%m0*#gN$ARxA1&O;NL-o4JQHi%4d1-c zq%nWgjIHk__;u+RBJ*9-Xd@46iYK_eRPvhCBY=66i3ME0Q>SlQ^)69PwrXQ1G%c5n z>Xi(E{lOBW2dCIOlaeta3E$Frg2o1ALTOiK1e3J9%VAVpP`L9Sr&DJC*U@!~KfYL@ zMcks8Ozzhrj^~lOS9lFE7WMaVvK~IzMVB1hL3hRUGv@h?bc>UkYMA=muIp;%Pj7cj zuhJhrv;uRj{yeGK*RZlHN*x=j9C$B?@19|2hSWa5D2BzxR_!@O(PBr4?>fV^80ADq z{dHCW`OUZqp$=^3vIms~(!+XZ?5MC-dN?MLK1@+1{`TK#9Dc2~4Dk!v7115XxjFEL zppM6F%H%iu&?9c0SDjq?rpo=2HX@iYH?D!qz(5ZqQmEXLHl;98y*<^|U?C4pxMI>u z?2pIvbqF#Dvp(Lx_d;I~{Vf1|k_0Yt)f;f`pqcJA#Ygg^^!N2Hwb#~P9D77l22yvN z!(7tt^N!rt7`Vsk2FW~BHMJ*m&)&r9R18yiXV645pAZ-=VNzk`@A{h@RXj>(G`|mBZ>QU`^7uS}O z;N&?_LSrGdjBE6rRDV>*rLmc&#g~zn-R|`pgG(@bAD!(C0FwUhak*7H8cp zzn{k16^Vs)8y9%9!fX8k{D@CLQ%mFo>wO_aEAbM*$9taB`M^Jiye&*d(kK^n=$9wmHEH4I5Oe|FXqgKC4g)p zLRWnL$BK^IE8E{k7U? 
z;92qHNM*>BCMhrGQR4(>%ngk8bL&hGu&#hX#o-J6Cae?lA@) zwuM4Us`6v`#dJv&z;|y8alTi)h&u(^^5a1$_~OF5A@I5i7F#;#=p8OKvxrI@4vc>Y z3G6r@XNv)(lvKMqa=b=WpiXU-ss4w)v)0wuF{0<(jA>rm#{!~jc^NY}9a zt-+G2e~g^FhKg9r77;m_unVM2*cW{u1J=y+Ht;qmhHfq^R-0($B9Gieds9IsGp;7A zWw@z$qyjivn3q!HDt#pJE^RHUHJM?45(0x!f(AiwRKMQ{a98$5U!|zt2+m=bt)4P! zv*B2Gzlpe}Nu3ASX*ayD;qm`{xxUxVA~$Iad#XH^vwP)n0mOU*k_F`|SQ~tYw3w$h z&{V8FB^)9!$-c-FGVdT+ww7i56vg4Yy#(M{>YAOeyNc^CyFaNYSwl^5Y@jUzT*q*V zorO7iGH(!xC^`_mbSFASA=_YAl;&41XWn|A&m_NSDTjB4lGPhkUK7RMVXUb@zQ>zN zgt92G0GKQJros@UiTDx#-~{NMOUuZxR$%3mMpicF>xjW&(|96M z!QM$QdIr)4wtuWff2^n&dNcE*r=R1BSwmzgcv9X5Pjrkn<&T$Luc^mtO1@%=F-e?6 z)o}c63*mKn^yaz$<>>*RcFF4+%xiVe5QfxbiFb>O>3#T7znV|$VM-Grf z+74!Fhmtg9JF6>K6H+R>kTr#5Q09cMlXOu1hOZcVyTl?oN%$j0@;?n+e`(-Q8Sn2% z0b@uzZ=vJY(vg#m6Q$0nS%$Y2Sn3i3cQTsylnb@Uo#J8Wki^%ET*Gk<>~{yD(*#Oi zIfrz0ELwX#Y{_IM07>$1b({eTLHN<4xB+S&lk|M*;DUf$1Q{5arr_nbO)R7nlRv!s zNEO6^sg;VdtdVRYv?fshlzEL~oiJlfI_6O*5-^sa zVTUs0wbwhkxMA z*H_`$k^yVV(L&u|IQ54MksGKI_+6(SVq$5dH>p(d{sM{_!bVFtvV#9r3DBD!|=T#MU3j{)9U%%P5PI5j#bx_;R*cYTTg;_CCcp`t@A#y16Ul z7L1f3S2$}X5OIlKf=d(&>}7ZX{A;5fn-(k*p`sF$xDm!Ri_6(t8@=R=6g7u&{1oZ? zs6=0i7Wh)MSHsVLxq=kGVvbi7OCUQwSkBI3i2Zz@#umc3|8_<4@4ESJ?myI)UC>57 zY<$J-Uqp8Uoh1chP}6gZ&itT#j+}Q(PiL~Ly~(wU-<|Q(@AK$Wzw=MFENZM8ZsWVG z@4&>=X8XGh#hb?%r^*NaT?M1k4bErRDjEGI?{n<8O>eMuGg4Zr@zCWph<|YivcEje zvFNe;E#t|C^e9nylnZOCD$Q~G%1%Hov9(TP0VUUn(E2)^9T)q+{s3wnkp@hpj;7bw zyejoH6eWK>OK4hI#q?0(tzwV(u0g`37Wk#uUA{X`qNOE~%M@QD6{#7UT7sx~mcNf? z_jjvao}Bb$0b)_V|6H2IfC@hqz>wj)=Pa$k`9eA}5jA9dq`wo-4A#`?jMA7lRg|`j zz~KdjJY*ogw}5)i2^o1O!iO2)f&W}G<_8_ueKY-CSPGZ4ud;gQRE@&X`Ql_4VVf+< zSSd6V&o#1uk)cEd&q`U>yfUvS3J$tb0xrfwr`ta?adrl-uA^)u3$47j7MZVn;xNqM zUClYW0vSgObv$!JhFT%jg?z>GAg9V)@ndEO-)g)0yW#|ACD?e0C7Q70_<_YXeW5dT94v056yKj(6 zCx8-eD=p8lkF-)o88;9``k&LRh~x`WC2B3^oLpVTa+E(~W+n&9<_6mjuM2*G&2&zH zn|A`|mn*QDeHL64mw95c2~=uVBd9&_(Ql5!V$|oTRhqF1U#80MPvVHUnJ+yt#L3oC zF^7Uv=35n?Kqe8)`^u}f+X+d!(La@?-j9g9eUZWTj zj7l+XN$nPPWT2?3e+uPhnl4k&+?p_W{q{EfU;8Krc#-l5pll3h0L-FuP z0CW7;paa{(Qk)5B`bOr$@m!2DQH7>VAf82SAmg|#S4uxy1@AB(9iCWH2a0>J5{Q;Mr8Iz#qK+ekm=*{(}J_C0f&hKAW~Tgitdq4K$;s?w?exxP61%g=a8S$>I~ zql(@tkT2xAnjNf|sD|32!H&x+H;SOtQ9><|miS&w2e-crCS|6Is9=JfHpiSWdBqi%8`@dpOh;ltV3at!Wb3-I2!*54`pXxr$BSCrEHE3DH3+WlZ`U^FpFRp zt?i2D)VN1o)OGq&^A(K7ml0Qg8L=+#eqXOz?zW^D3%X>PLEC|Epl#l(WmL`MUO(+@ z9@kzNQ|b6)wh5+OY0umKww?H6-!r)d=$5cLTpIE+=kRn%a(Q{iKz9^YYHdlcKE_zm;Tc2 z@#~X(CvFdqRzsJ^_V19?yOC_nIa1jYxi__!0_%^?;%`CXDN04?9(W648=g2)b{bLH zsd{93=;EDvzt_{`lB>wy@S4 zRTa|6dR9uS9#xicG7R+!YP|Ikjo0o`UHv&jL|BSfJx2|Tg(zi-{HbD0P!_J_WkcFM zZ{Bd#SO47|qu|+IL*ILq&toixJUB0O=#^F&v5mG6h?~p>s+=5R)5*^*tndVt5Se81 zy(l%-T`harC=OLa7q|aFgR6}zlsmCVD8&;SMmT=j3LmF67aPkoztjS&<|sgXR-T># zfdzl=R|(>gSAg|oLT);C;V|1K&%4)eVR+S!4}xr?iA1qic0>Y`h(3DN{mo$7_+y#? 
zkAg5i7df6nDG&V^_iE;W?!@c1>J%|HSu{c#B+r}jgtm!v&Cz;Ee&-jeuZjnEHN#pd zBaWsV3oz;kwKzvpv$dJH?-o^2CZ$1@Ydd06w6tO?V{cSg3Z8Df0z>KOVe@8qJcF=d z8VbXmu+&N@#FQ99Q$R6mm!k#qqh*yS5Rv#hiv4(en60lf82X`9AyL5wpcStUe}G!X z%bDAE{Mzh!7`BHG7+WJg%B>ZPI70O-v;N5Ey|gk@KtcyTg(-^q;SM*sJhj92H#>wz z$eOPshf^5|Fv8&&MRPJc60;XAf>BoXk4FE9LHSi|_BMQTgjO{Qttdgje1#_+2ZA=< z@EnLE7l#+vZ*5$s2~Xa*qD{mN?^_Hc8ePM~pg+UW2sfsJlASfYKoeYx0MSw!;d1!b z%f!=ffL2mo4#qiPo7=VX{)s&i@EKaz=k~#EXps{MM^o=b!^js_)-4}+r4z&__%KuR zUnLovx4Tq2^<-nmN;LI5i31^wcL~GS>EYjy>%P}fMmZ!8VrR=CntJsj$(=WctAq_w zSOw^jdFp3Of@tPIu7$4Q23TRg$p*ZBOTcUS&bxRG^D4SG~$v~dfMu)lNe^=f$>8tn1et8ImCd1~pLlW*Qw`JT}{0zK(8 zVI0PIJRC2c9cU7l@{*XaH|SG_ztwmPai8?r-4*BtAmV~4gjcGdn>~k!f!^+~Ks7@y z#}u-7S85+?V9HDzUh*P#zA46T%`mA#328zc*|nTMMN~5ISlNzmEGhX|d1Tb+u?Gma z7#IIF|3b;=dc)@A3F6d48tMPqT_&VIu5mu-ZP)8DWb6AU&eB|$zbKIwF1hxc z#9taWH1njQu+Fu|QcA0#^ucR?nl|=2hlIpF1T$AL`;JJ!PPJ;t*@wFY%Y<*iGx51l zE{%4OqwpKaKrLzdrKPhk&bX&-P4{9FTm5%>k~n0!$zCkCaWrNF_x3!=dohFSrD#nP zjz&q}-7s)HZksXC_Y=TuP@XszdwJlspy%8Wu2#D7s5xFn5_!a%8(Hh4mywe__to|L;Sbsl9@$QvCBg#7f|-mpF^R` zuJkvN%3WmuM`vAqLGvC4!uNV4kiWe+sKyD&=EC1eX2zj!O#e6!{k%M^$gP^t@Y1bj zv!9bIb{Ffav`r7cwye5FdF@2-e?3%}c<5cz!d%!x5nHYBaVn&kYrwc>0Y{TdU-egB z9kk9vrF?q`3*JWUOeii&ZA3Xo-e3sZ7yHo{hE<3XTn^JT9n+g|hbOR&NY1jR#el9L zH*+-4oALsdBBc7uW5eQdTJR{%$}&@(=i~ObdHi|!0yO4gaffZgwFo?N9;9_mSKx7; z?E7wk6b(TNYo(WSo?-KE@LrbiG;Rr`2jK9!Ir4urH&vnUxR)4jzT6QghobGyEwut2T?io$C0SI7rbiU#P2H;1zX`2 zDe%QKy|}J@_IW9=k@cC}FR2HD7c>;P0)2rH7^=!CA=S9f#ypUk!yppXVrv?`1eO&xFPlBEJifFmOj|DVV@RgBb=k)EjUaazC z@XEooYdG~igfNg2C2&%S@{0?NlGb`k2jjOpQ6X_B+tNj~v0F)E>=;Gcl%e&k{PGWg z^+25wAIA_<(YqpPX;#3vjKA-yax@9K8dremz#gaRHDmjmip6Nd=Iol9RccL>;j3B} zvd{GR>bRnI*RG>k5YuJ;f9RKHg1Cj|7qhKBjCB_9JXn4yz+nGZfKvl)svuFRGKdRD1MnRvr4mV^|aCOGVm+O*%rd_x^FYvBKDN&5zGN_4dt+UBeFsQ4hJ#+ z*m+ZpNRvua4@NYnQZZhp8NsBE+UYNA$RzpME$;cKE{AbDb{pJpn3z)&Qu@=P;f4?# zhnv53pzh{KGksxJa)C84}#zXwlaP-5?sBPzDseeXF7oQWb;5qjppU>V8 z*n0|hkya%_GisYuEy`WFG@KL2=eiLbh7l!o<6jqFpr?r*jZd*JtfmZbYpFbk!0e#} z&sugKEV;I@D3|khz;SSFOE{&*w?)|=IZP^(34T?wf`ih6PDrQBoZrah?Kn`+$S~Mq z+dZDE1n&KpGKxc`22P@Uv3T-HQbEu80QYtE2v~S(>rVD%j`@3Z-sf{{g*cLRu%S*s zlDiCM#KZaGI%?*)VSGESaotFUob)xDV%%m3dqji$=pBoy7Qa=z3%9Oz58fb|2{lex zm`Iv=xe;Ek!J$TT>>zyG&PJ6~hH3cKq2-quNq?D9++pk`zg$Ggjp4-ZtvTOMU;Wjf zq+T8!p0uNUB5-)!I z?AraXAYM#hO1L&=OaXY+?i$o0?FD?We*99U7bs*w);Drbg2F`K^~BL|d?yLs&LG>K zQUHBbACX054m-R_5-Rr74Ml6m^~!ms2G@DGkP;3JBHDSn!mlIsjp_92l!#!hhy&Yu zVmJ{Dgc17a(97c(JNg|>{OA_#nhYa^L{~N3ha@hOBWu2=GvBs!!JswwH=|iarz@?H z)uy~mFb8F$7yL!Oauz&}hP$osNn?!HXP89gUg|@TX0ZzFE+u5fI;6XDX6kHDA>`}v zc+&*qSlcMAnYXo=Ln$PhH*MEs_e5}TRJqI%hmIn%vD>J;c6pZMN!IrY_S73?Kqr+E zv|6tUmiFkfFNba|jTOFM|n z{K7e%;}iUX&#+>>{{wF)XBvizI|szIo7S)}Uck{OoQDhiNX=NmYl9i#9pw7+lmBUX z{*cdw&?lS{1CAq)Z&8KY60OP+BZ{9eU;CDHt!Hd|1**hIcb%y7yU>`JyYB_X5%Nx~ z9t;XVFW9#W*NjvmWSF8r3A{oTcA|+hoGCA9py+1fO#l@#D;sfS_Vv~iuU&VL9oto= zukZ4Qefb*S50ky{_MJM+PY?Ba2j<9?Z@qMWaf=W3fxBm8(_en=?aWbx(oQql9j(l;_?m;IXNktm02X@hlYZR zlb5y4ykaV9$Dt*srdr`-5=?+0#2I0f8&x{j2ywF)S7;}(#S`xFAn8dPED@uJB{Uvp zovj*@UcwWMW1FBO9ik9?+!jYV6zmI24A?c_54BO$qqGmL5!!~FBIW4%Y#GI8HOG`X z+H83(bJ1}Kz51A~Le5+2x?NO0(f>q8aQUh0g^usQ-4g&kVk685M=t;5oB^| z#&X5TxF;EhNHo$}`~xrhL#dw!h9@XqB-ay~xbk{bZT;Vv>HL+yi;z|5EjR>~#1`H! 
z>OQOuGMHFdh+9`))m}NhvCpW;h7dy0)%*f(2-2mv%LM<}fvm>a zAGmx$C5I{!4wzu2L7ZZt`PIu5ghabpX)hRLTXyv=-Ogd2)OfXMzYYVWxV%{#6&=E$ zLLtg^&{1lzQ&I;LBN)AS!bKaxlje9TLg35utaa=L82lk9W0m|8n#gB5GzwDij5muE zZ8EuWiku#z-Mnj$uqFND1%EmHEiu&MI6=rLMJUYG!S{9(tIj7BS}9ji)~qCyDLxpU zZ-mzzFmR4a zkP7o9*Fqd6+Gt)BopehGZoNJdjc+EuGsA!AzhSMDLKW@cLKP`4n@y8~5iAU{4!^8_ zrwIp~qe+Q`WBeL&BD)cpOa+PC83khikNA^2nP4?tC6%Iv5-+vdY5_4HxJQ7N96}@i zjr~Fd4#@pn9*Hq#@Y@X82pk49-0hcl42(`w-5HrdD4!rQpf- zu`~R)ulZsTJb<>}cz1iTl5mQUhtFI$G5{guJIG56{GS-B2z}~`Biiwid5})PeLf#F zi3#ovpmO-~E2H99J+ExOzAZCAl<16M7aHJ1gRVtrYefOY<#`c{nO4vGvD`N5MAOlm zM7v*5b<~WOCOQI#yEazKA<&n158F<_M`5j4HwR*7^6z!7G>XArLA8l`C~!_n2_?&G zV_pu0&?q&9MK?Zf$}nY$6f41PqaavI?nbB~ln5s?(uJ8p9}=Y0f5S+IP0Q9W@};Uf z<7t;E=qm_(=@}Z3n^HtY6cV!5b_^@7;SdnZyodGarv;rXk*{C8aE|R~eDD;eJI$*| z>93NeKxoCRtBN!$f^7&wx&i{ug(kP4rbijEkq+LR4b6uLK08m+@UO>F1>zpZ1$RUZ zMpVbUT1)h3cQSSKXb-a3h1s9+o-)B~trG`W<9vdz_S0(N@1*|@NQ9L|UyKuwV!C80 zWA-`Cpb8FljO2#ZZ$ZkXFVZi%BA5n~9_KKU5nWq7;qEg!&L(LOh&E|-9ir&-eZ0R0 z_#CX7cN#$y1M_Og~L zttDWnYBe@wWUOWY7BM|OB@uSe(kO2^pTZ}@#_HG2>PhYWdg+Q4BmQt7GA(Mr6{HJ| zauLJ2Tw5M56X>O}#67$pFD4SrKu3V1!96V)KSpzV-eLJ7@t8I#n)rHkVk?NEz9(Nk zzfdQDhtj@%I^w*8E+@VzepjW9u;u4&bH-X+VN+3-T^nJ-nAsxBZ|tiURi&D#;l5a! z?<2XC4&ziIw|Y{i9{lMpreSe^BuRvRo3$%N-9QTIe2r$6T3JCTn*N;}S$PFhkr0$7m#Ak+nV%=*CoblJU|n=@9{tvY$n%Cbuz^t< ze^LMA+_RS}2sCmaK}NHFm|YV#;oY6@lG-4xI~XVXNcupV1$0m>T`}_~(?shQCG;Va zBmnM;ICB}q#@gVoLhGePoLso?kDD0nn zju1zqjE)zPT8NCvIzku(IH_=8qlKMfrTqfB1klJ~NXkU#nZge~tXfPuEc6!}P3o)) zhfyUA;5GxYBAPqB^bCm|)Jk+ueSz%Qqn>?UX_a}aDvB9M0Z1qn9Azrs8e-g(6nh61 zX#+IbUL27&^0L7UeJQGnBK>t7&UuMb38+}fs08%7R06&Zf4Yga*Up0TRkf;;Kw zh}3R67YgmX1FVSs&H-5$XkI{=|;NN9ody3JrwY)``ARN=70fts673zt4nkBiK+!c#-7`pG|{ULHtxf|f@ z9ht+IGeGHu|h7c87?-aE{X7H3w!ga=UXPOjtmZl_`-)3aNbS!lGIgPYU-Hb16qh zLbDQ8^O?0nlZ@3E>-!f$Q>cg=Eo#N-lOPK41c{!YKhmfXsg6mt#8K&06I4_^u!y75 z!pfN%P`Yyr8s7X)#!=Lh;!BL@(xkQ&U|PQ&Y*5Un1PPM)a4!JtgvZDtpiHFLIb{GQ z>%o8v*0t$zG7Q>P|AnZQN?twA3dhg0IGT>Y#<>AvF%Fqze5)qQFQ;kQ;MB_$4WeBy zSZ+9~QdLq4TF&vo(4rw_IJ3LDjlL0N=ye!$6u)GM^y263$!|G@5kobSS_7nj{iTME z_R34Wz86Z`g37Rrcc5Yz(nP6SOBbL5oIX=WXZyiciDTSiRvuaC#)u1ej5;&}@+btP z{&OD~e8mPS37gip$5>guYfjiXtvfBU0hxQ3uq5k5GtpG&myVj}LB}r0b@#7a{)-@f zLFTg-`mq5gjpPha`z3FalFJM6NMf7g#dwJ0I-`d7grD$^4DHaytu$ar?~Oe~t8#BL z^RIGSG$VTIaCSC4XYUh%$JZapSX2q7wCZ&sSgN5e>jREA{mSK%@u!1uZ|;VMYLVhj z`K(%sr~&P`Xw}b*Q+irEtrdmx)}KxB`a_ugE*k2dm!ov|U$+rXqh1TkR1za6hPMPC z;LAK8W5(q;ZA>i1GqfLQ^UxgH)NT&&^bH>MEPp01RQ)781Znn{U+XWrrwda}6=n0E zM3^?|`CmVS#TzfrBRvT=*N@bhhZFMKd8md3XpECKZ7bkJ8gqyKH&}tH`M6)n{0&quh#qe&gGR3tfoO=F1nGjn-eqk@NbMD<1sX(!fPc{EstkqFk7hq~M^ zParyS%T>@NBLOLpYw3C-lN=KR%w1$(f?es8)%a$bu9a>t0nJ>F@ zUv;dNK+a?Ivk3E{#%PS{FNpTENMpY9pTdt(X1c&EM@yyGocF0xZCV|uKTl(Q)8T=` zWp0l7Q53y}=gd1~VzqUc0EOs^oU1t@}4W` zP1GGGZQf}xLQe%h3w@RlFVW%nMw-M_Xe}IPj#e#ex@dT4BoS5URvWgKLK(&y^}hx) zqQa{=!1+l^m;use`f6u5VHFvOv25f+;->~Vo0vH6^r~?Q%SvhcE&335CaGF+t_tfL z``~p^=}9ZXETY02l}|JH(TxH0q70rSx@!%Ev%9Tr*ed;_*){T&Ce}VLpS=9ddLz!{ zf`4TScHsN`AJ4~iy{w}kO3MpRtBIl=%n`QW;-gRvhyBe-mYEO2t`lD1sYV722^NcG z$mNZG#-4Qx2lWUmB-=HE{?b7xtuBdSx`8_M#I|TQ34XsL8o)M`&+q`0Jf%XFSypn* zRWi())jWNq8~J(Fc$|S7#?5(Z>pEM$AC~xSmC8F3Z@Er*{O!)eowA&|zY}cV?k_k_ zT5_npn&>sH^5SOLjPcSeDFi=X*8|meA%5I^P*(9_`wr_T%)fln~&?6HSR8hsC zc$}P6RIp2kLR92;S{~~Piqk;$jtxIhL~S^p2h`bXPh5Ul3Xg!3 zSYr7;p|P7zYHRE2l4p*N7?3lF)B&PoHO5SMlp@4sG{p13etrxqP(37fxJ1d;&UNm$ zx@P|deKjk#4Ef1#%-<5#Zv9%tr$xZECqQ@+aC2Q99S}q~xC_J4-s@pjAZUIV z{#k}KL3IyG%p0IVEeS)6oWI3rjeA7oX@J2cD;OqdYIqJk3z5j_L*GUWdz?>iu z2Ot2R=(FQs($N^zOKLUc2-y-*&a$sUq7P70#C*Bqo+a-e!K43GTX|n;xx9dBH;y~u$!QLopqLNI zGI&C3t!wggc!@+0GrEQ(AR|JD3dEmP8W?0wzxR7U^28MUev18E8|i3$Q{h6J=>>nU 
z4_oni2brgT2(Q(*cE9c+;Afuuaf#L1dObw_57e0l2OAYOC^xTyyHn>*N;3UdC z$RaSl(P}fch10%59n6_7hy^5{B&Y5t2X_Cyn(3|A40^|waVG>M0|Brw;cXa#H9U5` z{>BMg_JwjKNNAm5-PBWdhH+{AgbX__a@~{zPNbo%Fea`amK$At7Q@4mPETsV&5CniFdFt zn=VOMhbDzEH^9%lYGOG`97cbDQ4=C&k=##Cq}+OWWoq7OX!N+<)1bm{LsdEcQw40X ze=|58a?Gcpzq-mc^-jQ3Qle{jBB__Jp$UvlqMtT0yG_ol%T+K57%C;g=<-NcyEQYt z(Z1W84}rN4*d${Rc^DSKpkWiOh|7~pcZ9GhA@JFXIQhVuY4Ly!3^QI}b$JFN`E`@> zEvByaW)kowlus+z;e0kGlbk~v3tSL49R2t3dOF3?b6-N0)E3pWcFDGC-%1`P9`$+H7In&gRHpSWOX$XwxFz9; zZC9astW-bDBwJ6?uK&Z0V@SrP7c)f}RoEH#XP$Ylz!ul74DG-Duuj8XIw-gH8q%Go z;}W8Z{6v8Uy!2vfBW1UN;|hQ7s_LQ(kPWaGIMC=Sl{1D8w6SX6(_X#w!Ari+AtOw; zi$Pn{2ct>Uc4vOE$drB8KMvc`N7;YZpKcL{MIDt~VaqQBhI0Dk7dfqYsQKiJ?U6mv z<%ReV>T`zI|+cUOf4|&k_IqtoCJiAwYR~hx8{;S!E!4^ObCdqC)O!g-x=F zV#;ETIb&4M+h|ogW^;L7oEe5VjtA8|o!6u*j-dC0FCXnOl`{jOpqxU4{G`zkfj=Qw zHfib}r^`b29G_VF9rYa=~q)$dS z_Giu+scO0QPeck2XpnrLJ-=^>H^#Jf$7=M3Ia>eMn_f!D=+`d0VOh?{>BjS3$6IA1 znJRNsSu|q>OS_Ij+2t5f=zvzl#3=|H!%6*Un;t<8w;>$>OZ)F{T;5&H&&Pij__i!z znD)%Ywk$b~RbYpMMg17rh{@YjgebeALQV8*#;C%lzUm=#_Wg_uX$n39wX91=U=Sp8 z>rTXmT#|zRx_l=<3Dp9c&4$r`QmI%KfFGJFx0XLCSNJFny{u`oN_%0*5(>NENR{QX z@5=m!2>};h^wsoFp0_U&uc!(M5jia}3b|FZSeG`^3SUmSl7~>-S2?@Qtha%m+-OvC z%4Fm*gY_7$wINy|T-t-CdKv~tJgTH2O|;cA5XOO%(a0%uAkFtF0_UbP0Y}UuwJi>p zywtA>j0q`vUEfAcK=+OV0E2BH5FpD(5;}$#EvJ#~)mL{}N~gBtoKV-6$a+yUFdkZT zpe5y-p)xqFL2jU+ot@mb-_4!X_sL!6^HWG-`d-M6W@t zMu_t@@rRAiGa;Vv!;>;ziOMmYsE`eT=rFb-$=9BdHVn$lP4$XcvT+M3_vsz1DhZ@B zB(JzHiYjsAF)AY5;XmBG%J8-TS`N+crqLoX;Q91b_eA(e^{i^Fj44l} ze|@`D=TO#}hyMR~ddJ|(+NNzdwlT47I}_WsF|lpiHg;^=nK%y;wTaE+pnN|YhS4R`N~_ud zIOm}NiIcM1NUK+PbFHWmaX{#DY*l!p^j}N4$geR^(X$VlSQxc3?}nwC6ga8{&czUA zw6sljc;RD}&P=6xZ(Od%;ZHIoYF4MA?GwfOg(f(0`=}_NNK!+7VP#%oiFsUced-D_ zLv2iJd?Xt9R8-`1^r9e)L%B})Gnh;{T6$v!2^!3*{Fx*wUm8S?c>0KvL<-oZweN5?OjQ7>k|??JD3S_%)$p{~S0< zJuIKll(!VWW#MhMBXA#z5cL5{!jHkk+V=Je_Phoi%6;d_CeM6`u{5#WH2i(Lvd8}N zVfg)eU;ll`^*t5v-8|LOJzJMBRr&Y1f1I<#MPd@|UH(v2&Y#TVPV@R6HiSdrQbH1Q zKs1ZludI@7=#Q4i`}_mM4wOaD@7D^I09)<@Q-QYhSeL+vg}dJ#TQ`t*ptqiu_Jg#3 zQVBo;8n*F5f%{Ls=hR!C46jB%`uXCu>nQC3Qc08QHEQrs)6!$wlJvH5<;JlxpQ&+@ zG$84!uun3?*q`?Y6Y=cpn_5D0{9lV(lw-8zSUQCwl*px6eNYqBf{%`)Im56sO0P0r zN?(q@se~`TXn23_E5&RBuz9}r`$BsF=bIN8?;`3}BQ*RJD5qxQMYM<4=|5SMWC%Od znCECjBoaw~`cDlqSWsu1|hR4I{>(gZwwDePdiDsi6JY($Ix5iG@m zwxC`|5G=h87VC%&8x=~eWE3a@G# z-a^7qjCDtTNFea9z>a-;`-<{_bF>}tF}4XJ+Gs?2j1P>=aRO?Kgt@B7M51(=mKNrj z0K1YXEJPJxP7ybG;UZ2ae^S8$;7-jdO(16dg$+_EWEJXkCDYq59;8t&sHCt^8W|1k z@#ZfDszD7A10Z*P`^Q3VPEr9Sv-@KvnR_w!qV@|D8XM>A4e#*n;#QFsMb}3lT$%Q2 zkeUEx$TDuG%z7Jv>|Cf?1XCh$CJ{_=>F1gvGdSnC!5>w=K$nh|B|tjd7MC4-lXU)D zkmEuY>15`mLA!rSAn`I$^ObNCzYA%N$K$+~Fhmh3JI66Kw`4A?o5j&U(HAH*1W%2- z&&a&o-a1;aiW9Q%X&|<+O%12GL4u#PdPSp>NeMcYYw0}F5tUndoKwkulT~7rA^hxz z#tX5owccz3(;?{8lVG@7#T~RRP6g<31!~|!2|;R@(y0A)mJ!zsRtLnIzL%+}@FOxe z@p$O3;*eZNyE+CEYRqR8V1Nc1HdyuJakW${*{CCd3+CoU{`=jh{4qf6;VcA0td6o4(K9am@eYr%+0PJn`xz8inmOJ9M)G#z!24j z<3Dj?la%@0T44+}AbmNq- zb|n_3d;!o<$f?tuo>{s#JwbX<BqLj0o-^%`1oNQtba%ZZGG<5uEhgu3DwA;xkfU zk)#xr>k?Pe!6lAjwkSV`m09r9R!P^OIg>NL^Jk)t992F}XX%LCNbiUo^l(h41U=T% z$@r;yn9eHkX-Kk;y25W1JQrT3^O~r=KdO`J&5!%G3}MjIzW@ia>tmj0S^mLkf=Fls z*%QLde0I=GDwH|gLjIF3j5ZVz#a6T?FyJW?Ke>9!xNNazl%Pc7vWYGbBM9ub#n$iy z%<&GBSj2<5kws1eQAAR2bO50-9d4m{j(!-FtnQ(wNkL>&=&bP#$S|MbUqRxZ2md|# zJ^so*pZ;DA9W66-yJK&@`0DE(y}Ayvx#$*LDKqroYP@(6@Em=@^V_=~V0F5hOwif{I((l}zvILp&&^rxzzcjb*>CPggF z$bX{NaA1GqU)RL((Daf2SLGS^DyvEs9$!b==Wj=>XYEZx+PNt1|SQ! 
zs*se(L6UGOc3+=GzFPoh0>a@an2L4&x5&2%r!BWS&-@R>p3Cdo>%=oK8 z<9n8GQ@mlm(j11@NU%1@-z>0jzaL0fcVsq@2n6Csf`}b_`^D$?j*}?`b0iom73bZh zb#|abv`eHJ<6NOp-Dq9IG+OI~(__30$cu8(uHzSEy(Hu20Z3yiT!>YRY(=?y3pk#No}p&SW9OcV3j6`*7NH+zke~)> zX*Z600HO8ECwC7V@;rooXfmFssJXA3k@0A_lwd_A`NBwd>=<(`s+w6Hv|4v5_5A5~ zeFC^u;1J)3INf#vNftG3$5I!%W5EZTz)}o6K<0*6zzvg!*lonCg=+;re7{GmC+~NX zvU(HUn1R+IG=X}qnp;6xWLCMK#o3S*ca`PD&bd;e88sVO2K~aSdMCYAu1M@w(hY4E z;=t*qC21hS6jPh_>O(gYw?q#^go$SPaQhAf561uM>MasUoMnoDxf?-;cm+p7Z5cJ( zW(crTU96fTL?xY8b_|#c*Ax*&kghzmGBf;WXd$IIwfOfwkbrdPPX@ejzAzY*mfT=TkL+$Xt~tXcIwy z{g3Z0gwR)@C`fJNrWhKL&wQ0q`;--ZkZE$A)CI~3oGBvBvJUHpw)hjXEh#CTMKc5h zjTZudOBlIn{*;NN94~k-SqyR0c$fxX$a?U|YyK=Q0l}q)#2~dG#_U(WE|%1Hc*qad z*r)cdY#I1hde@3DphPjCL=>RJo5(SP7TP7Dmb6vz_1n3KRME^h*~L5N3XH8^v)xAH ziJEqL6gqlnG7bFh9A>_+>H=FM+*``n@E@saC_yP}qiCePcBWLkGjj{qf7wJ^oGLn82C{2=XKVK_32L<9g;yabMDmnuJ=JR!jD1o>Zx$ zW_GMYONaOeoqSqc2~PZFk7EIx=&;|#EOkwO@eW5jS!_`#NO zP)s$8Ne=aJ96FIETOI#y#+#=H9C9p48D=yCOzXQ{GEog*r$7R{P!0Je6ujBmk}qA9 zE##9zL=|f=@3dh=LV|-Qvpnitv*B`+H)YSQV5Dm-A|dzZ&I{}Y;#BcU{G#R`cGo9o zFe9ltZjS|rr>AVcHwwOA#sojZ++M%Bzx~$zzaxmgUIO|)PPe~r%KMet5g~5S0$#4_ zfq1|Qv5{-mU$r{JYcubQjYTr>?R+XTUmqfr-)u#M4{m&-Isrm3t;QDF(w#&(b0-_s z`zqW2_Iuu+2#@CupFSYXzTe;bKStfI2)1F4|M{0Ietd#^-1Ydp=jmPNe_3D;taX<| zhIg3e!AB@pK9430Euw96GStZ#y3P?snbZTSy(^O`p=-dByLb-?v2vXilakbMXDAAA zNa~u#Aj>!R7xJ7XRL$@A{JM97&+pUeI|Z2jg<2Y<_xdFYd+_kP`>2<8!_CGvi=8#U z(1;<3_U^)-lKv~|5A~elRP4E7ll;65YjsgJ4 zc7r!Ws|uQD|F$Xl)+TD%Xy%GDF&aCxeu8qi(-T&N;4)Uz7v7XS$aUWT>=IHDk zZOXQxw;8Po|C5afm+@BusvTP8&zTrh%;k<4RK%aNXp28*$lQ9%S!|y#O*(H&yiSCa zPgw60QK)SzTFbGe2pJr}MuX}&RD0x-^uNZeNxC{3n*6#N0Ry{DGjc~`8GnRx;FJ8ISn9kuP!LY{=Iwex79$7?@y{BCWvw_dZ# z3VB;X3j(-kG$j;j$uH|$C6ljYI*{1}>c;4<29QtN5peXkKpF;t^}M*%m# zckQh;y@4ZTJYG{aPr|-N*42x=tyo9WrT-;qK5HE@wE0nn!&qGkSO>2(=U7rRW=K!} z8y7#7{+GzHqsC@8brwq(+_n^b63(Gc5~`<@%FOc~8Rk}16B+mB=zjzUmGZ}GsKmb> zh5WmtS-a!u0^Rf~=TV=>V%Z#`ZRmC1{&}h|e_yf$j%|m7rlYrlD9~JO7Y1gvft8w9 z7Y4@WNFolSDv`f9Qowt+hGkQB<;VBl<%;|$Z;a%eyb?tj0raWkN!F4HF;3JrUe?BF zazMBwGcVQqRY3Y_< zJN1Ge?)|D0p{E|f^A{exN$_IJH+-#E*UVPrDATuxpXbOLm7)i&$n0oddgu#zD?q^6 z>op^C(KQMy@wlb{Ze;e^s@Ze=o~QThDL$#=$M$QBSpMNvwURigxrX$+pmYiu>oZB} z@eMzN!kA66Agm2>S2V6MdFZSYOZUgsdF9IHC2A(74Uz|!ow5|F&tEf2h`}gBDXOzu zcrhbX2$|$`HCW2Mfdle~Yj&g2VO#&9)2}yF+9~WG^&jp7bC(eX9%9X#ch#`QhqGYe z_JSXa0 zv!%t$u{Wldo5M16z%U7-=L&fSS0Zu7^xHx7^^9G*RryAX{U$MZ?4eGKx>}30WL{?= zVPP4AE3EXFPG5Qb=-aB8g~?|?V5naEC%NW~R2`LT`Uj9;&|kU+jT+hBH8-PhI%}v% z-PorS1L4P;R5*I86y|LmOKu=g5ie4~xTf+D!i-yepF1Lx2u3qp=ipjUcr&c`Oypw_ zrP>neR{T$^TE&5Mq1qRYWI4aeQH)}HrbMFAYd&&C?NFNPjJRtXwv1#eHMkaH+X0J; ztt?y*;m;&oEGo_=P@34tie%p(^doU%A~bhq7|OqAImN}E z6_~|d_4A|XiA@&c5Y1sXo&;o2c|sYSWut+(AqX(313~Fbq7w!%-9|lUVJf3rqmEpQ z*~utT8-LX-*uwbb4=ZaO$Rt52`cn}w(+5+&#yohM9xuv+Ep!soQ?o+222g~N z$_mA720uj&qhq|@6BSuf7A{`KJXGw5W>PN2+aY^%so;XeDTH!XTDvI6NfX+Ly}>7^ z5A$TBiHcezpe4XX^Xh&!We@LVA}3T7ghu~UbXGZ!=fz3IInI>oytP`Hw5VK;PnX%&5^jc!ep0?WKrx)?&y#&B z@=s=-EAt-yU zc6a{!hzhDXJ!^d#=+J4MGIN*hqF9+y#hy63Kpl8@>(Z%%p^hAt<}p+%`Dt4j9hK)q zg6iuWE4QacH`viT70M$8UvZcb6CW@t?tzBmG%x)Hm*F^n+y4zYJJ>t8^BO)9zJ>58 zrR`;F`3#b=6?4BuK&9W0B{t>A)C2!%u&_V~p9dAbu$})g*xBUZLt}1^qw%i$EhaOM zN%^MPLtIvSUXo%=;q?pa7UU6auk)Rsq%B?R;=bFwF>a+6Hg29`h690QL?VPj=tTk7 z-cpguseNDw@!N*$mTR!E@cJuz=$E%Sf|`ZPn1m&D3gp8-(tuVXW?0{eN-P_mvTBR&zZej}}X z?^hc`)U8I`zm|dz^A6X|=Gdj?H!eAfgk*F)f*Md%7<&1GWT)i|mqJUSBOuuFsLZywV#*nd zkMNw*g_|R#2_>aQsG61kG^xsFTw*Noq%<(d9#rN0_G(vXx~Rc?_0-`LJzkfjfM#G>x`Cn@!*=i!21V4ibC`WgM5Nc; zVblHr4kt2}B=wJo7pBZCHt&QP6$hZIof88!9!1$86||&!d$ZCiSy0&qIs}FSktY@| z=O>)uuh&I9Wqd3Q6EV1@2!T}$V^o?{?=Uwi5M&N+4{PgodAF6Q6SODw$WrKwpNR#( 
zK=sm>>oQ;tY}>GQoGmZ!gw@}VyJ+&M)828F2S(7r>MES;VR+3zB>~uauaR0#wW$Bl zCS=xmzUdnK52dWO?9?k9qCLQv>)1N!miw`qnpl%$CulAuuUo~WCeKJ$Bh&S9c>43aNSWA zf4L|;er#k=>u73$PG^w5%XE@u0kKkx7gF7vHrksMn}K_NC)@-3PH+ljKb{XL7*?Mpj$b2;Z)@iq~ zb@s_C=a;)Dg0wC3my0$IwDHgE4iJQNm_YH{9nur9n!7#1fjd-|f#uAH6!bT{El>5T ztGERsxib;fLL_f=1>-9sg_eUzDZRUdKj0P(p&BkdxpG(+j1>O;yPboxo2EN*r!F}M5rJ~LLae_?rH%aC7aIGkqZ-0%C_m07t zm6Y#iv*MIdPdp}zZ;Jy0L;yqNrY*or-!wk4#IjUQLy!8drl}uc+ zGdXf}&FqqGQab{#K+AH96B6vTJa|m)YbM0;1pP^}pZ6MwlAi88HvsxBVz5wx_KDy2}})t>Ms=~Te$Ll$mEH5W6`mUNt2D0X^^r=t3kR>D#+XENBtH<+ZT-6ax{gD$YFQr8C> z*ZU7Fq6_Y5o0%D+?xFa-VMy+rMv>h-;5$E71Wh0HHAXm>Q7{-I{i;wr$l0;2Q``)E zuQkdK*98Q;)mlOg`BnJ(P#Iw+KMPp&w0LMj<%+(vE0!R1{O;PVRF9SL2T zfik;w95v@=P<#>dBD_JXpoqv|NAz-`_r9lR+L=;l7ZiAX@kdf!a_k`Bd;N-|PR-}R zsk`YvGV9CT?Z=xhv>q$2>#Noy5osAigu)e11AX@Wj89cddU4+&S%f8B#TN% z_N19GPtud&))@P{B6wgAWXn~IoUvUFmIqS*_!#kah{BX{Sbx}7I-M_8i%efTlSv6h zQq26kSk35$RJG{Z$S^lmx!|IcpGp8v$yf^N%0f)y^+1B{R@WBj{#l~qA2BOoZ};lr z*Uuq-0Ie?4jBDPGzrJc>j~ghK~rPR&=AU?Ce5~O;^M7rh;!n%SlTRe zM5A&Gj9+3aV>Ur+^X4*How=!$ihe;HjSA&S5lx<5#Qj;Y9T_=bn_&>Eq>QHf&~Es- zFZcmSH?|;a%60RxgCvY-mas?U@9crj58(^l(IzXOP0ww!2U1F0q_9XPrfZQ>zsn`! zGuEOoau`_f(4wS#xQfUc?S45+&h~FHEEr7i4(B#lq0#M?dAbv7SK&qwg%koAkjqc`VJyP#zW8UY{NWx_!FC8 zlO%K=cgxrPcdPs_UCcAwC^f|Fzq!{PYNvYlnOOu+Y``x6wPd8(FRJNvOHZDC3{^>J zPuUYFMm7KMdco0o!@~qy)5JgP=-R_7LoYr-Yp-&bz&UQ3CoqbednJ~|VD{9hZ)1eb$qiR^$;7ZF%GF{DsCsnr0D=`8zq!pafnNPV zGDXEX{3K~kmG*#PP$`8Qk9S%)DSrfagc7}f;0uEUBE+?gV_}`VcfjVpyys4|W2fG} z{n>EMEd9G3sm4LYDx;))StC5*Z~Qfj1bSZSiT$KBrAu1}S+G`UmmF3O6(4xfoEp@w z-WdYV4L=v`L1*bo%Co^+RnJ5MYfKf>9cX+W2uB~;Ffzd#-ce=7>)n$bB-D6D8-6F$ z@5ATj?rJVKSUy4HAq>%qc!W*;{KR=lAW9SG(`{9@?JssxO~>vd0gSno-RF0G-QA4W zfbR1zcq{v>?(VbhR#3}@N2z&Gp~G8Jz z;a|n4G!#Z1pxSFj2ofd*nx%4x2!)UC^(*%QWRIS@T9yQE2t}<^Pcw;1vX`S$UzVGv zF&d9AgCyrJi#MDb55+Yo<#+<;2XxE+&+`Juz$D}DproNd7du3WAYZUzqhXTRW$c?6 z^MTsu8aR$;SjbOqI{gB6FG^0l>R*%tt^A$Z9b(*Wj@Sn)wl-_YUnRkI6YVL^`aZA| zJR%j7$3~=F1-Z1r7J$0B+%gzg2?dz|P4Yo4qm9IO$xu#cg`%*yAk($$Cy~hkmRN3g z`2qQeb?QV#BaB#XU_g2IYrJTG{H+dvDn8C})eDF3+kVaZ4U2rD9e$g# za4vyK<3!^O#~{<-BBrxE#=~4t*e(VhOm2Nsm~@oCGeB+LuN!a;fAOqdWF*t=5`2IV zOaHzlkQtmbj`76=pait>8NR& z+$EWaFbNY4o12MhjB`3|zZd{>dtp6DE+bL{8|)tjzU9Q8VsbMuPtVg?;_Z#6!>h_R zzRheADZkejXqY@MFJJm~8-6(j=-GIuLH|iyBKi|!oC*X^0w2sW?ygU3a>IMKN(p=} z?TrJ%F1dju!ewzjsCiv23t|$F>SyQ9!tEBCoJ{r-xs*BeqZVRms7HHksr%XfjG2ls zRtw%DZ(dnZq$SVHfakrV!F~j=ljPeMJQTmF+Ybd25EzX_0+faVlqO;schScUSQrJ@ z40}j2KwVPvA&U&*y~F_Dk%K_Af1;yEC{9N`=na!5c+8iieBkpE@VqdOAwSr;qCh-k zbBhyQ!bi=4FfH=jxsj(f{Z;|j!I$awqYFz9bVQm*8>g~_mB0s>>lU}fykNZ9`q`>i zS>yYMau0B)8!0(Y4b}==YwHlsc7EJRH$lp3&AA}sTbCp}-N%9HhFVSifI2V+Hhgg) z8h9(*Btj)u$j(hp(~)M)F3II{7131gFkzDO45vKtfjRCV3ROZK(Ws@;knL99il>Y-Pcr}nVC%u`7GbE;F0*Mj>(A7-Hv z4)Ag^@OkBG!a|ifDSYFWOlKSYi7f3+kuQWXAxEed{F<>1ecu9(J+b1s9BD!(8mCj$ zMlJmv3LcwVM1DyY?;U9)o9;qhFhJcg1pHVcPcFqrzX<9$ezH2&dWnqQ0TW>vsh=n3 z?xU?wpn&>+axM&wKt5^?zfNB!&u+uZqs{J^y**a$v0w`=<&nbKVZ*;zk$?3zj^AU$ zpKRs4?0d~jSlG*hv_Fo(-dv7u*wxpnKbCr}-6HjY3{9v@v7$nUy` z)sZe6j8m(n1E{EO;e$j?Fs4IQFk|!_HXv6vTK~Qzmh{f@cXa$~gRg79Y%0$+rXBQ0 zbCo27nU*~|!anH ztF@!3?~!7)e!@q6HYeA?H$V<^_c13Dx^{&>xRFy4p`HWGw;7_ttydjzBn7BOa57zD zUFZJVwu(37oFRPa?LK_Kr^xNdHKP~%q&8h*qE-#kz z=y8f;VL7@dqRvR{ycgT17^t$#@BTQxIyg5E{8WlQR8oL(Yt(Tc4wN2pVk+6pZtnSR zV8y!8Y)~{PbRNnPKK4*LhTEH-Y0^9Cn7Y%XQ{ui?Uc5bq63vDuvuXqyV6-s96uuL^ zL!tL9?Nbyfds^wAtqv063MfC%EiRMMj2CtaH6TgSUB6XY8VJQ!Hwmrrh{7j~B! 
zG{bla6#Ag!8U2sLUUT->8+(C-~*%{+Bx zFqm?*6i-GDKIxz>7&<|aoKOYEoD4!*Q%_ef0abA{DouZo7U4vH*#(S&Q%#wGP@Pd1 z5LNRj%*-i{frI-smQ0|L<$#9+v;GS8J*fcZ3uh^1wGv-PQjqKmj5MzA0hL=fB2(d^ zw1?ke>5kLbUPE7oG` zJ~jB`SYDf=uPyhRmKE$l(|V_>`?Wh1{v=<7e9K`L&*hW5YVHE-YI5okvjxMDmm^c~vlBv6&5w7?28Rgx~Mykzjg% z{Upw?lnh~m-Wm*8TRdU|pEBWSI5a`4oHK?0mIlRtWz^Ma$^kLwH4UKwSaX$LrDIsgGSoE9l#h6HnC+y!{7R^$B z(A@${K}OcTGCwA110cvZV-oGqHlr>j7`Rk<84AkP6-KE^>%yNH5}-L)ETJthk`602 z@MwtDxq?*0{0rR>N`wSJRhaj%rEJum9>o_GX1ep(67KgN#s|EPVYTrqEgV0cEdiz^ zu2YS3nu?Z;h)<^IxOpCKcS~L#TkgkOZZW>r+n=w}Rrr(B&;JOvBNr0?o2Yn3-kfmR zzVvYBQT&{RKs_r!!SG5@oZa$R;V|imBNUHhl~sBFg%=Xz5~6Q&jjsnmyUG#mK~U~e;vfDz?D(4GdB^Qg6uXdC1k7H>&w}T zMXSjoqQc*gaMSOBJ!F39w~3G0lEm&O?wxhFhvFX7y_T1yq0RI`Qt z6i7{uK4JF5r@=CZBX(ivGj7pW46_z=D|zFzBW6dR2eE}5=mwifF?XeTDXYGUnJZ7M zm_kS3g%MGiN!GlqIfN`XChc8LW3rx|SN>$HTK4PR{n!W~qrg(!TkzdxYQI-`6Iw(( z-WHIb7&%|`gt?~zb5i-~TTpSZcR7phZC7W*?}I8RGYSL)g2{A|ilX&2i*B`SRe2R+ zibemG{J}&rV^(#Z zI**C@n`XjGk|?@=v4ZALb32!ck*lZs!DfmX#t_y-yFuT~GWI3_(r^SAXx8c#Ry%LM z)B#tYA=-r}-NSW>Bp^XX-Y6%aEiZc=#Dm`p=ka!v6>QUg42jq?(4HMq9Sq8!6~Pdt zmJ)+!Fs_M^rTaT$R=OAqDn((LQXYY~TMjT`s=^!zzRD35FCYJqBFfdhwH9WzjSZlVqo=FO?FS*ikTA2qlFgkNSiz zZco6I(JOA^5Nvv`3SX|Rk|{*mtT-AEe@U|%wSF6ymAwC0h?JrDnSG8~)fX(Vt`zfm zkF<`lJ!~sImuvWJb>7e)-dNY^y!6Xg+S**kejpWa17|xcgX0LCE6vJ8qjqJbW8tl} zsyl?W0aZjRR@mm6D9MSvVltzR<;P-&q}aYajW~r@7Z@9Zb%*H2LyGAUN@h6L@`ydB zkIeg>I)dAcDhloa*#nC6c4U=Y`bM=0x?o_r*>=HEQxYhb&{iuYpus*LDJt%wOSTyE zqO93TyEO5T{DW9LuW&SKit{NUJwk5Gi0bO}t7o^aU>e29OyqCr=7GzbxQqWE?uRY7 zaxXY18FE?WoD3LucRF;v+}W0#-ICDxp)l5eYQ|+UblE38$-2p}>ZeXHHol!w9m>tcY((?0;1iOYCyRgKlw9GozC_a17^9>h=#;E*i?0P8rJ+tzB&F%2j9D zQB+bSI`KT7y>Nubcv_V^=4%wy1;VbcMp~Nc=mj;j#-hyWz>E$qc38w@{72*OT8f>T7QV!0~BZzR_c7rtQjcoi(SnuB> zA}P8}adIuu=>Z1YZB)iE!w!26gH8?=vJ8soR;~o|9hb8uP^;Spig|Pt<0KuR(tAaB zyAmV$owcA(%DjpZ3wd3~YA~c{829nHA|WMDsL-_s5Si`mYC1HY5lVExLpvFGXiMo7 z55RxB9_&<${Pc}?!A#qrFhmD#>~?m)vGO7>)y5Ay*y#_eYw6C}8<_VFjaZDWRBvt2 zp@~7-ekDOLh5dvWNtGhe9?6zI<-a5!^@{)3>j<9QB-%0j!1waDcXNP;B*dD^p^+?V z>S8BP0hF2ulq#fS+U>wZ>+XKzrt{PN2G}~?+{@C!X_%Uj0?24haztDR%CJCexju}VqR3Xhz53VEa(HnA7Zl8R3jGFsHL7$s6W zFq=dSFUF!Q{ox^}+J zY!O6+{~o|;S5yW5k)06xBMM{;++tHed1Ck+eI!uZwAUg1u`S9|-Lf6H<2L;l8iex> zP3vg7Nh4E7B89r=P-u|_$gxmycIroUC;qQ#d`BfBGG@Kb3-6D8e)YQ)>wzTEFx%tcVj zVK<%iPJ80M(X?E-o|tEh+@#z*lDl!lABJbC*dJ5i)OirIeq~SuGjnTf-(faYhVZ+i zrRgM?83qBr@YP@OB@g!bOJ2g*qXES5|G3K|wFFdW-AN80fSZO*m79@u<1cJ{BIb=8 z>N$RxH?AmY3$PIe(GU*^1-~4y6Ot6_#jV79|eMQZZ_s_C5Z@ZCrHIE-67!k6Vz@p*cfI zes{C9wepJ(5ev`^M{DL)RhIpOpp{P9FOs{-#l5z}6}D4kJ_qT?Ka(W3XFbfb2(QY&Q+rbHCj8&-WU4)!^L7}AgPnhb zp>@_h$ydkOk>e#HrJDS(b9fJHeK5h?g-_@ahzCRqyKGd;+0;R#S+u}N##2a#vEAC% z>fCKDIMcg(f;_IRx@O`>Aqf5_VcZj^pS1sSlwvQ|r4 zA|qB)at@60YY|exe|jX(Wxs6`J5Fwq!0u~|N#(cT@dWJ|1o*TIOwX6Om+@4Q2Ym<3 z%-DG(?{n_glfnRRjeIHKA^MR7dMVt^%L8RXyb|0JgvjkoOcU-`Ks8Z(D1Gn+(0uaZG$A#2H_-lz&)F#!vIEhULK z&D)IVw!2%Ep5+mE7rJf>NVkDA_Q^$F&89r-(C zGv)#sqQBak{Urji#I1sgeJ_?tj!#l2_4B%M87x+i@5F@F#rcTa8g>8hC^Ki~zE^u} zcR|*EQ7aI2)hp{Em@N?3;Z4ulKj#JHZyhP!iTkR#XN1D-W0gZiM|P=14xZzC(J81w zZ^RM)ApfaMlriEXD?#4v=@p}#N?46|s2$G%<11Et^Bf2h^>Es<9*FoxN3jk^kL!vD zh7oq^nYIMOX%y0=SxtFR-}l(^VPSagr1R+X2G8nKUB+nczL(wFN}+Xx$1o%~`KL8? zEzKMk7nA;>_QxKPIiPgP{!^fFd3x}a3%UG#$pCFq=p?R_acMPakqn>w=OsTUT|oJV zxH=r}n6y-THLPz%9!2(Igs?e^+e2naa6^~eejydOCmjO|jYi_5+dlW(c?~;%_@PxU zv_B=~rYgkbjGb%4Aj|A*TdRccvg;x#t^v8>s_GSeY#nw}xxEYb8>xWIB?SO16igdd zf>?FTr!Ry_HzNKKl37 zOJw24E~qOCt$XY?BnCrSRqr|bcy1Zea4qHKq>*{n*_HjfmX0MmO-^_=OCSsP#W*$? 
z1Dk*+h&zHy*g9;RS^Q-Gd#SG3GYUX($>LQInqku~3l^AMnkMeIC>(k&C?)jv5OCr5 zib3dZUP7!f6#A5g2gZp6Bo0d!BV_9pQFEjRZfJprl?iKzf-GL4M1-cD6O-xuK^QN4 zFMrZ~EuK^YG7Nsv3Ls4&83)wTnjN}wo4@4$w5&w!G@yN}EKyxSNv!GeenMP>XOrKB zT#l?lXCH%JNGbTIQ#+fh&^n;ijm+(`Bs$#8Qxpaorh}1VGJj$gB+we)awQ|3Hn5DP zq(4Ev%=a>fVtzO0?N3`QhiFOA$%9OjculS9IHHwx)bN-yw(RH51Z!?y)CX&rg{J$K z9*DV2MLcO%_;QbVhwrCZ=;N4Ms6IoD<5UVJ?h}JL-|{g|t;`wCB8IrbEp=`q)Y2~( z^;|(xvJ+(gORfc2`}mDdBH71tLIVQSpN8@6Ku-wPON9E0B?d7~IkJcTI%Fko%H9JF zi=?zgsnWr(l{&PTU(-X|yxY1XTIr3f`In?fBYz6l9|P{}t&qGk$*1-LRS%{Jl=Wxg z3GRtXKxvUkRSF;=Cb~muqK)M)_UeOs6q`fI?cgX@nRt7?XeYYy$MoP?JJkIK)myUK zQ(^@RF$g6D=?^fd2gYP8duO#64O>)WfmEr-bLZvp$>yFeiL^I9qUUGo{a3RgCh%oa zVbL8$0kkk{D^-tDwAbQK{Pgi+E~yfkLJF8MR_xUwrZ zSDFr!8d;Z)7cluaRzGR#%7wH|f;B^BSDO2qJUZ@G1lAcb2k(hRjfiw~SJx^(uRt;~ zBp6so`&zgL+=|V`^$(OC?`hTE# zN3q)~5gyxg`ySgVx82wG4w6))*;S5rph!Ck9Di=Qp5KB;4E!29S5(3^uSOEouf)Om z0%6c3;F*3Xjj)hRGBxBkdkMBBR(#9Nwwm3)ApfhT2vhBS@IFhup(B+)^p=wT6CTHT z?XvSqDlqv%0jM$BYXKNp4~r^#&+wKTg{%3S6|&qj3Q-W(N3NZlh<1M+B3b2m*2QvH zSL9ggVBIL-3%LC#s)gEA+@vOc&x7t!PQ8+$yNp)CPTz}DAcg3X>uytw#k?Ev@~^n0 zITIA+!LP}i{J4tHG_04Y@fuR8R7zR@Q)m*Qxow-SJ^3-oDsN{~ANoXK;Mc8fRe4WG z=O*_L|HJ?Jrh68O5`dtQThA06Xjx1)Bs?9pC?#q$R->d>+=m}6Yqfho{QsDG=jh0q zFI+gbCbsQ~ZQIGjHYT=h+t$RkZEIpDlXPr+{r>KE*Sdf7>2=QPwNBNp>ZhLl?7geh zx&($e_Z)suoNVH9lCVucFPP)(_oMz1g-Q#9(#qBFnK!6ZliEwFT7#xVdJ%33Rh8Pk zpb$*{sbgi(nv0rf)8R_Jhf3&yhdE9Krl~p-Bpa5y1aLO|0D?OrWub`NNx6{#vfb7G zlNDFu5(W4eYrn7@AHJ=8)FM|g&ua4}xI+bi+4LmDw?jLde0PlkYUgH1R47G+;qY$f za})ILGZk)xAPfA#R7F{qtW~!9)_h}ph9;wRpd$GlA$i3wK`R&RHID}}J!ZuUDW%U_ zX~zO8SqZA&GCz`6qJ82$it{8aP}@`+6g4L1x>m&_I!Hykr|@B$^g-!+8fT%Kjoc1pmU zm6ErZV{Mnfw+j4wmP-#%n8;@h?E?tIpF0M~Qxsda;S-4~c*e;=+W_A-Bm1@Eyg@(W z=VZApYdnTS!uI^Yo`PP5?3poSJfP1>1YHv5U6opstr1fpElZ%5a)VPjQc=l8#B(~G zk^s7*I=XJiqo?64ohgLNXDWEK@vcC4KUV?vH2q}$rAzYA*P0m?qZ8~0LFt&wu37Yc zIXBzRBwZ;~NL6way^d@;0`{C^5%z+k-eM7%stfsgF-ZsQ*ZiK&Hgl|K4Sc@^v<@g_ zGAH%x5O_(%{I8;Jhip~`{;&nYXB-P{JpA|;pkb~4GpzT?&9gUDyf@AOx!PVO)-=jl z;CLb@g-edV9f<2Z$eq01C&2O0^v{L z+rAcywjMQUp(4n#YY{8ryqbPPU-!#yB{I$R9X zCb{uvj5ih%LQQ|#d4Mv}5mX(~xOyokY1%>s5qJ!x>gY{~x(4@6C~a1Bl1x3vCRPn| z`3|=&zmTd;o`OTurFsxWRb9+1^FnkNDOmOzv^A_3-^t;bS^Gc@FWXO{(x_4s(W!b& zXl0h|-9dA)k3*Xmq82O|p&_N5V4*-LTOGJe^zU-D_vg1CL}iMvS1(Lk`$|aNUpr`b zt;&Vc3r0Odsyc^NHNlbpku63tuUDy*DkHYUQHDhsab6Y%>r8ol!PArRHguWrZS7BG zT~D-MN%{ZBy)5DO@EY`dm3sheAe?*PV$w~ao<=R)W)3D%^`}8|E}D--1W|8;$|H}@ zxJY)2;a5dEs#sYG#4CVU@%d}c|7{=5xNi%f13N%2LR!b^(!eA`3O;S6dkCK6U<`;fLG92h@Of&KKha@W2*dJOf z1yx$#6cmV>B7RpB57BS)5{URC$hGX-Tn^4dO*&5SrXjP#1~uQni8>Sf}B{B)I76zfFmfE2qqJI|H46_E{ zl)INp15!UgUn1v~RGR~lUX*(@%~#83FCh((iC3zaG3(bRAyc*QFwcR#)vFj;fG*wn#Q61>>z>%D@9%f|W|J+uhtl9R8d;n-nL85u3k&>K*T;Y^qAG-H zNf$0wI09cYQ^Rj(Tv~2?lCj*5^x5l7ssiVv8_D8=>^$V>mUnsWr&lwUYh-b`Ej{OI z@ym$8Evz_-T&%DwECRb=va(}F#3IsK&-Fqy56H7gp<}`;Axk3Nq0SwxKj2FefTru= zK>UU5C5YjMC2jRDGGESg3D`m37?_ykvMzi>ALDJYM?UsMIRpvEpX-n#l3vdod>pJLAIhY>IuurCyFj5~wV@Q|}UL^hOagv0Hau z?@7et_r?)=PV42*oC9g$Qt@6=1EIXpwM(YE*AYwLFKSlZ0AZ`Em|}VVDBed^_My@w z=thlbQhakn^N{cTy8C zfi~2J`>vM6sqt=KAdl}$FK5G~0T~+#W&pzCe<++>cH_>mWs~&0h?wVRtVV_Af2&{r z8sfPl-S*%<{e#>ymRmKS-F%8`ZG3Xc16e{CQR$%g`R4gwu`|QihEr|iZ^fz2)rM6) z8mok|t)dxD?Vy5HzJR)aBw@}lK3_0(9CVl`<778)wzI$pFFys)uCxn&NPz~!bV+KZ z7r5BiE~Q%)*3nas7TbmeB*F7=UCLMG)U>v#c@f#@{S}| z&qSm#effUJzJ}wHY`h0SKHyH7V!;Kq4We@_eJD8V+vWu+!&Cdb15)y)>2WTSkwRX)gQK0 z0r#5gGG?RcA1OA8WzWi5TPgt#E9HzZfQvsrK>&h>Oy}hueBp_EfIRh{>w!CfO$?H- znC;f+@+Y>#-oyq=7#$(Tt8bNRgKUfFV=HF)eWygm2$zL+Z=Li_T)KpS5(7J70(g5O z>c2WQwsSdNBn0LU=8W^b6xVYRKd@ps$aMYqwr(bg%*`GRg+dmf!OegMr(41|?*&mS z&M`!tRNhKOg#3|Z5G7m&fdhimw(!um)w 
z75xzo@Oyg^g?bZ}hpX7g+)u8vKEY!dGK`DEHuZL|W{|{h|&^?+&(^b~5f!msK zxZ3AIXwPf?n&CaKI&r)}1LZU?FAt!>P0+QJo&98tz zZq)2tgG1{%4i=MMN*-S?cgVjC?aq4^kG2$W<)8y?f|*Md&$_tRieOU1j8eBmBH#5+_$z-@TpD;= z7}2-^>INDRkc6p8r#c}z6qHko4u2$fHtq@riNSIit?}msx>P=9>Vx8S&xg)c6M-$r zeX?#efJJ+PWBZv=t9LQV>V1Y&b4~~s$x*wrxUsC_{*u?{O?iLJAYp+{#$6+n$P2|< z${UC6L`1?tPXfd5l0}}ux-LXRZ~Ux=u5!@fKGaw~duZXAQ1Th4hdianyWXsEl6RDn zDB?7r4*$T=@!>LbpB%kJ$U`!)UT0Pd$MdXr9l5YNvR_}ZHqfNq@=I!|;Ekq4Y0P*g z)&{5OS|s#l%!Yz>svky!FRv%H^8B(w0k2P@E-Tc#(1+B3ot>&(oYcT|wuw8!XSblV zD@G4>@)QS_%k{K`FTFl4B-ectaZWdvXSqS^%6~RyzFKG8I3}iBs-#||u9+e#=ZUZ> z=icTO14h*dsh_CeT_BFPCdg-Bzt_3Iw)A)W0>=Xs$)O0furq$XO&VRJ-0ee=V+)lvJpYT3O4p*&yDyX$&QE8t=TN=&^7Ym0_v|>A+|$91)Zh zf_@PM)uX+}%XXCDJ;)=8W*GkXU2%}eY#gZ^xbt`pc9MxHlbsDtQCFqTXWbbpxMAIl zAH5V-VS)jBDc8US$EDeP_>BUL^4Pflig3(B<%`L7%93>o=TKexf+?S zonjZ*KU3=bp{ro%7QGq*K-X=QqdGGb{80Ewuh!e9X2YF@ z6WO$Kzd`#x!S4PncMtRBQUcr-f-CvABU?%H*Suz74%B#VH;e`4pDn10Z3C;vsQq-O z3@fLg#&1`Z9~6}|$JEkza&f8TKsQDjxU5K@+*|~YRqOpAaQ&8$j_7a{%E*(1igoFr z3e$Rpmj;hITLz$)VSU2siS%h3@T*DPj!q|#3$l=5a~NBHV6dYF1?nh<%*?Gks~5ml zS9~g<#)9FaT=LigRpACwy8teY1fcIi4D?+fGlvS05qI3o;ti6~H)&x~|E3Hynq1gK zZPffc=1l$2v%83hi%6HI-!IiT9o_Q!BrEpgPFgLaWQWS_RWuMT_+Zvm9i~KL&(&bi zu=6i%F^((#fdDF>EP9QrVmOE5`34 zc484~yycp=1W9Rx&@5$!dBHPk=|9yZP|)E_I#sXIk>*;|mcyiWBBZ@-& zzTdr;Pc)_Vrs34fZV{2G<(+9~;$(~<6MM;Cr^^Lhl9s}3F_l!L0{z7mbjP^hOMz$_ zC1sqnnf?z97O2>GwQ9zO_()X|B-<=T$`Bgv^&V_#(D`t-WUqO~5N+6WqxhEI9tE+k zGF1gbjxv+_I3*?8B0HuT0F;uFJgvxlpKjFf0IaH+opAc*8L86CPMpdivl%G@Mqvrs z#~QVVooUHf2QdNb9yI80R(ET&;#FSuCgrC(E7-$E<-TC{3Y;T#ffMWiTKLwA2OcBbk@8T05x zyYcBrR#CP`rM8ycndN6`5UFi_kl zfRP%~_=Vzf2wA!E5f~(<`Nyc#%CM+%HYR`G6n&v2_X+#oa}cWnD>NLbs#kybU;cks zcE9QkT=6in(_D`#RA<)5kxLwKJuvCwG3(UutSUn(* zv8f0bUGFn{0rlSzqbFlgeJ#j{us)Bq_1+XRs^~O z3@pb~%L#}0wA~TNxKfLtYs9?GgTUl!>!AA@1x}=|rn~lSX3#k$!`nq8jAd529gRy> zO>mXEBV87WSx$|5IJ$1-#fcP79Ni_FMEVBjgXI%JdUT$&Xd=@ad2_Jj!Cg z@;*beXk5*z<6D7wDU)0r@l30lnN9RWIHb)k69)fiXNGLf8IvDEf2zpo&#ybm7>chs z`n)yD+c3{bs&Y^y0ceDOM>n8O7mi5AW4_C&qRPnhJG=(m znvP&G%IpQ8f`isXjhSr47S)2`m-_*cSRT2ty)j`nmCc5FGdw1_y*h^#34{*twmVEL*oQW(xQ+u;zYiqdM#?sqTOE zpPbkaHOfkm7$OM24~dn3bjd2{ypmmojYF(;ODz|z&68F`a?|Kyp21mzPE&+qCb#ve zz$TnK!E=G#q7Ab2SpIF_{D_UYf-&4uVh3t+))472>Ly4y0vu=E0@*1gEQO)tK^z`8 zm;>T$Y3+*?DML5T(&!QM)qXA);gZ~`&a5#XdObxEB=9h&i_lh|iSUC*Kce!FLNBn& zDs5-%&g)cNLSC?YAJHVCE(dp<`ffaXj&{APxBsffHFpZkr}{Z?(TUgh#A(nKVVv_2TD5JX=U@cE%5U zb@m@1#BRq~pRO{Nve4LX$o<2mr^U4JHX$vx`zJoIT#bc4fSdph$O$wOxQU-$`cW<) zRD^vFACh`lq?lSld`gUahZ+}G8k+L8VABWdmVaX>DS4fk^IVGBRC+D$rvIu)c3czz zo-XQvwz{ zBRB>5EAEqGDI-FWvigBB&k<#gLAgdRH_HXT2Ua>SBM!&~EU*3OE6&~Yh_J0zPe)vfqa{h?`~V}Lu<*=m zt!*Q*X4G>MzU37#`JESFibbLA2}VC_W-4t3Cf4ca z&nG2XI2@YB z2vfB<^%oG+xQs*UCUM)?Uc!uB)Xry|kV3R<`WJazV#o#asPko+I%=`s0H$w|gZ2-R09)X2}^w<+?G zc`bQYJT-*rE}@N9U#W*d*2a~=uE4&rRmudwh4}9oEv6F?51xPy`q(xODCM<^U)BU{ z1d$(jd_T&uG<5ZJu&GJ3S!oZf+Ro`rYefB-i9nb#sH$dliET#$FO6XTn9XSA@XSkL zJ=n!cU`H8;Z4{Y;G;c51zE2(DimnDWk#7>ZTBXdYOA9_sEVaK{D&&K`)(Swb#XMRe z)s_1%a$Tl}$b985g?8hQ!FTv~{{~d!_2RxuY|v}|J~9K*oHFuinKbr6leRp)3J2tMxr(SR;sVN| zln*+m^t72a0bs2T>~Zx;!B=wln{O5{wonsPRUBsSF<4OYP+0x^53#@^4*T9yEXAVB zzrrr29{sQD$5z-uI|Wy-mmP-_vF5%+`_Jv_M$-0te!$cyhCSKk?LPXryZ_T(*6o}? 
z_)fkL9^$UUPgsnb%g|YV@?~xtv9ual`PU6Zd`0m!c!jG)-57d4aI0K8+kNkg=n8G{ z%W@^?=VG_qUG+;N}fNSp)+ zr$E0htYET9=c-rB*$jv~Jz#JW$7TipNHKO6tp_VvO@;0lbi?-*vI^WB?vJwQxhGF8n)TAEz_;W1K=TK)b-8ua8>b^T z`%#&vn5*Y5g>`sPwF2WaydntI+B<*VpPDdDRX-QvUG=KHtOk{t{Rj9(HW@ZxRLV5) z9!m1}xg}`Yb?_5dmje!48G6K%q5yN--2azN5|LT9i(l3HaabTcS+ZMKC=n@zxCDx1<{&x)C z=OJQlI_!}&2n%2phl^LFCv#1#S~oovXK;J3$Yec){~?wTn!(O_H-?=0eqEi-67$l` zoloB`|I=CR!E)%&i#hUQ63L(Gd8wJ~@o;i{gjtqa2%ub^@OhZxgx@cgT*+b?yIR&m z-ERO=a0^0sY0o>6hMj5QB@1Nv_o^216ap$pqpBRkxm$$soYlDG)V``fl;0rY1}Om5 za}7@shr($)G~Wse7ya4ZgvIP5@Q>n{Ll8Jk*dS|#3<@XPX|p+ZBn_D(EM+hD5o8!C z0S;i8K`zZsx0pk~nrd|x8r7zH@rQ?WizuQMQ}8Ih)SsHEJ*&%OsVTOWiXK#LSK_+X z+7NGw6(XBWd|gC6C*8KR5T_$|Ramk_Od;f%usHWMr2a{?MYKuK7v*QE{7>8)(FJ<< zoLeNBuGKitq>E~-0dOO54&_J+ZREe7e~|vkB0u+%KL&Zv@3hJ)@)g7 z5{^|^vez>p|FDu#q$5;W47z0!fEE*ttn+si9H56iW|iK)jL^eoF4v{a{g-3+Q;MyR zQw&{Rs7w*LwIlA~>H^;>12gIAiwf|gjXO9k=P-4&b~42gWPVULRX&z6;ylhbI!OCI zl)XFRx(|JjBPATIG%U4mnkZA4>GemOw66fXZG0IKqh>^q^MKvxQ@u?f6r^He_jg(1 zxxru{&@YfJ3X0-6j`UXAr0Pp!_f){??fVG(fD&%a;$@}#@wdf3XnMgx$OJ{=0s&Li z$xQc`N!6d!a1)uabh}_5th;jfw=}cwqMxnCl&Wgp%GC!vp9=<*nHarYNV<J4I0uy;NnHqBe2mPQ*?_w zEPN3%=tuDzcs!X$-B=#06`X(XY2D!LWrBItVe*%7=@3;FJ9bo9D_u(TOU@A~a$aT= zCgy6AHfIleFAB&Gm;hA*w@J)DdIp_8bXitoxv_DWr@86vE3{B2JB3jdO<)?9 z#3DQv7?(OG>k{(yOu{+qk2Mc(Rh_J7|2Mtb&w&tK4YJ-{HH z=?C6iVJEr#=!%7O1qelk{+rzrZ=@}1CYWS={hiht77K(Ex;pu|^1)vf@O>8Fge(G* zD?3UWL4V{Jy=_%$lLIx!pj*^KgH{A#U_!~wLEvHLg<-I4n4MGTtQj6>7%_;Es3^++ z5kM-)Q8FyLr^J1@rs$+G>{AybZ1El8<+m#oYjUCK6f244(<3N||Lz)HbF3Caq;vS{>C<6X$hc zi=?E1-e8hgdQ7S*l9L`vva%mF>@kXIxHIw)Bp56bcC(9Qf72Z<-<`+^aZ-JZAt8v9Eyo2Ps9N++?Sfb;GnE4)!evLG0k9$z+9D@i~vY+rS&2gw4oG2*THK zAQ?s+zGY35CDdXmi(TMS6r{`YLbDGZvFKR5X-KWMD#%9a(gk8Z&ASVCl5Z#eeO+w{ zhz0e`;d2J(+sA5Y3gCyn?#xro!`gC1P1_*3ojoM6f&EB76e@MjD$0(L+9LUkJs3cB z#`25S|KX2FpRabIj=1&T@5FNz$tjMs46*e=`bCX@2$EP0aZYms3ojG1_wLSP$x0gb zEy0v*XzF>Ha?27t`Ln3S!q`?0iH(A_5q&3RO3c$S*@UmRZKT}oCXRaZQKgHE)s(G61V=a zcI8Ng*VKV()$Ft=#R6p#4XQ<>OO}enomKtZUpKKH_Yg^SvW}8Hu|qtpcigH!!zAl{ zb2l2zh3kcD3@n)j@q`OWGepUTnPFPJk^($}g#w>Jf3Do)dYsqrN#u-Un6vP^3QhEg z;5r^}R$Nvohc}xW5L9-J$cOrmwP}aRcoU()Z`kxX&7>My_krag)5~bK5%i; zwfRij*t^=L3nhcSiZNSfH|}NqRc8D{)i({g9sN?VZGt)(sU8uj$&6@=aj5fxfv-F> z8NE`bAc*9!jnvw8>d`i+5lsCBkAgCLOemIT_cySssMWet z(MR^NJzPY&3Ya!mc}5Ero3@Hm5^d;}#OgwVqEQPq>G%t6r_T=gTyUsgds-UOujD58A zho_h3B5Ip5^9f{yIRsge7ZVntCKvC}y`{6z{eJ1@?jv!_E|eDLl9)>Ghhu2-dCEhL%yZcek4fjm>GLckD?M#hyZg5!ysdVuh=GtB~JFV$|^pr(&~v?hGX;&61Nf~TF7u(^t4?}LZAv`ayNX{AYDpsF#=I(83O z0R-|A=6q^KyvxrNY%_X#tP2IQI@F%W0DFu80uJB6f?ift0jyM&6l=W8@gBDI*mUX9 z#kKE(^QTjc2>d`Y3BIoU(H9LNHyEtfW;&mAVD-RqFCJf{RoK`$TSX_+E;R4?BX zBHSvUAwzl0{F2J3la()uxxj6dhr{?L4Sp7#TMu+>)CSl^OAlJIZ%?ppCt;l{hsaY> z;TWTkcu^+<@Q>_m4oydBNDDt=PpD9Gi9zS7{%|O0=%ZCg%R5C478NyQ)ZMh*bzzlV z@1fR?<q1)k4d9Bb;eutZ5fdrqyjg?z6Mv zVoW@<(RP~uQFcCu+^KOaVu8h=S5U^VWznul9xjheQl(){N#vV{|0S>Pv(%Slj)KJ3 zddXzgMm4KE%e;W+6JU9~at!h9S(D=2V4Os>L8AsctkfClt%&+ZnP$zPQV7fA!N-?P z^@FKhSNu}ru1kE*k}h2*>wByaYF`^QJ#|p5^&CdsA*yuTxC+1vN&jULk3o8E*$^f5 z(8Q7nMRPkW(2Im;=UanuZRuH!5V39PEiJ8_W?49fO+J$t9dnCfjJvDTwwwP^ah48- zq`^BDPF7l^wnZPn`0G<3RYbb}I;(`EmETYRQaW84`Ti0A>}PO6Z8& z$@`IhLMi?oKQ)j>liQ9!JL|x++fkjegO1ORqQK1zoP@Y>76?g8JYZuC6@e-iV^yhB z<6i#BFBpY^g$k;NQ<1L`Uc%^0uY9yNtKfzxRSWWiwtwlbPGj;;N42B7pTM_e+heEo z+KJ&!^{Mw-C5YP}8gaiS8}nGSc7b2qjeNNO;EnfH3XZusx=x4~c3QZp?=A|Zy7k|6 zZMQ^e9q$#5vghp&TE=6f5{&8N0M-mks2-`9vr7oOb_ z1r`XJZ%y;FKjo635z{}=7VpX-wlvwXXNL=CNUC zkO~1r@+u8f>E=L10>O2Acng?e>u?#oWc^%WhqU~+IlD%$Kq+;9``mT}W)K9Z<(c}* z;~`@Kw(x_TY>25{6W?t_Lf<-eYz5GSqsfzbi9Xc8Sf;hIZQD z*CfCx$N!vTdMy1N=lm9>xFf^MRf68CW9Y;Gs`f^Z*Qq|`ndeQr6J_Cm(K*+~&1d|h 
z?WECOeQxe8q9C~iA{dOeU?LG_?&dY8hHVi#3xfq(fuEJRCg);@2mLj+hRvp8pJWFF zyQ+~QM$e5=-ZLh)X58o-m59O7$d>L@*Srd?0g=#P-6 z2)atc?{%v+;Y0}67X1WN2@gY^VKQn z`}|o=^!4;~IJfKnkywx3oHX{UAZX5LiI=Vr-QMT;9TjT+7ujJ$_B@s)@9W(Rt^w)iT6I7!fQ9LSdIWbI&q%gdD)tiiT}9f zW^9gr)UI*Co7Z{fG|Z;k!+09-z}mUDwzSvHORMN1KkH|8_duBtcII}7jCn3OKYOt{ zGlh%IZC_q3JzKy2_kh!}ZoaTLo6GBimW{HA>&56-6SmSip6Qx96%NaXHA`y?Xv-b? zLNY{1k{rC(U!!(+_RW^bI*y^*hCU(EG{n)aL5Q_anZEwDT9tReT72G31%UF65THH# zRx9fr@Ab=hoBgAi!Wkx6DYw99%Z1KApcgc*yQE>Cwkr~r+B~YQtsqt}7)gfrDMsQo z=e-D+3db{nce$6(@PG)Q^ulw7?{nvI=?7@w5VasF5Ct~S$MY{N!9+_DQO_N21 z|B=JqV7q2>+^*P!x$aL-_%C&=`~ut9mzf>3EHZl3!3;?o0ZWjUt+MlSsBb?nWI_p&st9bTwo!DP%0sRP9E5Ms|sH^%@LG|UHniyp*8 zdEwvycOnqwc5TOD22y__LiDby5Q7svrBVI*nKt?#R$}#U`z8!csg=hjEkjvYI8?c0 zv}wiG|LLtjfba&Y7(Yc{y|QO4L(B^O`LWWrRUb^8KJyQ^V?HleQ|}nOzg#bIUnBUx zH+9io*YqnKgWkqx1il*E^GC8m-mYCUM1vB=n!di^x2KjEj72=WkJ_KxKny z1)NABsYBW!%3cI{xDAFKnii9|EIzBKE>%X{pSI3LaO5#GmXx@)A!H`@-J`Ufw%*q? zDeZ8)(6Y@bb5e%IC|hdqMyB$?s=2lPQ`)(_N<=fgvlbMy4 z-u?OpE1^+2fnavMHpGhKbGYFgHyCJ_tHr+R08_HwjTY2RhAt?{FescG0=2U%PjS!4 zSd$(ghm-*T>t|7aci>hcehI)Q$e4Pm#Hm)-|IE! zH!=vw_czFYmt{41=WJ$_PdbFPnCRCiyHO7)X7V`UIC*?;uk&$Y2o5PROz9lCZ*Ry~ z%dCfao6@Q4Y`vPDt<=#md=kGIYDrio>O{jx8afYsP%RRjIHugZO6$gyArn9NDG9@- z7JI1Xi27FJEi?W)rU!Y)ZvyH`lASYZEkxiFq&Ew=wc#S_(~UrjeLGWYb76Onv{ocq1-Z>35G5o2p`e-E@NvGXVS5!kFl-8 zThXHw_Soucn^oKmf7_8o75gsoNoGpFoDxU9V8&G*2~Xk>;FCt*s1<| z-pMgKO~&tz4-O1Yl5BLkkkXS|Vpb#UEa)3|98)r%>R)E~%_v5qN5>m>5pO?bQ1K09 zI5S|QQeYSJV4EEfY_Mm(@Mqw#IXXfseb)1|1wQ9~q1Y1Jf2AfaBPNn>d1+lcJFAW> z^}YZ%6I_BT5vh>e5hPx12B?^2^gpYm>@sIm7UCt%*=wHp*rBt^AnoTepoHg?K9%R# z^~yQ1y;R2#B=XLo1-oR36zfPNewelsOIC_yVt;(K1y(5SeaxIRUn?nlNOG62WT%s5 zkgj@(B*#Qqd|xz!lg!5caIlvC0Ezg$?g!JVt2&B9{{m#%?&Z*vNZV7{A7KAY7;q3J zS#St+5GW8B-~=K8p*WmZtq}kL!KVQQ!2v$;uw`<0aI$f>G&6H?X8PaPPZmZGJKLNT z9h)s?^s!y&cO((Ot+<;HtmD)o>gvzY^~Ya{$6Cvce9~4bqrLBOrc?|NGTIT&wz8-0 z_Wqupz6k_gdz&yTnp{YvF&=Yj3~zR!{$E;ia4|8ZzVf^)_i%zv!F6>zeLB~vy_^iE zn4pE681^*;m`0XbQxxohxE{!Kv5QIC9?M(v2w6*;yc3M%$|<2qQH1d!p}ir6SrW{C z)$0m(J8>v8yw)WCOHl*(R!Mq6$!fcRFdS7UkYNdzK>;sni6n;W)}BR|CE@LGu2KX7 zwhtKHiJvk4{+QBmWvR-shynO<-1+GSVP;YrkuLh*;u&0s?EWbB0gBXjttkhuk>YuV z6L>}&uqT_PSiH)bFMuj7$&DF1zNu4FcG)Roe!Ba{j1f}|3Ua;FOXDwO1BM=jUTX;B zBZG<%93OY5Xjr7Ww3dL|uB5~h`B1^(XvGrh?I`zmikBU}E=dipVr>b3eib{_7SFCgqyM zkV*=tI}Mb^8)`jL_>Oo=e=XTIC?J;@7B(qFO!^eYQj2>&Lm^wZQkA_~XI#6qAF1-~MiIZ7tLFms*R%j^F2D zK2d&ex9{7_$0gR9?ES%(!w%0T0ROhmV43OK@>}GRsIR7KKRez3<0(7cfq&*xWNBWV z{DNX4n5t60`@#Rgl2>DUhb6yv(%yi9T<_nb_x6pK)n2}%r412NW&#&oy@#I$enTDx zJ55?i=5mRtS0%;7;m?Vgq?ilQI?N>d%#hPL`ok^F!p|refeLHGvQrC0{ngc?+?e zr{1*Nn07y=$_4rSF|rK4?jByZ%jX39USFQhtNlISUY^SHcYVIvnDAx{dc5xe>G^s7 zUpMLJ`90rU&o>`mOJj9AzK{(at$+ut3B558abYwrFV+dVIRg8aUIo!;*!hufE3 z?f|3HQ^k*OqWrww?(drivhTa&tJCMc_P%eY=kx9DdqqW9RsR<-SfcjmV(;%;gYUKI zb`Rr2P71u;AK!0~-@0|d8g?I{B{G`7tCnX4h3`FrPB6pJ>{B&mMV&~_!q(2$@wor= z=|sT0W9qmIu-MOaaQnZXA9g)|zlF~^q>XU541B##u^u@X)OMZc?>5XD=zk3f`X0OO zmA+o2BRk}6=e?dH1N!(3{9bSM{MfgWR$PXy*}16>>R+f=J=UeOy zo<*w}i^p!1CI2ojmEv9$*49ob=055W$H8~Ak;DJN(_cd2EZ@xbZ$o*9%rj_8HE^VE zrYE^MjhF2|PAQSbp_&CHi%34z4U{v84l@?|i&nS;bW+(7BD-#ov1M zr9RNru^6@N_dCN6FXu0CP=_B&FFL}g>UX$)il=T`#Rqi~6aX&L2s%C<_|ZmjPc5ya zPNaaHB4Tzv3P}Nr98s>$mM}@_kUYvi7O2ev)$i=ZsU4b+&&{Kh*3(=G^}NWowp=H_ zDnQO82QYnr&K3G-I^2bev+x*Q47Oi%+8{ zX%a=gcTBa%m&vZxuiEVnsc|BW{&$yBsT~RM1aU-!8VSRcbyTGQ>IOl9RD|WVs}DGaFM?!Z>Kcf62wGmD zAH6M9+(67Qh_YI42bOQB5~^Jl@mFkizZK-KKm+=|uK>i!WVOXTx1=uafC(i7rq6x@Iei{Ai(}02j@gL7+dN8EG8%ge*yoi3%r-on zx;ti%{g!J+Lz?~6Hl~@h2C7SuIHI!ZA8Q7fZk~nL&NTG z{WmvCA$|Ka^mZbJSjcOM#%jKgZ9y8U2trZCcd8K(mv0N2d#IxD+AE6upogLJdg5&< 
zkLzi!$F5x(_GoSnlO+y2!MNhIO+s={_Bj0FGMg^KMYxJuA`#H#a9QMff~o-AW)RQ& zo^q*WAy>s@U^cvAkHWy)g6YgKv`BvWZSsnGx?fct!9{7|RStD|>IJ;z)6o@-26SaS z1{@7E0oy2C4^vWr4lGKJM#1g@*P-~KP7|4D!YE)Y!-I7$?LiQUG(CVzI$U?T|3`Y*!$>}cr2J+o6@7_ct$|DI?;hl9f9GbYrEfe^*k8j5IqWw_Kzq#F0qgk{;?U6a!o5z#VE5C&V z6X0cD#Iu{|zIMGRf0a6uxkmW1vqZzfyuDPS+6=jLiC#vepe(^)Pu|xt=5Y`PkA-@X z0JY6&n2sj{Agd!YXx;9|G1hrN(^S=K%T<;P-Rbcwg2cb!Ly%klSmGK_E^jh>ILf2U zWe}w7DpV=);nLA~w}a^0v`N6a#$(*D`j^!p>N&<Uw(eW z8KB8W_ir!_+Ijx>i*Qm~H!YdFutDJoKv~;zq?)|Wo8u)U%gH&IslHcVJ-Rpe0d^JY5$k&#R z5w=wN!Sp-@JGH00A;?${?Azi1+@zq~NCqs&M=RTWARW=g6>i|i8Qg3|3gwk@5--}1 zEa8v~L^^AHwU?dC`M{3%Tr5SkBtkt4*4sekxuuuKKAvnrEKAseNy=mR4D$W>Y?`31Y$#m+o(> z<)J6X`8ditQRpXaYT>b1@iDK;Ykqm==--Eq(T-Gxrlz75SO8&_cr1&(V<-GAFMJ(y zg$cc+9Le-+$GHV$Q~c9T*|RWJ_=<4}|s`s2t!U*v%=? z(*&`&4GrVoAKJ;qXi18$jqE7*7QQhZ;U5syq9 zDEUO@$xtSKeqGKlP+*li=jMX-zK+#_(8(KV0Zq%vuU|c$*!*4-)v%eVwZfIte`}GJ z0!eG_0tPCfCUwW11AeP^`zByl^3#K7qj;z0hS=J7g`6YTEonsd-2Uo*7SUfs6wyne zi*QEtg(zH3JeZ+ad7~T_`537ZVCzq zlgNYiPa5GbQl==F>=308d`-Z-D+ywoGrfn#IQc1!$A~@BqH=cYvgaM_K2;Kr5s#t! zvOo_?i@n;BOJB@fA%pM*8((-rm#qZ`b6^duH7=5<7<%%-1{_^uBenV?&E*)gRcYK^ zoHYzpG|&tb{~WaLM9=kVTee~yNi@x?=+6-V3YoDI16}IWG<+nB+Pyyp;$`E6nS9MW zXay~RklqZm5C)UIJoCCU07Dc&q5xl3L}D`|!JUw$8ACWkgRi%_BM=3(oOFQprAuiA zVkL z)%(C-EFNJv_tMDFpUZq3kJX0)ZpLfy%a2t=r;7wLKV^-KQ&yDZq_{siz`XCT09{!T z`9+$h9mN0x{7RiR$o5;5{0dk%^!GpiIy4cy(wRZK@cqsdO0QJ;FH8ovIYT}%o(juC zCXb|pwBgN4yy|?9&W$}kxOK9bD&3pOnpDM*;hlOd{T2($)Cskj=pcq>kVSM0r)?G$hfx@s^T)-HKCAm@14W4Y7z-e52e@H zHRaa8=^AAX-dUXg-xkfW)%o7$Vl?WA`I8xx#Rao$&RXT#JeVVH{Nf3M;I9p)DWq9k z8DJ*)J>*ADdm0jRLPl-Yh(Fy3X5BaE5%@URJb!_4TBI2bZGRUcA@RsmROx1#m~La! z1S=W`7TpZ>bqeXWfH*hAtpxgQXxajBlk}^>FrKiK3)`*8=(~0_g1+HmVI83B*-gHw z9Vo~+MPJYmdClow3JLtS=_R(pmfVW+k2QQe2yD$M-;w<|P-q$(&_Q@)FfqANC=P*H zf1d$s!A|g@yCJ)hSTXMqqsL zfMgg;z`2J9j~K=T4JvJshZYBgkruvk4@DC$hMIs&0J~T9pzRx3$kLUvGe}gpHSf1Q zA2b-rz++2yB?yM?2k*=?A4odV41WsWNPQ|CmL+zj%HB+R4LNE%J0gyxgR{2(A3J%m z8!fwvQjDXx^I}h%&DNCm# zCr!=${MW+Bn6xvL{vv>R!?GJBt5u2SifrRXs6z|(hMiI&5wl_UmmR|jz@lFY-teCs z6N$t>hfYYMS~>=l&9fOy7#>|1yD*>>Di&d^-7Xw+gSk^}gLPd>ePv4BrABLiW5O)= zA()n^)bWiRR%e7~b58)SlWfPnZcUT~&MhO)&Ou7&N>a&$P}2)}f*OWng@lsh8jz)f zh-_~@dm{fdMeS%!LPA}JJ7V5O=N~I9R&G|Kn@{dE3ta{+UP^mR3i`yN^5p%t>W{B9 zN$(uXtb!&4@^}~V#YwLgBuRow{N6N!5o(3W030VgTvKffCWqp}dbJ3T1oAGAvD;h5 z>qE+Usu}iAEW!d(f`U=o^v{pC2idW=Az$Ji(G@rW{Bn=srzIxmtQCpvr^ z1Qzv?`Z%x9OFEL7ykZhWa>a8jvScxlKf2lYt2Glr8MjWq6HF=wT?%`IR1SrJ8UtcK zL3Yn3@ORa+YC5SndDqegkQk>8s%M)Zk%8@4JtJVP?L%T4yJDD7A{ca~%bTpQ9txr2o zw5g}j>jrovc-;HCvC*Pxy3#CbTQs6{;8NfVk3-yvtHz^`;KvXhu|xHC^?$t<8wZ27 ziH6h2Eb1Wcd?agcqP6K~P&#WPont2Ffa%@#UFpmHaeN%IP%Ny0_85hIVPuNc^0X&jNu97yjUlo%W| z4A?OTk>~^Tjf;1Yakd;=WqBkAndaTI5@m`WdWE~uh5uQOEc$@w48t9D1I20?{7?9a zvulW(NrU2Mjf`Ysagv$4+$ z*+i!0Fy8oTii|6Tx?r0Eil)bnhOQxKPPrm=mUiQtqm}i25pD1yzx6xAWyeN$OE#Y* z4!y;$k&fFjiqGr+%pvb#-XJq0fuy>Ypplpp=xzl^cJg4;QOVND{>C&FA`x7D4-?KG z*F((D8wUJW;-nnL4-UUxJPXiXI$>#9;l0{~GEt=w5;}-%xrAbvA$BPwyy_%<=pGz) zd^Vy&7XK6g+x^${rDat3O+k=(WcohoCBgFdc~Ut}+d~CwTUeHfuk+F^Ye<&`{u$<; zWCAMg?Z$*gIHblRdB2#jp5bXHK?LS+D2F8x>0}i;(!q%v<$lSO0S0hMcuN|(V^01|N zr5qr@?Zf9QMm2Fuw+f0hB^0)C-OK0~AmaP{DH*-g4K#1@g_NZt5I#YuJYW|^G7`>F zI8xSSCx`3Z5SgA+L0GGZCkLakRBfRl?BIOx@o!vexa&Lwh8mG*ehrsPF% z5KwuMe$jVLFDe09#G@!gwnX5&k~(RIPsvDop6AislOZ;CXhfNlh6|wTiEp=SMT@|( z&__1PY+k*?rlE%Fdf0_CY9E;{V4Lg)4z_*1p5fZuL;v$je`{XzjGmB?y z-;2+xCS8P5NREHyW8i>>5*W;GO}t2EP~$qLp0_*I{Qy$2rPp^nWB{3T$(B65RSubmKwMeL{?p;NbUhF~9W1=7}z zJN~MeVt+M%1XOgCREczAgN#p!9RSwHc~m6Hz-OwD&hL1}=D((;cF2>6UZx&~U-5S+ zg8YdZiCjhNy!o9OLN-qCdd`QrYJ(gEg-j2g;2HpjnQ09f=-c5Wl8@q+_z^`eGl$|2 
zk+w8m_D1?yrPJ1Q70KgR5H$WfmIB0o)~tlKfL$oLRu@<@5CjrwsE+KN2(Lw-26iMY z$}v8E{(Ux;bQNM6i?|YP?9qtDmmE1H!mXrmJ#4peWfS%vdulvJ_Ei;=zwEaHsVq(QY5j_j^fi$oft7M8G&fvitB)%ywQsX%55PWRNn;dgutn|IYJVG9Yaw)~8EWU8}{E!u#iq?(bRcSV^F zTSFU}&y{aeqpPc8oZaOzxdKt%1yQakuY@3#+0BW}VZQGzH$p0=j9LA+a${v}a?7SD zKY*3Hocl!5&&!F&*YiU^&8P{Oi>n%~YkS>_bU+!QH38vA`)C`PUJ?+F6k!%oiS`T# z(@w`yEUdHyuh6Dk3j!IRRH-TtEpQ|Gnm1`OA1mE3x-8s!ROJ5SL{DS?l~boST48T2 z2t%v^dDIVPxJ4%#ipy`BxmgxE*iBrAWTDisI_y9sHa2=@Lrf-5xbJpsqO}gIkjzRu zdMk;^gYokQ_qM64uBh+ZCYKW!h#Ud76WsFqq)Fdyiy zDlurtjv5Kg2B}6VuM(f>I|`EcJv;G=v|dAHHXD8DkF8$3|E#L}3dB&(sHHy@f~X!) zee!N;qtYG_@q;?$=?!6j?7(+ah}vA|8gP;plT<8+QsAh_ZP>o^R2o`zjM`G5UHVC# ztX6%T$?~A-=oUWi~`zA&US*J^!AcGIWF$T zD?rEFWJj^JasH|mzCjAd2CZMP_nB@E^pkczW6abf*)6{i4xEf7I)v|(z4}3(hs`KX zk(ed(<%Z6NQAYkrd!w)kVss<&mMRU9i6 z$47fstCrR>uhx#M1TW9NF`Lv+7NBi3iN5n1UNaL1Rnhu0TKj&_|;P zD(v<%r?_fk1ZG?v-LBDU3fim?hhWq-)~-DjQz{VC{4olv60i?AX|c;|csY;!RL)yN zqtqbqhrR{=NAL^=&40$6+rjX%5F_v#qm9E?g>J41B$&3|Uu_?kNwPn}6oUwLsZnxn zKT~E7LA9@f+%n7YgGF40P|2(B#)F0ZzGTUkKFB_27e&A9ML9AP3zVW+fo>B^8yJNX)n zVaDwK$s67d-4IMF#~NA@D;jVesI(=mRz*F_2^cLArAF%$Bw-`1N;xCPGLFt4e{zwVs57-5t?UgQ z5Eotv?QPP9|MQpO7;DN5INFJbMow>=&=_T!A?I&)InaBbJ%$u0#_G8q`TFYOC>uiy zxhuHIs3!F3}Hhv%kl-G_w#e}oWQ5FZ|j*%w}bz?7!NqF zEKe-CvP^%0C4SVn#BMj@$UCkqMoV@0j4slm?wt8~zr>`zEa?|Cf8Zk^=3nyYZP0nS z`+;&joQN~%0oj%NP0AP{pzFY54E0~#3NJEe4mO5YA%>JnXW!` z;2LcJFc`JLKaE5>%4cw*AcQDuPc1&)cZFILTe;NslL%=3xbMLW%E&xr`(I~KD9QFWmQc3T^O-EW-6nMhABI}F+YhHHpYm;qzsU| zL1D^)DAVQ-OPTg%76B8bUADfyVJM3e?oV`mJ_(wKj0VhU$%WLSh_r5`Ihk!%3N%GZ zhW_*c#{G-Pm3mGGI2-Xgl75Cg4{~gMpMxjQt|ZRlGjA_IIMRp`0ZtSfOjo4HfRIYD z$Ar>kp{7>G4EPu05ov?&DsZgsaOH4_7bPxDdIz-FKpK_bW})Fn9mxUor`o`~(GFC- zHPWpCaSUWZjiDb@srr|v8Xq>v9uuzQNg{Kq{_ZWxod;9l=%`SRhl2x;=>BPn^XWMRiH7L*&~BdHs4>(4T_v9{A-1dGS2cewF#!GpH;O= zn5CT}pBNZ2`Pj=U{o+Ac+Dj5#q<-$c53xh@i3Tgy01IiOFbdNOwuwgu_qMEHEg%aL!z(r%QqW{~%rEv~o1`pmE$-+3s&eG2-O zGs8O0b1(7_9>DgfJ=56nJc}>t-(8jw7N7ZsjjJo=u+H+NkEm06xJOiOk@Eqoun90n z7^YcCgLxF}pAa|@7hjanl99w2G-SV#vmTZ|2*PZ$i133WkEqbH(4@8y;5r>t9BiK= zsDoEGglynLueU*Q!iwv0qN>70NN*VdtkX5E;{8iPg=5)QDspC*PW8x}%|Qot7GlrD zYBwxXO#WE#Mm%u@BoVR&u{ut`uE2}<6e2N2i2YS&$?u@oM=P;W6Y4EokS!}v@WhMR z6d_ZK!kBL`_M!w)_Lw$aTM?bjr^GUU3BWTCk13lXb!Cs^L=@fRvI%liK!Kbo1R-`4 zlH`uap)^rtNApI?M>G8nKOr=6>h(4n2k_R$lS9Ko}EhJn%ZwWl8wFA|M#mdB*!K>6?3hM0fi2mG^G# z8tyR#<%`Zuy8z*A3=+?yh_pwFkz>AZqO-O{YE&zvh;g6b&5$9251C{p`urRY*{48Xat!onSMlxMo; zoA$(}GFJL4oocc5NkAUxJ2A5f?Fl87yv%>taM0GG_(_u-I2$cX4oaaSz7;e}>{WOoGrNtE*mks$^iI(0hIo!4M z4^8<&w{8ayWm-H*!_HXg1-jm(Uny+7WD`1HYh*Sx0V;U?uxu?K&*v%4!(a8C0|>hk z+n)|*-q(301%TZ=Ynq6a%Au~-4fk}{)fRgpj1E-R*Ai6LVi{x;)I&%LR)1r0oJ-Jn z4m`_<>qq$}>(-lAD()OwV+w193siF)FZ65FD7A&Ak~bX&kM$>s0S0f+{;4Az-K3q) zr?JXs`9z9@`yBUuIy_J~!U3%ZQT3|wOji)rYW1o_EY@pI<_!1!eTWNz$gLNEfkk8n zQ+W9~z~Bsk!`x9$OeqDBIRr2ls*tphXUm$f+z)GV{pxrfo^H>U>%DDPnm?)-FUlYn z59H&Z^*##U2CznBR~#Hp6lJt%)Uqx*F%_m$Y|Z^H)I59Zpmco@sur=S9MMP=ry!Yx zEKiAv|3$TuQ$l>pocOSMR=-pTb0L8{Vm1 zX1o_0X7F$IOcAAs9J+r>_N?oJ6{|e)^RFV!?J+LFhh=?gLPMiu zCl=8i4}Pkt=09b7WW4@p9FzMisBjub8&)921yzeiS)9PU#xCXx4NatQ4mFrb-NkyX z-7H1#zo5b)HFu^6wI?}{AS}}`9%=-z&40Rv02u;S0lG5EOUQrrnZ-*pH}}-jbvhie z(uYHEP<&AxXw!?P#t{oBAS5&Gn$Fum$3VG}EKSldVsL{{4U=^g%`!;q^53JTN+{!5 zSFL0PjFLw)nksys!AxngLAGHZAZkU*?9i{KqQQ61~tNoZE#ZFa!-|XuS83!kmU`_ zC2`G^y-=RSG?r2M<8#4{;J9K6k(sd5Nx{Xad{e!b&XB&37FDT-TByrfbB)kKD>j&RKuzIc(Sp~_saG*qg>Qt9p!dPmv6{>iNTWQi58~dcJ*yz zZSa&*b3`7#A}V`rSi&5`%>_ja@N6~dZ!!-wpZ_)g1cOzALEP^&SH$5I#=x^af~yf% zkq-e&FXEsqg9DqvhS@|g%Gibo^(&G5uU>?3EY`#ek}1bS4Zi`D3=VDv8%LAwsNK&x z{Li|61s~7ROu?j7Al1^0XCoy1|9gjp2G0?r8TkD2$V&3^$K2TFesluOvYnYzyq#HZ 
zoL$*)PQc)Py(z3shY5XUVtw0d%fZ5L7h7%3SN)487|9x9PM3JzH@gcyl=~hBCSfDp zPc6Bl#c9ML>%9?Y1{8({%=d?FlISl!EXeLH`SXDcCvk6 zo-V-cZ6Dq$Fjb9?vESGRyDM5cdl%6cw z`<+!Kw#gaM(QB>`v+)Ej^T@xUmdKn*(GOVifjd{IjPpF@P*+ip0mQ{(#4ynU6uHxc z(|{c<3G|D1eK01ddLr~Prq$_Q8pLJ*E$3^kW69%cmdv^sf9JR+%qXyc{SgpVCO#S+Ar*>kw+Uh)f4uaQO;k_QR*x13!6yHhi z@Yygp#Ipc;F`3iu7fB}`9I+w4nDnj31QWrw&JVvTdZOud#053`lUu$A9T6p1h%<_7 zmSAp<4*gG~Its7L0+wbi(HELT!^{=CSDWo+HqR$vGZ<+J#4Ml7PhD#TJ4ogi%WeZS zUD20u)BdM|uY1#9FPGQ8j~LtfSzkZ! zBG<$3&r*V1z721$`}@Rn@LF1Fe4j}8gHU$jLnaXw=D726)_8{;i)X0oR0+-{`%37o z?Al71rYBniZEH{r=$^k{ly52i2;kd$^c>&ScpOA`g#^8SI9ZE-6TT=N?ZV#N?TrWc z5B}}g-_hp@Lgl>)T-I}d7vh0Y)$ScB_mo}=Jlus-&aK(dH(pI6Ocj%^2vhAXER zpC~+0q$D??rdjHd>!p$?xZ2>wt^9yEMktifIz@?rK`b8a@@cF0*03k~Qcn>cco3x% z+uXy}p_l`W4+l$`q^^CaEcn~9X=hi5oY($faCASj{JeY3M9vi7{E_w^Wm(kB_ky~& ztEQMzi6;T?pNpb=PkSw$Yu}CJh?6jt*F{(j_btw!WqEz>VcJk`D})n2{E@aF)iYB; z?G5adHdki)0VzkbKk-w0p^;2@e zUN@phc5~r>HY~W4Y0FOU)aZfrsDI>{=>ZqN_2Kr|Wnc$PM&UOV1TzRcbBi_kj7PvE z@^thMQ9Up5Tr#q#woHsm?t^{?D}_bl8PSq=bg*<Rgru0|BE~%lU?l*ONuM6ENa(MX1{fMQ(Z=Iv?n3C()ih|JqeNscl=knu3=~(f0 zAcY99$B`(t9Tj7a__zez;1Cp0Xii$JJWP~@TFt%+aUS^!e96p(0knR}T+6S%t=;k^ zIJQ?_t-_8jE?S{>L#kc}#*+FyIdS5dtrk3f@+&P7 z_MnjqF;`W`&%fDvjK(;wlk}uH<{(7++&wUKRM&SR*$%)13fp!J?g3kj?XwDYvKjV=E+e;wOc%5&z zQ1w8sUzjgmPv_b$e-ClSfsqv83>uJdQF)zpAoJ~{5Wj#J%nFUQP7^x^Vyxb4DCa;& z4jwxZ2Nc0`+}&^O#bqrdkwc2iY6SE*O2JSq_DlK^{iNHWnhdL%eNxlytsS+Bh@Q%C z=P}0lW^&Aw9pmrp>4kmB0xPFl$Hv0yY{kfn%tE!32q=XlN=*WOYJ`o#VF1s>t6#S* ze24m{%ty7-06nqk4&@|Ya+i@4AW~ipdguv7x9lJ844C*P9#bA>!;~-Ho4s&_n*=t= z8z$2p{!8_wv8&wrOScL}W+kz|FY>W)?lCX5a1Y84s#-2#AY@-Kt>})qb??mZ+3Eh) z*FK@p(~VjEr7E0ziYE|B%$#h-7U_yn|N2*BA;LLtU6V<2r1tV*shgd6E-1AAgQxpHNY3y73oII?sGXR>i24J5 zujN(=$lD8JhFmKM+pbwpl?}rDBf%Kduy*QeE8=R|I-ib=f5n-Ynd#_Y%N~ok$1KJP z?FI-3LjkBy6fT5PF&!RaRF;4$rx2%!@r4wj;~<77`yxA)MiTC zg6Yon(ZtC89sbQUh@bk?7+#^}4diK^0egT>8Q8wiN=Kp{W~%xwkH*RQD?&yw#Ubs+ z6G`WJozP+9Gz#bZ$i36>%7~|y&G>h{xnLU==;2#{dP+>n(Fq>4uhrf2> zfn3klxe5CrrItW|{8#)70ryEvE$BW-%Dz?c)a{ewEQPmi!qkGOy-EDj=KaO6gu9SM zF8tBF-Rg}O6GTf5Z|)NuuKJr9qeSYLSLI74F(2;5@4+Q;Zd4vCbxJof7@ADKaug4p z?vGSFijb$0iwdz3>n2i+u-;93Wo?JY7#QpZk&2OOmOxvBv4My;U7eyZeWi9vaM!^@ zo#*W23Tp3}dfV6D*5{5{PGQ>cF?qE&{B2~h4BC2G+zcj1;aj8HDn+eX1er$VL9Dgr zUAwa7CjIsD$IKM@`HUF-O3)D?>zl@o)BRJdAu|xH7)g3EGln&R*9BxW1KjSvSte+e z<8v_%vh(byM3}@bq63M%Xu@_7%TtKQ^hgX$k`tiBxH=<)NzDH^0)P9?9n8R(+t4jD59-)}igul=Fzy2Jvi0wed39A<2 z(PnI*`h;w!TKou*zmbO>%OTZVC!TVY`~6#e6#g{4$+zFS$_fcM*3SFHg!_R<=TDAV zhQrfE=U>+Yh!N3l@%umwWe45~qX!dG^mPx}br-NWvE!(k>ZvDkqWh$=j>vqf=Gb#- zJRyz8JW!=*9K=Mi%2_kLX1|~LUM)Zv3tt9MCwAV=*b|7m(w32j`CD)?XOm7)s8WoW z@Kn9l7^qyHCcPOuoQoe#wA2D3+%_U=pH#N0|1_b)t-D-hZh50SY;w7=n2F>^&i)ep zd?TfW_y(rD^Ju%u!}o%aikU`Q3BB~*7+Y&5D{vQ~%C}De{Ha4;Qg#R`@UyL+KYP~4 zz%tIJVg!-%cPm0fmLX@!l(`P=oArboA|Z`!xS%``vo~8h7UVf1GZ)$8`HWr5n`3T( z0rfC9){Yz`+AQPeJrd$!B@k~^XCv!*$4S2#zEdX3NuscY|K>t@Km7##KZF}u+LsBN zZT8;Dnf4V~O70wB0)9nOl?zkdg%ZkCtCFiz)77^p7OckRVmayB>)-dJ?m+xO1Y~jv zpbbzUn>!G+>v!0E7$PVH-ZQ#hpC7MYKAf998$Iry4==}q^R5_Ydp94KPrF#(4;8%} zAEK4-27c$uJWo!dz0tf2%lSqy`3`{5Ka7rkH_p%i?y{rKKE~DF^tP3 z!wX2G#c?Ep3xCl*Pm)g49zB_btX?KHw{C2#`nL6*90^q8*ur3eT_P{)wT#H^e;K3= zVTH*OQ9zkv9hd~I$oHjE0QySVM|6ETe7@H+5niTyKkg;J&-UzYD+|AtmsN8=9=d$p zIDKE=x8XT$^ttZ=W9wJI3ww6+sBT&d+NlJ-_yr&e!S9)B87% z9t7{_&?sQ z&D|?5)Z;uep<5qO)YaGSh;Jt(a~iF!VqF*SQzy&^@AXLt-8E9dL)~_CIY76wq%xV! 
zd;8I=nBt_Qe67aX!?NnUv&ETL^MOPoRK}Z`O{6o6y(@C$AO4pQ7zRCQmh7O;t@K3i zUgt=t+%x~zgA@1E-KTbVc zACY?S0ydIg-j=}wTM1d-lne9Jl5!ujD=cL$+iYmGP*jOdTiDiey|i2 zj^W@GjP1i+&mf{>Ynon*GsOo|pVBy7Y;@uiJw#-{)3h*GFX3&; z&AWPeQ<$x*)R-&PT3Z7)_X$?t@MmXfuyG1hQCUfaIzWhc+0Yt{2Yc{|NR71Bi>h z^ww3{r&`j;OOjBU{*o_tH&n2#ImtTHW&T6LgW_Zp5xNU;eE{<9AX9J!SMao=Za#A4 z7Lu?iwpwh_p5L`nsc;5F8A4xaFS=@+wv6;P9Wur4NF3D@C8V==6@CRzv)32{uEqvzRGCteLtVPO7t}8_>z(+NT8Ju zPh>~zHRr}XHC0P!#1ls31Nj>|ihQU@mSqcM8|OP90?PBR1cmi)r0xy-0IWjMHK&Q) z%WB7~gaSwZu=2n^C)M4W0bb#hT;Pd6a%bdGJUQfY?G> zi**FGd*;powOzftWOxZm{@!mGbg{IFG{d!KPLr`tut627+q1KZ%FZ<`?d7G65M5FQ zw|W0sC%U7uUo_YDtwy++J7QU7T;vLx4zN3LQ-HM7rBa$@bHuNl2NvP9 zKh{woadSWT;tMkIQ;Kz*ISe~h43+Ig#cm|b6{Uwj_fr;lm4#TENU=YdoMh7U=<4f+ z_WuOS=(OTpka^vcvJu0O>Dw{N>f`x3Cv|R^kX(Tr~K$SlU`p! z>BSsJjea-TpH;&-4>&8zzRMM;FDSrqGacWBs02nt zQDRUeY!zOfhFrqr$4*-olnw|d?E7=q3ObN={2y^mGHu#RTdP{AjuAsnfk$5n$*8UwT3zhVhyYk=v>fhseVM# z%uM$9GG&FS%_JO(D;OB^w-lt`7)K;WF)1wE1T_az9j|q*KAls`zG^8dd;t)DK!c2- zo|T_sa_zS{kPvLi2LBHsrJE1tfSrmpg~?n4%>h#=($_11KTFHxZTIL9XP_!xdOrGK z&M8aF=lkyE@g}up_EjS3(_6Rpxf?(Mw~yCZDyV>{PmTMAPk*=zPx)i&s3Uxk^n#dU zWZ1)GPw>7kn<}GyCwt-5ArFl!bCNRk&$*o;KY@er{!T_60W zveP!apCd(01!+$JQ_{_9tk@gKHmwG-@pq_TVD04Y+~sD<#umg)NPGn2Zty-x80HCl zydz`<6Y*2TU?+4xHF}6DN2C2GT6}@@va6Stp2hASjxV$!#^ux@|Wz z{~dZ(m{+JnKlfaVj^{!jNT@kUYrEPAGC&oHe5F>e`K&9nw0gI8fHeoJnNv%1zF!zR z948IS(q;jk1koW0^Vpgk+Ru{@O4#86&%jZiz&{mN|LKO-j#&g+{6aTa&1S_5>F+ur zal{CL+<*LFWz+g-ZX60npUuT-huD(a!vmY3kw%{aG|gP1{{T|tx~Dor|BxdnydQR< zW#c?+pgtdI2xuB|;J_KCmb=wIga}elN~t^1Rc9BE40cREqawr`Izw0(K@AK2-wr$k zmX;7>h7fd%-@tq@ESbe-v=mFen^n2aF=H0>5hF3CHlH+RZCNgB#R+Bdzi_Kkxc$Yi z9-_MpeXDfp4&<}ne(Yt$`b+HB_%q=GNRxRfVG4ig%%_B*CV$FiA!N~@8i|(iGX#16 zIz;nF9!6*=v`dwRC9;7FqCqjs$M*;ULTz@?{kVV4s%bqJ+s&);y$ZKuQ~TQK-}Cav z5T-a=g&Rj}D5y)HM2urni5yAQNmOYWmK83B^97fNH&z8@wnbU`XA3R|rBCgzjgSqT zn<7#Yf=tI!wF#Oz1`3exJ4=I7@T#8fz6wPko9vM?BZ;vN07Pb0(7Vv|)oVg@%#&wg zBv*i#LgK7u2>IYuk&DtGL(s$s4=D4=Wff5RQGYZ=XnWXQYE_O7e@kTUqvfK zu!C8#-to8T2603Y6F9YyE}sU}jpQ|u5oKSw#=^Toq1G!zaWm|pL~rCdy-&ieoQt3| zeAI6WXmVK@(?6~-sJbbk8gd<9e@X6;H&4+m1JearRGh(Uy86+M%fx3Bh6X{Tlm5|M&?#3`{VTe)Lcs?e?+ znHr%N997&wb3`Hj6k~MlpX8z1WMtsysn}#;=hz35O}fbAf}Av=4h9z8H?Hcx3$o zhb^H$#R%kDqzhh!L>7lOAJc}c%wJiwU&c{o^i*`38ct^L={3zhhdV=rwU;UYAq?yU zumC)cWi#@Nx1ZTDNg?*H_&1251Y9UO4~VJ7Bm@mRkIPG1zqd+YrMCq^zfb=0&@&!* z9AaUR=ReHKFpd%=CVxdu5W)`t>Y%&+cFKK^vKdKswTfU$2oUshM09sXTxqlT<{;1C z{j(@D-5e?r;b}8oqxC zWc{cFaO6XNbC@p*r_2b7ZM^OEZhT+ELK=cSZ8vSoC%SHU30^ODw_(4`Mn^~>B$!jw zgm{Cikt=zG7NJvREn;IR31#39n@|+#i>P`*8Tc*O-Gm{>#Hit*J4^YY`^rPAz3^8R zVowYcMSJmM$d0$Wd%Gx7aN~{Rq}>m3jVrHF`nwCMiZOtl0)M}ZI#CrlBRrEypbrV9 zh`JCV*sT~2wqXHq7=HGof&aLemy=vy!hRCv9%S}EMTkf|LyJ0Z?Ypvm$^-<9~*?w_?6{L^}82)4TfJ9HqIum8>Tfds_h$1Z_x2o&G9H#D<#5S0$j z;Q-Cpi-fx)jZ05s6$D8IT0?RN6sVUw!u~`vtX|OJS>6#m6ve>8hx4^b%>%5)q3o7` zVHX~AT^xg!70$5Wh8i&GZe$E)sdy5O%^<%gY1Bzt(VF_~*%uALoSl_Cij@xda7k~u z+AT5ww@ceX;ZFusJygk65{6cksE;}jI5F_-%FrEmAr zGpYNI$FZ?j{?>FdaKq&!) 
zGl$J$4kTNk(&|k-3<2pqI;zN{jZB)4)Z$l(RisGl1U@aid2cvtRv_ahNo7z=onR_{ z`MqjdW^UklY6E;CeWTC{*)7#Ai_tB7$0sAUkq!q+=9SWcV1^?imxhEJ`;NLn`a-5a zAPKua6qj&}z@YRIU|OI7_JrlLJHgQ#c79=+w=S|G;gEs>NP{Qp)JXj9d&x8jdyc1O z@1M7c^N9%|xR2?lVIj#w420o%+6K25b<6e-!Vbx)@sQiEyh;K?5(5MfyCW;ol2Sp# z`WP>`epmR!7IqZL2n0-j-Jm682&=?uae5(ZsxV=FzFKdIRL4k!xe>QTM?NoHmZ%So z8U~DG5-zLh(+Iy$y>;W$qy|2oRG$T#`yOGKZot~0C$Ps5-I&TA%lui=$V)tjLFt8t z=aRv=w}~vm6BhWHUNJ(cUsR=w*v=3rcmV?$P#aMP*q&qHZKfs<=DLs?P}o6E-x)YF zXXo~qQ9@J74a(7L2#`$g@hK>y3{^&-BiQiUKN8?Bchg~dH;nmiW0Oz^1x+I|Nvi%J zb@CRW%OZLVx)~xKy936mN^}TcBQRe{;TqgUJ31?!1O|YO-$JU>hk-E2K_c*Q(bp`Q zRF$CKdBw^km@3NZY1fe;AIIsJ#*=OXIg?UFD9iC9<-!X8%6CN z1toCvAt@QiLD*%a{Pyd)%48o)B)s*X0I3nxp3Vt2-w;6+2pnNIJw&1*PAac$MA~~g3QTj10nwtg?Wm{>i87Q@N|>~j;QnbSDYgvdC*lo{DG~1< zwy;JDI>2gqCT&*w6p99Xlh}L!;DDL1_{QHguoAQnk&?X^9`MI z`zTT-gLTJr5;|ax4TV(ox=sZvMT)wj&%Ni6q)wrR&bIY@e6|Q!f@f)(m>`)~V!Xpb zIA4+Qb_F*@D1||?vT1@8v$8O{OM8l49D0;NcPKV?;^kExw=^7QM4LRJz`ML)MmQcK zJQk@MHG8pX-^2K{^}UN>aN4k=?orf`0{@THj~E>=6cxE=a-hSZ3P}Vo@(3M@g~UN_ z`UL+G+)hy(sjk5LCuR1>SvD`V_jsuWFH}pmG0)iln3{RbmlU_PZQqh<*gB zyBzdLl!7erxj1o~bdktAlHhvj!Gnfh}%U6v?Qtx;z~xhoLuu&tx5 z2o-qK^cs%U|85J)@7!TP(szF$>LqB!?nBscBm`Kut0~y_@wEO>SY+$s_S8V2#8^-& zgqhF|rwlxV6xnw5g&DpG(QZcQ_m^}^_vu1G2|C41ps=S^Vw6NGD8eFy`%OdGDRM3t z=n~?icW7rXGOU7Q74wb(_9X`Y;O>I!xCsN4E3)&p3tu|Qrb6%CKVCLtZhbq)lWYcd zqgMs+FJV&1k@PTeY6|*3BSiQ`i25>!ZQ=5%^4(rUQGZlw4s$tf=6_X7=T+t@(D&u-O}Cy$ty-6#;x1T@O3m;=V9uI z2pTMh6K;yWrUdl!+e@U5`Qd?96+o^Xp%@O%)4-AhAT`kkKjs$)G{mK*b zQi)Tj{2Z6j=Vp-pEiuAj&_24bSA*?_F zCzKN9LX3vGu<%;$M~}6e0;oXv2=8@Nr1b7LFY8l)uW@7z zn4iWGE6>gESDobc^lAcmB_0~-F8OxqN2w2yc@mPGoc9O{GZ!S?LCZnW?^yPYlF3pI zr_2|XKtaizITh~G)^Cf~Z!Q1HVh~LVh*2dzb;QVuiV&y zDic4Gn)uEDBXl~D|Gj;U>{szry|HjChi-M|J|}U%i}9bJ7+?X( zEwS5CaPi;zVS?^PHK7h6!QN9mdcG@GbR-;&q*$rGYS@SUt`dj`1m?uF$9TQ)UMLaN z-dvzFMC$&QNh4LEBo=xkGx_1()ssn&uN6`Ixl=^D8}{ggd6I7cuS#fa&^7EjQQ~-E zuXX4P>$zwCMa0dXX{Kghhy0vMRJc&gd&M{)zekQF^=Eu#b+;N2xNC21l&8m0mzU-kK#E7$`<8|iK2Z-=}ZX; zI@2yB;9CJ2^l6w1C0>dc#|(m6ZR*KJLTCp>kyP^i2h=)5_ZM0q83#m&8o)6eUx=ah z5hJoAsjZ1lSu>dv~q*!8c@)GKA zEQS#+NtW3X8zcGUxX=STaOLa|cRS?Zt4!5UQ78lx^Zscv%7AcChJ;TQLP1QHi5EMI zmz|(1027Dhry8u#`qzwtYADi~ut~JPnLWYqjw#5;1i_e= zH<2rtmL60pVEt2XO(Kk>r=!wOa5#zLPQ`SDSzOLY+X^8MDy6k+j5hJX%cz?7%})im z|A)SLbTGaMM)Z+HIfD_|AvHx?B#SboCoFkLq(~5&wfiunA%VK`&s;7e3fE>eBy%uk z@f;&Z9K4z(hO2EXV4W+&JmBvM}CJ;nj(}$b~ZusS$zqr?{_Ln4g!-OCd8f#{x_caGH}Tz+Be^nk!PVL z8*f@(GM1vJ%&Kek2p%oAfb&_!TVlK zIkeV+E{q`L_>e~CIhpdkx5?(e^h8MO6m!Kdg0k5i*mshq0EF~DC|KdaW$ak>T=M|^ ze}f44e)GwtVF3Vug#Z6T1X%tHB9Qq1fe578ODK%~VF!1ytEd>U8c8z%H8$bStg7ht z9Dy6t6Zun^><4)HlZpB9^0`OSu^VhftfEhdG(@q>tK4+ziu!)%jVz~VKyziaUY?_V znvDHe_xft+qvBzwHEjbBw5?Ls^p}E>t?DQtLjtxlG+xY7lGf`Smo4!4-!swL9!ZQ7 zP5J3uy2809MujEF0re1E!pk$vTxTKLIeRt6WH8wpFNfsQH$#xj7$Ac79OTD&K1E!W z*Y=atf(^KLu7s4P{k%P?y*!=e=TD}L5<_*=A8Qa3ubQ{!FU%an@tUHIUx+{)iq$vK zCRlfV!b`G912wgKpzJo}L@h&$PCHLnyHL-AYW;M|MX7ya0*}m!yeNm~WCVjA>V&gs z$r#hjPu|ex$6+K~Z`3Dqavk_i)|N!Xt&tw|s?UmJp1X3sxRR~Jv3TcY>8?04(Wd9_ zX1nU28@`1XZ95DZ%Wrli9{T2xz{iDh z#5(32`XxyO5KurK;CJZJLqHBXPspuW39d%oTBD&D`xV!>bJ zr^VA?a)uJ=eC=Jw6-TUpBd~4|hk;H)|^cEf@UzhU)BM z1AFJs$FKKpo}VvEYiAp%GO-=LTiRdsdbr--?HwD{(=F_krIi!Z3@013pD#n#bzSY- zy1n!t4;_^^9-lrgo^4!STZDx^JAn{721nP|=f^ESocrbilxctj7kbi5ajvzfeOY#s z&~J_J6Gtx%uLC9c_?_qXRW=oyUldHVt9$TY_Q_8!=rQg=H=!~^je@!M8Gf>68K;m% z_`7dcQ%6g7Jv%m`yxROOQITXrbkM2JFg6?JSZA?IikuSj8&ar^XZL)c?*3vy{!8H(Y+fyNfey&!pe} zWW%~zhv4&mGEGcb!Pak z-sQFgUAxruEU3iJAt`^DbJl`cb+=s7)_EYtS0nGvJ$fYZA~*WLSlAGgj@7x^G+g`X z1yd$Q@AmcIWmF@cNL3>q?-|x+wP4`NF6eLP{-^h!@2}jYiH*pur}MAt>yhi2Ubae; 
z0g63Bxg(&=e|5tNcWUy0_fyq;U7VLSKQBF%^c@@6uPfLBE0vi$J=N_6WBN*L#q?oY zi%V1a54IJ5BC9HsayJi??IvNx53+MB+PN-xj3M=Mkr)T!J2kbh0!W+ry1TkHSVh-F z|Hh!qR!8^Ag>I+ys{^lEJl9vAw>wMA^;E>2?Wz%&i*R*!Us5qhyWW~#WDQtkn3 z&AEGweb+VeRh2-VpP0fcVjL!=O*=tB4}jd;Yv3Fl+mdOtVka^N%R(bU>l0QEb8tnZ zh^_K4*ZlF1J;Ji)_=|A9oq6X9_QHP!x}qB zFhpD4I)UKlsb=o6(nA6W_NbE;LIT!SMmATaf;U%%**3A_0?OBO!<#7c zAwYkxVhE=++3c@dG+uVF$>AFSPAuUX_IEHN$16abWtaE^@?@& z*Ikysht}&uF3nTZ)P7^|*!7o!ET8S~Qz@Ae2jKTht*7tG(Ij9p~d!RN8d7=rtIUS$QCP1lP6$AJTzyT7bs? z2KE|k2+womh-}?J@kXgZW2=SdExNfrknW`>KaRU}k-<0_Q^gA1jj@sH-P@2xbm z$pd(PTi{{rX}&fq4MK%>L)|Y|5Nb}s@{b!g&v%T|Au^Vcdb%_ z;vNGb`bma`v6(jh3dFh#9QPPpbqB~)4Ty@7O_1>HwFSV)n8IugiQG2C%Tt_v<(7qzd6fR?SHzjmuo~Ifb15Xs-zjaV zL)VCyJpO~2?tX4OrCI4aLJErQGs(3+#vmEU)bs^(>sB2^~J+7NIi(3}i)b5#y(Syux&jyu~GyCfw!zO+{d+Au*0wZV|4h||z$A9t8--YY{G`n&=l|2E)X zU{&0{d8w*Mt^W+qe~-OY+>s%AOD$sc7?N!jt+S$RKTg)v?Q~f% zkbpRS7riM+-(t@oM9GIyxZ_taAtMx8FW(6|anq;FI2mFFj0g6vlK5*tt&MGhB>x4? z6Obe4w+}w71d8WGr9?5IEV-C=6{*Dj*P1El?mt@XKf3q`#GF^5``VftYNB!ZpBS6Y zT^f$`*dKg!!b^!Dmu*Y*gK1&ZVs^ct2e(fx5y^dTidPfHWt1<^cwp_GpmyHwr8Lm^ zLlkaN{d8kP@-p@}BG?rRL49n_U7$fFbOhLTzbVphmeu%MxX0`+)GpPvYY9nbFuF5D zU#LZ39NKqT_U4rbDCzY>$k4DoYx@E=%pV)&|}**Hl(fcz;LRX8o$t?)=uC>PDV6E|R<8kR&FyaHV0B@?5pv z)pLQCYAlM8;te>Tq|iU0tTm!)Dv(So5@jKXIpP*GuHq@v}Hz#n0kvKGej1P?x(eTnDn`h&_@#Un7&a@!iXZzE5Sdw@q z5p136!|XldY*9ek9oG1_5bvRQ^jhK|DC`5M;uCB@H~%A}TO|&*7CvBtq#RwY$mc0CE+hinAO|CM1J}riF0- z#s{T>FCUAEr=?D0?*zrAcHBT~#TDv@M!^5w(0w)W7Tt}gKn)((%|EvU{>`ut|D*)~xN2C7QITh-%(4hmz6WZL6kaPWg`zSeaEa;Id#r(m*KJ91a zPRHWz6zUZdP{9j&;YSn%co%drZnNDu*yO_bor$vm#5@B_zk+)7&Qea&++Vq7{|6~n zp&yW|AFw@_ldU{?(C4$Ml7Vvfk>OfEkQn~+mEEFKsC2*Ils=rx+4yrYCx3B^OypCC z+5qP1h|C?V^jR_*ivJWAPvs@;sE9zjpSL|8AqdfUe5_h!R9l~03YH#Dedk2+22rmM zyNaOdpInfNC*;?!q@LOohuQEg0@*Ufi{Uf1b_TF(2xgBSgjO?=z!Z|h6c)l5ha_Iy zO~QzpQ#>$DDkiLrQ@b6Y-sUYxPwQYAA{pnW?V3>7z95O)1)X@rMp^pDao1`?^GkQc z*8O42$`~Y0a(l~hd%HHDHvB`%i0#s<3y3<@7hXC>^F62_t|T`#Tq8o9V{N<5<{*~%i=IrKG5_hU3;sfu-`rP$F)}!Xi-k2*8{SCw%<+Z zj|a_|c}C=r6H@+P{3f&A5rVwwcLS`_2AnQGwW?_Quu@oBF$X4;Hp4<-H{%aQARRgO zM}~RkRo=27a`9g6)Dme4SD~4Zg@HnLkV4U6ME)IeB1mS~A`QErWyF&PHd9xVH~1E; zq2--7LtKy*#fO=?a9KJ?TnbuT8rU;S=|aL17CThqmLkcF!B$s`tnI6}gHKdYZJt28DrHN$3^OJS!>{Rnat6{$BP9 zz>?x#`rBe=GET3`n*e})c^DijxHxsELev3LPwjo*o&0jK24TcO*DZ?Ke}E!X0{JEx<_FOI>xx;O_(ELT^tzr$VE@csrFH4UqA)Bt{>KkL0JVd zi7KWN0xh;UQs}ArZx)x~#TAd3NE>I*0dIeri{`+!tK3Ib5O`G6!>EH^PFw?Jr)ZAY zr#7#L8tU~#5t>eKfIIZndk2V8?5+u!P;<;Q2RM^YE`%^bU*X6FrVV0aXg2 z2&dz$r!`KqmT4q{TajgjNPR8Qx7LPG1JMM?q>(KkAXEsl|v8*5{X13apR8Im;o8 z9lixs@dI81_}m$vNq?ct;jIXFtskek@hs*AZX@3@H$kl1@DQA($YSmp%p4_5kcg-N zTNunq33A3o2$2$ zQgk*>>D@-NXVk5>+xmiD^d&Gr4|?v4O2Tt&sTfxlC%@C7Q>%rDF|JPwY0$jGCX53_oJMkk-r$A-t5YM!#F~&!7-<$J&uiQX4rQW=MUTgzedUB z4F?=dw{<+p^9Y-e8F`6Q2i$Q86dx1Wuvwq;s|s(Nd#|M$%<(f>rgz^?-zamqYnm7+ zf1@)fxuIL2P6jNjP9ib_EgmptF?@aGCkMtO68$6m>#p!qFp*hyj8=etN65KVK(B1T z48YBWKBF)qxKzn0aMh_fuPjtXw%>F3hoPSqq4CuHkQ<#$CjGgycf=f`VOHtLV@ek4 zr0J{8sfRKGA$FW^bE)yd%i6Nu<)*FGVWqzPSnqj1_7P9v>uN%YYodE@-P}jw`ciPA zW)_(D!<6JVFtPnxuA?}p#Zt~4WA-sn^1f8vhDYM!9r+bV`?iU>m3c)&)};J|u&`Mp zId=KQBv;fcfzE03DrND95V*!4l3-_|Q>06)n-0HJ)Zh{kk2UVMN<}BA{yRp~DP(ba z^P=}EmS^^OiZOMk#5~5OlCh5=ZrnxSX`uxN<*^I5P&o7G3ZZZoBi=9}RYZy^LM_!W zRj#rVXuc^Uf4gC7sW+32WB28T@nC9qO6Kt8L1w<*O29u0jvR%H1wg{) z;+zYg?C_}C*NVW>L_2fn2aiaGUWjU2$G$71ZiC|Yhdr-)l}g9HL+O=1?;GQ>=;S2G zrQW9J6P6?glo!Os% zPWsR96(HJtSZ9ktZm0`%hR;+-U(t@`>_#fj_M7Leg9^}}hm`+ww&;3PDOW^PexK=% zgtj&hsD&0w+GZPvIom^D90u*~4M~J8R|&KfYr{CTxfsXHJK@&x`5S}-B%zAn8S5C= z8A2}yjItD@x=_v#T4qXid5^B>(LHfw4!(e3WTpLS8B&H8v2r_VQhsAXYjl+^5!wDdm28z=wzy}15a0g9} 
z!VQ3Eh|!@W$Lzh)p1|~_WAC&Qp2P(2b)9^K1298ifj5htiS@MA3v>k#g$s83E$$Mf z*hcBvV1#2#3_J2P_5cUK!|Tq&x|8-v6Zdx8JCxQeKccF@UaF=afTZRLsCYe2&-FG2 zC`*rcIQgWAeT4NQpr#1Rb1zE#TG_h3wf9`*d-eT)xl*H)HXJhiWHO%fZyo=VUZwN?_7^}A>NW@(z!fJ+1(1Uf z=9vPD6V#3wmYYye(UcO5w*{aBJ+Z4?&~SlUd=T#O0945127@9HrtD z^J-03G!~f-ET!Z~$oD8`8rpv-)>J`TTAs5F9v?acSvJNXYo?V3?fqOQx%Ba6s6ljv~yd{ib;U5(q zVtpK0=}IYzVSMVOf$Fy*Urjna8p^I8d}D&!mJBHYb}jWfn{B&i2=ngowx;_s#;Jw5 zwpl}d7|o<*J5IviPYphK)-PFL4u&cGBmlcrl#U)%NG1InX{2qIr~#3)YNJ`0t1En? zU1QqBGc>2&ZsejZr_ZXLvi$@$5L2p=aaU>iqM024G7TuLpeKQGJv7g?BQpm`dgeI= zX`>;Wk^^}{;werE!f3)eZ(=DgVp98yMnoHFc zaEP;iuw>~B$Y>wWCFEE8a|e^RzB@BGG5U;U)D3P@VC^NK!DDl$3)Z^_jvj7cq(dch zU{V$aqW%Z=zOHTq2GX!&`;BE%l12uUq&*h2wtW`G`BCM1WcwDN8?v-^E$qQ{onF?rPB*4y1%*L+27ef{_hEKwlK9ZrT?#Y z#{XpYE;Z#NvDs02(BE-G+PHJ-M}AV=0(qMoWuNgi$w|f6Qh3*wq9!G_b&-KcIiHj$ zii%T(?0A#r1%P3@9tUVh{+)%MCYoJhRv(dMT1#pD1vV7>?kdgQ_Vl=kcK#Z+l&Chw zaq2H9xzcm8h87oQBbqC++_`#za@p7i5+xSV49a}QarziOF3r^u$0q6}Y!=|#$1bx3 z7m{I*5akOp9IUOUjM;5rj1(r>Wz9hmXXFCxBBV{hDb<2O47a8hA}rg&jvYhL_AZ~_ z)}gpja*GB@1s2A_msrwB1(0nL8gsxUW#w`tO_^zvH@?7_D}iN>S(U`kh1w_xgMQ7o zR>}p+ z+e28+vLCy6^l)kEnoS2Ljje3=F5P$I;w$ax>~EX1ZJ~}%k}gp3PY|~Q)pxDXt1Q|> zrRY;vyVdJB`cQ?f6@ct+54c&vQk5T}RuGVM{uht}clD%nB|VxpM@!rZciW!W&x|56 zz%+e`VvGhUyjZRQJx3Vn2@od`0TNEbn7hx2CB*X)mLMooWy1tXa9cuQgcFjTu;4U) z+w*x+PA;DyfQ@lfl$cMbkD1^_!xuHC!g8gZw=^xN$K&hse7d$)xHFB%&*%9l&DAnr zGiH--k&x>7bg<&<`xE@MQp?}zaWBjC1SC!{_~Wc@}xo z{r*r&4t7tVU9!rpdGfNgJ{;T!%0N8m2Fwp zjXs3N7-%~S_m2*IfE!tu;qiuG8K5ghp$!@~tiASha-+R3n(SoaNF&cRoX|)~n>UIj z-1T%xoJF`~F}^|cF`sC>05XoD;n>b9;&8~)7P&~{1svvZRAYVJHF9Ni*c^#B(<~4r z@;VXWtEnv!M9s2wqtnbE=0934;P@93t7u_Zqc4n0lWxdUAW*VY9~4FIfp)}*X%cvo z2ZnenM?gHheMy>Ih{u~Gjpzd&PI5i4n8TOPxLoiqCN!h*k|%6NG_jX;#QXw#i=*ix zf_NX8`~e7}%Y4XWuW$>lVP4?;c^1_jH;7ejA5-DGbR;W0#WVtk^o^^#6Pame9}aRO z5Sa}!)NoCg>k;9MTP*$4|Hz2(7I)g!z%6Kq2lhlbpB!p`$HB< zyl`BOeOtPvF8l&oR|z+pIJFNN4?7!|jH{`k=v%1-f%G)M;a}A@3*su71kh=G0bm13 zwt^x~zGOrf6-b$wN5k(WQ=0(M@yjGhwO)Vn5C>5bpt-wC>8mb(Sa6SMEj zm~-u~;%CwnJP3|a?3`TA2lgV(76S!caEj%~Q^oxj0Pv1H41i$S!{Za`jmY@Oi8vr; zQDQ=LT*_GX%FiKNN}>kB$A%qmO+21+Yyo4#+;E%fy390Zed9l$LWGa2YL(WmgsewjG*$GAECS) zwQuvc|558X2Ec?m%;v-vs{YK5ZLVQF3fF4u`HQ6JG3{!r@j9wqocf6aXgQ8tAZo@@q$6 zF-fL>2YkR>IBQP3a6;Wjgr>|El zQLb?^s$#TiLi-Xs>x`qWar^~=LbU4sm$Yw;7F)WQER&w>0F!xXAy<>MHH2eISxx;f zIUEMQ7C(1A%lqf~MAXh$@hwlPWt}f9{n+kd&1* z%nGszBMIZLR_}@8U=QsN=5d?Z4B!kj%wvsLfZ>$dnDldJU1 z3VX=D3r=wO?~LTbjE5MCIEDm?X}p!NR|6mrOHwi5DN0*pEDM>FRtz#MqSH*GGt*a>kdLm7ZEQMA^maIS;&E=SAtWM=BUSj`Zb%G zkN{VP98nNDg^0>nzrLUoNr}Wncn3v#)J)WZ*fNM6gNP19r%j<@l=ELkZ5;(gbqVkx zkVEC0R_nw>iR(>jz@{RuE=^%wxxV57WXvennb8a^bqsC0VW0NKBYa#P8_8$LJ*&05 zuFAMDJxLsb@;ia>bGK0u zx-nkaw^+SGA(Q9Twi(sI<+dG#W)Wn{5T}6>bhka3J%!3o@qLwQirj=2(t{S-)7B9S zn?qr*ZBy^iA6QMbXZI<+;KTL+H0yIqtqjL{{R+G*sw2d!KY!ejRqSxZAD>y_a+86y z4Qtln*O-nnEW277e7S9=IIDp)p?Q}u+J=`T$EQcRF7vEKt%~VzDuNdSYq(bd&55tb zvgTp2_m@d&8x_rQpPyvDF4Eqbzl&HkB6{w1fMPJ6IoML767A|P?!AbDUZbAs`^oqq znrV}q4(CG+3&~DY$SVRF?*0gBnT|N6jkAeAai3i3nh#v(si}JBh#kW$3a-q^gB$48 z%EhXdgAMD;et3)S23=W+cL?g+BhgA|I0^~pUO?ld^fSixP4Hb@P_oGg(qfCL?} z3%w>6E)j2+MU*jbQibvdY*dV6utNH3VB#D%fjuROCW~XaM7UYejFj9{iK_I0M()rD zMcfx0Eb9WuOy#-*j6QDcjx#4if!x{T^-TyZnidcWse7ReRF-{Z zU(R|xe)(AYnA`H+eGYlHUcJyau4TVBbLFk|JIv<@{v3U>+O@m>*W(Ki>p3LQsutZX#*_VY=DMqw z+VEw%v!Ri8WG>YgsgoUzq|X2=xCRO~SzJ)^R*N`D)uMl0cT(=ZA+P^iXa(_KSQxj!pM@w7t|I)lgniGx(?TA~YR6n!D9iv1cB2on;9XuHscNg!& z@I%jTj_OolK_p3%0$>8s$*O2?;ns~4WcUvJM?R8RB&HKyVa}$EHT8?9wpl*A&L6K2 z(kT5IlvBm8i;f#|&}p}(y+ixT7aIedm`wuMR6tl#eSA%9GA(?uKd+(2)v&vo`TR{O z!&Na{xL5R=A}-oBX}xyGN}`LqwSRv5i40lfrADPwTsq^L&xTW=)83lWz^q@zcA^ai 
z@Q+*`J{2y%YCPQ4W9~5GhKdP`u-BFRSqO0|-wjof&lhR(yUr(LJc-m$3}m0HJI=@~ zJcz->cdbegiFbLGs4a^{7mQ_5wpzxL7ZPI)#Q4eXdZqbH1S2-J@D0GXA}0QJ1v48n z$Ups4z7eO){hi~U(8AumV=+L)vG!C=;5>xJLgV#Ea4GB$!57LrEBgHV)1YjCr3CCk ziSb;szHId_%N%PL3gv5l?Sstms7;T~87i`V?cF@Gkb=_m}axYs9Qk`Jy@lK}}@ILKRl z5+QLKD_|dscRvjIJ7)=?1J1})ZY(Hs8K>5Rp9Gk&@0vR2mcRjr?^nWq3$%F@Z^&#QuB0iZXm-LmDMcwa9srM1zGXS`H1m-Wi; zldP;7zHYHc`gdz?IN>GD82Y4bTu^{CJYno{_O3EKOv?d z84Tbpk&AmLLYZyur|L~Pzq)D9boS=j{aZQ4uc$DKOrx7cwHsmKk@Y2ul692u zX>nic{#;)FF0*D!DgUY?g|FwUf2k)2JvYg!Cc1#I8>8vw4B;0<1@$^~s10oLJf3~H zyGEat&}T>39-#-*X;inpFpN8w2gQ zlSu+@KN%&`&e(Q){}17_*R)=HQQt{sHRvml$^b&iS72XdRyN zIF|~><46NZq8E7Jv$)sXz#rx7#rG5XI4T1JTZU<7hC%lPC-mEo&=eYg93xw9;fCx> zWceBt#WSYOsI@DvF%Gn@i0V}kb$}<;$@mAnmHF3e<$_ye_sXtCZlYwO>(6#v<1wEE~U3L#okAHoIjhg2S0rB&eHCR)?bXiVuO``&QJEtSh# zZ`kjhzM|Kj4WID1`&fe#QK4WCmI!>QF-oI{o%zm@Iwjj=yy}I-Sp)f<}v}aWPVr_Dm%ILif zGpJN4L7=xv*EE1!ZBZJZ4|z3|(I0gtoAay3!axyoEhY^b3FGF0F+rdvkwhx)>`i>@ z<-H5!T`su2y0Mg;?KzvoHMP_j^9=B;b;dT>mT;<$)UgN!Lz;yy>|-&lBmXIVPw7b< zka_E>??G@~H~0A^H5ZwmSlnf7bONhV-PN6}mFBv{Kiz7lR~#?xrcQQe#D4z@83_0d z&ITyeJ`q2WjBQ*z!3~hx9X@WvBI; z!1i22E7WKcgF}V_d3|8gAH-d!6!Y6N=j-x8k1L?pZncY7Qr&79P4 zdB!zKcaoil3}StU4M(uciB;FqN!|Yq}+UIf2ocoIFnAy#sI)^^O6&?xR*A* zP1tgmSiy6aZT(%QKao1xnNh+04BYykG{XNj5}2Ww7CL{81G3++G4X#J2_}w)uK%$Q z{wHw!KewF!an$}RavT)f2i4DjAaWD%gYaxSPZcJypeJ&w`UsrjyaBdBVp4dzgMC{- z;D+@#)VoocHP#);HNv`UL17#Sg!6#=0t`0w!J!Gb4^lTY(Y0t#5=OR5M}<6t;fNT| z#vqSiw z5&!_>e+~YBhW+1hVy!LP4Gy$!eS;nU*(tVFj(Xf5QVXWKhXu%4%fON=9khQU;zAVv z4`JsNokq(`$2v+AU3W-ulj^}CVM7+0;$)WOXb>NnB=0yLr7)NJUw1=@^0`1KCda# zY#E}bo4KR8d9K1?&@aeTG{WYyRxE0ihN=%}_LOOzu;@c#e)GIt802n~9UB@czz;%9 z5bRzUC39=wU0$zqqI0$JbRxkjSgapz?p1ZH%fTBMp6gz_ZCv%&(ns$`$b~pk?n$Xm zkKQor%&?;4dya1k6T(rC(#W^Kw3&;a?9u$WFJJuCDWj8b^9{wIKVvSjwY^=wqR&^P z^VKjPRdoJC@6Yq;tEIz|p1{4sOT`KtcaMJidqN0LkXu)7X`)$+V;e3+XODB)#@hGp z!ed~oAy-@se>BPQP)t%r=ZI^(_%vfwlr+~})N6>5mNV9snCu-M?G#jVlfF_I2Y2}~DmZ&i(I-7m`HQ$O$nllAN_wnuQH^Yr_b8~DsHP1%O3>JHRO^P|EdWQ& zLGTS7nF|=Z!ueF31m5yzthU>|f(|qFXS3dIUc-4~wouc)$M1!{<8^h*Z=bv>>#Fw- z*+R~GdHrB;a9)`~-gsc@#iVahd;r%~*dVL&>JOT#aD-Oj) zr#L;Uoz4|8pL4I7oI!mSI%TwHL>lr#{9U{asmI4#;Fi69!mNA|Sl;YOO6};|`tHWg zq_~zwkLzE7t|7t`=B0z>`L-&^!Q2o)I7!s)?q$e8PA0Q*!z{AYeDG150~^CYDKlco zXPu2cYSKa`8=tpr>M_^*F)zJ!F$xml>|d%7?dubd2?lyYH#O!wt%><{EPqaWH#c6o zTDy3rV3e4QVmh>9{}Rue?GSm<4b}OY(P5C;d;Tf?MfW$(q;^%ckK-R2Nxm(mX@!j0 zBmd#`74>J~O~b-5w*)!0{{#>;k97P%fpH(VGWM*Q2kNt&P#Z8%N`cDSVY*-1M_sv` z`-6R$(+(!cLtpaF(8aZN^`~JuWKu*O^PO`-qW>JC&`edL9P+;eTPbtm)y6x+5&YT^vNV-;82K?pKO7kH75h3SZ7!9wV)DOVChnivuLnJ2@VCkK3MHU`*^;90ty zzheR92GCKOgcU_P#NND`{-7}VcA5iue_Y0S562a%73*XyScA_IPW`aUV0MDR`>7qN zGeQN;2Y)4kCvgA97*9irt2UbE9pgmlQB?jLtWa_r&SQue^Gh_8ZdI}HKrrN35^HQ+ z*J9=;@%{VQO*r{{kvrS1UI3o|OJd}37qzii=FgvY?#gMCh=4u595;}{% z>c0b02n7?o+Nc+{NlPR@KR@?8<_Oy;2A#^5FD@0dlawqrxw-J;m`!>RK{oz&@-quDE7pKi}5=o;tmY-QESlm-m`%~-# zw*OhEv%LB#mU|j=7)J8S;rqqG=fiMo##GXb4>6-5ZA@Z2x7Xu!L*V<03wZ%OT0VqM zaMSqEb%4mp2&P~557VS|j}buH11rJ{nZ>n@ELMq84GSO6rUjTKwS}R;gRjM5Q#Z`Urx*o`XBomiO?wT>ZqYB1ou>zm6qH zoUYh4`9SeRKO;n*@b#f~{yxQ^&pkRbFIji&-B$>!&VZQ?5AD$?-_!c{W*y#hN-@X< z_n{BoO3{qH@25r-8@t^L*JCR0{bq8n^)z- z&eZG4)RcT9T7FHEW6DA?{5EQ7U$Bei#3Vg*FD1}ib6R+9$nEV+j0?({dCOMfM4UU7 zjD&=N+-r@zcWJ{LnZ6yeeLq=N)eDfVPGs=etfP=0FFk>zgb~sl_QaR47+LW6WQPI% zJ(9H$A{6LE7s!GKylyx!HhTB2QK(oBL7lp#Q`n^ zaeytCD=AU1KJ49{+tbx{JR$9{Oet?mU6Lm7%~K# z)k|1jSXDduLovlPCE?tO@gEokoJMooBze8mIFF{`h29&nE$5HENiRM*i3jl17PrxE zFxK|X3*;qj>|ItASkb~X=~pyxKLWj-4D-2sI}^0>IQ%d2oabg;&GmZP6TI+10t;!d zY{MEh`tn_udqAurT(IF@y3^jqsbpm#{jnRZLU=dZ`0**Z%18X|*qgwZcue8gMu;ME 
zN$nN=sputw>zPEBn3~1N?&KVh%w0pmla3xZRhHg0gvoy2g}d=bM`mtAS|eFH>)?Nh zm}dpx5ouPhZxh{>Kx_bwO9N%nMv|vRL`>6-`gO_pO@$1rd~mgB6(hBEbe%eyk?_O( zMFqw}Cqnx%^_0QU1z2zYU_*>_ui{36Hy+)R!d22!Eji%0|DZvRVT;Fk%8-_(K_d>v z;ak3TC}xT3LHRk~>~3wi2s`9nQ36@7Xj|g?fYwVKUBJ!1q9r_CBd8$Vtl%mip9$v@ z+Mj>D0lcjCO%2W~<;|x*HYnL)C-_AC&paa$(4^6&f4}AbZ={9we>NunkQVDfCiu{s zpq`+4AE$)@tRi{DAfY;)XOICU6qOB@RO;!CSuQ(jo@=Uz?{{Ap&h0=Ajfs6uoYPDU zG_lE`sHA|pzwe81RMc3|4|wB!!eFCccYC)&1pknh%fxc&2xCJA@*Zn?w^IKF$3jg; z(i8uZSkDERD@e+Pu2!EvuN>fS2r4;H;IC|#f;nlb)9#nMw&r^cY^g!VPA+aGZd<99 zSJ55a4JRL37z!z&o`uGJwLEJ#S?fp-JNMTDhwl1XHXoLc#|fVt|DOB*bkR%&vT**t zkPq|!R+0Y|@~I{w$+{LodaAqt4|Z9HN|H^7G2YpJDAwck%Etb>dCxdFqZNCMpbKb( zTzt%D`h_EsOW$pve@!4H@ez{GV{ zZfKtt!P&7cp6lxLH8@?4=<}WbuOgxNDmfBx-UNvMARnt^S~u8SDa*>7vSO>uz@74u z%Zhcz*b7#%{kBeVTeeYFbvWORh>ja=kvYG~X}__$?#AVp`SUH(Fo*xNAVx->c~@W{ zAngA<#s5nO_g|5xON|ZtA|@ms1FLV)*POr@X3P*(;=W4d<*UJKXiX@bW`9zV4OO*o zpBUy2_LOz$6zvMeai*k6-d zvKBMj#2^SFf!VWuU+H$v$lLA_Os4*=a7QK(>OQ?d`YMyE!h$Vq1I(J{R9&{_w7C6| z3CNzbf~de}_&s>0Kxc`z5=w^&Pr8f_*G%8Q%V#_|iXNp;=mO66PgWURWA zrzsqJrtk?N*xpI!Si223_g0~jslc7tyHl3%B>Q?1OrtAexe1Yw?W2oV2F3>k%1|Vp zCz*)7vnhzzp{-1}j52SIz*-ITNeDaGq5|1l!0B7Fd+$^9N~h@M`L#Bd8UZLYR?S0X zpR<^l1JRlIpz)%CTTg1Vj*{3e^Q7WsDfSOm>WtOc2JA9UCf36j>ib4==ds0I1gSBy z)t<|yC~q+AL<=U46kdzb$*<_Uk$p3*24id7H8vz2Saa8jQ6fD<_cLho&Tu_c=-es$ z(TE5zFq1@Mj;9_ysS7(~v913{2x5*y$ic&a$r&udo z(ld}coG;|5;HHgI%l8COQKhQfQ}yLe@K8Y5H7X|8Qu)=EWH%~UvA0QAS90IO z0Oc=V^rL2*7jtG~Aq-JlB} zM@8O3hlLnHN2hkV3B0JQz8Cfd!zK95odm;{Km@4S$>am;vpkPUu1mVj`fv(=+=(>kvK~O%pV6C|G5pMT|Y#0!}Cw!SqA+dHqd_u1OMykzX=>U zI?lLj88tf!LmA*W{Q!X%QF{`yOop-XviM#gvLCw21a=X2-xJxdr$IT$xi_v>dG{33w^OGX~N z!aHHp<@Mo<2meRs!+x`LVje!Rra&JetWIWyd zb}X5y8?!&we7%0M;*?@8?tR*Ek4N^TmmH8Cz)uOpULP-aAzT?AS9c#=S#oC0lyPKm zWp5K;I;i8LIhenXJ}UL>lIc-uS&yc!yeCI1ysLj}`f|xr5JnmlqaJK!?WZ-JeI9c6 z5Xf4fGK>yfID}DJ9MI*+c~tegBsQ!Zy>@P$M($JsW%k zJQH|lJM()9l=W`KnV)?$3pDo}qj_xPg3$8D#^&62I@hZsm&z|lkR`?V?wq#G1k8T&^W$M2(YkZ9YJ3P|qyA#bX8bhboOOsiA37vd$*uhG zp61lLpK?^IJR#>A*YExuWBt2#u;uggZ93ZM-DM2U^>S3>6;(zfuzM+XbN-lh&{ns@ zyPo?@Q~PIvJV~eRccj9#wy#prd9*p>7w6k8bByRE6sk%k5uz=Fwp04rkK~ z<1L&2_PNsM77gQ515ew%@LnWR1@I3JCm(VbfADt5Q9O^osD1tgf0lS1D_tR6Fb;mZ zUt%PjNSJugT|uas`@6~6iofwR857w%YJT~?*)D&{DSzk@>u1&es%G%z_*A=AM~z4* zz=y+ka#X{dF=3~_*@4=j`pC$+?evxPa7=-3UU!_K%Q=$jEyMM;&vhvdnUM^1R8`#k zdvuUtaIudwS))G<829<%Lo1+Le846%c~_U6cr&tiX`4zh{6H5c#;?+=U=AL@b#L?K z!%QQg{H3?KO^6kE-N!nN&yBY;>nXl?u^*S0xR%!M!t2A9^Aooe9)B=t-u3lz^;MLE zH)E!D@&0qNhlW6fA~0_eckS?a_(q>q2qrs|$)ZF^g+Wp)4;eIUUkYUzf z{6|o*lTK^rB$Q!Pb#_3MUe~0Tw=c65yaIoZ2TU#2_p{&QOyil|4RQQQ)AM~<+bgc` zgu-b~bRCNTheJ01Sd&_Tz{U;^of>C>H}B*@^=R3-lmSN>`5>{srK`RPnUA%;34q48 zCS#=)S5X`l6o%2kS6YCH*q26P=AHzY^zpQIOOuPwRqNwC@$+J=1$Q5SK*r-;XkhcZSn`*z#d2%YXD+ z9?@j88Z++3m3sa#8t8QQ?$NqaQyFmP8Q=~_>CGYym3W=hukEI7SS$Fxn$~SSmT#Ww zKY68Z`r_!}E{-1>U5&>~#{6a~{J0?nuhmjJ=cmEbXd6v(wmlXS>2a*!TCd@6LFeZDO`hWme4feXRjI#Qro`|1!As z2x@}9@_mj)Zv{qVCyHKB<&d-1xwndE{_ixA%NnVGDWctmy1mj?=mcqvJ=f?%5tXTE zq%Fsj@nc6*)vwuBS;~Dn9q*rXKqITEU3;7B#Qm^y20Dk{KNOf9X`W5npV5$icA#r< zwSodHUNHMh_O$N$ID1|24_I6nvN>IR+?ifR!654&vybVkeS(tS$4}edAIxBWd@(d3 z8pQ7D>1%nz%|^d0G(|VPe9&NVx><(J3rK5D@t?5TJCQ_=w+RAiK?>6sF>gg)vFsuU zBaQB=ZU;eZ*|X%m0&MUx0;$GAkB;o1!MU;K`u(R3{Sj^P@gNbb?FgY(Te07OKikm7 zfvgEt;H|Az5~&V4$laoSOLbzkZl)s*i0?<-zwsdoa4!BZ^*3ka7WE;L?6eIKHK!zb zLE`C71_EI=iNfjm>6CBOA@RI7;Jr`LdE5@hTEHGH*!qYgNz*MTT})OY^#tHukT7OO znlI;Ca)=BQ5z{dIrW*?e5L(ZM-FCxj2d9oY3htd%IZ2rGh@vU60GmTnD%UCqWOzH# zq;6EvbkKp+;XFj37Al}Hli|?%mW~woK}`^4<`O~Sl%(8X8k_?tpGR)c3NSg;vVE#pW&@kfbkgWiC85nK+L~Gd(6o&6-2FIgfaiP zM8Z=+FgP0^08-xYUB2(KB)f$+vOnNAV4n(v@Fn2Nqrh2DZ>!nfl4V6qeN5Nk7n+zb 
zKI({bo<@_75RzvudQC)8EpmArN@>q8x%Ju8o}5qimDWgvLS?8 zfE<$g_!Pe@sxPTW-R(XlIx6cjHDo>|iUyk<{x`zOz>$Pa@5S?&OMPp~ur}4|7Jo5D zWC^JOcU6CP_q`~kAuBH*4{_3%+`iOeY>5xnj-2%V(#m98|u8P zGPcvHD)D<^WP_^spi!M%McNpSm;hr3u$0UW8!A@@BUYR7$x`VGh{5n=KC$KY28#0I zaEKEjnFjzeaFm6bXozlrLV%C?%G}BT%xn-({ur{NJdDX9cqKg!XMnMP($Tweq(6t! zwc0RyJ_FMM2okwRBq>epk;;@1I2D-w*nIHsWK|G@%AQJOF;nG}TN99Ube&qv^N!GP zWArkbrvhxc357|J8&4UceK%w=eq{cA9O>u=^1}KnUHC;{ad-gQU4c*}B^_9fIm=1~ zI(mdWD|N`Ou!e4S0?^GCkthBLhEhn*Fs@R{Q38OAtf4%xSX|X(Z5$9vd#Fiv*3SZ-|Cofc|>JTI-nI62&bY@(mWJv>$J$ z6t-AsNW!+;@ImOy&jc=Ew0*{gnge&xyh9v`j}(#=MG^mTgQ_H2YBTAANi=~X6qI=B zKqQ6)&Hu*6%SO_}DBkIMOk8T8dPG3jH)$lhbRP_*oR142hEG=!d4Ykz=^T)hMF5Kt z3OC6+@5AvFw%Y>mp{AP*&%wJzHwtMS8KQ&^Ig!X>Q4!3GSRN&xa+c?{nFjI7EKW0NH2c8WXQq1=k1-CL2$W0 zJQB<1uz4>c1trv7OvTK2x3_l3-}otMBVWxFU|~yr?6ihB9z&3*4^8SVfB!(zv&P5$ z;T>MlX2scn#p8kwoGt#B$XBvC7-@a$l(|0fTyqNfq^L0x|2%I)Yzs@Xw;<1VV;y3) zdJ@!kJHs$1V7S3tT?O~d>j*vbdbIY`moEwm*DchH%K*@+R!{C0@{p_-w3LLTI|U;M ze?C3_w*PW4`a+B{7Gnmzw)w&Y)hBZkWAmMAv*t5(RcZQNN-_(Z_5J>%lh3XtP)vYZ ztQ$U@jE)BX57crbQUVEs1h1vhp;4=1Cf2aunmnF7XZ0fhbq-~n{`Mg*%`=5 zn&OyWIIMdJ>}LNp*pFnC56mrRB{t4S<%Q@}rDG&xJta$W-bGfE^HJ+3(SQG&jt1q*q3dcx}sssz4>> zRSbkkA2uok?C|nEarjw5x7(1ukTEy$1>NL+#Nu?Dvyj!uA zNQ>X5M>6>M>CUZq=OIW#%VO4A&F|zaCsb%cz69iW(7)cP04&hRtNSi%pzZIhFlC)9-zvN%1-k z&~5r%<>7Xk`8-f>qYhP=h-ocuGbQ}t#%mZGVMcpE;|%*rcOOh!+hDRafqknkBj3QE zB=iT)Nc%nAk>CfL__|ngx@j7khK#i{?)7>LtHcT|`q_2vGLbH@SJt#YBDMi}!BGnP z`Ii36uf0W}+&K9~py8$4e*0|VquEe^du)3JNEnO_x(<`QRhS?;>aeoo&Psv+QGczp z$L?WOoW!x>Oj+7(f9j|{u~9K&aXym5@B&gG9!#>auY|l50{C4aP!y@+Fk`CQ6c;H^ z@*Up}1mX$0u+e9q-6wC&Jt*2b^^v(j4v)nRoI_U~$Tlk~hXya?ZdfU6NgNx^*l}y6;+u`&J{!20r9%KFhtK(WRFr|G>R66 zvSfeyF|Q!)A?Yi_@srjO)@TN+C}yxkEBls7FXNN>fL;@xmZ(|cQ#u|c^Z1u+>W3!eSh(-b{3`&-*l;jK25F{%-Nxg+u)-W2OBvVEd zb#xMc7$!-XD8cI|(bXXra_%416jhJEl(`e(x>!*|!7(+GJW4SYlUi#2@r83z2JMJU zOSv!gQ7A2{hWkS?GqD{e+jv=)k)h_9l}U+C@k$s{nuP0AIHJ6g8)lDSj6u;#x)UP= zMW*7+Hq*HS)J~KrPIO`1FBN#8MT(r|rdxx8S49w4`O!#^649%Vt3;B(QN$cW^Go;j zbeVV$Sqqi7)zgJdLgJ?j+h|(>*Z$^bzjwDpCiu2cU@^dh0(pWWSnEoet}Ck(8K0se zXD%Z#6&O!C!PPoIibPV6p$wnj1LY}q9?`pCRmh+05spO-X~U_C`0s)renh!sypxn^qy)iWqyAn?C0K#;!npBLNlEb$gkVtP6c*ZX z;>Jns^bymEu<>;xJTaC*7{ov|ih=Qj++b^3`6SW3Mf?y>Mt{J95>+CDV;6PXs#}+3 z@q-r~tbp6(s7p;@eZG6_Vr_{>?}6acSO61hWFI*8H@^%m z+k0`NU?9qX)DWZ%MGeZ&IkspZG8Jcm91-OoDopDLiLs&ZEVXvj4_pZQ3r-}=N607_ zW*($K$h&;{5kb&&N!3#paU|8^Kfz0%y{nYRI=lqIvJG2MEm-^@6wtp#z0sFan&U;l z#^j&{;1^JnX;y{R!jv#g6gR?WlaDVG6JZ|aFib8tooTcV_bHUz#p{@%1JxGq$N0f4 zjJk+VnyQAErbX>K6Whl9+pEsfpX~3_Z)5DFc=h9wY(-OAq4mn`Xx?0b3Cl!w>rpOi zs(L@vn+_$*ZnEwM zj_6gcRsTp}>2H)AMPmHqoMW`CB1bWzR)?>oEtW&-%Fi~r7&&03bz#AjjPd6cJgTsu zRMa~TMVwF~BD1T2$PHGsZ3d2!aT4v5y8Pw861A)m`h>E9Z-L=Nz3QI5kPSl}J|T*T zaLXC)6&Va$w4X_S9zyHZ0V7YTcq%iL5P(EfSO~jZ=DpzCjHUdWRB)B|Bw)U_HGX&J z{leTMcYOT6s*4H^_TqyNFJ7tM1aka%qzP`ZVT4IqsqPavxpc*Qwl=S5A0HlPjK2>* z&`x^4Ca2}%oy&8t5 z9j8?zN%0N5DoIYRc=96=_H9ir=nvGKX_qw9s{iHHqq7{3AP9ts_J`vCw-wXS*U&9l zKBGG`p2}5FRWHOp7f|n`*d8A_kV%p2iXBJu9xtP&_yLP<;~-UKoqw1XZjR ze$Gjw1ksOTi6MC^J|dkC4y%U$BVUDUZIAA@B#v`msimY;U2G0PuaIY#JzsH%vgj>3 z6D@zJzXD|$W)nCP7mKP{F4BJG3^s#W8NOA>=*hVOMpi--NXf!!djqYctaltiRF&TpfWZAg4y&3v2NVqU z_Z!-GRl8EFiR?2JcET}&9!5pse_qYoQZ zvGE8lRIu@+9CIjH6)sGIN(qJDz{Kw+79&$mxBknm!ypHdhL#jILcjHW>Bs>mE?nSN zS`j)O8xCWNTg-v|RnaJ}6lT@9Pk9q590IpzWc9O!>ChD=Kfo-w9L}$!e4bYvy>%=rP_jAIg~gyu z&hCSdNsZzdhep{n7KDcIX6bJz20bmqmKtSclgl z>i&jdRKB7KhSch>0SsebA_TZ`A~*4J4_C%X>1Zi?N^$X0R%G+~^c5MpdKqDTCj2e{ z;XI`!IR@xwV=!J{*52Yyc8%&^5Bi3_7-6uMF~Z_BXez-Lu`MmOry4;;6ltK9Pa%J7 z=0)H8fYKl!L=l$Lqh(ssNYQveROFt!UK_wyJV>gl03ZwrAfCe$JI)M`>(t`%M)(Lf 
zO*UlLN+KyFwdp&M?&1>FH4UEE66j5KSjo2*x9ZOt&OQ?C0SKxKN!p2AvGjIL-7x&^3Lzjoksvwc60s$>qv#O=wW@Fq4FO$h_h@NlAk*-K<0ob1#c+>Hb!`lD~KW)dl$^ zWR*p-KOZe9_+ViUM^F&Gl(9m!@wAl;T6$8xFTKx9@zas&EncN|J)eXUfQrrlo&s$# z4L2WQNenfTmR=2`N`NJpzPSvupQ4q_N~hU(GFKr|z^~ROXeS800^wQq5Rp=(gz1E1 zR$i`PqKTR~EGR>(x035_n7Y!)=CWM1pDZWS20Qh3S03UewNjVqr3yRzYUqZ@M1ef!qvHT0)jT8qR653E` z32h0$>);C|V~W6nMz1hSb%jH*Mt|Tec=1m})rpB+tOvydO5_v?>awgmM)pClc|vFt zF+u|8?e!;}6u%3>$fybYaT3LOB2D_k_uHa>sJ|ERn7KF8&x=1i@+afi1w%- z#fHJh@s7v?L#aNL;+^l4we3ao0TDTrPgp9ciOBz%--5+X`ZjKIoo`yaUKLrFkBuE4 zt?M9TOAkWjlQ7h@N&(>{`2G`eobW1=Os-pPl_ijhmIN*%bA}-!Q-AZdqD# ziL68VE&NW7E^#8FVFbS!mptlURz^RPO?Pq2?LhDzfXt3GzlSvE_Y!OYt_X zOel+VtNH@Ln?q*PH`^L4BP#%{=zb`t$+Fi9g-Q|~L*T|wx?rqO(>S0DTYk{-m7Te@ zI^HnJ=_$RS#<`~2PdB&a8RZ2n>E8h*huM3SD|X4|VV?!a z$XF;OMg`@%Dw5?q?J`0T(_j;@riqYET|aoi&;7CBzrxV^DTS=Tp+w*O3%djxi958c zKg~kK#zu;5V9@jDFa;uhG$uJZ>`ywXBi&IuO6z%8a@Q2v2yF_7j2y@&*cEU`<3xxK z6?CqO6j$KhitSjV>jGwsaB=dbC6LJ(s!tKQ;}ypW)T$v8Lp!&trCy&rHM`%vLN3H8QDm2$j{hMi9m{$XpTi6bl9FrhTDvP-wt_j;sjT z8WT+-4#|7-)Z*oIguMLNKa~iTWG)+uxV3DuCDnyM^ePDK2N|(P8kmDq6=bua^`|n) zW2ylr6w>plCMAyYxLv3Sf9v>_VjJ}9ys~_a9ABYataI(emf|vO(|_OXIb)z2LkFfH zH8ix81&7@Oda9jn&c;coJ3A-qOts90U=yhjS=veJua=&WVww=T{UE(S{(fkqC{eKp z!~qdy&FVKv?%cuyYVO?qjaxLMeigLI*-t4(&1z{HR8;L?W91waEfF{hspzC&E~d|- z$~kj`Irb&E4E+pkA?5{RQ}EhAV;B_ieNklPTdqoa>&k&bZ=~JYzV&g?1kp-#8~=pz zN9CVHV}m4Rx>b3*dlT@+4bAVCadYFxB zc49FE+&<=IvzHGUHI}sH{!xNS?3es70J4Bne9H#0%iyV_b*VgAsl*gOb#WEOHk>Lr zuI!s_02%ldl%CRfyP|P|*Pk#fJKWQ{HW z8So)MfI?VrF7CG(T7B_8A3?=~Y}Q30jnpOkP}1e>*BQd@)}INc81?r&nI_Z!%);9> zuG%YdG^D`3UrFu)fL@3&g@G%(Z4|8{+HwFl>9WZ1oHz-T3kjoa)hC*x8df9Z*Z_qj7 zG{21o54&cu|4l z)qR0~F;ApdWC_g2I|@+k3-XyI#X@pIX^?^qYAq|52!{$m(r3Wu8%tNt-Rd0XngCV& zm(GtIZCIvQC04n>`XWk}T`mgObmh!`=m{Pdl^QT=-JIN|6Hm^PR;P~p@ryf$h`YVG z{pizC(lpF5&Xez8cJV@3W8Nqh_|ql5$ZQJva9p{1^O;)rR`wKHW}nFNTkfmb+S$Q7 zoi52q&|?nS5a5bf!;KoaWOa$MDz%4B1owkQo+w9j{~GC2RF|WW5C1S<4|z4BoVFa& zC7I0rq8*r-9!93qm3tAGg%oEOpDXw6aOYbu0o0w!w4Bi11)zdH3(KO}qgqXO2CRWW zQ@DgRhr8cO(L7fM;`MPER1+#LiliKbSp(UhR&6IA5vF=}S|I3ZkUEnY(R<6}>X zZKbl9yy>jFZ0Lbvudvu?gDZ1Ha{0mMOy}Kp3Y+Y=f(yj##$^u#^0rkVaY$?7FR}wE zyVVwY>Q1K;>*fz7pTV&%ZKpE#OB)0D0Q9JtorrR%B1v`E@=_9-)xC^FUr?#O z`MFz>8<9Otspv-PM@dqZCuvtS^mBlmOFj~Bz93q`1$X7LF%Tz9CDUSAf)YL6gqYix z=A}|8A#_*V>sL78ALQmnh`d~m^ZE@j{B2a4g)L`>`Sa!w?*Gt;*gATz?i2}B9Tt|y z>_`-=d2@>3icSZpI+-HX2dFeJkNb)MN}EX%DVvgX7>P^aU`euL2rk9^_cEzdmef90 zK_#=*g)@SLKvg6(c|lIdcrR=FYJcb`UlMxoaSzkVGWIleahke8+Cb%idca(8YY zkgc;|NF+!T2#<>xMx;_9tB?s;S>L$wU|Hl<{Ewnj3u2XeY?&>I3;jDGSF)U_N_0y~ zggzHrXcChd^sTuPzfK94l-w)9vd97e1NDAmb!m}+&8ktyRi=3lC0lXVutn;W25x5T zX#K&5^_q?zyuL!~KXd0lh$@ovY=zOHi>i5#>~E%jq+4mwT{V&?#h?-QxH>B%PyQS; z8otOpg@}_=GC1T{lSck7%8F$ms$-ILm6I!Yl=Mf$E~o`Lm$ETZinmP9&QKExTIrDd z`c&pA{J-TR1hqSy3wu;dl+wYu!7`(k))|Dn4~ER|*TcIhiw0QKdNWSpcDO$YmUlv7 z^8BGfKw4=Jrb>e$4HBX~)}ztN0_+N_)q_xaS-PsNaE)sdzZ!l*{fLJr-nx5v4sT2> zDwS9qDy%k@hIgZsd$PKB7_vsHQ^@q%ZDD_k5ZCz>(c{}De-y(^&EKdi(%NpybJVU7 zS?8dZSu~_3uhoHY91vb$hp_ZAGJsO;k0pEPZv_iIo)})A7M!?~zcwbRGn7`xUQqL) z9S@dUj#E&*oANtxBh(xJu$M10PJDS+h+Doo_R#UGviTO~99aT*dwILWzO=J+I-75G zsV$8kzxPAt5AWbs`mT)lYMOd7bZHd1ZXYR@+bCYicRNVo*E}fWx{5z5E>7d_?Mc$LF5@Cm+dBHD+qR9UIzo=Ixb~F?e=$_i${^CL330F@7?CILN#i z^ON@@eX4PDeJbnm+JZDW5q!+khUpgmo1`ztXc+w9W5G3ga;`)0H>@fS{MUS)z>n28 zlb&OftKCCimbWW@AFXwqZw5-FlE%eG`6fCQx>L3u7U*A-A8-#$c6$lRW#cAmN`LqM*^P^qzJ2|2CGYa?ExCj$*HSXaQgGKx}3O-0mz3bdpo5c~kfrJysIP$TBa z8kH6gu~BywS;!&rN2%F!IZ6VM<}P0m=#iH8Uc=`gy5BMtn_QA1=2o2SWgjIO8NcZU zi7}So>$hs8hfj7!Bn2(?8-WZ4wn(Z#Y!j92&JtAE$G`6){tTuq;*XW^=seasiH6$# zfapg-CRmj-0um@Cb-%%3!TZ$gxl|L1b9g2wDV;5Z3v>nY+oqyY(g+2t)RzP>Oj}2b 
z6vYDoYJ%ZP45P2{tXPGGlEzz0Bzkflafvz|j-s{fd1OeoMmY!fTDAn{;tv4|#~6A^ zl!S&n2vvb|D?#zu4I?w1Rd}`sxB>D9OE*e!_OEwc`g(3V4mn`Pa#rmpNP~5<<5^vr zcezo~vv%VIl7*4t^jky?cN}ydj&5r#_u3s5Rjn0_b@60KQlctV@^fcM>Y3D4UJoW^ zVfOyYnXxZz+Aw92{pFDKeBiQufuP^}zSWCYfQ!D_@AnkRQ%nhsp~3B;g5tlSsGw`6 z7s`f^qzF@yu9?8EEK(}MT{U-)%1Fl+lE_g(6xNN02eK$UPgJ~_ZxI ztfS8N=E1eDuv~X+#>b1GaBytakjiy-tJX9>f;+DU(KW+bGke5H5Usu;JQ~_xJ6MLl z(fszjR};}|AxJ7wm?|IRz~mx2r`y^cbDrw2puB3zUmDgd!wa}bWDg>(LEkmx(~7gc zaskD!$M)#>nUJp3v7HZc)lOlOjmJsKyYTzS^$3(BPF#zS%c44-f#hIQ;4l1o%+Pfo zPZw`5tUFejIx2L}77FwRVOgy{2F$!cZROb zHXg+kPtz!|5RoD_Y)E-6wrFX$ z|MlDQ28Ls_5E9dM@dZ0~qwiAcoT*iYofAPS9qigzCA_HKLnx7l6X`Aa{ASwh`l^tg zP~F|yGmVK~TE@L|1N6$cl2OLPX)C1x)?V!)wo2Oiij%WV8OHRRk;n%7ME;CrSd5Vp z_}?|WZEBzR$e=bZXfY`EIlmNI9^Hc>Koc@S_!C45)i&pIZt-J(%1`=V!r1iN7f8~_ z{rn|n!XhVNFX;+mS-m9hcXEI?fEj@%L@y#BtjWLJqzyfFHp2B%t=m9~a1&gk5|w#Vl~mgLkr-I@&>L`d`?4$Jky1ehsj;ZS&N&ZQHhO z+qO?_yFE2dZ5yX{|8=)}?;_c~n@#rHChw=o%w&+6%*o!1@#yp-QzB0niALaNztHkV3Z4M}@2rK>FjkR!R#x=AOcAjm?F z8ylc9R#6*+EYZ**{04ZSWFJ24T{2FMu8O-MfrN^TS zCYc|9^S>%sa;2#NUhr%?{huR4vw^2k_$lJj{nTF3NFaKWB$%u1|t2G4%V#bTkO4$Rv`CuIYE!F;^G2PRE_bB!-+xM35Lz zj5XeH-00M56xtuthy+BXaa_G`aI?*sL*6_0hxC2SK@W;w_amYSdt#fP2S8L_3Vl16 z@kULXHqF~EeXAaqr>eaC<9}5VWLv0#kQ6>-grjaWVEZ+wYs|Y!7gqG1>Z2bd>{y8X z%RsF%k+fx_>kfYQH9LP+&5MABvRKhi7oTFkF@2dGVhEIxV8)TLvI`2pdf%Nb7tZDHQ{?|K+`fY@802^em6SG zxpH~blLRJxLRH%VEr8xXWoPTR2(&s~T!vRp{QiWcu)@>j@wCDjv7RrrlTI^tX;{Lo z`csB;yY=G;3epqfCv*!4pLKWUi3>#I8sRYptRE6}x+v^>3GNY9#~_b%V$*pVIzQ+c zv4fJ%=S5#cDDJ_?9^KC+i1VUpwS+n6zN{EG3y+Z9_CDE7Ky-Qbd)lrLnZ#23Fg@r7YkN7^6g4&Eu)~IWWj0`3QSUrs|^oKUEY-3Sg2;EhX;M4ZIn&5MDf8Z-xK4sXONqc2f zgTre3oypV_R3sX+_&w+E)Ph3zV$vZ7RA0jeZ~(x2V|27y)B_pVk;pHxZ;xXhZeVU- zy}(;t({Cz+rD;qbwI6i_Ii?!Ii-bCZtE5;b<3#W02o}&Yg7p(XhK^%$(in|(6_X=> z{@R24!y|^cs!fsGSmSEI0~n|7Gf@viR6~`6w6Sy_?A=4nHz3+c1j-biDDd!Rmtu26r54$SW_2J2}?vWewV+4OU>SAQl?gPU^L0<^5u6ke13u>ph{;2V{WvPN`LgqH4ha+v zEJ-S`CK!r6)$ZJpr(q&tt`wMGOa|8ZRY6`ozc}_vI7{a1w25`c{-Ep!!(_ZL{dT8) zwXKC9PX@Knx+WDO7KXRWIp;jr^d82=u!t(_K_M=$BU>8ACo%P&SNv<{D&bwi-|hwi zLkPT(-X~ZgJvT(nWX9K%R3zR4cQ2g!mlmu7?0b<`zrN4jv)lj4pBEE$;^Xk~q54sT zyYoxs?HGB4Ea0`>wCBECDETvWi@&VosMTR%J*A&=NYZ<{)c~#beG#-S)o}1eJpF8WT%l#aKRQE~P>KW%WK|%h}{*$Y|En7N# zIORGOZuzUgJ$~koRV)n8_kTBvH81uFsVDjV81lOJmo3?6^~oPD-I!(M-`)EukM;~+ z+*$Jd`LD_k@BZ2IXJhu43oW=lxBK5b(01}%%?5NUm|OKrM5FcdUm-SYJq-~Zo_^5o z|H`s(Xks`_{gjcvzIPDo-}_(vV(!*Iez8fz<(poCtB1T9Ip2PPD}^%Dojljf`TEL_ zM~ZKaet->kP6MD8*QgW@WeTEhT-4n) zeEDx~lUGPHhA-~CPwsq*RhOdhPSDqxaNko~mqSbr;BP8vUlsamPAzYWzq8d+rt)L% zHTT~ZH|O4q;YI%pqV8va%)qsyq=3Z@Hty!E5q(&qCYWVW*3Bri&}Ie9i8r22ATp(9DT%89Z0Z0xJri zI!9`$vVSBW_-X2Uy)bz;Q~Nzy!%r3ZaZvcK0BdKU?-kk2tpyHO-0mjjPI* zE2Zr|b8m4k{<+`!LG4QLpmT!W<5t~HIH4dVLd-InX%-`=xTq9dxqXGnSiHF&oogB( z*&q)#U2!$pA8K=UZ`h(S{3N%v;TkR6`ZKtC1N8p1nCAtwZ=`eZUEkF&FymR z_72p`nBecy)wwk=*EazUyhlR_1VVc=F`d{H>r5QQO$^K&W2@;XPVwh71bNs-7ql@* z&r>GBtH#zX5DVYsR+N5S3wK5M1+Z3-=Q0S)+AoSCiLao9%wjBc0gl(4a~lMle5E8T zwM;|MqmcvMNSz4e7G&*Eq;AVTYPkoQQ~j`AJaGMtaP@LAaMvzX>RFEGaf`%byhLg6~a27M?eh&+qanD0pyw!EI{^CXIWr5My zmO+Wd{4JCc3XT1wC9CqQr4&%D`;=6Ky`I+`Z+9ZwqQGVP@6d@0>t?$qTqk@(DS&TUq^xc_~$A9kDS>5 z%^>_=3Elr~5Z2yy-rz>^M^7Lif>4Gs5;k8jg&OSSntq-FM(bl?85{349YfcVm zuoWo;25IrIv?IG}uXfPr?t@+;$~3P0UWOI#In8uJsvyp`&V%zc09QdVaYJ4dgdr#K zLyla-R1yU}h(MX8E_lgP?>=O(5L##LV2_c6iXbN;HmO+-OHA>N&m5qw%_lUOOp+KP zK5y~)_YBb%QBe6N&PM7!{Gx#}06QzPB;1ECmfDz|D2fd2`l&aNF_`I0Y=q3)V4dtk z(QA+ipX6_^!;~|UKq70_YhP`qF${_n5LK+|n-%l9K-_{Aub&yi%)%?p61K2fH%01dL z!DxgR%AzyW?_!!-G|f;h)dQdtOHatotsv3zZ95hQL6NxZ<_T{9H}8pTCZ89`<`jdm z%>yM_zTYR!f-E`0in-&&ky6Dmr|hdt#LH?xN1Vmbl5~565~g_ 
z!hd51k6xN|zCx!uVv+SEyo@1Nx)hUR4OzPh!bVXq5VXxTQOI859-Ji4JmFSnA`F7( zt8k+Ps6H}Mv8Ud`-r*KU#E=gVCk22$AYG9#Hp66_3VV7|^cJG0X|ydd83`BYQV!h8 z^*Py8`Z~TkD&r?L5oqGLAnx?rJ`X-S(SMfzXX%lNgRO(J%YV(T z-zh@&KY5j6^(e1EPAEjRB!gua(cqL=a<;}3&qSN0jy}1HmLkL`13d2+{jULJ4D%Y2 z*;^ENt&#}hn=gAdE8z{*oPaeLP<}QTe6M6;G3MOgs#KnbdphfM3{5?IYr(L5VV{P3 zgkkQ3qsYm*L3=9Bo3KOwBqyZp1+~(#kTv%@2GX5U8^OIDQg zm7M$*l@wz8#R=jI6tMa;_q@7y{hwjZqn=mChn^F%U5|EeTDgDnJpb?cF}54Com)Ry zJ78A-*HQmp6aT-XQuMZ+H%HO?Ze(|Xk4-5sp@}aA(oLvIm6XrL+t}JtT)iQKh{9mN zuz*raO`TUM&p;oE->CeO{kwsvl2O2wFm(QwS^3BNylGPsadbdi{rJW{C*FWHKe&Gj6J`eVgxxg!-Yqx4h$jJyM8b@yL=f zdL7$c^wU#zX5@c5iVq7No260@b{PMT2}LdXwy=zcAWHh?UMkL}@USPz=Lz0Mf{hC0 zvfrE{dI{hn95QlG4W4mI|E)svN(Cd=F62z06id;TG?jNK<%}QVrD-xc3t#svm#(Qq zLm@S`P_O1=T_B<&E#|w{g_y7(ySU7ZkxGoOf>ql1ywh`Ink_pc!g*}w74w+l6F~5J zm%=`#PCmD=ic(&^V)IvwNeKBc<~}R#Kvlh`l|Q)OQ15OAfLH-$Sz00<*x$@>PL&!r zp`rV&K>F-KJ*~%fi`@E)n?eE6_uNrcFvtnn8`o3!tzw)A$w#zM46>V3e7ru7Cw*Q{ zolH|Y?pRqN%=9Va1iD%MUU>O8y-?m3>w9mk9``k@d3$Inx12LE_heL)i$r4`r1&hb zlN>KBvQ3EHc~nx27{;*>F8H;H$NlcPh+d5AS7`mRA-w0DLO2J^+BH)-DgIF|#^+d% z$;G}FRrU%LCzD6490DPhFBqlN{v#z@B3?7LmB#NLU7J!r6+$h44`F3HS$!-LxM#p2 zfb|E48`T;T3c$ruhs~|M|IHR^m`d?-#-|MDXL?~hrSw>T-bb%lB%i&gu~*bd?Gtxa z^(@}y>^F2xW&h=oY?1=UZ%+-%Vy{%uS12ZSG;DvE=DeaXOVdT8-Hr@C7uTfShJxjx z$_{T_Uy?K1q>(Vm;tJ8XI+2;90mujYqH$4nFu^+p{t}bg=f0U`N)ta#u{z^^&mdhz z>uRx5kID+ylAq6V5Z}Z)&VS(Vj#NISE6ZyZ7zT5_%&epjxJW&*Z&VT8H%1t*J-^Vw zw}<$$h1=3%;vw_pAvx)4rJpT0oacf;#+=boE#|DKq%%bR#__x0bc)6S;7u`p)uzbF z_)s|El@C!tR!GQ0MGlY#D_T@46}uXq1YxH=!1K{4=((yEW~p#eU7^VY+Z>rwY;sDL z3pq3M^fU7*A8r|K()i(;jy>=_4 zPnhEo4{_q0bm4E1Z>>S{DsYO7nCTe1ON87Wn8)kiwUX&7z_7xRfygdLLd?%q6)awX zkeKRiR8!EJj_g=j9pC_c{9&;yI9WlVfB`KO^!P=A&$|l&zo&bevJULyXQiGc#K6y! zR;hK4JtdjDU(HfyMBmc4p}l{5Gv?%y1HS(T!JvMg5`Ke`ozWX@Jor26_xU?4_O`fK z8t<(|zEOOh52rhZ(@dlFDinenLD`K*Z|Wpl^`7CCi%<3A^~H-LS!J-Y$R``(xmJ3O95tUiwcT=Fyg^pyA3dei8>10PCf-(2ihUlPc};bF6!XL{dWv=4 zU=zDV80O|sNR}T)z+L+9jl6-Wj6#ud9yHTqb$)MS$Ttj?3`)fFfWx5Fdd&S~k zwSR%fNKt4Fz_s7Uao7?dIWJO$j#{((?PuTH_G}$DOQ&k;kUiQjTav+6^$jty_EDne z?YMR0Jdot`J$^9>x=?NjAa(!uZO^8mV;*BAM>n-sOXQ7Rc~wPmIZfO;QZJt8O*Ecs z3sae(FtKo}nHkYm_OIN0LScBrZig1Z#;+<}HsOz-dOZ#XnP_PXpJv?VL0{IoIMFUt z`YaqE;n&vKEQFImhqCe?4wGP@ryyCoj#)he<}+BJT85T;Xgk!+t>s{;IyAMd(VpH|OQrG^9lt|X&~Eud=jGF^`V|s=>(JsS zncOi;JybYLUDVr_T$(+G6{O>IQn>LHySGY90$jVLMRcd(QX>*bnzvW-Q!`E zKExdf>_@$G;sZ8pk80#)cWcup3O{RbiI`xfoizOW@W_xi9UqFIixrP)Y+jze^Lgoi%C z(XIK!a{%Y3EmsEJY11BuVTao|#6vUJx?tg0$c|w)<323-yD9Dnf(h+7s^gvRTeMWs zh7=~Exh!wASijQbaTE4KTN0QI_&AuRG<6!$blU1nmnVH%1BJAL2sOa))sC%BxcJX+ zLBC84xb?b>qk{~&Mnc{z<3je!S*6RL*A#{O33QW?J!^0*+K9*+^c^G4cql#%i&fz}8hrJ*ia=LNVlh^n%#MD8h=oUl9XE+DxQZ zW>y5;7F+4C1PTV%sxg63qDvAKYnU_Xu>LuOG6zFop9L4=h*+1JjE_=-s($H70}$ zE0Qfjun4nL@xIu)Jg8G97*;d8!R0~V7wg<8(I@tu;AnU& zI+70X9T*4*SRyjgiG>SDuw;g?OFJw7c((P>q#rKX-U&!WM(`_R!Jv>+>#?i(%rX6C zwP*TEv?aN=-sOIc2wTu`#>?6*$n3W}M&!UkP@aAAdtuN&5nl(Ybf|@p)R0)v(kD&K z_u0jHWr3^WB6grzsmzVX26k9ikw|`bfn_=jN!ty-F&woviv3!tj%Bi%RxQLoO7j&} zxWt~B?u{-{e?sztSt1BctKShqSoqnMhUy_USYrpPHIU$USgh{@3&^){fG3u8&U$w{ zZvNa}%JcyB5$f{aX;yzD+y_z?-H9d_-o-H}<6bdf!A-#Ez>rb!P91zHSLU}iA3N=? 
z>h<+AbiM0;cgV!YBO)T6ecgQ#8|Nq{NLKz3Y^>m*J8&B+k z(=Ehs!BYwjg}n47TJJ7;TL#onK|r2~ZQCIhHL+tc9RzWeS|zKB`Bj18Pli7K*ctCa zOlW7BB11&(urtG_9Wz-Q9U`fN3)`k@45;B78ea>Aa*13Pg`GoalxyL$FjmHGUIpED z4a$~tXKt0FAR;0-B}7L6$VUgNNuaVZszruBbB{5lFyNxYZtc}Xx#2#3;gBHwvO5aE z#IoaA$X6I1kr)vRN#r5On2`f3-JSs;A7ny6lc<@i>^(^3>WhYOvzJA!0syoDgb|qa zNO>*6%(zbr;7QI;pE7ySa3tr!q6iezBL_6-46gdDsUJ+9Dk3<^Wu9AVuwwJf&HY@w)TEHVhN^TPWm^tz2M+LE_=z>dRiFaz}PwP_7ii&zgHA!Kwb-b|0o;+a1`006bDHAlD4a z-65mwM;`)eI|rpSbp@%U3qCpO%Q~ip4{4y**njd)_bmR@#bttjD^HPjZkmq4oFo5B z-k;h9E-NPi^Xe1xe_{jvUx?rTW&{31{Qigd{SWc`AL94_U&QZ!&946tzd#T`P(QRd zF_3Six7sW;5Kues|497)S7P|3&ZhHb2loF){F*bcr%ImJM75C0JjZvtA4KJ~Eb>H# znl2Fy02LyxuXpoH5f&oOOPr-VLJCSR5$j8}9mJW;u2BfxL*5<=91i6a*qemcMJ1+E z@{QGdB9KUplV;2M%tXYHwqFrdI-(ejqAs{xN4ol-f4?d?4BC4}g>fr5Ng^RhnQ~lm zWk{E9TdRdmfXW7B51bW^O~+AJ~#rqMF2CFO*!ysoB;ST z7ZGFMF~&$0Us8b?X#ztYk;|D0(LLsnXcl2DDmqS3R-xQ?SVy6!fEfFiaObEIW}+F1 zeZ*@>tY$vp%~qa^ho@B95kykC#l>!bMcnqT>!6f0miVc^z`VZnb z2~3>QO6lP^!ldG71#Zx4<$ti(RbQQhCgNMjd9uuQ{aXn^ z7yaTE4WS!*WJJs5V1H#3p^T#FEoBoA{#k&)Ky-oqEk7-h^=KoUZa$t-%o++;s7i=8 z;U%w6A~Q0Jf{-GiQ}Xu92#532BGya-t3_tn*W@vwp=nN2{zUZ`0V_s{ivrnH6vCAo zk<2Y?5{T+fVQ~WGA-v0hb*{$S3sb!X`n~jVNfsXd*aQc1XgIa zHOSCabdD0VUP+q$I|~H3g9O@`EUW78D%C9#_%rq)B8-G`=e{>RZKU41wUPavF!sgJ zfGeYKyWzVT{TlSDwDlAH{7?ZK5SzL7xT&zP8%bI^VnghSjNIl7V5H0{;|+eJWtRn6 z-2V~hB4)5)U-k~xkA)US=)`z=;4+qi2qeUf-u<7snmjMJiHK@0CdWK7V zKmzeEisW_h%87SR?Mhn+q|MbXw1S}5tSn>JNCo`mPW{e`!DAbv7tckl5}ipc zp^n3+_XyN-@b;_vQ`=pKw+)$xki$c|-BX)MT$Nm>$3q*9w~m94z_o@GE<^OY;>cCY z^Qj|E_;d5z=JVmX-6BL?DHWH)y0(KH>w6)UmzV?Y`*XX^q&#;Gb?R&W+k$)L9ROb@ zBjj5mp_E47V!!;=mc(Y(F3vad!UmGAeGwW#cb2e)cMiHlkw8oqH8s8u*Pp& zOO@QOIB`WN7QWXj-j%U-vGn<8Zv8p9b32=|bb^;WcLK8-nS>t0CCk>&@Xb3!i9e%` zm+r3VC_u0mSATf$;3v*kJsVwP{W-Wm}+_aH@^(+yq`R@uUC(Mz{I zH+%xoYHIvYwcspk170Tx{epN8RNA$*>dwo9<*kDMt{v`ghi_M3wsSqla07j_yAbgpMJ9@e)EL+Vp%; z;e<<$+^4>7G4(s2NXFYK&qul)kX7xMP`7xg!s|7vX~iqO{@}&EFyoJRmVJ)Nnl>QY zV8zD>)y<&JXch^K&a>>Mg&;O4sufvK88FJqHPX&yTKs5iny3c5tmf-8>O#xjT&h$d zg*O5-eMikV1|p4Z=dlL4(qy&v;B|V1!;;9x)a_qH_xzT7O$a~sRLQ?-7!%Z!5OP)Jt)(GHPZqJkrk%8 zG2xqt1k=y2JrKgwgdJ#NMktnO`#EtW6F6Hh8HCHS9T)gvM1iI3-vk@zw)Pf7Ru=j{ zw_3lf1P7LPkPXD!m)Ga`QIl!-#m{GR+j66+vDsj8oFfN7mNOWWOrM`0WO$9dV_XzC z+}>i&Dr8BE!H=5G<@QL(-L@~qI{}|10U@%B*pX;p4T|$`k;DDCsd*@Yw<%DnEijS& za|OKWmhSXH%+`T#c{zsu2bU`8E3bFSTtJMnv1xosN_a=rYwalw5%0)=-d$4ZT)vTz61Whh`j#$1Mojs|36s&KUn`iSpWZDSpUCf z*MG47|AF-_Uh^N0emb!@|DQXt{(IbO7&zoGA_Ww*+ys8eH^vPB|8!ynRW2+&8NCCg z2SLC)np);moqH765d|!nYL}>4m-dm3>0QtC?QSRheRNjQSn;FZRAxmM#6;5c8WK}y zZ%2N4VrixPW6iT-T7x1-5H!{Up3z)yP?jSbj(DZ4{Y!8${G17-x=tq|e(_#Szxi3X z=0RKwme^*cQo`%PG?}kr&h6L`%CB7SZ?4(Zui0)h2VfCick!0&H2BP{6gNcUB#q^s zeHvmPHDa-C8C_KtPPa$+7E8R;+Y&1C{0o$hW~($NnL9?(-WUvtyf)HH`P&(DUD->$ zLHSD*(#MP`okbzzRe>a~Kr*PdHoX(3-CENNwF#Ap^Q|iE3=$BLpzL=CpV6Em@sGDJ zacwgUV7MA_FEPVL|~E|Zi{8BEfY zL3~bp#;Br6=@{<8(9uaZLqLNTdsAicT%Rv?_VXNpeRqK~PchjIj{Tv#;NRQlnq@Bf zUmO1tFx^!omwjnfR~y-p6;+-q1k@N^=wkrV{MK?6^H!W8Uspu*T4Qf-{oY3%qbmCY zBBbzww=AQz39Mi*N?W%CH`?ym=EFMVk#w|04S#qADj(mt5TJ=aPR!A9&v>;Kv$ zc$s%$ziF8q{>{(;C`1N%Ou(M!C5&~4XhSi>Nx5cs&IEr}5R_@Dg_rw@=Zzqk>hDeN z4eX5UOzlnfi>9Pb7Ennqb)}JkjI`5A9akW%-l_7wsJ;3AIHS1si)?yjm6Lx9F9Y}} zG0Rgf9-aUGexA161Nc1f%2F8RQp`(zyRhDTzn=AfZvqYv@7ywv>n-*<&+kYkK0JLF z`ez>2@8;)yeet(LUFh<}paZ&Pn{yaXS1->klfwwT?s8tnm)rmqo=N^sAo~~$j;Qh} zZ!fkfhl_vzo^g7o$2&L_lQGPa#2heMfS;@`uJ`!2Htw6BY|!k;j%jf#Fu2&|?%<^{ zs2iX$m~%=@tMQp7Q5X?m2iXJoL+_*vZU`qroHBLO5Je~Ut}gBeXnW--=1nurp$W$p z9vnu;%AE+zpc&X?Q*xW@RxZp-(EFjA0qxDeP=3^P6s`<`94WX~AJd50MDRv2{W=fKJ{BZS+oz)1Sen z`SIwaGN%7;P<0Hk1p20=|9H6<9=d>U@^|yu%i9@X|4xki)i?jCi8TYUh@&7*V$n9g 
zAMW(kE3@$?wr>mkb@sypyPQzrO$o0Vv+NSWD_2dj+2Fl5L5q{;Y&M`1zPVx-HiZeX z+1N|{n|bPWJJAL%;n3?#o4)tOj3QxbhUJw#ir&D9{lUPis@8=cb3{un=G#Hx4Z1yh z%YFTkuU$8}kKX?ru=7&{8;V=r5qfZjxz8J2?rd}Jo4OoK z{k?>n*U#_m@}(`$$Pj-j=ufZVXLn`>%U&>==b+@zIlXYC@Xg%Y72mYuy3-x$d@Ww< zkuP>wxi)>cgDB8@cZ>%Ih)3nDY9|&r8mW_AS9W@Nh0S5Cr{|P99-|x`emt`h0SJ>VQ>Xy12oWyI@ggo%!%Eh7mXVq0Zl*z4j6`A*1hKG_Fl}2EE5W z&c3|n!xE|2F+`(huguXiO!;U#EGP5B!?Pn5GtEhh^7YV7^Y!;M0MN>nnk%)0F8}8> zUNsFC@c?o!G{eD9eV5bMvqEpd%&tG+r(Kq^OxO9&6ahDUjg-qV9q}&9>sV{DJ?EHj zrjRxK@DNcU;E%!R_nXBHtAc}f0q>@3KBh-?&t8NvxrH&q@XZo~?w4||uFKep$En{6 z8(xCbw1=i}kGmVvV<=%r*J^P9qe{_fzcNx11{Wbu%(I~t;0%4vA-&<#f-B;hg(`1X#E z$C0Sdo!Jl^e)%G6`N9aJ{E(V(fNT;HRqx5Q+PkY~pxZW=>_(7JDwXH0o6%x-b~kR( zsfv4}EVtAYhM5VWKvlvp@mjIEBuK$X?$KW9J~ygX1Zy z=iPA2=iJ|It~U3Hv*(pD4{m|%)%E58Vp*u5bAQ**(LR;1su1y2^)Kby#3+Ymt41Tz zJ;=n>beXx)%P_Y;v#+t{bo}w&C}%3u7-OEOf?KWbL}|oIO@y%wjX;n$YL2!cSly|2 zyj^$M#k$ZYdn*^dOuOIMD%~opuR>?Qd0rK7lsC&pc!^ffnP!j;c)a~Dhcs^qoUUpu z|2^!otbn^@ILZ2=f zBy`REO5SR31)R1M&9>_hHF!QoJ_)Z-r5kyTRglF7zimX>%@w^_ou7nlxzMNj_7cw+ z=MeD%Fs}*q--Kd0#IZKyRA_(S4V}{jyIcvzHP;Hv^X2i$~;k-Mx0*+J3Xu?)icekp5Ec zvK!~-8~DzT3h;Zz?RF@&sTC?UJJizKBDf}L&>N$j8`ZOPFmbTUYQxYBGCKplgNhV7 zleO`<+Q`1R=(5A+k}Jka#I#a#KwogH8sVTnLut6(9+9}Fc`%QLI-G}GkPNDMRH+a6x@ z{>U#>z^-vjMDqP?!CQ3vX$r#?y@YTov)d4D#3(Aet}YqH0nH3QITJG6^MvM*Gr-TX zj|GO?J!PdGbJs0U%*xb0#=GFNdLLdC{V})J2@S5U6y^bKkzcd(MwG}>DxP3l!y#p6 zJDj7@fa0~=fN-c%s*PQSW4zR1bCL=Oo>1b{%7ui@2}_CI300 z!l5s`GFPfSlvZcp%2epcAXuNtD}ol*jDM@;qETYZ;9hcw zpa{jI1_CirEaAoR1u$vhSu+KJ8+`4)-&CNrcYnC=g(dX=jl&`mGi2r;3D*qfp<|c< zI+1b$cPN+K;j7^3-K@+?i|Eg}vagbAGu8|g4fCGzm@Xs4G;pRZGLiL4YC zn6Bv(Ey)bVF~&0vlVX%C;H`s0{&sVg9V$*g)6rM78XKRdi)VxRhhTzz6w(JSb%@TP zO~B1edElSZ3Lp1$>nf3=o(pHo>5)Qd8gMYGcY?)$NaWGqu~APtuJ_r!_0?t&U+LMX zmCKC#1*U|S86|)2ho}YwyCIJd>4}L$z*%JJj-OlX5T|Vuqhk?R2zNTl#|@livEej? 
zW2b~QtNi1la>$1p;}ng(_@5ts`=QhFC!+FZa^g`)xs5Q^f6CAv;V>4K_w@unIfYd3u}?tszpL||0xzs^>5h!Ng0Zw8hLRc z1_D~Y`k$2{=Km@~*&7ZUtH~iZrIy?NhV2y$C`z1!Ds-jBTIbgho!UzciRJC~LZqaQ zaKP3;3B__p^#5%nWQt{$XWm%{l8itTR3J%=hdfK7^&hhM^XKQ{w$|7CW2Ez<%oVp} zjWs;>{axYWWxy>>F4!b|L17qZtqn&?)aq9 z0PmSoG7CQ@_C5FB%f-zuE?OXXMTlL-)Ns-)RmubZ@H1TxUuJ$67P2=60Z!HtVNvM) z2snTOZqn4XT*yn+8tR}w^ug#)#^z9> z9rN$`w2}J9uZz1q`=n5d$$W9CEXSoIH{Dj8y9=T(FBV+8b{3m^BOKWWPv?!Vl^uu6 zxzeD!(=U{74;IR}{}4Ftbq0yjwUH=TCPE$+$t4@z1a$rZTvjleheC9*6C zMJMDafr(XGZ};z|;gg1gbAwSmOPn!=H`Z9;r-|%OnbHIK+Fp1BhJ>*%&2{NS9Rgv7 z%xL*iiQ;Dz3l}sY| zk8UD@#&qO~h4Wa&>79xRJL`+Em5;U=`2t`pB`3?%xdwy?yHC zrsByr0Upc4EGgmt2(y0KoUT-X?GVMZ(4zvy{Z-oE_Xn(SFfyI;Hpb&ohztQvq%A^k z9~&0!pLN}S9W-?|A;=1ov-VDjcjCc``-2-q-GpR{_duZOA;dRJuXfz+o{Y7LP6Jl$ za5cj`efw|HZd9H+J>SPsV!ASMSXrKgI--Wl7tz!FUJsq>JKfC2F_jDr6O;z!iP@%a zP@L1!v()s~DKDI!_0utkw)#InXc9kP|He5ym)>Fbtf%duTP?k*zqTvxvc|MX)70?W zUqy7+KAo%erb?1|uwwnagK4a!Z(VBdSTT4%_U=n>Bbq;2hK}z2V?du~wGg)NXch## zoIl6!wWIPoT(DO|1Ks+E;f$s5qTZO?Ih`VFVQt3pDrzN!JkSwrjG9MwCrU|SAobIr zKxL+9WGel4AB=%&&YVYB#t-V3Wk-}lTu_b!!_{7#8$Ua;cK>k49K>q;woltwA0aYh zHX|Gv<1F9UsCbrR>=Aas-K@}TP5Tvz+dOfDF+#Z7oO+m~KiM#r_u60qKg3$r3FBHn zIC63Rn}7Q}I`ul4XKPe7Ht=HO%s6i7PWNE<{YdVmYnae>DmD%3r5RDJ3d6l&IJLX& zX+n8gRnMDC(93b_*jvEegrJApy*Tp$dm`qcwdYn@e2mnq8a|!>TzcG9abtNoID@Mi zQ8wFI&=p>qLw9F!@#AiFp{AsneG$I&y`v^w?d~^Ui!__#Q~m*ZsQS(e99-Cui5XLI z#h4Xb^Dx_(-1JzU3xS&Ba#Ki%jeCfdXG}gfrOD zjHe$&B=!hs<#9D{rxAImtC8ne)^Y2$Y=rvlO1y=~y16m@Wh*bM^n*ko2So`Jr+sv5 z$W2BI7tR-8%yw(tsyPLZ{p6NB=i3AITOr^}-yL=F$L$ONFu;ysb31T<_8b1o;;hdh zw`1w6jM3F$3hut73$=?r+6}X7>R%xZq}L;-4boOl{@sZ5$thps=^A6QO5r(ZQ|V++ zkmqG+*UH|{bBXa$Ie0w~@#-74m6k~{aeR!>-~r-b2y*Z*M@PtVRj$y(LcWokj$(7jsI4rQmV> zZ8Lg~+#wVWwVHv{40E_1@fMtQCYrpqo@8b$8Brd`-IRAN-(vRcKM)S;9CAQTfAd_J z_wz}-ah`v8+G&Bm2x98|-g1FQ60dbGr><2s!%QS2#1wpBgSV{(rui3U=?oaO zmzimS-#O08#8cPNZ6J-Y@yLND#jV-)^G2sU2n(0LN(=e3T61O-)n+bY??^Y{sJp`~ z1B+MNSqGEYO+0I9$!%5#>^L&SwE;DLsG(~!FzD$_huKucCwtU^B7aLa1+FQp-YNqE z4u7Bn+#cO+(B5ID8FSWY?9PZ~InxAo7-E7XZD+UlFVKMY=R(Ngj~y>}zb{*Cv_A)T zGi1rrvfGFFpg9j)>x;hhrk?QqeBsu-ud@rP|95WhkD09{0s}rn-p7!txD$S5`cr1$4BJ0Exe%8F2h7C?e zv3pDnDC2I)fi;5g0qJ|oo@Gu*VcFYjYIB^XVLMxsuI}|Phb%lTi?$z^#oiM;@?RqT|gkvEA=OY~y4NChI|#12P%d&;ZvMs~0a9ga=>$HGm{^d~4JkAjy8cDM?EgptV60UO1&!O6; zI!5;5Sbw_c+ZK9_DDXf)JrnCS96qnN%P(vyMeqp#I_}6jkZ*V-YUB#Q>###w`*jb; z!cyK~b=GU)m=!mEgZ%6|amLlnrv(tWNw;N}_`y_%%$nWQc3quy3J$q=wPn;gP!GAP zYqN&Z`E^nn9}HDew?S}fJl_2BF+t9Q@8No?t8~UK|7mf0RcXykf6Cu(kbu23*5koX ztTQm=;tk`1tOiN#&8v7zPzvu_pG*@oW9dd8X5oK)oYDG1NeIWVGB*^A>6^OHbUWnS zW649>b(g>=QtMnv!;QsdzC!GFB}mXB({F9OD}?zT_RdVt#^s!mHnXrny-hpNKHLzIXU5*dRw zNtNalys*7dn^nE-8E9&PPP?GJ2J>*Lpy2eZ%Le>}-%ifQD%S2>QZt6}nmW$5toID% zOqZlvx~P@RH*rzz(e_Vmxf9lGhgGIRnO&l@&NF#x?c-`Tv7InlI#L7tEsu+`j@MWu zhZH30?~&HO>11HNRNvQ#x(2u~wZWnLZ^rq1s4G`ZL~YQf;U^b{h8j7`1CgPEM8Gn3 zc*^}P905IiN!Hzi)s84>#zs&7*0pmUhQx{mL{2JbH+QO(-5`V;AW0g=3E3y&<4!A+ zEzotjtvaGm%(=B_J^a!&8WzfX81Cp}W;ts2VMBIvZEd@$iSM7F+t}~v6xl4zfUUgh zd}-*)$7v1xau3%~e<0(RR*cp_n{v+iUf}iCSc7@)_GUS?TlZIUQ*YQj zP*^6nkX{^F0B@lkTq!GS+=r{=cGgg-<@|=Dv9J$KJPft+!N5d_j9Qy;Kr-v(O z?YPsfzYIMPMGcowzwJHfea}bv?-3HgY`Ctq!_yb>lD!~x7W-}{p1`prEJQX!b?i;x zXa`NFZOrCIiEJGROUMsi+Bu!@%d~71^*;c(%zu z2BwYY+JO0o>sgGS4KVUs$2+l1udSm2);3#F=rV?o!qR4`HB@AzT?%c$9;KCR_}!5u z`<^H&QYrZcNkBc9NvtgmunlewcRUo{n`~RJVzJ-4RqSL}zBCsOtBcMD`a?oe`2N(vFZ<>CwCr)LHl^&nDpR0$z4l^+f|4^558>)8U+b`WMst zln>vqGD;dbk{~g*PQ442S9xdQ<2ae4pRKk&T2Gu)zT8%vahHlNs+sLz2}h`ki!?of z9nkQMd7$+tOkD>=sozRbUIW++XmT15^`4d1bkx+~4 zMFAp+_;kNEv#lUxWr#*QPXJD68te#h%L2YR1w+w0JA$p}TEu&BLu{p5j1FJM9Vly@ 
zB`>AFELF8?mav>l!;Q4p-!!DQ{ng9=dleRo9K!iK=YRu^j^6U@l{)x5kE`Bg^QAfUZ%4XtoZCu3RwKWE+E_mqSzNr+)ZI`a$r)#%N zi%<;id*bD7gp_6WHIGC?uU9|qwo85dd%20xJcA~QY(nNpbJU!bKW(o0Pe>1^G8uTq zCdDLnYgQr(EHD=O5eo#{le0`@AJGgK|INj`o*9#Hvo4wE5IP<{=tEIqav;n;myX$y zw+W$Qb3PxI{ySeYa+4Z-9s+?5*v_FVn#}?hnpvrWvy{|9HXE#IVSd4ARWkAbkkB5o z*`mex9Kc5h*v;x>@+I)ZVjdDhL~lJ)e5kLfCcHu!fZh8W+6bZ3S_x#my5B}z(GliouG z-=m0siV}IbA4w)`0U*HVJOPeG0_1BpV+^=es-9$hADRh!7}YL?W~U4)=5M(%uP*08 z4{Z0wX4of0vJO-;NkoWV1fpU^`DB%_Pa-u<8^dh6Y|(a|sbyz~3X>XGe{mg-FbAvM zsD?Q166)iWVl^={{9tqiQ?FhffU28%fcbRD&ZI}-;Io#Z&!gc_s;_QsR7&(lsk()a zNu{eL1@iKC5>jQjShv8YeyN;m+s>X(=tsrO^0&5`VtSQbuX$jTz-FnSrb_MTg@33d zU@5>X*@5lUQ7Ejh?;_3=!bbW^DI!KR8JthnA(}CiBON3zSS$)m*)J~0vU8CHW|t%Z z;n;yeCpFk>y@Uve+sa`jkAh4Ehvr!L3}wh!<#lLS<$UM-&c2o z>9&@p_)I?WHw#Rn6K3BJX1+%D75po1*^=&b?}4s!SLN4zjh%GzhyG~v<;2%Z2-=E< zuzX61dees`an3O^3Ig<1&P~G{(oGa4VcGOcu zJ7NXh!V_s&9H2$$2J%DPG8L=q_y$qXJTAPtX~|0Jw-#0)%LM~g#(AebaVG%TuloQ1 zu=hwyUx!D=H|rbg?#CLdA2XC4f3(eD(@Ed4Z!1Tn;Hfqu)ru}<9xS+1zMroOWuH2& zplI(iHVk8^@JituS@R-Tj^nV6e)-&X}hv${Gs(e;K zk5m_<^PPy?Rp=^b5G!K2Pi2}qdA;U5{+TJ_ky|JsJr!_TZot<(ffxo(3M#RH0|Uy~x~wyix<0Rhd)Wk~ z?OO!%VKHxo71J33O-rg#hyH?rW?qJHdfU50Rp$gldweil|s1)?A-z^`&x7FmQMT>-bU_P16$zQ$wS2avs1!M#+YcPi+9@kAyB|Kepca~U zHv-dH54r(ZdW1z+D`2dP^~$9Zz?Sd9oA(ri zlCR3N*MDcU{cd=F-9?fGLx1qUSr$5KsCoK*MUa8VxNlH@puLn~h{_#9BiP1g&)*In zN~qns==B7?n_2hF*Hpxc{!%R`8nb9w&OTiv>?L2wdu8tfL{*?A3x7qVn8TV99R(Q> zO!cX1fY{meL);eQ(#9}{qU04#5Kb-ZZudUoY^lFmHj5j9cV)ha=VC zh^RF(kt!!?5-W?;Nu4v!P|XpTLCx1fVHP!EloKUvP3dSdFA^^h#F>x`#43iVV+Lcu zdYZy7^r32QDe5uhmGzEpHF)t6h+61=`+|1{#=rj0CD8-(eA9Q+8d>F=kK4mQkQc0d z7MLKJ^ovkLQ5%+k2;74Jebh<9-aEWH632~BL(S0FGt7t5-6jvq8FyepR^kE$~uwrc4c>Qk*td(hzMVqVxc$Bg_W5#6pWJYfUU0B0p^nVfTkZwvedC%U%%Q zAJjoCyDp0?DF;K6yjoZjdkn9O*BW|_&X@d=g#V>`Bb&LiG4$%4lV8L+x~VWh)r_OB z7@^RQo7%x~?mNW(e7Td_g|`c*?|DT*6>ABN zScWR@Qmv2cx`G83Rj}n?E%~yJWg0ytP0WxTqge9qRg@!rgCd47Apzfw9n1C>w_F*7)sG5ni1vA+QB)&NtSq=Hm6-fsy-rcA?&w zzTH5I1*eShF^R|By#Xx{s=FM=Ll`X_ zwKyfOv$MWj3otVa-i0b(3d%z#VNesVs-GN}I4N(--Oh8)pjAecC6dk6#pWU^QX1Wn z%NBHaO?*n@;U@}rZgq-z^fiy1}7DdJ#;8h@0WS7P9j@sv~yEEe!(L;E(AS1T3DhBuo=p317ZpHfMcJ;kp z5um-J=vZq1$nM-}|KM?YIV&+$+Mf{sul0z9@m*X)E|XA$cyce*6;VCwB&+7PXQGU; zT~L_0@*qI;fyR$xY-2Ad^~}?o0&JpGu2FZ_6gwX;K@=b!SbTBxvvXd2jW|E)>dB=I#jYwa>c=04|M%eR)|5XZ0@aSVc3%di6!+0n9 z*hDHfilZz&SzQrTHbI8@JHFn|Jqc3LtcYO~Eh7NS&QVkX4Ld%XO&knzY#|;x*of2! 
zK-bCp>kOe-sv;NnJS_!Ugf@8*GhY>EAq>4PxzXQBoEUSzl3e<-qGcj7hBU#Kk1U}q zpUmS^4L52N^=pyfZZm5RH7nYgnCT!~*l5NKn($=I`6i~_IQJ58q*)5tyO~^SY;Yay z*WPq*054^d3-OS{DneISpyiflqp&DhN|XkYhQ^4=!N7a_!D<{V7CG8VMPc4=A0#5z z21yS--4aYUPt70jnSe!03vc(!kZW02v-$n?U#OfAG1(lfi)wPw6;vdxfKwm(oG6w4 zmq!5pyFu&4OovMziK4d_91%?)q&=&tbB zsJJF#R4KByr82ArSr*PN0xINR{4@>}&C&x(ESyF28Y{buG{P#G3AQS`MbfYX%!j(g zm+kDE-Hd;OG-RvLdXs^;L@T1H3_qDI=0LEsHEK?Gnv}izMMVR}prii$uj4WsRy9uH#N{$q54IVEs%eSc|;Gv*0Xg; zSU(zk5lh2j4s=XLjE%1i-INztR{0NnJTdW?4-Fe{Q5IAe#)CT%I}ulpnA@#^=`B^9 z<+*?ZM`9I8*~JhUYGc4R?AAH8dqX!k`0C59$PG|V62?f`xuzHo>nYkF1=LkGySpoG0XPh{fFJi!tC)v2A2XNn1Fe${$LN2ag|xq^9n1`lcQYAJEyD zd!_B52hU-S9e&eZ6-)Un5>jF%Ej%`?y*g!>zI75!%mlE;8D8g4X8cUXPEZRn>_*Jt?2+Z!v)R>RW1?$AcD zcf_Eh`iu)om+EN0cyglh)x~vQ3As^7Zt|Y)j!bcK-s*Gn&rEU4{X6I~pnys_ab}p& zx(3Mik||E#z(|m@mxp?TfYx?yt_^P3?hD(nQvNI2a9KkkDN}r}mpcR(UH0 zC+%Mmz9oksjiAH!QA#l(ui=9*NW$rfT9%})vEg6GbaIKbJU!ZakmD=epHXE_icr3Z z6h8zdM~jHBl{#_}uEZT-G3jf=?CD$Q-G44-Qd2cG=(R+DCNMeQ?#RbWZ)|>TR~Lg< zD2>Co;i^?7HMov#DC#z+d^noha%eC#*?YyV%UBNxC z8ubjg(5SaMP}zAn9c5@UTB>?_9VroZLq9MWYSgG<4igsBG3&b!h`rE}7urb_k_yNT z3ex6Du7yPI9Ak*JACnrGp;c+b#A0L3_z-e~Nvs@M3E7#h8JUiGeLS#!aK((ft7P;h zTCjTtlHGFg(9hwe<~14G%WX#>{%XO+c!R-G|LgO_%(_;IsebNc6hIGq?iEdLZ+O9h zTNa$%-7Us0U25+`aJ(9(KK}fQ29e0fC!4@4ldQ@HTO*8@UTKjvO)e& zP~(5;&;O-A|Cj#!U;6X^XY}WP53m2FKmRZKbJ&vf=ResX|1bSd|E zl_emR{8n8F{V)B=`Y#(qR!Q~irZL*0fecYwf7gNGWHZ+B>gcQdrq5q$2_{F#iNoK| zp1LCChWYhD0-KgdA>%f}DWDOqhZ+3lp;kYo^mmDbs-yu_+wUne2;Axy3oPpOx|}ke zdb3SYD`L-rsnX5hWKs!{EmuR<^r8H9X>sZ$#*#>45I({;Fl~7)1V{CB{s{Ps4)oIK zxWLMk0zz8q4zC6{0OYb0&>yeb3p_IWpj(N_4jqk%Ki6B!c-{yoGsg>SKuk=K_M0a` zT3M)thc3r!czcI^%4*}oUxV0>l;gG4BrD(?Z>qEhwL zW z=I68wU|&P-xM{ro0{A~dlftv9Ire;;zzNPa6eF(`aCp;tx^K4sV*`|!C!T-7KLYgR{C_4uS^h5qRKq3YMmgDaw@znC zN>M~R0a1bCVX$psIiPTVhozclFML>iYFEN&QsErFG6%GPC zb~rgIv=SvF^nXJ@YdKP-Zj`g8O3yCyYD3g|F=zgs8FP1Xp8kAZ8zVh>KJaG97LaBS zLw;?zZU3yK{=6dY#Jr3U7SQ^7qDLPPK&DTf&D4a8K9c**|j1Dt>t6C5%~^=>9(`(4%X*>vOU|*6@3c;FN_AG%`r< z%3ccGL~|EK>oBvx+#L+u#zznKaEe0K%sWoC9$&6)0hxQwO#3oL?uqtuhc0^U=vR{@ z@2(ykyLR=>!!_3QlU6C6&A5i9&8zMGu45Fr0#dHXzl0{%YCT=A=l3@a2Vebh-IJ^dh;OX&Mh|=9 zKBfzIp{YCKA7*EV&#V_F7W9bu9KATXGL7!vj_i9(PpZmJ&0GY2N2$x|by~m2tJ|dXu;spY$I5`X>^7L{(K!-#~lvu?VoNF?w7p|y7}B79<-;UE%l`> zB#D^(rTJ5Ln7vo7awFZ8`>%|a!+qSrd4q?`yYuCQa^|P_rRH<-qtvxl?njfJlVjvS z(2G82V7)Mos@;tnF=w8)Di5dp^N6qnlW@O^Bl~4~KP%O7m1k=IP9SQGWn7t-U{a>Wt=?Z+b~5Gllb7Jc_PAX~pf+YrIYw8e z(Pn-k;{%Sun>TZ`g4AlL+d^N)kj3{9dm_}bKeFcGG5hnLT5g}0UjGKgz9xZHU~<1q zFn8lot7Ovchjv^%VE^bueZe8$z!Ug4X5Kuyo$f#W(}B;dW+ZVxHn-r?C5ikDzsU&a zug;pAFL!%8OYMU70Bj>n;6MJ8`9;}_-ZQKB^El~9Jt`PG?2(>Zdf&a4gZt|(bEn(w zOL*cc<3IeTkzzu2%{_8odg`r`&Z_G6!R2rS=l0LzJ&z#ETkg-bf3^M13F%Bhj(|eV z2PD@%1xu8A!*8f}6$yGDCiNCK-AZ-G1svSy5uhd^zvkh4``SAv^3jK_H^e*e%F9v| zp$SOtm`M>UF1JvIOS_-pJy;5BrjX-x6N4LseQZn?q$cct0BAdp(?l)oz-V4ZErp^B zGGdLHX>9)qq6G>fKMOJhRw!Y*8mQ3x7!a6}^T@){oyNtaQLg;H7i0Z-k4>t6Dr(ov z-^Mjh>kQ4T&gBnEGv=9{*ads7)>h@KY$NjFOZHb|(=CS>TH8gsUTr+z-DSN5bsu~F zT(}5^jcGx_u50SgBZM1V&$s2hYI8d+k2}sNqd7r+fb8aFof&u(rM%TA>q4Z&Qd2 z;RD%$NgBZFLU=0cIiv-j+V(AXfwj6izqARp=F^P+HXmAPYSW<15>Z=r#m?=7nRiBj zU~#(3GKG2ufBk#>=)+lEe@ayw(YxEBQ}2pksi--m=4z*1pEh$z%<)ZFWnuX#MCcUa zd0(K`Cf0IVOJrxx$d3No@xrCtv}!84Dr=y{>P2CxOZAx%O0D){8^}?6f=caEiSFlU zG1_)$H9=~!blW+k|9fM!1=Nbw0pgnG#%)()&)m+*`l7$SR{w0aRo9dCeIi2o%{+8o z^RBq->tgLO_2RV?Wy`k`!g{xJd9DvS_N#wyl<#n1&i)@52rUsR;mh>5x!p9-{vYKz zfUe+8Sbqf~u8!2{u1h|pEY7ARw zeUt@b-kQi!UwfuZd*lGFkRBpd&%ey|PV!t3D%>Wr1K085xZU@t`*~-Gg5JIncvW-? 
z`&?dg5jQR5A@%5jRO2yyv0hV-4vIYxTpAnu6RYA>;qPFC7#MbCKGl5_#tp$Wx*1hc zKnFAg6AoJJG}wAN%svEZ3=V_WtAo3P2f3Z_HsEP*mzLKZ&iT$@Vz!9?3PJ6*=hINu z+>TrR=gAi_foop1Ut|5{vQMvrhW*N$X&w_j2tM^n3rChQ^xi!yNG4>V*UYm{@AGpi z|HW);^S!q~5<@BkgD zD{zdO%8T}OV?zQMY;`0X#Ol5mFMzh%?wUcBCGdWb<=Th~bWsR@7sy7ocpU#@M_$7C8@y7_{SOwh#+-Sx?QGst1$;=Mo`DfVt?{&UxW z%qHku`*ndjlzGX6-uGde@vvA6i#pA076C<=&ULdy!->;MY%s;sZ6V__fO)F!; zM7JABcI2pW&WQGTGbr?vTUQBcH>&5^lU^|@__b(9YSPrWn%g%T?ivw+X`spGT^Y%` z5s}>4!S<#}3e&9MvXJyKJmcxu1$%BL(IJEkO?o#>fNk`DC;&Qs&`&m=-7Sp~g=vvi zES|fcX6Fg|fx52yD46XSP;aAjZCyL#0J|3m#;Mu_&})0CrIEpVCPW+fl&~VNTh|SL z^&r>r%~SE(r$2mf%tOhoWI&u!Ng1rLP}p5{1NIJef^U~Z??DWKFz<)KhLQ_~-qo0# z7@+J|`99S?Ri&3mE}28*1aqrse@1|yfJ)h!oG`^P>WU0b6HyIb$mdg84ojr)w1-;S zs|Q3qDBl&PjADU*p~>}V20T&wsV(9OBjy*xn`3qcDKv{U6r?{D)AmHn*r4wA(L!5_ zAWEaT0ww}!ArSrgcEVKpi434nbAd1iZixGw617NvFD=-M4tz6wCDJoZ(YUir~&* zA*SaVn|k+xZ-%zH1+5X=8^2DuZubr%k;#%nA&mU9tMp3fl4v8wINs1khoG#ls! zz60+$u#&GHL_=_&M^m;U!Vz?Nq?BApTpPShj@SV+$D^=9TkK`Dm+%zwYx!^u(ilWX z0IX%qO~QCSttYMOln)?ad6TfPJYq<`#K{9qG$BlJS@Y=kb0l;~Kwb=MZpr^QA z0rWX~u!Zp=T@<3j%7C5K{W~tqjl1eb)K&r7pS@eeo%8kc_s?~*wl_Bs`EV#>thWUoU z1hzYR-PT5aM+EI6ew}OIHdS3y5`zaxX$)TD-5&nI{Z)i&C?cYupKxUER3F0q?mdHR z#Z)tcw_8M0*-C}nJjAMc4}Ga6YK^N@3u?KZo>uj6{qA37d_nxJdo?Y|2Ff}QleTNK zrWGZQ{P(ca`fz!rW%kNY{N$(5R)k zU4?F89CAC+#9$m)1~)Hv+~s&hYwaatDh@u3i%6US0Mh|_2H5g-(BdK6Szb@zJHoju z+*7{42jB|p{EmIh$TO%qeB z0n`gz#X-G}JNVR(6v6}S+2#y~N$Q8O=du4QXikb(p-8-3?@B5~ubadX=*vF{cBY|c z)@E;!N2CCw=-&Mi7!JtoLtnNoXZ#5eQF{itxROKOEACwYcfTB$gS9?mB@fVce{;^w zX_$-&R!5oa?D9%YO;xW&DcC(~{hF$j36fXYtm z7UMo1>5%HNm!;x7^Qx2LJ++{&Wb10mJ5D;CWr|Wui;JVoC5a~8w3Kcmy;j$8eANsjqHbtC` zK4M*WCh9&bUcOoVxoA7C47oQwUMo8fm!$_P9d~$Mx*d01FMA-xV0wVW6u#Wg)Cp}; zZ;A-)@^eZcLTY=uk5V#>sb8uDt|vpaPS_c|C>x;|IzDE-@*JzVS1QCudk=*8q9KEA zNzRn6t~LIZ7GaQNhN*FtST2`42N4hpgKVYc5f`L{=@*7A2?qte<0uN-uSpxTX`!=( zZ)1p3jYsc-FWePS8U3b1?Vx0=fj$`s7cZ_HYrY@r-6vYt5>CfhzAsU*5wrhy`T2zV z-XB6`D|KyFq=fqyw8@}#;XCzOp>|g`PvFd`68SySxwtl3rybiMDcjSfqH+1GdFw8l zWfAgiGauGogv|Y&Es`PJFs+ajwk5M%&)hc>65J#{u1(D1nY_tRQB69oky^sXH4A*b zRvs+3iRYjj*LRhO2Z6+=P6AC5hAKbD@UBQIiGnhB2{I5tsgM7t3Oh~*!J9iF2hu{c zn20bPGb4!>`$AWpRY=!{N?V!!U8F z9S%U(lZ3(aw=>yR6l+5?+4_Km`7dJ<(NQTy{MA@xy3O08N@TB=ZKekRk*b!-kLb-F z%0U}({M7)oqZg^w1KqCU4Jq|x44sXo$oe_sGe5BsGY9$htX}-Ia%Rst+-tbq)Vdzu z)sCq1Jl@Rqiuf+fIK0qNOKoAt?(EcGQj(VKs?-L4w0`rMm&fY+;{U7;O4 zG{(oNttUWbvJVG&DEK0MRuJ`AP^$pF8&-!S0YFfr0KI@ErXQxTerGp~-T1tnpI0xqg5k46w;g((& zc;MbIuPj7qJZRF?o6@t`bUZm`@zgxuF(gN0Z-W;sXM&y-s>l%{{BV#?P`Y{yK@bi8 znmM$@JEg5cFLSk5q);*Sk^4s?Rjg1?&oyHlAktBw}TzPGY>2E;$Td9^hXC*ryGXl_vk2ZPV1#9|aT2ucuPH6SNKQl39 z>8HRN^9q`bgw(f*$dWKm>8xCs)GZRe7=CCyu>K-HOo95b5bz7yF^Wiol11J>M2lAq z^XHim8Y(E#rdfxc-YoMbDTH0*RC1K-w_oh+V>*wxh1hA*tGIf^<`O^2;>2iP_HZD? 
zd&mZPd8a;1bNAgfAU~@RL1;1pvg0WR^eq-BI4G}(jKx4U%-~oO&Y&#`$EAl+drG{h z*G15$Voh0n(VN_+XjA;!;VF%wvv_BMmN=bWvcW3vugN@iyxy#w^UHI75cKQ(T-<8c z;a_0*Ns!d#IVJ9D7;WCjGqq8#x#WucWH76RazA>L2FTZMBejiCOYpLBf$(6Sz3(y2`Q8$e<#tGLbRK)LvAmo0y9 z`J)_v50XtVsJNl2us8Ep_H94+*!t>YTqbNS*LlC4(B5bEY)rlf_N&@ zQy>Qjb4wvJ(VTO|!M5f+9}k@c=TX2foNqYQwmmVT#upn#A?p#R&j&2U!i%-CD1$}+ z&d}8#@17N&6R`oR$3CN{4s_P{|Hf{7UoxmPuU%1{sgR9*x_DZq^foakbQ$k%wTRT{ z^dY5H!o3yeSxeh z0sx2L`RWBK(vo*xD-MF|1p4(OIRd{G(Gv?Aod#3I+5q-i8SL;V(jH75bb~s880MWg zA)x@iEQnjyDoJUe7?+f%EalB^1pGw5=KG$pn+IE!@MZ3DF=w!sAC}WjF*TE+Izt;| zS*%*giXX}d<*_0?%dinoTqevK2m%a)wRUike{(_i=c^;F-6!K3TPk;WqbrFbk<2kk zZ{~ixl#&ZbM8@9Wh*ovxqEnNA;`Vuk*K>QLQ`s`oyw`ttC*M2td9ta7Vohz})h51< zAz-ePW*M~rb3Shu>C9NmtLgT#_yALeq2by1@71w5Sy+iO6r``N`DH?+jsJ5VFPKZf zw-Vk-2w-s*ufuF15iSkwoIuPz+R#f>P6};z^Vgi5BXYta-7Z0<*t$E-;p{Hod-Gl( z%(c}7t{Sg9cOFU%(iMjcG)^h|$W`%GBm;&&WxCXM_j5a)c3k+|?f`y}Ik7YB1;L&p zfZfs-drP{#5gnZM3k|$UpvzCF-P?(R_xU5c{{!N$59AGgm+P&~;dl?L^iM{zKP}%6 z8vk<5F1{>|QYeC4NFROtsn($`_rqy~=WI{VCpa5j8RUgYC(8MbBt9x_pAS^u5I1c_ zmOHms*m1OCB=LqEnP8-r*9!i8NR)Lz76{pmR#lCc`xWAynUI@Z@-lwWaONDozw#x} zz~=2mQBRLc7qwHXqM8I%w32V6B=5INHE?r~=QVC(NPMv7NLX<6*6*e$%(Cb?vG9U4 zTDn<1)&RyC#$d`!4P|<_zxJMHo#wR!f*++&xBk!1%jfqgnjVyuJfI)&NnJ6CJBLut zV;}VHKfz<38%ZZ~A4Yqmmqont9yX}3e`Sl@rUOm3S(}#>v`h!)68}enP;<==6%R=}oU1IaOtdDUywD}N&Tb$x zVHw1g0WDoM_h}M$7|$AZDZ-N%^b$;);fj`eMX4o{WJw5RiA%Ihi)wPdggr^N6A^G| z@!=}Lpyw4HmWG)FeZjDVEegZ(mUgyEcd`yZioTQKDOho-PV+^8nnNOwg;Iyo1xJ&2>WBO3t-=8_FHcU=YX)h0cXl3@ zD;HzLK1aHfwTvAB(fLF5 z8Qd-Y*}a*!^~8JWm*=0p@0DHLdvHZwk*Qv|a<$A0B~311dk5EOl)czG)Sf8p&=^>g zgpyjYUf(^CC3 z_D2~tFlBBfESjBD?+WR3&prZIf}0cxE~i8c(P4ciPDr9nR^2cDfevxVV7*3`BZw4M zwq!_E7Hy7;g&Mve&UZ0>oXwaBwKU_sVwfJM0yT!DV~fIM*B$=a9|=(5|Mhmz0~H9ci#Ro7=XwmMZ% z`|aAz=$8j{lPoPRhA1|*SPk#T=k4s5MVm328|Nrgy_JD?dqtDdT^Btk*3D}&yvy>+ zEt@g8nSqiBx(lC(A;Jd_TZx<+1x3<~NL8qZjU;k$U&Vw{a6R9^j{>SpVmDpsaJ~5= zmSqGw1d#%%m_3hJaX*wcQ7nnBi7+&{m=$qBP_fyy#9t9B*I7VSn7_tsY)6%fmnKPU zaf>K6C&aw^)S%X9E+LUfK$bap!K0fv+D( ziOK_Y*KUt9lBdJ<402G?=-YC*^mHJeC6)uKxnC!dTE9O^7G^K{+&WK9IV_oMPM+jC z0csSUEgUu-eJ#S!h_w1l?^L z#k*wOivd?7x7+a<1Fo1h!<~R@VcUS4F)rv=q71_0ULTy2U@{h3Qy|l1o0ww=lb{py zdv|tDRhB!>IFqrlj(6DmA~Fi%mS)P53T_0vb+VDO(jg7#`uCjR#sE_eJk&78pM(x1m^?Bh_&@tk=L<)I^#*` zCH;Z#dsg|l4rqvs81GH}PC2h##$=gyVd0(pq&2O+ovCE=>t8yl@)3FN-obh6 z@EbR6eBnD=DWRtf()BomXoRkGOk$!eEgx-1!TCC9JH}y$WW1c;p%? 
zvgRdiRi}m~KyOe{;@B739|2TbygfOol|fbMw2Ws$Mq~6G=|wKhT{BV+@W)PV+SoJeb&INHc3 zxx+UeYloMxwGj9De($;X$Bp|cVS5eV>A*ftS5n^ac`KF8tIj5-%eP6oaXAi7x>Dq1 zyQQ38SW|lgfZ7-MX%7wt)D4su91=Ss>@9ks_1!&!^F-4#@=fuKU)Fm%AZ`q5NEqAe z-{&GO95WV~3QZmB@~3$-F5D#Xp|p%xJK-mZM#|mSaG;U5v{UiCaVJN2mniI9qK27G zdV=_OrbT}l>iEyJ=zpd~|1&N6pJ~zm$EHQUiPwLoMSq_b8z zSkOCL+nL*&7#cd*(`(xqTG-Qn+xT{pRl2X+WiexBJq$2!j{VyGrrMmQdoc2(==CS} z7hk+FrVddh!WLKm(3s^+K~qpQVs$H!#hY_@WS;q+94#`0rmh(%U{gMrMaAU>ZzVo| zH3yWHm_OS1VAZ~o^AdV``g=k=eIrH6UbI&+_a-oRh{gk4hWU%1hqmT!qWCt zQZ{60v|N0u8zPO>;mm%WupTh0IH9ps?iF@HY>MW++y2ADCXT6>bd&&lwm^P@&9@Qc zZ`kz4$%^+dAPlU5bqEc)$CYCf)mYI}!Jr5fRg#KVTB@Y>Zn8C4AI~yw_Y0%wMkQ@N zzFS*)dKKqr9*gQw?@^%DRO0c{`Utz;ypvIaQGrEjLwASf4+Y-tb*rC4jveb-4 zg)HSTVJZrX{3z1ardMy^<)l4#pF`&n+i5FtABrf;EjXL|>lEm&vuHah@p3wgOSEu0 z&igi>bJDyeSRwYdPggq2V4qK21G<_3ld$&<)`L)YKn5IIoLHE=SS~U&;Z1<`NFD>f z7^|=KEXIl9C}Q{>M>BUMGw+9n^Arx1@l!x}^6cpe!hAhw0h-$Vw|&$O zsn@b52M9z;UD2U|CQZsz@vs+Mui2m371*_Z=vCK#E09!PAwX*D@%CODHb#;qZ#lnv z$A~U>*6HK;lX**N4Jl94SuI39GHv|nZJ2j|?BWOhlISxVG`KKfrW%p=vr|>HjDOSnuZP+eIy@!wu)ur)`s1|cNO(@&?)2~z$J{2 z)e7P0VaCOd%9~AqADF^fK)`-L?h9j1Kivp?;BTwdmL#LO`{D2Y^;8r1{VkYTz7F~R z3iFs00A~N=*R4XE+)myIg2htw0GR1y#$!U*LlO*d7sO51G9|^qNzb3-0EBx|Y&4Mu0(9N~l#b*wIFSLGNznco9 zzBuHTyFgD?$T;`n1%a{{K9^7{Y{H)8R5H|i3#NCnpBe*Si=>Jg)jv$4RBx*F89Nzf4E@M(S&3n_-1y0l3kI@9Jz=%S9h_kYi zxQCTnEj=9sMLYgw3-^;;7G5WTBt!!ip4AgBhm&zvmK!GMOw!=Oazgh)l=yVp(lRM* z6E=LXJe)OTY`5H@#@3u0L9ll|M#ddjk}9`gKVq|K3BCvU-(fTBk6`xO;&xK=0mS~XMeza^cCg{7EvpQ>vF={ zn8HCzDj_TS0u>9lqx+5qdU55>HhNwzey(Z3B^}-yV;WOIuz0b8OJm!PCb@8$FSeBk{i*6OJ>SswQh zOun)Ocoy}$FDBhJ^gmP$pxL#dzeQ)Z?{NNJHCkwM1&c+|`9ndW<2jj5Nvqe>iSH;5 zJ9eh!1WJh#tr{l0u5Q}YY1lKw=MZwgB!`xy2ZL5!^y_7W{z_+4mPJ>8g|BpW0?-+# zCP42Z`_*h`+jdxth; zwq(EaRPU*{fOO<}GX*pqf;J5LFvO=Xiffjw@Y7LW@N8f5B!Ak{A}OLmok@ z282GQmR6P4H#5Cadir8Dy)0YG0Zy9AVAaQkIMV z%WT&8<^zp(@<02%^gV$RhCvlk5Mgz!>qz+TX0u@GcI{Vjehc6_$-j(_8U8{wxG9l< z-{(UUyz~Vd#(4)&jqm9EhiatJvw5PIcR}nkpoWCM1XbbEua@e&JNvw+B<#b2wZi#T zHG*F{xiWeONFnnquU5(fGR3s4P- zZ>o{iYiD>$KfIY958HCg9d-!6hVk-adIa?qVI?h>U4=RAq1^V=8-BAVrN~euOfpMfsTCCt zOp&0Hk;pD0i1aY^OGy`EN(8yZ>@;#c8u2@JkD93qmK_4ujOoUXdDjXpHG_Z1xxt9J z?uL5tQY8EoHKk7nsV)T94pJ<`e8*n4*jXf-UR(8kC!y^+Vs+}oe$9ecw1$mkK*c!{ zIOrq9fc85v%w&~`kcnw0uO`p#@oP)T0pNU^+4J3wl8G*{GI)1L|M0Ag&v9~ZrGE%0 z)M;||-vjiI*|eIzBp_NkGDL-PoBE=m$80GyBl^1E@M?|Q5pl(^ zD{SBs>Ds8fb^YoD-I9RBoIWX3N*GMldH=dhNaNwgSjHgIH+Lt7LlENV=~?dvaCdwK zQ-Se}VDhA|a&0OV_Fg4B#_~R^nZDyNUuT4)+O?VnDQ23mR=giW_yWmnEzac zD)>$~&$0Np~x(? 
zt&D#wuh0>>pnATUH1xJH7ngszw(9#FDwkcF z5c0+g-#}AyKtO}S=C~|S!-!xIOZlo{)`rKzSR4UoYL0_L9LuU#CShjCO&X-%p;z-* zl_b;c!IJ!SLfEpBk@Hs_Q# z>`SbY)+QUHt|Zue!W1*Q!H~f@1;qlx1OE7iba6Dr6L@`3JfYTjH%~lgML}-vPOd*} zVZ6a!_L&Ai5e0B5_<&15RZHJQ*Y5X<*0+uCJV|ty7(n;}0WctJTg@B@2(zvD5gaIf zfO!774mQo8TLR(kvJkyGOeBiA=gE`Lp2xGGm^X%0ra`9}55-7Yf3P+nYGl6$nWBU) zicanoG&NLu;9biG_A6!e*NHO1tYT=;;^kYa=BLnPVK>5=pTY45E6Sp)&3YMN0i4!ys)C1v4!%7k+o=Zg<>QfMZQCdGA04&xMqSq#o4wA^V ztx8^po-TT2c~^FB4GzOy$Q1#8W_loy@j+qC`QW34(WerYM89oQJEuISmCVvZ-*>Im zJNK$e(g}gCg?gP<+@I6b5H8k^j`)_H)fUw5RlXcW^soay^*<*mIWB`&KgHua8ISJz zTJQ`!_G<$aJY}c50R>NZKY>ckt7rYPNX!5{FvpfA-h~$&H2s3dsROi@r!3lltk8$L zYzj3zjdT_djWwre$qjVvq@f29wa?{Yc^C`YDfyjMs zn4A#ud&%Sy{a&ddHfs&i$==M_YYelllhG4Bc<&uWXFUes#*NVM#PvA|qVj5~>-QTJ z>S~9!5?<_F)wq+ zCWp$tiS4uy<}cD={+o2f zVva0*_)a=L-{bn-3Zh95@dKoTffLIk6~<$E^%5W*^DOiHkgzlqHb>6VMb0n{0O{Zj zD~%@s-6{RhrB1Wgva`1#NVM?I->K2z6a>N<_Bv`3gl)ANPL3sGulKLh#GJjtjt z#j2$)DI-w8`hT`k>SxPz^*A1Tjvu*puHdnNAaWf4rUog%vA;2IKf#5sgZEGvG0v$x zv$a-C!vbYs`}EU^5qZX$h%H8v$|e;Z6=QxKk2vO-jxnxyhf30#1Meu;*3#t2%QW~E z^$cBBc-;GyA7=!=a3)%EJ~bmdn1hiG^l|q5u{@I7;;QIgO$rHLbiM zAD}IU%ajK11aIz7Ac6BZxrftf^|d}2h(*f|Wh!(Jf{K!bzcwLa z)z?stXHQ1w{ovI{n}WnkxcQ7(4J4nnvuWmyaEGKIfzZEHsFyMKzCXp|yE5rN;<4~=8Y%z(tdZ{gT_eRI?L7I4M-0GF2>_Qr&W*mS zXZ|TUDq8RhwrGYBn15RVk1%bD%{Ju!Ejc1TT%Xz)J@H(HwES9EP=fs@Ugb$6~ zm?~Q69N7Bu2{K+G?G-jhj86inRL{f8X@qY%ElJo!E*5EsPr=-MIE7V?Nx_Chi6$A{ z)ErwPX04+#H!|MsBjU&}*+3Nduq)4-=CXGw*7H*>-QDtsI3k;-XI6P;ZAH227A5pJ zO|2hTW%3r7GeD6CjkO z(gu+6w8<3^0Shx2XBM9}&TdaY#Y#u=!s4g8LxGDvJsw>LPR*jllA_2)#P)T|nY+%Z z-TMxSih$X-Nc0ZPvYnNMoh!5F@m@USZLh&#I7v9-(Vlz*qyPX2r3+b3iBo25p(&JC z9#(CUSqDc2Jxa*hK34S<&V&l>wIzAKLpkAY$JRFACcD;x*}ch^qnN))kL*W$zx*7K z@AJi9^P>QOU-Pt0@4+aLf)5a^RR0#_$OG~tNx=F@dO%?& zBMA+hSJ2fb+VkPd{VkktScu;#ifoTB%)q0e^EJP<80_f*QkYmsnJyjaVSxd+KEtw1 zw+cP+mW~+iIZ0eKJ9Ey#;-w@2kX?2OTTGVCkx8E>x{}na`K|>C34K!N>+FaKlW=n#@PpdtK0IO zum97;K?|*%!2daDF_;EpAfN!zNeb%ZA%>geha z!=$Dn`;ldN6-S$~c~P*&pQu!}Mn%PpR$47gn}PFlv5r+z2*@SacE zH|!vRxbhhy&>nd3*>vWn+tf}_nW8488AbV~slIE){RbtB^pX3_FV`EJUs{jrm zKEU8qRUwb0y6SyuRvlWSjPPd;d9$Pt@!rLLrv30E5aa2fcD}u{G@zcy61uzqJ52cO z3UjM%Y^RHgsuKn70oijF*GAch1^IgQs`til8moI)#Zy(I3baj@OyO!yYZSCqKc=p%tgVJR2Zy&XOE$v02UGw1aYtBuN zDj%Tm#7!!2ON-91fOUKWL>uRWacHl#W5m2kq;VPAeFO6O`pDML z0r^w-`zMA7p#6Icc`im(hzLSnQx)6k+SI|_Cf5njkZYP!*;s&vi~%%6og&vEow9zU zmsHPEKgIFYl@If0p|$^hhBTT?L;W0(@8jP;Gvv*)e~%$={)ZXzyUF~FAQb2**hHk8r`h=Dk~4Kb71YJqM^J5gm|?+mFIUe&=iyyA{RJAonpWPp8%Pu z0G>GP(0XjKScskuC25-(ty9jMX`5UXdgPrq1(B|EkcVK#)1FJb`}f~8L@Lk61It}z zhgI<$mzyI^)rBfzE88_SeWh5lvB<1NhtB$d(>rlizCbs#1fyz6>b9W-R^W;D_)*Af zC!rh=-5ld_di*e9#J4{b_&FedihqA8hyTA7?j0%o-Ehy?wBhZ~0r@`u{Y!>yeCwCQ ze(RTf&k#KsWrNdr7zascsJOhZ8PbQFhtFW~D1Z!!BF6_6EV5^#=M#t@EuddA0qBZ1X!>q-})sQfPTp?EyGL+W#&?Y%(f^e-6kW&4GW2LzEC=B(Df)e(R@^ zr7$3)0s3j*Wk`_#w@Y2)A_m@E2o^9jwL4?+al_8d1)Q|6a0*<^MCYZ?2EgsQ^yqvw znc8A8(XS@cVM1%wWm1q6UZO>|ZX%BBI=9d)&7F6&bSe6qmjS=#xda4e6<^(Mq^Jp1 zL|(CJYU|QBlWA0;t3(N28^NlYrkPNI0l3`rq)mI;a?)$0LBk;>tU)cS)p*E*( zoBwk_{xVWUL;T+cNLQta_|F0P%SaUs<$oI>@9LRzeh$d@k?Nl{ghPyTC$PJ#{O}8q ze;!F8pf+sJ0_eH^p&>k;V3k(!eYPxo?XGZf%< zACFD~yArWt`;281VtYH~#Jr}|mVKwhgfQjXBzmV3xz5UJ9kafP>hDz+*b~ydsum?eN78qqT2#meXv(+V!QVtEH(>FpwYhJe!2YOMUdQ|Nt<(4~>u^lpN2=d5 zB;J5R74is#zKjH}KvB0E^R#Qa56o5Qir3;}?A-<1bBW8rN1>SbQGBx`6b*n82x zEdo?o3Uj*vU3qv(%jehM0Ll0U$nsx_YToenx5D+m2IMazRVV&8{~kj?|A!f}^LH77 zoktV)wYT=Q^z<)#Yk$bdKlIk9f?v;Q3GXUifFwFBfiIC5S4W|rf zle2i08dCtjSu=CTp+BsTqrh+9=kvaqqJhebDf>|`v|$EesRGp2D=_^&>+o(Bi_676 zZKwbraX1`99$XKH!ZiAE0SG7=T)#aDEHq^L?Lg2*z*qCA{Wi1``0kr|6#il!ufCZF z&tx7hH9hWX?@w($; 
zdaysV!LnB4{Xb|!#t*eYK@kNTl@D{$gR!rA1|jPEzI)x(kNWPa%1qwSr6se?nxlU_lfYj0$RO) z^kl?uQ~2W6pmr3M5WOK=->SYck+^m_!2Us`BjD7eErss5=zO+j5&ucCe4qpW{=wS1 z^f;|lOKOL_b}NgRrbgp93ZFf#sBOr=^Gb!b(7f8MH1T?(r&LRV{aVF4tt@NL%&o(# ztaMC|cS;T|uFs(X=U8c8efeRSD9rR>=X}Q6`EB2f1M?kM+60@o zw~lbS9n{>nkDe8%2l2aP_vTPV)-;MJ7&bN>#^V>WM{jah2fR)vh7e8vaCL+T{I7 zP$homs8bt$yH?rJ+ndxbK3)8XeVZrlAmbIhtc)nBH0`L0QY<=}SLlil1g}2G8`bX* zxP7@lJpJs-e0w_ZWyC3s?{L?qIYmn`ttn=E#w3h0P>{4vwepeEH%08;Re5Y@Z0*T} zw-raF@?}-2Mb?SY8L%W<6`Th~S64_jXXn!M$!@U^v`UFM*TrB#I>-!nd(Ma!5z$7k z$FE3=5a+H9Or^p%aGMa?Crbv)M)9BH)QNThE}eCb(@_^4)GyL8$hoVg`?(2GTRwh< zn<*ppJ+ABYogkVAC#@4N>uP&Qe@~q*W!Xjb$=Bo6u5fUTL0^-FrEWl-)N~#PBky6S z6jpK%;X?m_-N(1ZS|BN&&icF}5vwn-)HP-yEU7_h!&z=bPricIS83^pK-hSh zGozvl>n*%X5HBAx3~M~g=_mJtY_!@$60juTGRv2CIvv7o5%`QAGR(+)V z&A{`M1{q&zOLD~A{8=@Vk=t%lP*YWvPl826Xr8Z3{M(}2!(h=oS1H%hSUBc!HKVx- zD1hI_kIW_Uq>DOP8-KVHHlZ%4(rWNJn&%)3P?v3Dz!>zu>`t@VsxkP`PACLQ87{F0 zzW=c?Uq7r*Gnb7wQ4|T%4<$qF8uX&z3l`<@hKN1#lVy%lH9vm=8BE(@)k|V$ z6pwN%Lczo#x7KSBNG`;7@}+j-I@xZnxx7_m(E3r)IhO%Y0YR|`-=q_vpv0ctD8zC_ zA2I>h_tVIN?icu}-cP2zD$pQBD?r*AkcT{GFwmS)(f#P}n%)BL4PLI zj^T!R%+&H0|4F4{UrV7?^L@Chs524z z_sKpvgc>ognF_1#GZc}$^L?3x*nHBRAo}qnfIT;@YREVCrM9rc8)t)l16W(paRU;r z6k#h~fAtR_B61|SsQb#w<@nDl7aFylpO2cS)2~DqW>cgU>L^t*KljT4HXNj^jt3@8Q=@+%I@uK6O}fYX9LL4slazf; z;XF=8uc{(<{HfRWFS$%l%<)AWX4FZTVHzfpySVFtvP>8iVS`iAE%qI1gOmUIdQQ+=xqvH~om%A!zHsC3}VG*5%Xt|Ou zZ*D&3-lN25VScqds?Lm45BBMCf zx#NSN)>~O)dmtJigKU4DjPtk&6hPSKQyN(RsvjQ z5UWxrVG<1?BF=T+BRPdH@PC24>d%T0BLNOSY{oQfvq#)8gi6{!W1wG_8*FJukjFZ@ z#%lpf)*#AiG4U1#8KDAa(y2Z`>i`)ih~Wr+iQEF{_1Tsyx#ne61*W{X!vS z2z|IM6gYKdZT`scW#Z(6ZSzRd^G!TDmn&}xaDw5$$e7^Qf=H(xQ>1v1D`qL@mGL=6??3ku84Obx&1c+qM@!wV%srd!-cCmqy*6evj zFXVtKae0BrbBB^BChsw6W6&rZ2{|ve0!ZydyC!_G^@t}z4*5mcJnkhnk#ax4N(Twk znIeGK;F$IE#*ly&Q>0|8W+1%tXA1~p)k{QZ=9tDL5)Co*?XTN{{FKPPXFd|F>O0mZ z9fCFY5Mprl5zW%AoN&oP3fP_mc|OX)Mo__Sk)RuLw+#N{3?8Bs?BN1(OEvFDDq*}A zJh(XG%()PjH~i}ak&s0^WCd*3t&_GAp&c*Ia!7M*bVfx*c#x$0gr^2TkJ)qtl#J22 z+S&Ncdt@RT6v>+LBZC9%i!HMe9bS^x- zfejJr<|O1RK(qwq*AlzV!zMa@BL)B9Y8U>RWD-6bH8!Xo4Nz(4sf z5vGlusTCl`ls9y6_;yImKWsD7>6u#T>14w&)G*Yp0AYQ)@9@cp@rn6_s>~|_6}@d7 ztfOb30~|Zbp9wESB?O63$b=$mg8!PE_&p557-(+|f4?^YKRed{M{kGezw_-F-P|1g z*U(}98*fMN8#=1^feL|01QLc6xs$xnPOpkF2~r`04Sm9&_}^Hu{1BbfDg>Q9z%>7M z=d))BfO~wMXw$XO7c$W`wfZ`I>!G<6iNhZ2sjc%UuzQ6`$z0G*LjzxG&pe`-65zV< z@JwJX5_Ygg7c*?7V{OAU7IDZvG7}58l>{@~9aMGg;lbPc=6=tc_u=u&j&?_D2lnIT z_R(w+J-+wD=??vqhxe8C)4Rtnr$@6N_;@{TPJ00N@^JIM^3G~seR#O*xL+p>~*^lwf6Y%Wxo>N`}SfasM7oHOQW>*(hPdEZ&FHk)014Y3x^(-d&*1Yu;Cp4=HT00)C^x}`i}K1K^zS*T(qEn-GBUbmv)a{S(NTB z`3c}=8r4-<#i^eG`H>09dB_0So&OU=j5fI44Ga?zb({kN(@TN({J01_M48C4mN$ijS%N z+u{-hxRy+Y!NSKN$zbB8?eNxN~X&ZNf#OIP+y=4sz=%he#@nyIhA`HLA) zzis!gFX9|d4?cGM^|{s<0Z#)*+>~MslNxY3lC7HD4!H3Q50j3 zKQ%|dr=GE6tOo~J_`_!{fdl=Dnms3lr&eoB{?M-X_YBR)@B(xL3z-31M-5j8zx`P> zUhy;?zF7MDH|7&VW{SZQMYDy;0$n70P0~$@h!+Mka|BkBasebIb*jdhv*azCVI zsJG%%2_+TLaA0w}p+gqF?}s}^LLcs26szQ=D|>&IX3^#LEd0^-td(|wa-+3u(b{9< z7HgTeLLL%m@5KZkdfTKTPo?2*$jK3;tZa)WFGYm2x`RJsb+heb!*EIKiaHZ_3+!Tt z4|0=gS7OJus+uv9+lwKFcuDMLwUb^@)pLMuUVoHrin17CG*JSAVDdCVwIY}?R_X#r ziYGsEgc6-Q{?Wn(SrC1>B8V(oCK%IO8C0qg(r7zIEA3}S|kba_RNfoaBXp) zoB;x&Y<0n4mGT|qxZpES)e47x5F%R%HBjJVB*~hrp>^e69TPl6*=*kK4~4u3iiieA zM#O{g`j8SnlvGKXU>2xV`29#HoZ*J^RJY}{jw}twbifU8v#O&H7!3`Ro=3`krk!(W zx#JIeu0>v~Y;nzuyFBp>Cf%J>Iy*kqtx)r>6os?8ynz|ig>Vm{qeDfq)y})hRY^LH zIiP{r7u2ZBfkZKnETC{?&}v%#rDm=%@ugbYeza`JvZgv}X5x1fvE2x@!Sci!JQyO&UQ+Zm%7j z5v$ZCUl)0`ys^YGx^%YGKvD2Utg=tQgrK8hBDPOM@DvnGoEOau7ig?*Doi0B(34{V ziz0O;Q`_ST2^)y*l-I{WkMi4CuU2t4V;ktxXGD3AAyN)Z-_i%IhGQHB_sbP@^;Vs5 
zz@3_aXdG#@7}B~%$WDNh^X^Y2+9J-By}UfN}r_2;biQN zQ3y#2MU>J(X_Uu#jogKb*&@_$1r{vSe-vGvB#d4;K&W5oAKS}bwn%7T z0f+e^R)O<1kDQP>t&3Wc*v`_XgiV5TMzO`8N$%RtNSAgs;7`r`k>o4G~%Z|8RFx9{kCf$Tb~)_PddVo z!4Ze*?o@LbBI>Dv9q=SBOOlxZaYq$|UQGfVgmP74$4HfDX*#miB$>|chYKBMH&BZ# zvnt|=fvUH=&KadW@fu7(?r8d1Gis4gIJALYbPa|DG7DmayW*dbbNA$KK{wS5#L~O! zC*}Dc1+cF0C6VeUZ=g3z`i!@eOp&s~U1ywqie|=uXL1CF&5-g(B9!vXLm4HoV8cj-_@rVC@ggbh&!If31^V z+q3@w!KSeX3pJ|;Vr_Nx9Kvm)_m=QD{V?7Mi)8>q)!)1WYMuyp^7)%O>-;MIfKS-M zL2r}cBVnhwbmqoSyrS*z7}M%0)0d(c;qK-R2QbX#Io{$Pc`TXg#4*-9idr_2#nP+< z2w;mmW83kys?NW*!5|yEM=(RAitLMHa8Ror_w_e}SU(VNSq~ zqRA?h!^}}je_)%vsJ@O2QRgC~7CmmAZ{!wXN%z zwr07jOZ&*SEgeKW+#A(VJBD4-_^i4!o|lqE3$)d7~9R-#UN?OoKKaq1Bk95Qn>)yZi#BNc)Jc6ipmm zn%CM8S-C(iu7f!di+y&TXR~@4OHscpYS2YseCRd;jvhlFnhi%Qy1RyxCJ11TpO5U& z6AKFZpp9xnX!@aX2>p`GsVj%4OA>Vp&PYGSs2rLnRL-_3eNh|RM;RU4(=5`mQrdSb zpq3y9cis1A@*M}Vn+q7J;T-90x( zfQ{b>dIl$qo5sRFU`!{c{~dBxEb|+jXIojApw1NSwl>8G?uN`--KCjgoFOQzFvb!W z;$Q-wP{6tZ;tF&Qo^{D0Ny+y1#4^uqfa~%>(J-29l#y!?l%lA)Y?;uo3wqV$x(1b~bt!&OG?B^hE<;4&Rg-r;cGgYCdEMjLOsm=y20}Gy+lW@Ej=3E$+tK<2hlc*^T#D5zCL_`|rLS z6fjzvcP_qhNXSXD8@2xICKPK&q=3hHe2vsYuoVwVG#&I@4lS*!SmjJ$7?N6CVgRd? zR8Sesn>wrmAS45=x)?Q^uSwNlJ%PbnA})bW2ap6h8(ymhToY^S=n_IAcJ1W#22QRE z`HT3{kW8G5B|9mDTZW4#Hd^%5PzdHyc=Um2ccLTZ^16{r((~kPT*)wBlPu8k zaWR5THlwK&9;8%EBu7I30(FqziAC|={9*`z24$FTUZIV~m)f`glfae(0hPwGZu zBWY5=R;Nrd*(|BP5150wGq2hbaveKomffYOuQ`UiM~zlmk;BPEfc*q0kv4 zK}axzK$;1&$vTO6?qBN=7_O%435C?ccjn6So^@r9`+TTlm9-34(RsG1-D4sF^-BTQ zEK;3oad%H6pk~`D;PS5Fwn{^_tGdv>r7)A)&y3N%^Tg55I6uT9cX8Fbyu*^<^7zP- zPA|!Y;`OE)$M7>4)?2PjL^TOKeA;c2Nj>wF-bQl6;KkbXds_WWh^t`D(D8A5x)t9d3;wo(G$F~wH5wha&2GQwNEOLm={Zffw3_T4aQoYDfuJnRdK z+39NYQfJ-SUg+g%LbBc#2os-+!m)0SI4U8lYf2?xoUqyv2&0ZbIJa=Zt4nr8=p*hy zVkqhow(h;h;R{<022WjWjL~O#!%2r}2`4q`$GshCe#mpurvNuN>bDwc4$;tX3LL)l zLFbqz#P!4_v|eMGr)h_br6LWYt$fv!KpwZUl_ELrmC(&0r0jP0`Euad{ky%xeA$PB zbP`7EFPeIcQJzw7+Y>AL=k2eoKUJ;OE-B{p9l+6EUL5KxePIt%0eP-r>7;1F_*O7% zBj%R-_1==yhi}$L8<2DPfFIvL8{6+MEW5F_;8m=xpbieIpFvK+_HD$XBs zTgaN+rjN&^ZHTct1L7Tbq}_t+cyG8C$+bnohNq@?dncYRlU+qVYjq-DB)h9w+tjk{ zv`CcEI7RB`xU&j<@oIGPg*x=KO?COuErYzL7$%8baD2{cXEAvlN0x)#9Ld}(UN{H( zK58I)#<1HFlKeD={zR3f0Vyz}uHf;Vn&du=0YbFeCi%~0h+(SX)xa@~(0-vaooKbY zmii8GwHOP)DqNd^khJ|3dRAd7Y;kR*vJKua|)*{5RDyjzNQZe!W zw09<8HGTiTKPf6{RHhIkg6&@{nC%&OTl^l13&pq!Y|o2v-n;T*9VqU2NciQZAkGZ7 zLUsCrY7VJ4s><6~bA`X?6w7VxerD}iOT{Q2yKsih4}V$5t@2D@A@-PR{| zoEQ`Ju|0XqG)&aT;pEp>_IK{BvR26Ee2~B2;oX%og`k&t@($WYuVgEp2r|j33Hg+~ zB6gnU$p28oGSqr++>x_tGLpY)Mp-qu{~+tKk}2tSyMPx0sg1fuE=4k>LPHLXyk7`< zU35kFJxRH%9Zh&0Mos$`mKuD2Ui*0S%E%SrZ~c^q{fyfyMo!O&@G2`bsc`xRU5+zP z>0cUiQ!}*uaMyjMktqg&#pe!r_y!3w8U1>ERyrYPZ7xIV2J<3;MG`57YizG1UH^8x zjWsDpQS9Jp?Y4@CPE7)QN9|skDeV}Ju2A{nX4V+$m0?fXxLkL+6GzRMTzxwS+qj%y zO4E~~doi+YWyg^}i|_09d!@a-_V$jEj|Jz*wxK4a(UIjh#{)C68@u+`$+o|eEf^0R zQD!jvNS~8kKqOtFOLP0e-9G7D9QWEXt+=__$aroMG_ zpJuS^UcdJ8W+jH$lUv&A{gikoiP1c(q=utqr9xHiluOsQUl;9Sj$!9@EV*BAb0R2R z=}1HSiHM@+DGb0#p{zwjP zN446Ax(~i5D)J3_64cKr^R1Dd?h4rNcktAuW-I;&9*cg6`wzv&Sp57QaIq_N=eE6j zCaXP*W0fyABzanjwk9j2ddCHf?2yjR_STswbrst2CQP5F;d#9Dz=n(TuUmzU;%`M{ zPl)iGcqBSyB{qD)h_xx{bAM;MVMX934{53D&jp#+!s;B-R+Q)SOWV38ueV$oasFm( zw1I-onw`rg^Io(@m2BiZ&!xmDsubrCzD_G^eDcsX+g}&V0$$&Obfd#B~?bUFyHkI`b(gP4$xrJ+8ZU{+tueMUsjm&FS-TTicmK$djij+(n-&6mjb|y!Rl-d+K zx}fE%JY!4~t7683&FWf9bRPW>vp>|CPF%-0l9S{f*0oGQY)uRIxTZwHOH1Pzflmh3 z{!GU;D@hmEl-e4rU2TbV=ezf|Z_{@5F};1dq*DLSB4%Z&ZMz?FWp*z;kYupx>biWn zQf{xqUkm87EiyRF^`d^X|3g0}rYRV4;S1Bn;ep}+S)t4B`$$&J@mzjewZih2?bzE^ zdUB6>K12QeDXFV7dx;KWQ>|@DHg6tENnR((EpD^lvd>sX^uyA4Hx|WEO;J5F>x9c$ z`VrOwX2wZ!wRtj$CaGePOU|%wJx_QhnQpRPw(5zrd$(j7r}2Pjk3m7GbuM9TeH8zU 
zZcqI_{yf`qwyplTxBEI|LeqJ|$_>@TlvCQC`X&cmEyz5{V`|0`w9q6*!Ef(0vkh-` z&<@AawK|0^0l%JoHLR7>+*`8j(9vb2m<21e2cs?roRlK;Vl^xGcL|Fv zY)TOYfh{3JPu3D81s5bXFmG+jeYcV$CnjOTvDl8MJ65($g(cN2sqwAOu=Nl0CB2YK zNhw>(RmQlci)Ub|w{~B4j|@wA{DFXwPut%S5*I(pv}T$p($!aKY(F>UvDv!3`)<(Z zx{=2@T2@JoPxpz9B`IqT>JL2F?Z3R5$Sh%9wOEOz;_RLES4=vUjH_jhveZ=;L^LxB&9!FZllusl(GlY)iWSjIVS=YI6 zMC|+|ZfH4l=9_uUL}_0w+Y;4R<4x?vJi=GrP4exJveNc<{L|^*soWKOH%`*|#?Y_f zmo6l=52{7yJyuV7wC9~%m|j|w;?*zSzIAXy@VhssPh~`#bWNmHLSyWeKDyp0!oCRU+QqY10-F3>H5r zO!8%l-tj@^hZ39an-$uk9}-P$uGpq|J{LF8&*di6>RoAA?4_M>F+8L>Ke%h%(Gs`G{2IsXWkdy1b`5PbNSN%=4>^63{adm>Z^hkHk`ap;a$n}( z(hBW77|U=eGIDpj%aYEJ^gXrOPn1i~js{+MpC?Ek4xW~eFWsm?A-G}D(XChHTqQHYD?^pKzEyK89+?C+%os;vO^am?+3FWX zWeRX;>X_D@)Kw2{G4byIZfGApwIx_lK(CfXK+vN{SNvwfdc!SIEz2gf!Y{EMw*JV+ zND`=xemfOlYBjj-D|4Ztn?7B}r7F`*n;tP^fxv9ZghGoDgr5yr9x zFRCATHQQuwO%*l!vBys!un(|MF(v?Tb?R=5|IFE>E{@ zC#+c!Avf(Wa{UB%4ZlzHk`0T+$|t|X^KUtoaq7qD_}P&^;E+e!mE0N=fM(S z{rpPs)y~CM-`?HY&f48tTF=_X-bu&#pv&w7;^g=zU`Hdj)+yqf$_=lLxa#&bRV4U_ zNLs#HusO(s7nFUjY$>$#R9f=1=c)Uz#TPaodnQF(==Ykg=1`Ky*tUnF1+ffgbhPvQ zqONuG%3FMVJXIHOB|YFGwNxS@^^uu)d&IEN#}20z?#{vTJ`Ijn+AcqMQK1^_I}jGf zc;fz^YPF=YbcLVwat=J3>KHX?>JS zWR3lwHIugX+nB_{+efDB{KtH!_n1Bw4LOuyS>z-Lingd69rS*<##HIk!kvP;dG`I1 zlI?7Q=4Y;o9`9%AU+{+Uqd-<*YC=+M=vS8SaRV$(m%PKx>X~IF16`zEg32vNhOcz~ zvvD++L*9yBcZ1Q_y)qM*1bKx^SBd;m+GGDz~EfQh*IO4>&b1sX%^%t5> zofk3-FnHmtk>=qNBcT60txbthRe(3Dt7P-n7v`I z%WS51enJl~J^c0X5lOH`#0o40=Rha1^|53BKZhypHeOD`iqZL<=g(G-K6~H!@aDG zf!Z3bt65TZ-P8Z}~1GtukiEA)!9-R&wmErNbp9j!{Lv*$DR z(;O735Gnlf-TJ2R@zue?M@%M8@zi+q&#*7t^7~8Xp4;}fYKK&|IP<=3W#gPs|5j#7 zV)HlV+fjRAbo!rgUGE|X;zake*x{jj54w3)CZyX3baoXCKV&Trb}4gi;@1BVEZgeounjBCQO6(I&FH)>_?8@;>mrdFF zQ+KS&=s?oZ<5D#}jccctbtewIymo3+Ob-i47usQB>XV*25qxLjNbFSreiM%TdiH-P^E zR&t@Ya{TKYYxKWS_WayS(x0sd08VlKBQUtRLt&YbquJ=`pglKRkEA-U>$uXv?)j?a zr;qm0i=jb`4_`lRda5?QW$@6g=a#p&^Cxc<;k8rQ+00}v@#4(jQP)hXB4I6=jqYuv z$KFevHS!u(V7#6f!O8BzP*2i`+!%^dFg%2~Oc^tadvhRj~@n`d4jr=U4Ro}GA zfvfFG$Q3=5(neQaN#&HyxeZQMv^+HfOtRo@2!RP^t{cXmIp+v}M9am(5HvEfc?fb8nXMnN#fc7ZW=c0NFPNrCw{ zx#A~MFbaF2hNAtE00sh~3S7lbxeB}sWJZSS*}|bVE-pu4Kb7YqX~Gr!R8Da5>>78< zONtG-nEZp$2&fTOAEkvEfP5A3X6(p4ZOYB83w zk{e1v4*S7U7=qIWt$`gKG?eeaG>9$82{~AbaVW2jC!mM|&rmChT?3^c$I4(S>FlES zT%i;dP`C_=cu#`yQkndiAvH~0fO|2+h8cIQS ztYazp?pNL)29zZINHluxQq3Ua;#mU8THrOX~cc?BXpdnbwV z3ekX4z=#HEdXg4tdwy0D2o?eK7!1dDI0l*BVyzK*-)vH_g+SmFqr+gfG0?%HBnwU7 zy95@`L2%@}k*lh_0-{!U0UQ+xNLURY_+S^Mbe?ooyw5Quo3BK1?KfpcW2PqxvWe)JO9@~;D0C}s_2 zArwf6kd%S;1@Q`yvjFoPbc8wY68~5E#)mf?!cAWWNMe2yGH(MYU^3)PUn# z=`f*_%R$>jsUL*(w3_GV;YcvDtXPK)zDX~dfTR!9%Y58dul)k9bq1px55>jc9ot7V z!Pdpe#qBs0;nex;&Kod9GdN<6*fKzcn5~Hfbp*Z)y(u&SkpU&(%V7M4CXi)75x%+J zd!8{92)Dshhq@stf&1N{gPO}XboD!Nq zb_NuYcl)AOI&j1ZJ8Vp!C{rP3WkBaB_%hCJM-vbkPy)UTNNb(F8;Wlz=befCZXBmH|cVKhR(@TQ2VRY>!3N*Th>Q2yALX zV``VWwQnOB?6g|3&iHYF%8*%SK<9e+&an1D6A))W3HZ+73qTXd&VVAEpRYK(nu$QD zFvmJWFA(YsXl86UD8QizIxPb*Nu}CK`e0fFsaCmQh(@T!u!Fg}@mn96!BK1xlQ0lmy?VV$23a z7CJ4!S2FqljUX$5BJh>u*P#(}N}vq2=-X=IK#4YulDG9#3`7ZZW`M6mwHb{dD}f^L zm3)7RM$9RJGCG)7+|&k2bZL}`wox$yt+fgQX8Oq69iP<167Vl9bAkd;6Y_)1pGq7idSpbV)-W~bR+*pNoaE;%X&q69j) z;w#BhLnFvapa^^=4jO30oDwMG$Ixdvb!a zS5j|)Mv#@j8P{i&YxIB;6PlPz8d5P3CGf!&b^n1{iKiJFK~@4q;46_kfJV$IfifiC zCHdF^CE+wmY)MoML$S4P8NL?FRE(ersr&03J zg^GbFfexL?6DXrKY$kPf2Dyhui3%uth4KcX1Uk6lE6IsK zBgjgi2z(_rv1r7c5-6kRoz>)4pu~no$>}&M2BHKyxZ*2eN<|~cN}vdQCAAr7#GDc+ z!|upurP=JFRWfm&ih(GB4zBo0%&wvlWF=4pzLI76XvCZnC}U<-VXPKV;!P71?E)$W zq69j);wz~tLnFvapa^^=-gnW6IVDiWDPP9rJAe{f8YOx6s2GS6=-`U4WL-TPK~@4q z;4A5EKqKaqKpA2=t%@o@i8+murBA6Ch!W`Fim&8UD;hyo0!82}QR_q_=9EAghkF}G 
z6`(h&oL(ag?2RgDj$S)U^fm(zbbgLw?*_EJrLtt!ALO&e=RZ@hok1fIf4~v={@^Dt zp!;EDRDv@ee7Q}h2>kJj=Ej{N9m2r+1Dfe8hpUH@I0*z9Kde9Q(NkH1_yao7^=Zzy0uSt6WtRFn3qXw28`wI{W78EG|t8m%u^WN(qYSch|x4!@pwNPNq z&9%XBV9D{n0Fen76zG`C?<)$mkk{icKxEzn1=6*?dknWQ*z+$yq-%!)vvhB%z=5An z`~`^g1yJByI=@^v(8%jAK%_B2fm_o*6wmrF4@6m*Di6!O{{kf65{42<_W9-PfVi55 zDB$}yBGQdQiHpZlv*1KGzrPXjb6e6c`Wl>AaTs;`vkydw|1+YJ`-i!|;)@R8eG}LMfbk$WV3iH(bu0)0(yo@kj|pdk zx4`UT30l~xU>q2}WfxEWGYN;qhv2~QBhkWgTo~@R%+SC14Ik*{vK^_5L@V`r7>;%L zQLTRa?I>JoMK}%&zrQkBV%!LaHALXR@cSyOWshBE*Z7El^q|hSPOyFiWH3D6@TI=5 z>gI)8_bLjyq)z!%tvrwQCAZDFu&)pK#bx(G=Q_Gk7%o){3pOOrh;R zi$}xv8@5O>g?2a`u8WfYom?qAHq z4=%Rs7X@f=9RZ^Eg+hveVf}~RY$Qs7fp<4c5FTcm8qKz~*-cRMhia702a0U;!a}hd z4k^Yyn->(>BZY-x%O+5uR%M8#R7Di9S7Q&?pj_?wk^%py4J?NGhzbe>tPpKQFw_TD zU@xaW8~}a!rgqfjb4LYWug30sqg;RC^?YK5Y=g!^v3r;)(DiQ+D0P;SKTV?l4ZlU< z+gtW_KK3HpbFuc~=axp-e4xlq+xbJE%?pa`mz_V<>fL-SMRvXZ4LZBSkMbIt7ZiLJ zfb;^`J*O0^U-x`W1q=8fRCI?$Z7X}vyr|I51wX4Y=S4*}JYwC7Z>m`DyiA35S=jBE f6t~`)A9c>wgQ=an*cHMVoK4o0hiTzm5m)NIn5ooK)f2R&Tgizn$G6t z>W+@CAORHmx{9&0t%8GvBT&ZSS93EEaGSZPD6fjStFakSAP5HIhof)Rwl?=LcfRCo zZei|h?qGUY1&kkYcu7>0n84h@4BeQ+YEQHZ-5OpgM>hvo5I?VywV8_^2>lcY#((%Q z2*HQ`K%#2_p^iQT4$-ND1b!T$Z_x++T+!i`f+ct&Gj> zK(YXADQ9zIS8GQHX=7J&PH90H1crb@pb!Y^@COV*aX=s(z-=myX8+)hOU{mFZl>nW zoZk;HR4}YDOfj4>95Gyg9}5guj1mkGh75)$aMm2d4t-<+TyX}@{_$QA1{?#4!FSv^ zpyB_%B7e5fp_Pv$GM=t-8m@qufbVh|ND%U{ z2Y_z^Ac4bgfKSLeI=BKCWuc%W=aGehpy+-At^wvT)i8I};|1K5SJT|nl~=(Ys7~_e zL+a>5;jjyNB^(?aT>&qELJrFXL67)gLQ*(7WNdS?iv55 z@^fDt7JS?nvetI4=FYsbc7Uy=%}pK6%z2f~9V}g~KnQ*m-{Dg(u7KCt6BBr*4(X3O zPqdTWw4^$~B=wpR>^~#2t&po;&M8xF+Hp_%xoYkP=yl9X={)=BidY}bM(AsJpuQ2q z42H_Zg*cllVY6A=sda9B3q;AU`%V zk4hcYu9Io@`OCt3Kg~JBl@7Ulwe&adwzk$U@9~VXH`X>jX}X!F+?H}J<>QqaOzb-Hip8&Qr^wxSGs(nzB)iCpmJf~>@_!Xl(#1FLyNoE*~-q*sZXN9k!;&HtR9`N z5%6>K!B$~7kjTE-@}cTyn zs4s6<+f$!}(2`u9VW09S1^ZDnrpm(LxYv8C2U&2>ui_LHYJl)PpNgdI1yuQNDzerll}K z@5VP2qYoT%6fqndL-}aeP4)JQGJG$WG?mP_3Z{feiA+0hTN7rgaC@Jci{s~f_(if?3r=?( zM~Re?BdKny)XUm%vm2J`Fl{KE5M_4IaFAxe?q;PT$?kod03rg$0Ua$Jl*jAsvX4e0 z7xYzom-loN*!}hU0(Yu6mn|c?-<;9z!d*2*CidHndgn2?3OXiNewuRFd$FA>17fkG zLM7-6IjXL&(tYFBy>Lm-Ak?v|J%q|Y+=RnHX++)gE1X$2n-J%;Hy1R-!`qmvlw5t# z|J9;D9(Fi|gxak)-ESAMR}?-k$Tr_`l!uZG@AtCV1>)I>>*s7X;O?is>OX6-A1k(( z-yYXaEu2>rah~x}GXmw6qnLefu=cG%?)=g_m)y(vkq>Luk(Ib3S0BML!u-e~4|s@y z3CRA5ka7*Ds6PEno9G;gk2MB1N!sGTXb<*m7GAvn!RqV%)+Ur<-o3IgzhoD?8`E(Q z6WGYV2@=Z~j6fRhLzK!2!`mfH)0@1tS|wX?nW)W8^WV3ZBS=m7Sy=`?hn-$7!VRr= zvWQkpySg%YUSY|{E6VQds!|;R>k?9rOY&Uw$mcoCHixSrwW}kUgM!nm&-dQUo2E6@ zC27sFZ9Nixq#t|^s{yiGK=@W-X=$Gzsvv*v#}|~bc7^(S1t2CrNubn9MS6V4KM7%dC(|FqN zShv53KaFJ=sx_3YI7gPq8mGyuT>h$3ckkYs)c^!ej_JZ|V|NE`T+u&udX;_D6>4jo z!)#RNUdK5Xn5g_{m|tt1De25euBqu}_xtfV{%#p&o}SaI@UJVoQ?6P}weFSA-`skh zsQTbqKE{`EdXu?liFcaw>00nR`(@zYgSBSBjw)FPUL!I%?Ap3-*C<6dhp~m z0#ZY2!w14*s;1l4*HGu|%?v~u3uK~_zo{`X=3>x{O$(h{((v*r`&#s3L$@aW(k*3u zuF5a-MYB&qc_fg@2H__L_KO5j+awkkni;D@ zm&JRYv1&0=Y z3D{jfuiAe)s396r{-$bfi@lZfB>Ccfr&z78mR)Jy`j8gr?Fq$2PnRmpE$ux-a5Jyr z%+|Ps$j#T@P>(cg;eBv`Pvu$~hKA)qJY^WHm z9vI#4!M4|o>!L5EaFdZV7n`?*@}*5$_EbcOLCf12vtqSI^6ra=|@Udg!Z z*c!;L7t$R|F*=ag^08xOC@V+wHeN|M$1skZ2q9@|D6|pA9hm(+2(Pyu5nxDjc8mFf zeUm|>0-O&ol`-}7vsb!%K|**n_uej_-rNujF`f2+KN=V5%^z8`;`vU;Xl9_=_?7C- zo*zf&@Z5yII&Rvo6`^n3m#;|cul06uDzuKWE?aN5c}v{slqLs>;+$Wq2wda5T>tzY zyqOY=J($21nT52xHjog(Dz;iL$M}%JJ<`F^2WDKft3oBc49h{j(;9ElE=G1G3Cb{8 zQiZiA6Ie=;7|qm03zGncFT3e>R!|E1uC>I(oo)gPNaz~s zV_W-3De}Cw6%BWd(Dn6dxx&gsE*YoS#UApZxZ=W!!(OQu9w^}xuxTvHD;8Eh&PF_x zW0r{Bl=asWm5K^}Gk|yPLHbe*NoL0vR^eH7jB~C1Tm%$d<9%f7DKuWByA4&uXKs|Y zC$O=KnQn~78U=QXbA^tZb8ucHo5(TG=|=5%H6#=@6B|t;DfL)nm`QzmVEmc}w+8Kl 
zb1kT~Gv2E~yfW{ocS(I-;x_WrotK@X2&H$g^zpIW=%MEbB4NQLZJURe2Pjc|r!blh zOXZ8A^?#xAf**pYr4Hg+?EPM!R(YRswCYiK#J<;C#z0s?7_kfUh`?K4{gih^tRzMi z5(3rQpHhhNF(2ErA`hyUCqv}14m{UY{E~N9HuBho@DXPqpuvsA>De@cs@sJbB%5&| z0cPu5q>pcKWRI4!IM&n9OGO{(msXAOw2%+KzO=s7VrmdH!42~Z&)A&#F=u)x>?M=}>CA&e7T_NghQNF?-W#{HCczOl zo*7|XZj@V+Ty9F)WK4M#;*SZ~=`|pPsM0ef0im+E!pi2MPE^W+#ohc%r^BsqnmA2` z1?H4SE_+HT$!!Ri=`Wm_=O;Gv0rlHt~UA<-W}sO(~D6Y zNwwHqSLuDAZd2ls1SVNn3B#QnGhmwe+vL78w&}Dqi^k&0Jc`~#PL8uCC>NP&#vzA2 zkpx0FurQCz>b^nUrG&P5_01`dg@D%2l)X1_Y38~>oaYlrpH$8<1hld&kreLWddrhi zI7j=a8HwKUp;;^xRbG?%e6wjLG|Vi3H|x_495poplK|eLPrEp3sXY9={IBOPkY1wt zh!pi-VOR{6RL+sv7Qx)Ypf}6lAcbbq%x>S5RB_1XeB&@o($*_noul#6~CpQhFAy1uV(MwLbbCl_R15M|k z)CPzKe_KR6FQ4W*_w^X3X*`o9qU(BG^Q=8Nv$$BMV>v>$k8h=`w6)>4oAlnBe^Mz< zW0+! zyOKd@-O|v~_n7QygOSA7r!n|;9?d5rrXF?NE3S(@!?>uRtQ;;ADvIgMEwi1Ht}L+4 z2&|p`pU+%~b+=FkDVuCwKJTVNiM?sY#x99vUHkmo=@0md6UmEfo6Y$C&Q(#B8U>g+ z1`I^Y1++~X{x|#uVaCJh z7v+I7jJ|~7QdcUN7)>Gto!=4YMUrezfS8^z%RPAJA!iPI6&vKq8AiW+dZhu&E~?9|nC>l*9XW%g7zO9vz4I=vSwT0{CIBZjiu8MF`~5d*%`fvQ zEc#~P%qa7<4?(k^cY<2*>JptrG7*MdA?c{+bEau3?=@fYKB#eWern6i*C$)+=1>&E z7I`*guv9)a(TXgPE+s3FF%~hhrGa`I)zhJ3wjTM8(}G2`s+%EBMTY4s{kdW$+wPS4 zY|C7%XRb*Dxwh)lAk&NQd#Fdr7X5b*!Xux>=K6?0f~|+Y^km#+3ikEgj{D3)2f^}) z@7j7sp7#b)#Tv?Hj}d%IPiev12>- zv>?i)l;d@}%-N)#&6w!9!|ru9tf&UZKag(XhRbzbyeA5Udjt>HMu*v1=5Uu|=d%b5h*^sTtq#Ne_PXPS37HUG09uox#ihi;!)_N1}I zeQc=je$1;{au%ftxK9d|PyD56`Ff^Nr>7iSDv*|r2c5T)QXyrg-_+<*U^#pw^*3$< z6-Rv(`(9wHy?a~pn{D4ERa)AZ{g4{v^DQnFcyCfEaBpWEP=<0zd9q%gR;_1Bw7!8i zr?%G8s^>y_^<`X8BO_Hx5uA$u!=mC4Dh{~YX|u%U_S%lvBEw<3%A0X%>0{cW6kFb< zXQo0RIjo{zGh*b*`JZhOF9hAnTq$utRk>Vweo0}Gq2;@~SS3DRQY7yOG$u~(0D9&X zv0X1sa@^ZxQp9}HqrqQ37qAi$89%t%!(txohi>iN@_ZDZQ~}lUmChIlgmt%NflG?h zkm~;-%89Y6n18*Q;K+yUXXng`~uLr-dPv$g*$filUqcGjd@c3 zIN-F0@YX6|@TAdhZLdQ2(ba^YTZ}7aH+#-}DcpBx0xG}7jLpdR$}2T8(XhLK0Pp z`YcU{&jFTwjQTJ-!h9STI}SDNflmIM`wFuJ6=;`5cKCz++i^A z#6yQ+Os$~XWaJ8K6^-YeD) z5)Llbza1amC2j6v>TK=g>gWtY$1Hhet({$5rL2sdL2wuljsNi-ib4R7o4HyY2GLRI zJAZEC;fX&q@%N(9fmn3=j@typ5-nX0Yfu0v0BchT2TMD15QJC4#q==ND*)jG!j(tg z!7wNUU<#b%&8;o1TtO%pKd*$lCAuvj1Of`M0G>w&FeH)>UD1`JI}s2Fug;IBV0;iD zWNCb81rQt##68XJwSlM6w@aeqt6<=1&|zIbv=#z{T4DSrqxXlA+!F}@tmmX8PWBu+ zV0`@4afc~eJJ|B7o141ofnZP+4;%rsNPq`|0t}1bfrH=(B#(dq5aX5903y0q4x_TX zvLFPn#BqFA!t{_H0HU{A>I#QHoK~)`PA-DHyZ{}-V-3WG?RZQb?Ria)Ld3it9v-}I zF1+Rrhu3+X(4>H?3ojIo;O9f|qVETLnmamx%>nc;@hH@PS%72b0{|`(2f&BTE6T^OY zVo51!85tRFDZooioUQ+9UxpsBN&ky8!=S%AGe7#ypPd=~>~XK5ht|n~^1C)Svh%9Es!shQ|*-g`-eB5P`qVQ3V9xaOBU9>H&obK%9Q^ zQyApfQQ=_J?=JdBmxP@F{WIP4C!qOYa2|l;`vIB{jyN2ne|>ZU&Dk87 z{DJ>|AEg3t0Y3hp!CN}onSqfoIKSOb27p6={yw$=3=I914d5qi@H5TxCpJLx!+3z% z{)Y{a0&pIHulnmY_}6BrL+tQBU|v0|NK<_$8mt>?2h*&Xv*?wAhYtbb1Ll7OFxL5a1o#2oore$kSBCmu z?0Ogibz%&lxpXYrSYdd#yXD|W|MgENZw^bho1{T2Mjp-lz9(qF9fR#rZf+>Y3W3uj_cWgUpxX z*}*G7KS)1Y_K5{@IxOy@P`SB`oGSJ$WhEGeZiS5CNMx6zvewq-$Ctw|zo@#fVdEC2 zO(o4;5z*SxIyXdZ8TmZ_LHf;`Ul(qk@9TU1d|zq(^?G!?ez;^(uN7J9N);7PeXr;G zcR)M%NsP=fGP0LRbZc&_XPs=~;S>L9*q5g1CU&y5OHq<|R?3oZdiA{bh5+qelZfcp z=e|xJg}C&bkA9`O44wlFnXXG&G-F?T^YWS?VTN+!b>eC*ECnr@ZkM{;uF^2ZsikeT zHf3N>6pELOP1sWDM|={&pyX;OldagVC%~OJr>hWx#T(9Z8r641R_`(esJ4M4xM^xLhzQLLYw-Q_% zF|gts?kvyIJsx|nxWnVMzCv|1DCi)rEPC^FjnY-&T}*_y24<=98}8?=^Vwn#u8A6? 
z-Hv@f*L`1+!|}?Ow?p0c#k`uFIe2GJF_f$YdX@$J0tUe}e3{BX;}9;x0JBqI|QvYFtY0)?`uY&ks`92bR|H}>W+ulYae&frZvzoq_1Pn5rF5DeS@IKA)heDOB+MFvr-;EY24rLdbXXQ#&f}2ATMEk zQBbCo-@RY==_z4r6-yS4BGdr^i67`}S7C3D$(to3h?yVr_cB<~o5u3kSyjUC`$?~y z-YD#RpQSk{Tx3XWyqcY*wEBW=g)LuXs;T%RjS)`rilse$-Dx4j+yy7S7wOtdy36+MqO7N1Nqjq#v^oS8++td;K>cwq$C*O*3 z)UUSohFiKxp+3v-zzq$fgspUwi&n88208O&8WKJ;5CtQHdsRXVR|-}{*`oFvaPL9& zxcR9hE#JN3@((oN{vI4H!MUa%nLs)_f>U4p*zW-UHrA%+?6*?$1F3o~eSK(r<#|c^ zhKZ3EMs|Z%7~{!4pXdE2VJdOgm>IaCEyOsD=WOoKjq@>UL*3n}H$FQ;AF1=VlRhxA z3ZPw58@@g`hevp>?aEr1@VYgzkSRi570Pz)EG5yD!>v+q)Fd>M00h-ezK(dnk< zC(e-An@ah+cbIqIchgifO|C*s`co12mnB=Hg2^=rVVe63!kKFu8#pBRhQnu^-`PnO zn6vjP+!^(=>$qn5pfv7f8F7+64m%6?d~?a`IdPmoM$ct-3^^AM&&|)IiK#w9 zXKkKhRo-ALyVCt^qlk#*cC1)!0PdMm+{k4oVk>GB(BsfDzo_#>@A=e4c9pT!H|H+&H#p0&OF8X{;LG%VLcXNONb%1fMb zJNh2fzXR->Fs#v}dnX+pjiIvjfO`RHvB!w7nk_o)rba!&gOiQ+U z+UAuUMgA0f<`Qdc8Y|?Dtgi~!Sqb0WzGpTO&(hO*!EZ1}6*rSL@5UOpV&S{|M&fLv zY`gpf4hN)3Sik?RrbO8Lrxppz4--rv&)LbpJquzPjn&`J(+O-bNhsh4d#aZl6+k`(_a3&hrc67pOC^uw1VNta!M9 z+oQrX@6UL3UKh;WE~AbQT3i&QijVa%;O>Xc63nL7`P^Y6Z{3K~rgB2I_AHf&U|$;_ zZa8)DE{_QJc{c2OK}l5EW-gV;2HmrC2{o*TDB5-w719JZvaDC7-)@sHa7XFB>nC{g zLcT_jI4NcqHBF_I$(40pu`EH3ui)Z&>)@qKkDm0r=mbWd&xs_J4eKpfvNCYmbkPE^ zO8KD7ytfY4(7=Uc5_*~mu~#tqZZrCFn-A9|opCQFwLi$PgfcoIUd!NqAEy_NCq=Zh zB3c>*Ji}7$;dl`%k2_-0&55Q9f>a)y}!x zVfdcRL(7rUuwIJNrLXel$xpw&6Wxb8gT7TesyTfO-kSug-8v;4n!>)b(cx=b84tH& zID_-TC$SYq$9gUS=OaO=q@kZIcjdCuqV*NSl=4)AaZUKu>}Z~T;l@aj25nz@sf>-8Aee1Ixl0)%cf(=*oZP`Wr7f{AvN#w7gX~K^|@`OZB zIH@x)NMk|&dSbnRK)NX{abQgQKEc>Cr|>@Uv+hj+^L4Te?|fg}#6`GoJ-otqQMlZy zH6YfLYl4`tkCc9zjfgq6fq}aBi@lCd)nn*qRUQ86BL5FQ0dt=s%?bGP@atoJmW z8HRn`J;+x)g%sRmUR~$0Z`HJWa<7mew9<$n$7Bi5zfB;kzsozUNr<&Cuwifqms+j+ zwJ0bRbHwM&#AC9~&zw`ZsyJ%#q7ft-3Y||+-F&vUJd0EOgrxSshuw)=@y&Ma3)qX? zJ{v+}QLV5`*^kPQ}WkuJrHE z6uMC)S!xcSrX8_(l#H`?9{J3x-78|$K8r6l@@`tRDU183v#O-N$}6{M95K%!*92FE zG|w|P`1#)qxkYBfpt>n@Q9K#<8)gk7u55<=Gve?U=Vv~h`dD-8p%A8J)f?Oig+PVH zsb?x;?;P=ZJ1hEg`tv+ex^CS_)nHXM<)fljc=Q?07|9&+CWD?IRU; z`Lsgh+Wpdn8QUS-$b;nX97C?}!rv@%2`+4WR7m4GfAdZu*N0FWv4e`221JOO{T06p zf+c)N8B-C@;vJ0v#P~F&Exk+8b3y++Z-y{Z)8GSRk2X2ga{DuUkf2Hz+OWyg!l_Vq?R*orT6fUW;lM6@+vw#Nct?)SRE3RNY-0cs~%ZMv#^e;Hu*jKe@KvR-C5Lm)AhC-3lY;^Ebg+D$`GifhVA=ln9A%ifpWCiu)b@twl zLxwD<%)i6K1<;DlHO5`|c)2)x?CzM%=?~(5CSr^`&8JiZ`B$AxyCz*ZCF7T#MKW3q zV4`mKlUaqiI+-%wjmna$a#18|?UJrGqDhdWD!N!rj*7h*)H1*xep9#b+V*-#(FLu& z)C|EHMyf6n!^KAJb;9>G59O6I;ke(gNz{j!t5HIc1x6sB8?~A^=i1!wyfBF`gj^mR z%?NnyYVdKO_RMqlprdmuz`2mO`rsjq3i)zIBYEXv3_HbMY+Co2M_G#*l+K?1AiSWE z2K+4yp&s-7%9Y@uxsg*&2P$VV#pe3GqHBb)Hhm&2X|Wcb2#9)*?Lup8ZAK)b!ffT6 zvh-MlTHKQS{oVwSb8;C*N*kwaOF?yPCLcV15}!ly<&CVmpGZuQy=VlRH9MDWwua=g zfo;B20s%;!ke)M;ndDy(vv7%K)_#l$$bYkx88-omnLC@u(vUNgR4jGg zRU`)UI?G4OOgt*Su~i0!u#JnO>CjhgbSec$-stc_msWsl ztRcBR@HlGutA1sIAK~>M6#7wo{jbxB|3^%4f>c9C{f8H&K|fvnS?}@(k>Ih*wOpMhljktX-!8t1!)yyC*I>`@=9L; zUWo=&cECjqFBd>lclcsH2##(ST5^DLi#>4kF^VvXrpC59p<@sG;=m5A67*3{h2+!+wpBH@q|eU3PGdMFrJ zdH?q%7IuQXMe{ws0DZj6IAX8?wGu#Q|LsKHziXhMU~kbB*DsJCD^!nYYe2XJ$W{Ml z=WP4I6T=qHFj^2iNiE6^Vj(Qg9l5~BHhOh*@N2-m*q6d3#Q^KZ29=3h3cKg1P_3i!a zE%NH^`!+U?MeTR5CUsrCVykttUQ~GJwdK(2Z1an-krHn|KTnKp#VgHfv#5FK- zpH?nu2itHNK2+tx=}U^=dr>sv`_^Lj)>x_jc1W}I@YeT_Um2_<4LiGriXTo*7`fr^ zaB$CK^|lWd7ftP*K3&d~SzTRTt;I`1`rX@geRb}!fVbYh!};;-@nC`aI)lgAYJ@jj ziD`74f(?t_hnZ6GOt&5^fTvs!Ql3v2p_;^mJ-=J%kX19tnAzH7-(gQX^0UKv#mzP0v7SijX1;eTZ^1|pz3vFwM)L0m%c5MWC-tJJjLxd z+5Sc%TE3=V|Gm+_Qg>e3a77bOlm0Rxim|}89%-bWUq*s#Zq#X1%Q!RRM@?^JYFRp! 
z@j!g-iWqB$|DxE$(-6bk<3)rQ3;ZIg*ZC+#eMrTY2zb=iIt(-NhE+NRy-o9k(jxfYo=fvbU%6E&DUZNT_@H?D;JbRq&$WF7 zV>Lnp!=rf);}Z5#`cK-=l?w)#%Ca@JR4mO=ZPMJ>w1150%faSpzvD|MHP{{Yy!>M! z?Z<{7L%#9{!8aZ@UCcS1?2(Qk(yA-rp+Jr*;r91;>=9l#z)Y^!d?1EfEY?!{fVz+? zBLoAhfHttygrdELZR$}r>04`+*Tmq<*>zV@J2|C{T7nC-Y#@YwB8D`n=czSJckJ%> zk_BNJZ(8#2=i`depAQ#vTO0B@A0#Fu(*8-ZHI2l#cuxPzpfB*=G`Am_BV>)slnlZ2 zL?YQUcRr-AWG=)(HFEtEx1YD2x1YkYXYtJouh%ukcB+C7cxIFX2I=<1@a+0j3FC9E ziTf(9N?MGs^i}b|SG}IVu#n)fSH8U z`5;EbZsqKoDB`}F%~gS$^|Nnc-W69g4_bbKWE3-W;@)qIRO#DiGo!m8oyB7xA@G9q z!;;_vcsfL4b~h%((LG34fpeoZC`zG6Eku6zPJZ-IWPYUmk$!KvU7o66ry(w)CE5d#Oa_6S^v3o6e{`{`BX}I>& zc2`1%-PYEe_!29g*4DA=>jO9=TWyrks*o(s9SD%UR8FA-T36BAX=x$t!hp;~N8u zm>VZOPe=H=zqfS-&&+BM^{hQ=+p?*;o{+rfr#O0t)Y2ptqlB?JudHvMA>_vWpytTd zZkn}uVWN-eA`NX&kSe=oQ?X`!=B{Dz?0muj_=Ch*F%8mUt>Uda%ofFWIxzRk$O{5K zsOSb4aoLKu;AmjYr(6|lyny63Nv2N5rq849qdKyod$}4 z=}UFa{Nj~!H$I0Xx{F+Fp!-Uj*^>I|W+G&#JSb6`lilQ6E;Ku8+IO46Um}ilXQ2{u za2wg~t`%msGG_Gt8UCX)zJ>{9v6tH0O5F4q>I_#{H=*Sz!@&j(YAdXouyP?=+xK6> z(%Lm?zgo#kK3sRlI(P5gurWd9a%Qdd=w{}(I_`Qy>cFNv%y>M_8fuLWvYf?Jwqcde ztuCcMbH}BT6q5dT#PT4>9qB zmlhGLS63+nh?Cj}4WOwC;0&o`>TM3YQ>2)p7tcN+AglOhii&Aac})#n2OH%!$AoAh z?d8>PH@D*yCPtR7?tG+X)bF$@)ucte!BUA9TA36Q+0MWuO6Wl^5p^g-0r*n_zmOM=dg&?Axv{#PILMhV?HKK8I@evOKh_F=J>|iKGb-p=a7uY@vJS%M zAU&dn6U*<#qs&dkE~17VyW`EHJQ5_jB@GGf>j;*9YG9R8-=`dW^I<*IYCt85*Y6vd zC05gNkF=)8%$}oD>CBKmed9GZ!YBJTKazTOZ+ywywra#_WWd;WAw#{J-QEq^jhSLi zb=o4CHIsI{A}P^=Pa#9V!2(gFYhalT!eaY(nVw<>UFP$x{e=1!BzPN67t8_eZ9_U z6h}_RjZ_fQa%|H|m&~!RmXR)w1U0U4cJ|JS%i4zBx>1HFMwn9Yuxw6m-}<_pTiXuA zYG~x#^kfOKlGH2gI2%rEzQ;8g5#p9~M#GXd@;6>H)wxI5pIWug*r`ZShQ69;!H(>S zj=Zz3J-NZfet)Y+@p;EHSTcmZrZ)z+m=rq5UE4o4bw_wiGPzQF(=r?@8n#I&V#^2(WWvNOedHlta)nRivT=35@sOuujK zU0N6y_xPCX2qJk<>U(kZE-qtowA%gI*9RZvvD2>lyvVL%)pphsmR87hw+Jo+FSTxS zGagqjCyiFJZ!h?{rf&*VQaVIi?PI$15N^% zotzBbwA~b!*m!HoGI`ybZzFBry%h)(cr%dLTlvUFZ$b}ZGe%ORtA}JJ`6m6%3T9m4 zyYu{cp?d5EY1ZdbPlFWKtJDWnqTA^&v{P6KaMKcV^$m-UY#JuK_RH5xwYVT0#*p)| zde`=4;NW-ZKnFbfN^j9}vh9LF-%17L>;r;im36zj*KCyYd@tk8wliXPR{JICj$$mBCiM9aw4nd4UC%6@@q|pFI=FU$}B4S~+w)nvh3M;#`(UVuW})lryyj?D_Q?^ih}s5u(JOA zG$y-uFL`ZdZhI4{u7@hkkiw*NXDiAk4g)0DEk^`xhQUT{@4O)2 zJ5coFk)}i@R%aabinLE}Pwif14o+3oTZy>|{&1OJxii#PB;cz;K~)S>ax3~Y z4>L1`53|uHDq)fM5PVT+**0N%+o&h*>Bw7PhREgveA>~N^r=>?WZF>{`3L?Q+#k-1 z3BL`!B|=wc(3a~j2jYO<4-p0)vvwhuN$L@Vm2HQnwyUs4!gVBX1w`NxY8X~9;MU~4 zCxMnL^4bUmxH0v;cWL^m2bq0Q>_@^{& z2-!KLHf}!`B$sWYQ7-X%q_Lusx6c_04)TaN2CTbyI@@?wnSlmfobWW9GzFoZJ@pG%^u}`Ms&J4Wjl1h9$&vJU4Br_#4N>-go zlAG-#Gm}K(<+V^oB_9!mNb0$oc3IHYUg)I(e=hKHPMkPff?_xm3*vzy&xd9SW!LCE zR6xn1&f;}3W!ljFj&=zwYErqZgYxUI{9Ei0(Fed8#>S{{EM?}DYXNeM{IkJ5>*A>`AazjE{ zz`kl_Ds&Y1BFf6v>kZ)w_o2~{w>p;la`fKtoncIBLj8eCPtQ(r!ke`8k;NB3;EM>` zg$6b1*|Aowm9de0Q!VeW^zj@e$GKTMy7Rumu3(2j&~|NXCe-*#^~&j&%L&g&9*Xy6r)0Iz zCY{x^s$G7?L3Tl`lMRnCRX4gUrDh9zOu5)>IpTN_6+-U{ zMkT`cRH}-?>S2d#6RJpdx|^s>z_wk88Y}j!VXVNnuBZCsFLY@sBCb7pw5NH|S)ot& z0v3}ijKH$6O24iJ-{9R9T;JLcR#|5wK843;hN^x>V1>V~pwA|929lh?^xizfuul|s zfTS1ej8N2nz!12fDckWaJ^!}$>ggLkCG?vaobmLbpGDLN#~o~<+gue{-|-IxJZZXy ztCnf!Pttn3tNzT=$g9@9p{DxTz#?Bm%$24qcooE@4>qW$pA&Y@^EKu(GQSFG4IC*{ zGOZ(6C@Pe6yb_ft(fz@naVCqjDnvV7PvNC17alUStg!Yqw>%oRiuzm4rtBUFz z*EEF|*vPvW%4jpYQGq8haV2mCo%kF%g-g8P=(j;Hs+2~OSTBwb-ZQb8xw^%gyR1we zuaIEsEUo^C+pJKap+snC(BAUnCF}L-FUx|q@@dWTL6r&@Xgs+wPA5v?I=DW|Ufs@U?MqHufb!jl`F*hZ z+E)6_^Rt4nnG4#Ou<-F40pXTl!GW(S*Pl8R6Y5$rY{s` z@ZxN{rW9-A^Y(lo#2-lt&)deWCyR)(E`dSIL9wR0r33U7W<>WgZUg{@b#6|7e(m;9 zMM7||hJ%yZEI{*MmDTjl+M}hHFAode{Uk*TNIS5%7lM|R!xtH8#jy)~2+x7X`PAnMFm zSQnEUBh-QR>IlO!L8m9JEV6Z?@B^5Ql>(wXh8&{`58GNb2Q_7JWQDr}Z3z`^y^c-h 
ztdzXRx)P7V`27IyXTfzvp%HKrg~$yU$sCjrY}xeG_i8X*6x51hyn!^0t^qi*LhyOff&S#Z-mwe6*KfaRSgvb|;R9)4t(u93X`vu{$WxO$ zbEzwFWbfjpW=D6`SiY-R4!oSyONWvtp?zBAw;v28T& zRQ)?zcWdlZoU57C2S<4*jDMhK{%|9ZvjYRaL+dXc$-_b!SnAu_*%;_p{3BZFn!-ZS z)3dPP(c=9htKreA;ju8$YT)59sNvDGuzm?_YIw}dEMEd0of;kk1H-=xD;pjYE8}0t zKlQ)ge+v^6^Z!=)mudzkH9R_6I;Ot{{wXZ<^mu>EKXp1fJeDt&f9wB(Gcy0<%0C8J zza04o{e}38{!iq;_xuknY>bTm*3L{v_tgyMFD+kg{*_q&k^bc{E$zP@|9>`wo}T^- z$?_NZh4`xB{k8nxoB5ZPf42W81z*^|-u+$oALTEB3}0$k7{0jF!22)!v;I&1|JtVg z_vdeGX#a1L{%_5+|J%fW?ef=+zfJx>B>mq;|LXXkH2*`)e~g#^M)_aV|3A9_ZS#MU z`F|??*Dm?*TK?(J|I(XmU$*}_nm9RWgiOrs3~XqG%ysMx1Pt`7^bKev3@nZ8jPdAM z>A1OJq5kt|bV}2RmNe_bLkPJ12w}5?7uSukpy5?$Fb?}+JJfk; z`~X5;#5wlL-fL!46cIm-6XVD-g283CLK-;B$`L%JTM4_16_@bAcOA#C_nYs)C7Ba0 z{#ZyUA<9k0`o_|GTr-@CyJ~No%x>kb>Y076@80~J2F{o>PXa<_X_}uV+}+~4AK^+v zd``>5Xac8W|Di5o&5#?mPy`gos)$|qadbQx70`Zgg0}Ho=S%;*gQK&QO$!vzrk#;V zpHHZ5m6m^`a^{bO8un%ucU_94lK;ukf%!VIuz@=PcI0{qVnLnTe5siQzver^BOT zq-CV1{jap2xr3`K48A;Y98y-@<%g6tg;J=X_6O(&mofH34PkVBmlN<6LkQE*%^@Z% zSi7@yv1A;sG3`UvvyqgRxNKjG!<4jI6nM^HyNt-8D_Hyd9Chs6?6^uhOm;NfU|=vZ zoG8nQ!o!EACN3EoouF!NeLCbP>?k7$IT;$wNV;8m3rG}od!dxw+LtdVfNweIE;$$2 zp`0rzQk7Z$b9l6Y8GK?Jy{THCH{4OXJr9$U>OlzARN0_&;WKY_+8cx+%d^JT=<)kp z^wcn2)b5ozM_FlcUO@@>gaGHhOl_*pZpO_2@aH?^^c(tp zXbE{^NAn#Uz$_2}7Fu1Vxh~t2|3qdlw_RULW>&#@e??MUWLE!W%s(F5bO(RJjJVns zdD^QWKrl3wy{r!B{d(vIHk=8T<&)70Z8{~>y45olE41tE);7gaos+|6&T(#DV&>xA zd|y6h^W}O#*n|B~6_HaW`eUB{d0>O%rP-&EA)7XKFUqU3B8#aDa_=-dR9tdeXB^g) zd&2Lk84nlNr{BNtbfDqzOxkhGXB>G9rhHsQv%Y-j4d_6jU^Pmi+jE>`U>>(@x_A1< zVX>wJy!U(}!_H4WogbET3NUzFwnvnF&DD%NgjenR_|%z5}{1JHIe(ik{cIFpF>`^ z%cMoZhmZO4tJKZ60~Y3Ua>ScbXOy~5Ta3)=mk+CY{!2bS*Tck1ps&6WeY0ABoJ`8# zJ$%cs0b-F|us;2iAlbb&0oyptCuy6sS17*BStcRZYe_ORlaArv6t?xkk4<$ss`Lt+ zFI?NZ7Xn42zBkLmOZJVclOFKzz;c;93y_s;Rw z`2`gPWe4s8q5(vKKymMB7paN?I|f|vG=)nFY~x++C4REIc+qsu)7<#CqyM{W|7q>N zINWK8{rEOd^>=7I1WJ5-vawIadfvu6L6Pa?*+cOz@aqF^j3h&b1%Z%ZQKYz{RwD!C zsZpl%$ptha(*5_kyhTk!-($ZM8W4ty*bo`4`kte@RI))E#tB72rnO%3u)vUaneLT%P)jHBT(K^;T-8|Yn+1wr$ z&}l;=jd%IVqlvMbPC|($@gf5_aY)OF});N>SCVW zFjpd2?IcGpa1it<*00cpz)2hj;9Wl+3DNF)x6A{cttHa6g;-BksbH3IT&ejdv|T~? z2c`6dwa$-1J-)30r-jo4B{^{Yuo3*a)Ij`FSAKr6mkh;}wos}2!~0HldpJK{US6ER z;JPq|K07)pwoW7;8VXe?k-=!)^WMX zm>Wp-lX(Pyj)MU(hl7HlJx)8@g9(&Z!BLD z4IzxrhR$^63~Ar&5Lr}p=zNnnSAU?mhp(w-ED}LbNSvpIQSa}^7r^K2!hx|Oa5>$B zpnBPcXn~R3yKlkeBBp6jUszSdNVAxdrf&E|IvBJMId%<-UVV0}pL&etsQ`3!fM0ip5U3}^%Yxd2CLZ_rx2u@7#X+2`wBEq z1?yqq5+dC3Y}2u>aOMzFqS{SZZiE^Khj@=d(il@U=!1x4v>|QwB8N@HNi{@_&7zP^ ziKclb-2kVY@8C>{&Dt_~Hpd1|x$+O;sBpaU_N%5`;+9FW-eqFIG)ss4Rk&NLuylEn zydp_+O~HJ0>gcYL+==x*UlX~z`?wiqRK!g@%26LD8;oFnhMll4$=6j3Y^!S2h}O!q zShinz94U1-JA_<|O4>!U!aR|yvK~JXlT?o_M9gt!E5dO#V5wXUd=`N`_WY{va4S3S zpguuq(Zw8P!E|g=)wP&ud6&HRDxOWuY-CT~MupRTqyIc6oCBkNs!>m@w5R5fm(0+$ z*l>PyZ`P+bBQli06`7vWNnJ>ed-_cFK~9HA5ytU4KI){1Rpf|_ZC5{my7VBdmN=29 zHa$yJNh=jtj`KjOn(_~pCDP`ha_T_7f$z)Gt*H$M(^XLxP{;R|9PG~Y6fEe?aM@l; z+(Xv*{6g*hp2DAD+cDB~-51$>uEn_nvIkvQHYf=r9mCbk8JB*=5s-rW`2({L<1sP0 za%q{vWbi};YD9HdW8H3ridm9C0NN6Rm-5W^rw*-9lF zh18C1C-2%3jygPun=#0L1ws&nTQKEr;1Kr62N6DdTNrtWNMI|spjI~H*A!fbC<|2* z=Cv=?Gd(6PoqpPxQC!%+*_dYJl^`7!A`P!u#o8&$VNPTYc1{{VnT-_(3qzlx=N0hmQidS;D+8L zoo|YUHp(t>KsszvFhO!Ne5n6(ywNdZxTm5&!R3briJok3K?uHt0@$K&_0iB#Z3_4q zPff%5m}Op-Rjz}tuCj_yyY#0qG@(H|(Y_F|cuBbij3~cyzY3lzv7a5R=e=q7f`IE! 
zVxlE1K_5#6>DRRHTH0(lTZqe8PS^T0Pm0kKRf>_l3*PIb(kE1$?H`gj<8&BQa4@%i ztfnI+7r9v&hhVrKRV%U?%h zsuK*q?dNA`lW1B3k?qP_mI;8Z>2Xmh#@Ar)QODc+%0ZQkR8cGbLMFQcCE%-~0T$cC zwBK{#&nF+N7YqEkJ+Lq>cR*%4lA16>LmBTM@+}gFwDI;n@ID*FYNf!A;&Y+#8{kCRV@k zE^g`yLHds;XkX)8_WOPYQWQWzX&{_M4$}?nk5woMGL_glV_XN+4{Uvup@uD2kQDMz zZqi=ldiiQSvIePTV*)vZ8m?_Juz06d>vtI^tmKlK-EzVaMZUk15 zV%6(dzTGRGq3e)LJ!4e)vBtjP%@UM=@9i5Au^($*NgbO%wa2GN^E%I$^YyQ83bRk6 zs!&PXRsNGsgUbO&9D(Y zBJ!&!I2vdP&oTC9W7U$gd|TROMB2_ArcOm1*-~=%-h1o|Sa5HBM@}mL zhY)&Ku!w@3B1X35ka>uP#htX1qPR*Qz6)ucW^09F^LY8RKXPQXEMgzITxG8gWn~iNQDU~nAw%OJ5e-jB|Et$$(0r;ZYdINia6}bDyv5K z;2{qwpaWZXZz2DVO7t=x2R2$A%Zekw*>#PMg-#7GGX8B{TquJ=w-Ae5$j+sjD4{jdn|DfABfd%gtchqCXWQ{dCW;&x-W3;cMMu7u}& zWTG#(VaY?%1B5}0r0d+Em2ANSdvpC_SEZNM{q^WpVi%2%);FH71UYZtoY0;;uH5&C zV;{c50*QTluPXU&39JbUe1t3jKn{8F(X9_8MJxKA;uY%sEpFYvEfqmFN`u2z?T^oo zNN|?`fLT1NMldST0%^5su%cL*g04ScG+w2kPmV}RUN;ypF2+b!tr9FBZ!Du54S6q3?u1v`t)kMQ(v1(2 z#a9g`C887)AV^$2vh+KiG1Q96!3pR#sXRZDF8mm8_4PGzMn*Jvq5<*>yQ!lk~bCphb3_56~jLZU(%@FBSF~iM@#jOUFM61V_d{ zi3EF+UuOewNv}Hqmt@x!fJ@Tr27ndWbpgPN^tuI5Np@WVs3g6v0nm_L=KyF(uiF5V zWY=YYNz!WN;KFz(v0##TC%)jpcqfTq;dm#3;K2B$+&-Q-Cy`+A_@(kb5V1449S@=l z`5jH73t8Plz`59&%#I~d14Z>$szfoTcw_mV+&&Glwc_hnT0kSkwIh+G{EiZl zrRQK=5YK%ECwsB#dRiDPbzUmHIz#qcXt3S>;b87(sz2;E$w58_hwC=0!CsId94C& ziS}d{>m!8%D;C2mYUZ9N%K|G|`38`>-pI{gOwnShU z{tjNDhv7@`@wdbGRCyly_pUpxS;q}Oz*OUgUvZWd>pjz~vV@k+23p$J?|PnD?upm1 zW398dxMQS($Ykyv{Lj6)z_b@H`AdfO!8_{1_TD>Yj!j*c*kjm&#pWTiG?(~e6syEo zUK-a{36S=_JA4i;ZI_H=wgRK(AqEN8zI-6s&DDJYXf!^hwD9d46~9FEcEMLc3iW>1*fb_^;r8D1*L44pxHElIc3JEWd^Em61B+U%LfoCV<$hc?5utn(vw%(K5GFq0NscgcBA z+pKMW8PTV2hnIqt8?diq9)7QP+O0wlW!h-U&YG7fZF&)_X!J2@W~soa3oE5DU=KHe zlxL7fn$?9{ADWjoFufX3^hUL=@>jP3lINNwlekSjux@8BXkM#Ptfe z=32&d{caq?(tvA>kCxoUxPcjJ>rJ{f$68Q*HugAB@qOP}kHyQTz1r*-+r|>AOalu$ zE@Qy(*EnTd!3=+FWet|kZId;~ik2W~leIC0F&eoO3I~I|6^i!>aLFCvMNeb#GpS-B zhh+>Bhr1IRj)1)q8m>>Cn9D4E+~vrlV`EeK8o^j)Eb=r3UnAHm4l6%H0xn|NSC811 zmP5Fj&i>FvRAN1;Od?o9qdg%x3$)SzwJ3Cv9us*Ic@TLN`D+}b}tI71aS**tBm>pJOMvpEx~_sZ!zq>Lmx|fOM69nN8%r>e4xu3y&^n)bHa6k z?{NGdti4rCB}@>ejr+mf-QC^Y-QC^YVPJ4)aCdhGhl4XPxckA~ba=G*}F|D zo$k8ntFEN0>aAz+2FwN0h3u316#()AaedfBA1$*L1qcR1BS6Tui+h9uAnx;h;DZQjxsykhfa~Rh;87+poTfC-vSbuo7Zqg zcAMzklfU^q+poMqH$W#~J&*}FUHs|D6XpWtESi5Ym@BT*Yy=y`^vDHVp3@;)`4#O2 zZEl3TYXvL^QUOPSQovo+8}X4IQp=XBk7T;v58}rg+&{M*1$Yj%H~ED|y(S*8+UpqiMr(@8d@51n(RkwrfYX}k7ALL^v;a0jhp?t0tk@m;mby})Yi)iz?+}W_bDnRjevPJ4REIc6p@x23^O(?c^XLgx`4uzw7HeV2W(1)V$IHEw;}^T3L{G`%prqtr3+ zTk-m*bc&WGN>g{;js8Ge&cu21_3!ifl$hU=^(vM+DIDdsx=UV4s}DWrDe$wDwha-& zcK=e03eN78Gg-Q`%ngm+9%n0k6?%n?lhU^13pG<+T(y}WfJmjAzkOxwgDvx-<6B4V z{7gn-p2`q=Q14I7uEcr%qqC#gOs(GF+|vj`%0?qrjqnw3D~@6a9uJ)nyJgT=)%149 zP6~zD_)QoI_q8cSKTO7xsN9ZB7P7D{TyQ&=m0iLYlvS4-2Q5~VUADj%U{@P4OZdn# zg+Mg@<#TX*Ft@3`pp5nBIiI2xx(1)(z-D4smz4IFv~Q5*6lNd7F8-q+6LB4j%gZRj zzJ=ZJ->H-M1|dr3xlu>gOfJ~6ib>PMCQ=!G*|1oT?_a}QFywv6{A_(oVW5r8BBP-- zle0ViuI#TE>ugW?#t-AIh_(-V?uA3aNlPcYIDwuVzT?Z$Ch8+c_b;8?*II(xU6&6J zAq)OCj%s}YNL|D{N;1s$Hf==|m=<*1b3NBy@*$pWt$^%58gfI$&Dndj>83At= zMM4Egf@71SM}rN8v65oKfro>0ph7`GfQQ;C!Zz?1fkTkiVBf|_0L4HyAQ}+ZtqVv7 z1PclP34jbh1Rw_x14sdc2=Xu$X+n8Ldd0b=yM@1HyG6TI0ZD)$K|vrPkP(Oocat*=}#u@etc?)I>VGCjl zX$xu#ZVSv1#Sx+bvH`jQyaA>Gq5-M_j2V_0k{OyAoEe50;tR$Q&JZjQB@fmSY6f}+ zTtBQetThw>^fx8aO0q-K{f4+OG5tiW3kiE-Y5Ia*834ZF{NJ6N5NnW7&tPFfqNXUQ z|4+C7`_7=Sc9y1R=#_rp29EAKgoBCc6=L0g&TpNY;qN2uXB{5Uz?SXZzA`L_9ek@z zN?Vab46>1Aetq)sqPhLxs{e*8&OGEJ%ycnFQA3fEC*o(m;3KJ!a^u{05s^N2Q|Qnm z5vDAgBoQqCGf6kJElTw1VHaFAZrW#b4!lDMdN9+K;;%rrfIR3{Wk&e=`f!gEH5`v?qRl#sa&(GJ`Eu{k@S2 zhaf{r>y|7FwDuytVD_S2eKBV12@-$W%3ifoQS0d?M#YS9o&FgiKhEqwLH=)@r?FCX 
z9MFaM8^Q7Ls3*LUe5Qr5p_C5ELEqn76S>s|uPhGs8+6|Wj`Y8MNyNoho;2H1J_Hte zk!lD3jWkhrh&O{AGo|wPm9E5oc+L0rZPPBjX>qCVFJO)gajL{u2t1%wCj4b>S z-*TRn(g_x18dN5e*FAFjjplNq#+Ra#Fut5GeN0b5wcNa8vY5HRr5m<-Krf?JLr|n= zWOF+6<$#j8ZCU10mV0U~7-s+|&l2ho-Ieb!bJ|tTwL|PrJe`UCr`$d4>rQM>!Qxcz z zTrATpAta`hP9(`lxsnuLIn9>c=veQBu`g-JO*y6#&HVkjbIZ_UpvZ{#hppu} z-I${IVJEqg{%hjM2;+ESJTSpuQ>^E-gNebGV&+5-c2~HQ=M%K~UK&+-gLCH6_wpQ& z(fBM4P$Bu)iMs(1ntF{=|GcS^Sw(ZNEtFDKU7{ebMIkskXA3uC*DUhjHlz+Mxgo=l8c1)KH5 zA7$#Atys8m{6b7k&h1Wa-rB=cJ zl7j1KvuUm=f>lbG5v^!^1Yz zCf-Xy%ulz2Z?^DbD13B6A_+DMHWz}t?vC2UHbQT8I;EHZr2*);BW{VFwYvRBF8lEZ zp@B{b(;zwZI%CQ`dZL0jF0*%h`bVy>&dmrf&i#SmL{c20Qd;Dq>E4@;ZrFZ#4d=uVZAA6%v9c{)s81PvlO=9%LcLGmY6O= zL#BI{cvwHDxG)IJ+$mpVr=#)NJzd$y_?4LpuDfv;Ogwl?4ku&y$d!gN_{z_h!X3USfW8uAhY?CjCaQA?9Lp z4ah@x?;ewe^4{QsI0zJo*|OL-?{RQ%J6z(5v;#lZUA@GDt}PS{Sy)miG)Utc5Ztv# zo>Mwj^%yClt5nYZ-R!R`y$rob!^Dh_j)#d}b<;#CDKqkXX2sNXR_oC4{jUMf?YVy? zI&0)~&6#bmZz15QKE(AX_I)*8PDuq2f6n~f2*#H=J8qarO`mfsE?0zgS73h_92^xIYMP5kV6V%jf%%0yz{{hx$)ryVST9*Qt?udB0!>)!h_VR7OoSKlzn_kB|& zDN}=!rE6xL*QIS$AW2PaD*9UVUs^GfH6wW9yv05i#5@4&Kts%{bG+VTU(+SzU-{ zqHq+Gskym!2~NHcRX0?HW|?25o+m<%g3F@6emMlmQkoh|6O!q+19OPzcHO<1-=$af zE;EkJkB{GC!J9?GEp~cq5Dx8O?bwfNt!i61Op-j3e`qUJV>GHp=obEe)7&@R4rGtB z0XJx03=?Hu|A*87Z5)Q3L^Q?30j|fFV+cPFMk${hQ*m0ch=EZ_KMAkS0ygfeAO53? zU+kf?Y|_r=cC9VlAR~f;C@q!2sx2=TaTzn8Mn`EH0tzi%K+*=>zKKt+tSXo8XZd=D zSCmf3dCcq~&dvLb=KI&!f}L0qCy+c)&4$fBq27T{LDGO7YwplykN zo{2r>Z6Y-wq~!tV;q%=d%WNU%DY*qsAaK{$iX!wjkHXC#&L#tvh9=7d zjm;m^2g%2l?*jZ$CvL1w#!?%atR*@@e`rN!VYrfv?5+&USNS7vQEcX{sO-E*smUCg z<3C9qTJSA|a0C|cCJXDjGn52$XLp<1?u|tI-VO0W7&q{D|1xkhKW|MacRcJDFF>y( zG^a5%V92l6JO0we5n_qkf`F`D<+z?= zrLvB)0+mn$E|Csu95dVkVX&D5+^C}OW9z?<36sdrXvk>xN?>AXn(62Rw!SfXe@!oH z!n#Ylj!KC0q-zGQG;+8e|DoO&D#z3!2gCni!<8(o03Pdt!2_9P+3Ed1ne^VDygU^@ zMTpXtT8x=y)wOG}h()D1WWi@o7U&%66g@tyP7we(c_lEX3p7)UeYac*pj5irL`bEL z#J+G&gn?oY9hzd5^144|zjv%X{sxMBfU{k}b2Zgrm*^v^)kjvXh-*5K)wCxg0!8T8 z$P~ntO8IlO3fs7rYWuiKcd)GHjuU_~f59Nq`v^gF_rEfdFJ!IR=OFXJz=yTQo2CKm zN%>-g{8_-|kslA3ocCOJNRRwzWXo=2hwTQRaP!mYA8;ji8A@k*i!;!hmWwI;S=_pcc=giqmB8!L@a$+?1 z()g=*1RkdoZCP!ZjczX7(VzySjdQ0REYArouB%p=K=s!Zo<9IIuD5a9`yA{+)TqQ~ zTW7R)*4Hb)_bDK8zV+GSRF%yH0xBn3OBC zPX8iCO>1%_R<*ZGL+RRuXDq^0jEpE04HEANt8xD|QHcE<{Cy0|RkDj*-n|l*>k!p( zg&Ui1OHf$#K8cg*cm+s7aSps(yefY~*a8c)bqKyQN8*EX7BxGK4Hta$9)c3r79If{Ix;j|TvUv#2y80Y z=;;G#MzF6o6bE4ix#=3?ORbjtFj*6WwXBQ0F1@duVUP0YAmUz5~s#@tXa4!K2|K&xxf+b0X^l1{b5^ zQFZ+-ETcfJ8VKu00~sHC*=+pkZuOaqJ4hAlP7GPs3b~fS6DH(1el6wCw2)Y7!(7O~ zxX}EnimR9smXJe9d_14cO58Lk%^12F*py-m#Gn&I8^RFl^{ew{%vwlUz6Q)m`pSyy;@1!?JEiy(&I&5U2o^m?x z(T)lu$NOC>k<|#@%5kukj zSI)U8^$nWCD9kK~c{}@ehKEpYGqJlsNpkl-O#PFP1#6799@o9=2Tb^2hgu!ec{8k& zG?B!PykMk@QwQ*=MsND^9Y2Fi3nnu=(AcI(E>9CYySop{#1ek@LpC`7o(?aEN!CM# zeHN9nVz@Wrh*xp&FDj8~AHU4w3jpBxN)*igK-SmJui^g2H0K)^!oYNbMNl#zW0!I( zfG#3uBb*LgE~F(BH|8P}58nQz*K6*fa~6^%PKIIJ`URTgXIsLIM>&BA;ikdv2sx2# zs2$rnb~IjjNaMi|V0RWwGwR|c0&NH169oGd*?8!*RdSh`+T+v@kpO;H0bKk-Z9ID*a)!m$@mfVSKP_YW_sX#OksGiJ%_OO^BT#P zMLCzIK?yQvPOR%}Y;nS&;X(imB9$H{9LkPRep@Ot1xPn#`=Y(4amqO}u|x}FORsuD zSidYc%XgZba*K|q=j~ua`9zd9K2G3Ku~*86WUJwM^K>sXAEmb;c!jMd$mH#`RoJci{dQ}6&Q6NqT za@xZKlB@y&t$tGDS#5iWYRiHzOGn$A{NJ53;%d9*H*0wuC{HEBafW@<{0Q zdILpK;>3#IbcEr(DO51FvRS>fr#lv zmRwA^c`ddnBvrP6Q7)t_Tz$WrF~^7F0O<=J>2J+rvi_a@1L*ICBL<6(K3|jXiBFbm z6VKsT>$Hqs70>&4flKGzuUoHN;`I$%wgY0I@Pzut6_XQpmtCpE5@7pfsDJZ6d0+JU z1y&z|OPE@cshlI%zEg4}ij$rM|EoRNUwr|ra0GFWNO&9KEx ziqH>$12AyD!S2=@Urhd%$ypXJ!lSmT-HAflvJ%M<<#w2?VT0UrK zmvTiJOOleRj}UkCodbZW$rl_lE4xq3(1bnO_IAR}VZYN#THog^NvC|oi)Edd^B5h! 
zhC{AM`lWUFzWPzJQ2+bPBR-?vAJ}>9(3%>e!3IrNv>aMC%92hTuSen3MJo0fF{0Xt z1Ms>%LJ$+n6eV4H{$z_RnFrA)t+ z8pW`;(*3y-oCK+n#VV%I&z^|iq$vJhYbb@&8tPE*E*y!jhgvn>2M)B*v3<_MvVL&| zc?bQw(XuVeX-$aEbMs^#gtSy*sFJ(1`=Gtpn%VSHn#W(tSs22AUbKGC3Tw*PAGDd+ zhS<3T4bPzr6M7C_sQ)+7W4x1z6!$2x=Jzy5 zi430ruK<&qf#&UrMXLQ~pn`t8TP%{@F9Xph&+X@fpm*hKmz)02@por_w<6cSBCZcL zS{+C*K|es9E)y!g$e**Y8IOU_Cq5*BXJ1r8J_EP^v{2-)BJ)ua8~iL~3|<)#q85?QR^iv2`QiqXMU>dkut*);1HwZqpf(CSiQ3);=cx4V>{e?$=4 z%&KH{#-k3OFOO$cgfz9G&F(Jl#1g7Pt1_Hb42L_dU`!45lEYHI1Gpg*k zTP^L2I^|TWm3VorPF=kTZ9(9O1?4>I&bbFY3UA)#JDnz=(sw82DQuP)%Pxc#&JE?jvjN{ z59OKD&nKOk{+n`WGXAq?rEDuf0x%=FHfvH@{%xrYz$X4OJV|XcDhtC+#s6_FVjqy2 zxF8!zJaF4X(Pz+8bTW1EQ7t}ALgc-lU>dMx?7vVDaT9zw@B!|IR^*A=qfw#2VH=C{t>QwVi~$jEFd6SR<7(Q=~&gI zFFnquAXC)`?OB95hJLLJ$r1DtF5Gfa3oo62k{nzi>qoQQ!xOv}YmHi}yt9V!e0p1j_0z8P zV7c-TLA4m5si<~xyFps8$^-1g{V*@S>%1l7#xf}7&m+Gq_@cbqC_DI1_DpzH69Kfj zKX~#kcJaYI!w%;1F29Hr_K^;`{`E9{?-wJiqOe2UGUY=6rqQytb&^%E!CMTIKYdBa zAcQQ*6lvO0=z=HtDkJW#s5~LQz+TW#9##7FzyD^-NIhttt)cJ>e;z4akZi6afE+wt zrKmf7gP1!S#%H62b2k|lUQ2m`%?uQ*&-XT?jjRs*&YBZ6guqcV>&d;f;{_A{fm@kC zdZ3v8poJK&1LF@7Mr(R&)8=qxMAXk;7P3;K`-()%LdLY4GUJYIR?!abC`PQ&Kt;qr zVcixD%vsQ<*b%KyD3>?U)G(Yl0py6r=TV0;EoDJCcbKTm6L-}O0#j!V^87WQUGE$Y z_3MepmX#5+0v?W%mN>Hl1iTR^@9k3cu^m*(*B+6Ae-}B~)N=>%;#xv-M3j@RtsPfy zRBNJAD;*n(WO2|nP83AwK@zZZB=D8)V>4d*v|A$j{j!Y9Li=x72(PW&DSk}ES0Qy? zOmRuAq+8KGcp4=N#r#%27>k}gz_Gg*i4jSOpTF^jj_344{)YS$olFlSUauNy6O+VT zt?LXP*@ja{ntAqkTW9QX*W;ec&RZX{>C z4qzt!nr^x?8tV#9OjArX9D{%5NNj67X|f!@BN!MQq$mlQNUTh^fzEdEMt&AUM9hN6 zFIKI?7H*xYh%_J9GPdI(k(Fcat~5{&8sH{!P$8A}@MW84CsSh&@IS zB_(Te7ztfQj_BZ=UNEEt!RK-5apD_DEAZpH&lFgq zhCHl?WIfWXZM9miwH|4wh2C__3wUpvK~)^2)GZuu3Cyq!V2-oU6U?o@smUJL2y*hrOs19RRe>3f7<*U*yS<>Ib6iPd3Gh`p)ED^9*I zvzoFF8)V2nF;b3NoqGn_?iS2To+Rw`(L{y6;1HBSoqUMu% zLI*8`5?tQ+BkBjuBa&;I>l2dhx`gU^C-lRmS)G^Wn4OA^*v!k6NAIP#xM`U7J^N7k zfuSY3s-wf=PrOj^PQddu6# zI*nDWnjDDyJAB>Kx>FULh<47dIp3{z9&;X2E;XE3c`Du96Sz^IF3oa)5eX5*H*)cZ z5lLbLUMye{UL3Rvyx-UVM{%;RVwG$|ZbyDPnk}P=3o?y?zB)JiD+TitNy1x}#~p0l zp{m#VAzM$38C|mkF3VqO!0qkPAUx0N`6_-^Uf{h`+l3(va3=w!P%=;RU_;6JW$y{8 zU9%O|_8)s8fOHte1%1|L|CAbR9mAcG+#%t-32{ENKmaXpu5{?T>kaB&K(Y z_4)CCCE$h~5!|^@-=$-&`4vqQiIcBUBm{jaX`Dm;F@wBfsQq8Uc?8pRz4|GQQoZ4M zP6UK9c}%i}2IdVI^%XyvlAPn1oDoj{R1AJ4GQBwVKH``D5X(&@>)i7aD%N6P{?zz~ z^fUlBw5b_?6r}+(NG91=kgRj;hI+U5cQc_*=u+Ia^TEVrX1>iq&FBR%MWER>Q#5Xg zR8^pLHG({;Rkpac<3vFTCD!Z*wc?o1a>`DWjdttPNY{dokYME!>}VM;tAtG*)BLCT z&(pyTG9hh46yd@aYhLp-K7TBj&{E9EZHZn5PGlk&*qJ?U5oV6zbv8H~-Zuo?v@$## z0(a7bcjFWz7;(VA1D>)syTmJ)GR{F1-`^GxP z%?_Wo$-sRg)`jgL+0Q_#+3P3#dTFaBJR$HtW0@$}7snxy`$$niQIxhM9s#4XosEq7 zwn5v*pD}d^ip%nlz%nU-#R5R|M7UxjOsa|S*}@X1?;p3}$zL&ttRGwid<9V?qEp93 zZhytJ!p!9KC}^;x@-#8s)SHv#q|j(=;GDpDiTk<@m*nv5_ZS!)A%Sh`)dsp1C$%xt=5yc+7StqWfUc@VWN?-g&&7PhqFz}j_|*U zJ4oMh6Jms;kAz{88)wJkUR6z~MLhlyQrZ}cvGMl*Mw6Q=Ll?1!3z5ElA-xbM&F2@n?m8s`4e!aIi z^1~vad1dv}ehQeQ6GSmNCSlB65(q61RS<7u9L^k#b=pA*iUL!p5-*oCJVjmc5U8VeFL_u zxWxMs=;(<(w-Gn>rhbqT90cGC<0LENUNauku| zRP7V=(@u9+G}hLx;sB3NQd9QKD;=FIW4Og{7^e)FM!w@I05AShz1chJ6{rT(tCCU+ zdt3SpM7QCn!i}~QnW*;&R|Cf=X=N}w`4E{JmVwtf%O1Aha9^#~YiOuU!CLJE3Kk}* z3>K}vSBC$dj#_0+w3e+Hyf$9D!j~yBfhFd;ny_)0CCFbDziLf{+P#T+5oj@eBP)@V;}eaPI&FcOI#>0u6uNUBNA!h5jTO=U3nK{1#w zwooFOgB_`S+7q293$KTnV5J7<;P-Pap^NpQ9YGPRB^R8oaK(Qwjq$eJc+tC`Wd8Qo zdZ#%erT}B+)8?KO4dp+2(Qcgm>RPAln7N?v9Ed8oUJo*-k?!9sydMMNMEA=270?Nl z`VJO+w*&D6y=fnA%QHm5&ib{AO}!qKiihU{J~~_7=ZRo-l<=l;ziyyV?Z>1UmPZ8C zxbBu~qo$UIt>@zXr#NzIiLXDub5{sy5A~{zVf5|juEnsY>>{K-PX8pp)g;B1>{l6C z_}=_d;hM1W(^3)X)k&dG8i&XsR?%-5k>cvABE*5u#6ImyQ$U0e?rr0!K{CKnDl|fM zqJWSp9GnHeI~Ggk=0z0`4pkg 
zw6k+9LQJquK1Q)&NTRtB56S}>E2K`#C(6z6=4i1@K`^mbbQipB5#z3|*9E8iUSg`5 z-PTQU=>*ugJv`2xyc;xJ$lb-v5BAT!+Pw97q!RySN`7RR{afsE6N@I(Qd?16sSQPM z3TF$J_3jJlN_4&07yWuYIev>zL5y)5O`O!^8!<@1X&fg%DHywXrN+Cj_933MkK>Oy z<5z_CK-u280(HTwG{?jyHoFK_2*WP+oEt1@YsU2~hfJ^o4YG-HNj!iF=iqhIJr-BgiM@P;TPyg z0!uscIqpcp?09*-?GX7K?t$$=QQiehgar8(ea}@GdVd!0g-H;2tb0Xa%2D@6wgmW4 z7_`=+CAlKRo)BBBjbv(oro^iMgI{Zdfh>|)vmj-~JBwmaBzapKcLgKpP{-0LCsmY5 zYuz~$nU~}}bT%+OnX4eW@R88ImPR_@+JBnWT_>*>*gFE4BMdrgxlUHzC7d(J-Q3eg zDQr3M$23Kj0?%M|!p~hbRMIK%>a{IG-@i*UZDLHoa~=-_hcI#p|Dq%!?Qp*?71bz^ ziDinHGDFK4vF=gR@A@58{gC-+59dioF4v>cBvh^v&tqlv6hAtQ=yd;r`}#2kq*)o# zq%#mj=?rZ|V>K=ailth6Tg`31K~#tY3^9i}V&_0Byx(-^)vSdc#m94s3Q_q0XYb*> z?`pz9mMxRiSVPlKjSAt2ld8qeW$Z|AdxG_FQfIg31-T?BYH5M zVMF%-ap^b9xTSNAca^vKclf8)&Zx_v$#6wed+4#z8uQ2WnX2Me2+rTbHg|7lnN`iq z2{-dqOs+s!?%sjZ{J!cAtE=ao!I$xjL?_%$F6ot|<)j1~<9J6BT#-eAKpY+imK{iO zhje&oc9Z;xJu}@@3225&6}%(gk*qWieiz`Cx1Rz<&wy{5k9MjHCpR1~$5_(8F)*9C zzTY;+>A-FO@mX0=u(z{z3l7(9#!;JHu$VLr^F(()e}eyVFQ7Tu{-_pMqvV9U0-VujdP z*xS_aN}$PoW1??Uvn|4iT7SB#nk!st?u<8qU$D~#-mGfog7a|<0g}1!g#Og_)7P=x z*^p4J)@fAsWoF**59L)vs-pONZV0uv>h8FHb3pP}yqP#_@o7Xv2?Kg;gDmZDn&Q)C zD9zG3t->q-_V$~eLo@hLbcWd@x@I08RprA1UAiKy#H>6>-(j$(@K>EwH+Wd6wkQZ> zGC5l-Qh)q!+$~{k@U~yKuid8wBux8oxzbYm#H_dkZqM_gC&7$Sx=>Zcbq(f{rE;M& znUv7mk}RFcB7vV0qD>V8hy2~{lmmxh@{>hH^uz?R?P91whRw1iF6zr{>mbHMl~}KZ zZAa_ypw%fs3PUk4GK$N6|ALCt%9kC$18cSxO%HeiEsVX^Eos{_{#2q}jckzVVjfoz zl`Q&_*AcNh_?Cg6VV|FH31B=L@uJg{igj2_5IplV{16{q$vUAm_k53ZNL49Dd~!J7 z4NLVmERF;3*AnWt#43x`*{$~p8_=}~P+J7$ zss6Sk$qa9Ft?@@u%UF*r--sB7;a*By2AeZZ)mc`rRO=s3!Q^M;w z8OEf{ER9!?RUwkK#4aUO5LHTw1sAq-brE+V);7?I2Zv_#=nOeEqH~X1uCflSjJ*ei zt94&%;~2l2P4_gGQz?8K$@n}RDY3iii<~-!5LPX*I8K0KD>{fPQ-bB;}y&Ch^}DWNqG#tI0W^yW@sT%X~^8&6z~>BHFU&gPBU z6(q@SKT^6URN=Nb89kuP%@lM>m9r+I!S2-B27^~`8Q-Cz;_g;kmihcPdHU4){%-#R z5c{pxdfH8-$FtS_zQ()$8wIV|u{<@iw^*u{&fEV_?dl4cW)evS;$DFcWU* z){!@W=?a;7KL|ad?vSNm@E=i_*(D+FpBrMx1Xsx+|LM^k=|~tWbfB4U(KKvLATUnY zy02&wQSe?3rRqKu718+?;^t!r#RGq5>*Pr^0`L-?wg@y^k6XcZ%-_Wc!jIm!%owj3f{}B@ZSD}(4u+LDJ?loyBSq1Pec{H&*G9x7 zXI(Vs4?$RqX1J1axZIMX{M6_rUq#Dn3mAz8AMhk+PY7#JDBqs!(EHys(!a$JGQ8H= zJ)~Pv$g+J{d(sF&$;wOed1 z4XR!XOXbHh=cKH)qN5w)!uh`3zV@2Gmb~_Q2djW%4CaEYK=fJ|rHX(YsjV!*Z8FSS zvcak&p zpgA`wV4oCkcJ$8I&_M*Nz$s8RAS;WZUTablp_kCkkZ+sh7D^e*^r-WnFL<2CoyZ70 z5%@*aRVBinp3{K@^gqIKsws8L8FlXX8;er+48vcOMTCMjK7LMNe~!hw#;qfnOYFo8l-}#A3Rba@?y}0pj8qhwUt0Y`FAA+My z-Q{K;RrkH~PthcbYfzUEmbB5WMpM^x=?)lKQLbm3^&cpdcS|oB@5;|_Gqs`^);$j& zTk=jhMbuqe`ehI=9l7qx#oOsWjvyFyvvsLY)|iBKB3qRhP-q;$nZ6_Ex2qx8`U%k&_hf>EG1 zO?T$XXW-r{HOR6SP{WF&-2zpdcy^N*kgDsRMbMb$vl zQa9h!Ll?jg@Y>g$y>#W=YNeWfLDC%pFt;HPLX&_5ikUmif2>(YmLI*Z(Vh+sWue8J zvDFdOb?FlP7yu6-@IQ-1An-?X1YWM`Hwoh)C)03n?9Hd~Jw)U2yb#$O(34+@K`|6; z-96UXK(4t8o`4f$Dpy#)3G$e(uN@xgm0yw8^?f`a9x2k1p4a=R(x&IVjrY-vOCH;E z9R7jm4fT7p1n2O$Mdvr#y$cWgWip{7can%R=Vhu95L!^xnM{c?SnlQHTaEj2ZdoVz z{aRRmUeTz(6#1>C!*aghX@{~~y^4ej*m-`U^GQ>yMe!$&Md9ZC_~d{L>ftwBCW{(( z!Km+l20SH3{DT+{?C6>&ZobGRAY|(iE|A~4$b{)OpAOFW(5a;zDf2bSS880;=Piv` z?BrWJMD*e29W|ZAxOP3m;0}qC`a6Oi^yHnBO02?tKa)(i4&>i25by z-cRzyADoUo`MG@7geEr^#%5}fI7%3}X3k5L`*hXuSIy(7d`Qu%296PpBafLegRT+9 zIzuKGR>Us|RitMBl6r7VEkWt4A8wS6Tam$lUbw)KxE|xqL>!Vj1?xve|Bj^6ou*%* zt17Js*caWG2dCk3KQ-ik4ecRBprzfz_QAZ8KJr(*Kwc!_5^N91O+E%6)!>Fg7OR^g zDuK<&a4wnIKBTngEwN-L`j>*!JuxAIIO#={uS60YP_VK4%4PG1`l~$lr$X z-G(Vuo8@gH&opaaSPtF_q&d)O&x)e0D-!gbC%yIq+_?UGU{+J1NPWBTFA+m-n|}|MF#l{3e?>;4j(|u;wUO zj6A~9uHz00$FAFwQL3)vj5&&=-tV{_gnA4G(5cvDSv!TV&(xf&_kWb+=zaz(Y-enI zd8PIVrzGb@IVh1d(lflc$-#!)Ki!Q*P}o}4Y>ed;U3(7ie8aElLtK6q!2z8{&atYwW2#e9RfrGVq4cM!+th3 
zFXt??onyi6EkQ_(7dfW-5nDnhjUW)WtGjdgAH_pTsm2a+l62yrF?8ydu4OGKmdO&h z@lar|(munFvT~G{#q9rq(;iYa>aa*theyv^EA)l_EkS%Tim|jeDOmC6C`PeM)alVz zOzsw?Oa=3>yZ5=X7Yu?czH$VEDvWMV*PVayuzgiv0o63a=Z_T}((j3*n#+pxN~-IX zd8l&o_8C`x)kR{bn^|eW?0J5jpouZnTY^~ZTJ-B~UB(`I0;W{a`AdLwbDh0J)Yjg1&_Dr>7&e%(7(>0sH-QzCn)Wx`pojgf7@h~UdpC; zFN&vV3=h$)81Jd_^9u^@8<$fdfk!m#JDfc>5pxvKwaFM+G6+NQ^v~t!Tj?~Z7V8MT ze=^^?tQIHs7xMDAqJEs%)t}zCXgEaS{hSBPQxQIcZCMcCN;-+?H^9~;fAkU$cKfre`$-|Ppq2GY{g>bAL0&sooAMBuA_`$Y0fJcWvi2{^UO~m9~=u#%a@jl@f65 z^uwLIK(ApiK}>b?o(lj;&;8WQo$g@!K4b7zX>W#Y+z(ttC9Q_U%N=P;R}+j|8GV#8pEUi5twgGW&y7GmH0NI0e~EQqRI%@I#i*yJffGyb4I~Hm$-S)0 zAstY%RT7RGc3DAZQBVKrktG?GLahuM3F^#g)u5Kd8$XYOX;+R6Pg?@*di5#;tbP_) z*OG?PXxit5&Z7OSGRyzF%~Sc9Anb~q)j@{0yCyLxx3IUU;g3(u)sEY;dbly3%drA!gXSHFgng13hT#NlPljr z$TML*nP-sh^GPNqs=KnpA*esS{qtiX^`st(F>KyFRQ=BHUY(Bke*j28x4)@)8l9W; zfC1|6<>x*kpL@rObMN?=a}QK!nxe5xQ>=KN3N*$djZN^}FJm~+F8m+x4z&d$rwA61 zd3~sWeTeIUt^Dqrh_HnU;og0-av&yxi*Z*gc&xvxLOjwAqlyP=v)^I&o7D!5 zHSBgrB#lOjxZPo^23%i`IU>HTGwLWB8p+=>-k6@lym#>z0(8q~6v7cPV6$*uAo zHYs)@;yW-8X2vrY7%mF$cMk?{mV3F3ATFSTIXlYNw4vh_Hyt7xKJKPZsh|#vc8&%- zEmS$|K0&3?h*eHU(4x^;f(~bur~xSyEl%+0nQD=7Jbed$bc zYM5HrXmXfY)opj{1O8PeEBRBT zkHuz+Mks)$z^gPoU@+@xT5rDc;o{-%72DPD2mDhpUp|Ioq7}Kt#czO(e!o>jy0&3~ zAs(atgl$!3oi7fJ4LOILzOi|rzPKSkWi$1oKJ+?No`Fh!ss3{IQvG6V5GMmaO{q#f zs*}?eO0_7TOhC;uIb;;JOZ73OYOGYXyk1eZ`NlZ492CnjB8{cd*Q|1>R>qbPkF_0q za)$^ggVkiQLr2w;9Lv?1A^g{C^vTtSM*hn~=}oiO&u&b|Cmx(M!E%yWbh4!A>d3@- z6)DeQGRgj_md>p$_Qp+zjvU(52o8+gxuuEvpD5*=Qt8z{r3Fzk`&^<^P&xgsgHtW5 zGm&U|WNvNe;SG)Sf2SEZ@@aJH9}uiy;S5m21qY|LxooyXKGZtW6pl5F%t70mg6;ei z^(R0RU)Nxm<6GGD!b@jad}tcQd9gs=o?)>ev=&5Q8)506L8**l6WDGg2lHFl2HcR- zhBJdh+qP456FmYBn_Q$F9sg-K}=F=pB@sn^--+&(z==Sggv ze`q4SK}}`qYsP%r5!p5r>RJqn<#HX8jU{urP8N>r>?pxS+@Kwh;R|0GWnQC|*n9d%6ekD|S5M-}0n;!@j0OL?})N z+Z*EM;(AwlNaoki5NYyXu`Sq1Wj<*QZiFD11~*_FmI6m|y0!$@*45R+7#YGY6`l(~IkSmDgd9PS&1GHJ@2Rg231UO{cw1)YK$3b&$Dm?QYOAa^D{r#0wAE!b z2OAu|+OW^WdP4~~l0duL3gs~l<$>&OC-zSzy91YV+9$pBUH2(1PPNkF8ka1NT4r%5gJ{v<(rIG6Y4<}D5*(EzL4Z=k zEs=rdkRD1AF)Hgk4a3c~?fX_&C$>B|X#teZB1jGc0G5vRB25D5@2S+TnwCJ#;M_>0 zcys+gt(6b7RRh)-&DX}SyB^ea9~g-f-;`Zjl~(hQBxe+KI@WAJb8|+?N`qa-cz&WI zvuij3zKTLNNN9CWGJf8`3Y-9O1!oqi;&&{KJ>z&YE(YtJ&gy{2P<+ani1_*9Jh>KO ziLDEl$rM??oP()H=3wfPIhe#V>;&|j#naT*t2vm&a|Lz+g;tAa3h?_+&B2svz2(KD ze|PbQMzp(ofMHA5PO z6dIiAY1p>shMwbN{4Ylu00@E*B93L;tK`&1c+tH z3RQQy6pNjREp_$R%yyHhs1{ek%SW!&o@N(8+|+XD*_rOS4NW=~MW7ihreSQRJHNfl zSH0$@!JBoghNRRib1=WBKV(adq*}JEs?)-rfy7C*xn^_69w^_VxAYjai?HvzY}eNBgsz(vkkO+aK{$aExa~-e3y&E%CM2 z_q6R9YtrFV-Pi$W^}YoySw{x3C>F=2WG^)C<>qnvT!d;y4q4Sjr67k&UMQ4`%1mE!*)j{=;^JOvQ5#FHAPP?ufggCu zI>Gn`B1DqBg6hgVW7N~o^U;96P$qAU+h6N4yupga>+oU?_(`uX@HTXEDTr=VZ$kyM znA{ErM|5bA)2L=OA3f+N0v{EX=yXhGYHL?~rEg)4;9t&%>%w(9`#j!z2Ge=w0d}^z znM%)tmNV7k;*wG=vbHTTb~U5Y=L@iSH7b9uP(4nGl}faX8TINo@5RW8C89ILX{7)L zVsH0t-`t(qHJUPMDHNz3$nRX0-M6~NH*n*6cOq0JI3%|dcWDd^WfF?5-oC1tC->B! 
z-}kkdCe|#OLSD|wX(Wfm)3v8RJK63cCHAQ>e{|t8#M2(E z4(q{o%e`l$Z65DBr?c5~_49c5MNAi8XlkOW;NzYVHs$AmxjX_aHA;ESUDE^&3KmBE zBiOluu!%zDmxhGpZ%iReR4&V~k*YN6mRd+vPQv+F>cH1$$8Md>26-W|`lhE3ga$hj zyc*CtjW(DWuHSOc`WRu$4{uEFJ+V3Tg48sg4-EEZZQk70T*p+q3w(9mS8nVNuPWUC z)Rr~R{=);iTQvqF@3a|hoZ7${hHn4H1nZL6%+3d<+P3EXdW+k5`*VA1YDab=-AOHI ziK9?=+*p^QI~;!9S(>)+^Z2T>n1$l!LF-xDI5SluFI${$JVV*gcjpT7mz7NBN;sMv zS*2J)>Fqm52}(m3YiWbzw|j#epuumhoYJrw%4EcUZ!j}7@wU-vvl;$&33VGP6g0I; zD>#gy0Bv+2q_!PO^v9GJj^in;TVZ4V4i?9)=%snMHK%p4ewPXUYSS;`5ZYlmyep^E z*0)fxE7nb&R~ChvDe>^H?D5mwSK_o?jD&)cODt9PmJzwPj6k@=k~8!w-ujW zJH2gPsIx8Os=xip6Y+suyVui}@2{TTyH{pi!iae|@+7f5wo|qMXDzY-lwjQgE#N{Z zSM>Q*X=DJ^Xs9utR#o;2r3foo|9!b498grKOMv=cJ)x3h{Z~z|)PHhim(aL8eR{>p zB{6TRU31H`2O}e0wStzUb!ttxeNAfm!SNd0)-kv~x&P#NsG;!m+)ZEI7JlB>Ii2g+ z((14_Z_W>X9=x&UA0K~kSF0AfU`{(ijhw&@9DRC%b&C4tT@Q|~|MHQZ@h9IueEai- zL~`}chL-8hfIO?(2Q}%nm6)(w#)RE6CbTG+&~i;oXenbt%V)xb#A~&MZyvqlnH`bj z{%;<={h6JS7o@g5qpPm#a7wN6)`c4jB}};gRfGwDaCGA%Guh_74{a=Mp~XH2P2q7e z0>`yc#bP74EywaMo`JuZBpCghBhWHUWrX!EgoiJe!}ZZ{CKz+D2#v}t0V>YJWgx%sw)#J9YfP_Kmvwk*wH8%C6AH*t=gcWx=>aOVz)Nhcn$tL8AMum ziD&4S${_NiTQ$55^+ku^lYJRVr5W2+Mw0t3B>eCfOU>gqonyoV4Vyo6HafwjDllZp zq$>azLix%&KX^px30<%yi{jbGf|Xm+_+ zgUM;Gvb%$BU;DcH^rj`C=*w+tjP#}5J_r<9yU1INIzv^JeF-S48b~UoIB*K4iEUdpKP?XD|c{$7Zf>II=jP;)xCldc=GM-Qi zUYQB!${P%HxlqmTd-qn6brN4G9r^82oENFbV=l0Wu*23BphZg_Uj7l5r-P6~3a5Nqoy>)%t#-I!u z+mM%LG8Ucw{^ZQlGnq5jJu%a4v`TtCZ{v-2UTt+*-TCWQwQtS3b<05`g{P+Phq|y2 zjtk#~n6Uxt!(LU|Xw||?ISY#qO|1g4>$6~27IbDoLl#tJL3SSR%$al!hwkPS*qZ_a zDbSn(u@r#4Rp*6+7(feCC^uYrDcWx?!qZ?$9nj4${57YAJG$nDWReOh0JQ5W02-To zD;Arad{55wma(Y(3B55c28ZNi^FjeuM=#A(AZgd9fTZ-Nfu!Gs1MckTtrKkl&KO^P z(mS5pQ8A6@cz7+!XwatGs2T{-yO%-qQijr7N=i&c9=g2YDtKm)Jz%lY52DC# zqpUSDknjGiKz@;a?kXhz?bg|+_wD$H1I^)q1Kq6?5XosOAv?f)U zfmptEXCdnA-rbem+UCCduG{YfgKO^`k4MLD94cY?(TQ}|k&O_`AIR2kS=;ZCvHaF( zSJH~md~3$taP-QP@qvyuue)7F^ZQ`mGzNR`r)4zXrOdP~NAsQM0;v0jeL+qW%|Se% zidI7Uiq3lp)}y{VDp%1JJUN5`V5Q~AzM|*8Cb+MRvM;?VtrGG+ZAy;b@(e`x`GiRg z@u6B9&5qViKd`YHPo119JhUlPxA&<-qqj}uLj3c-{8YAMqQzl_E^csDXB&89?SE77 zy~*S?p_kNfMt0Tb{_BK0*|Ph=vGtGL&;#A7`=986_&%9fy`!OZTW5d~<+t+ghPL_6 zN`&7iBm71g;Tsg@eZw`G_YGy{eZyyB-Y4G49C~(U|JMqs=7Y~3MBC@Wz1y0*c6Rzg zz1v#Q7MJdL^NWKWcfJ1L9dCYNFn8A*Cl7yRD%`yH;R(2nwCsHtk$^1x6}&@+FmH)T z9L@R)XT9>QZ^v1O!g77(Rn+YvDW1`xN#Blh1xJbBI`-3=3lSi_+&FWur zW*tS4TJ?vFV6ht|H?NZ9$SnjPrtZ+W??ThPsnhZcctf^6Hi@=ofns`ctC_o0PyN^8{rxNBp*$x_o9uV0&uX;2~-POACz@N{bZZDSEz$KlOi2k#j9 z9^Piu&=514T~>?!M_n`HgI-@twPg3%5NDy@#Opbi(_B5UBh#>B?%0MWQO?dLsBZxo zx8GUD?M+wV_FPUZ1NRRhXn!AJ?p@-CiUxi%V$WGiko`jh*WZ`n`YuZRu&kcTu{!ww z#_ECQ!rxCfZt1UO;jph`m}q{ot$9J|ais=pTBq_=Ali4sSPf^fu{yzS@>n4ZvnBH3Xm{L78>}Xy)j-*j-4Rb? 
zM6&s8l));nW=_w#943F~w)VPpoi$p5ism;#s}3%FMBGhQL4cYp&FUt->^xXF7osRE zF%O=}v0})c{D7?Sa-7z?Q>zeRCyW_{*O!6y90!ql@`FM}QK?)O0`_ti$0wt87cRr~ zyN&EuR3^8GGLMVZ&^@G(Uki@WCU?x`i@Wq+f#yUPAIFQ2g1w-{>wB*pNcA4YIVGb* zvKa7D#-Nq$?#dArXT?XZ{6cvf`|k+}BB8wUE+rJt{PZGbZ&omS(>&mEy6W~{L8YkP zAyw$p$G~;D0$1>~|LXq|_a)$Q6xX_4-P5!6OfS>3@B6CRXQa_STCK&BWlNS9d0*s( z!N%AS%wl7;LkX5p-Q7F@6wvA4&ACdHs-3Q`ZbMk zx-~{Yom{eKWYrdxU}QJmxY+A$9!7kWJpBpIlvabNJ0wKiiV(E`XWS#^P0ZG_AZm0E zB5HK=Y=}BH7g4jw>#2O1j5Yc!o{&I;@1Hs;2s)m;LnAOW^=qBoV$yv22!oy9c!t^* z^!t%52WfZ*VP9^8eTfkDU^NFoiX^dym#oq77M>vuP?&e3qzURmXKOwcmea8}Et3o0 zlsXp&@jQ!~S6{7diM?5}OwtS9oax(YNY2mH^-7KoH4sti-);3<@7b{7&J8J7acu{L zgO>QJo7b+qVOh+a8*MJaLGbFjO__m|NuOHOykVK!(y+3#b7>mFw(-`k6?p?-I<^ml z13l|n5+i*@m!+{dSXbCM5DRp!D#Qjlo9*_N-bLU)+lLH+oXeT1tFy!>%6zbpPFWod zxt!fy2%GG6c<12~NXdnek~yNc5*g*N3$9AV#|fS%+Hoqmh{0fO`Z&>f-0xQ;CaotG z^Jk43^{)}uO*DO6@^>CD;pz(ONhxFDSrDT9-0I&Ngjk7Hx{wWZZTs4$=x|$zmCKQ5 z0j&x)4y6V!9te=eLT7(q#kHfM)Ve#?_3j*P2?#%MHjZR+LpjTa&Cb@A9NF0Z#p^Fy zp4ad^qvmv4U7U7&?SA+cmnZ{Kwr8eJ{Rc73US!xxwMe9gnL)st!tWhEGa zG+G2{gghbJs=l@LkgeRLp{UZ>P70HOzt4k*MU7xPSrR7cz~9e;3}xp;hKr)B?%Lk7 zW2`kOL?-UKXzN#3Mjo-&FRpJNN$PC*#SQI?69TEb>Xkbd!6o$GuZ>^*vpW_I-2BQ{ zcHg%#(N?;989c^YOLxPOd@CF%br2mUh(%TXukJ~wAP{Coglpg86TZz6;bRGJA>V*$ zEcB3jfLQdQ8bx&p){zSz&mgQCH-Q=LZ}yzp-#S{w$WJ{@GfG;fV(52&Cm1v`G=~8t z4cGTNvr{L3UIFiRpiFoQ4rRpS73ANpQvuFp5{w)}f0vR`Xd9d2)7?^UNm=Z|B!2=*{@DM4F_Q9?%-fQ)Kx8Fae?-&ui6G8L!ct-K=+* zjT&+TBUD7;scmj@@YI_~2MlF?yjJEnLzy4VMR7i+LPh2`VVR$3`<>TICpMWs{w$fF zXsXD3Z`jJG|zOwJ9(*e&a-v)$3In!K&wsT7@2pv)tlBvNvUs$ta^6 zOhK^^0fAuCsmlLJlChCX5I?M!#x9cjtML35gHoS*9BJdA)F+A@p(*?!k)VSgLy3R9 zDiqF;_F|&&2Po}nG59f-^v7qV!3F5gBriBA;N{F%kFU9X-M}SF3LcIRb#1+4eW0fy zsFBkEGKWIp&M&AN-m$<U;m+l!!^ylm$%Z8hl_ya9NNXoB)qoiZ5HMb~P zbK4|qZvKe%tWw@uk@D8_O8FXVZU=lBy1bj#LNV_R@pACJQy-uZ?E*`FLo29f1pUK| zMj;7$wP@2-L_I|;hW$4L`_D(DiH@qIkAl;Lj_{*wKX$Flu7m%S8I*IFAX-SOC@3>6 z4aulqPKDkyO}4*9eTJm3);5LSESV;$_P0vZJV`Gd}=8b*PVE3y0_>C!hOZPB%zkO(^=*%W#rr?IM!Pl5cSxw1Ivv+Xuh$O4p zBz_;SXn(RO4-!kXB5r3xoqzCp6@}wJITh*o3Gn5o>8&9*5c4xILIa1ruEnMAP}1v+uhrL0FQ&6H>5X0 z$sK4J4#)emc3;(}kM2funQndU`<}bz>Z6wzTK7G>{~CCFEV^jV!15~wJdu&zgUhZM z^pFSc{^;P^@gJQ0&OIj&tsQ^pd0z)3`^}NG)xf};ypnni5CRV-qAZ6Q#tE#%4 ziCNvwwNElap|PUgNnrKPC{pi)56j1D)jRX;u}@YSAtg^jQu2&qB~SRVl6-8AlILv3 zS@8m#`jBj$!0N%JG-m+kDB`0}hnAK1S5D_ffNPB*L7 zJB>b@Yra1pr|=e+Hz0pLZ^VN(`)W-(PUrF4l4E-ov~FA6$dGhqtm4o2iA+Z8G5(vJ z#edo={`fgUm6jHt@k9HzO;*dMHG=&-voF790*F6Tc-vk zF56DC5C>jf)%{G*;K0m`?x(J(O0@kIfx5-!xvX^#D2Ur=AQb#7hJuTct#&RyeQq=; zpVa_i55M;9i;}EDMj;eW?}#Fa}AHS{UHOQO06!8J&d!_Vl(_lC2#RVEaXR#8AtV4 zNpZGL{eS4AMLs=x@V;BNv@pCu=dc=Vyd3*zJ+O+mBOk4smtY^Q@BVK0{vTF+v|848 z`DcB!ASFa11{h2U%e;W>foeeZIEr)~K=#r&s0p=>BJS%^j72>%XX?%9>=`nS7WkDQ zx^Btb>Xst)yQM&j@c8}{@aRX8g~)hOrFO#(DH4<#SvOTE(~^3Tnj~-xKn@4-$0pJj zuvMQ>6Y6-Kt%5-UH5dfNtHbt76sC^i$gkY}x5Mj~{ov$%cb&w? zgX{0Td})Ja!XB!C9@g5U21eh2W|fzJl~R^Yb)6r#=q zBDi;SC~98{XXim3k^%%GJ`s`P7DptKN2J)65sBmxDJ)c?lJM4ls&?Y2rAgZ_M~!U~Gz#hv*t-{Vq_z zqGrHQpBJjmMlb#U;!{Y{b|twYw&2;^N6Q-6lj=7LXbzXSovp2fd~<@%1kPQ0<4}09r`C+sa zb2TPKm5*fvlqU@*YORj^c|ZSLuuZ*=TqE_BOdY5GL&Iy~+D@$izobQHozs(c@^=b` z@^>m=`w~yE{FiE=VF00r(<~yJKBdDsmr?%dpZdr@oocFhN1mX*4pCzp@o8mF4K;vJ zJx=^W;W_n*%9Pq5&6^F#qltmTs^Mu!(=hC$Ff0wDF&JhSWp-xvXQ+$=b%XAHEqesu-+#Q^`7G&-kO zzfl>em2N=&Fyon7R}@O#rjJmzVur$>b_U?bsX0Ci^MAH~WFQTmSpcCc$E6 zHG)+qAO}r{)!n^i#zAuy>`M-s5HCv}nh-C)g0cQaIMNPbtp8zUP8Q%Cs}?+9p}_Q`J^HYQ;;fc&Qb>&7kPgm+@$L%B8^4o+?*(K#fD( zJJ=D|gjZ;qkqrunji3&RK?3UFJRgaXkZ2&zFh znylQ2B;8OXYOB)Fab+4bru);|(o`%Bpjd=!R)S^S1Tx^`iLByc_&6E~%}Dmarp*Y? 
z=}_~_nn8VYZ}Vg`fSwaYvnAg*@JvaO;xn1$ev--a7HG6*Y?Kzmb=%kynB07{g%5okM8Vpg!;C2z@t6bw-v|AJDmcTk^R-( z*iR82@aW@MX*&hPBlauktEAo76~(Qz8(Vz5gm2C4#-`8bnRl5|XG0aHL@lu zo~9UxNc9%2Y)YY`A#zbE$UP$pQjNC0YSr{kAR_@KQ_%1U2T#|NTOdvAD_ZfVAVohq z8u!FKM0SeoEUHzecfxx$xtFLbD;sC3T3Sq7ZP@fq3Erl1FO^`0%323MuY8`*-8nbP z7|AUfwj5=UqlZ$f{${4tt1dPNn&WPzOeLq}x@dD>xP5J#OPg5Mza0!~xZ530nZ_va zMp0nCo*FLJo0Bbiqh8LLc%w~cF|h8!aNO6kbZhr!C+dR^$TchA8xB|f?Bd{;MQv}t zzqjD;?e!NZ)-nbDTC@`EK(uJI_C*i+NnI#Mr|I`-1WAI~tHBrj{SSIXxG$*6^|^)((-GuTy*pewl|wrF>6a$$oT zX;0M}xjWMpS-8ZU?o0J4)ks08R`iT6Z)x?nr#*1FNK&+>uDLJRwz}0ZJQVK9*$su| zEpCkYM9Gum}}p{2o*bn|i+u3+>Ud1rH?UNnhjhgPRmX-s<7o?8&>+r*QUBVB|e zFmO6W9wocy+0?%92Ml+RK$Z z9t868BPgO3Nh{>Q0ejYOv4<_1%Ne$Ooc!cdlhyvsYFxx`%HaEP7K2_5o|G$4KnW=P z!fx=bGS*@AK&}}h+ab1FRo(e=pz1{jUMq40>-`bhAN_Xx3+5MRS!7-aP_fDTBeX9% zUjKF}{sr3nVl7L=QlD>cixeq*qw5*N}Ul5ne!d-=b%S*nvi|A?VP;VKTzr$+flS>oVi7p44RTr zw?>KPJf+2-augP2&A`2~xp!@g^>)^s3&w|>qQ4;!%6qv)!`cPmhK*kuNo`)gv?D+( zH5#KqFlcF|QW5A_mexD`#TBjIyiYXidsfz)jDalVj0_wD>mX;iD$$*vB6#p&#V-zg z1Tl(%(!Fi{0&~@TO>t30S#@uh@U8Rt#7St3Tt`t#S@}H*g_*OvISM4ow`w(%TrCIx z1rZrd%2=zdy?}2^iBI<}1Vt++i2Eii)Pq@J& zMYWZ2Sb6wQ8Gi;Gh9a~994^|Gu?$7{cpv!}AMhPC?B$wVP2}k&&~%VOeY$!V`aS1v zXxP}`J6JO8rJ4?wsF{APZex?8n9ntv8;!MQ^Ep;?@}6(s)_Kujc*XKSLs0MS-qPK@ zvE8|_f9dLn+S-e4oF^05V-Z8G;XvNY_YW=X2NzvzN)KcfY>WtzWWiH6n0H#!J<;~{ z{>XaJm8eTaj6S!MFO;9NgnV96bo)c5Tsnz!O+aeh0I7wL>Za;W3CQWl_pKS86pbOS z=qi$@i=g-@+5r7R-Cpzain)wlLFxl}e<1JUSWhk( zTllNgn|D+?B5C~NoRe4+_b zrEvNKax?k$S-e%$=?SNrw*bo_Zodoi$RYYVNCL(tq{JT>ZqA z{e@*s4trs=&^?hiIGab|*i4`P1YA$v3Gr5;ink6G)fjMnYTr*En1i;8Y6Q1FE#VvI zjeIkMwXRnRHiKvrFxHX*C`O)(VBu&1g_kB#sPDfID=3v-faPl04nP7x%4meO zS|F=gAwBDfu`1Sj;|YYd;s|WXioA+90n?%Ieys~msE4R)5p6S=xHYD6gt`uuU?nPd zs08cGpsv8@L0wj@y1Y>Xm)E46VQvg$oZ9+?Hy^S>WKUBx8_IY1AZ2Yrq`zS;aIo6E z-2zdU$Qg8;>PlZOQD+Hdco6{yL9gfaI*l#U9r3hx4aG+=>WV|&;o-Y3CcaulT}O)A zz9D~KlfSRe-$bG4+HV%!geISl@F|dbsLK@rkq2EIr+2yNuKg}O&_67(7e)n1{=_c$ zziOYHx7PuFN>}Qkk}L9Hsrm`D{^2SMRz6_fhEmyibcpqHi8rf3geZ&;_f58W+j9X< zu2L!O(Uzz$?c#Kywz^J*x)Nr*W5GbK(UFTdcWY5z98P3p*|;ysi=0d&vJ9tDszpI-iM0kCnTSIvvxc$9 zuKx5VWGi_qMq`_1(b!E@G`7E}Glp!g1V}t2p{#xC1G9i^*IB(BA1c+ zuwACW7>zFPF@c`)F9|GlKOjZ&i$tN~oBR}!2K$ft>-=>Z+Z5z1Goe9a<^c%5dB`7+ zMy_!gf$^c4=?TrzyXIjtXYPR#tRG1}T8dml8y}jPug)8-CO8M08RsA~F*3NV80|>d z;UqxIJIN~xzQ$?1v!@)My~x9}d&J z8@L~UQ*;3oesDG#%*d&5>i!a}&$%B!b9Uhe=b*ui?&7R?ELaQ2;@-}+E#2!{9nStM zC-kwPLxUVW(E24}O?O2_;2Ms%F3Fm1PPVu@?3^=hcf@S$oAJ^1Kx|}B|Im#a8fgW? 
z2t2RX$P@~>GuIQ;>0SAObiPsKG6PAI&K;BTyOA8^h}r!K;B^Uyy;al@2AbCa-J|Af z7{v0B+2>#}+N^uDgzwG4VpKIEZ*8<(jwF@zNs19s1O}dz0lNG$qt!Cts7kA#$Wg6c z(5cHBH2tF(pWw9B;WSf>0iv(~?6VB)Gvu?_RgGF61ve0S!VhjhyAwQ>-3ggzz{ALA z5gaV))bR$I4}sA4E&E*^pyU3U4m=0)f(5ltrda!X`2c8pqp|lrsHYU`Q z8F%NV?#}f^w;@>YIa6Mpt$9sX$3(%}*U~fk-x4iN>3oYmVB-beoAsK)E{-B$C`Y(VTWr8kolq!pDS)r8!Wc+E27hCWi@7-OD8x@ zYz%U_4^qqxDHerXURiTkVpHJY-`S`i9Z`{V57o7fW=-CX zb?seiTkW^-u9)2#b#RO&>~wa4H3K(nZc;EBt-!i1YDNt?JEYS)@_o@nh4~vB29kQg z9kbdZ4y}N+bOrJXax?896*4`X0=@`K(&PbfJ#B#{2C1Zl{32<^b&OI;9Jb$|{w3VJi;Tu}LKXSSN$)*rgI5EOFx!2Pz@J z>A}IRxM*bD}nhEgh3Dg~uP=_^!9MouYMJp&gwlf+8dglw2l5FBBGE1(dOBpQfz zVga#;SVpWRwh%jsFA)2|LX2s5jBG89l?qo}-EwvKl09{M-0L>^H!1rTY8Dd3Zdo^< z%IR~ZtM_bL*qzIDFWj`}>XO2~e3jWgaOvg4mv>&duXkT&`;Ph@)|G3VYjjJ-jpJlf zo4idOO|Xf}_w86S-j+zTjj!3U?{Yjb zp+6_H>1^7M&dLoL`143Moz9X=(BD&5R6t%oQ}@)Psaz)G2kBfc4PHh!%d61elc@Gx z=#2V$27#Y+w)}QBn|>EwfN#K)ar9|d!9U=+Ofr9}51!qfO65qmRH>|hC-0+XzsjX@ z33viWi=8}1zCr(qlq-)AXnfyB{(}5A{d;(Q6x*g&l0PAzp#MT-rQFnnIf(N3@!Cb{ zwdr@$lr3#bM?BNIDe|Txk!j@~X;UIf`VNvODk=R$F6~NxJ56aTwRN895~_bx8JR9A z_f+>J))>^~irST@(PYG+#v2S(^WNcoS2%vJk$eJzxoG=H-SD*&+1%>uhCB8|c@@j3 ztc-Q6B{|aMxn#4WAsNu{1{I^B#@r5#!ekP%o4&So!?#L}ULWh#!!@{qcL(~m_uPDw zO3N$M8e@ePuaO^MT0DtqaT}yn2mO1(CS_uwwJ7JKMz13D^t4th$fnIxv@;Zh0t##r)bxW6 z*Ly~f!&`ob6-AbO$)$I|XI@E8lfR+=0-yP5sXvdA$8i5XQ@sXeeElp@ea8;^Kj8KA z()GVm@6zwX>lg6#t+4(3`1(cZ{;T9ZOp~AB>t*nIFTVaMzLvr3CHVSfeC;J)l_lvv z!t0-53!KDngqH|ZP|}nBX`e%*(K#Sdj_77GqChlpqOv0PPnYIZFjs3}%E8IFk_|QF znLA56Ns|I1|DV%!MyH@A$0mPH{*C2TB%l@Y zBZq0FoTQY3mi&!EB_m0hT7GZ&Um<#uMhD>}Vi5OVfwK)+iD33eU>U&>F7Rb2W9s1W zU4u%JFUEQR&9^8_9xyr0hvRwhE`-g`YHfAC zWozAGt2UwY*N3<6bH=%}% z2lrdn5kb8wjTAx$Wm{(jQ;Og#u*0oj#SxWdT8k)Pot9rJS;F9}BS&7X1&#^yo^tDS zNq*^E-AwKKVYuTH@Y{TloBZ152U(kqeSGrvb?Mt|P0PC{COVciJ7rrYzqvUBANx78 znBR2ks)mhy;Zwi$_H4!T7#rU34p#P>r%SMUqk6i88XQgPr%TD%yp`y;M$1&2t~2ke(W;_k;j-HG z4EYUrgz(F_PjYP7y;FNK9iwE&Ka%3wg>EX?jz#3Mq=Fk=D+ zx~i18jk75^n;t#{O}~9=w}OXDdn2oWBjMkq2f@#QXLh2wVkbKsx1dh;rPR2ueR(tv z6DEdohS8aGw=k3Q(1g<0zHuVMpUB{c%%82MxmtA&wPtd0`D!vYR+Efmk(LG%lX_Rd zTMVL&Rs0F4I3vd!Srzzq02I90fO3mD#a@%!!pna~y{^z1EV==;s8Nx>gD(evAiayH zo~4l6j!Z6t=P%8azhyPRj}T6MNNROfPEKnCE!urtk!eJ0MeBK($xeas$6CYsXTVZ8 zwHIK=a0!bSaj&H|MOX4f<_(DiXVj@p%T<}ZDjCQ^D2M7<60A5WQ#P2JWhe4|6{}MP zbvCn^dyV&tKt?K9$lqpG;S>sbn?cWfhqGHP0u)f3)xaaZ{}cryPzWwF_>!Z$F1I*b zo~WkrCEqa{14-p+lv7xRhis{i z1xd?oj#%6{yfB#c>SU+dHui+d|25N{?^yIQHdq+Q2eM{G`BOt62j4|PAVfXUUCEbt zB+*VZz)AgovG*nLZCz!)chPdSOJ3#u+OfREikCR?9$WG*$FUuGNzyE}EXlSKSyClA zc1mbflN6^DNXjrUv>hhpb(k_OOp{V5Wu`cjkn$#F89G2`fL8;*ALR|SHHA`lnmF$} z=iIBsS(-91@9}%mm&oUwdzSD2egE$}N4~Q3ba`%@0iDo{xs`1fDFcnZ=8!im8&(Wb zgCVo^B9FW$ezm9Yz1*-dLq_}8*MxmJuhP&42`yo)f`!q1>qT#Dypep@D@t&8Rm6V) zLuqLxdz~FJgs;(s9Xm4<|IPy)Y=zDGh(w){qR-3G$oiC-c^h*ZP?}2z3+3pZ$S>Db z_0|{-riKk^1-V8|w?cZUs8pSw)nhNL;dRU_#KlVPx!mdvscX+i4fjK13QKcETRY23 z+p3GzTuyOy{^v7Nz!x;`8u@|jJP46OHz(I88?m=_}gy!yL%6=kbu zWO*qUy?JrY-+TklCw%)5V~6a$Wn5fMwl9oBAV_d`cX!u>0Kwhe-QC??8g~g2+#3t- z!QFzp1qs|H&&-^e^LyWW&)g69(?IXNS5+_hueEA-*MjbQ;x3K>aW?^N1wIx#qUj$;XQ3)K8%iy9{d{q zh<;TG+Pd%{sbUJxtlBr*A|e0F*X zwA{O+BD}5VvLP!QFG(U&3ummeq}^2drbK z7qdksT^{>=FE)tlQh*)gCoNng0Y~$VPBA>o??`Uvoo;0lwQvi^fc1j;!ngP3%MF7~u4#CWO?Hc_^&$z33$!Ddo+DK|HqaK0Zq9ze zFZxKQ96yV&cI`esWPLRf80U#unXRAM;;%?8Ej^CJeWH_wJ>&l20MS67kARa!{&A@+ z)ro__w942_f%Cbc$kRmvT0%v_4f7$YNJ3%;Gn-x3d8=E0G+$h0Iy{ge(_~7g=OD{X znloHasqO5`8k8~4Hr^!8O37BA&z=AxWvgOlFUFj!9WS$ky{o#(RAnfq>)g+ysHW89 zh6PL~TIf6myCa6&Zobh~(+ZrbQKM)?FNT54AM8A7sqHj#>Ac3LI3LG3OH7mJu}th} zrALQGJ`PWe$vfg$EYnC?49*+ICZNx|l2rek%}O_jfW0Hiq4nkElNca7fM?1|%U2v` 
z+npo@4ZpR6{MjiR!siP2%1w6O5%lmCQB!U!Y9S^jFPl%rJLnCebW1Je-qgooW4N*` zI9Y^Z-GqMh#kF~h!mm?#(^fUQ`Dyd?;h@j7Pu_gIQ)*OxSgOFE+QG3?N%^51kZqV) zGKkkLy|{t;UJuLc;TIpvp+(&jvp|;Q&qrD>D+FVJcD3@Zs;e@=MQdM4Y9P9gC%g|o zozLEY0?^HiYiqCt`lfq?I7Fz^%oHdX-t9mR9&TOv{0JmCIstr=u1JE zOI`5&^sD+}4)yC9#qxYGE8BQZN=e^>pSv$sH)?dv-a?~`p*b0+WP+bldAnVQHWt2# z6(`Use|tYWzumNkxMY1PaUo>SnW8tE$t{M|qW~P@Q6rMEO)Rb-fxghK%%(h?|RV{+kO)Ke&v>fH2=P& zxQs-(Ui$cRl~Xr4aKI`7C>D+E4N1CRZ&8CWOstA1{i}na!IokB1 z%O|VyZk3C_#ow3l#!+sU-wZ>1_{q4KP^Zb)l zn5WD`fjf@YNG&h}DJjEGeybiV!X_k`Wp%Mzz*mh4w z=z#&^+c+6YJ~0)=V!aG53$$+%h183A`z>QCZgn#ph z#uCq=m5{Q<-YV^2S}I1Yv37}`Uyz`-W1tdkD_YxXl{dq0e^uzPLW9SHWtQhto*x@5 z7AJxgnH9WR88K+WCdCac0QC-R=hpRYuEk;7)&v{LAF^yNe9bAGc4Mg{CYr5vMfuM9 z=e31-^JTLo2M4h?H;pL`mOBaUhf24F;}7sLnJFcvSC(Jf3cJT?7m8B>w=rX(SLHB+nlazrP(%w-mAIU zOY@Y*m;6-QwL7gGE4sd)=R#60jpMpkz1=TPKZ=yqLU6emtO#w!P>Ultt6LSCElwMD zhH(!lh);7lEB{0@R8v1^dps^@v7uzgo@yuJdyqfD0(}Bb8^fAogjnI%pNUtX{BadH zbw_cg3+-ronn};stvos%+Bg^>dSm3t&`v?NV1K*OARUxyQTB0^InQSl;!*`db+jNd zp_>6-;%6wLiAj}iLndL>cf}M@)kw|LwsE>9H;PsFFRQJfSr?zXv-@rj{)O4j#Mkez zQepc|8q1nm*7X)mJ-Swtu3B8DOI$3R%WakYM;whRrg&VO7~|@nB`wxFOrlylGB&>5 zjaD=vLeEz$hk9<`PfGpPb>S=(Y%&mFI#FXv`h5gY^D^q{8VtpFeeiaXF;E?R4v0|= zQXQr-J`72LcKJP*A+xToGdhuZ)A5r}v;BUqm~YNIsMXjizsbVu*0tn^=c`O}Xp$K3 ze4j^gm5*8@36)>vxHiu!ymz`)8DV4kw~?k@|JQ_#ax14qt{yVW5qs5f@03<3>K|jM zAx$MWU#SBx7#M~po#7>hs~zrB(Q=1bjq7;@2Zg5>2-AGU3qYGtmQ$ZWA}-LB^~Nv z3UQB$=DMJ0L{aCM;_DgaRZ@DXc1@GE{!iE&XER8dfJ_+G{SB)q=Je&r{E$tjSMT=cw9oG2IH z8TdT#lvtzIMIo(XF!x<39=}t!f^K^^S*pNVQ0*y0d`Z-%&hFuGzPsY{*C%Hm z+23J!@VGhF%A6A%U9-;Btql)Lsf7$1wkeG-D#z_MD=S@Q8Vxh^agABa`+QftFI`GY zJSz`96Fb@Gu0BO8ivo~YC&M{}s`6eeMjN1lw26qLQ#eWBRBhD^p)jk1M z+mE%{`P^P@dNw%E0z4J=@D*)FAQ_AKEYpn@-c8s zL^DcuGdM&u@^`;K1+#v}iHQ=`KwXfME{`z_{zhM|BgcW4i#~#%lA}P8jFG(lt3qCN zMr`_pwxah}lf0_Zp@xL02ANs?QOLJcRuQ7qg6U>E7MdA-!7lDi9l59 zKqDGo5^EhB&A(o(8YH_uQKRws#cr36D|0;81k50fi1CyY@0E&P_$9)NX~8B3DuW5} zb7mJtW+sCSoON-n9Tw%Ozu0)eed~(w3bo=oXEeKcZpN-2i8U%40%4=G7-) z4@=@Dzc~$BW^X8{tk2VW>7Qha=FL{(PM{Y=d{R_H?U)Kb) znwLd)y3L81GhE9wKMg@EChX2)wJGRZfZPx-A^$|!>2ma7y8|*82c#7#UHb(alR5d_ zo`V-%V8TvUFf4j86i%aCjZ8U$0w z<1SE^PV(?ZLcw4gP|2O@rNm9py~hFjG(C#O^UUAH=Wh(e<*Le&YfyWAzZulXh$3w7 z&^PJBO&()Ra{Wi+Z1@7azO%GP?B0aK-}k{fOoK#v=ak{wBo#Bx7M1TiL(Xt~7b88gpD+dH;jv1h zxhOx*Finvm;kWbq6qtCJrR1T;$Tv~h;?UR~HgB_gA5*y7WeoK4=@#}YOOEB^-i>|8 z(1WeF4&BdU6Np}LHlQp}L6Y!6i5fe;-c1G`3pa2XGG4YIKen^Dy?9-HWzBGSHD0wgaPq7}`WW=7iecEvxEjxaJ=lW~xEO)a?=V#9b%9Ghi{FzYuyP@jMUQR@WcI1dJ->P}qCOo0Yqb3aacpUiGhD@Cx6p>@unPO>B9~M>!V@UWu zTP(Z7f>qvCz;YKnlo`-1;H$BFVJ>G(sx>%gri$Djik35oiUg~(V+&%tnC z$h|Jn1X*krDaEQ2d2Qq_WcP)Pe8LrdELLK!wmZ=^Tx-Z!kf*1^T{n-^C(QeIIT7QG zlHpWm$}BP0*;2J^BS*LZrZotd%U}{so`gBm6C7B^cIr#-M#0IialzkQJbGfTZ}0^b z+-vU>)Ci_R>Di$L`mI!Y0dCc!*?Xcl-@i$muoGKe`?{8{tRnteb#c0ha=P$P;3Nv> z+@?z4*md5!1dww;UsP|sw@N_o9|ne)X*&>iE4JQ#!8q= z%m$mlFLuJ-RdnZ$R=a$DK+9xM^XQKCZlqTT3K4R%01<|l3fWzkgDYFjsBOXNLfqZ9 z8g!0Ji$tfPS*`kyepk}zFjHx7mr&i12p2beF^FHcX;{}bytZK!nqTyFR(nA&mA59| zj3k0psQiSz3fnP9#NS;=(`+{?F8TOHaJpy79eGV7c&rCAHfH0tZJDF=bWlfIcG&H= z%oG*hmB4ewqL5vQ-(R%=N>y$JSzyfzy14 zBx-qU1p5bXej}QubpBwieJ8q?CDwYFXZ{BD`pyu(t2?3pBOzd?jfQYR!G`aK$iH=J zkCiyWUkopR@m+~Y+eD;I+ejyxM zH&VxLJ6FQWleQvCbWI;JamJUxac_5UEeS$$#is1A zYwQNnBbp$w#kjPo)Z1!d%=e_h=?=qA*eJC{1VtZjgZkw)>JUBaI9Z-tfRiLs_~RRp zYElRA)l#EwjYlR-X0Bob+~9VPD!OldPq8ihCtw8^`o; z%MN)CHD9)cSN*j6KxT}2`B=7jczhaIWjnwr;)y7w*0nT-l9ju|rDj*X<>n3$@f#At z`N{X(<1cgo=k?qry?%6iBNL*nCKmoF~q+q#v35-|S&E3^}aV?Ywd z1T_mKkn~(+^nFL5chaMG*4x~b^u*;_M2P<6QZ_(!isK|FAVCmIo%p~BRxG81VeCsA z!PZh9Uq9>`SRYy_2Y`f)DAU2;AUhT;c{)Br!)_7HpT?)2?Tq;5eQTe4APN!OV-LYc 
zo%FTi+sVsWeVuhSG<)v8j={mOggOy6YzShX+El;9+zXklh+$EKdv9zQFQisF4{%e9 z@LY#qu@@5>7c|p&OOvpwNW5U5r_e+xUHD9YErDsmm;wS*;(d#lycE>67jTwT+CsyemP5C1AXdCUb||0t&_` zI>FNu3XjVP0q;Fl^G9AZ%reN7k!*Lp0)745dGV2gGb<{zea*@oCGj=hL00riLA;db zkum!ejj(`=i|OQ-y3HTL$v*n%+aF+P(X->hxeEaCoxdh-?Bqg65p0;G5?+T%callikep744j|7&FKL?XRwLSVw zE0gf%%WMX&sPXLc?7nN!A(Sc>;#LWi;A0<1>E66tuKB2M)7T2LV&Aw!4 z>5CgMx3?L)B#+`gyZg*c{7BVKnuXk~E zxhkGFC`-$H&}m9o@0tEU=(+e6lFaK@kL@~dZPo1oPn#aU3Rk$u}3o!eLkEB|gu5OPl7~70J8w2iuh) zu*`Zq%El`()hE#H(y(ErExYr;m5g)JLns*9If0KT6=w*=6;f_SwFs zv73m=gaG>VRJ}X4Cttu->kH_uj&E^e2|DIfCoiM1vpb3&I*<49YZHNK*KAwg@@$9{ zVlTbxs_c}lnjMyH;BD1KjZr@*x4*vgd!*>ad`p<*yM%)&ou(~DHoL}^&;*gT(;#L6 zR;p<1{v=>a03#~`D@X3;hl9YQ!af}q9p3h;-^>flxcD2bib)+z@f}PTg4S7}^I$tu zUjJ*-JqA#W^zh-ycZP}VG*XGMwAVoj1#S5eB- z0!$!XX&GOc6tR~dlH);|d1#x=aryb|(vL3(-bXH;;hte#L8;SEhsw)eu5ai&BVLHt zgma7d4rXt_exr1T`G&gAes=cn68H1>4d}`}EqnzK6i?RbK00uI6dLtS^v&@}?&=qm z^Nae8@!RN*<56gF;jnp$3;SJrFZQAy+rvqg4D=ITNx*N0*@3Qs#iiy9z9kjYOrNk; z1aD_IQn{Zw_Yv3Nz=u!32kcFL1nyi|02BDcD>U~j*QR$CVF3=X91`H+??kp~4F(|a zcLK9N0|zhyJ+L(JanVBB$#)k+cE#hKiI4F6duZY%Io40|MqiQoR$D4YBc ze^euK=RyG{!6)p%Ce9%z7QpuToH-eauMgRQ!;llSz$JL@C-8k;;2|Wy4RQiR4ub9D z03XOVy)o@|q2Aocg_zKTn5aCV!b$Z>R;Z6-(>SRC+SRbc_}9bQ$3n; zcY~|>f^qvHao0?HYC?0fft%56RzU+!{b!N52f@w2?0wcR33{Lg$a^p|s!ez+@?3G{R`xqffqRY7kE%|{YER6nisY2Z05(RzagIR zSyS!y=ZgcDkGMg@r5@GZ78Wq;kJkC^7u2RV?pr=KAOUuY?dAk7u!#F6o_L%1V1O{N z{SUwg1KjK*=4`I_B9q<9=O~4n& zUKb3oA^C(#73SyZqoJ`s9Re*U?R2#_TdIbMg{3AhfvFx1G-3e`o5yNt@WUXtkOt7v zB@Ds0h1Re;OOnRnA6o-44xFG}cFi-B#DigR$uLIf@THta5U^fg2y-9N37sytjpBTs zcc{K>OI%)=zgL7+lVf6ZRE$jedbQhAPA%sWE%g0fslQhqF5gU_G>P;yI5nM-n$BP@Ov>L)@Ar&oi(5 z;K&A)?oh_`3`z7!NVV1UC>j;>OrsRZ+J+i+z(QO%NuO6oP0C{9nSp_Pk{l`t>h~n} zhLr=T&EyycIFY6dso~PnG)4HO3)f|z$wljHtr+X+)o;f#*yyDkR@=R#QcyKXb_Tyl z(RaMuv_P-Cr+a6)J@O03x?5!nUS>$EAO&pj-C9LuSg2gh(QW_1qET-O%x7Z?-WA&U zK~xc5J$n31tbq>>}Wp`?FrrI+UM%smV} z44DZFB7K8%B!Jv&9u&UyVwRW0E{mUm*U$SX;%xAI1D|gi9oV9_8;Y+y)j$frZ%Qs6 z?PqC0@Q&vl_KKqs=7=GE@_;%ung(|Ffz6bBfO+_fveKodu~ZSVcm&uXNkH^*06(Vn z2sTze>JDiasbhjN6r(7`cZst8mT+p+J<>MNp#g$T>ZmXxFFhs^m0X;hNO90fpDc_H zFP*>*(#^F7w1O?E-lyP3Y>1Ip?{(O?e@4REEEi`c;u@mUbTOTcsB=D`p z`!lv!n9`;k1KK_)OB%3MFEG)O133aZQZ4x*kik0RAH&Ol%AmF4rlty5s|m{Tig1dt zAM$W_NnrweP^X8eQ{MaRx#GuIL^x}Z=BIp{y@;na50T!jla#I`{z8IapeSli`~^W- zmwb!CK0^; zps3yt-iSVah?JUxjwRx2%1c&~W*l)4$W-=<#;SL~AAdJWE+sJm$F4SPj#2P{=)o9v zB@mwWVKFUsyE-B3gO0>(1$XUv)ZxcmkFoNUSJqh-_CnMQ-*#m+gNn}>4T`r2dAua; zs*;uY=g%x~B8B3P);aribZ}t7A~Yt+_(Xf{_83e(TGir}*Qo5cYB35UJ4XsrbZ3@P z615EFlAr7%s`>orJzWU#pfR-)>XS7|&B3J@I4%efH9w3}2b3izOP7bD@g<6)eBND$ zqc@9CS9a8-K!Jz#j|}qc>I+0Pci1g#bG-FMg+REkF~rsGL8pubeK%SKoP)4VUf=Er z9R~|14qg72Vl?W)_CZ|^qxrnLy?AvQdxLbdu{LWC4WE*0+r2ZBH1lBV&MHKhnNB1C zoCon6HV@1f2BHA&$qlprHU5w=^YhJ{Ru{a1lERCoK6Y59x4)iX8oVR zM(LVzl^E~&Gux&y=hr~%KFE(ba{?2!>CG`?x7W!nFV0$ozXf7*e&CR;+(G=XrOaOX zdt`>DO98k{P*YdbZBxY zzuX}|6h(GJ{7{&U=50fy>W^2IE8lxhCYgXDu81#g*@_}g>p~yy?5J6|dBM{&Lu`PJ zF0n`hg%BEaJSSEPom;i__{_pw3k9uMg_IC%3WQQAEyi$jC2WV84ys9#H@t9Brm92F z@{^U4&@ei~FiHhl&jNtdWj z0i%Aacs^@+c3c^c^w$>CXn`NRnl{`_d`#{3-Pc9eySq_IO!J)`CT^o0b<658$k9?v z!^Zd8Z8u;0b{BafurjQt?HeSAdkZ~|AUtI_3TN{ROs-}qY8S+#aOcjTzi9KeIU{R# z!y*eC`Q2P2?B*Mo#~52g6co6kXl@h2b9|lpZT>KK^!Z)jmjZ?)_sn5TZ*~t44g5g4 zf@`d_)P1Kps(lrE`^1>In%E>MNJ1KFFka9u4W1M~i{gI->;?iYBzknx0oE){GIF}| z8!bs6iZW-IqbVnvUrV#;z{9>BKh>|Ko%CaSX`JJDX`q^ncNueGTiqoT8=8veCw?tL zBw0bDRa~MC7ER_L5zS973--SMxOaHyv2onKe&3pN?o(s;({YmL)#ee!&D zAM7bGSj^yKZH4CyB`zErBK*M65S%4080cT}0_flp1?!{=@|4SAKadkY^bXxXqrmpd z49G})`8Fe(afpM%v3k@)m}jwhQ4$1JYr1#)Qtjd)n}@o7KOS3Y6&NI&Cjx}`081l+ 
zNXCViV=x(KNhdLf!&(G)2!}Q1I>4+XM-P6~86}XIo9ek;RR+hkcKDs?^mU;HJ~dYJ zeBz$dql&{?U{G^WHfcHX3uwD*92|AYQywrR1JKejDzLY9-61+){%zj(J~}y%x)49R zCO4b}=<%*NtYs0cpm%hNSu}fmUuLjR!#90quysWJ&zCu{J7Ca&%r0nE?BC(O+`Mk= z=~_WLe$gz@XdTazM5zep^t^pYCYn!v5HsLN`8ag^zSJt+mi zMI%L521?XDV|Ov$G)+8L^_1@uIp0tWf8yf(JbT&X*{6XHL*yTr4oQ6xK+pXiKB#U~ zj2P~EzqOdGHD-y>iivZqe4zRs3_j6}7=9FNDt}WI4xMjo1Y#7)xConaM0foq zrytCZo(?F3F;hF6j<)(6;(4vk+&`B#-)#SR&jI}eQy7qmWQMhxekKQ0clWBcvE-ba z@7)B-L73Z8;$zA~+pA*$P*s+S97fI&I|W)9h3Jw46<>kETKaXUOfSiQ&n#N$A#A#7YK5z~X# z!sqyn66L2<(PQpwSiN6gcWYPSc0<#ht&sZhRzB`wysCtiwZF*eF{xyuTv1YoO^56j z%ca!Py+-TYBqq7`{-@8mt2jz|lbwm{k&+(`W0U@-ZBvk|c!rH;l)c_Q)b~+g$AZJ> z&v+fT*^3CpO06#Sqd8H z*j^rY^dkiYiS437<3d^XBkRMu&=7~}LmenI!@_hvbUD`G=j zAJn@YcaLeu`0c=`D<*M~+<}F*KKe7N!o(Ng1_$Wego)e)y?ZL-E0TqHr#Fro$Wm}d zO4VZ23Wj9qO3sHSM_5`>Mq8X$RFzi+2dDTML*xQXn>;qcWFN9Qgv3h^3Quhe;e&)d zT0Mn72S*TbFBBNv#LVPO3}`v^JRpPn#+Oog2%pY!#n#(vd=f|fQDGt&0c>Yovx@5x zun|`OMEdl1wNLX-)aVB9vB6=I_OcNI^N|`xw#3Y>{e*rc5uYYgt{Oj!gM&!toE)hSkE(L&cqs4=%HV1VeO74- zFZEYuIP+Kj>3D!~v~1>f_{^y;?$#s9f%O8($8EU=?i*jBqPv+e`>=@eKG+*ayr>cc z+u6Kdx~XmxNUPadkLE>Hf_)s;$G$4ta)wk^Q&L>3l_>hLwLp0%e{Ro z0xDXSdYX=SF$?>W;2v1XnimX-?qAbV`irCDW6aBG%kw141x9_lMX;Gf-y%Fg|ADCH zu#{B|O?Fr8_fEPFa*8A9=dBM1=3%12)_M~?eFQ^#c}Ci56H*uY z=$Wyzc{Ep^<2uvTERa;Xr6<8XWhYVto$W#pl5A{$^~xChl=LkE?18^=8XJm-fVdQF zx9EH%Aq3KxHSgssEp{@(Q>jV@?okZ^W%4X%W4sfU`=;3a2b0clUtsKca~~KqI}#Zf zOQ8H~5RQu-fUV!z=15}g6chYA{k)q8YS+0-$^hImxRVIv^#?N>nI=N>%}B%=Rv-Ef z8Mm%r$H%1JilUoC=cY?67Xp{^aU=MUFT*N9<{xd+422Kfoy9#Dxle}M54$*xZcM!= zbof4WH4nmTkGp!z#9CD3D+z(ut&UglU3q2Hyt5&g9K92y1TE*5&XV!9Qou|>aT3B^ z%I^@h?-%qY5%R3_J*-$!=gwVamcY2$oRz}&r)mEBO&|MKSifdl_L6jy{(EcB$$SP= zlS>%W){Li2J2>167B8$jr1glSAIS6-&!?`n)p+;&&pCkSmJA5h?0VZ2way)dd$&P} zEAuXEbd)2(S~#=>iyxlyRQf;0IJA5jJvA2Pg?e(G=!HDHeM%+79Zw$4s!NYZBG2cl z)pa=*Zp)brznt^dS!_m^qMa=!I$H1Ro1e6j$+~jZxgF=QP!RCZvHAR#4eQ^5r#hc( z)rLDTQJ9R66`)KJ`*=K?K%|#(o`Ecu+0S{)(!qD>Nu@(I`@-_!*xtaQczOQH=tTm8 z$q?3gPbbMyD{(J7Xo^po4UVCg$qhb&;$pPrhHlVm;Im`h&Gy_(hxVA6m-D1?f@{YX zd6)$NuWoaT4MYBuP}E(#S(^MFl2E}9Tkpw-qrkRmiM^p;N-!O5#&(g;?ccJ$$7U~T zi>NNQ&+i|J6O8Nc`;fM^5FxLA;uCQarvF%qy~lQ0WT8%et!z$~+Ixs7&;4$5mk4a_ z&Mx#=DEr7xA4OxAmG-2HkCgy_v?ZaypW)$y5s(!UPl*S-m57V9wioWbW@6)*NE;l4EXq~WZdkL=I*>L9WGfxED6|&swOUi!Rd;2k zAOTHZx_|OpBPSieTTYr5IhXo>o_ao=>TFV9Q#As7ie@e8NS6f*871J$Q7cv08=5~{ zqf_#zwMzPE7I0&Evt=;H!lz?|GpLowQj^LtlJx~>&WK*pJ&kndAUtWM8fmMsOf+Ar zCRbQ*F~d#55-(7FqEMsvP8OL|&TB8kMdWhl#OQ{>5wybejI9t+zx6le-w8v$_)py4 zQhhx40f85ogvdjHi>_sr?-S->Bb?vYR^_O6U} z_47BBNgV+~S9~x>p7?PECUs%V$|6Hp;*ZP+-k1-c7+cYZn_npkP3ed)Q$AaBCwB&j zrH2Mi7J@~$Hfw>Y;-mb|Q~}N)d=dCyWnmDoWMa@EE1{zC*)*-AAo0B_4tb%W|Ik9f z&}-qx&vrtA(a;~xvBK+qdCu@f^;SVnh+zrCyqv&7hP6%Uua_Jk{*bF-uy1+hrxEy- zMs3h(X73uH*Yu0(C1!wDU!{EhaiklgcJcPWFFWStu>X!Xwg1(;`f!WS$pf)%2O+Z6 z)u#2_*cCN8_6sMuU{%YsmqMg{ozIW*%;wb7qG4)9DmR(2^61lGmuIEJ%un3WeD$9W zwm;R&O&}$l!A4lFNid`q#otR_`?e6iPOb!fO8kX&Us~bLszx4Ro>JUEFbm&r9-49h zbIHbV!zg;-K%JGtMdhn_ro8plTS%;knWy)L_<~tO$n81%m@Fi#Yd}{b)>-oQ^x#|4 z10y$qURX+~ifNct-AYibDgKugpOKn!=e7{TWX)7Ju@I_8$NTb>w3=%O#gd76nu!}n zli>}Y4^H06oJJ?TCEE_ac5siAurg!AI#?Mxsv13hWsH11|KZD4_Tdb1c5&2fI@cE6 zKF(MWd0;w((3@R{?%?iesceNa4C-X~ei;_&Y<-pb+yv{`?6zh?QsgOM{)vF<`%AlN zp`YU2r1uk66kbC&=@tMUr@^e5!!QkENCAr%h!5{vB|aA=C=>g0?Y z@!06$HV}MJi>HFQ>J5>2L5%Wj-JM|1`)wEZUD$8fczb#zx|*W0UopW!3ec7}cA-E0 zLX|@xWabRAP@4=)QXIr+%KAyD{rkgdSkJU4}DjKKn70k_WEZ_eHTu9@u#J z9EPyCjI?XMl{U%g+W~BBk0E3b>_bSh>$5t;(ni74s`5RxvJj&)k(@TrYO`=S+3G;h z>U|00b2ZNC7%NoRUhsk9KO<}BXmDo`rDeOPHUh$q5B6j;yObW+WTV1=ip}>wf(VD zPCngEE1?~StHp{qtDAaB$WY@;g*hDEAmZk0*x={0?DpisJD$fWn%tzV2Vb4ZNd|cO 
z%A4YGuD$8p$kDH@KsV?*wvviKXH|7*B@)gv!E1*u#*uN=XbMHuIa5D5`46-K6gkJ3 zFIOe789rle8AacvY|@Cv2_rq8yuDD?YROo{(lLePO(T)YJnr#X`6G&lW!LT!^6*}+ zlmc`w2lF?nJM4tjNgjGk+EC|K*kq9(sTYD)png^w=9|yV{B-~Hd9Lyfa$H8$j5IFT zx-@$zK7&_~lxeidC0}b_*A~NMoPgg&y)RNc8oSv3G9xjMS-Bc`->FgG1(1;J zsKqXW_wv(kv=Ynd;#BJ2%lbplLknZWF>gdrK}4oNPDl85g3)Hq24P`GN8!MbGLLJz z&#o@KFa8brLLLm*#`j{1l8vuS z&jz>QyB4pZ$U>SOlQb_$dTygi@95puP5|dADm{XJU1hv(Mi3QeHZo=Z<3X*lsy+L6Ct_Q%97Rx16g}z{Q;ML`qa;w4*L6z9?d-N zQ!+7yQK6WniG#ZZvl|PG+-A7G7N@aKGwtu6Zvcgio-9@o$*qy7kg3NL+Hpw>Tv##8 zSrxsN!yxF zpT`T5EbrD9Ck~Vm5K@g-h{;%fUe~D4;rYEX=dhu>LqR=~AH3lQRiMkG7EX|eTp5CV zz=Gw2vg~q-aJmow!=?V0fm^_dDLg6jualKC^fl8E2N-#`S4aaYc}PTf6I){^XGaqQ zn?IzTp(P?bD=Rk(F*ETWT8o%ji{`T}EF5o4+*-u!96ZFFY%HJ_`&&Qj8wWdxpOu3ZL~*i$I9QqAC=f3<2L~}HJ13~c z{ic!S%@EI7G@C1&i&TO!omsS=6RE2<#;pl2L>zGA8mgyaDzw=);AEW+#p_d z9&QlH#`Z>Xy@6q42a$gPY%{?}5mN(uv!|ZPu*m(Z9 z!}3P5y_sNVdxQJO8aw+RpWl3A=Kwj(&c+5J-XL*vyv2;;%?vje$T%DKA4lJSvN6Ar zJggubT+D9@dES8j6=;yxAT=QRe{$#$>k$7->RA6Pb#Lin{`32$>@Vu?XArD65TITV zs{e_hKRkbz{+m`Vp1%)*xAcQ_{VnxxNqlu{aHy)54P?X=2`PSlM`O^ZW>yO0S6_hU!4NBFY9*{RYEN?0LOX?4Tf8^s| z!t{pV&j{z|XHxdCH(`=BaW*hEa5i94axrv%>rj`KVUjViHFGuxE!V}vDkz8u|MxqE zU&!6wz{twPnb^?8%)(ZH^t`>Bl-R;pfK-h`mRZ(b%*5Q{qo<>ZlBb-qk*Adrk1?r` zAiq1WyN$gK$PBT&jkT>4ue$)Lfw7&T2``9#>t-S){=?#IB|yr?$il$I%t9&%&+lk# z%Bv_Y`8Od@OMuke+1Z|#iOJ2)jnR#b(azC~iG_!Uhl!b$iItTB#KGX?Ve4$*&S2|A z_IC~9CQe3<7WU2-cDBTC8VwBXT$}|+Ns0fa*Z!aM+Bz}*0mf)#XTt<~f{BHZjp=wGI11?wJ>tDbFwpa?yV-4a5pls23<|<9EtzY54yKF_}}FJONO86&GtWG z`I~?L)bp?C{)!Yx2Pjy7^ZLJ|bN>gte^>e6TsJoQhi-cpN9#ZFG&W)~u{N|8|5E#?E1H-xDOq@$fGT`$4kk4% zVisa{Vo*iPq-ba73~DeFD>8kuHMJvtE1N*1xE8+{Gmj)IhZtzK1e!5Pva`O;mi~FN zB=~>k0O^;suy!_aWRkQ7%`e4GjO>g}{$6vjv+{8MbIs)r;8-zfurXQk-*Qq)oLO7T z1|4f_F(2LBJ~ihhM~dL-!_rb0hev=%`WyB(XDmEU#5u+AOy+>LFI-qX|1J0wEnvI4 zn2RiEesW$u81oeG1(Rv1-(dpt-Op10`(f>eb?o%RFi5sIA?8tZcW3?iCm7Rq8~wX> zn4j+Y`V(HTKifR?&sJc6wi@WKYr*_%x7B~4gT-ty)bFu^#cZ?I-$H=JY<1S3enqT$ z{0X$QL#%q40OCsRTm^7o4LCqr5wO%NvEoL1CQf*!$3TJb`nq7@;i)ts6tDh0VD zj7rs5Xl=#Iu2K!^*0@E;g-acv4ux$~MJ35@yR9BfhOMTR>Un0&Y0LNi#@>INGe-MQ z>g0XbTF;tuuHSmsn(O&)8Z@QjkM(Pds?S{Bwqw@jFY4Y|+~>fy%jybVsP+#0?8~kD z4{X}?(2KXG$Di@%vx=8>95uGSv!?Ih($dN!)|MW#d&&4*_Sm*_z38#V=W2TIDLZ|4 z)q}Z{uW8s_Q}Ar*pxxCk=EehGTN(~D7u;5Q-?4(Q|^Ae z~e(_?O5I=^G^!C|krWd{dN+rFxz$JXr=Du!>Z zf4yVo=9O2pL7%3lNlAa&6Z64aoTU_{_|1O-l`iOowj;!#ckEk@0xL0;{&IT ztz1!h*MkGc-u=pze$9?C`ZN21_A%eRuI%la_A!hpVuWOI`KXmHYm2SJ6#I*zuWi^i$(uD~ZNpW$U(D=0 zX8Y2j3yS}6ul}oL=C{{A>q0t@*)o08wC~-s^i;(S-lBkyez~c~f%}#(AF$)8&wGsB z**WaU!KH^+9x-^{F~zH2$&J58bFJ57jX$dCJ*VvS;#I?QXZ}uet>3eyLw8sICO7VP z_4{fDtSXzjdsVO8w4sf6l^ijk%$vCKKFziIH%t2ODjhy?&2MW@y14%IlHsE?*Lpo! 
zBuYzCLjB#**hsN^y~2>@tm4ma#$zYP*g`ijDL0ZFIs{3YBy%E!z3jGGDt>~9&x-wb z;!lNx$ZrX3?$Y>SQ9s+(vCQxph*K*(^fub|U~uY=5~GXaRgB~YKDu>= zZxcFyz&xdW$>u$93&^Nt*o3U6ETCsWRM6VhAvU=-Mxu&+li*bnj-C%Y<-|&Nuhsap1%Ce)Xl#}>wqQ#C(N5L_O8gPVwyf3onM2Ur7N^T_^Y+4+W0&DK_|(p9ad1sKdA*mmEds_X3Y!L`+}YK;^D!j|LU2&%JB0Iglx1{*NTn(U7q$mNYx?6|eHG6{ zVA-X}tB}W15CCa1&ABPL?8;SkuZZ?xiza z2Mq&AcRr~M9_ye|62jn#{!KcNkp_~m0G_=J?9Ea^FD!Fe_L%QO)_%XI90ToUl|P5I z8X@?zR^Bj5;e#g5Hal*BHbt}(idyCx3@7O&o-v3;g=65eU<3f%bzk?5Q!i}je6B`> zB!HQT9?~_msYTW+Y?|NSgFan30Wlhv+KXiHo?FPZ7uWgx)7vb2NLU-uN)R_HWFAEV z{AICGakws6_<$D=?y6v@&{^0zqVrSD!q#LRKuc!8xRfFHT-?eJH{k3<9TE0@l?QBd z8WZ~#1K7g;)+NFf_;Ex?5}8%C_|TP?$$nYX+jm3x1gkfs@56;2k)6m3IAj41J?ko6 zW2NGOe3@t*r-$*}BD5Ikz-%{h5ST3X7jPAtq|^Z*4c(W{(t5rX(J&X_j!6O)t=Y{K zZcXTjLJCdODT(?dL8Vy~no(_aHiPqTl#7LShD`%J;^X1pdj90j1h7J)fDr0J@~vuh z$!S6zhUl(vkEJiQLL1-JwKM0_O0CNn+j$=5kUCle{hSHB+vBSQqoE$3WV-qiMu$`r zy{M*4>My@tp36k9{!#V~cOtf(2$3cd2j`*m8|(F0UnE${|V6p`g+aF)N> z$Hc(RdgS+VHfQ>^!;negx6BDqM7iRf$&movAZFvTt!x1qb|zdD42+NvlNf2|kYJy1 zjm~pL$&l-k0p~)jr^+Z;)~{guwJz-=LFu zqnmPe^YdoCgiEC=N!p~>B{)uV#PSVWME5u$i(RZtBMbLf9rq@z%vs8E_QS{1dAXl1 zN0Aqc!9@KWd5Cqg@_@T3CuqQYqm@hHP5qK(BAB*5~$%I>r zqDeXWys2wbgSoFKvUC@+9fM_%e2`UPEU=s^g|MIn=6svqE&}@6-iS_3lY8fwp7}rmLyQKy}9))9#*-1rF zt%(*w`0+M61y8}5ax?7WCI59#ACSO*ymdm(=>@`s1OQ?_0s!ytzcdmqKzmCEGh<^% z2Sz=6V=D*7zjywYOECPkKdUJVToyy^#5wRmb*2BhvbeY_j=GrACWH(X{zKHmxU5kk zRga=<>8$2l;yX11#_|RwNm_)y-*k9p%>7!&<@50nuhwhDF(rLO6olnjr6XF(a>Igk zrhtu)fnUAFQt5)2hQ>kEbs@q2)6vj}wYTc}(jrE7NlkWI?jp0#a??$7JoDCDU-lL2 z8EADTliI(J7bLI!=4lS!dNwQ3>22Nr{mPn2yFEHLbo!%Azy>U9?o?S+;Pq~EcU1@f zIOmsZ&T;;0AFu^3OU2aOPCi8JR<60kkf80bv`eo+%xe}6zbP+Nzv zx?NSr=H703a>{RC^8UdMJEhU23l&L;i435h+nI3_|-T+r*?7pegvb%p zu)s7$RNTynEyWb5<)p=AHzjEKPEOYsLYUqSb=1#jzF}m7NEa4bY}{+}OHm))-5U88 z$Y1^Jr|g7aIr;C0p{U_EGLhOK(i*Cee8R@io|I@HNGAn&?&vW147+wEHw=sDUJ z*kw!aNhx%I4i)+JP+@KbLJRv=S3UVtXo*piTP*`*yp zOu(Rk1W~T3=8G^gy|-lm!Tyvj@W|J^pjJUBG>_%x#e70&Ic79}@QWPj;!)ckwxTii2-piuT?k0V9S zDAPvB1MR7)S!OslJ-yiu5kVF&>DvLES35nvT+!MoI0^5jpCWQ$;eeLZh6nfcA8@#{YRo0!Axe;!nbM4WN#-x<&bW&nWdUxSM6JHhE_Y_DwW==c}Y$@s@U zGs8D?n-dK!vyZB{pyeZXQ8hFTMhP@Dw4@VFOZo__c*&(%a#j<{_G(zW^vq(z5>^A2 zvMP>Gq12(Fp${7(A3Sdi;`$iA9|Uy4M*PgLMd5!W$!Fw$|MKaTDs9h^LsHpjHJ{ah za-ZjI_vOu{IstPymryoVOfo$`;085nhL0^`$!;V;Ru&y|k}#Xp0zNS*9-Js2Cafmz z*N;0d>3#G=d2umuvq0~TrS9*+=f{U|8;ZO`zE{)yiCu+@0`8HHm5&~YKuk~{djcz! 
z%8+KYdRKy6q)z+!XeW>m-^)v(z~ziAztk(A7UafK7sA&s*sSM*d^ms`r%}77?jT?t z$tQ0<8d{ktiIVpMyUMt)98N4|?!U);SEAq!`CU>fFR?eF$XsIa%)mUS5NDHlpIy^n zywBvq4N-gpA;OM0P*bILiS!8gr+*QuJb9khUnH7TUYkk`cPOcjueF*W}rWWIh2VH0>0 z1eJchkJL~F!`>22R%ovbNilV^`t{&}80%Gk(OL4kqRMzPXwV+wmtJ6x7gDCsun0T} zA^ztKbN%j5r_yp>%rcJ#2aL*8Jq2m;Nzzo;iS|e7L`#J@!?grtOj-MoyR6AcG;ze7 zajYyi@v2gLbV^(4GUQ+Fl!ayqwc-^S(#0sVe(kZ~la3}%xUm}-*9ZG`AvvM=(WT4D zvMoopS{9nttE8c2!&-3!#MOSSpT{i0+N^?|2ZoLeBx;^6UH4g+vum{DKlP?lVxJ@v zl#?Om%V=^S49BZ7$LUma22`(w6J%|DV`4!_dEs_oyv(Hj;_%WOGZeEPUAop9eW2*+ zb2-MCljZpsGPI`mbhxwM&E}&g`uZVtJ4L9e`pd+fHcVY*%mvtnnyvVQO`@K1ZSWNV ze0AOnwhfyQqm$7&^_6TQ(7*oY`h5=!fb3OEu}i0yCUITug6vlvm)ITfyLN9500vuj>eC?bOrocG4PmC9EUtDqdbg`!Vl)$=V~1`NB|Jybc>UQzvBs&skYP2 z@`EJuAxEq_*ni%Se&eS%#si!vh-mv&=Q$(pSh=Ut=c5hM!y)pX82E<5)VjW>$UKPd zqTK%t(p#t(#kayq|1QVZ3he<)6hwTnM;|V*@ov){{mgCaq1o#K_|VlzKZHY}zSc{4 zj(O3F%n$xZxV^mewB5tL0ih1qvpYk&s53`ul#h>@V3C6E%HX`dI!q~*Ft*TdzVoyA zeum3M9n`R(JhHD7bL6#~&a1&V;$Wx7h;Yt-BAf~5sTp^Qc~O~1pCQAW13{LF9XzG{ zk#PJQ_8QTb&QMrqi$QR`sIIAoe&g6PWxhIphF(0$PC=~t)`2jb^3l`@&#FU-^|Gf5c{~d4ChcROF@!WWIcU?FK%Dys$Z~soeb>vuo>{^ZH z%vAT@%_TLC*1Q4dl3zi*Gsf}~=*x-v2^E-Cd^QoC_Vdzfg*`sc!di1)Gly!rs@srD;&}Z;#26BX{tNRV@ zk5yKt^QULEJk7C*eEqa%%_59-Ax7{g0LTNUnuJ>Gq9MU)0@%+1l?~GnI)vW{1g%GGmlY_A!yjx-$Qt?b|GNbZ)pP$wXC zW47E-CvH>yQvIYSdPoC%&VdOJtUO(y9K?%rDo$^4(^nbcNnF)NsK?DN;(BJFvz2Bw$Yb{`C zU}Y#`W?*jfmy*hAT2Z>62^Fy6())&bwpyekjb+aL+e!76SOkAwI3u|*6y&kp zGwkdJx6{63kHe7dCi!by%Hk7XiH7sjW&5HH^>8F% zP*aFkvnVBk_*&4hcP>RfaX13CPJ|z+XbF=IwYqpJ9TzJ4zao1KBwG{hm2!F5XZ|8v zKGU#_;LCqRo?$H}oFSYd@Mb~)GFnL7r%K5{6;tFR#bb3fAs74tQj73J?F-cZP$6*_ zjNf9_Kz*SqDKVGx{*S9D_MmTd;_p|^0sLKU4uAzS0vamV18p4`4T1K?e_c9~d`K|r zT)=yi|GT%y_`h5TGiV*^P2^|2b-GPxU=u{rW&lKQ*p`;WS(AQ3F!|EydPYz2Cofpg(?SiR-XIOnd+kGnW5~5LTze zhmo(Y+wcdEAscq3vi~qvY$R{lx?YL!pl}Zd!c&_XU(Aj_v5a6~ge8gcp-7M}7n-d+=3>3wEVb-$^hUmyO1})CY@z zm_L{MTm6-lPbN{=8o*Cl!Tu5SF_vH}4Z)VHwzuiyR;?=!Hq{eRB)PlwBl7XmIbAqJg8 z+Nf}YRAL%>5JrQHn-kQ z?9`w_*ljr`6eNKqPIQthr)TFAIXpRr#kQfZ6r%6e*KVH@XD-U0I{<`eSoN@EYU{!; z%fhC%gZBDGVXdqK*8C$2Y~y`84HHC=@$<%RA2+|CT9npIb1965Wn_am8ls(d`iJ^s zan&m(XxvPZF6nPwOdSRm6fSp-dTCoRv1U$t`Ax+G7?QM`EPzoV^f1RiTp=m+V2+4K zTN<+L@EVWIX99{+5gfbStmpF|j8VlhrEVyBFhc9ZmRDpN+T-%r4RShd#V`ScY5tn-C8s8cIY+~PpeNVNhV(J2IzYN|! 
znW%bGKN{e$Qs@cPgZ?h``j>ha44nR50rTI9I*y+;i{0NHI1uVzi8}xMWq*Cr5&^z1 z9c>)-RNQQh9d!Qk=sy7(Rhd5Cyf>A3AAb@4&AR^mkpE(_{^qX!G^}gD=x={-!VU)j zyyp=8k8R<7iu^A!>>Chh`Jav&50iKT`F`30?*RD!!xRAMf&bh1UkCn+Py26(|9BGi zQgFAy5MY2C2WSB9zu`c>_xFEiHvMC&Ncu0+KV?pTBK*li{)4c?{5Qh?A1nDM;GZ1CKY(1H{|5Ynf%uP% z#GjylMo9mFUhw@5`fu^lpVt2#CH!Lx0DKYv1N^U;;ZO5_ALjq3d8yFzuXMFW5WBsH_^9 zSyhn{PhQU*cSQV>1O`C{00V#k001BWD10(EY5)cRKw|rT0YCt02w7V>7+N{#D7xAh z+H29eSX$ucf&h_c0|0$r|G(S+;2IcD(~@1IM-I9oyWkUU$-rdO)3?aA>0vdU<&Dn66s+Ubw6?EL(|X3J0oo#z-@Zpq<4yTk5^)av56xjFIdi_;)gqFEJ5HNIokv!p0( z)uNf@OWHCLHO*zVq#6|&Lp=djI5`!%+w|-6$1lU2J`m3ZYyo&!I%ZF`wX) zB)}@2$g&gRRM(zEAsX=T5|fxQTRcs<)Xygy(piq(w8`-3VEfuMuv2O>wQGFQj8f09 zKXcKNw?rgqvTOiO2|cK#RU;Wq8w{9KB*UnfPi5G+E{|&<7=#stT4 zOGrs%WoW46dYE+hz!3bJyz9VR{b21QI13{=QashEXYeF@N?(U`^(5(ce6)+MnNE2a$?ayT3MzNksTd37q3xPyWpd!Fv zp`JG&m!BI)M+6SPC>pD>C;)Iikrz0}IG|4?twnhxKu}#|+gUOjjp#V=`y>{x7p=wl zGa!1ltB>;!8yu}=RUAN==t!3%;H8Gv~G-XU|OJ(1G^k4-isA#VhoF?jXA; z+)Gx1%2D{wnUY}b%F*N>jBt7c1n|osQKpBt@K!&7e8n;5rVa>Ta6o|K7z+!=gj_fw zzP~QzoDf{C0uLcIn*5UtZpQ&1J~^JDLk73^oFBis6r{5fdP@ZrfQYfR6WF&qKm5cg z4BiU-uODkrMa@@7E!9-+ci@t-aoD+Gp)!i1+7Xdz+5-`l#3*%9`ujwEe9o!LKM%g7 zl9Ydux=1xC1lVxFL!4uA7p_qVHskzVj(;9(9z3yZ!IsIG#fTXVn$`L0emRsv$t3cH%7MUJ0xLi+f zi5s!lD|abKhIvztEK`TN4QsgZq{>2b&NHWw|5-ItTcm77F|u<1ZgDG&YHun*g%nFp z%iCa=VBZWVW+w7OVZhg;!~q-U&@P+@MA7^lGPSkS?|DN=20Zh3r3S{)t6RIR44v;m|fs3a4%@<9Vy` z!k|_R8K}|U5`_?tkm{PTII-N=HdoFKEmzOTGtW;5p1K>gZnpb(F*F{2c*r_07S!#w znsg~kMXI!4ae=HU?WCS=dg#5r5MW()uRs2~M=Fi9J~RXb0Dv3@008d09$hTxoUQH5 ze_LCd|0SNvl{I9~*x|i&OuvL!OC4~X=K4eJQ7+`>nA@}jWI;i3W5m}N36n-`B@sBk zJcmvKr|q(T$KabE=RAENUgTCP0ilqO zO#rEw(2>^{rGHNk7atOM+JK9Zc4x^!#)uL@Vt;tQ<~Al%`u=Q^W0~b zLt@wofc8YKzgyx|O6iISV=OW<;t6J26;n~p@y>A_1>=(3CPwdwOlb^BS7t}Kx~;zD z!#j$H!6{fpu8L=GlS)=@-}<{Y$X#*$g_zR#YRuI(DE#XJ*0Vak3>NW+{Vh82(*%sqDTl6i-eyektOZ$NRl22-#U8AgziR{~wTIe|~4 zT6+)LQ>Pa|emRu5*vFHvHw3{lg`-=6_b^Ug?-2k#D4P;`et`88uMCZLzo@p;P(+>A zNpYfLQAz4<1M&$1)H8|sZlQn|0$_fmV~AgirPL~ z+zJc_MU^mAmx`0%i9c3qCf?Oy{%Cr@YYR5!P{c{6uLjLb4^MAmNO|7bZMhyyFG9zf z_}3LfwAhDom^xaY;5DO`Ao>dNLwI=zX97@LHewCs z2*j9xlQbQEYzNxi^q9fq*nvf-DpUbqr&Th+I(^+H!<)OuM-ro(AvE;`(e4w`C*+}Y zwQ^rQsyEXC*fL&q@D_|HUp(}*ccJEu(j4@>D_l1RVI&0m&&(fpmb3exqlXa&a!kq2 zChUdnyvAVxl-b!Z5{~HTt445(d_A8uFw%p1OkB~USoqn?!Sj73H@S^OEjWlW9O>hh zBCS?JM$(UIqXG@P_Zg=OoQ9y%2SK1b)6c6;y&IfVw~tzy*7gX#ox~^!Z3cq1tptWT z_~Z-e1r!NQKn-sfz$ROtCJ*Z5CJ!3JgT`(|$3+ML8`1z>p4%`YPStJF67lxE z0|9aSfS1`x4w!zN1zAu;adl&LIMC_0zd$ML+c+>;9t@8e`EpR zcjxc>+W*;Gd*YZ`A3eOtQ{bonOlM3&a9ljQT0L^DDL6R=)v z;LIRBk;jpZ=;OH2)5iP^nQD2sOiFS;HBg0=@(H2gRd7-;%AaZRjFD9nu@%VZi^eDC zG?n6lJ32kJ26R`H2nwnUb36`FB|#$F-|Ew!#B7Z*lfjMK6^f#209k{!y%*lFjXZPV zk7}dmymt?9MT;t(m6o^d^4AvgqQI+K*6JpF^k(fUyj!M8SHOa={ zKyBbd87YcdJx3$XVj!$y-jdZpNY&wYU?hM^ni9C%FHW&zUtX!D=HD_+bDS)lkH-EAM}2BS>dh!_ztFj56PnrMRenDS!(v3OVPh$?Vown z-o((*!Jh8#H-^6wYI@v=)fzo~&=u(G51=c?h7~c8G2f)(i30N04O24%gu~p}`=H#b z%p6TFP!9$mvFYf3k1v>`rTdd&gL?3@fEg5VQOHn!`#Fe?lHn;l$9I0hqZF%TW=u*u zAkj=CJU%J7K`SlrZ6ab5m6*x5&iwhCAISDZbN+*Ii>d?Mv=>3Gn?UA4#3ZJP#7L#d z)B9577d$X?jedQj*BL+;Q}EESuH{>fxz1>*yHDH5b!%F9_IYl?<$=hb%>3f{E-$q+ zgS8F)ol&MeE)Mn22G56RDSBT&#UZUCm@4oHQZmguEx%%T#AyB66m&0u|wLMpD z$pdo#QAm+C2Zb%#l>1CBdQSD-eqUdI?`kO~#iVI$#i_;To4#$P|8rO|Y6hy89!A8w zU>`VLK{(oxju#Tj?-|f>qOo>0p1F3b2@7j?jVCyIU9m(oo-0n5DAG^j*uGVsDVrzn zNn8l>k0;pI)t}a|!oIYf$-sQ>#$3HN4!_?^59X+0bv!<9F%TUpr!%Vip^w?>qd2Il zkG#Kl6E^1agj

!r0KkR6k8_Qmz3j<38o&)oY@jhik;U`VIncaG3wV1}`GHH6?|DveZ3qd->YnWF zezd&^q*Yh~VhadbL$7kO?T4$)`0cXJ1K%f`*2}H$l&#Qnp>IDli#u9yrsPZ6uFNqJ zuszW`%!vUOpmKqBa0$UUtmk;GjrM4pNUZxStXnt>;n!or*Cb{*6d_22BEH-eE(KL_ zoxh+{G&@F-w->Q^l#l4=oU@4w_<6=P9*P*4e!%R({S7 zc-)Vm2~CGzr%2-|D8>7%s8qdcZjrCn1%S`QAw&@Wl7!Ia~Fd} ziW`>`ev>6&uQoVp+VR`{x$K&no(En@c;@V!E}PjPuMG;i;(SuV8&C~a3f*>8{VBO&S$;V0xIL6MggI}_TqH4lEPfE=(%w( zO+igy1f#plmEw@@mf2(I z>N>~Ii{c^a!UTP?PkyNOv)gDxWh?a{O+VZ5 z$qoXYgqs$G#mi)(0ogF5_A933phuNk9h zny&}rr-W#bB3x-S*w>*7qVoAm-?RT{6?=gml&6uEY)9x2d%mVkuWabxl(Ng*{NZJg zvmA?=$(}>~f_FWkxx;8fr9O#a`sa?W^_hvjbVd1+@5EgtZvzJnU-#p8Pw?MU+_p#Y zr#mPB0PHto{sTpr{y~wou)k9rv?qAZ+cP#7q-~-_M-FsBJZ=?=YkG)kA{9j%b-~qU zjNNh|KN6}%l!a-h_s>_JjF=!`KQ|nT?gz^OsRaa$C0ZLBvjTE<%%jIaN`rqGV-}FqJ(sHFoUl}{l8vxR<&wjZ zAmQ-9lfI$`nPK+i$a_%b)PaWtC90Kgm5W%*FppUGh!?W0d37h8Bh(;QR8Qebgzm=; zDHSnmG%i2!PNI{!N2vHYyT#sY(Z%O)t79xI1B~qC9F9E>?XVlXOSPVVkxHcpVrB+# zb)zZXHpcGmq$IR=&$9L2GL2g%Wm<@6&GOkQg_ph=@D|USunz~p<@I}a7XRn;W@wgI%ufX z++?}2L1*ETC}d%EcPjxD!z#djHs7nV+oqmA3>lQ0!EpZjF;W&IvpGq1}5PXKcqn;{v1#BZ$ z0IJkm-O*~rzjo&|=BCkR>$T1Ki2bg_5hmRHvTUtb4#09Q>l)g-Gk3j$8kRZDZlX1% zUcmAzZ%uDiX(r0M@(H}aJ0EEgs4Xd~(~N3*Xz zfg_n3eB`8@z#dUMU%2@nTSL;3PBx#k7+o5{xRQ2Ek6%jXYJr38yCxYYK&+!TVI}IS z!4%Z5$b`*IsM_J4r7%?3^`jJ7f+wDP@)FQ}S{9f&8U@nh$gn5B8kxZHK4|6+;~ndf zQj=KfWH0#+#A=V4?;RZ-JEcyDXsW^^4f=D9n>Ba1_;`q==c&2!WJDiLc@n%wYi6XyP@9vt*6WG z-dBR8Gkj8bgbV=&zHU7tw%Ze_Qvz*oJ&mH@#mp4Sq@NQl0`RT+Xre!6Vt9x3OMrHv82#@7PU8s;p zf`P?Xu~UlL-k;o((iK~EM)?D`U$lYf5mR4nP3O_QBtiVrHAQyM;6Zf*9*;VPHwMX; z1+8}vUz57A!$c70B^L4~UCi2-Gnqe#UX`+I$q{Rz!WeZxXMz#IF+yW~JgDXTrXfQm zZ{2I%E3C+k&COaAc}!M%M1Ni(UE#O5q-3xnVF}sfWz20rBZjk%N0J9@XMe0@`d%@g zE{?;3J?k?Z6vwF-Oq-lhCtNClGa8vu^GFPQu|OB)i&|c}^(1S&|F?Y-nyo1z*Kc5x z{byh^{}b4ef6G3q2b}0D4s5I!eT~4g|P`0YL=1c<6A zzXM5bLSpIYJ=&N29x6lph%o+R@kpWm%07|gg;_+zr$ar&JGi&PJ+}8^62qN$(*Lqw zcH13(4vj$Xf3;{u0aX+i)^~3?l=<5+)Tmjt3(D&;sW^Ujez<3Y9vUvCOnd6Tu* z!eSZdLJD-$Sn$vR`4u4G%FOM@A>gix*05GbMv!AXTF%psLKIcW>Q<-;jH%L5>C5K{G-X5r?(} z)ABe87=ct-x_BCMaN z$8+iFa<}-6m@xh^&FGJBqGp4I_TquIn;$U=QDq6?$hnBAlHRdbQGJ3(4sL@|H3^{NYngVk3o0I1qU7Q~9$U zG{bjHM0UQ15Ck&2Ekp5*F8|IJe&s%Vn0t}!SVMnZH)qV|m9_}X!${_il`Jb;rMqEt z|5I0$=t0QVvOHgY#Ocu608v)D!=Qph77TINdvJ2&73)g3BS^aNV zQwAGgLoKm`6Q$p->yVm`Dy3RlS`n-UkX2r({<=B9x8F$=@1+qGg6Nvm7M zIv_ydZDwz{X78;EanCPUKSN8+KWJ3z-tKtg)LJaMz|uY!uCEKI?L7|gX7CjTUJEO2 zT4K%PMR%T#aE!PkD#LOsl+WC~9hd$L4ZW{cGmGONi|KC(mrtRRQEcGulP*X)FjbA& z)#A~a64)9gPWp`?!&#(fFisYpu`?Z>_Obn&2z-@*gg zlb(X4j;$i#j;rxy>zw4W4;I;P_04cU3hobWnx0Q}un!Ipj@x-)=WOMzi#kc8Ya#Xp zobwAgPu71O^ix0#Zn>A;vq#{&{8}u1#IcnyGXo1CSf*%dX4__jZAWS70JG9kRR0|q z3pqckDirV$O@Pk55Of}B@D;l=bE2J66scfl{Es~O|Bh~U<-W;kd^gG+)PL01EdP${ z9&|Ava)ShEOh@a|(joJqQ5>_;N>qirpK$3J^`m5rlp_7 zor1(+a^3EQW;525-@K+Am(!)bFkdanR5>1obxZbqr{AFJV4*o;1rW5RoLA?HN5@*U z6PL~h+w2p|tItQ}eE>@nuvd4FIk!O^(x)yto(cVR>W*G&3bH3T;1v7x2r|>2hkD9Z zfj%Bcy28UQ{@}zGATC29zY~r)kDA@28L@dz67%beG!9vc zDi0G25F^X1)3U1@wD4S__2P>bW~BOXs@HNGn$lHA+`z$*gs%ENqxuHrFi|pzOHSo@ zicu#btX-^cXx|pw$bbRv=no;}oy!eH8u5*te82aM#i{4561eWq&p?S~|EozWq5$JL z_3q2INZ^Sys0s0w(RJ^t7n0}Ab~m_mr6mc=jc%lP__mT~qsvhZ6voB&oVH0Xg|{h9 z3w7ut!as~AJ+9g1QIb~Z$#DE~r@y2eqv0}rkuTwWosWBlI*Khda;PJ2J-7*83<7`} znUqJiiV*A;v*N1{wZExM{1=taPC?=6N=Uz{+)_iebNUyRUH_soiCikFtpws|G5@?( z5U~V7cf{p;WjFZ?%kX4$_P023-h;_=6QI~gJVK(1z5EQs;ZRMm8~`90OBbo(p4;iB zb_G}!Ip17TkKYC^i$2Gs(Sk^|g*MC;pRpQZ<|`L7dfCWs`*5EHEgE^xM_Q5NuuHfs zSsY~+oz=&SBM~lcEK}buxnDME!3S~;Z`$RMLqo!t>eRHWx`CSY9M7yEX=x0r->`Mm z+mpdDr=LEE@41EZ&U8jwH5=yRXW$`SYO4IOD4Wj}rUS7!9*{kG7({AZFM`#C-dt`I zGVJvx^N$W@sUPiLn6KL0*EP=DhfD*uVF-g)8SB>HK+Ztu3n&*5Uz)7%YP&)P+0$uI 
zYCZc6ahcFi`YMz9fe2w$F{u})@_qQ05F`V+W`mXfAMa0J54u$-Xxa}>Tl6H16x zur|v;@5mu1n}$`9)Z6A;_Av zFzZ9*&C=+aL)Z<+$Ff!L-QRQS>}YE70??DdA|I{0LB1~l$OS(J6v1D%F~oL$Ah-nM zdaVChZ6p~Tl*=^E8(|j(T*D~aui69P7>N>8wWAAgK}6K;An-AK018=>iW@PJ@+f6t zYecMkXQP--DdOjrlSeEtMJyur^}EXNmXh^#sXXvjKS#Ty`xhtUTPUy5!@N_azfO~B zyqE=HL4`j{7c=5OI})qNuRm-hg=w)-ux7h5?9a^*Ux^h}3g1KTh$O1t zfjA8Y!D!Od=P586UOTfRFpgxQ9TS`J>HE8k*D8XYa@!bN^!nHw(d>vgb(YF)X>FBn z2anYFZJA)zGrS9H^xIHP@=XL&(ACzDSk1T&2M&R3L*v0co6x7`w~xrvwjn23Lb(MY z$7hd#4TX!0B{OaYdYJo8@l|G`O(IRT zxWh|xJuM?jTTeUga?mO;5q%>s@j!VQ{0WbpvR_~G)zZ-kz3~1MS{nlgB_;${kK+rJ z8TOIGw+>p@TcRF9-lRE5P>xGO=!(NFhjRR6J738$#90^Wngo}*!Cp8AWLl-@{*da- zk-OHy!_UwctMJZ|o)+?$2fOR8ge2>DuKtdK9i21!Cz@GZU9RPWkW)nlS?j*_W;wL{ z#La>h@*@GRs3VDojjk5O(HMe+RMrA2LzaZG z9?CjyV~69c{VT8oTnF#SE#WKw{Q#-Y&)Cq(P8Ce2*=kzNt*Z zo*WMTC0M5-;xI6f_u-R;drxc%;LE#j2=ZWR3aN9ix0d5P<-A8Nl7f`&pMyj!eu$l~ zeQ40*fZUqrl8Oz>=hmX-6iAVxC*5MgFT%Q5-^N!Dm0o+bDYhUh1aVowQ2Y5ad^`5| z#|ykgzScTgbqeREU$Qb_8!aIWt7aGkWl6-~`nkb1r)x1o5{{W)|7_qpTFQ6v@y7H8}4iQ~G*|!xgMW zF>CdWWw1N%%$xU2UToo`i{x3&N0MHF0PaNvwL34Z+$=sy?{IEQF`YP>$8urO!kNDF zkG2!-(g-UICR}(ot=z@r90(z44bqSw``vEIrum5d-DT>*Zok6kWr$npBXg;d1D=kB z$>(`sR+!Uk3-UF3-0c0x7ZHG8&Jy8X1(EJ?)A7yfa6Bjn076i%nmIVhZ$W5qmpT*; z76dlH(-BW`kTn_BM+`BZK8#3uN@CSnNdfF!Mqe7=#n~YtJ7TTQx}g2%e%0VK!FmV| zSW7}iRhB=u^* z*5z@L&mq;${#~k=EvWswgtc*qBm1T6T z<}ELnl#aNohA||N*g+c*$xhH;2QLeREgk!Q0GdNySJ*I8a~GT=6wf zzauN^>q7C&VtM>{{;$@v;*8Z_)?@nFT ziV^wC66mRnI1Ir43Z@XWm5V(3Kvot7&2FSMKIgMl}gnot>OAA6Xs1DeJnC}FV!bccSGWB$t%4-32bo@)*6 z&&GR}Ll7;@(=Sq|azQ;AyZA}PA5^BU0|qqVB50RxXh5pwz*VcZS#f*$$O8 zkOeJ=tn(4SDik>lV%?m{*G@a2Bk3(CwZS!VT-2EEEWUujLERxGRaDseEah=#?c~d{ zAylt{&IV+=izhUI*qr%4<9i{trFvAziAPIVI*)o8)Izb*60ugEKTsV2E0KZa-2Wg` z0~B56uX>W|r`tt!s8R*(FFerny!8(HZjV7!GB zec0%`Xd(J~<=~Ur8Azw)vb);y^!4ibg#>=$?Va?f1r$2 zk*{f{CldPY7oOOOLtFYBSQ=t3u@X1hVvmZvzj3Nrl^sA=>l}ukGua=Gce0MoHWVC$ ziTzCT|L{igQIx{abM3%qk=C^h>0pN-*v{Wc$~V)J(OUcS@JvB2bMM-0pu)Enn!bPc zO0C%t6_oeJEt{wB9UO#|KqEUwIErS6k?7jBH-jzpt3p>yl9Sb3ViJ|29;5%q8<|F{j%BHImikz85DlF^FmwmdQFS{ zSs9T`N9>)-?mDaPT~0v(()CQ!D@uT`1X)1$Q8 z{dJ>67P@~*XID4v#6bzt-UvT9s1GS?u`1PItvE_SpcXJ~9k30o0^%MFhCi8X-jkqX=Bk5K+d zxUfSU$yAdXI5{r^sQa~Jy+-7IO))sawdBB{Azefy>|3?KDiFU zwi^PxE?^Ro!4U0zKX@+XYzmj zj{ix1$BNAN9L4v)@XSmGIo*v>hff0b6F=Ov*ql6OH7`3j)J-4yICZuR2b+tZt)I2k77Gpy#b2^vgCBzl-x(G41cD{I^^wSvKtiykkK-<|^jXf)IWW z{v#*;zy7nMnG)ia2mk<^DByq6)f|7rIR5XEp}!Z>;ctP{XAwYElse5&N-H&7?)^1h z2s&<2B(8kXb+ybtTOvP7NmAqQiQ9Gb%tKB0Bz?inP||E-AE1TDQC^U{eL*U^)m33+ zzfUf`>NlTj$iYb;NK41-rFt3Qp@^ct3Mg1+r)auC!!0`##v#^fE5N6w)et1x-9z!}Hj z4b@S%NElJ24=&jvdaCPv!L_QYXRS+uj}Kpp0Xtvoo(D19(k6lipRrMJ#U^mH_Wo(Z zZ=hm52RG(o!5?3rG5F{<`^f?f`909qVNQTMu}i~!yVD(6KhDAJunpKI`sWgw>Vc)q z++GlsAo$lr)-sR9_>8&<4Q5@RQ%I0#0dkj``8|bu@{Doum(?W!&m(E~FRX_#BfT&F zYUlBt1CMcILoO&Nh3Z%f^ADwSUZ&2H@Z(|7n8nyXyEc8ufw;c=>(eH=xnN-LoUm*i zKTyfuIx*X)l_GbiBlgHsiWuyUl?9ym-as{$_3_#J!2GVYx-v6%7o|=Xeswd!M4MvJ z|9}bq!UAHX0T~2DG`RH0%zv&xjwL`GWMH(d$qKa2&-`IaA8C8@2Z>4>xt#7IWql{Y z%&?DL@X!h!%qs*~%n*|p3!nZ+G`-y%Gb~YBA2UQyljjv+a(Hrg1R?gzhy%@ZEbyR% zvUMnYn}9bgQbBlSFFgiEw%KoHmm1l91BqWYl&2|Y-Fave2F&`0q$9ul?Q`;gZ$|vY zUwh$?+Is46UeNw4(PUW0ZVWpXHpvSy_3okH-0or>n>+}sY!If^&M%G+XTB7X*ZnaI zS}{lyX(;&89-;33b1iQ~az`8uxz9mn8>6N-#_V7P#6BGL>?qe zsd>wPY<`(`vo63lmW~o$v4W3u8%q9&T?AulIL*Lpqz@?+$J*U$wkT7h}QRpZ7;} zbiD^je0*MBPbYV>aClq1ozF)@QP$q?Cp&jt+!vZ&HK4>esjI6W3BD^#%Hzc!29)3mezG*mHBanhsVda+3xx5a(8&R;jVpLB>Vow$L;0n z^0jjV{&{o%c=wjIag}Ao+tK!NyQhCI@9iZRfVEyO`}Db&_4!u!a26`wGnmvhn^B#P zoUsXlhCn({E1DTpi8WM+7t~sEZ$)m{rmz;Nu;!${AuYWmW7n$H?!XeY{#4XjveudU 
zo~7>i^v8jDA`yCqd9RnVX#w}Jn4>t-fP`?HU)!)ESgzWw>7uMdsQ_w#w@V=ifKimzSLd$n?Zj#KZ!bm6E~ zOL+Ies#5bk!K8yuB^RznbD6gv>nqJs^ce?jk#25O?fQf8ylb2rMRn~H9Tf>H<~{Tx z;9za(6+%O}w;wl6!QLwEu7&66$Zd9PTSjb2!TU7T%VJ7l%ju(ds`F2hcA=9!=6X7B zmA4sOyG}k)1T7k2#OqD^&p;U;(icwK-cyieMV`FeHIe?aeBP|mLfq0!HTAnnGwBV; z9Vc$szLgH+xf%w=IQ1?xx{l)fN|`-Xl?cpRFN84W8qE;qonVGkbV=r&fCSR}RaN13 zQh>;FNg*X?{VheIjT5Ffnb13{xfb514aEipp88WBI&BZRpGtL*_l_G+rv#U1HDWkp zfnqpb_<4~f!Z>E!X@wX=ohq}=f~}lSMWt4Sg{P&RB}Ech zz}jwdgASNxGKn7pwPK>2YP7z;AYIjOtk)NMTG_6bwi?fcJ)F zB!D zSU)cwZYpZ-NyDwX3}YM>4Uec|*Dk0Qn6u@OyBthylAGKSp=$!>= zwP0c01(G!5q|PyC4mFl(oHICvX!S08@$qJ86!w`lNMrW8+_+p~Iba3V+w%=JVV^DW zxM4guA|^Lr%Ig0i8GD&)csE5SDpq!x8F+|NU9jgF`WtSE5h|}=r#AUHgVd8AdJ$w> zwyBH80ohi575^5#1Invgj>*O|RSae?ztP4#!#$6p5ZG-7xTdj~Qbtfs%3w~}TVWZe zC4iV|xbC_5`PtpH(@}~1+7yY{TZR)mi_2WH_@^w94&C=n<8RUDD@lcWz@fw;;SYzx z?*~BG{RRm=l1p4ITm1>i*b5?;t_%MB=-$1wbxnC)v2#{si9`NE4`ebNMAzAgf8f-E3810gfmbj&@isYcdJ{f(9wQ zR^LCE@ZHqoaA0rB)W;ls)~bl4yI~TbQhP9~aWAy4z*a1?D>G@as3M?c4pMVwTlvh~ zd3}Br5DiR+c}|I}o2Ir6JmO~d6>yiM?x?6jGWv|#W4ri)k;-nsFhUe}imGsI{=sO{ zBzp{7x2~Hll$Rqi!YZJwF|+UzB(_gmyI!9a&|l}{ZlHCgugVe9D~)%j$}#fmE1HktJ&3s~1H;O~X= zouxvMuE}1}=LKlj4SQ9OMVEX74)l@_P zE@&zhU#AVX6eU;`Iq398Y-^$~ZYp#~vH>bI^b@y5xJ#l)4H8et+0BNjo)J=>3irxK z6qg%SCa|6$f(j!Ec#Ak+T_i@MfqaAdGghHPUXnb7@}yABrq9s^;UpuW>Ii{k=O{ef zZ;9kG$97bI_xKDp<$512Y47e3?1{V?lqDBTCAhHAjk% z*w>e143NnR-)N1~9uF_@@m}o2mL1jt_rk0>%@pUC^Du%t4%o8MtMhK`_$>wCm^7H` zBjegwBNBoHkb{08$7%`EwM=MKQON+{V(ajao?rI55;v7ZdLSyhR_a5q-D!5*>yZii zv>lL5HE;ZojaH1+4M{hrX0m&(HqUt8D$=$5T$^3fMxF1Vat?Mzm}nBlMdj*~x(C>C zw>Tv%+igb0y+2E^1d`X1vthv#GnxC5 z{B#y)<#x*!_F(W`ggy>4KI03+_RQ2z;p*LWN1mG27$J!@A$e@LPe@5y1OGnS zQ#botR}Wk}mdOo8B&t>a_p>{z)mz~HRmP;vfMCW|Gs$ruwdY}&Ye5bA`Ha^OfK2tkz!06JtnEpL)qpk97uSQ2Iaw4K36Mz2|)IV~{@{FaIe@d@^_0XwpR zv&#VgAjmmHWRAM7Gj7?etDTQ!YAp{-8ggn&!K$e?nN*uLli6iS>t2$w3dFJOiE0=` z#z9805tA|e?~o*5G8yY06HfH)vYTKuVH?YCNCaFN`MTQrx|(>~0p-$rdi5r?=Zd_# zO+V6^@GoCGUxTm=ji86dTZYg!5}|Mzc`YHcG&Kp;H_b|ow}QBg5C%j@zmC49;yDQf zl#e27-1K@hoa7%cV8g;9Sf|Up_ZokRm3pPEL0E;s>KSv{m}jB2>xPIKN4)~rR1MtD zcM+wvbRSGCo%VgohEA-wFP) z2btY1SDdg*@LYR^ut~xYjm~ij00PH4i9b{|XfK-WyWQs8Z|1%FI-KTKjsZU?Ra^ve z--MC9H3qURKQ zC^!c3m0aM#60G!zvB&EC`I9IoG{)JW6+Np1xe83)FNN3zzxzI3Hd#ms9qPuw%S65n`<#g*Lu(AfCv7v7F>2XlQ|ZCnfXsgj?6| z?Bkd)*c$U!#n_d=-N^J-{pHgDUxGi4OqhlPmH8+~HRBwCQavI3)gTG4L#agqcN5O3AQ z(pLhQ?!oFpbp%cYjw$7z1QFz)R0#8N!5Ek8s02>yu6?_{pHzQWc7#iZHQ+hFSHO%4 zXR-nCQRP*)Zv6f30e_VT&iE~&Re@_->F+ir`>mUk(q*z`T1E*ll7Jr`-AZXhA8^t_8OH!1!4~CP;EvAo3f^2&o3akUdx+apb(z z?;(lxgzS^-CTUZW@4LL`)E?1ZyM}&W*ZB$)``%YJtzJ5z7E^QLDk&Rp47*kB@8n*4 z9r>}9AnB96;Nm3Oq{IpJ`<^ECv?8xDCskYO?6DZV=Od&V{cxCFciN3&I)1#yJ4E*< zfi5Qij+f^JlFi5@%P*2MO?N7}PY`@eRP}TMQFYgVZug8k^#rt?1!--wS`AE+w%Ly* zbA_((H!=2|`LWGUh&5nR%A)84X4xQwj~Jb60pY$O6d14wO#dHS-yGaa(6k%dwr$(C zZQHhOCnvV^%Lz_wJ2^2kd-|tr4Tet3CyH&fhGdt7M(_P*DJZ4v@EbH7~>yvt* zVV@9C@BE6FaL|#MLz>^z;ZTV``k_J}*?8IhiP+c>1o~&yq)yC1fKH;7Pk+(e^RK5? 
z2_>60_rc+V>KB;(u^4b9v6awE`Gl4!G%nZ;ePrn`;>e#_r=%y(-B8z@m4QBvSdRF5 zBiqo`WwgZ_!FltC*2@h?+$qjn>#BVdTLS4`O9|Uf-y#ijX~jBRb2EejY|8^LC*n9{ zD+xh+T(Evr8c@8z_e+%k3NuL55|XN_5VWXOnh2J`pq~*hZ4C+SR72eJBbZTTsDe}c zA<`aL450C6>zO0cDur^^?UGOyg|ag_q!D*1Z$lD4OI`Xijm8BAsn5)ps~mef`(ohv zr5-dfLAVwaOE|EXA=i4EAKfD;rTtD(xYjIRc)F)>!d?5#y4zuYuPR{{nu=p`!}Q$$ z+4IVOvZzVy%4sZ;fp)>my+L+)5Mg#{&ScnC3B&s=gW`OpMRtJ`WNovi5>EJ8EtdzD ze%es`3e3{99nxQG*koM=A+wc;C6Cg}UBJ?e&Qke2#4Jdmg81v7=ub(@lZ-iy!vh3+#qnnF}Pp(^)KSIlc{_& zAQB~s8-!uugZ}iaerI1z5Zb?@t1r zGCR())NMO7)@^`GepP`bu+3pqP@r!yMeO{(Iy=s8?QG}i2*k%ADw&&Vh;qQhgN` z`XswLNN^+^)P;G7gHG3wWo9t%Z|bxZziGITrNx@DoZ-xxuzTBpeGRm?|I9lJ+>!x8yc=OUsbfOXhY}^$fBCyKt?oR zr$BO$9b+QUzEKn)0a!vHCq`5R8xl(T$%;#g1cQ$9(>l$VP69sw)G-{x>A&*QRs;GO zI^ueh_!^t*kGH<_D-?(*is&{nN@-{XLRKdqk&&H_w78K|Y?kM$W98VB9}n57qt^w# zA`w@Cuzt@?(=-R}gK+zRJF$1~A-D5l|Dge_;^!RT+MEOnw3OvbTc!#ky-R%?XH!ky zQ`-#|l$5~)s>6%R^N5nZjp;K<#Q)t6y@>r^DWg|FG+n=UrRzWa*$E2IEWo;pbe1S$ zr?cQ5ZacF0g(|rrjw#HvZyccW5(s+X^vw^^BkEx&%`$QV4lnv$C#DAsnEudmgses# zncBuYuS;lvn*;zT<^70V^Z?_9TqbK%%mal7&s@!NbW(ZSuWY3|t--Lr9n4JpeEPLH z`dI=QeGl3%M8N+sQ<9bxMoMY@K@2)#0_=yRoFkF+3LG`3gmH+$HFP*ZRyy~pUFFG7 z-YP_&&QTp>$3Y%%;G&1sKo&>g3a`vTb_SwOgLLoZu%v~SZKX-B*S%BvGvx5yMCmIv z5|Ankt!JCpI8Bz1H{DEweY|$U9AtvtvVa%6r(hPfaT)Fj4PE3d4fZ24_XeR`fuSQ% z-+!N<*~B0(ootG2aq)4yQoKbYJRMQ!!)8tX3#2M?D|}vTq?SXHGB+$ym)Hw zIaWz7z-gJJsQqu9h0v9gvT{mH=eBJm^EV8=Pyv!VwUfRK_>t5~(y{ei6~IdRLB|s}MDDDlNPgmnAs{s= zKZfO&pYr#F?>~lVNq0(p_Y^ywfW4OA`sm&!tY+IoB6a5LEr0%fEiKw?+qeKqflSg1 zonO#Uw;UHJP)>6QzOJ7K0K8{cB5AMmC=}NUR#s3d3t7j$$gq@aD*+HLXUtFXhtk%6 z$DTu%j9^6W!j)XgZ$fJFhGiFyNHV_-XgaB?bLWJ0QkQGW+EAgul}~%7ad*gy-76`S z6Fvv>c)|g7?a`ZaQ)|{6i?t{RMM}V2$c3@Vdj?;kxEt=%ph|g8AM&c1Yby@ow=gfL zOg}|6%@Q>Ap>hGQDV}Yd$o_eWgG~c9zVYuc9hte8v|YAm7UK!I11}UDEbo=S+=A|8 z>M^SJ-o%g?E*LDc(5abq-1HTAR`rUOjqV1Yu7$3 zpd{A(!#u`iCvuF`GfyreoXwm&f(_sDLKbwXo*J4h4;?tJXNtAe@yCl!%w&J@`MrA(}|DAV9rA= zwx3C(`q7n&*jE20Yb5}-vv8z{@k&m>NHq%`$<|~qVl)SOC7~JRi0h5*V}JFx30KlA z+;eSvU;pD#(%~VR>Kqo{N9BbX8eF>z$PxnInNR&Gun-jom~v*npBa_^a^*|ul~HOe z?Jj)sr^b@OsDet1ltrd7ytXppmdA*FNd9|Zf{o(-@_}k&Pvvj6T_}pqo6s^^Xp0$B2`hsh(kIJC9M&a+xKDkyrkuS0)z$u8WZ?K)vtFDfRaZ-6S5m#9@*3a9wSQfe}? 
znBQEQMBdoYT9byfFa&dIMWmx01_1Uj(@>E7`t*!hx@BN%25MAA$PK%|ZVj$6>qNW| z9DQDsylh6Iee)_39YO~gH6L@2Q1xI#MlR)q{ggTNHf!WgwR!aE!U zx$5c~tiPt8enZJ4<`KFd*my^HkLZIwc&CM)qdLgad@wCXwNbRO#+= z*4HwHDWwEn3{7N1UxLJcHl(-^`NtT|;i*#-gPyX6U9DXmI}QW_N_*eT{P4p7B5zrq zb10yIq)soq=YnYyaT1-@=>CVWVW z_@ODdmGmTjHeIY%HWz1k6L%eIh-xk2=1eF3de3)uNN?sr)Fc78oa|lkB!Sq!N{t96 zqizX4v37X!44FdRvyI_rZSJ%b}x$@?p8>W8^&**fQ89tw1UJL0}&?o*J= z6x1#@^>hPI+6NK1j=S?wpes$bZZc^YY=0NV=H$qeK|*;;$p3+B5O+eL-qxPRFuEni zAEOZX@vO3o1-t!HL_$lY0=^pbqW(k+ey5uk8@G${mo&J{#RKz|LF8jSOAheFTb6@O zK%FtZOKQ#}z}2IIggVE`su~kdLdkBpdUgMuW)aiZU4dl=UZ3M`-sitV`~b!r#yEz6 zD`feA7J)mbfCqfF=A&=`8SK%ptY>L%`ZRV+xfhubWWsND`2dX}-P~21LxA~Oe4CaM zGsjO29~rjWmHE-bu#({gR?gJ#w~77}#SOpI?I1T1ofltezle8Kj!9(#wWTd8QL;26 z`g$8njzK{Zgv(J;Mjm?B3%lhzL&NJ1N7t|KYwUL18;=RW*#szLpn*#@C#kq%aSm>08NnP^Lwn~ z@4FI4t4Z5)@XA&rZ3N+n!MlEdcnpsWQBMVF9A&)27Z_=C&>^LTT>1KyZZHolH z2*EY?10Ee39O#tjd2{AqDyAG#GC^G@RG|Nny>U3PNf;wwyu{xEVhSQUUy?~;A#dRD zglgA@vyhV9%zMN&_o*s*u^eIHg+(O|l#oFm{1caQ5C2Z{^95+9SJ5?ra0C|b^d-YT z)xCuFcVM}|)ORuW??6(mTzY>lm^BLMcOI@orJP@nAR|py)WI$Xky+KCTO^lOX@jUR zVNTep(JMTm-qrgnvvsYo7KH z$x#v;w&(q^xgFR`Dr?i!g~sInihj@>(f3b)c&tanryk!{p`z|7Pg4tf<5w?HO1bKZ z6xn0Vv=&xp^vj&lk5Als;?@-0i2@Kg2B$&DH{9AN;_0Ap28k_Lr#BkaQ|@c)7Vylg zfV@WPBlA5uCHnd6O zufWzuk#%;`Y=mnp^6QY|62in`k#etRRI2n{$fRA`d(=|KO*3cxyhH*w`MXF--cR0P zMZ7jmE13Hva6c*c(;rX-c1ut$E_&N^wdIZp(G7FXsIBt60qOt)t%yo1rl$%+9rDfn zk(PdtM0N;*$@iyg+vz3!dD%zseXE zI5%2GRkGXlvZgQ<2MmiA-RoM$zf_}N599ot*OVf_UXb$|!U#F$Zj=>${P(0C3+PRS zQA;Ux{~Qn}>IO-nvlL~rSz)r4mEZ<6&1F{7_^`IkFO&+!C%_*xj@*>_vKYE?cpx_a zy813!!IRYnGwJLp{W`^MqA$nOkmUdkR>(~yKx9)wO90Z5#Bw7!yj660YC(0cFSTge z#-S`rBji%kYrl2y*6_-IIe-_9iZ|KD$%4kcPfttTE~%oH7VP^}#Ou3-=P?8b_#7hk zvG!@u;E(B#k`Ybo;bz24Hr6XX_!!$gV`|W2A8og1M6mN3wHcvEJ|4 z_f|Tw*yfzZwG%a+Q0G63^ZOzF9Di7By%oa3X5=F}E%k+znyLX&xQoF&MUDF`O5Tx3 z!K>jfs#6INek{(Nu-3%G5a=$oZIBU{lPeqj{Nsi*KO zM37&b1 znF5=3t-|R#u9Pb@R4Q&R&zRB1Z+kpmvA&(fJ<$x&}N5L;SW?y4zov|ev zC*d>C8HazZFfL=X!1Bv(Oh3ZqZ|*|&C%zj-q(MEY-)MZ6qoyxK_N!o2k?{Lo(7_<^ z?PgFulJXjHTuB%q(%h{?_p2@*#J7Z&Ll`-h{I(h{M~FqWFqvGH3uM*7Zk)&?x*Bs-vZ3%{3U4AI6s z&Ew?=-Rp#m{-&zJaE^-$gK3r~I;;vDfdi5df|W_jjzC{0OS&TiOU6yGE>B|t7)BBR@SB$NX6Dv@G7XvcVtU3ijXvvE(Z}Kq2~|)@3p_kmMlY!*u&rc z4{?3ih)Ek@onT@NiFDwqk6dCQDk055M-)ZhjsIKGL)z%KRs10tq_l{P9C>vkRQlh^ zl&jB+GK()MUZCH{>jzFT?O<3s7TN3+htj4_uW{w!D|a>wRXpoKDAvO7dYN2?v1zHM zKtFpZ<|jTJ)7Sq%kK3%k+NDciT(e(l7xMxfMDRS^2XL4mIe0-Q<8ZS|pgEX^YlH|g z>VRXVQb12(*dlq8o$AB`H6pEX1Da)zG3G!@Inr|pU|8S?Q!xWQU@~s>2*c;dWy`UDG;w2@|J)W&ElTn1e9}M-QwTw5 zQV4Fy0Zp2oCcl3BeD7wC&T_QC3pSR)nXcXjJWV95O~VN*o!ZHHVF4I|W32 ztwTERW@ZAx?&h$Rel$g@P9pTpSFDVaU#o~?O`%MJ^aFcYPQ^(T`IZXSpv4;YZya~< zpK}=Yj;<|{w6_6Tf)r+_IBjj~kTsoaq8=qG49jzgOsR8d{LhPaWD^ zocPzSz}~~UdH?pE>}s}1Jl?@=vbUc5I}=x5_)~nSQXPTS1_@?v zS^JL>L<>ZRT7%K*#)_sqh?h0_IOhgXbe=3ohJCD6zp;Hu9X4u46d zGM}tJiUlxLZhwK3_iHl$RRGZNG!fB_W+>%FnypR94*vp^U3ft?MBhrD+i1CwDe=H2 zkaAtMTH7$2x~Q`Y#{_vHIMZO*^9f5hu}fmE>EFma4t2!iQy!kiAGK-dHbU=CnY&d5*M0Eda93pi}9e?5w*Zc#fUBpmu78Ns`+q< z2wlD=-92Cl=QpZ1K}uBW{4~;;A(kgEQGOHPrMZk&n9r;Q|6bP@ib^e5Y6l!JHNbZ@ z?w+cZq=NIb7H`MVtN!T`XnOs}Dr(KqJZ_>!9_#M?>ii_L_izZ*Cmjrbs-w9I;gIve z&ex^<+hA}oBb}X3AuiI>Tf5<(OiL%8#j^PrIwIY$b z0X53)Ar`-GjmqDa4w8K0;zL$jK?>(|SmPt)=>j-w$W~o%NG{IEmh>Yb?4N5JJ9|pP zW)F-A^VIZ?!=WYa)Ss?uf}^jm!NJUD!taj-c^&UKV|Czwk`G?KD-OytB1KB@<;y~6!6jzCC_ZzbP_&Ewt;AI-bm zq%>eMx?v~w0&7)qmn_4>tYV|&uZske{LT?=n>dasKwu<(olRm%_CwUvtk7-RqV`tO z8=YJzL*qp13?VL6zso{SFujtwlAh|Q&X7dDm^N0cLo1_6O308~y)iP>bT!V16dK3Z z4ooP=v5nM%BkzKEqCwD@_rZ%Cw1LbS+kTsR8pPb)#v4&15Z^KEd7MI)tpw&}WIiYV zDw{_>!ka&E?keD&D4JR>%o`?y-mvR=l%B@LY$$Y@SZ$hZ2~Xy$1S87}#8V*_fn@Mp 
z!1B-*tNc0oUI{Z`*gu#gx;xD(A5v+BySd#eCzEi`+%0H%x59KmJu&j}`9H}AsYB8> zgZ%Tpb6u4=7uzatPn)4>Vrg-vVOYR|c9& zj>xn$m*gx;^?TyvTf%a2dYmem!n7PLI`_*ZH}8QtmvO)2j*ehpw2b%(p@lmz|`?xE@RciJuTaWv)T;c4mYu?auAJ zf}6N*391|v6!k@OV*i31odO2JYRs#b{|`90_zP#`XxgA`9}xx5HeNlVH26^91sL+4 z!+YPXJmg#e33>E46RQ}x{+w&>Kc>rWZ9NpbB`|o_$1rf?hPzj3#$Jp+n;8VPQOO26a5WX_a6 zbn4DCC4kZX7Ku%o4$I0YW9u3Ynbvv+XSu0OhZhG>ru4ZE_c8=O0(~FN#m$FvfPR-t z$;apLnwDx-ZJb*8)rZcdw?sJDKm%bY+E>Frz%D2JII zdEyqU3VPAfWT+oWhn&&(rV46|CobhX<^nsP@<5rlmYZJ#@t{t0#PtOptvinp6%aDb z>L}&gHIs~At>^pYfg(8)l&vKWjnDm1m1}8Y;vSTO%HL>BAc{8nB$B>oGSb)otT&{)r+xz$e@uV~PZ;%t_0 zoMfurR%5Lh%N;Ay9KOb%rF(&qdNPwG7KSkoE}qIK&pTn8A|LJ(0ThGZ=AE}3S0$1+tOP?%|T-H4O4zE1RrTKtl6BNIQX*2Xu zU~E(#2vU9I*#;E`d&+V79m@g*A&Epk+#`BJ_7}$WWRmV9FZ*4ss0O584ZyuZCrl>S ziI5)(%A|e9%ud@Uw$sWRqtNznYVC-{pweCiRd&*pB3Z48AiU!h^ek3KBnCfrs*Oay z4AlT{ugl?$?wOm&ULNK&W+p8O0#mNqloX~;i#X5cl?O_n)jvsFp76`xD&a=?TX*7V zylutxd2!dGb&Yd_GeD}1E*l7YWR~RsTQ98Qps zakITnBhHfpGV+9dR9U&}_)7QL6JQVwYbw&h2Do+w{VD#JbfDR^E3VlT4y1H0O8g_@ z%JE-OaxC$RSr`N5Bq%~WjAiSkKFJ%E8&bbxp`B;&{KnYSl!&K@Fv>n_d*dd257^yd z(zL|z{6;`c1_*?y(0unFUAOp->&vz_v=U|Td@Mtv03}e)h`C243U?ZJ07$w!vWzx( zh|-!#ptLSi7*hyIeEOvd^Yu$@v4=IfY!7UVzbs~pbO@T%H~C(X=M0r=%D^TpXS+HB zDu?GLa157AVQ^`DRGAc=59xg|mg}b)f^xk0YQJ#TFVq{0tNDGQRLi8)`_-bddf0S_ zSPnFFM>9(wOKr{0E-X=PH1^cWJ0!fxD7Yt+#o#)_t4`plGaPmc@&>c@XGxfe5e*?L zG8H~VMSUfZxOtN!@Wx(bmjt>#&mwyb2nx0Y0>bX#R)}BxW;9+rkTd(Xw1kDn82M>> zg?9j;uyE&MX>!BqDHWakI>ja-RC%B^6wgxQyt=`8<9Vy_?E=yFn!3Ud{;^rfe}09x zc+?T~Yx&mJ(M{-_Wn;o_PliGUT7GA#&8!RrzPw3_~7h7X<|ThA>@- z#$SgJ!=PAVM{Wv1F41VAbmlSAq9BmUgJ>8T*^jBsr0~@udJz-JL90xEVq)6 z7$ejn^nR1)PMy|L^_~13*Ra9Gf?z#FpJsGQS+9*KFLj0;fKJGm;Ks)`!C4(9CEa&h ziwa0fl091gxcHd766t)JlB3WqjKvF;K;A;3TqH1sLqhH>eDBLPKHM4-BBMqWZMO!k ziZH`XOyY3eP>@J%`-6b2%k*y78I;(F&o>&D?ANx61z_@(WnwyNO!J&x)w*7f?&3RL z#+_5|6V9`|34)_ChPAG7G>2QZ*0W3>!HSuI5Z zN22T;hMf%cm*lLYD89UO;}xB#O~5)N>G*n25Fku?G;nnDa+Hoc1N*^k{1OMW%x ziqf0T(WY}z`+&1(xX4BeA$)%MH%l@$>`5pWgH+!ptJhh(QvGM+)W5Ce(zpJijPsn4 z{t6SU6`L_F^qm~MPPTiQP(vnZYI=q9#Q;XRTIg@cEes7Y4(UlEHtIha!3-k6wAZs3 zmnp|tfZxl<%V1N^Y~ppm=Y?*_h~MV~cwFWYjZs-Iink}Lbr zJ!KN`?1y}06O5UWHP#sZIaM*yQTz1vb${7bviFo{IODyIDb=vUz&-NVJod*GYFJ6* z_?hy0FHmWXEo4Sy%yx)qLc>24_aj;W$%W8z70Cwn+V4wd9^rJbt<0JG$)B>qx_M1KwLO?-R>KYmYcwvsy_8ShnquBo|47uHv@t(;ISMfN8>!~>I@gyg&1O78V zG3Oh?!x_8<%6wUjlnE_ra-cC_^6{>+#g6QLy0_lXSjMG0RuqMxzMn47cm4&?f#}lA zUZa9nzdWmz1@6j|AMo5T$Lm(TL$FCMY=*7UCU)^7qKsa67fTZsfVF|f_!sAH!v6pb zMG$3^X}uf5h-x0{tA!E{C3Q*F_%?2VB|}~8k)TSfmX{L+RWm4X{?$wcx}gc|*ymfEG4hBC6-c2%e0Vx0#!=urBU=O^8Ac1d{)|ON;BE+O-p1Q+X6x)(n5|!j zIAxNDHwcHsm5nVl5Qx^$q?2@lN3gUC(*a6HeT52FM%zww1n-A*N)F71*kknF;%HX zXj;AW^)*rA8mG+Ci^H&TcdKI>MZS-xGZe=qm~GamXAGZRl(YVlE3NyC6)Yw|;G{mq#Ud?t%lDZcX_oC&`j=(m+H zL_VbE2&ts0kX*KDNDkjD%pAlnySR6z2q0KrFTJ^T3%_?Z0>LBKaGQCas`yC;VzXN> zgaX?$4<6ntd8Ce*QO>(%-2=qimwC&vfm^tIt%&U%ucJB@tF8-;%zv#i!MB$h5OJ_~ zt8vlk;3J^2HQQ?c?ka}M1&mA6T)jc4)x@sDCSKoALiUFXW&30L>Rf62%{zNbGK6oyKx^gsp!KQ^V9^3p z?^?x$<;mmI9H}HJ_DxbbX9TFPJ4Cfdm?=W1lh;;6u(1@Kx+uD~g1OxPh@E&BWWj+n zcJ8O^xo+xa)_NE>7L!zic_L#o8t$Cux69s}0;q8`Vf#Ue&+hL0R4u&zaMQ&LQyh-# zRse4Zc$vscja_ADB`(HZz04~|$ro%Fz?&oS39s1@dl3mgrUs*Yasak}+lmO;^pTDD z{ZJjCKpwsn0r*Ne1*g|ITf?PQWt}FkW)SmD=F@j$ z+({0(oqyrI?{7!mHvQ8M$z>NM)&|`4xt zWL`YW0AF7kKzPA^EysU<`!+g*85zs;br<>nn!T z`szF$T)l5&nx8!ob&add;f2CseBG1-L%_z%LRS_&S)|Pl2U$9`J=8z|p~yAiUQrkm z<==FDGm_g6lQBVM{vYFFV@V<^5XzBNFcep_SS4Nwe{euea=O(prW^l^;ILlbe18FWp*6Hnhy%QRaf>~hZ^^u*HFA1X;dRp<-FQtnSsE_&*z!k?y2eJ9?E~I zGB@j%eu2l2qWRQFRb{(pM59fLNse&X&k>F6&s_~Wzfm87$T0YY`cV5}8Q?BQ0 zwka0*#RScY*JLi5dCaS4HPu$MFDJZ*b=%mZu#`XkClXY>37ww*mwTa4uNWr|ic(nCrz)ca5x 
zHk;^7)R*3}c1oUNGy~7y7o=U8%-47hxox*;m2?h8GO8;xt*8CEUQr(OM3`yjTiO(A z5Zv}O^zFL|&0M$q%4$B2TruyMv==koHu)z9yrojc1eyexaAMZZNH$i-Q|hG-c($)R zJq*CE*6cJKGiq>cy*JZ6J^!{(fo?}yR;2UrC~F81LfE|ztq3;}VHPx!oeE})%8!%& zbp}t?c?0eA9rqAOu(!n9Ac(=20_MEtR8G0J*iF6pzO<@>{fmOu`5W;;AOTvOIXExm zM5~VKLpKPA+0cVDd4EKWG-Xm-5F-gF?gqNAhZd@jApugorS^bO%uEyBp1qN!L+ zfS76X&Wi>W`59)Fk@ZMvhFzQoh{crhTs-KrL)IA%*nY5c`q}CJzJwI{f=YMvZc?`- z@w0`7eBggknEqdo7;1@!5d0tX)7v#5AnYG#wf`T_9n=3tiLw2`l#jTEd`A>{ZlDk3 z9io>N)f&YzEm(=6A1%%}l$J?l`4JW}UTI>bd65*;QJz^}w;5VS2gG90L^*4QcNA2{ zzNp9aTJK)^+gZYM?Sy2G=6Y*NUyVJ=^4rVlchA^vHN+0{DI%hWj-fi{!GF*0ODw{F zC?S^zQzdAG;r~_F)k6+jU8aZ~M+zssh1?M&hP8J@5a&c_+L71f8Fgb5$G9&FeRdp% z3e1bS!z(B~{>HUPM*}6|=-NLT7iLCT`Uu#N73aji_(>4bDGCiygxWr#+W+cRLg`Xb znH=#FdK%4UY&CodSFAxjb@|pbmb##Q8oLCVt-P@(Pd*renf-kd|N8-~=~Cq$zh3sk z^H+W7gM+w*HVT8Kb8@|7huMt$&Ym0j3y;3|AVsE!V%xGNzPrq6i2-al* z`o@T^#aljPegGK_$=djnc0_&t2au7Ap^J;cIVl2K0=IlRTbD{=N#O^VF|lTB2#1Cc(FP9)F4`v2{FciMJXr@I@TlQ>A4une?Oa%PDM@W*@_#Ogcctz8YUwrJHM`co@Wvg5*RQ(*FSIjeSSO; za?bX8r}nmczrKD=!hT=Y@cFvlXLfabzbqksdwYAmzT6x>aymAuz8?KxyzpH3LViu> z>`lYZ8hX7=a(X8ReqLod_5?itHFe-G*AK#h=Ju6(KFa$piM4M_4|M$#r9g5eA;!-B zRu^z%?AO;@ab>BBd!3bNt%9|CEA4E=P2HW15=%wHPL7c>>yJzE-H6z(BUSR|!YNYi zj9^gk8dcE4{XOY<{M_3eFdF!Eq`9(pz4fo_wbS*s)v)*dK;Sl#u^zGD`}@1o`}^%b zxDD&omGzv2J@0kdUyg|!ew}MKo6qe!^n@9cbx8|PiTkp+%-M$D3xkB$g4=%24{tBG zyN5?#lZc#|qnU{X*MxySUOzB3hM#Aj^)`adt(=~Uvz*_z4 zx4V~w1U-Hqw-xWlfq~lIA5ROP1qy;aUk|VKnSoC)=2p9px2KaEijLRYU#CY`pWp4q z&-~xVnw)|9+g-1{f&zSk|NOeXUG8^}7Dz$gPNToSf7mbHM`sZQZww6t1O&e>zP=8R zP8X)n6A@#DKVQM27~6jNJiiBTUSwzy0W*mzlS2Wf=@;RLyV~9|d~p_5WK1-Fwn<-+0!@aWA;^#O~(tw!R-a zyt0fa@N#c^(#a$9?F{x_&rh^^^%a9s;g`o!f>3LrhOgF2YSz9yof_-y0uQfPLP$s_ zQBUZ16W_*94l$IK$cPM?0ZOwN@5(DOfMVR>p}lmE99s>jkHh9)k^< zf90w}&t&@ot5)lr-y$sGJrOesscZsTaw!2f{k?s1C_@J?WZb`bc8`YH^d?(LhZI!? 
zE8j_IbBb-xs;6xDr9tv6r>knu8X3>a&pHf(8DadYa~dykIZj_x;iw1_ z@eAA{p(Bmk#fV+f$1)B{Q1Z2kE{4Ogv3O>^X98`37uLynBBv2>*5W~%I9&QNC0gnD2WWf#!+_6-ik)JOMYP@b9KWwo5CMo|jpp;y(WFiBuW}ehI zFJ_YuKJ^Pu42iYI^t+xKUw%bV$Rzs_Tfn%2zjBlCeKkXNqt((JBTBw!1&=9vodY)_ zBc+GyaRFUfQp4bVD&j7=N6UN|GcAgBf2gtT+K?0havU;0f)M~&uB2xqZ`4kj&|4se z(3>DrFkAS`>*MKtdpImW&^UA=n(Tg|n)`dpm5COXkTnos#9(6kSp1 z7WZgm2YZfq%F2McKJeI>22A(AqV@4Qe+zS_7ER^qiaQ4I1Y%Ih|& z>F#SCUuD~^8H*mB^$!oL0M#wCD`LoqhIb^Q&HNVafaOY$o`dzyRP*+hC1u|p9zacF zn60Z8vm$;ZtgkHL579yamd}cZ`-mXAnTldY;fA^Y-kxm^QHq()Cv)6P9?m@lTiAwa zM(Q7l_E_@6t(;omu97Z-*P6KjoXbDMT91zcM>wYnCFSQcrY&4%$=>K5<=h9ayZ3|l z3z$EJbX1f|X7$M9`;P7O2tDR`@))vPz3<}^3cXeC_&B*U9Lt`O7kYpGv0dCrOwL(C z*UPwJ+3Tw5;omd>=0sA$FsH{ab4=8w?ruyBw!g;r4)g?)GqTA4ZslbsA2C#nRPx6n z?}HEEKqs;`9_R>9Cmq)<`^tDt_K%V+n=iN;v~=9Yiirl)@I*cX@KXr{u0IfqfL#sI zWQ%)v5jB_%&uiSRoODRm#>WTEwVj|a+$UZBs_F072*2yPb9358>c}*?M@x$CUTJzI zl%Eo`=7KsR{&xLMcG&4^#NBQT2%l4R&1h0wZ?yPJ3Mf2Vq&hnUd1KDxsg3;sV`n(q}8<#w&VXbX8 zLjIuKgB=1tyhwO_;UtG+-c>7&3mJ~Pq=DN0MI;Sq8OGYuV8*^-$%Z)0CP5!Bja1^T zrK$2kMORJiKpu?0t_}K^_P}J1FtoP1|B5q#*#${AQW#Gs54wM>8_W20HikStLi35J zR{=kk%9xw+c=K)~!9HcNsMI7936B8do)}W>m+%C01nLFc?^< z#;#tK^LV%Z?>eo?8-DxW zul5}3)%4o0P#YR;hH~>>;V}0cwQ#*=K_5=^iHm0W#@+o)hjqx75oF!B4^wN z>NN^?$sEwU7A*@Ale17}Qb=^h$Ajzl*QDD#z=o}XJXR`iAEn^f3e*dfgWNnZ_svG4 z>!48&Cda0D-bH(GpO3=nty~Ztn?qDhoSS1+?mU_dua0yPu3K`$RjcG}ZiE$DQgRwS zb1+F0OG355<=|t ziVlr$%qvc-!+2@*gpPQc+Atk(UD{+-I(YSDW#Uch(b$?!wEyw*bRe)(eXrKjZ8BA3 z&?HKwVh)bB7;3*7r-`dt2e>}!NBM1#gKpy2s=wguslz0Ut%ZQ;7d~F%tIu4JQ|!tI zh{TwqqDh&9Tc!xKgZ(?5ca=M;X#v~*HTFBx&((94)Lyrd+P<8gekjYD6ic-2zeiDx zLl|%38i}4cCkmi(gH-Rk17ODcmA|q7zvz0$AWNdATezn+ZQD3)+qP}nwr#toZQHhO z_q5&9Hg7-A8*$%=FTNXb_W4m!=Tt@Q%v@PJ*Is$Oo~JF9)AhbC%Gy?LO4SYL3V?DS z?vGGc2%^N55LkqYs6j;PDEa4bMXtcDa!i0^ZyX7XG?38j`VX{$$0EV!IyIV#UfYL7 zMT)V5jS0kkwq4%S;*eZb^3aGJ?3=-e=bf|^I_tgD&e4r5$n?F^hk6fe1UfkxCuFZQ zd~C^p>5^oA3%6yt6RTL0B%N5Y8ER(?YTEF3rr}{mFgEu92{0*Rs}f^N3>q#89y;X` z1qZLIX2ib3(_uxbv6I`7YPMzB>fCM-VK0y7^8q?X_ zN4G-jiAYPVY6PTaacU!oC`jff7##fxwfBhDD~YFW`&HR~mOUTtrDWp{Nyk_=REBJC zN#x|D2f*2O2&(Ujypfh^;~s(UbdIFWx}$U^BLBqxSL%>zAIs5X zfPnTO8;<) zLunuYzWCOP+JjtXg8Y6}Vl;D~<>xnv*)IO3F=1aB3Z)3MmPI~VFx0>dFw}tzFqc5b zSpCcs_isPwf}y?~3sYs;ilH9Bw9Hpk(G}Z!{B&cr@~W01nECrAc4E>f5U!F>poFp1 zt5&1Vy!*l9619w2=i&|d&UfZ4X1jKx+4GwSd$u0jp2e#4`C>_?QBY~j)MUIQW);D^ zZqx>95(XTd93)j|I$@?~{H*3&erDBHj0iua+hKI|2HA8Hc=!RU7z%e~utq8U5a=&- z1%fnE6zCyDEA$D-NOeK2_KqP2OGgIGHjt*g^pbma<)U~&dy1S+)G(_#OS9g?k`B`; z3Z8}*o`rhK=!?MvH81sDvC=>0^y|k2pVS*1d`SRemD>%~iF*O6CY0|4r`e}&@$7#> zfYO+;&3MtC8XnVj+jPWxi$MDIZHs`f{JX|LduqOsHu>6x6bna6$C-US(I+fMKk#at=$= z4hT~Z`ttNKsdX6V43UD{%`O9rgWUP$7(NK}r+H#D zWyUd4k|9viko`(@(`2%Q@sHwTEoI7}hCjooZ0Xk&OQeEl8W;S=zM>10V|jcYf zPWSD#YWdP_>lU8RBxh2E(&>*uIl8Fp1s}xHJ{|aKMh6uVX59w*Py~4h|^HY3cWq^^o9(y1^tN6go zNbogueq->bMXAQR=iZENlX6m1A<1U>e;h)&Ay!_(X%B)jA|vvvke_&|d}JZurexIS zW{ZP`*WM&yZ?QOzIKb5!>csFP5GGsmAYKM445#n>fyJRp5qlk-ozNuxrp;d7=#VsC zrH^o(3}*&+&l(Z6jeZJ{LwELH}BZe7(Wc za@8sI-l>ipdnD(40U1u~imWtJ%&d|a%63swi$Q0=#g7-skCF=FJ|;e;^Wz$z^4ucQ z4#3~%)!#@lw^{|EglC||$jCO8uV{+1%NCGj4QsreM`piSh-MC8WQO0DxIR6nhgd?z zEwLV`+ZZ&5ib$}Vu@oVq>dus{1C2N+^fM$O7Gv)d$-kW$gu!JRl*W&aB#ISg+@%v- z9|Q}ohCd!X5QU0fw>Ji#faPFXOQW$;Ws(ba$AGyxj|gaHMF+h}cgcRT6w26O&hx~#9 z#bMSShF(w*DEB8yb>kpk4>L#(7{1cep4uM5Jj>u>q91Jc2zrX*lgEAz#KuV07}nq=qrhm&U$i_-w{3A~?NX}_2sNhy@%_CO5Xi85n zQ&tk2u~#xT%8Gn&?apoTxgXl?Pw!&h*d=dQR-qqv)w#d9_+C+oyc=Q%w% zzfv~~?KH^bY@qgV`?wCrjew*2$E;%|*q>ynJRF^(2$uO;<1g>d*%N`IMF(88tTw`!mLo3^#`|Srjw4%Z=a5cZIB#;Nt zyU@ZK%Cd+g|2x4kZR<6qjkNioKx1%Ohv5#PYDowust)||f{y)}QHLm*#4L~*HNoG? 
zQtXC6x*sxU%qa1M|5RKO%m9$ArbC z1HaY+$qG6+OfbI3UPFL0kohwiOb$Q^+k~0!R7jpp%62JW0P6RMG5iBiX`Kr)O@1SiCLW9xVBv@L&sw)AFVS zX7HvAWbi)!f%J=5te2mp|F*ma=T%v)=B9kd$JeKajJvL8m8Uqi_vGotYlTB4O)%*9 z4P5D6x<#|&RC)|W%n8nlX#5{7gKXF{y4uCb=?SU)5S=0XMQir{`k<@^{AsW#kRjde;rKk(j}S=7J?-Gxrmbwxj~MvYRjKHP@Y)*-7d}XnCc}M z2~O8I!TPtxA4^B+6FNL#0=P0%k_Cr2rG{WgZZY`4as2ID=PSieAM2nE*ye=N3GNYA zmh5z(DzW;!J_NDUvsonL_!%kC(Tac56CVGnDqng-31s$PW}S=fHwpv}B&WNF1%R%D zAKhG5nkdpv-&n9Ki9IBqD?!Zt+~3u0df&4?@DTbepO3f%yTNMN_44W;!h8D_>2e>a zdd$Af`ESJV_B~<@r$PKaZKLh^D=xf7m%-;AV*xu!Xz%Y2VOW3k#8$tsP6=wG)7xV^ zw@kg>*>{wmx{uqHQ=Epjk=gsHA-+!O$aIT%K2CS@3Raj&+@BUOhuh%shx7vnQ=z27 z8EP;$iLxw;v37pA{z}g%h!#c8FwQQkk~(V|xF6&kl+CB`2`(|8ZXXS&W13H_%cetf z$;M*Sk07PXdkdwxD&C)c(dps{M&!0%` zOtNT(9&|Y`#k7H%KO+_%trF@t6i0}C>KEaRH(Lg~JKZjwo0%C)-jxwNhtTeRxJt^g z7arL24TO$asFnF-E|l35?p|ib>@|$jkUUs3%*29+TY`Gpl!Q7mj>rO%>7khd$ae<9b5*8-g@dM5`8c!x~zDHcbq>{&D=9 ze|7D`os=vc^u}nk^#uwe_mG=OV`kQ>?00!>CeU4wvqA6iqy!jpKGnIZf{i@} zGDCGuayyf&JcV*B#NSU~_FpIfm8O=FJs|^jpa_fk{TA@-%l7t?;DNFq{h*m4U(KpnapDRHeibBD4%e`XK=>|Wck;FPSQOdB^hpfE!I_E6juQP~^6M;U zH*1!~6kOb@rYkCN7s+B@2YATTgm6%1!7K`a+UaJt*dQ=4#6a5ELa{?Qr#@z706jh> ziPKUdktPMHqA!>eDtPA4P2p{~6RW8sCWaAiE`K})x6zWyXrEx%WrHMlmym26ojclB?n;Hd})^oA=hwW*{=)kL^f$zz#U~` zk~2T6vQJ!5boso9gs=5Jn1<;PlHvCn8W=cYE8YuMPv`)8Hp-lxgc0q<8A;QGw7D#? zWk!j>m=@bC!bv)(Zr%}RgDPRQYn5?VUjjOE)Tohj##srZXPW{M!3lWRvn|5gOzt-% zO70INN;beBnxhssV-8@5&a)ZWy{7^BYXAc)i&`1I_mZg(Bev6|(o7R^|B6Mh1%L4v z6Z6@-8T+bIdR3JqJqJX^&l+LkV{*X$*6Aj1r1!Rzq_x| zS}dt-TKUb;0m4!VR^oSRtEq%5@j(oKK0o9@o9I|G@3ssWe04Saw6N@RKKttnktK#f4l}({4)Dr_z$T)1T7T9vp}of6AS(}hpS?NdGmkk{ z(BC*TH|Ov6MX#IL$F1Rli4&9PWov{`iYh9I!Pk-WNt0tH8*Ia&Hy-i7`65mvYJnZ_ zw?b_=gd*HtHzd%Q(aq^Y7)vLx_ka%kya$6o%+eTXG3nt_4wK}EM3 zB*`Sq4YI-aE=2azDFYA@f75kQ7x&~3!?Qd|p z)gl7Yb_-^4ZC;qrF_ahr9P-?&SPPDvt)(M;vDPEx7$TvZBs5!P*`mK{2^rACW zDz5Cxmh4{^#65B(|Jr zr+NAnu`fJT>RFep*m++P%50FR^?n&<>WlEJ@;Ctvxi#x7Z0vOcZ2dB|Nx zmQScg?LskLo$+i_Y^*ea+V~ct6fjjD5!evztQ0*_C#NzIh%_HASH?efvX`igiB)jh zwuG2xv-)1a;8F(V@Uj*6W3rx98s>q?@P`|Osc0rD2Nh81@U+bl!Kb?%;qDSDV|W*V z2H6cByzIymrWwAX`7ACQId)PT<0&l~w{Yk-ja$`%QmL8x>kYVjYQ}9sbqxZ?`3inL z2_4~d5E8>j_-?w|o-8M+|B`Be1Y$su_so9xPDq3WBJE9H!}}JwWj0Nvm93SSAjZ>< zI}^Tt(=Sh^R&7v$FF5>2)X1DG$5@2g{iEy4ki{j{OWvZrOIm zElg5WVpjgc-UC$CzX=e~2k&k`D`2!>(JEdxrX^jHFVA3pa}n-JCPyj}v`OOtpDMQX zDnm}#3Q?oO+)=g}%z>9Wc>O|)h&QxxF%Ioo_c9A5UCP6lwZA4a+j(G;wzG9fS5U?g z2DZtLXsJzT#}Y8CL5o|m)NDBTY{&X@nTTuUM5%OiX3O^?#}7Kz)rcSf%aiKP+%CkK z)ZCb0L%eZPBjWGRUo#>vJP{yFI!$TyxdVGoP9~Flw3gi@t^=3Ff5h5GvOYoHW=B_t z$?{ghj#^FSd*o*tAi*6dhMm6VT~y;9M_tvCQ+ zv@j|(ET_RMFK$5reIlEN0`9&=hd|}~kQ_vWx7aZ!B(W5yQ&Gxf_;BX-q`hS!1Js)| z`mCx85Sz#mDY3+gNiXjwWK7V0GB(*3dhMF(??Zio_w^(xmslU~?sXyBCRdimR+2_4i?Op4QjiT(%_K7l7S>(61J!SX{xJ7md+1L|C4M2#OZ z5GcXmOTpQ8=}tsGI=LZ$bNupP!}8wWd-vxg!qi$9XGiSNU@5iQ+yrWSSZ4ZrR_8md zn(ZU7`XR~790L+6 zK4^*To>kV7N2mEM5qf7^_h5> zXo)N?MbTl$EBSNd{jC?Rcy_~(OM1vaA+@pu2|ju!^Fpyy_tAES^x5^qXe6Em3n>7P zxdb3B?ofeKnT=!Wr*n}y^$8r9(nk@;Ac$&7jKqBn4l$mJbtsFE5b2NGH~uQ_xd*!A zhc7u(`uDs7pqaqq#@>Z7znQ;~kE1@Dj0%WVhZ^SQsc z#?Rs7;rVv{J~|kjw!8kmdHo^m9DjWN?DJ8(Wt;kY3 z*gdR+mdglT7)wt=n?-vW?3?L$qJ3Fl{+!X~xhB;>Tc&@UvBgjPAAPbhhp!ple0vxr z9aj>;DbrUK2}h(Ur=f9O`*J{%!XzAgbG3xH{@;RzsVmm-l+=382l;OkFErC+Y*d9~ z6cP>dxR`&c`{UKSP1D%VMn$!i*n-L#>$xF98@KXsBUYUh;6lguuVzgLNODlCsW17j zbTDVi?rMFYTCW|h+=>KdKD>$S?p0_p`EvQ%Q!@n!Q<;JYDA^fDj!I0?SagEg{RMM$ zGCraq*k=_cfAN}(|4xb}dcce5!Q~PqNV2ASgc&mBNOTGi>eo(MBgrKerzV?Kl7>KE zO-@P-ng!|v3#0gohQ>I1`Wyim?DQ5y=w&9mcqT$_Y}1F$mJqzeBJc*F)E3tnpaXL| zjRlf3w&uRX{%fw@T&lbymQ6wt=yc=Qt^Timx#O)L*jb}6HsM`fDUtA~DzS8r%9K|N 
z(Ui(%|ENXzXQg@gh+rC$A{`7UEYH0o0cBE@}9pXDdc)y0XAH0!PJP9$gm=4WDA9cz9( z<(LNZ=m3Y0op8s8c#neM7}VsDCU@d&Mev_4o@TX zm`5U|!>h=YEPP6w5?H0kgK`gv9UjRCV9j1>9WN!krN*%As;2XY&iOle9}Ff`@9D@| zYnvxtN}ObhtMRcT(&ZO=$3B|2j3|bt=P;^uyhZE3imf1DP$mVL;5f5Y$KxQRNHBx+4^cPYo=`v=fZ~=o*Bk zc#?IXgj4nt0cdOT`Qv;9+TkD$h*KtV(I8(>m(8F(yeNj8z9#B6F2S{7`?5I^2p-bZ zbCgbouMk8z2iDP}rA^8hqbP0jV7bm7j4slBn4MW>Ou1x|=mN!}z z@fwUu;%NB(B%}5jFTQ6lhD{_1a5Zo`DCry9hzU?ih`Fiqh!dO12Jr6BGuQ(VG)Qse zQcY>GNZp4>A(_omoAv!o|AQ;Jsp19|5G?Q^MIWI*Zw5zj^M+l*?SRPS1&v2NeY9XQ zXlIbU6!QxePSmrDH)_DVonE&^IcoF6W{_)3e>7IhFID6LUzt$U2*(rBS1HqwI)~wK z=Q+iH<>1Sg=Dps4BV+|hDBDI{DU5OwljhTW>7_PF?JIV&JlXvq9lAu>B&2wRZ4Ezb zTKIKdsDS7ycv2AGqRNX(<8rQ@++d+1E|k2DmF(FvgFnnKE!61#kl3m2z#R1Q)XvzX zzx9e5R^F(o%b&ie)roNOH}dJ16D>E*vVMj7>J2g(DCWR#IAHdAz25HaJv#nKSxtWf z|KDDPOG=xLKRoIe>21K>)bO(r#i*i`qQK`0IA-kP3*83&?)A$UkY0R{%p8a&!(3z8YCvv)H zhz*(_cmZBQ-+SZUj@?JqOX_2+#dj^Fey$S1P~BX98&TJ39{%*PCuN#-aoMO+9+gT? z$d2U^&v_lGOgr?>8M*ygkXA6g8AdFY6(%DmDnUlX7D@d&Gx@7@ z2AQPwxi820vD4(z&!^Z`j61$pYNPS1ynW zwsTS9(Fp$w8kzVEr98?c`P@3Ec+vH5?RYRaZ+%bz2i(aU(>G^bkP_!jxg5`66C~tG z@4NI)u5Kets&4jSAKGtZa0oPRKvX}#=qd4q{78z z?c7eVtnvw3PD>VnlLVEm;JTWDa_J%vWPRm+!nk9qGb-=SKEfex9uE!Sv~C z+qrUjq6H|HAiovWCRdNCc?@-GIQCK2SKwMks&SHS?-r)1X5C%NiA%y zTbb9WBkkDCF4eG`D>brjZ&^_h{lz)j@2W>)s>C?>vr8)MUGO&C$g>G2#`0-+^7e{E#&>a} z#m%^y&|LIQ2vt_Un;l!~ygRE6Aa+NJ6dVopvyT)$nxPkY!B=8!1KnzWn{(Ckhz+kU zY6#sQF8CRC4+C$H*jYz}uhbpvGS~mnN_d3qc=9np?{}C{AuRPviPO%*6Y`kJ#SQ0? zLhMAh3+ePCu6jVu@V>$vfpZwl2SD6QQ-i!A9hsal$E;OoX$@7;a^GL5Kno z`=?QM_qUq7Pr>dQFxWxBfI2hI@vyt|&xSBm0u1Iu*X{CeI;7uVfRaJABUH;KMv`C`TlEh z|NTnNzOdaIOBvNGE(9F|4=?M`gNZ{|ZN0KcQ=YyLjSC+wbD>UjG-H`Oo3~>fgneUFGU^&EfL-jm^7b z-_GBm%H^Hnb>6PmkGScpn>sHJUpEpgpU10zLR3CGmc5;dYE;^MyXSJ(MTvHHzMjud z$AlKPNh5N?v}sjI0PXD}{EZvzun)5<&k#(%Ly&I!9liMKeejkT@PhVX@Rp6OpBIq> zD2cy)o}xr|oz`tbnuhjt44+oqJ^MC%JzC#ai*KL!SG;?<-!H?WpKq2fj~C5v%bS^_ zEp)s+IBv7wZzq=BUVpo<^ln;H76<-T^ZLGlcx2;CUi)@>Kg0K~Qrf-y__;ZcvCGQR z5nN#F%(GQLby9tBV0srhc}65}&n7~a^co_I>sy;;Zxg4gw_^bEj`aJmL@o3@a?YV}lx|udxIlZ{I=kfmF^6dF? zH2c>m+L_0v`>oDT=hNoE7URadGeL#q{yW{lnGW$?4wX+urRd^=UX*2>E7*9NccYP=S@%DBo=vh;hGA98#b zmu+s2TkdVz;NZn#>(dE*<>$fqiwPUU`6vj$z0cBmdvWq-Lj;Sf2WvdNTqK!C<@E2w z;wW`G36SyN5iR0^$N~2Jpao-|$)y!GUi0!8h8f9nUj*+xH%FEV&*0*m%CJEb!UqxYL!v>FY1&ZMZg3 zJwLC%c$^mMU1^1dXXWR|lTTh8g9tHJ^5QT*Zs?aoZj0;}MSHYul z<`zweislSdyBTLJWe~9!ZLP#W<7DauBnq1Wtope>ne=e53%s;yy zYujuLOYrHiDHR$UTcR*8QqFVStQHN**qmCEplQ^urg~m>*0=U>9$i;AGBe9aS36MJ;w=hXSg z*Bp|qW8&zvo>z$C*b(q>-98&8%7Jb)`5`pERVj z*-ka!li)NHUUe(Vd28ZkEIRO!zyW))k2xE5>FsvLo2HHQ#~W#UD|Dy#s=w)X-3Zxb zjT5cJ>pwemyWCg6IxD4>cC2jIt`h)uWPEqJ=CxR+gBe$J%1=DQ7V5~RCSOa#zg2?C z8mh-Zl$|=7k=>5klW?^bw9U!2=A0(0VI2-O= z3IsaX@!+&bVhil*@x4^{huXif2=kqc*`BYcVWcnjCXvP*Gw07w6Fy+`l6j$duLf<~ z5wtn+h^z&)D-RAdWq+-0*v{=@9g7%CpyfOWih6VoOp9GWno*-=vg-&{t+IUJ)kY>@ z)jQ}9(NX~}7z_y$H4>~gC_)0PswO<3i1sh%YNaR!em<~0Ayj})sJ?RIECyq7frt=P zow#Wc2z8`np~T2$xW_ewNVQWtFowu)BB+58wDk2pEX_Vad!;Fj2BAkl0+=VpF<^eT zHyvCS5@FUT6=RhAq==WpFxqkCq+vzQ!=(NZtgC<#NFiS!$|V_s8WaWzO?GNYmKC@L z%UuL=ib!2XC`t$<9aHH+ow<+zNfQZ*CL+W!hD_P36ly<3h&@@18qxXf`TJ|V+pjfP?uGv!z0NO~iNdB4G^fci{5DqWHoc7}O!I1pAC0`+xsKG;+& zR*nJ~cM@}ulMQyh49W^hXEP*r8yhn? 
z3BH;{&@bU@@s5)-si_CTPyu9^XlmXKqG|{j_7|7IUMYx~jUQD9bsH{_ZKhBgXL&typ;N+&cG9Sr-;0UMJ$WlkNEUeu1lHxFsZ zOdS0@`nkHildW+7x^?HZqucZBVLi{Xwh)ol&};ypUHla>1>hVe)MN`JcYYg zdl0|FpD)q$X3FJ2Zn>5Sh++ zKG775Z7+@WAbU-?+5keQ4?%*d%L5bpAB~~PF5$8$@>ql1A;It|s$v5XI4D}#2`8)n zAB7-<4j{rrQ+3vabb9;pgit}p1~3py(bSLzdQOO6akwSKPyCB)|MS>`=fEHmn27}m zpu>yG`?UL5qn)XmiKFtwg-=zMpfYj#whcCk%{g2A*A+mJJAg2pVgIw zkh9&(r@}><=$B~Dq3bdT?r=|uGHf}dk*vXOWT1bvap}e3>I4hFepIJ1<%+as)-(r^ zO9(O&m^Hp{xToC7SXHdt2T*Q=7hvd=@|Xu&E=Z7F{oq24A81oBGqlHbHPBSIz(EAPA?GLck-*$1%< z_G>SKRbvC3e64Q6T`=ENh4Y4hNavKl#ma63eD14`g?m0C&7CMB5mywrRY8cT^D<84 z7X4RO(g(0yG`#6_?H;67yrD#ffomr?6$#@OM97`JQH@L4jxb=0Xt+2&?Ka~L>fqoH zA`7!YnuVe0fVrWpkP$KymR!d*tp)9Tune;SdLzDXO;_OuWGjgryBu0eC^hFTum)hd zZlDE{Jz0_x5RhOjun{~H%uqE?+MN(BR)Nxp&#=?-Bt1{Uy4}55+_)A}ogX9H#g8qK zbZL*4Qm@!IA21q}l9b9^VBDwd?8>oKl%`4au%@$ z{(2Htq4*UxLCLWKq}9IrlJctCSL>9DUkF-21V0exKEs?B=~@h1mdjkl)Zh;uUKj&X z_yU9}9%d5~Q<484V+5)P14?8bjGcGC#(^?60{UUzmwY%R>TO)lD1=q#{{SgJu0)b% z+!~Z+Q{9n)F1hu3*CVED-w8jzC~}Pk^sv;Ot23S(fJJ3YTJ?j zgr~Ye8l%zZV1v=DP{Yxx&>Tmp0;#P%B)2xFt@#xfbE|K5G2El)UY(wAk7Ik5srd-> zbYhFpWr!4eJPJR=s-U<$Y8@3P4$EKyMoG;oJqTB9SN=jVlpeKCtm7XSol0N^;Sw>P zO7Ik^Mh+}aUrx?;^x$V^Z3=wO@G& z_!G^sJ5zwvNL3rsx}C#l%;N~hsSWGwybpFNp7Dzl#u7*i##-qVZyu&Y{AlVI^6qaU z+qk#URMYZI9#(%EMV6&Ks4`-Uk!DuIK@iS>#(tatYNiYAYM>*CL=1SkO<-S;;@JPj zh;w_er3B`|9$28EIzbf{9Pr>m^N^!AKyTAtb|IXa5N@`60Ot>G@FZGi2gL}v@{=Yf zE%qlXgf-j8@RO%+v`kNcVYT3E@q6{J%8UI3u2_I%C_!5o1XdoFab!oKxY(kEPd@s8 z(0)}vjIJ-XNFwH$Jgi>>Kg&gM3nr{}?QixZiRUju#I`2&Is3m7@^0q;i3OotAB=8O z;_jQ!iDXXCh{o32B=aO!HxXPP99J&~_qQ(4Pjyeea6A+E4q6Z4_YZsjjren73oubs zTS4Lx#AyUuTXXx9GKn|a>#OpjWV(6LMVOmW$U5xC%wGi;2nk`~j>7x5fN|+ftFa&2 zjmQFwja}Jl?zbCg$G;)QeoCMbv4%5E5TqW0fFHZvk6U!j!)e$*!4kexIQ&U;oQ^Vk zLNPfRFjp=g?9jaUQ?NoIZR|-CfJc21X*t;vSr3%fH*0=@=>b7WB>iP=U|swb8;;m) zRMpznB$5pRrUwF*?fwfT5xkmjC+ksSDR`+14K-zI37EdhsOCel{*+*P2#2#7!}oZ8LLo#lv^F*o0DU2`(MAEQ`!|m3Z4=?S zV+5U!%fgQU6$tSY_v&u4lO~IW9_et@4~OVQZZJ`vT%w)YqvSz; zXQxj&vo?sk6o<28|147nhRe;Lj&N=}oOXjZ0;ep<1E(xcS0R&5kphQ;32u=BPnjSTeZM!4-gCv` z)q@Cr%tj#5v`=*vh^ubkM=iL#FRNDDhUgep5*DFwSWO#d^C~f;^RynE{|8z80n_xk z4!YRo7+H=3^CVT7)K9sSJVeCl>}7FcjNl0hSdklm*JxZ8AS7r(*qAFj^FL<;0W3mevA!B^tO(|YN->HMsUmFNt_{AO?a*d?vx zM<5u7%ORlkVsT@0PwD(e^DFf8@&5hN;da#M@4b$?e z=Z(;S_`Dy~v-Xtg!yn3FyZ9_`K>XL&1Nlbbxscw%D45!zItFGUUrts{b42mn*tX7) z0{IvH7%}`_Q9sS?zZ0U97x`n~Y##ciM`}2;+6T{Rx5~kPi$8-}Oi8(mjG9$0>}N-?jUMBC&Q)m!S#c1eo74 znCBZvp?2y#FBXGdimLaZ4%yTAqBz6B4Pyiw=qWd{Zb9#=Uc#ZqY+r`;UfWTAx1?9t zIMVtCa28dlq7w-FO)j!o%NtK)>j1+I#vHd) zVmb3;n7>jL2ANx^#?IM02VX5_-T&L(6NchKK0Rhb~(7st#78!{DN-2|91iKKXrpbdYDhtI6XIY z`ZSp&$UlVfwi}5}klo!b+VU{4YfI_6uBG3)LJN~EdUa1x&La)Zu|*C^AZ1JxC`A}?4%f2$}Dw{oZ_@uVKBaG%dS$_g~QJPjRQakLE=q1cK z)n2i|O@dszvsNY^oo{IBwhV9UxD#pYzIRw_R`?*9)DH>(ogwS}ehkbK)xgP&`z-9( z%{OmfkgmQuZS#V}#-D}E_{N%!K=XfIe#RM(>3P9^{LU=D8dHnpN<3}_S0liDC=6KW zwsvRq6;q(wPL3YX1>7IJ8fWpX5=;i#J!W-ncKQ>;6LU~9;!hSzLw^CpY)(F#tMYMS zaVG7<42;_nkTC~$`xwMA*n@WGI0X*19<1HzQa7GNn>@ zE|c}9N=91FOaqzbcg+c!7d>5Gc6t`L~-1# zT{}em|Em^Y24J6Z{8S4NJ^tU-0>=MV3z9Y?H|Y^Vuc3W@=fAI|9ta`rQ+JMs(k3f< zcBvjZ5Un(5ORO7fyj|f^gwcR*)aiQ}OWl7mEnJ^X>oy-kMyGUCl1Cv)<{@{aZg&*y z^wRt$maIC^nbz7C5XtRd$XtJSTg+ySa|PLjK;qPzk8%10#8$8Q@3SuOII}OB7q$(m zHOAW5`L7a6G&^JfM5@{@PXp)~R_UD=#eYH5v_NsX`D2*355D~qeR>jB+Q-V;KX%xJ z4_4(NQf)}N;^3!HFubd~YTBzrd;i-Q4c)-zUhEDp$GG<%l9)JPUWtuP$ichhz5d0H zH>#Z_k@l$(&y`UZvF^HVJ_@a>#3rn-piL5aHK&E467O~s@F zU;otMz}PuHBG5>_x9%4`>mv7ETxWhcY$P-neHlW{=~w%odOo%Q(e_{-_-y;Jguu zUwXpOcRt}<8l5sCVHV6-s3h1a93+NT*{Xc-OjBttGed>Ye=y|!NF4<+8tRqBoRr33`rj)^al&o=J+(V&b9M~@EOYP>hxvs-WD|8Hn0 
z(ot|cY(eav5dOMJBL@aRI9hd6Os{1+s#ueyP?Gu8)HC17YE)`inXW$h$tHpvB5+8` zCW1r6Z^H%}Q@tZ{U63IFGOu0OI<#tMui?Tb)7~e)rY}wwYco&iYL=FdKMcN|L;bv# zwQYQr9>1=ZKd<%hX57L`C(+eR6m+^hd`|LO;ogT;e3}D#`384e}e))aK37+uqvBy_y+58{9ivyggICjAQFq zdYt|mT^yZs`MAi04?784ynp{(+}?W|9bDY*e11R4o&D3>RKriWkPx1V3zWn5gC z*Gt;{_WfKt%G(h9v&ROx0kN#+yhFj zmp$)aFWb{E`+n~JYK}dxSMOG4)FLjQdk%gMwa;Or2d|3WZGP##H#Z4Y$B&ZN zC)|kid#~79$|}>mJm6;*$iWl z>c4eXNzX4$n;Vm~GDg}$RbxB%MtA>xIz5ImPu>FAIzDGRs{6oO8{7n!Zu?FDC-pj$ zNP4~;>tAO5(WbvF8{AWa=Kac? zpkhRYCdZ$563n>9-AK*w;v~W zn3|14+Q7yo zEthJ6whl@LqA)QCE+u4*%KqYeA&iOnbg?co3=sY^Q}|8vTVW zgK4m**+m0{OdBL3qFrJEVHPE7IZ$_+=XD}-&S=^DW0>%BJqbXftqPHU24vnbaNCfd@YXIB z0X(uolbrUyt)uuTHGT;HYmR432xo`NMT7=Pb`C@LxBSGitlxB?mA?kh6ox<6ChT22 z|7He5kO|O3zS&7;N${M+l3~rDeF0%!_G-i`JNb)g@AW zP{T&i&HO)vyvpSKm8+?uO)ZzIiyRvtEEsInGaGWo4BxM5OQ?3hAY_$0;kCdlY& zZ5W#Ar<950UTZMrY>}?PFn$OS;Q919uky#|JaX)w}dgo!<8IVBUQDh;XpOgkD(^l0pKoTHuQk-75=)Di+tsbNaFl zQnKgIKru|x#A3<;vWc4Zx(R;6{2p!v)sgY#>6>%5${ho#6-BeF4$+`WO`{a(faNL= zX0i$rnFTJJZ}w=TF!L}m|H%;1nsENKrYbbilYqjEy5K4zmdlm>vxs8%xGQc|5^-g;yimy-P@XQ^G?)=gCYbMgD<0U(;NUhm6KY#YJvkuIl}fp3y%6m!q*FMYag$ zlzjw%!fBT$a8xlI1Z$o@WFs4)KzU@7SsjKu2=yRRNsLR3i2+SFhTP?9}fVUSkq&M=uH?ZHj#R{mmyRk;I2b6_16+-@{1{ zj>vT1hcSO7YVn47m`TbMu)wWJ4M;)`!<`O@C|l1O*vY|*z*H-_w;0-+q3Ht%Z`PFF zSwG5fqNIFok`Azh`L9>2!uN>?DwX9ngcusdE>fr?Ey=ph0kQxmuty&|oYiyPE!40L z1bwM#6*BF0CVIDsUrBt0Z(b=pLBU&P(q~A}W#LHqFG!smNRg*)r$cC{XjSEBLr(o< zshKDrHuEG|C4u~gT&u-Kb!b52cuDMQ{I8nRaHzXT# zq>@WQ_UduKrXurb_QWpLE)fH9Vq*NBiC2+lrzjjYs$~U4)p{~6u^+7V?Wt15pe9zf z9c&C-!Joz^7X<|(1eo4;t3dwhmVEyVAd7}A?6hGvM{+5*7%>P4Vtbh+zSomdTVz3R)fG=4O^T#)MNK6u zp4;*AESAeN1yS=WjR$wKl{ckOpsf-c=9PWybTkXJV~bJ-_EzOl*Nty-Irj?VrV9pG3GT9x&w@a-&fV z5&*%k-#?Mjo~;Ww(3!yyAF+QmDrxuWhAzpFEeDN_5M99CV&Wnm2~}mqpQDbnglTX= z<}>>T)!1*aRk@vr9lAR9J8IjK%lrYr3YdNmUHMs)UcCO*0FqR5<7rlAZ%OXR?NB^A z%d=OZKJs2bS^el$Pz5_hWUsbAa(|xFq5qAu2X@6Z1Ge+q%;Ng(s&(%2K&iaTF}i;N zEerOT0)mK_Zj~mjFBr72-z&#Zo&4up@-rKO!Edx3mSi@&gK%bw@~v;v%+17~meazf zo1$#Z95h8Y6;V=5`eXUNf?kpJP!LS+TM|qoVoe9-ikR291n#)r0NY9@3YaMRLttoM2{;B7v z@Lj@a%c7+ycDxzISsaw@Q;K?UxA9gWp~5Q^I%4~}v5aoF+%vRnVv4s{BPrEW0<5tf zLq+3WYBwQXi79)+4#;3JC1?L3Yx*WK$mNhy8Vk`xDQOMEw!FJbI5hr=#6C*u%oSp5 zCz#Y?QclKNcg6ra!Rg)NYPYO}tdeBBF$8n6Xh46;G+kbkuVtMI{W*oYg|{$xMP5ts z!9K#Nk8MypATS2DByu%}{s#7y+vc90grvgF#%yhs9%TUh)-s!7;68Rk-B*1p(Rx;} zM0NmPrBc+!?@bLypadum{-cMjwv_8?_r@QK4W}bIp?MW0%Z3)8=>3sx)(4E<28%vo z#;MvoeG89vmzr!Oz={#2P?ZE-N^6+HRA2zK0YjE!qh>iMoqJGt75KDuq#s#wE0)@` zzc*;6DIgYnfSlr7B=)MdW<%Jk<=l%WEN#=&@J7}=U8l3Zc+HlsQ{^JIr&S^?}{_wuVu&9Cgg31dlpd z!CPP;Stcl}Dgcc}=8GrBgL;@3TZ)g|xx$!R7j9Z)%EcSLAUJ=}kGq;~GH2LOCGK*W zS>#Ed(h+b?hB?!vL`$_RfGpL4a@!y!9}iExRPt8qkw%m)lU*1nyN)M7j1&&>d+{|L zc}k^K;gHL^tRGWe)%>chg}L{FN-aa|K=eB|4b8d}bzT-6)R#J`WSibHa%#)pBbDZO z@&Q#bj$lfLH8UMU{PQEgN6t0~!$73Wh-Q|_xsgf_nGC%hOcb`Pxs5nVAr<#ZR9=65 zZ$Dk=DkKOcd2w3$9Y#z%p4U07b39I7mvGJVp=bhIxd##Qh>Of6s7vtGWy~MZ1P9nU zuE~@%>4jNv{x08%CbcW0X-x=0zDCR`Q!i`is6HatGFh%8l!PWL zhRxO4*j^ynG-*~N3|lA^)~m@3jNHD|66+_-g70xBQ~bMhhtld1N`0F1-kAUZ=$3VPIMh%k zouGj6M{}XlO{%0mrY?&h+>j0@d%WSQ+M1=H)uMpkn~FKwR2Jw({flw%BpQ3#B!|Kt z83JgX2+>I6r`!gKqLJYPIU2LGV`I6CxJXmfDhTq|^6RJHZ(|e6f|dS?;F#SS4$(?* zX}_v_@&WP)5~P+Qn@hkl5BKyZF*v1}wS9_nU&P#fCGl}y9@Lb66qOO4B*YYNvTFm9 zt=b>bRN!QMDLFmUEFpxR!NdV^iYd=ro8-a) z8dL|jO?Z$f>Rc<~e)G1NCfh&-7iMJhF;lD^y^R7Rm^ROxYO<01SWBVK(d?px{ygGA zIzT*#sT@C_hK#^0!w^kNLo}DPVPetN-2AKP&mue{$umdnhSr8~?f!eQQZ<~P0Xl=b z;`{+xz0uDM_R-d9{D+rQ;d6NwsdEsUB&May5`cpLD`575xxqZlriq1Iu>W^bsBjK- zOMe#I9_%kwMNBD3__E|^LtE|4>Lsp3Xenfhd})Y$;M<%)<9|<~igkKg${-DD>%}2P2FeBmG!Qs56yZtMRh3 zhGcQ9-dRYkG>{%DF;2-DC3ihx2Ny13=Fz6a{(V342kAn_Kp~`K4$V@!Q(Sn|hmczi 
zaVft+Krr4|kZt7Unknb1GgL3EFv%6KP^`LkoH{O_z))tmovKeZ|N2e8yi7?Ar0C_O zvMp=gh;rSWaMUSWT~QpFs)#b1L`Sh<%6DAY`oalA4H-XjNY;CuUW}s<+z~B!PB_JF z?fQF^uAxt2%#;#wj&}3%0H9cYR`B_#=&!WFq;^9X)O!^EeUOK)hS%qUa zk%MlySCmb*;!nh&-8Hcq@irS*DRm-Gj-^IZlCZ~NkW;T=Rw$%7SbI+;Sq`IQQXZyI z!ht6FG2t=jxhyfMDRsVbPpa<;g3|)>PiWncL5Vxk(!Nv>i=e$2*=twCfX@X7n^88hp4pZDa{x@67!wSteyETk{e z2%kK{7sQx4Qe1cPPt+Qct}rTHQ&bz_*{Dz@I9BN?)p9`35^hHe?+m^;F2iQb>d!ne$UknuK<*@i-O_H>i6nv%ayn`mO+54z@uFMJxudFC!Hgb-gbjc; zy6llM=m>Gq8dc+6@%bF_vwG+s8%`nwBAS<7h`wJ9K3_U)S93)DbSd9-A;IrZA9}0r zN;-pW)%=RJ&6PnmWP{Sy*j3x!;iUW9j1UoH*_D>jhO;5(iNUvyX+5r*n4mQun5G3} zSH_|6@%2Sf93Lp`G@JO#gW7ds9cXHU6}qX?%=S3RJJ1z)p2a&GZ3%Fjn>w>*!m9o6 z4NAV55pSjrmyj3S@0)YnUVY87@gQ77!PEC!jc7u8)$U&XCcn{MtVJVk<)DRIrAh}@ zZ9moHzPO)t;HIL%ziOr4uglKMC(G}%-L0RE$A+EO!`e6f&l}L&zFp=|yzx!`C)$yD z?8DpVCJLyC?e{hE_%xbV{kVu(NMV%awHDD^Yr@oMjcxJPGLmd;{JV03d$JvXYY5w+6kEd(EM6n@-4 zi5r$BP%?c4-r<16r-LEx&56~5Q<_>cYr}PKUlOkBgW97oX|uS#TtZh0->UF}qVH99 zKJ$y;Q;S&2KnAJMe%ze$NtPS^k^*5?Rg)F(Kv%3BI1T6KO+3Q**lZhG)f#8gb|2V2 zH|xT`7NRlV_mvyvv{ZMg!jJ}rktqP!aJKm(&# zc*hZaNNb;<+`w=8^V~~4N}Mx?ni8jLJ2Za+3-9^RM^pkyV+Xs-{Kk&njG5Z*sc?tz zaZ{Kwc13A!%2!kwNdyl`HtCi2`{7Gyam%JiKCvBActO;V7?WgAAhni(Rntb1aPTYX z&h_3AkigpEpN4TTJG~nnj54~>z$U+XGiqUrL#>}R)5HrM=M7rD{+~>}LhB4Lwb~!1 z9{a=8%%%qGi~v)opO8VT6#ijq6M(5>Q$b7(m;8E&2XkwhD#Ur-5qebu+6aw9FUrgs z^YN*6O!E&@XAVc_g@vA_0Zc8(L8RN+1~jbEng;$bwLD_a#J(eVy;la}_U2W^PoQCa zqe+H&y%FtDrXgDi2*q>muBVqRH16$;rnx-7(IiYlFq97%p9YDS+oVO)%OKK(eg{J9 zel<6$2_#}TLnlfLP2|7^)kR-mmggHhjeT+^MNgI z4D&-KnHuK0to%hpkWf69u2w*cjrN5J2(2+@m}88%XwQizzhQZR&cWrMF>93yULpaZ zc%NowUM>kpDE{$taB~Qya9J&HU6H8-`smR>NI89IZ4YjQ;;c)Vi49 z_|oDVY7OUg{PWn9vY>15AJxhQFW~5|0U}GFrxnb@%l^D|03!K zf~xmo08xAYo6MT)4^ab-Gj0N4_tjLDSf?6A2r&uTWd-vK2*uA#;y?brI{}^#5cy}GoMLS)^LXVMw?DpB@2f+jd z0riq?X;&8>p`RW-_g7;v%kYSMMy4xL$$-bcBbvy)$e)));+l{YTIgb;eiL3Ku5IER zL$q|f97Fh5^0cz(5jt;<29fk2eRMZIeqpL3C-b~f5Cm88q;engORKr8mrQMY1`BuGN`MJ52QA6_j?|0I z6RXb4qC%`(JA;m$MEm1?Vs%-Q%qecjI0hSJiOMJbh8!pNQp^+5JD-joe@?7p(|-CJDF*&nfu`X?E<$UOC()`y}PSrl>G=7C3Vfa?P85Z$IKYZ+pTm>#nz8MZMP70lK*XURN zx{kGiWC6~Fxs{%7)I1_iT|y&E!HIMDh!W+EujzO?(70gJ4ddvEl|NQyDs>LAsJuQ- zO$|27Hw@OK$rW<*OHf;eSxzzg6l68pvqz8eUP=pWV_VZrpWK}=n2h-&zo94+1rD^d zZ+-tl@&FmXoldzanCmVdKlZEQ0#XHVH(zdVpYoujJ~6%_B+9~qtRU+%OWcD9!H(qs zvzDe!cOFo&+BFj|`x5P#rMe;|!+3%-Jwl}hRIEOHx!veu>JUL|PZCK+bfdJ@lGj`t zrhVhAGkV}hKq!u$?qj8RTDm?o;*tc<=g#35W06~5P3NPV1r)2v!x}m#bsZ84kkF!M zn?vDuE?}loV7V&R*sX4Dn`UOj_I$c8x5di42xx3=I$#w!)^ltco~|WU{9jA>*vv!T z;c=hEsypIxd^J)(3^P`!j{k`y^%SaMOL*lgwAlzZz#tAu2^6dQH}YLS){65Dzbv~? z!HRrIJ$cRFWuWjHDu)_`)PrzWldRp9Z-n9@d|x4zqLF=>k%5sq-9N9{=XuUHBf1*Q z+fwk!X644y40l+uKS0r~?&^9Og#d{9hGUs*$XH1VU>;7Cx;w z@TPpwLa=F8B#>Xn%>%7RkjP5Fz9@pHAx8sO;1FrZd+nsUGVxM*O+L9YT-ti1mYUo! 
zWkE+FLB$e7;!@djujB_w(vhe=sr(-!7$ch3or!{OC7v%o-FV2c-|>7nZ;@;4Tw_;E z9gkoU|9T8J#)LHbb(1T(UwhL1o)C7K9bc?Y{X}ulTcoQq`~!P} z1H4ptULPAott|A&FPc8;#qo`&j%vNb%y9^o91{8iuRy)(`U3NLYi)6G!ikyManj((DAWgQRN?@Pw5=df;7&CK zWj)2MIEb$F5EEUh7*T(D%z@-1{4FzE*0vA7=%QlH(U|_&*`J`C>x|W6%~|oaNQ8^i zxLMv&+jkW3PIFgZT`?+NcYj{qGIzwhHAOLHFQO1hAo&E(l~f1yU4U)+^XsJ7OYp5_ zkAdGp&^W_4Z&eK8);N^u@{7NS7f+v)wNcMVPttDD>|ch4ZhK@}s^LR1^7~XrNqx`o z-Ch{`dh!LB?W)g7PKM;;R%mI0Q%1h#!iyF|10N8PjWiY&yT=e6Zi<#HJTOSsqnzG_ zPxhzbd@YM$+KQ6AzNdf^vW)%<$(RpUjkdj!XP22!5T>!KiaW1MFWaQ;?&rN)v+{P) zeqmdedh6lGg-CviHA?H|=nlFP24P7^#@t9L*KZ$*6X9Q|&9Tp7+KNIkKPCuBy;q~P zlZr3xt%46i$beY*Wirr?CR9XCwKeh+8+^-p3xS0YIW)-dhc?4(#^zbP*=y@{6{B_| zX$HEeRE!=2UeT+U1johQHbh^iFk`+|m?e>SJk54NivvV*-L~0jMA6j~e3MAcC#w*N z-7o64vTf1U$cC6+gVi99ID{i?OsB9xfyi6gno{hT7Fe8-D!;L6)~Z}Itj6e1bj34< z8IzTg(1+UPZ>;511-Z4nh_DlCI+v>7y+qP}nwr$(C)n%j0U1pc8MhW3 z->o~fw>{SlDPFu2y>@4lVDN|Oho8$24(AjJ@r%kUu)%)zg=bVLzNDl?vc1cDySW*G{vEP$Xz%|_Tu*#XTqEp#;fSmU6K*s_fc*)2^qjv=QwS=nFjv4a zoo^?q5y`&M=)q?CIdNT9Y?W2IoFaT53E&A7fI_0dlY7CKJ8dcvOvX&9l+{g;g*AlAIL4|ABwrMOvq@xhU*7Tg1FNedpC|yO01~z`R z?8~@$5M*K0G-Zet`o_2tyL4y4jE)?u0ah%S>5%R+T<1&{TNbX9@)dA?${=ArnD|we zRtv8iWNjx9h_9LL$tQze8|LSjV=A>h?gYU66_`%v+Hb~&f$|lr*Nit=Gi=#B>~@3_ z3y-IWtOc>tglCJR{Wbg82xXIt@_E>n$fz2{mCP4N76>gQ8KfU+?EC2K+l?!;1IQ z4She5q1C)~BK0-o_5yCA;O#4lg}lr7ra^MsiG7Muo+;0j`k9GsaxbWRvW5x)2m?5r zAQc}crw|}Ps5~G1IdUDCNX#nP=H`dc{W zr!o7QZmPsK?_!+Z9++l6+QerdCK`%nio-u$;n7X%;>GcCEhJSbE>;Dh(6lDmf&NIJ zao7ftk)MoRnD5GljBW85hn+y<&N_(5N723wf1LN*JzQhcDwclZ$Dgin%eWByMOEaK zY~9E#NYeDZZ=%Y!RAk9<7^2@oHs=(>rJ2(S(V)oXb>tGit?_r>T07a>a6EaF6d+PS z$9rK{O&R#0Cly`fLk_nel+|O;_8Jlwml+oD{-XF}LUW z&DeP#njRte?ldXnUk=5e8Z+xV5|pg(TbJg-s97Tk?H|AAZ7}^-t%;F+&)pv|XZlU` z)ZW|IR-cR|p-z6Q*7&0s_{qBZkYC0QVDqWI*s6D$tzE8OmXt(Ij^f{mMKJrZ$geDV zJ?0&E@hw{X%F1bdlg@saE(O145Z~gc*``%b9;|MJ9uC~f-sXsZ0j+yxJ@yFOULb^N zQih#nHit&^i}|k7k9Lv&fwkU`3vr0&nFH% zu(Vs@V)=Do`4X&}fAX-K`+k1rVH@oc61$xy&>FoCI8OCL-|TsBIJJM8moa2-jrqOT zyrIs+_)e2}(<;%_^V~Tv`aq}^Z^ZVU4>VN6`ie+Yd$`iAaL(#tL5N9Fd#iqDqpr_9 zq+8!wS!bOM3wQ?x&anaZ3N;D*-50L!w!BHOFY)huex;fgR=}95pL$}rhLqBa9*tr$Lq+Q zSC1RI=&h64kLFBA&4!R$`ICu{B)ql$Cv_}jux#1)I~T|^=y=gMgAKszZC*VGk6l0Z zcN~jk%H-wg2nSToi`$i2KX=LQ z=^m=b8&31rUiu$awLQRt~Kp+ zR5A&|N&y5D+ox_%}#+W8a2D%R2%DG0rxhFT&B_tnoL4J;yf zej0^m)$AftynvgeNjSSevEY7-9$y2@NCW=t4F5C=cRl6_`fU^rW@mK!GyxXbe{NSP z7_VPi3G1E@WzfcdbC}(#Ae3}u{E6ARCmCOYxC!fsBby@~Qn*v|ivrN(X^1(`P-4YB zK5&{OABHz2O(Ueo(H-$sE-`QMD+8@0^OZ~cTng18>4v0FEF|0=9cB2xYPJJyhyokn zr+6At>e>_OYXbYfOKm6 zL#NZ-CR&LdeaSHkU4Iw_N~rkI{mehmwglEkh8+Hl*!L!t1E5aS z0Z;#-uPs1Ou8Yd1B3;tYoTupywbr6&N>1f(++WaTX~=|{`z&Ck_jnW(gRsL8X8T<{ zHNMSF!XY)Ga=MyRdsHF;k016yg`u{x8~;!vC%!=1n$VJT_yhD$bNHF%iVh`r*&WSd z&W|6T&Eb3^)hH*wo5P!k_rAz0d}d>N)M|`Hwh>7dEUDvua`xyaXOm$hZwWDca&{rx zOqGe$L*J1e>`Vcmzl|6MKnpQG1b&i>2Ue>wXv zXa5i5>_3~=U(Ws)XH%3evT}TKw$^{a*&P4s4j)5(*UP`^LjrV-3Ph4f=pgtmSX$Gk z&V^=3M)P7P980_;1jmH^k~sYTvI?Kk7FTFAo{;^%5^*oIJ83_?F@2Eb6s!B;n1rwg z)x=$T(&voXYO&2+tdKnoe)ub@VUZ;!yNFpZxYS*LYj@+-oO8U3m~(|5X9uN6Yth( z@6N1ILf+#4A+T$gnyw;5HtVY!IjO^#xvW_i;`ic;@RTSZ!`L6ysw}}WkqD!*GY`>J z!eFH~tRf@=C)z683!Z+plo&MMA;bVpD6qvY-iXiRAV#S{6#->9CN;0r8s}rl!gbIx zKFm zZ{Uadv^Ene55?K5rUt?O7OPHlnv|UvRWcz+YS{>h%!ioU9kjZix2-Z0`Ye4=ZTPGM zGl}wB%Z{QG*jMzV{d67w)UrGNXxYnUuOy@ff0$VLMAg^fT)DM5qze z`T6ihG&lLX8Eu^EZ7|S%h6@NN9Jjr&D@Le>Y=I?@Bd35_hwu#z_+o8UpE4UEg3D~B zQ(*iXy~E53N3EacG5TWRN9|@{W*yVfkJ1BznWG_(A3_}hd|&3Pg4A+y^<7tiU%)=4 z?BkppfOGogCCMrJJuqn@ij{gl7V7aon2jOuDP=PzL)v^w*%Iog`!G}uG$QxBF);%O zlikf^Zpv4CLmaXpI9$2i!%nlSYHPS>K{2wW#A!>98C9^3*+7C8E;t52U#S5)ZVUQO zY34mAT&ZYej9GzF@kSMYrSKc5oo#v*-n?rUZLV53Z<-l4yDf41#|7MxDi7+&lZ#cT 
zLO%!U=KZE@)Vd5-^Jo9?!w4eCzSZ4w_5IdSIS=a9Z44;&Z z>(?RmN!jwhDf<+F=X?*#fG-4~KS_c9ld?g@Y=OeA^i}pLqz-&++U0pi$x|fP+#e z{uX1ncqM?1GXDuE5ul~=fC(t4EqL0481aBsuXsr&ZM}b!vL}K9DkVoleoNUjBgZPs zm%V{@R|cIscCA>W_HINU&uuqs(n?Pm;@8^5qRTexWmO|W=!7QDAckK|O=|A2BngWy zT06A3I;?PZ3?o8@BfV0AIp^P9*28OTaJXh0oaDspv_?}8R`MI%TH{`V`TPEabY)w7 z0e6w-BhQz|a6gEG06>Si2+dTgvBUwi=8$MR_=&k0J5R;885TR!vaUP$#Z}5L+lJ?x z(;au`oRiifLU)Z(GWel~?_g8sB0_7YP4sOscG;r&<5DS5W8((B3VA>jZ$ivz?c(X0 zz_!M_!LPB~b6tHL>=x~_@;%&+ZrKStPyw0E7QC9vu1vXtO}g(-Mgegwyl*wyO)Rb* zMCfni^`8B6W1QCpNZt8tU<%JVx{Pe6V6d#q3}vjeERz*hxxpMkbio|Gn3KxysXwva zaZN7`4lAFx1}AwYwFUaU(kpI0a7{7!peAAVeE@pW9s}s$nYwG|?S1?r0P%t|03@(n z!3jaQ2BmxF&7a|oCS1#Q9#gLuw<7h@RvOpsaojlEPVBXA;<}a7KQkI9t8EMEhMyJG zteleHSYn?9JM}t{m_vO~gT|C=4vxK$J17u4tb%4L_t;&fHe;4GJ7ViJ^~*cBFl+;f zF!oktDc})@dJlI?#Wi5ya4^PIkR^8SQ8vvt^t=$e2DUeJZ(j4WzQKwtptwPhXz~VM z3AZ0}h@c#J=OQS)1`tJ13hIL-J%-w&+J=+)PL!|bPUy;Zy+{bK)d%ebV@dHX^mzvn zK!YI4Q#S*!v9xE?F+3T!d=LnHqPDk?rB;pSK6{+&q<$U~T0g4pbU2#4tttpm~G zNr*8gW@EygKX90)eib$&DG(|#2Cjs`X$ZXvnb^ue7LV<>!yr%{f`(D>&_Kk=)j~k1 z!K(SMmgB>)|8@;u1L@^X&8YN{3b_d#P7p`cjJWr`K$!gy8Zbz^qP-qA3H)?63kFuP z+^EIS{{l__Ie4vVZ^X@E$IZ0S+KUzr@%Ct_b*k}u>>7g9*Q@zq6~os{{T^qFh51$^ zK!inalBZsr6-VDUrXf@?zL^qgtxVA%b7qR|Q@|dlP0rwBoRQ~M>A{FG^PNpzetXj= z5_X>~$x_ZHdAbnF&MOf7)hzztY1&4RB>w+6g8pyGp~H- zzh`Xr&spm)SpNmA72e0+Mi4k8q(T~3LuAB3$YJK`{FFV*6uPP5da3r3KMO2B+D=C;*aAuT zDBU%K-fWaGlVDBToAdIQ!m&npBu4lSFIn<=_6q!aA_rU{zHii9?y6y}VweGXZ z`=0p^`1T!hLVdyXChRrHxxwYA?IIi`Uv_Rs`eqIof_$iy!AddhK>mJtQQ2EvajO6_ z-u>$mi$KOL)`WTTiHI&n$8R>toH7Wtt^H}GPy_oaMQ_A|lC~AB7qvRZ4@p!7Wde&7 z>ST8GS=vi3z@!tq00a+NLhGa#$a~f_PlCZScM+b8ROJxlHB$tRrM{ z0p#Yo*=13N9$U(v(QFb$n!5YA;tFfYm4wg6n1Bl4v;SdADs?Tl?1$E5`eZ?DLm5N6&Z-I#(eXXD23wyeqT zz09sC>DVVdO|6Q47Ys@wD(Tqywuf4cY;;uQK9_EAU{aNP*e!}$FO-D$kZtsi`!#qV zZJeQn`cWN?u>uMrPri>adj937>LXipWe8i-@Y)Q>*84Ouf+Ou%DKZ6FNJjb$i6LSNk?{ z(>)@qO^+C-gHIVB9kEWj*|LPY%_`gHT7ao;=Xmp{Uw2I-oD`GYAl2A}sVOfX+BxdF zg4GNXwZrDX71(levUJw3h6A!v2x1eb#Jec=77!H~t7LyUIIAb88^pedl@_KW_Z!F)N%Y~&xh59S3ZKIqa$OKi{a|I_J=i&QR($P% zbK+$fU>-1CP|sM#GbQ0gc8J@NP)t_aT{;L$S!wK;9QJeSdBRXrdJWaVfnIi^siREw zTP{mN(UEcPQ-;L>?M+%W5qfUeQ#f~e6jNIjr|@pZY5oz;a*gEd`w|2x<_VZrLoUJIrW^`wesmZQf{(Y-5kO9<4Y`~ zi1ip~T$7V7gf#VUNcQAgRh~a8z2_jZjvDTWZ*QJlHn*);$#StA47Bf%y62c_op6T@ zH@F#kK5k-q_l8DYfQPaaTSsAHC?KycW57gQsw_xxvpkV1+P6)CYncsF z-lUdl^qNP`Bp6K*A-j2f(CwVQ<=~Wd+fwoY*jtY~grT;YBSI_XfbQd$k~n3WZ$^(F@%rs^R1KsGh_Tg{C_Z!C71VdJy! 
z4N-4@-%CR#bCnoo$ZbQ*;;7xQJpL)jRkTa&^Bx^qw|=hN3Nt<(U1=}=WdzDS zHe$RPFWzXT-&ORiucv~3A>7)%JnUcD$}PK7DlyE<61`?bIj*r1YbILy8oHwX&`*yo zLboxTo;cY!UC&YN3`*wfnkZuS0e_eD7_W9Si11bKifSZ3<65hYoA!6c4wK>g2G?{p zkZq}_@}B%t7pcmXI`Uch!kSA75Zgy>rxm6`!j&T1qn{iAHC^;cwV7DK@TgN~%m@*>N2 z-#vK?gOqt)p>21p4d((ZP#dq|o}>M`%|#{r&kaSEZJjAs)Ab&fEDj~tot|t?Rr;(R zcZ>NGsW{!)zNQSGGq~-`E#UU~XPd*^>iMT-mXDsmu-=6>#+Hgl!_&!fo}UU~O1nMo zi1L!@W?FtJ^*ub&QNC5;`p6Lb3+Oq{{`qT-WC-yyWPx{#T9|FQLaP&lyZ$dX9nVe{ zLjiC*G3@v|HKk#Q;O~9x{KW9H0psXS-McWbh_gaS0b#3at`#=R!uh~Jqs`fU4_JW^ zo@7^&CE_cTJYNK-=rJ=s{Jeo)SFegXwv1dk!4h`HK>7*Nz=Rbh4g_i}Z z)UG4|{uOV`f8OWpW9YGa=&HG3i4_2j6=1in8iNq9B-$5lv5vAacX!s+vbHGBALO4g zL^u?(EY{^&SIO$B096DCS+15>^cbEA1GGWkkdyy40k+v=gcX?c-#-yC@UOdZeXtM7 z+>%f$t*};1b|S23+2FYd{0)pEWPxR5{fji~ZJ6oLkE1mRHcha?FWdlucFl0Go_fXH z+u;=h&~g;OFmKc|c%K`#_I-pqE;D!)x8lgCyDkv)3B0Ozx>F&GR9ych^}Vp2frxvU$cQ4gAI$7uR)2$d8IDC{8{N*ZJF;gVB(Y%+J_@TsJHH+3CBLEn5m8~>3Ak}Z1UrH@7I6DzcTA+hn z@t^}U{u&Mv3C# zwOihrZ~(ph{Bo%+{%}zQO-gC>mSnjH@B^k-5mIi0%m7X*8+Quu^7zu9h1k00MYx*f z`26ztk7{G;(H6NznPrntN*Ohu&k#}o^yg{U-VXZ`8`t1%TIG&Bu(nU&En4#PBGe#- z%#~_%#0%#F#*CybUP`o>LZKH>z2J9Q;?O$!p(bE(gu-v6^hC!z60rEO81#YmABPAw z(O@QM+v6=f!I70|v~D1(S$$?NQ7 z{bf&sj*;PDy!BVY{A|PqqD^CC6h+v>-PmghO1WbcJ^RV}_Oc2GFPO5BaN`spktnq` zUNtfNN7e*G!i*G?>zvSqSDmFt48_jo?)j`q>ZN$<$;b6>E{Tt&Xjf&M zO}42f;I)f4xf$n{1mIwaQfG@*>U5QqMhAg85DXtn4tb53B3ua9`8w%>qfSV@bL>9;;!{G2UH8bSn(~X#h znIg^gh>~Jt7#k-hNQ4?DN`l@4+!m^g3y%_mE1=i32wKA<=F?iB3VD_7hDvA;MbWn{ zLd}I0gNUbz9EbhLdtV!fh?+(1wxkH=zP{C5mbKHi6(U2B4nna2c$ixeu#^;dtrp8( zLSDc_o0o^G1gp)NHt}IdzBSaBTveqbkxQZrk?k(u-=QWG2mZL@<75#Fjk2pFCPCgD zjm~i>L1}THX;+kFz*Cd-c+C{3FDEZiWb6X5)M=CjK`rC$i=<%b=BEKR+(&aBc2+21 z>F$#x6uj5+L89q)x5os9aJ324Z?0-okEZ~{ZTV7?rd>iRr~^o5Qy!^$50e)1WH3|d zAxia?Qbot-Yv6)x|4OD|yb|oC1+YsH9oeQCEB&xFRZU`KW4f{_52GQVj{qZDS zHaQtOb?jkLMQJh>u_JUtS{4m}I5`}nLK(4j=sDv<=;G{vUSXbEmChXI*r3A!j| zhgP)4+=z)|+H4gryzyhUXE%OQ!XO&p>#x}=@Azz7syQYW7p=9bCVbapc-TM12y9$@#r~= z-ra?;A#`xTN?ss-T$>ogxoX;_uz5^_ZJvRSmiqXGlfmxt1($cb>mS*fJV<|94wuJ{1xQCEo-@Gu1onJ7+ti2v8|k8FilJZWSD4uq;Sgsq{8F_ zXfaHvWb31P0#mWR)P2l-6(nkjUSL#=Vo{thM1)^P-WF2itV>@~XbCz*SR^?kHg-4_ zQj8iazws`W!M&9U-6vRrNC)ZO>2$e`muc{=1Ez~v`ZkwP9X1zej=C-Ub_b0u$`@#Qms@z{OPz$VVHth3}kU|nQ(12`1ZpvQnwK; z9cT`%S0d_?N33di00C=V#$}0?GWr;qdPAtCqlMMb91?ax`cWPU(c4tfqr@a%x7mU* z3o#cO+iKHHFIp^I9hhmNHIc?6Gnod1f1=eeb%L1MsM=b;57-jNGa`qfmE>C!p;oeK zGEH0PcLcF5#kDxzRrHF@B~qP$a8x(&xX*oazHdEz><}^<)*OVc&{&+dJJ9biKgrJ zeXY`ycaBgnOf=m(P}{x*RGTqC#5#?41AGh}p>8!9^CLSq#mKNwEg}1#$5Xos zNJL^0$>7<>63JPzt-~p>Bh+B^#Y=O}vDXx-3q372KJUFbo`Z1F)Rh9$HG+;-js8aO zto`WhE~xKldeQP>7}tc;hA&P!CrFy7`at^EoMRN6^}$^r9`s%!mnxm<^vu?Yu6&~Y zUnR!E$ei?z))QNaWhYySfWdZc@gX>v8n|PmmRo{&@soct|#=Ezrcr-oj{Y~j(2z{T{et9v z*Nc@*HSJ_E&acyJ-UR7t#sjl^DVpBZ(EJ2ScMx|_3x}MMou2(1>%^@0!RzXIF~XH* zFyzyH{*5$IQhB6NQsD8!n;&={D5BqPd;@^fI_Wh_hFKIH;LBS5u2*_qpH zwC;zsI2=^Ko9o?3%bkHh>8qI2;JBA9CpqEQfA{&BTSOFDHH-u`*XmVb32ftV&7fPiQ3fNL#*q zb%uC|m&QauolxnkPlDb>vDp95p<;#wv~^#j&jJkd-Qq^tWQ+Si;VWaej9iq+PwKNNcqZN0vE{GsiMxWs7zUsI6u;aT zVMWLeYC0PJS9K{@w$z0iv(g>4JMym38wc&cBt5J!6P9==jc;d$a9kdkTVhuQ@r&iJ zlYO6m3QUEPk$TGp(+bEI!BdsAmGDhSc?V2`sAvZ&9%=R(byN;5DlRk6yJp%dH&~RI zDF)hkve#_Ci%ZDQ`mn*TBY5pk&Llr%qT#v4hJ}!ou>(a4=9q7~A)ZC1uAb5N*Y4H@ zryurkgQD%&zICGN1tZq(oSAUSNUC7Nv-NBjybw~d_Jl`DGHNb{c~`2y8gL`ch%6`~ zm9n{GpWI)lv~X1-@o4bu9t#WmeK~5&At<9*G~{TRNhJc*K994mYgbj-;v zGO4mDbdl1auol6hacA*0fU4oNe;DMX*jvU!zOR{H35B)_MFlax!q!_m!}9h1S}O{| zpzWPD>NF?1z^}J>o;KU7Fz(3RdgGJ(`n??2!kP$a9r){M%i27?3}bM8riW7GER_9qINN>}B%D{fj2j#1SMbJF+_Do?x`d#&qJ^VgxYHi* zloOJA)MOYjbu;5#F7elnT__1 zKPBIK*4e-IG0cI~s%|ed{kLq!|E)0nSMUC-cmLJ9|LWcUv-IwNHm|>W_rLV+zOyz! 
z)=$0r?Z4K$|ID~&E6c=X(Ia%Lo%+>#lX+(Ziv*~M3$}c>> z&|OcLS=*nz1P#SFoSO79qPvn4f`?Nci*3RVZ2|!L22sb=I8HM6VhAC{lkv1v1hMx%RZh zERrouE{Mml*J#im7vhBFberLhUdV25&uOYe{qQw^_h`VtvC}b=y3px3Zoq-FR9hu> zkNvBPM`atb`w!P*B=>y%KH+O@&cKzOb&=~;2jN_}86amDI3kGj>2Et7!-4_wWjoQi z3q{J)Ky-x@J6b9tCXF@mY8Deh0ufz~QI}nmk@GgqkzCv`pL)^P}Il7hnHcqV~O-ON7gt@&VWC=)$C7|g&0&vq=A zTdnf2q+kDnUz8-R3(Z3E6?d|CdP6@%?ET}{7-CV`QjqbIqhP!ob{pb2;OqPe-tdO~ zEu(zt2o?YA)@87DXLm5avFWcNAso&{TGG~2pLhd`j^xpb6*tUv1ZIk#%R2gZlT#|M zVE>uo-LJtF`2q(3pz#?Gjrt$*Oa4*38U85VI_r+>?HD~*(mw(gjZNt~iXj8VWX^F! z8t0{Rs>#AGr0p0{K-a4&6r~hnyScpn-5kIdpg)M1DcQsVx#W8YO&|zFVBg~#*{kRK z;3l)8twv^ug!cwf=9ZE=HgOVt59g(iyI*Ms56Q%v`>vS8WUSXC_1T;K?I<5N{7ZjJ z-r<$0ZyyRXlG)pKyIamt)BBtPns!q~p2c|dFuPkuu@c{?fu{?z#G9PCHzp27hb>-& zsAnUfUl3TSFM8U~d6!eNv^0Z0Q3?9IMs_;SgvPNM2B1|J_K9yxK!#-_l2P;Y;z`tB zCtL6{a8YAY;6p~jU6gqiA;BzmkD}c>DmGr`9OSNWZU)BU>LP*As_?c@Lo68M$gCNq zzoXX`IlM6+!xv_kFl1tqI>hQu++^0W>?-kfXUr%&yM=~fhbg50h#I}j;TBsTJOkM) z=rZA(8B8`A?e_>Popjg}o^k(&2Q z!*{1F-K}_3P2ipAU0Za#Y!5eza54m;DIm=VCGkEj_m<+P*=YCq(bv4j+}%;YNCra> zO2ie7*a9yTrJ## zNWPF=4YlPu21)o+s5(NBPPRHTtKGZtzTMn5l9!s?AZAi)^+#!S0P3+eB>tFLA+g_Xe>}3N7WJ6JzDtv( z?`20cmLdz3mX37PAF|n(kGPR!Q1_nNu1P){zB(d1Qb%yG|KPb&p`}DsyCc)9Eo2^* z;I{EClxC1N7zrfKHHMDK@89*Q>0;*fsCszBGwA*o$wdR*9If4>L38h8{Lb`N_xbxoWhSk&|24~7p#!Xmzzq{mnNA;MIq2;js_ zQ-wr$#ltVmE_svB$&fEbd^nCBX+OjYgi(^eoV$R&WCkopVSGj^!Z_u~L?*TQdh$sZ z8V4-V!(w=%-4Bj$4s^+WheVT3qKjey;P#%N^@y2Bx}x=;*RBi~(7AeZ7-Bp)7!JEi zVf%vD^J~6o-Yj%FPeVJ#QwR@X#A^DMz??PBSoz*ImFie9AWz`qVIP&8^<}0vg&>qn z#?V{Ix>Bo5GMwvL)o$2*ntk8Chf<3UPWVfei&pL2OhF?aNWODF2G#XT`%mnV>?&QI z9s{+$1Fi9|w^Lv}?p|@|!~IN#?r&yKVi%Wl&Um;}44%b~*DAoT8Pu_OegwnYPKLWg z(8W+e2zv|$UpW!!8gMUP9EEh@2<{GzFo$l$!f6>MXx@tQj3`a%(td~)FWhZjIyGZgbI*6z?jW~R|T=rUy zioD;{RlW1~o}D*am_1G7-Mp<1vkW?E$GJY1W@DRYD#m$A$?W}Hw5w$&pOch@b{!1?;T5=+$ta5#_rp;< zJsm8l&?Ne$%;I8K-osuJ>UoMzG(az{I0z#EA7Ra5HaoXU6G4vhjP}flyTk z19oMp1y-e5J8jQg0TX6UcYXMKRy?jq9w>2|TtDE~{B)C2M18D#ENLPsRs&3iZ*oSN z#3mnaN4?3j3Xl#pY16h9rL$G3B+tN$%o%U|EZuYhEXqg9LOGiXunx>P>Zj>eO0bsh zLl5Fq@@X(a5Cxf5io1{wCj?(hlDT1I*m%t$d&m#qm7HKLv*OA2N6!t>`6Eo( zYtLFAN|X7^MPb*8YnD@(`WM4xt(#0O2#~152Y*S@kGMjL@}C3sk0ruFgfbJ?$+%?n zuM^y-2kaf!xs~?ITMTK;xNcGp84f!=X8D&{{TN=huR(@GybI zRwIF$U!>k_>5c@|L`_Nr2{#ptvDhd|Ns?p<%GXM@Ix9}Pl#DOS1RP?hsT^o45}CaS z>;mWljZY)VGDqA5L#rrk&Zq+OU{txpp0Ag61G_l{+=UnYY~YCzrTm5puBheH7J5}; zl9-8EP+G;6BU+$1R+f2k(rQ#kw?Alaw7PK9aiLGrlNDP4_=DzB3?)xhNrmEk9rGT! 
zuAe|~)l*!fVp1Zz%s#4B_o79|!q|&wG1#)i^H4uOHzB~oR4WKDxoLDg1%#KDK9G7< z`1}jyPSUv~C<(vCM$6p*=k-9qmmXPN{80SRHStat!+Mu=5%WQv*%Li`<}mbDINSRKc> z9${WDX&B*6N(KmjZG13|MnHzyHN>_CE3fgLYuC|fn_qqK;J#0iKf#3AqjNJ{{0AR$ z)`PW_tw+zAf2LK20tR(iv5RIwnl2^_TjS807iP&)O{Qr=&S4*;+g5$XXkfFi*r+30 zCZ4*ov+E<^45?O%=OhQ6(Nq~^-D*bE7h}sY=}Z*2FnL#~c}a7OxKfDeUjiMBCCWG_ zgo@cJO>;!8NefH92GAH~R*xYGLM6HfYWwk*j_zTE#>+CZw6T&7fFo%e+Ing)CRO>$e zTzQ&2W7tmdB!3f)L`u8sTh~LPojLsQPQKY$f)zF4UbLGjFXZzx39)aqJQTH|^n|13 zuUk2s&;ym_7wT5z!YO6hmP3rrMf0+zgUyE=9cC_~5*(P~V~v)z<|b*0l=-74;nhI> z+~?V=cBA9l3yLzHrWLetaIO8P`!JlB;onEOWR11mgD8#1!ITy1-;BSp?8gvg|P7rc2uN^P-2HlAbVUV{E`2E@kPZws|hhm+up% zgX;`}%(XmaKV(K7LogekmezJ$p2*{H)KkLgA7W?Q_K9pot`SPHZWjtxuE2fKSdkHR zn-cN9^>v_KGfv7put#YOktQAS`bEAk0QJ+k?E9;hnuW<|m>IgGZMRERi)e@x1m5Dd z)WgP7d3Goc9whVR(wQX}uc35>f(PCj4K^waK`F$n$#KphGQ?^}TPjb**N{Gz9LnUe zH`i8#<5T{}dZFoC?8W(tlGt(%>}=0i26X)}?S`MYUh_+vMWl}xtZWtIg|Pl(wKb7) zu9(2#$I}7fawFX!gzJa=`w&=xSEp zA6%A^RvGuX>*-wYVj5Pc9`^HHjfHl(jQE5-PPr-#I{7YCQI8~-bqnE;s89iCxgebP zB0}1O;<;2h9u9~CSIZyCeQD3DkvyE?ZxA=>=f8sE8KC9F5|FE3JXQkj0DNX!?#kkS6@aE$gRAsgrE2o?h`72ux|8Tb2Ic7fs3nmMD45D!>CBK4hcT{>>;x% z)YR`u>qD;^fFT(6>HYwv4J~Rp<0{bwNwYPxMz-dw!M(r)J)h1&qy_T80>jn$aDDie zp@l}H+9n!5Ih6gUQIrl{JC6{@zSK)+UC0#7PRby>i`zBjfB;rNlu?Mb!{fzPJis-mJ$ z_Vc)y$;rUW5p2t@UvrJPYo+r(>l&;!w51y}aFejLxXXG^nOsZ1Y=WAcyVmHb|ZSPXBe#Gu4AQ9R( z$2%}$iUChrXHX$Gb&f&t#O7ldS5L}n1k)0kY2A9_9_&2Z1h*`zpYzv1soF1t-;`Yb zc(^gqcGl_}o?y6nS==wOr3hH|Chv%o9@p(G9xgTHkgYRqMt7ymr(QW}C z;=eH1Mk>>-e?>Mzs*WGb#1mI27R)ItEgYfL66dBC7D@xpTdju&iG3Xt zxqLN6dl)6|kFCrYK3bPEcre1202SusBsF053NT({TjfvWU^0S zLQ8vRRF`z;;k}r6^}L=GzWhuP98&!f*Vm$fIr7Z4zZiB%EF5QbNaXRPk%<4R9ih_n z9W@Bo33WugFAn<${lz27yA*cjywbH2>@MGj0)#_h`E;B)JSMVT348h4z|>*WKuWK( z6K$+YZ@A+v-+$U3{(lJjWcmk||5oAuR^k6v;r~|Q|A$rK|7>1=tMLD-!gDaRB&>h_ z49tF}R1y7C8FjOBv~n^xG5Jh))^jwm{>*s({pQcY(f)p4oiJrNNDt$G71R?n*X{a! 
z5WPs2&S>^*jmO`NDnf%0OG<0GEzgadx}<)>_C+9%x8UN^DtCblEjEI>xdSj{Un!hf z-JOTG4v)Wq9m-ZR7;Sp2{!GY``6$N*6&Pjqq6VqfWJ1AEsqJakzPjAL$AEBT&eZT#FNw-%|jG z1DDb;^#WDZOz7Ke);^lYNfPxr^2JkjLl#wLv=3^0qwa|;(EN^j{?m-EE2?#U)z7~i z0RJD>hVgg#_|Mu5B`HV^(j$!i0PPhT_e!e4`s}fyql7^bD61opsP=QDi*`uoR$07lFbi<$!IC36``uYiM2HmmDf6 zc(5V^R}`oi%1W7QV4~EO*Z5K7>?|Jr;T7e4jv=6nh#d7)xh}-i6j$A>g7r%bcA52D zRe3qwq@+7JTvh@*?l`Ec@HdJ4oU&DKbJ$ige*iwtfJ!?Bh8u!wIV02I=BJjXuhoc+ z&-meEzLO=uuO%Ak0wlQ7U&eg<%HF{h%f!3fdY8ejK7I3X8|BL^5~aSwY!L?d z0TYHoAy*b%i*WmfK@w%?uymW#cLfc1tv9L1D?B_BEUx{}G9dNm7`hG66E&v6Rc78S$vK$p_a2_CR47GMhm)y?wWd5X zS5`f@8g+s2X(LK9$^@YL>m`GfZ*9YS!!;-gFq-q8!OFJy-#lD>KM&1I2z-$(XGQv` z(G}GZFI+XBc9Uj>w>xe=|)09 zNl42&^@Ls=+#OgoHuls zTW9@?d0im~XgH(yk2UWroME?rcYH<;=L)v8OF7cVL_)Ff~o{x!_d$XNS-=G=4p20QhVk{8J#)a7ZLO{WIV*Gu;LGmxLwrE<^K_eb$`!AAiyjGd|T$ z&zF0!tcQw7tHfJ<=;)I7k8bpy5V97mU(Lcs(lnMSYEmBO!-5(jjEz$jdfPgr zdk=(RjgtnGGe_n`TF#pb-(4o#@=Z)4E;Ly4D;nH*$g{$2DMp5^#6*@y0ENa zI+HQyciHz7o;U%iq?#m;OL29~zuV>%nL_@3*$k_z2V%1T+flU~1*B+>MMU%CzoYp9 z$AI_UyyN0C%3&dLmoYRwKRz3nu=(N6Lo7}2H{eVw8b&By@S2_D14kFdgk*t(n4Bhd zIvHWNeYP4a)vSE{4YVnlX)(Lt($p({OggH530ZRyc-1`Ot||AYu7pL8SjDjNVY+Qz z8_~GUGCq2j?&9%>=Bd_alQ+2TaHrz*Tx$8H^_Ihy;S|10f~luM!in?r2lGWUtdBV??`_uHT`;u_#Rk5MXx7jY=e4$m_2YZi1IMejGWTC7F{hX%Efp)sN2NG0RP&8&DjixTfL!oR0n_#- z2Z_1rXNQ6B&;WMQ|8^D(yBm+{r{6Sex2U?}+o)p+o!_8ni;y9MthyAsdHUL*`YPb3)0xYs{W|=Em6|b)%O2(D-{Bx4 zSjm37&V-+(|I}EK`<>N8o5ShN%nG;iiqAy^y30LG3a61f;HgYfdf}Jp>!1Eilp4)_ zF9Jv&)V~6z!cEV=kPUR-2*2fQ5Reor>H8T^-1;onaK@pWSeb`bGfrALZc!@NstPwc zS-ieJ@NF8OsT#We!C0@jNz;w?R0BDV;M%9$8Wh z&u&k4E7w@op|-$x1;tyVA(x9Y-#1Kta;?4(Gt~Qe{XN~3qj-M>==ipw2X+Fq4xKpf z(Z|Qre+Q@%RifO z#3*bxh;6@FBsNlx^CtBNdCd1Zyo;A5!&gY}H|QTN4xw?xg&C@x<0hHHaZPJu=62=x zh|%s(PGiqswK9OAxc=eE z+lD>{HxzH>UdsLqMJz`0Vnk(M0kdAf(9|j0p0CGGjx3pw;(2y|(KY0<5$lzfYBc#a zCg5`9ok?w2(>TLen-|dIJK0A>p=PzotsoNRyi`Y3FcAVYmT#KqgY z#1oh$2xoQ&#-GDhWspd-&sUq5xaSfpgjU$Ie%@;0+#%*!+aAmu&Q3e^jdr3hxLj6p z42DKBF*b*N__o@=dfN#MeqS=e$-rk(3ns&zU?D1~ePvE_AVo&XZzcrHI$ zCWP~;hwPlmvXj@z)__Thu+-|)kZYg~|W_a3Q9m~%Z9-;g_3qXX9 z+QTBWL?*+O#>N46d=LPDT9)C@#@$GrPy`Z(3 zED?40!1?pCl(w&wZud5ODq=P|ztUUQq7OA8)C<3nN%A04!gx+_sMtc6{9G1qaZ5VC zf$XE#g#mc^1dL<%3^l9{R(KwouwP^P!8gJqZ)ZN)=0Q&-EY8?#5R4m~nwcZo(&SHP z!XC$((@EQ~B^YW)`(sORH9N@D%hdx|QSHrsx2WFw^Z-^w0U3}Ep8a4Si~4K*5BmdB zKaH-H0m3Q}uz~1BD<`6@_RcFQIQv2VEO7SLEyJbW3}dXiN=hXiB4jv_EuirXyc3_{3D!q_}T5Q znK89@7o^i~F0XfwyW?NgV+xxzPqETgB4LYS@4%_*SXSgn$%36dy~J{${hDw`PY!Adqz-0%z4? 
zLG?#GHV{#9>fvL_s+;Q-be_sAA7EN?%FsYmo!9?^4Tp^gH9HD@SW)Ql{pZeyo_nZ!#GE9`J3;SQei1gakd?vSna^GBRk<9M|{M z@KFIX>G}xrfd>2N<=pyvkuNMRhKU#CrG|9p#~b!4MX^3S^MLm@{>8?=Ky451GkMbT zh1RKcESRBuS%bGJ>z*e1o_~8?X0zQ@y`OUUywg&1~m%KZXW3VjE_6XsEROw}nRCyU#X%^aE+X z1jUcp|298V2~^tu=m*li#@~Ky?<_m?h*7{(ht*%HBbK6du~`I2 z9cTF|uO|>(e>?gWNF9URgCe+utc>-*8LE&C%LYwCiR?#g@G=aPMlxsQ&@qS2xg9dX=o%?j`UY1|YHQ zY+Zs+IuL!vKdKg(bP$OHlQ^QUwmqIoF!{bkWl+zW@+ zO|va&*XWl>hCH)V@WqHL|UVJz~ify5^J#B)8|i46k&AHpNw zDtVkgec+9g6+RhHaW0GC)p@0!*QV#e%Z^P}y{GzPFd*%|{2dJN zaYT$y;vp@L-_?pG^zd|g)8{6vTnVMgP87034wBLvW8cyz=g40EOk<5q4}5ANs$4Ai z;=IN87gdW%qSEIa&L*9*(3E=LCoDwGF(sdS;=bNdbtYx*w#X90&wk zfFdMr+NlwQ@-(W{7uWkrcAcq5HFLDd}*=YA1|n5HS+^&)HsS zWxRt+HVaxrs;X&t6xpQ^^x8EVAV|}Tm(jkQ}g+kuoTesw0dA)u~ zzabloMFsa1G5!6sC()1MVx;p6v?Q__U43IL6Rq5w1iXG3OZs%Ul=(>>BVCPnzI(@n z-rQ(lyMI*Z#1tBrHf9D-&5Pb!;)Mop>7`8lm2rDy$Y*e7%Z9iV?B&afOv7=NE|G_z zNn?@691p>wh!WbkxIWc>?a~_PL-=w^LG6!0iL_UR8k9^Z8S2j747E2XD;Fb?CO1pT z$qLK=FepvBqB(%cjg2&rp`a!=R$7AU3C+Z$gs$(J3v#j1w_M$}#&2}B3{NzLLT0GT zF&n;r^aE*A3Du9Gf3cH_{AWAq%zis5HABs(Z9igwj(QyUue&t|$l3^LS7_JbD0|2+ z*t-0H=-}h0hlzSDsCb+GVT+?Pixb_GZJ%zAby6!{JC~!#Ji$-VrFS?qcCtHrsHSQ9 z%Q$Yl+5-uy^D#cRu@vfmy<3U!eZoc`)|p1h5aN?Luo2E^n`)jpDVM8vn!S5~_YOso zi-rI(?a$34t?<=A6i$nW;H~n!vio*}-4%6ps*BW;Qx>bX>6Vq*X*$-q9MmsfzTr_y zAL3NydH?+3Y?T!u`qrI~ThU|gCXZLr8{VA?vIFGq&06&Z$Nl!T!HXEe@({;DVp9UH zH;FFgC~yF$Z|q7PF(G^*w~c9rWG%*$&rpy?c)kh;J9MvSjec3nY<(iI2Ot@+&&iCKCaj~7MkBa76Ohd z%9O`T1EnMrq)kf>-hUWoT(VfUwl!SEs)zoq>iaC(oH)LT-;b%}f(%^JXfhSrjtPTv zcf(oxEE0zJk&lIx>(Uk5gI zQUz9BOu(v(--bzGi7{kr{f6^-A>U(eZ3~Q+O7i#^>6x_+()Dk?Z2Uar8y4cXLZV#n zOVG4w;`Uv{PKI!0;b@plNS*;Z!(yhffYI5vN_CnXr>6B~Xx8ZC3i!BE7lyyee;6Z} z<&$%u;GSyj@_E6Yn~*$MJ0vG(WWkCp4b;b!QMZ)6?aEu0_*!+7HLOX7%cCx>_Nl-X zJyj~-4V|Z(Vw+RbC5=&6Y-xwUM@wVj2b6y{1hM4G+tm?!^NycJqJ}w2Oyx45zlH z@_X*Lqtw^Vl@DJk^gsDXxlmKnl1))N?AlrWxg34lyCLVxh9#0*95(CQa}IlNrTpN- z45U$gD2aca_HYWjmBj}ZSG*)?^G6S}Gb|E($8s7Ma)&u0KXP53C@q(;=5FG;f`4Nq zq-ch5t1#JV4sfBxAJq+Yn&5Z06bZHDv{>XQE15X58(B|=yk z|Jw|Ca_qp?pAFfy{Mxm#XK-Z5`LORZ4Lv%2scf6Gd&aKBj-9bf>+D}+SKa!T>G31+ zYI3J=6pRfReAeu5x_M^aDhV)N(NasNFL>6MkX%ewsw6&lUe)q$i0rH3M$V7nOPFRo zK~=)@s}IoYBXil)0~{$1r#!W?{XpgV`RTJyY64BlYup~C$`M1VB|6Xg%ns>}&Qs=e zKWkHCdtr0t!(d5Z%GR9+e~n%J6_SU>K*z4~Nm`;m8-lz9DNxwn+H<2HJ+Z3L`xfn> zo+swfo+tK#BJt%m;zuQ2{bxtw6uO8{rQ7@(`_|gjs}VX+tc78MpY>7H;462appJX> z%eNPOW9L_APnr}zORKSXQORFr=nNQwds!{r7%)UTshF%<-?2xz_ANWHgJVgA;KzxL zpa;ryKR2)LZ0$vneHWw^(HrP#(^w!8jO$VpP?YZF#;%4La*g_pS?DV}mxw|0NV7b$ zjpxK`U4I#J&0~8ohc+Z7v32^-hU}XD?urmmvVSc?4$3fR9l<((4 zJ*>zle{H=NpOJ}N@-uaFjSNQ9IIuDmrIjUYo`}tn2{F`XVyLuab4|U~W1ptUK|FIS zGty%KcM;pH$9qKR;|45;D5U#br}R>q;Za|s5jg)q%bhu5I>jR)@m-EyX|`pBKD*rl z|IIu7s+k5!x#Z7A6n;L>B^2FaTiz7AF?0I~Fb`R#<-ZQPV5D; zB)|%ZG1Z3@JF9+$pvpnwyr?a_CX_y9vRLNcCTqp@Y!Olx`f%B4C8X&1sN-Fnmk1`duiv6|)x}%IlH}CN1XKE!CklOSotgx| zu=~phh>%U4(O*FujVwA5bWcVvI)Wdl4d^&z{Miu5nJU?l|29Kv^UY=dYzX8`m5k)S z&5*JZuGBvpf;>~*okPyc@XrL+)jnDLg6^MAp9SmDMBo3&mp#3+*uj56OPc8@aVdqEmqS@E$XXWt}p3Y(8x zRo8^5tM??;rs>b02xLO#(+T6dYC?fwcXR1eVC@~s*akD@+p(`|RAg>+YrpmFmJy>+ zt(1G!edoezUXe>OEUBjNBvo3%PaX@RdIP%ce4t77DicxlU8JO%oy7|r%D-Gc{3Al+ zIi;|6rPtIaLa_aiVxcYAN#k$#;W&|Js>l%%Z_Ft9cZ2|^2fvAszP}@ca63ZK<4K4m zC;4LoxA);D>sKOPGW~Tb9|)Yv`!c$w>Sv6L7hFBvQ5%2Z14UdxvgzrCk*^f+q*@+M zxoQF!f_y5ExysF7u|3LMuDGXrb;#)k$Kz~ZKO`%y7C4nZEN^r0JIs)WFhkx#CDrtn z53r5v-x~rsQ*{-&^{+(;)_*ucX7(E)ROzf?+h=Rro0GpiTSKG~l(RMFV5#1#;95{9c=pLo-fCPH_ zP@@w)AwF*6&!(WsfbH&41pClRU>U4?WWl?K1gv}DZg-E9Z%Y@6;oF5SwA@o`&knfg zsql;y_!Zp`XzI~C^m=={aIu_?bkW+;!=&>^C2JCM`&39MPQ}tx7jMal_hYL|!c%Tn zwX8PG9oAlNQi0`*)q~$yWPoUR3;cTq_!ro>w|BI#bvAKSHF0)^y;KY3&*#_;Eo=?- 
zQwYu$o-KNZPC33&?ej3kCuW>fLr4-Y`h!`pzM-)`Mr@l%(qS=XFq)29-7t`GDgu;=v8Tf*~iYRNx;M0|*rKnC!?{D}T}bI$)e zn`2%Ov?BM9B8DHeIWLoALJ$!HbZ@lP`?u)1qG#%(Ta+;Ta|te!3?Jy}ILL!3&gFA9 zdWG)b+lzkk(GlR6(*Akia3dP@zZ$~GsfH1-XdjvHETSopk@LTj`u5qLe`_i(%gZ+T zEBEcQJ*c_3Hm|d$=AeK6i&cJ=Tin8ahfWbk4;UEZ#0nAQ5-^C|Yf%zU^*+8BuEZt6 zO&q`V`^Y)lO3?EZsDWt=y!4R>{=AF~tc}FX4J>T8KRM^DHDWu-OXSV*Y)gOE`wgXZ z6{!PR{1Ii>Ti%w!HTG}i(XL^PFb-F9lv2iu5q8GK)Q?ZMdBBxuvLI(>6EDuSR~H7@Ms8zPT}7nq9x?Jos{1n4Q?I zZn}znlrE}sEW3Ao{FnRaShb#T)UndlH(y*wh2JKZCikw-EWWr|QB4;WwPvAxd-}Sk z>j9EZ#h_*zJ#y`iu;w7IfQq%L8=*}3)(^H;9)(I>lyJqI`1(_?q9yr53|&2c@j*^u zcMb8Mp%{0n>YjWGQ64sZ_Qvw*6mN-YUiy`?WAuHzgicPv3L!CTrXPxH1bPA*?0@E` z_h4U1zM8^0nq+(8(i_!)rEDC*lnDCDMji3|%b&Gjzewc=eAeFng_l%7kNB;bD#xuD zQ!KI%T*d>WnwkMtl3%lqzEdDAPqK6t=6R3)Gp-8BL)*g+c>+=33)sskLtBfa<7LP?+5_~c&O{LVGGIFt1 z`i{<7g!C2rYBL{kVNT5^`b?rRT-4B^gncnHFsVmG@qXzE1tSutx2^)ALJ47&G@+^rg)h5!5XTW6Y!s>j~lP|J3AFo(5 zoXASG&CTTQ`+=jFn3>w~M*c+RiI4ap=Ia#%rXjuRC$kH#x-Icyvi0Y6TfBCpxvOFM z+VL7a@B>AK4O50ZeO|571q1_TjrR=Hjv6VM4sRA!N&5PX^BrhZmLAxT+tTOgl66{C4979v9~mCO_TB zfXYV`#&x`82U~-W#F=2#x#c?PPE)>{=Cl#{ei4JBBc$lNDbW$MYKbN5iR*%ecm-Lkr$lDa--7;#10 zSszXQU@B>tY97Twd(*H;P`J2O;KNBpjVA;j%2UG=8xuK}x zP+Z}a5;ZEArgJ9cN*Uq%TsLo&H63#LTy578yw_JQoUnM^aNDpu?kUejle81*B$d7D zP2ow7j`xY$?p<1*()`Xtbvkl+Me`wfNJwMYtJv{!kN4M~J_uqt=`2|!>Ri5(ailCl zBTXzyLp)V#IpzeHb_ikDEO$(yL@059{Yy!K7tQh9nE3h41V<$3Uwn$S_`X%Mb`x|u zHA>ge+yZ=u1z3|&0RJ_%Gm4t-vlhiTKUSZ!6hG zl#e!Oc9cio@MqO~gK^QtccZ~ zc@lwjM_Tf+cWBOgDlXlb=I;$rS2xg>slemsF4L8U7>qBX<${buxB`ebPu2u26tY9!zyyuhaz+$O+p#N~pW^im# zz0B~k#Nv~uenEr+BBpoy-*8wF-^Rt=%pf|E>ZcnW*V*!l&`Uf#bj_MJwcA4iPjO02 zDni27qA8uX)APV}_E*?;XeIqQ9%h=2%`p*o2ITHIB+rl5P#)spet*u1KW;-lEIMJs zK&&&7kt!%iN+4n~$y0|bI_@V=h4kR_#XP;wMqL}RV|rUva~f<9mkT*-9+mLm(F`a( z0jfG(^|-l(g%h-j6?_(#JCK`*IOSgQcvqk?*EzFbPgdhNUGbkDVq;o9eI*_4@;BFN zeSSHRQ;)upy7Rul<(N|QCKYi8+t04R@ftD`Kgvr2oTXvO)z3BZ62i(@8wyF*o)B3& zq*jI8d^d0QOOI77l_KcpTL&!}t(N{PPy1FYj)oC8rHl4oOOqH3;AS^J-A9)zSsqK( z_k!*e_qbor9FxWtXe75|5V4a3t0o5E<4Q!xk!)aZ|NjFS(RqQ#S16a$5mkz_<}d{NSa9Vsz`HQluf`x zxd*ZQa`LpZti;qG5|(9^*vW&&UC<63U=H~FXdbsR(m|zSi$R{p?(>kIW1f<|c=eUr zB#HcjCdDLg%^!0~R?(f8q9k}g(rCedqc$EaR+a^0EHW7_FC;SU)K2gk$?ns8zg078uHThF#oB zwc|1sL}MdzJCj3G^?t0V`-D~h;_GkFA1+a%ma1D?A$$GU-|X2|%dY{fBswYrAsiu# z&H41_#|~M`M_lnVO=3`GJ>VB1lrUFlI3JuTq%Ma?{a5Dle_M57#~Q`jXaNLU#kt;mrXaO2JCI?(GY8 z@NoehgAI-3@4`D-->ghafKh=MxO7E~2%nwb>4@0@dsVj1*VH`hO`P<$JD8H}ZooO3 zkoJ>6R~-d9CH^kJ63|8AR~TV?eW*Y9;B;|FFtFfY1U`3uc-Ie#;=-1T+Xa9O?d+_; z{?=*c>^xQAZ`B0;*X^w?#Mf6~-yT?N!vA7q1ZX2>D%Hr%fRn|_SOfta zq$)&+`a`Hv;3KPUxu@_|ZWlKAXP!-0mjG6bKGBBIU#n81Sgat`*%7E}yzHr47k zAT%m0P%#Zp!7;GUgP>w4e5hvm0n8B(RLoiX&AuC*~8kz2`n8X^XE-n0{(6~AU;TT{ix7K z_>YLYsTR}>(8ypjJwe0^Ca*$OM=fpaf_$j0ceka5F#==Tk^pF9I?|_Q;4I+J)Gb0XnaQmVr5mOn}J% zB|yulnn5POWq=~wvI@eE0WwGc8M_D5rP*DGZ5iMbTVx%(5$9Kt5ilL#2xK8ZKSBEX z{1q7i7Xr?3vzY6>04TBDqlDr6E(S~q=(GS@Nh~@BQqqDe0YyM7QNTe)>?i?c2!xdi zFat^?_b9n}U>5_X1axKqt>i5cG6Jpy6alT|9w{O(uM@ci;E(S~q z=&%m0MC>>+0g+fGGa#wC}Y~n){6~L;hSJ4!$qxf4#e&jCst_9#g<+Qon=0UcSPm0;Q+Bj8Fv z5ztDi9gq<_N28}PoqwF$R8jh;7ULd&`SD}kr6veKp8A!v0lc2lAt|Guv2$2U`jv- zSLkPw2vj*oByzYCPz1CR&3t6UjuKD?d(Pr9PC$vo9wi~qb}?W|KnGW7B}-+<2)GhZ z1hkSzRmg}PC7_J{!1a6EGf4G4N=B-8FUoEMMpjEvav z1C)XK${vIge-#)5>F6Bv1wN?H8TCCk2w;lpZ-B5V1n6-W>Kj@RKtA=~0AV9JC=m5s zBnV*AmEQni!w@JC^`#pKAdcp5fUuqh3PgRq1OjNI^&228R6&8LFH%4N`?P-pgylw1 zAnGdv5I_Ol-vD7L3KWQ1G#>#B)%y()zU2o>M6DW+AdX!99TC<|K#8bj(-A~g{ofIx zV;i+@ID+VH03-gLogu1-qxL@3>e~oluptZxayGnu6c4HwwRSZEcGn031}zn}@G}CI zZ43c}?!u^5lM%2|li&CT4MeRyi~!b|LbMJx!3OmWwQw&2)?)?%gHA4}Rc{fn33CV- zbYej*8H<3eTKvX0XsM`WSP{TwONiD%XDQSgrU=-U)o)q{?Hy`8P6W{32BLM)DG#-n 
zB?1-?eBuBxyM^~uSU-bKcc_&h5wHw92-t5@9cmdx1g*p#Mgt$jp&u)1!9WC%+7YUG z&|M$3+8%<&<^)B9?y9Jz>JT&`XDAx9Qq($c2%4e`6b(AyP>ZA?X!@>DH0a)nT44-9 zb8v&ALH91y@>~d-uR9bCx;LQKwnEV2JfLXM-2t^Q6oQuF2|Ix>bYDQN=YycV@rI&7_XX5qI0)M84JaCPUqG!SgP{EY%4mWs zh3*TeWmph2JRc|;bYDQNVS=Dh`9jg4`vPhK5Co0Y4~hoe7f`EbAZQo;p=i*30k!l5 zf~F7vqk+!}p{+wL=YRmp20}Csskj5`3={cA@OCW(#FuUmjD}JP0ku*B>^koLp&$1J zh25z|g`(avN3_&4ct0(LRVhG)qF&8LKygF%1BKNZ*grIGUr<=t08}Wne45bxv=mkW zVE@p(eL-RO_)(##_wNy&3WV*arLf!hs8Hw!)wVAv>_$B*6gl{}uX`iDu7vNWrNA5x zs0*@x>WBSNLHiWy)pSH#T_g6@R?tp`dchn)#fgMbcU?C}Rf>9r6YoI#DsZ zFG3<9XlRphFwE|2k*Ghu`}P^~k3YGC^y52s(UfGdfj2v$p}he9vF1WUTfYx{z8LL) E07YQ*6@qFySwXyOK^9W1b=XMw-7WCg1fsza1HJn+zAp0gb>{IdmwY>ob%4i zdvDd9`^QHWwYzt(?zOsCL+$Td$m49dub#t|JaHf+1+Pj$9Gm$A;nOFkNY{{fR^A&eC1G-u{*h>Ii z&FCcf*#YdFY;0`2Y}^1Yb~a`JFD(E-3ldXwF#Ug!P;quJbu%$@rh9w>Qv|aDGXZl3 za{zMz{hEWhf>nW$fk}dSg6^7u*}l}wK`qXpyZ~^8P7fdZ3~dwT>r5z)XZEQ+?+v91UX*P(^XpC6|_vCva~uk8TYdXKqWr17nguG zgOr24E2vS5jqJr&pgW*NF;O>j)nx_kN>&XsPgho1JCG8w-xu-U7ujdWfp(Iq6;RB< zQx`NWfQ*NaoeZ?2^g#{+4GJ>Ig^cUBK4%9vN05>igQcG>9&LGb^J{xZ0#VW4O zDq-esWn!i#Ee0A_18D3568XKae^1VZ4Di==cpmtVb&#^Mbv1Klm9hoRQ^L%|!PJaZ z!OY&m)sl=8z{B_4%f%J6W9*O-JTvC>?OX_(&>yStAHwcriYWJRjuwVZ2}YwbIhFKu z?c;}C`5P%1t?l?~cItmb?>jKzbj9+rM3ZSt&l4llV+gQh!#aG~GIJXL5&wNgS(!B3 z&#Ti)_^zVGJeXd_w$w*JIq;N72ze z4Myq5tGD;}hYV?xA63Z`KGO6EPxjCteiGsnx}7;`2QY3uLnId838A14we#QfC;({&TDo?#8 zARu%LbiXw9-!lJ_&bm7}ZvWl;a^KnB;^@1%JN{Rr9_S8Bk}+jdUmt9e1s>0-yY(+m z%Nd{n&o_JpG5OFj_e3mZOH5(b$IWoNJH`W^ID}Q}z5|uK9!xN^>^z5#RW>(!zx_mT z$(sHN9>EtuHZi7dE~RjQ$bh`8At1WhH9s8kTRw7|)?~ zj+93GSmn_sFtnm~)g+IbgrPkJ=aW;XKHNVJB53fSYdA`!s8%piPiD0H@m$qqb7=L2 zdpdl&tF=@*p-&1KQ*0fNj(Ly#tss4^T;&W?Red@y!B=6nHN3=%w2G4yn~Urbsp%y6 z(X)Qm_!}Gk>e7{qrG+wD-a*aRUfP4OY}n<_@kDt=WDbf8Uzvx8I|BT?9KFvf z1`gr_A-0xW-niuowPgzdXz$D+!1E<|5X_Ua8jF^588Z9Xy}sV(c1J@&rN1^nC4Fd< zN`B8DUxgr$nU=+(c{|B@*p=)Fq0q6+>pDie(U$2Wg1BU-oaTwDFwN}>m_C#s7H&;zsxC1_^7K34xja2Oc}1*FMoDN(=WqL8_r~j7 z`9C^vIo7-EzJuHa_&dQS#E(aES5B64R&~g&tyuy|uabJ6r+uCbf zj4;w`|FOGtEy%c7jqxL(O`(t@W=VB%fhGEg4jlb!$kK;T$b!?Yo{vKM3z#v;GL@TK|qow}qM zI8S9n);AEv7w4Ct%x8nn*Cz_ zF4@hsT}`g4m68b)C5QFzjT*v=v(X7;kysWTUGit^%cUhn5os$d4kGv`Cz8*=iU!hnAx8MXyG};j zu4`d)n~tHfq?+zeq3r`(D@3VKz%cWWwoK3Q2K+mAu+p*2zgZcwJEt#;b7sS2+s?H5Y$%k_tlPM^H>3_2@e-R+-`Y05o%8{%Wga{CK_`*7>k z(tnS`+S1{r>Fd^fM+94Sy_I}?J^6FUPEnRg6NBcce!3&++NfLWCjk`$qkxQ8VLOVf zrVVT&9!bIHX@f9RTe~N=M8fAvhh+C$hO%@aY@l63+j+ehw8Xo>-2C!pj93O9?(jRO zVm-Uy&60!ToD0gxi$yY@?y0L_&GkC16Jkwbk|F=#8r&ESS5GqJ06;ZujE0LRnO#Z5 z#1TAIEYS~SsU9q{ri{pNpu)6AvxBi|_L)ImGLb4O3-oFy#%c>Xiu$kYUmh`9KI{~t zdIorqQ|@=)3wKBTx;x9AD0K#^-05$vf(a>ahi%^oWLc6X2lmHEwoyW-?^5Y!1^KE4 z$-AnC+V`{Lpex?iN;R1a0~NF2s4*D_xPTM(U~NwT3}vjuVVLw9ZWCM26QOKMhNcRAh$-`U2%?$e4<>!XRGNU&e2UTD<%K&p zBabnmvLFE&zxHe}3~V}*f{w52#2ED&;wcSG+V}K;sjN=79=7>&W4L&za2E{B5P+!s zohL~vQ7W6W48%n*~XpbE+S0sLFDiaRhNRz;cO?t*V&^2iYAJZAdl%V@_at@Yd z_%U1eu1ryHAT7o)RJM333PKi%Y%i39-G@77>8@=3t;f<8hh48jz}(S_zvz9x*mTCPgdJ^x~0fjzx_U+<#-bz|mgHlY@5I>gs z&Yh&nal39(g!ETXNW7AFwzzp!@!T4+g-h(8tkM@isJ3D zya;!ou!72yV{=!bUhef;S{C}1X{bd8XJD>Uw@G(N;;?vT07+hNaH?^pl%v)P zlptytJSFU_8YdOLZ<|9?O14BAdMORGA+J7elw4k)kKtR20xmNK9ckh8%L;>Z+?z2c z^UXbvottOiI$ptaf(dGehN)R${^Z6qlW7g}zLmra>Ed=)j!xX%d+d?M51P4OG*rert!BV2rk_-^hHQ+{`%V3cV2&kkYP_}epwH|VRL26Yo z_atj%2#{Pq$@WuE7}r(=ySVxe*XlU~ZRa${*Cls!!lEO`^(i_!%d8-U#iZ5M(}iJ0 z2HQaV9{w}poGP7?o*fT=RLhix9NuD2JwS!=puXAUwXEK`UP_$)hj(f6>U=;Wgc(~N zPU>)aJ20LdX2qT0ck~LQ-{a|L+=$uCPNI;hyE6cQ5UX-1dIntUAg12}kgp`n!b>%b zYJURYj8p?etHr*=oeALRw_YsPZe!86O!An7vdG&TQDLu6kL0Zq(Ki+5W~dO1^lK5s z^{KEF{H}#G0_vGKq(^M6=x7r{on!5!5)dtam)|A6XBy9McJf}@(A&e%S)tt)h#*Fu 
zN2$YO8Hm$u2)wFi_}F&^Nr=Er5YU`kQ5}N;LzDB_O5gCDG4%KmPxK@?)LG0*#slS5 zF0$hmG1!RSFgzq48QviPBxs_;E0=u6k7D@bKbxrshm$j}y8?S&D<-k<&%b60-$|1>tMsjldU@hm5by zApvd6>5&UTjTX=;Y2V>%p0D$Qz<+v34tE?{YmukVNOX>j9uvu6v(zED~;Gfo^ zA|_xKKsst9B&qrckk@t%&|}x+Pj9 zZCfm_x0qk=Gci^6<;vsr+~YQAr`^7wt3TfT|DagU2;>iP@B$?M$w*km6j_xVob7r!4*ddpUhMnBCJ-;NaCz1s3u22_CZhHhwq|4iR#6v| zXO_eV-~u6;-(_ZYHUJ1gI?9+?Sy+NlA3HCrsJq3BEo7XWY#_Yo`Mbi-&CT_qNc*=W zCjh|u`X9a6xd0$61AJZuG7b(9+cC4#0`-0o7kgny%%I+6&$`Ii0RRwAV(0xk9ehSs zf1BaI^PITo-#zz2BLCLwzf8tg6`1`1fE z(6hqAOJDyhW4^~D`zs0Q!(-iH@5$8COmc^hhlg(ht7gaOlNr;KaJx9XV>EVcvXO7G z3Xx3n&B66v)1dTuks;dwG^?^O%C2AL;W5FP5_H-Z zOMs63O}J^qd~+9|?oFLkWjjvy7=hc>c6#~Y!Pd;fukkB~!?sa}#!8j+1K&3#I?(9E zHyu@F1J>;X;>>z}mFzuphCVk83p=bDyOO7C<%A`i>`$c=16k}_u)-E1C5kP}#U0BY z%F)q}p3WC*Tbvo64?A>9Tu*^OW&5uz_rt{@ue{RNMS2K5rv%?*So_|w2-^#HNqThZ z+?V=Xq<_n&{Zzy?dfWyufJ@~X1i*4T|NFPmXufth(3e;UT}6pppMkLGZ-I=kX1sK z3Gglz3r;d)#6C7J>>jNuKOPq&-2VJzHD$rPeT{l^V`4@x%&pR!rQ&9v*q&`7S$8cy zv9Qjnl=+Hp+{kjd({8d~a0=ff*u%y3)m*J@_rd~e7ZcOx4sWNWN%CK^u<4VZ97Z;! z`U&54UK6jL=tCOBDp4*Zn4vk(3(c?Wl}D?IlWVeOXo^Z95P+5r1xmyX$Yw7e$~;ie?frl=L{Yct}96o=Tq&?xeLWjm1Wh@+o# z)D-fD*mMJ@ievZUQ(MZBZpS#^FQtxq4|%RF&6cv%)tixr5@Oe&)~Lk2M-!!czLgmM zACK_bxA*Xi(FTo=#U(it3-GB=f_98jpO3U6euQgvQSUt{F<3d56R^!kYyy2g9b#kDUW==QzrK!j=PD|1 zI)W|!Y};rloyS8BUBs!8!aNr43b0k_xm|nZA`6x6;7-9K_cP7!=2OVjA{?ssIrE|N z&f3cK*T?sQj~>kC(=$fwHaod!7CGeQ4)!`s1%}h~*wq z2vRc5d>*IfQqe|Re##bm{SW2P(z;G083-iIerYJJaVMBZmrN5JI+ zfXUjAJ_8!9A9Za>6tmFBSZq>=CfxMXR}?kEtAgW4tW!T0ATi^GrxPh&^Nt$?6lDb! z2Y&R+t}P8*YeFzLRtzi*Y(l@EjYPObm+w{)E_4ZM@kzRTmMShN~h!Fc@3u$(^2 zpH5(`;c|k(WA4-lsVzsiDWJ5@7`(TlJd@i0p_erk`mX>pi?Pp0r}jZjY!_ zmmc)|ax$~){>zUEUtk-E*Cg&oqAXalBOT5*z(adeYMxSaL7I56<${ngG#%{tx zaxiIc-bv}Zu50AlQE1I-7`x__hZ|uvVL(Q(s|o~Ew%cgBJE)WCLN=m9kV$nv-j_;I(xp^D5kJ2^=1uzf%ad|l>213B}4fm&l;Cj8bC@iy%p> zu~8VRi*^B-Z=b8}QP`0Jye&lbt-J!1=r?DzTJYw{b?*v^E-Dplkg5@y-l5+d@Mqhf zs4Az0mGd7=Bv&^!Q|YBuG4Fj{?<&eXJiJ_=?kC3($bl*gL#Bj-(y)*ahGE+&1#Zst`)`*6px5@f%5Q>e0G1Hkx1-U9Bbpvpm5E%+nb_(_ zTNOpv(iUbOZdzIVj3lcY6-Ar2h(Vy@z($29-n-w0A~qc^Y88vb!5gcqnl~}kTyxv@ z6zYD`;Rk?Kd%`d-~kiwf&lq?R9z; zUu{TjIi@rVK`rjF1${tdT==5I;bNovma0Qqm$6`Z4cfROYQh6rXY&fP95bB5xV8+7 z?Wzcv-5Cm8O;qb;xI6ol`~6<9{`fti@m8upUyK!s!|v-S=0zPg zkRE~b{l?QZ0%B}ez!mp$@bJDbY%bu97&J{ydR_3}$HB$4kDS1S2}m~_cLgsK?S+zj zbYDeMoaC+YClZF05V|iTW5&O(Zm;|rs+G;5MvDLpwXzsE*lAF$&U}wMfyrnHw>e{g zO2rWEQs~JR1XqujDd%|j z>Yq-Qew+Bpqh6+(>9#8YG82tm=hMXv(v9(kc3zJq4e`-+a0K1SEUsg zCM{s%B}UoNg)`@O$hmU$PlF~bP+u`m(Wp5B~lL{ z!_VIu{VcIiC^zv;l(Wj4sVP1#lPE$=spvnX1%im^)^4gy|KZg;iF+!{(!~donA))A z2UjN8XYEz6h@k$|7V?_GcXdDvlV^IBFeFd!X zYQP#hmB_e0R=^xUN(}WIxwldMqIbn&qc@sG&us4im=ocXgEty#-)ugVPQ%5bPflXo zteIDKOKPqbgN&$;?OkNOS{y$MiBot(nv{RS5I%Kj*teQwqw=6yb?qS&)+wzp(_-2~ zz><9Poaj=ijT-@tmglC{h}R<6G2Dq(1c;(hF@tM!ltiORGT6Y**vJ1NOK=DOS?xRpkA_J@;n%L^6@GC3o)f#m zcZ^IoH1mB5uQ6(YpJ-#}o&;6oCcg8cF{RS8FpcD=5cLTY`EcGH-9eD`Sc2W5!==7` zCSk>*cRSnu$<+2^U6#z+14$H`ZAR79MACQ(S;J@?LE0r4GBeVH_4Nf+@x|0cj!ZKd zN#)qzahsvwU@=knizKldpu^4el?6+ zl263!G|q0IEj=H`643Iht4M1WV*P8C@QfFA(DfonpcBH(0@-{*(e+wp5X*n~eB4kw z7a=3bOG4SjF2e6lbF3pe?EOl8C^Hr0?-^j`u#L3gEWM64(yV?Qn4MS;Uyw0zF^D#1D#(X zvKBmRTvQJ$KK~%V!UYYthbzq=Q6N2t^8)-WH}Fam9%(T`Dp-bX5nMMSOQm<45nY!f z;10D{_53{n4Ik7geJ|NMN_;aI4GmFE<*W&C`kO^c3N_JBM|Y7g*(Yjr1F~0mQ9CZ> z;x$iKc{NyqBcY|-t(jI0y6A`zS%$qqbmcqhSoxYpb2Jo>VbU5nqv;Z@iV_0o(IrfY zN0iZfKdh7vguX3Fn=Mgc{5q@&ti=*k4u#e-t}PlS!O73x8)j$fW8e2AlJJHifUSw- zBY}?1TJ8~JntFXFM%DRPaLi*~Ayuz;SFO%3*1Hz>gjbe(=s#mtA?zW|(7hkaAzQ-Y za^XP!U7_so;bdwwuquz zt%t;f`Fx+JILfu>dOxuw4uAn|CG=6!ieiMHIqRK}AXzA?F{ zp}!8oX^m6Eu%?fp;;{}6yI&AHevpxx0BVPKVHhTF3FzsB|G<=T)_Z^?3ZnmnID%YS 
zT319B3AJ?V0skH92bf=uoJuwTBSCk6&gsadtQ$T(-}VqwDp14E@gkB;K)E_@rt?#O z3YHPS2{nyQ86J^-l3iQe3~eNoq+YVkaTmTq5zZ{Ab_2)ti5w24rIb-i77yO};+>64 zAXB4iVvZ1yYG{^LdK`WlVP1_<)UQWI#lIG4E13othliyBzYo&o&03RcxkDY16Q_dM zV_sEA&^NZP(*SZ$Lf8ULJ=B|3Db*J_=Q!Ik7*(nUVvwhDR%t?$h&MZ~m!sr*WMUMI zbkd=_Pdm#w+Nfp&jTlCXrA~TmPy?yxjGwB=;ToOhKFWY>J4Mk_CM^3{Sni9D2Vcx2}Xmf2>Hjk2#N|7vsx`3w?tu;8vMu)0vYQ@pO zuoi`%htutcUMc>`w7tza{+ZqnivJ#3wN7g^gWR=hc1r365A*fN^%LN1npF*i~+xq;At6nEs~c{~ch; z{EuQp^_#n7IFby>8SMv~$eBeaXQ z#2H|9ldy_x>0|dy>rflMG_Qe4Wsvj2z;m!9^7hg!-nSONvp+{l?VyMzRW_XHd_`l+ zO({L-aysZphUW%)n=jGsvz7{>5OdlWY-uAx`6@uc)7yQ%-9Wx@)eDaFJ+zC_<;TS# z|DKA3w{!><0Vkh$bKXEn&er~va8A5rywHV4ut1^jp?6r?1B#xR!ZpI zjF256e&Y)zX{&wReTXHX!b0J-uY zCi+){-ltO-*tI8{d&D=s!bAf4woMX5Lm1t_&%+9`IG>3S`E(*_{m&sLX+$1BcTuDE zQ6m?JUO8n#f8kd6grKl5qCn7-_EV^6qbMHN@g(Ml0NMyd>co%{gG>ic>eR_-6hX@a z#n-@3TAdI?QbeH}uXyv0?Yy5l0<2MjBjWYTgtR^vjBksy3ZLu{La<&B|4`nWouTyI zIsV!uOd`pj7KnT)!Y#AI#_MCtD@dxmZK)I^)AJO60yX(oWUr7JtrZg8+Mce?FBm8) z#uJ9=9Q-p{)*lO6EE}C5agv3r)e@8AG1CXlKt~huHy7Ok6>E%o zNy7II0&JA*or>`^OzXR(TAopu@Kr7NR=nL$yW1;hkQ(n|GIF*%sq;^v8iG`f<%8vd zS{&O{qwz7%@UI+aeT$Bb@YU_jH#7uI#spIpk27TPtTs&0rGKRQ6Txh~;)#aDBD#qu z3$zJ@Y>qfm5V-b?*(BSk*a#G;n`;rl4=Qh{4@GF}hej^c@!7DvG;-slOHKQXC6w|d zcyZW?iGb(dq3$!~a6Y>xus_su*bG4tGC0kV_seOeyyEL&7(ZM4 zGLn(p#vtZ_M~&j~@c9Z|#UDOoOE>XQ7kt(#5$|E#|0No8nwE1qf&i!LhgKlaFaNRi zs;UGFB_YD@jev3Kl~sNW#V59p#u=4bZP0JB4t~B)CtHDOE518DFbJE4T1uu27x=P3 z&L!#&Rv;jcsFb)$INTG8-C0pOp1i2iXu|79q|8goMLoDq-7N5SzS{}1A%8Ew7fR$2 zb40qrtJ)ihdIBA%4a!6G)A7MIp3Nr5N9_BizMgV9w^v!1)v6|%eHZVO4n}5VGWFFE z79)3EX^( zD#NS))YII7iu;ad<#jA~$QSuLrdW$wesg+3&Ck&{_iMCWh=fD}Ucp?L==3afL6{fS zA>D6?)z*Jv_+%~ZFDCMkPj9m+cmrxe5J^s zD;y(CIQ3AQ5^9=+ZzBthIU6pd5!*9gFzL5A!_ACm2r+(Y#$2KSGa5mnGGi;((=fvnnk z`0O->Ynoq;#+zzJ z_*a-q2H<*d=D~;IPfw!@zflAdtcoTXnsCSm|rW`#5 zc^r{l1j97MWv;JizxcSBTNgPoUblF@@#t44eTvLw49Y@91Q(Gb$QvZ2u>?b8?8n-gHAG3FQ}n8=el{zAcwjmNXgp;=^5Qk~+%h zJC%eT;=7nvY4i8}r~cvaX35T?yFcF&lUzmUEDYjmUrAba0Lg{?xW6;0@epPw$wZAd zDY<1K>~|GTlaF)|cxI99cX8Qs{?dl}f(bioV6BgI9K>0D?N!e=lVZX!FBP*WaS5y;D zBf(dzv@tA5Sd7cxqDLit(d$yRF zGe#-Yfz^{0&G$tRI!Q4MQ&6@v4f`hFO3OLzbP9bm!}#U36JtD|R7c;!Oxt?x)gq|5 zC~*lBnO-W;%&WreN#q2bLhdg)_EtVf&}eUeGM|7y6$Gzd9|DVm7!!mZ5FvtiiHmuZ*ATZMbcJL7xy~K7!?;eWOVlDmPdX?gMhgPM)-Mem1-;P3CI)3?5Z_e zqZQ~SdiXdq@`2;x1UTiLWv+$LykkyNd1c>qEz0)(3%kkzddI6Gf@>^_+Co|c{s_H2 zdTh_*ci2l>SkT0hWX83HbWmMI6ut!Sny@8BrO_oY6pA5YkuJ34Rk2*LhL2MeMPoOb zpKR$P0;4FPw> zXHr%v4l?%NC+||6Wb7}mO8@19efF0GsQ=QzKF435Q(s2@v zoEEb(6#H1k@Br)EO3@@{`PIU4N3Fl;KIwp8NH=|(!Hu+DXUW`Hmr%Z~Dc#VeldSWk z)dz+u2Znd!y$1IVyKH2@_nD8~0)m2qyYz3Ayz;FG29h$L48P9wJBA=OXgTJV29+0- zW|G4nb?^(_e)^Vw;PJi{ovsy~p}k7dhSqvXLye10CtQWGE&o7uQD>BZg)m58@WK2| zrof|fo41FPm#*NAd0<1MC52t3e);>*e5qQ8a()k)ukGTNJ5mknjBa-NyXn~(z3tvK zjM_X^{BJ#csTjeZOwAy=&k5Eiguw^)1=r?jzRvI8oJ<*-k1m{rWc%s1XN;O(dfA&F zReiU=1f?2jI|d*pU*~%B8`AOnqCRP7pA@7&WjranJ+^(zFW&7;Tl&eZoBh_+)nHUj<9Qdh?18W)JzzxeZ_jq9Uc&^fGvOL}kTXIEAU#6xnrKpPY~I z6LNz#(iD$Np~V8&-X)|f^?~u?Zby)Z?Pr<6b&HtR2Qtl`1*MOoejf!ZBwc;L9iO-j zhj7l_1N?=-xK{OngJ432?aJJ}Z9d^J_fT*<2e8-Yl*^uTLM@%88#_0cz9}cGv?a^w zbH2G2IFH9$%lsLszDUc~TSY=dwsjDrq7-s!58chcOwPhAOI!QLmaowb2QIK%CXOdt; z^qzzO!l-B9I}iMHFu^P`MSy2$W)cL`QF?x`26r!0_8jXuViy-pZsmU7Tf? 
z<3VpWPH+%|*Oj~71| zy4rTmW9_Y=;tT;!&WX*6bsK(RePVBW%n0i9>OhKf@36l&AafF9{aiOP`*97CN@;T= zDip$FXd@hl1`NR3va*z8!6jj$&syUBfhhFh;L=@(_M3)YXu~g4aU%~-Qi}<889UlG zVXA)FdL*@#;Y+aanlvlD%C`4`soREo1m>+;aOW1Veuw1^yRMn7E3ABuk&X!6vLXwS z@5CJ{Zt*R06Jc<5H#c28$*(H;U-<-~WOUE3Yau8(9@jh=zB6~f+#=2zxpSWZ2& zgDUL$Jq@>AR-DM@-Rbzjcbt5}Z07)cO>BrH@>z4Flcx0txYuag%>LI1#Ay4Emfv-{ zU^)HlHpMTvKF8Fof6w8NV+^k9KH)qS&E*JX3|9Igca}4Wk4ccj+0f?jjjqq+b1-a% z=#!vOk{#|-M#e()oU+}_ID@=Y+KCCd%F^p|3aOYa88z5y5_~M$Z5m|4>l?qouVn#? z+R1#|8B(J$Hs2Y@VEa<#HR^uGfIUe3e8L6#j06z-nlB!?BD$DCnVV%EU8k9P%SE zf73uWd?6CbWtsV`K>Vg-k_rLIE(8g;r(Hm}L5F-hIYmd$PEdL|k=O)`BwS*3gOdr` zR4SiUJHc(hyt+Em(BM6V?{e$%BAahf$d1Rr;NT5~FU|z#E!g#SGi#d8;bDJkXe(Kk z@G7KLoJ+X8?cDf2sml+GrH(=R9HjI(aPzy~MNUbd zW(ya33ft5zkxUxJXk!>Y%0RX4<<|%1s9_oC67*F6ej#JkxhuXsaJ@hvoYyIgqcu?z`)5kk*Y212^;8*soqjr+HmmZn&cc}owuJJC^ zx7mXs7Wcj`=^q^4NXZs@4^H0aY_;dqVe!dCh9Hm^PTmWVv7OQfCy1Th1@x7`sm~bM z*PTswcIok*mrzIZszT9UQXRatz-^kXW1qwm#uif-%NG|6M9y}-e;asW=(VuG#ut;( zF(P7ZV_l94IqihP2x(PVQ7M2Xqol1f|Md`QI#@?cdA15Ci+1_*z>g6i1_W_&B|EZd z5EM_5n6LNbMP<@YVEbGk4(t0W=V9(BbxnL7yMXj`@%$Yq^~G04oT%PN%;$)O+<3x| zE*$|a{Y#n=J(VmoL)fVB{%wnarC6kU8R-bS`-?c*6xyUyO9lPO#8}_YsRda~e+dA5 zT3>g^kG7wk^KMr=eoYIIME%&u**!_t#|?>&0=Sgc>%H}QhZFCDnjf0I!#f7DKS=t` zl0UAeKZ3x73Fb!X<`hq4cdscmW_;4Yw;9Q&-I|T)Q|rBVx<^CW7$+r@51rL%MPNx+n=5dsiJ=)=h*uO zBYCK5V^g1ZKejV<#@mVG4&P7@g~p378W?{DIXFL{c9q= z7tcWLV;S<>WyEjtStqkpuHt@b+znEm7!N)Ay$o4X()1`Fv)Wk)x=Zr0!ZbVP8}VSj z!R@&Ha=#)qPLTCV!#rdj*~XVvc*nyV$EO!3<{Rj{293zYpH(<34UpV)}1?&e1$~1cKXjk|A4eMyk zn0IEcVM}@ra1JN9NPHn0SdU}-lCX)kwVoh%)7T3Z*FOqSsZG2xlc^c~`8`I4j{_L^ zbof}H&HF_gaF>vQDitpZ#JGIR7o1T?Lq=3xh@^-dlrbganNf4E(Eav2h;AKCC@YtI zvw!027%q^b!qRifD&t4STZpiwu#cFAsqD*e=kbk}-)O8&Hv4g{d%bEE2xqz=e_R!g zceQ5}AQ!7Ev@;`gP=If^gDI)AS{}{S-lED``9)$r>KVxO!n;CLchib&^vRV5oWD}Y z;m41zxIq%V)&I?K7y>T^@B86V#e{8c;V zX3N?$rJ%B31&yv^m`Y7vk}((;tD(85*&K$(Vjmt5mc{H4qrXk|v}wmHd%`ic`LPKi zm5}oFb*J|`l$8jv)uhEc$os^x$J;HKut>tsnle9xH&aPBcW_-vK&LRxPu|4nqrXh; zd4D9+fAqnO33(K({~jfgI^P@yzO=tCZu8w~Xkarm4g&VeM!(3hZ~cu6X@AtZndQ3P zRT~iuty7_j3#8%N&$ea*BdkW946(+!zJk@J>j^(6EAWa*xVB* zl1)#$1$w%E0FJG*0({*sW*9>KEMw1^K19^4V`&5dN%qv?uDU$Bq z!;Y?QWng7gj;IvHe1E5M>ePWm9qM6H<9N{ zF+BPK%UB*>a=NvqvSY^z5SI)_FOkX~ff5{tDTP0hpyo=JkRevzTVtx7FrnhW=}gvp z(|>{N-q|dl{{0=P+Jp)U@opTuYw7Ao&f_s8-Vp{NM!S-y>}wV6gzJ+N3N3@{Sa+xy zTke8i+2aC-89uRdf$_?zI4Nef+x9x2vtL=?Tnk1)rc{@g8gAnnQn3!OL0_rl;-`cW z3P#?k>I#Kg4+iU74;I{t{%~l$E6VEX|B?sx%Q#RvE}da^(bIUPxzvd=4k|2_^c;5t z?@g99zj(v$Zsgz_b$Y!I^yPUpiM@bLU|ET*0Z#!M)1jG%J<>1!LO@ZJX0lxd97nJ5 zDt5G`>+4T_%-T&Y5ZH0J`1F0BG=pYVDDF91mpJK|o_sQ~qrV-mU|lF>zt5~YB>$wY zR+8AQiKAOISMHF^88uX|9C_S%D<~zQCG^wdtd#~}RqCO}%&FQFfDlAbc{ z)9_(_sq5FI`c$Rn{)vqdfi}PQ$_E!wSeaRTHEgMoZ9HLrIH1fPRl0+k58VN{P^+kC)vr|(N}KRLR#BRb{BCW^_w$$>dBN=XC3yIFBO-aBK_jZnXF2L-V4IIaO#vZ567S^6 z@MOiKQusMh!iqp{ zQA~roS{cmq%LbyU0`+m^hcQ8$E{|VF9hb@bOuR@oIcuTER&uqe67$&j1?Cy$FfH-DXFs`YW;<^Cvdpr!cO8B#k0lnb^-#iulGA zT<=%HNWj=o&v3{G>>~+3q6DTq9HS(m_ry3$L^SN%ku4GyK=i(TT50`JcB>Cf_oPES z99s4o_M;NRM&#hKnqp4$ptJ*~tF3m8{GWV9@{Glm zaT@s+5-Fi*f*2HF^gpP5?6O-TR4vV=MObkh_0&!<&O@r^eGKi^(XieN?`hN$2(ML~ z0jMgabOgQ#9Wo55HWaaGP@E6ygpeL(kzQ{`oU)bG=b9{hpnst`B{XSpTVS<{y*3K*RUdD~hmKe={p%{Ma=^C+N*eC- z?bi%kndneFYVNNLtoPujHCA}?an)45jMe1Q%BZ@{N0L?0Pt~c|?!jYd#JIFszcv}W zjmnvw#4aHGc$2+?4pc36&v+jQ@>~qho~bDsFfr>;JeI&akV4lCsuK20>$xktMg#*T z+<-Tp)2kQBqGd6B(dlmkXVV3~Xxxt6hf+Bkt+DgpM zbq+der9#^xRZ zav%!;rqR*T?Y{i3sJ?iDVc%iWU0}ei!gk%k`a?;`?7rz4mxmki&ac%7KkEqR^5O_I z3Z?dLi~@B%#v9G3bgHZzoGZ;`q}zxlO%tM!TDYEJ*2nDsR+Vx-!=YQ)rDE%?gim}W z*tU0Sr#^z``|B7~+l$0`+bG|S7PXK3!E@E8gYL0IpK|2;(JHOL!CK*zYNB}?;OE2J 
z_(K!be@pRRu-YGV>YwN7|0#=#wh=Nl8X=P@0mlv(^9WWGeRGA2<3B*;MRo ze`ZttPPO?PN&Aj#IN`m?`s|r;4OI8)A_Rp*;(07df%&Gz< zesR3$`wv-Fzf-BW*k5G+kxIn{-~gpkaRET7Nt~cmDlP!%t31y$_~*I*KfeF-XGq5j z>il24^Y?fY(5$8YolE|Rv-me*C+sg#B!3S(c{#EF+qypsy;z}3#{LqU@Yk@Dmpgyh z_|LGD|6$92FZzW2cbv)JJx|8|l6uGb9DVZpGgp7J|F7wV|DU2yUex?C*MI)C{JZFr z|1tmndw1Ypqby!#{9mU0|80){@;CHf;`p3@+eLpp@A%IgpPT2U*Pj@_GV%XV_f}zb zEzQ~}?(PJ4cZV=>cX!tWch}$&Jh%jRCpZLm3GN=;-8qx2wf48=`~P#Z&&9c!PtB&@ zx2vkFdNO+S=%FWrCxj7lhJ7oLF``F8b@c*DienkfRJDY@t16GWpBzdl$!T|(MEipAoLbqDQo^SyE-kvHf95ufmDmHc=S z@z<}0tS3U2Mw6Aqrh+G)2njq6Z3!F9yeaGRNf1H;Xb2ySzo8|CJRk=Kg$CjCnR1tv zF^%ODr0%vQ)p!V4Nt6b}cpKCgYW?`(;qm;i;v4)Rrk6SX7EN~-M90vu@^dV)NY>*U zNKxa7q+2(Il5`8RnzwDiXY+CsYdw=<;YrNqE<~=$g67=MnPqAn!oEmldyOV#{C&GW z`)x-uxAM!srO4wauu(wKa?|D?hzKdrEanh9k{hs=eFULsoG`oH-JIfMJ>oaxTr*nW zou|3Fap1cgyH-a3!GJ4Gjin1an=P8i)42~%aa&2ladhE+B;FZO^b6|BfKMUsdt;su z1J{;~b(7ATwL&BxsgKsNVEvP@YIm-~GLA=Y|Hr297{mUj+N8Dy{!3F+Q_CPD_*9&o zNF}aUu*r#2ipw8LfWb}I4UEv6ntHhE;Y#Pr+ zB7L-lbjN!&G{Ngia0H=U>w5*BQjs(Jbri3WA-QGE5`p+9kC7}TtVeO4C9J%9;ZFsx zprb)Q;s5Qw1%J*1{4W#T|8m*?fA)kf+yCYhx*Y#`0{FxHPfY&L695}K%YWN&cJh8B zumA7!?Pf@LH(~E0TEB*>gga)`J8TU)P8S|Pi+wYbGh3`l{BaO7YPJJ~JqqiD{aDuyH>KO+K1v2Z}U+%cDtK{YF7P_WyFlwI(q7mjh zAWtA3kmi)zo60;-KF)LcrBCWE%q0`xe}M#Gk2=~dUX-m8bbY>u{(Sk3OZF*bKAxZx zR1*d^>UUnvLc(fhr$Uy73O`TT^^T7j<=ZgRLfvOts$?pz^Viws;gBzOrGC}h0w<>* z0jE%m5@40}dkw^c=o=4a9BK)Xb##I*Js|6Brqv{>9B8U4R<&% zIwCpR6VdvDE@7V#i&0qcxs%aX&~p^%N+|6>XUD7rlQ^EpK5lm{Jt0=%~`ocl7tn>c03ezTHz0TE68F_tnb1x=J8)BGq}W zfNhi4+~0xin@!t}Is!`v5BRq3tG3)p5dVOXg3l%hGSjjwl@YlGI*+%vhFo+iv5PA? zx<{(2qt|Iu+GEcAhPMRxqdnPy2p;CvPbjhtwri&9d@VC60x{cx0itt;C_F88+egez z3FV-Q6NxRjz(np3mtT_#9ERb_U3tMVzut1qs)jL7#a%IpY*?ZyLNSIG+B$bh$YbqS z{vx-`c2cbX*%Ny+c@wH*SOdT^j=T<<^2QP&G^OqPdChaHA|tb;<0AsEKaN6#Me=iI z#%>}FXeq#3;fYP>w)ZyurDKeJ>ldiElfTGvbqeG$FjbC&-rOMW9+tU8bHhn#=Oo6B zg6@bAR@Zk2Y<18e;%i?nO(`QtSD9ZL!$_9}p`PF5oQH|+f2eA4$k>OPPW|oK#68H4MSK&%C2Gq)-(|%ru!-~=5vAfX8sUW zt5#X!!x>VsT*)+AahNBP;qp~JqC{hUu;Ph>SpjpF)DWagbD!@0RopxLOaSF+eFyD!R?qZ6Z|027u$8Tv*Tu*IBB?4v z^Rec01SU4TseYp>N)O{yhiRzAJoX-q^Z-aS#%nZs1T{b~n4Tv|d8no=J<{^G#O*2y zwelLq5f^0AY>w=(R!@#31#n-RmlrlQXK=0ZmgqLd`GF@2CJmajl9>R0mv02Q{j9rf zZ+~RZFaAU;_f-_h^){=D`dqdZU3znUqjn@=Xx=L=YE>wd1O)vsZjUawK_=g4p0O`> z=VB=!AzQ^E*t{B2$8`7T8Ltm0*74plm;^67-whMmv|#u=rbN0ZX{u0M9UDReh!Bbec1;w7eGyInntsW(PG zyPtmiHB=uv(szJ@p4JODo8|4~C2P4$XA-~K5E?E->G9P&8FBZsgB>Tk$+wyz{WgwP z%QW+by;>Rd{n9 zUikCs6UqT7!@+mVU3OPZ<_7Alm4n%rBmnj+tV4i@8&~%DY^ujvy+@;>FQfmK=NO1{ zn$Jj%h1=Q>8cS`)?F0hPWpB$4b-}L0mIkagRJ^7eQWfyJ^0dpPzIX|o4cdNtMq&YNLG z`Z7XXt0^tSUh^@2D- zFsda8hx^;>Y?L`|I%{?egpK^6b_9#t2iu%xOYNCZT}044tro?AAs-2FaVLA+>SL2U z(7d5~1WnhRU;e+!!%o`ztfYnQ<@VMLvNzQgHTlPaF(1Wku}6+39jYurn5BZIxNYGw zNbAR>tvuu~d5k5BNSZ5A>^~>#1-NH?laJ4Xt10wdgq~y^k_`@1={dG5(UZCSs*1F2 zun1Kiq~LLAMb}$!~y0lKWlFU)i<*vn2YWJdzIMS#adLpq~f@<_mUQBM!dDEyzQ1)up5j~D?WhGQ} zzvm|SeMGTNLme@+5bz{kU>`~98n3)%uI3+8kWN~$rSQnZ(sR?nEeV~U#{X*)T7o{r zzG3&lECoF5xh_r~YxGmL+92c2pZd<;iOV~g;$9v%FE@b8RVf5R6!tdn*vd?KvR>1<*bk|O(wk(=+ zqh9gS@zp2|E7OtFyEDz)qG6PGtin7WoY}f>YM?;I1WcGfNl!|9@#v3#fBsxjri`SV zgWEnyNdxArGT0c@ZIM4UO&W1vs^A=l`o+{zn29Xmr87Osl7xVG+LYDNiIsgIIc{jJ zGq_6(t5=Z z^)JkQOQJ(Xg&)OI-3+4N@NLQ35!YZ#DIe-k^e%H8Dx~ck<|UuEx}PYU((tsq*6Tv= zirn+4P|r@wFh5Ei*JFghFLwWsrMKvt4a?k{DnH~K_aLno-aNe3_tK%>c;sC9B8@jx zpe^`iNiq`D(h(-@A}b~Y0q?UI6qGrdc9m%1w^)~BoeyE_{XaQ?i_DWv0@O!K#Z5}M zjU>}Zg_jQCEOyqksKHl^1*H~fca+oWg|=HNZBVBkP$YOv@k|7Fe9@g=*bYPHb9Xq}{ z#Kbn|vMVY#7el>8^}jrskLq0E@D`X$Bv zfx_R=dieurN4pR*?=c<>N?(H&^l(;y;apVWYL{L)-d8l%w(J(mxyVgd%doBXDJ%GS zYhLXD0mNVHO(OYBNBN-c=}2BshIRUYw!M3qpO^PKR7$NbQ{U0D``2C5I*L<={CZLF 
zYRh9dDt6-!C{;@<-XcI5C_m2gLQoVpLCkR~)25=%-u-A zoP5vF|K<;3ej+R^Je;Ii&T)^|PE3-~)oN`48;QGiBPB}v6D@%i!mL z-aH0>_Gi5h;~^!|$kBwbikY>elGTeR`ozPlsQxWF```RKykHTpDztBPg>_P5Z!+f@ zC-oSJ2~JP%U}j|K3RML?3^$)3K7_ha9->yG*?CTp<>0l23&;4!-;W#;25|3sLTvri zXCT(=Q6qwa{Uma06kFI;`&~ki182<(!aw*}CGi}b7*f=0mWvQ#A=m9AOm*-+9x|Bq zFFy0{oLfGhQ5r1QALHQ2Cv+H4+FOsjk$${^i=WnE{`>3J|FJ~;|C{U9za9Ym-+55# z&jsi|hpVx@GylI|x3X|@a=%~h{%hrGR%_XISscX&JI8N9f~8oU&x@ovm_!wOKuU$G zl7UO`3xNB_F^tRcjPla%8DAZC>aB9_*as>2fGr!zEH*YiHog%kXlo0LY5@-(Gh=Sk z?Sr6~Uni1XBk=(oy4vbET?OXKU>Xstqpg{Z6ANmMYzutDi+#1rLPp@n-TZ~Av`+dU z({N232EU&CR!a6nb%`9=zZpF9^*gw~r)XW@Q^#G`HTfJVlTB9=p0)-2D8F4yv)_2nu@% z;_jGwqh`m_DXdBta*q_&MfSZEqBKqy%c_{d$P+}B-6y;w(JHG9;=q?u?d*e51%SA` zcKiv_14wdHDLpRGScDvSGBhn>ZOFyMuY}7?5px+CmU5^)F z%EVUPK961tYoN1@QiYY(iEh$qPV5O~r?UcK`x3SGBeU6qmP~8N>0W?k0)4l1!;p|a zKG5s4-1o{k{_uT?fkWQ=?PMtHK6InVS54$`8b1e@0C0=uTqU32^ORojTdsplnvaR@ zHg>Xc5ic932RSB7DRt>}yM|J?sq#arp)COh52`K5`gaD~7U;f;_9aUp09mX4LZL_h zkSPan3`xfak?O&*4pn$_2^b#Sz`pi??}d6KL5!h1gok^=iFroZUxS5*@a<%n%tFiu zV5hWrF2FwA#kR`8WJ3r(vHf;)n|NmI&?+&?9#_Myr0I-df>uoh|L>*cg9yWDah>G@XJve@-3jdZbdQ`pfr;^5vh#O{o@1ewFpJCfF4bHrrE0T;c zk!Z2u>wsInh6@^MOQB{R*#(AJRpwDdITd1$3wV!vXP z8`$(gz1jos=BA%y$pT2E(VeW!LTv%k%Ae~C&@#-ylksa5F?-iIL8T8P=UQ$~Rp7&J zAhD;uS-U<~LQk>hN|%CZL)~C zWk5k`r*k6(X_UzTk<*l%Qgc>ij~gsH3yHlUxq<`CB^}#J_xVKN8M`P2Q&Abkf8D^Kr!z&8 z&Rcl}gml{sP(AAKlYHoOz{hTCM!TT@5OjHbN54`i$l)JL2*n!2esIceFQwujvkYNS zwg-lpgQy!ZCrB9@RRPmu;%>+Qnzjnv|*)AqsdPcJl8A8q&x&Wgq)hM%tQ z;#jhOWNXh&|1PP^MutEAbZ_s&`-Yr+a?twG2Vk1l&zH2@EiB6 zPRB<)cqTr>P-F;Q#&OL%hg_s~uzdb_Pxy$g@1P8KO|)wK{BVq= z9OLu(Pe|;i?$o@?9vlxvT!EBoNg5n;HGRR)MkJ6TvxsNO%GtNVP?cSfAGM_YKD@OP zGi!fu*es`ni2}1?*Se+xE9t(qr)R9r1p}}D;U1uPgou`+bCe>C3h_%Y3=={=Jw>=n z(}tCBWBiA`6{;nJXFVwtmoeP;0Qd!%nn26nv?x*S_L!J)T1{~DKj1Y2@OVv14CUJ$ zG4FK+pn6n2w`mW>_Z=|h?7P%{()ie8o~sk-LPjC0Tn2WHsR=e!`3R#aO{fXlwdq2# zeg=Mg$$&CcjqX+>b_=n`Y%+q;a6tP~=FcUuO&bfH=YTnw`>PplcVUATDlfH6P?NM* zeq(z}6i|<)0lHb!1eZU&pbP0}Xk;8(?MxN+Xj8_&lnFkTcc~FC zW7X9V6U^_>GS86~z@a%ZRDyk}ltUriyd6<9oP1&JVmXsThL3l=vJ-@6gu)HPsL~83 z-gNDwg^+57l@;nf5&ud|<5mUzvB{rxr2{%V5Mu?N5N*`;ELouICmvzEqkni}eD{uz zkq$_Nt{|(Vk-1Z!DW$(ZY5)UkU|o-1lQ1>vp+ETEz$`d(?~)MnmQ06W0IDcV9B5S} zegrY3g009m!5w$_a1!Cw24svOte|?;`0c(&F_;MKP2!|{Lsc0HT7S*01dXtU1aL{F zC^t(b*iCTK4RB3=^zr8AJy?u2^g%2-k1OJOttKC!YMw<#X|q8Hvz zq3ja48}46jk7hD88=}BJF2&Fgm&;xT0|hZ_|nL zQJN@L!O_7Kb?W{7pt>d-wbpZA4KcC$ha)KHGjm8?9_j-8P}QS&as3in;8`dlw=0H} zidzrz`Yt$q>Id5zYs%xw%B85C+qz552x>{S6#^_XLYZGSEH!Q7a;Qlsuo1$s7x81f zQ7^&G`>E5nqx+SmX5{hmSi6edycEGSprrz;EM2Pf(!s>>W!Xe9y7 z1s*~leR!9PS!VTy(pPPv`vC1>q&07=C3|rcg$^pC$vs}Xj@UpirG5oyVv;>bWZm2 zI|gR*3Xg59cY7c3gSUU}73Gdc<5axNeLYO*do?vp0DeFY*A+ ziZ~sTdo%P0_C{a&5Y^4(nI1E;gAHgKHSl9}LWWli(6=>queHMhfTGno(yMh39grqw zBK3>^&&B+j;$J}An6Osr^wWJtb0_YVOO|qI4bU{M)0dR#RwLDKq>22S1VtdMez;6P z6^x&pICreAce#4wSh$$*0w%#;pOWk3cA=_STMTSlX&Z-`%KUzt-%}hN(KwAZL~uhJ z<8IU2+nHE1@1Za@ZT-0Cf8}>^v47_eO!&Y z7<;^D)o6}`^5(X9Jw8@ZU@%3rX0oD`TGkYSbIOYg33jqFWs2>ko0FZ(y3O)j_UNmP z;wQLmFK=AowxvIA{%iLSB zl+*pyvcjc$>er;#gIO`+snh(jr>*$(NpCJm#p^>EtZ;;zBKFYF0T}&XE#;~941j?> zHSP$v>TxHP<(3R+cKw?1x_Hjh>JJY~O$MJvnbG1ydNJ?NqEn|H7iDC9Pml2|_2wQp zfgfp{pH}NSHS13|+#}}*Ec}Fy9N(rnD@L>tL7YraDVmb4SKgdY1D6+N>@{Y_I|4Y$ zNLn{lwUd)-*|r7=fM%9k!$|2nbawBxs<2I5Nt?X%gTqVgPEb~iltbscL%YoHqGJv$ z*E8Pf6>~dg9Xp5}vEB`MV428gM@*+Y6>XhsA+=&?yJfz>Rq#y^hRm7C=WU%Mo$5Yr zOn(@_t7mi~!VJycb}x^PZWhqQ!*lw88LBb!yAF&FtkP0%_tpE8-lbWGH7|1!Rkj|l zlf9$CQ$x?@X+GN>)zSFkl{{-6P|7Q0D+;jCcc}UjA5C{^UoESvWHQ;ZGh!<6ovlde zg9Ee^ips7N)c{#2uan;;PX}8OHqT7zQ>4P;%dy`*#4J$cJMwi^Um^++I)|5=g?=YLK2+(Smg|2V 
[GIT binary patch payload omitted: base85-encoded data, not human-readable]
z3&hO1pnPEN;NQ?g?zxu!XtH1}bwNCn9yh|t`jBvil6Dk-2|(ZHOy^R+CBnZ-kw`w! zd=c?^E1JP|{3VSXmUAjypM;H5)0%WHiHa{oOhl7Q&9i{3#mTQ5wKA3NGgZAS&E3Yu zXpxQlCs5#OJa6|3P4|s1UUj$zyBnsBFK)5~NY2rcA>C3r6Y&9zI4$8SrVCp=^{0a! zOfenEM7@Pgr&FN0Le;;5wc7)Udax!Sh(=NtvhL&vcgZ5Hi^Il?se6(cN=d zf5@vm$TP$FbMqQn<9)LGcjT;wIG%$hL3=^zBRdT35ZGa@&?L`45CHwRjl;UMx#ZDJ zJW&Op2+gAML6VaCEwIoF-W_`}>p_r}$VzHu&bna9>fL8Hk!GrsMBhg?bt~~DeLxnM zyXM8oNZ#t?i7qJ;c-Nbd0OUts;%a5Y7|JWD+fMKMdf3o~bhCii{`oBI(AC`Y*^+(Z z{#ry$pLWYdI+2zevYi(9{{UrIVtAQmBsNg#!t z>D?}&@{#ghXW+~W)=ODHkj{_{C4q$AG87!$tSLF-!%YewL$L@&^%93{Nf$AEIfmPN z8f{nPnHnOox3Gf}UXn(ah(c18$+NIq2@nAqJ{XEAUKsZ}6tVZ_tcl4uaN{`m+7B)3>7OBUW(=oL&y=u4WjX9V+862 zSOGPGR2bKW=Y>0sm`)O1umf@YLDtGPKJNd3_`D_=>HrDy zq%#et#w(N?RzBkg&GE3id|WO)$LnWKuL{>Zmoaai2#M{{E=fBCXvMpdI-H7VYg#Gn!8t-R$a*rb=e99(!99ji?z}%P4!elG{G^0cM ztExlV><*V}zMfZ2dKO3LZ!NP~HXUz{EO-W#D$}dxVZ-O3pN|gQe!;G2GAEf4Il63D z=f|h_P|aWM3NTHY+GQJR!1Gyf@S~r8(g~+@*(v~nuf#EvqKu+#gV$Of3~e4h??Q$Q z(vnYU{D`xNol;+%1GHWxbSK-0YfPvK2eFrEdf8V`h5fKXIK<4T(5oX6$(NXN+RC~& zZ!tx|hPb>&6Z!&{t8hT|@5I#sSXhian^{{anZ@Oc>$9Zdgd{}paMCMSSI|y-FksE* zYbOut1vVC=(@nc*88F}{m-`rO)JGVLL<5fCNuFpx7|D5PN8n&Fh%PT!c?&Y<3#HhD z7)jJNsicMszLhaO=!EeL(n)P1O!p;~E05206JoF5a1i1m?CFstr- z_Lj^wFM$j^Tq{V=d3IXOndmYbd7;8}f9EWWQT*^gS8A4DZ>TEz0jf4rcmbG!)poutj} zB-8Q@zh0^QPvBN#%5qStuB`d*DQV(Q`|={o;ws;^bsd~$E`bzNmk#@~cG2h~>-SI? zw@j$mCQ@4%{e~oocbmkO4I+mp*2wQ4SYOQ;*HuYvcr)W?5^-a3{~n$-j5g?>5SBIT zRdI5KA}PtxqU{!pB&7U^WvA8vVhjtrov*(<^EGt91Ba%fWjO<3J)!u-?`| zY2{U~t{OiudlD;^$VRS#v-w@$Rcq&ow3U%R0}J893< z^+9FzZHh5($yd13XyqD${vZSh!10pA2&&u`<`&1NALr8iFs|!Tr!FTkU&x+#=lGJC zU+Ay;8Yj)4A%b;I;icu`Fzc_bI@<$20i&BVN0E&fSA%1%?ZeR%( z#m>bVm)XUNj#5*x8QYPshw8ew!a@OsK!3*zxf;VAom1u6#g1kC70+z`aF~_ea1pIL zR%mhw)E}EWLtfv0&%t#JGVDkWZ7egn!CXBG)TS|$%60YaCZ*;j@gn0P!Iu4v;>H&~ z3hRD_R(55~mXDd{Hm_oWd3ngaAB7;m0q`&52`KKoj}bp=f`|3w7ZuB?R}N=41!2Y? 
z0hnQqeJ@@Kyq}=j9I_YFkv+HV;3mEs!QW@lo^WqpMCRTT$#O2sfU&^Xx8v6J#7e>6 zf!fy0{mH1_V7`IckZqjXN;tPsyUi$<=w4dVW_d2=fT z6OTIdLFpFYiR*E7FOXBK0C&+4sWa8)WeOoVr`!!)M-mGUecy$)-3@JYddCy1Dtd%zhK|k3WAYzSPVDr5Pt9 z3+ldao?p$ntz4(?0>_-q|62u(KYqhDd#e&_r^ZbhI%n>lK!4w&VQH*4Zc6sz{NR;> zcE@DtFbXKOn&EDY7`lPHJ^Rcho=~8X1R2s2Dpo5t_Qk!xv{S!l7Z#&J+gr6OT&6hj z*U=lIvh0yvtTU<4GcV4-tbIWPDub7#z&}+wL~kn9I)$BbWTA(ps7HZaFB*ZsIfXS7 zWcjbqf7BvNGDs5*65s1?w51$N}mL8zt;>?=hkDWVXexQFo(@z zwvLk~5lxtS=bOsMR5e*Iw;O{qB|ZhW9{c-iEdp}Fq=|)I>j_;3XJ55!Qhb*VEj*(Q zAEc&mK*{t)AUmQB1xmL!V)@9ODh?f-ta{dG!x;-RjqRIt%^S~Op_<6)8Al`tk9#a_ z{RqBuCO$_@asZwc$4wH1XAkmTVH}07xozstdB@S5fA|Zo#woDI-6*%yBJ&S@7(|-JX>)O z=gC-FlCQ4|=!ONBIdh~Ir2g$bF#bb8?T@3cvoHD?(WP$iDgM6EwW8~@%2vCrky(it zs`9-56&+H?psS>TSo@6=^lxe5_CLs}9T1lq5r2m={C-ol3C{qP{}Te8knkJxgAdd` zzjv7~(&5i>dlj2nsDF8gnqQ4Kd3a?ELDh(f>iN=1!H^Ph<+5?;5Hf2O;8Ny~#G4FG zPZ-ev$-BxVdh2{M`IWO;rqzZnxWvoC0{N5Ii2FOjCG4eWzdNz~a74-MqAk?cEkD>6 z-KRFRP);}{3r0pD)xe_}2Y&zIYUjC%wBS%sD7jMHQIC3bwnpW0(5m@!_L6n-uJt2~ zH7jt@vC|!_&O$w!5YX{0OBp@ULNiwVdXRY(GbFHH_=AAhX<|Mw{w#$H)rP6q+N)vr zl@>SX561dy@qa>#jg6Z6g>eSGsa`}A9X`!JX z5(vlf+Brx~+yfqk@4w+H!}0x`nWpk{D3Yo#W&r}jmT11&6>nlAG((fO)XfmtfU5;s zNn7E6$JgRbDs@DE%gj_^O43%7+AOiz-UxzohW0h1Of1lEf5BJ%(aQ#&#VE`A0sj91 zOksMXhaxSEI!U3)?+bF?pv(#N`#|}^#p=z$?%IXpB9eNG1&=;x6FSkKO7N!XbXJaG zhv_l3BhEU$*!kXs`f3&`p`R85w)bB*rR24>L9m+fbzezJhwDI{Z;b?%UJn=7RwFK_ ztac7=jk|zm45|~Yc9IfzMY8%do1#u+&Rk}&2O_qG8Yz18O(jvy95Y4Jgo5*>Xw(5GL$iG3T7Vx5}50V{LFony&lMExeThodoX^?X1T-0jL_~~(zSkC zf6xRoY0JzuD+B@zO7y|zD3hH#px_3i(C>-<7XVH`vA>{ZFbF#3eWVI59u`-007rNG(j`SgQM>w0hSO>6Wgz>vml(yG2<}UOgNJRpMT*x3yG#%UOw`b3pdokJe^*=C|ARP3`di(!cWfScQ?j;H zPeCas%HK;+jCwJp6?7_vn$e=huCZEPtnRCA+0`4PNg5y)Ld7+1x-Pr%&aF|YeSY(A zuxb@U5#4%=UajQpqS0yQ)xU0<9O`fdG7-rcaFVQ5&|e=Em!uo#~k0RIdT>4`O;cJ~Gph-P!K0Tz_-_mo$tT zC@Ks+o88wFkP<_Q#vQ#i6sZRIP@yt5ZakPCyJxJ**|N1EeQ163$ngDpn*@i0)*9^s zZ)QlB-|21{txaxDyOpeDHVl^S+UK4Mx)jZ_3rAjl)=FY@2H5P><4A>A*59(6X@cOr5(F4)N zD;F1Ny_N*BK#l%^Fu1B5o@%@9ZAQrN#qw3?`(19|2SDr-1W+wL00d?-IILC{Z&gDv zs$$fiJ>bE8pXCejdWc+&rKKP}et^`W|NF&YO|V8|S-`r^AR6Zaikxj|Ad+x5%z@^Wu>HAQ%AVCb4JySq1iV6e*FchjgN8u0Q~(P6{vY8_1&_2~ekZa==AISJ z3t0O(jU;Jm7qE^Ch$gaBUr%_!@3$hjQ9g@El> zM8fKd-il&!`>M)($|6!Kj*zkxmi&{%q3_O(+&Yo=b9{90=BEz@`dg!%3MDma%AXpj z-F9p=j7!;p&9VI_wgz4h>&LRb{;ss-%52ZHPPW+5Z*BbMO+CTh+Vb+Vu`|!0NQaN?FHIl zvAt(W(!?!by=M^5Un_62`3zyH9zCAFz}_o1+-V^2qVGVQqg zt}h=$`#0V(R#h>4(_r)TXrJ5BF*KfRyK$^GI()DtxqU;o!_&KQJ2tgpYRASvTXV`@ zdvx*1NMEMK2by;Xd9*<7#{t`jh$oP{`m0G=X256;O=cF35pL&k)r0wy$F6%(3Dn9%eWVM6@P>fCcjZhvN1MeN{nM~*(TyW$10 zdEapF-d3B~Bp>aVK3Bwq2j760@NbW7erzV)u>X+K2}K2vYr};VTY*U# zhO=`tcp)OM_iU{IE7MRySRVm+_*yA|5sEWjp8-us*k%P#NgrKY3#MvaSAmOi>>>UH zNokdf<4O%pl0YD#x>eXwFX~07p8T~+rzYBAoG`3lsh2o4_8S>QnmCc8$yZAt^0Qmj zoCJ9h(An^BfLUokc9f9hf%8!hcp-@e?B;W{5GBFzr_P4P*+dzJEH3FX00|m~V9OVC zpedPH1s#h^`YO25e?@JQ71;O<0&uFugbwH_IK^mCv3IPRo!Z&Fv#&-=&}xDb(qr@K zEq86HG`AlfyM)C7>RAgJ(^Es8-gRTnpbJ(ImRy3@V-EJ^vUR)n7vUm`>;Y);1Q>(O zCA3(L^=7mK>-_^6|GhI*Fcf;R~Zhxpz;pfEL#~ zE0=Qe1y&3CYoW#De-bR7XgU1sf!4Xr4Gho-by{^|{Y-mySGzmB{^oVJfbmk2v`#%I zL&f-TqG77PhJtZLIHA-vY?yBwyJstaiers!GecE(Zhm-AozZS*bOxKzYjOA;?v{MY$ad8@`w;v;=~&_AkbX_&*u+Cse#kt_`sGHVsrRm<51d zQW%_74p+>t$t)+iusL0K!qi|0Tm87Q8~xf_6RxR8_kU8#4J>uXn2 zI^5(nLF1#HM^ERPavKsXbcd*DB^B!0*WETd90?5HI@-M1FN4NrsHGY7d7GyvHuLmM z>df9JXBzZoQLE)7PH*8 z(yJL0gRPtFMZ?#p(djhWmPX@g)SE`r3s_smps`vtUrM0+6KG!oZAhTu1PZuS+xb}p zg{C@~EmzpI?_|UFtQW%4+rPiBDmJ_u4BAexCZCtn5s;E@d z%cxZRTvWPjPQ;&=L1~0wOkTZY~mX~BaT8DI@ z#fZ=(3{vLM{v?c6hImtU2uWo2P>D+RBSje(11OE;Y@-E_8i(^-xv;AHnNVqIBPd>0^nfGu#AmQQl3*dfaRNZ=R)p|J?-i3 
z&5pb7JbDM(zu}ItNND7ybww;cJf3X7aWla3htjp%HuN}UEWbU}9y3EU-;{F1k1T#W z(%0JTam6>+fxAM&I7>HwAoeFt`k`5Iw7BdcXYF zF@Safh#ylF?9j~3)zsg zvRr%vvOPG9XEf00+j=f%E!MCMzi|zlZ_&|fL_eeqPSNBrs(!&5H3Y7pRG-qk$)Xn> zoKlp1*$_TV-cDg!n~5`V8pYR@>Vj7r+}}9B>0w0;(uB;EaQf7W#rG>Ei*NjVtw2Vm zp}9A4S}w9*Q_Q-(y16%=QRfOhps_b`rdVyirkGQErLni}^Nqa%xJi{(wRjJ79T1dW z`{VY_wFXmFTcmbFI;@7F>6k*rB?qPwqqmJzNUevrei!{v&vkN=UQGhbXtbM6+IQP$ z#`<0E#!AuRmS7bpt%1|BcAK%XZ&xb5YyRF%--c1lTL8WJWZZs73Afi@f!i|~p#=cw%m5XmmyZKNyTD!TU3Z|990cGpJfDYU^Twzc2S2ay$ zy=bWWhLI}PBrzJ^VsM%P43nbSV5lQvCUs_m-mD{}SVx7ku0oXD5}`BmjFHtccB{eD zwxgwHV_Ox46QS%Du&Vt_pW$~Yya1raidNm2i&;RI&IJepi7ueeWEde}iCtDyxmcER z?WPK)L8JN<#OtfTdWHo^J$5-))>JID1%SO2*ZTQr-Hyp{{VqN8O{Kvhz+9|+B~T9u zsMn(Rk_JcE?vB{C-vrBv%s+wUA4m72jV|}kORnXg6Rb_tTSb!&8>V%XyzUlnRI+Al zXz^!--9rBr7XcE=7WPYk5nfL(WA=suX0KmB*^H*L<(GgcDtC!x^mGllE_Yya&X!;1 zNT$5Ze`$p`T|JrD5w5>`@Dr9~^1yVz`*~yIKfn!q8<6un& zNkacc-dIPiQRnBhCdsBBe|TpC9o=--c$DT06wOIIYtkt>fpgXmR!xrK1TH$zA6P9a z*^t1i^Z$b4C>kD|1vGJJ>9_b^z>R8vT+9{hyp3?Pgrc3-kl_(c@>h`vPdFSI?`3{E z^ZO#p2am~JY3emalE2CooAchwIewbR{C-&q6#IN(W3PV|u-;p<^P%ycx%CYmmJ4_8 zduY48wIR%^Fp^SHuIg-PV5ZB18SC151Dn4*5_ooMkFB{eZnW18$KwMvBD$gf&h2%s z^w{jN-md@lmD%+%CC#vA5e8UM8meLHNV`VhsOZSyq47OxPSEYVYuN2>9DsBbUHYTE z8%i0XZY?6}W{9Xcz;VxtyXd`FLDcXwfT-bzS3}hC)reXL{lNL_6*|FVb_F;B{p-cc z9H(cohcz5c;6Kq@%_hxf&(U%?088Wh{9Z5gADGL1U@jq|?kiPAj$xQ=;l;`< zy!9_*Mj*_ym;f+jeYQCf3z9_mee-nJ`^CwX%bH!q%xfsMHizHOnWu@a_m}6kRHW?B z+IkBTzZ@bK=XYOn@8dgmoY)a_WVW{gIB2dKzkmDKotwg9e0^gEEP}Ttch&U9OorHC z%+a-Mys`{ za}fP)dY#c1ci3wxE6r6~^Q^zVIwslb;&H39K4`L5%DEULU?ls&NXC)QLaz8}2hd9S za|p{KX?eSt3ZqeK_#~43mDj6`OiP!QYq>_5`BwmKBMqPAyxCvnRTW?EcD* zl%r|W+&#%1caL~HjRWC{X^Me$nuov`4T3R(u^}y`Xrm+`D<@{ms5O?$+_dixYv5r< z!&xrpxM{-ohpQk%#Wj)PU}*g0{*FT%n*ChG){_VJ9v`cCPD&0Z(?e0cB{5u=9*%IB z{+7Rgco3-2H$O6Z>)$;**n9uq-#7oQospK@sZHQm)toy8R`P>jp;Q8N7(oV0_Fw%4 zJI5g+%>&&xbKbMNg8QV1w_t3*YqHRT;Q@Tc16yRM9B4ywpDZJ+vM_;V>u>Eft-qCo znkE-tB4`z%P}9W!{g^Xq6p)9ZDgx;DdeN>YUr~ZjTj6duN(SzF!z!`ge?^Tl4ihIZ zH1QO!z+qs&^0Rp$?3R|0-QcO>U@l1fAJlN~mK^|3MF5^6@psjbo*MxF%3bNtMN3w? z@5qura#<1nVv;`sB!Bq7Ao&Y+x*X5kuI61P(ZMV8X_^HXnNXAHt)$*w>2O8uy4$tJ z`~sH$F1i&Bxm@p-iif{TFjmoE69o;ngXRjNaB-g#>sx#u+JJ$~uPT@MjX>sy;vsp@ zh5CZbZ;)kvq~&9`n~3ZZKlyVqKhjW;`RGjCBWRTdAkN~$_0i6lS)srMC%OQ{J^%!qMX%2P7Q@0rvydL9ifb22{c)MUg@M$^ z&p~S(kori5K{WNhj6?|kCqUx=sw5PykoJ6}{+B@76TJTuSP>dc zhb+nAMBPBOfNp@rrabAbb9eN#-FL@z8*5b>hNiTtvsV_J?{7W98ZYm`MnPw^IgLDb{OFqpq}VE#NvHPTv=^dWQ!(IZ~C z|AAHOuLr=@tyA&i1}V z;H|EO3rp^NI$;fOs+a>Uxs2VK&b9eRmy9WMRj0!5lU40c9%Y8$hz`W$7#Kc`$HAVPvYX+|9co_`O%CUsftpVr?L)AT7^(7( znM^}A@sQ~g1Y@59%n5!x2oJ~UNoD??exvH&!lSznm-%tz`N5nh(~}>m-!Y%>*4K6L zsq3#hwZGVL@X15h!{dqg;(a44t{(Bl#;zD$e)Xu2Jbd?iN7hgN;QjC1`~I=@lMlZC zZT+3k9d29rg{Sr^$C_UsZ6$m}zB*2Qq*&5fMOM!`tBJdj)%G-=wI9*5C8v_OyUt4c zf8x%k*Qo~XVky1bDq(j%xV{1?kW|f^4^p<1xVEO-Sv{-Ux#3Y(EVfkDJ1MN*8As}! 
zXf>VpjCyCjJ@(ORBc$X>DN3GktmKK-l6}u8c`js}HGTr8K4hIO)|~o2SaC_S=!C1L z)(t~x2EH>*s&!)PW++NOLa$5op&wPJAUgIyVXh!~g{+JJK ze==AlUh)MU>4|-V9hWR=VM#hSQT6BhK&7Hh82@dX#eaqx{`=cW3%1ZgLMym#DYNV& zA>hfXm6k2B<+|DzLV@S!TRPvXPi2yxt0w-Fv}fO(@#=eyHu+NSK-R-OXEY+OzSZD< za5(CzyjBbD4t#3CWi@)74w3v{T7$0Y*!Mdh`SRIOh0Au*9K?b9Yr3E58623K(fu@( zbg9n2B2c$Pp37Pv0R?dzJ%oaP#ZYhwvenMzr%#Or)w3EP?BUn7b8(u}s3_#)$3%O# z4qn8^PxN?0GV<~R-`(}yJuQOO%Ib|ylVB6HR;TRm-8s^^uEV3kj()t)tCO(J_8yW1 zpmEdZ;aWlQ^dr^O(?r#$?E~r{q+1;dTBe|&MudXk8Yp?vzJy(N0ThJpK@@~;{#|_9 zRMV2nVdMVxOTB5gmW_At&kht2TsmHnl6@gzTv%s9FeWzB^;_5Y!d7Ww%2k#FC+y`2@z zlFM#(2x{!3b^jW{iF~wfS&DtMzWdKt9Qsk!N2`5fZ*bN}3sS;JBmj#kVVf7CHdG5y zn?$icL&#p51a-k@Q4C@eiaKRN=1h}}&YmEXXn|i1E9h0stzIQ|u~!K_1&?n%0*^ry zYk^Fbbc`2vNQs~rWZl%E+!M@Vh9q!QBp!z~#wOCIu~na72qxL!sA7;%9R@)We6alz z1?~~vsD?ihW&@2T>f+owY9J-_aUcbDmZ}b;sHmT(Fa6QsYwq8c$drF{!?p1EBhHa% zUzl08v%})<-a1rV))AF;DkmU-rcwcv@a}JSAXhoXQx2oF=Ww4GVof071~C^1yFqEH=CV=(7Bm_x z*rsps;y6@fat6*-d!*%Y@({sRNywtC5-g91{z+jTGR-SUuV#v0izxYjAN0+0$@|;# zOIjpn<5&SF3qzU~lGe&?D|fY3LEmThoF?-Z2Ype$R&twpi@=$K0U3k7YwtLAIR<@L zFLQ?aGD3Yo$w{{g655Ffy$WQtkd#>@cCwh-Sw!tDoN!7Ef`yA@BUo5Manr)EN*KjW zn?Z_Np%&E`w{)PE4rMq%7@0hU1BKyyWJtPj3fptaNa9d;8)4^hZ79ZmTM2zhx((oA zp$w0zN5-cBqd2@6j@D>1|cg5TR_+h!X^M=1Z0vR z4T36=Z~!bzrK&7VT4Z=)@d`>-hH5s3*B?dM8l0(=Fm>+3k_*-l(6AQ}PYEdWqJSnr z0Y#@1o*+dcLa3^eR0SunT56vXgL3<*GU7SPc-sY788lGw;)TXb-6+Tes5ceu}gflCsq%I!{{!Vy&{S<72O|Em6 zAyrs{doZEI#;k?NRP?hSyD|ySCItQn$3~jrbXC{J*_8>eQ&uKXxHL2Bl$D9%-3TfD zk3`j?c!s)9S(*GR9wEk93dCuY|2;;5co>8k(1(PT7m=h7PKk!vZ0Ncs3tFSFjX8VPM^?t@L+5T27z^DEmxQ`wl*r*2HK{`M2Sm> zK&bSt*Q;rIFOC*PT4hnBXg#shEKaJqR`Hw7)sGE0I!e43E|rkS8(cNT@p4H!vs}8U zO7vnbZ%s9ix5Sd}29pkXys^=arTI_b@iuxZ`S`?+niiXJW8?YomJ`b*>{{&0aO35hs0=XU6(I_R| zu+>UjqE$*lP97<5sLvl7p=_;5z!5-BUTwg+Za?M>kTN=0Dy?)i* zJ6oH!-@SEwPp>O7w4)myo#CM!IC{{z47idUuItrcG6?_+yZw@108el0G6oMfQ{!o%Pk|0e#_F>YD1t znVOas)7B8RzFLO2slu~mSfRSs!OyFn=X3Xw8)Yoyc0E^#v&hjyYhb>X>+mzBwoqHr zt5xaLlv;|n4Mn@wcX|w|<-+*Tc;@776@U^MVV`>Jwsu6T0S0)x;F_2!+>_ysBMz8kfdSeTg%GsgJkd{FTLPpa+ zzM{P&*p>CcD4`LsV1YunF|DOa`xM_}k&hCD^~&9kY4hiW?28<9<2PCcWb zwyT+IFJ@Dhh14_Ux~lU4-Y47pLgmR*dc%ILfCquP@%OdlFq8VHLKSukalTELblULF5GxaQy$b7oJElPGMaw0i? 
z#Xz9DF|1dqP;ORQV~cgABi$+4lo-s13wDV&TEG<$(>f>$aGMZFzW=UetepRbeL!ocblKVVPDE76q z!HWIjV&uKB?>s~U(M}wy&X!^DLqZ_D;D;r~&bsUZ{7;*F0vv}^Xfrroa%vMfiUeID*(Z7$}4GkNc14qi{161pgGBwk$wQX!lT-Ui~bBm?k zY(B?oPTu>i+k1A6Mpv!~HHS_9zU_T|o4ec#hL^2|N9X!Hgn)TXW?AP^^6z1kKlhMJcY?355r z=3N6D+a00d-Dn`#?<7s3#$c#1z;nJrII-Y&nQa@EbqCcf$1r-H^;=jy6Dm#SbfQ5U z=w6<0-Y^&|ZvK30_o|}J63IJ#g@DltsZu=m4!M=QXBKajBopCg1RJm&;}3Ztj~t_K zgd~7$0bl=_m!47Z*0C~whz=Yp!<(OU9|XD8Kd@_Pct?-N*SBkU@vf5nR>9X43KV>T zG0^Cb<~;^5xbWc0Tx$9CW5d_2Dm1OWX1KV#)#WUX7yDK>n%!;VaBODJeE>c~ei`De zVhwK{D=`>weds(yAD)A@N(_QqAC~cri$=bg!CIfuiVm~UA!4kh0&p66LZvg91+$H( zMHFz8M8Qx0bCHJ9nM7F5sCEMq08&LGtkn)#%?|0=L`>AM)=Q5dtd&GyOH~qdf)!Yg zMGqM~P@o>8Za}onVdB=A#u4f|R)&?R!m%=}GlRN9p9Xc=4NPUT9xks*HOt-{%DD|q zDSuw$|iG&*e%bs2fH#OtmJ6jBYgNKP;!;2@ezf=SXl za(yvhSMS2)7)D)5$U6dj*CoW4YN+dE$uP7qIMf;(8Va^jDBkFAOI||X*ccNsAoF0a zCkA2LnX-)ad=W7 z^`L^X4l;*l0omRQdO1E=uB-bA5t*?Lkx>Ydb?DhqCD^p4@;{OB8mKhtzfhbtAUh*A zW|T4fh5Y$1rtHqW$P<8qAA41L*(g{ffdM;J8jR8C%04UTul$C@6&D@3i%dmbdeY70A zp0+$VBd;zRttLDNnpx%`Gh=M@l2W`o<%B|jR;zW^aBI?22wR4RQ!Q4JH=4i%YtZW} z|7A=I?Q6UJFBHaGJvxJdvC1fbnV#1h9C25!pyf@#ZWLvg%W(?;w#*QLHN@|bNjRd= zd|vg>4&@Q_W-Jy4cJ@P%1`HZV_eG<$;050Mp@=Sm;twuFgPD0Mnz^qG>+|0Gk<2ds z;3H@-qr12u9t+pQv82CeeS6=A4wrlQn$@O6*ri8~9%%g%vuAtbM&MbL>{yz&-jZ(j zbUS%>(&CSXLASlU}9KsNIGBuw?Qyj$|8KjC^h+ZI!$U zCF2drLynl;p8#G|aM;TwJz=H=2}nof>siF|klE*8F@K%Vv1W+ELa@(ru+Nas zVs9;uYaHA}n1~>_3GGhsRd*-ko&XOapG9z_Br(ZmT8Mzi_icwf-Jtuvx(@sZ&)JyJMCQCbTl#u7mb~V0G2qVlB}d!3-tN^!`%ruT z_%~DSt=YzQQ^+BRf?##r)0`G~WT>h?%OHQ-~3X5Ex zr9c&k#{!q$(3n>7)}Zg%;#}%{&X-`V|QLOaj$3S z>J`pH+^L6a2Av#8dz0O9Bk5h(&@rC3`nxxD^{(%5-YR$!PJ7(Nv$m+)-3!)@+_be- z!|Dwp=e02`135b)nOu!S@l=)hTbf7GCefR)J7O+_h_rM?@@jG`?IJZQ6BGfTgC$w= zF!&5@gC%CAq@Da6X~%UeN=Xv75^#xCDKU|U$qw2COJt=a2kRJV$nq+iQj&osN!-e= zl=xvQ9@lXwB_UWRhwC_%k^n66;u04sA;7uObN@u`rW*-!4TBlaJzdMDj(2<;wa9`Y zfQ4EdBow3*I7v}ms8G#}g1Q!m*H3dMr^(`ADJ^vq&6yo$vqMj5wK|=K(xQOgIxVZF zG@OZr3!H7lD%EP$B0@v(gcYuUB1D>KCc210VllCtSWj#x_7GPQhrj}iX?Kt9C{L7& zS6|nDUG%bj4g0(swgk6mhZg7;5T!m>Rm# z|Dydp*Bl%;nA^F#X}5jVI`=wh>7-?nZ0%HcGVv6b+JA8Oy2;K|s&jJP?t}X^;cc7z zVIqAxeOj!hDXk`zJDt1u2S6>PPtyXieeu8de5Ek#1N*0aL_CTD{>DN+7g3HzuAs zPvz2$XNTa~-I+{*^eUAq8hG*+YWBNArjUXsaI`qd)8tF^pGmd$B!R~F?c}e?Z_$5( z*Qc;;Y8Ckpgi0mZr$ho{UXv_bHnaQ5sy3 zK2uF{WaP7+?6cS-Y>cC$Y|;uGZAAJS|oW1~vY0q?QQ?@4ITo z86(I?Aef7HjWsN~VRgQ+_Qpls`{II*V|8}cKGB{YYxP~W)zzF1=>@Zn)l(B*mtJGF ziuo;HS-6 zA6NSGG z=T(qvH87>1@U7;9i}>bBWjATnK&1am8nA9zb_!}$Pvy;aQY$)T*~0=FFr*GHmMw0P zAt$!|lKc-&(2;=Fs81fJwQ7>miU#tJ8l8$HRgC)Um0v-$q>Ki_O(Y=BzZ&OTuoL0@ zkHK<+B|P8@a87A}!+|4MdwBZSIgra~L(@EZ^8B>+@>w-K($b%K=?r8B5eGJw?78rC znaBYP1)K1p=`w0`N}Gox;_~xU^w@+~|A0u6?lKH@wZxO%1K> z@N)L3&0CDhPZ}Hzf#vJHQM(}}1)HKf4!RS5BXvC3nG4$uMw$GvY;@#D)6RS%z#?5f zXfgX`{Xa^cc;zW)!o|PAxnihSm61V6a4zeqVn`$SBJ6NGSb0)sn>HW{*r(N(E0!<# z>d0}|V1r`-y{FnfT~=Q{R~J+Nei+^aMEo}I6t;b3>jRv_!9Bd~jt$w{9jz<+RTbSuGYH%uTnl7hjGk2kXby}tR zgk_C^IxQ+nm#V5?zmR`KIYYGmma*p6H6?$jdv%k$-F}2&D9Dt6Rr8$M5G=NJxY7Rd zmhE3!kzCvv7u56wE9q5Mr!_vZy{&iqP>|!M{C?iV;wTZ7Ux>0~;k7N7e0lY{dv~=9 z7F#f+NH8c3Ao{58G7snxO{_t73P90rjl;ev=>pTgMv1h}8=-4)Li4kVM0<>g!^#GI z0J5=haTF6Awp29AV3C=Y_4150wEt!fuLp0uG( z4rCbI*MUYf4pUZ^a!1jb`wDR;>ys5F@O$Y@PB@dp57~I3nsTk`9BR#EBMH>ytg3}s z&m*l2CMNZoMzEQU4o>rDpyMsPVBvJ&p8(JZvKeJpaT^D$UYnr)1@)pvGTWpP#;Dhk z{|sLa{y}<|&OS*YHyxE)1<#+IDSz2+hF>9`eU~&yc3w^EMFZMxT%E&+j;faPIGdjW zlP5Z&rYFEMDB6p#WB8QKkGR)XpQI#xCijv;g7fOsCnl*%uWAmK2+AUNfe`a<%9;-s zX4#2+OUFsNu;h?s{skdu1S(R)LH?FGjaw|59A*>y9o}iTiEx7A?PdY-{f8*{fJXGl z;PbA&hQg9)Wpyo)A^ECoR&zFEiWUPOH^$D4P*7(bwabGd298hVwG4V}|3sR%;G^;pRl)ufDgcUXC9O4Q}e6&~hka 
z8n3a74>~g4gK69Cu0+zZXhAsdmsDpvH}^*>|31^5ui8v1E?gXG4CQ4_-^jWdVWs+1#&3_g7j%xM8Zm> ze}z=}a9&f9A%wW-`*IW8b2I1)>b4njpjHoBhLH`wslEa%Bb~5D7%cwW`ao zB?!m6BA-85=IOu_mA&?WNdf8$1|{Z>DZor&$Z@;?uLAK zn71_az+;FbPp|*Ro{nPqo0~U$V^4c=*Ee^qIW!i?PVOIAeP}G0oxBRsvKcgyMO7J! z6YIkUtAk>z4?^Jw$))q?NoJCOps)lunPhbdgSFIT(ZE&)7@lK~K57vd{qtJ8&1boX zHGs`FlgVZ`lN)yEO&+URa}c5t=4`7O{%Q!ckfW-r;C${mDNueRrh=bJ*8ezyCiTBQ z#}I?A6;-)nh#^M}T*W3DIn(zV2V&yLXrLn-(3v%yF4{U6?cUJkG38ec+y<_9fU@i` zd)xu(`;C)>ZH|^vv&{_Am|kNx^Zu3viO8Z=yL!I>pE3-5)6#R|ha8Ev{Wp(gvW35B6tFN&& zbcM2un_Q$^TwMwpj3$lN6LBBNb4S|ZJUd(v;R*esIWYGJe%6SsY1q_;7s zB~`wBKjKw4NRs2KOQ7`i62X&piS?pa*Oe)kg=IAs%@pdVbe&lPn2=GUn&nvOG%&*{ zRDkl4;Kp#<)f5N)m}ep-?4a|qv5NBMfYgV{7|br?)eTs4v! z%6rsWjfT-VQ@!bRySMHOwRKfiX93t~Q+Xg}`$E#@BDOz}uU$T0^ zVx43Ii%jNh_6?HscPxiAw88N{3CDW^&f>A-kt}}lNQ4$W_{l}OW$wtI`{Y$?27AP%} zT~Uc_Y1urQXx(wgviz>CD|*6`Mk|_~qL$HSmh5V?ghHhiouRh4#cf?&?s2*^U8px4 zAWlxGHW60Be1bB;rP1>z$|i*$sw}V8;B;VfV}4r52^~{;UgHf~?JTJR!<9|Eo-=?C zjG`)S_nXe{5cSwe_93|Rb(m08A`|eFYasm`M3g8H@siG_i$oe4s2csKTuo-o^n4!c z>u2(E&1Am4u%AS@mie}cTyxJ|U$}eAmX(vf-t7bZo4Q<+E4OXAePrRHAu-(1aGShh z!}3MT)~uKyHGB4N-#)rC8QI#`Fk1Auas$!cZSmAru)5II*(Aq9L1U?MytptH8f)(8 z?IAE9gfJh(Ag;}vkR2_U5gZOWl#dz39Cw9}vx7zJ(Yx{cEw%WJD<$fsMps2VO!48h7gNoveGgHNdPem%DSRE|y zbI!RpNlRgHmhb)M_vJn`J|)kDnN^bym^*^w_io zQ87`eQPE1-m?+HzUO#QJ-ne|88*BP&CYD`C&L_UgIAugj~_2h%bD>!)-ne> zFd8Lxg!C!o!Pzsh%0U5ng(EF%^b<`ckBNHaBIZ0*9mYedG z5n(Dr;qtJFS}rkrCQK>--c)}6WJ^=&qRQ2Dhi+-sBy~g-yp@PciX0ocI4jo>5gQ$u zH!pVj#KFJzY&tr_P~CV^Ji?x4-l62k zAB(Phij56E#bz_vvDIQ5`N<3@W(fl*!exuh3tdrQ4@(Rj5>otq~K)C4|#A zg{$G_sES@cvcw7KzmmJk8kfC%PcRCqss(Qu8R?zNA@L@1!JY#H3-lo;fVnFFD4T%O7LI z9(qrnl)G|a;mWL-c!PH}pQljC6-w#YgqaDMOLTE`!piJeL zN6@#XRV|pt*R~hw?7ekBRNLD&OoMcH2?7EF6Lg1kD2;S?cZ_rjhze4olz=oycZU*+ zl7dnqf&xPbL;CIUc+PXqVcz$9e(xXe_x7BPHN-jn-UBcYnG!^YG+kNkGMg@t>Lm4_9wk zHh4`6e)j0lp%d|QR>NoDHs&L*7}OEZ=Q*zQf3-{{+7>Hi&m8Kl*|ltZbW*nYl%hL# zKizI{r=oPzD9@tav$K}(gOR!Gbz|on&)v%DU0kTAHAQAqsW%3P-VA5GbIvqZ@cAI- zQ(xM$kdy!B9kr&W;PbI(Pc+XiY8H2#PC0FHi7VD)_BXGIRUgjxDW}YI`B-BA6l%R; zzt`M8&N2qN8BYxI?q|Or?1PbxbLcXSlg1Y@4Wu>)t&79{e|ZXsrwmYKP~0vBMS*kXv|IZy1o`E zR`ZMJTa^7w%^!d-tYi=Dtc|^y&z1rIyeSvdQyTdOpD1RqcjlfupSfLuC4sO@myeQ_YWv5(G^$m_x#(gagDHl*4b`D!;cB3R-a6fS*h=Ila*4368e3rLF&B`=T)%l0f8yu*!IP84ic$1d zx{0z`eF+05#d_nq;PsS2yvY{(ciGc_;I!lADGPoEw(p(<3Pp3f0vHZ* zzwD{1OmpwY)zMiGb z*VNr>_RSx=y@yDae^Ruv8WP=jEnZp8X(fQ*m($nDVJeQiFEVEZ^DCdTt;&#n;A!oB zQ(bI77{*>Nq&M2kEia)let(4e!?GSZyoNIQqJ-N%X_BKp;i>AA*(_uNv{K=_#)p@A zUe*rpIQ==@{*i3ezytY}9VsMaRJ`(#g!?1~rCQun$IDb`|YB^_J1X+nj1 z;)#rj)o`IWuZjD$AA_bf8oG0Ek1F6F@fWt>&gkQyGFszgkadxGcp>BSJ90`wQ!yrt z`WTPeWZfA)O%PYwS6soh&|2DvY*=B=v2b{nog9k$adD)$J}`_z#S>is_cYs*_1)FdSTBY_%dQG(%~@i`2HKNkP?VD9EPfxDTucj{J1)Q}>JD^} z*SHCDx^-8D!Shbdlqju7k4;e;nXmkG?Xn-S>x^mpr+Z=*dc5BgDqMT2bMhVzk1TtU z9F`LKl6BB@M||nXiy<-y?#2(IO(tTmJ@(36DyK3aCAz{)@hOS|=7yP|Qg#LLpo0Ia zpX^BD`4T!T`E8@k(}h`TylNuzC)ny_rRGTq-#5DBM~!rGKMh2k+xD9~MUqjbxXnf~ z-?@9@39&r0*lspBTqub3rhkTmMaV%8TD*j#P7 z`XT+R`;c}NTfSnv>2rNvc(!Gjz_wUT^*}SHFnb%Rj5wC5mg~IIDBW98TC$hMT;|HR ztD`!J*WGJGQfuoCZsZfp=W7nXvkXa`SSMwpTjmP&T6H9~nY?#5nQdlGS;5zpI#BX$ zo56!^Y|4Ixtx$n1&!jv;N2XZy3Q7C7{n1Av1gGCT{hS6n<)InVu$Gc zr@Pb=k>;w$-&+#j>h<@-`w^j1i+?xuc4}pF!7NC=z>Ixb;b2cCB`AjKTIfPXlHf7e zvAT@>fo@Nv+a{G3%cERd;1|qtAL6M{ZFT!YR$=sG_DmQ$4C8_lpst z>`>g332a8qognqT<^|i2Ztiw+z2_fe_hTlx2&9Kx<=w7&la??U{u~*)Ub;fu14ZAk zj-8F%<}vo{sJr5?q%TD{qh1~^*WO23DJfajFbm~eXuQ30J~b>Z0-?}Rn#AUSlF9NNbjGHVakdCsy{ue8!!z2MZgq&j4yn!k!LO9Z#YyjO0o zW<63Ii_9Gm<@8!3-wqfi)`Bg%^@Tbo!%@T%*Z1>c1e+>*;ly7rJh>?8KB6|6d<13TAr?hE-~;G zb_gDlDyn|NWQ^^JH>U6HBsUKWkCv|cS#DfQ4v5iNA0kRsys2^ 
zTMg=_5YgA;9v{;>Q8k)JSVE&4v5qia#Fy-W=xz0FqhO91y@n4L{U0M9f7PM8=GMHm z35SIzEnF4rkGPrCV98i?78}&(HCURKk{H){Gv9=RQTkb@Llu z!a*&PCG%^(Puhh`6o%-&`&Yf2URe3z`Pd+3>Z5zZ*WP=T$?h(=hTAX2%i2s0s`@9_ zT!AmEa;O^{&3anJFJjzt_0^fHyZA|Zu>ob zj`ht&tqF>KT(CAN{Ae#cCuXI*{W$%#F<(=|w4CBv zuM%e*Zhy0zrfF*Fl>N?%xa%kC@%PhT8%)HSD1;luvgLR!e;Aus74>0IdrDNer`(4I zw{Vufn*Y)mEZRh6IonAIJyi*zEU!GDWRdBpT6Ul zg9e@JYsh9a%V{BksU0XC`h-9J$l1Np81z83ve=+%ET$1XVGOiVYew+hoD0oa`GE#w zVBX{JF$F^l*4|Op22EA(J}lUaw46sgW_N!Yg~3)wtq~1-P&6`3Zy#JeZ(G}#z3`2c z_`7bHqj1!Knd9ttEClhBsX|^SXi27I>5@{_@Ro0|NxOLl7Z=~yieQLu<%$~94V8WW zmYgy7)bb+Uu}Qp$SfhydpX`N$vU7+*qMq2o`}HlR{tUog?)P52Ic}D<{8dEO!-)k{ zMC=NgJq_mN>2g+9Rdjlw%CpO(1f;w2MY6%L-SIIQ6y;gk_lR4|7 z4t2hh;-(AN87B7DGEF;A(0gIRZz(5^Es#IIgX`+-ley(adWOFBlsG<((uC(#BCG!c zPYyR#dbx_IMJe;9F%1lz1+=03>%253tOGc0vRZk@D@s{5UjCxrwNmYxe`xgUk={YW zU5KN1Gp9*IPo(O(2`oFexil;0gXF%G`+p{77*C}(DI810*E14&vGDokT%;bgM&%m4 zCiB~=4R{9r#1(G&8%9j6#Pi}$Mx1N2e6@g|8`vyDU#HK1@jNob%V>#F)>^4LvQWQT z1nWL||1KwF-(Z=m@@&Q0IdyKk z`PXRRKN2P6Gvr9y6w$3!qBbQ;U{PA@^P?uhjd@~#C-S^Q)U8P_=TXPQ8zN$5mbf`L z@qo`~c$-k1xDViXHK4QJBG57-{Wu%@XeJ6L<~n+`6WW4}BFBe~mk;j+-_cj??MuJr zgr#o7A#R*gY1keAE^llL&91^IeBg=@<+qp9&WHEVspD<%LKBI&%!#jfUANhq&ZC%F zJ}5}LI*G?(r5Zi*|| zp*vKp@bHYdBp+iRo4(A)KY3+bHEv{PYOXtmu3VH~5sYnUK1}!8z*9Zh?%24>GDy)* zPM^xq{1u%#uUZl|$o|F%Z|N(MbSkezE3-RdjNg@pZ;~3Djpq|#YY#^9T&t+hgH+SD z#tsaU+x)=iGjy!rSAY=6y@)6W=@~HWJF5{sHdWNLQw-8acg-cS9!au`d0e9|=OlMW zh_S^EG)OfmA6L#JoR+p!Nhp+emt$I>y4_l_sP;v3HZQ-xD}nHy0qndiD=$^_Iegi* z$FxaB#{7%7ov-tBm%o-%gNyijJDRi z^@#xfSS;s(=deW|bgBz{DF(zFn6PX+O|UfSpMRj2*>TUv2*Z?EjkykTSbzO3{cKss zdhq1N@#l+yKxYCW;4AiXhfBUvB0iI+<9dGf$r{TKtygBRZOkCw^ z3|FUbcw~$xB*enwgg!-NKBqOM8zBE9^nv{guCFSaq*BksmAG#2yrY_o^OgF+(UDDG z@U%YLy>NBo+eYbuu(9$^-ui@0;r<4uFZM_E>gT6lIZv7nguauX;55c|W;MKTs&zkM zJ{&rT`pz88aw>NBft12gt;Ji9mkt}D{^8Pnp{ij+&ul(_x5#olx%&vT6haN}z(9P$fB}TwpfApRUnlH)X2muqkcoA8qtFgFld;>75aEoTiw$o< z&n1C>#6W~%AhvN}BtVSx1`}*lECZZL6QLDR&2F`>9Q?~lT z)O`eAlE6j8e#H`JgeT*|O2a*v^SChpW*&S?F1WCd;XBmb0bENVXmBZFL@@f|x9}Z- z@EyUW)BEROh1w8Wj-^Cg*sE{>0yq^~r*pRh4$Qr~i44Al2J6Ctb)h49192Gigq5|k z!YI%dHM%nh5a}45kHf2(;9FR*5Hui80fg|o7>LgN8}&K%Uq!_qJ;QnN4t>$O+mIMB zjV4XAG>?YxN9#OATMTvVKttR|@07vld>_8!a?-R^$T3RW7@PI_Lyxk%J=qd38q6-- zLuknb8#aU4`4a~gfZiF0zUULaqu+hR)g6EXzx#&xrP`s1Fpl%b&&!D38*&IvbZMp~ z6MTd*8cXnyC+<=RA)E;v#)Y;hR(Ah>-lkJCNihvtC#1UxA6|(K`{OiO58tusKEha% zAVz3mcJ3b#E6M*%qEle8EPmTTc&87E z%5lIo5^WD$4fR@l86MltW5jy8X{`B;&G`9dKTriHnQ_k|C1@aI^S z`Loi}U*S?dm6Cc2xx$y)`oM_hi(#I&;C&-gsVHd+uA~v$rxN~x2ve-|h4qkw>nm(L zI+6E!PF_>iYy9XRFXdN-}=50^`)TsK)288wwK@<~zFT+(ml%@kyUa6W=J;c`p5^yFu%DwEeLn zQs??b<4c@*Oc(6nWvxDU*C%pTIM+Ob!-k4(7+;y-h^5JPoCVAF@{^ipj)*wJxSfnQX3o6PV+)Qa zK9QjpKL|JbQK+1GgVgG!Z1k&sJc3ULJn|X|@`UF&MSDrrlTBE7Pq;}P-d^dadq%dJ z^dy1y8_b}Yi!g5ZInFVYk|F05T+2KKdYi8Yrpn1SLXaZZkLcEttbL_IdPsYS`= zNrUOOW$U##{c#@i7?*CuxTJ)pTo`D~ho|8imx|Xrycw@F5%K6GCe!|U_k2v2NgVrF%RbQh=;m6Z`X{Mdbci!N# zKh30=mh{828{;f!BTbN|(k#bNfq8TA%}6RS_0cJ)q!AjHn97gDiP9&*mSllMU$H%m)I*NI9y@BwYWJV5M zP5t>f`IX8?TjA;j&mTL3A-UQktWvxp_TO{%S0G zbgBFU3@l9qLjDw%x`n&wY`-kv@wcrEhG5Iu!~)%V@!o-#PV!lbY8@rGLvjoaa6kZ{dJzt z7oV^nXB)))5Izu=o!^#ENtf3L-jq zOs|5$q|vkbBaTA%K59o95(B@vR&%`6ZTI-6G8_{r4OHnKNA3^RfD+fdP6#gCxiWQz_XERdSGR+2 zvpM>AnuD%woM7JBS#0u24Qr$@K@(Hp_M8ZDqRdW*%XDe*lvxc6WN8dyt^FaiQ(f zYwnxMYF76>OQK$~hVc*UW4+4@k@~_J6n?AR{n5BJ8$wX9@`}USzAELmIkXvNzIUsrVp1v39o^~U;4as@v!_Tqg1mAY(hFXOB& zDfzV<4_JLOM{`-9CzIbSEEe4K@)GsEpw5-Mh;C2Tani z=AB5TE0_ssBQvr;aI`U#uIbQIRZb9U$1M{#iF_6REb*P8!1Fm_{sk%B;6j3Q#tH%wc`;wGkY65Tt~_Me=bbYDx}7kbfz zp7gP)^9U!i3td3gW$_dNYU!mF*LXLtt{wWKSLJQdPG+n6_W((Rwu=vND%7k1P~r@zK| 
z4iVSzA~e$>D!Rh3QnIf~rAqbQ-!k_K@C&?>pr-xjUmqt1THa-uWwssKCyGFygV7}& zRt~unmyo|Odg~`Xb(1PGRp7*yl5O-ZCS));f`RB-xOa!BUNn2ag)_wDm5_aFH4IWsI2 zEAcsaHi_4pnrF;ct=+2__d`q(RlD=nPvA;4m?pgXhOk;N!lnO)Zjma|d*oQ~?lXU9 zi1!l>F&p*WbdO#~vAVVpZwf*oh~QJ^p14_OX&?FOd_HbU4_ukK!Z6+P*JLx&AZF4f;?dh_u2FHu9tFE0MNxuu*Q4h(6lYHZS zXoQYVv?F=U@NTMIXK-4@)+Q^v?4TA#xp{rCY*XVI^^?N6Z@$l!_m&-ROBo-H!c277pf?4*ws*Pt`j#nx3zGsAl=r zIQQxNec@denD0x@k0z&WZgoXtQX6v*vtCh9Yb6RlX8f_eQ#$g>P-*~nDmQRERxH4L z=S6Az+QR(e<0)y$od>n2%enSki_`T>gK-&BNBs@Y_EvL=_?GT}( zS!J#^V-9lLQ=4z@%Kpi#ExvehK6eXu@$NO7eax^xuQz4wRmz6ngM(hbT4nL!Q@4jL z1k7xR>uzy<>-3i*Cauiq7c`eW}GfY$TX2pRm?!}N6foT8N#t#!Ep(ADn zE7T;0kTsS54Mj6LDy~V-n<(BO!08C^iiPnG|l;LjL?h}G{*QM zzwLXsVQPmiR9gC!)FP3VoMTPWwSB(1%~3r9el@6WUV24@*YumKqo5{&3HuhkBMz95 zjIgH-2Ycs+bO?2B5hiD+`Ascgn?+}|1>0ee{fN9(;{E&gCmL&7tFO4u|qowa#zZi&q4m(33Z0d}AEb_Kf3;cX|M z=LKA!6*K5MFzu&5KWoF^-opK~(R`d_sI%M8r|j=8puTwFbU>uYburBn@~NTt;>S&k zg)#k^pAAogFP`l`EtkIVeeXAL)LDO2hx_c|$YAcLfV1x`=8{JnA{=uB(kbU7_#+pe z6+~E4+U>6&4|D`|Ercu?CGJvW_{}_;Qyz@TJ+3(G@B+52z6uyIy?vaXT2=ya;!*b; zuveyTOP2bX?)<1{|9SRvoG-jjwJb3fKV-Ywr_Q{l`_Rs()igH0RviM*9&@mR`vmG7 zK~zO3{C>`S+gmIT|H-wWGcg5Q8ZO;=toKU5U+O^9ZKULw%OcDv=s4=Aq1uBl@MJ8g zNjlV{w`tM1O(rSC=Je<|fJe*3BSh1rZX@O!Rf#W@1vHuzwzpkGZ1bv>_Bc1CYjql?iKAk-qy{8x@!JHdZ zcoYC-8R3%@y6q@QIiq*J*T2rmRO(dWLn6$gvGV0C=*Rid$BROX5`?)_?N1t(THH~O z?G)+YuU|Fa>+=c^%XHC(4vKKr*^ZvSF|0Z}xsW+ntl-}c4Dp#r7>>GIaun*d|KZL% zeWJT5Z-9@YOeVPCYbB>>dD?eD+#5Z%rr(833uj4qNc41th7=f~--=MY=v985);q_n zpw5b4nztw7%tXI#K{}=)c!j47Ka7z(1p;@jhsa0vyDqbzwFs=;K?qm$J{7e%eSP>o zpxW70tkZJl{^OnY!#V`}PuG~J=9m{TmQ-c8rz?`?ewN_}(9M~Lu9~ShX}#R_4_&Q1 z8{7(A{c04tsuUX0OrDPU8mlyc3Z!yw6Q`j9m%c+S5Ki5V47M`m+^NR+yAGw+ZZTcx&iv&pP?|w-m zc-_Xld9k~clL)Ps>AX9t%+d)zA!uTf5?b$(affYoU~L|YK%S<+>hr%H-cQMF>OA}Y zx!mpwg+@S^k&NG2cOx!+29+SM7GHL*=R9kd*!EY+^Yfi(8DX&>g{OzJjhZc6nMYi? z-??8)82-5Jf#op|I#42?*%a(Jra4Xs_?R}`Sai4NrOYRpYQ&nWZuwc*2a%l@#gC-! 
zr$&=^-|p3Uep3xg@nOgY9vcV7yC-=E@!{PVO0uM8Dv^2N*A*XdV#JQ?=&`XW>OElN zXx4Mmxyj9j$99fAU+}H-e6>UB)c?)t`ZEciozbl0bNkNy)3fm`>Gze%cbH~z^%dlT zDx%y~Ynf!WK>YYqU`~z;qX35|!~%)xS`Hq5XlqLerh9LWw4B8}1?>EvJH%?(*I7Mc z92PN?-(YcxMeKhMyTi^p8EQ|)!=_z0ZN!mhI3#42!`RGL5ds>G@)3o?)2IdxeD&ZB zhm8)W?Y4_NG?^a-C%ijyN66;=6FRth<_tDhUcx{{GhCv_u*xDucvP8m|5~CdIYpC2 zQ@j*kt=}CsuL=2=Z^zB~PGUa;U&UH}K~Sx6unyZGjB;tqbl}%4h3b%+)SM=P%J zFx(Re{*0f5b>yhZ(xFk7@~E*dUyopw>069N8G#$gYDUdYI*C33)w=aoKaYi&y9eZB z{^JMt4DXJq!u3^|1=p5^$ZyAM%8RM`#B+O}KFx3u4s;Cg3C&IC4)Q#pgZk$NIvi}< z2dPu(`#q>nCc5aBY{7T4A?)}P+?ssqbnC}sbwY>6RZ!4Dl|X1I6RB#_ekpoF4nNw% z_YdjMFemL!X-0nR#GX9CV|#qN{;EVSHH4jT;U%5+3-;rNe!`b%PrAG@9(4&n$BuEr z)?RT{RJKzKr@gbWa?@pKX?a8cY)k^g^3#)2Ccj&NsoT}L6FUvlRwt~RJuGDGS~AO< z(_{bg9go8U5iU|bGWpauVrwG6w{liZIdVR)GptH~Q(}=I=Bpqv(HQ*t7L>H=(H{AR z#9KYs*Uapn*Q&JD`WixhGU~BJm>12g7tU*v)eMb&+!upkO*(@fKnNev+!$BDf2;m5 zmK{>2r{?y6GDGXi-kh*MncCs@k8U5#dwuD~IOM{1D{|EadiTEZ%hBn0TU)r~_%1rrG5xuiFMFZ!VSdNBqi$2|%}{BH`A8ee z>0`)#Ros!(U0ezEr@)uZ#}Ctm$LE|_o6{0vkUzXT*}7dlOF1W7Sr#L2*=488_L)>_ z-yz_o_HG(p&uzDIwevg;vp2b|(gD#|aE^)N=!r~=GoHwmy1b4Cr5l8IYM=)IA$kE;Q>N8+1;@wDFd}-go{pZ;}bL_u$ zXyYBUyuICTod50NAmupq!^1((wZnyAX#0#whgd)zc4WUybt@NK`Jr(r|3Vv2XFi5Pf|D*oH+`c@ol8U1&6hfm-0y*)xKF+QmnxG_ zyQU0E9j9Y23Wha8xRajz*evv;{$|pn)=aQVW+=G2;6lK)nGg(Cqzj-7v>wj(DiO~&h9H&-Qpc(*L7xZH>X>(E!`zL;7+?M7b0Dz*enuL zdq?%fQNYU91ygh{Y+uP+sx7Rd)*BbG-BKb-{Cey~i1 zSb)KZ#d}I&B|9ELrxLG-5J#*zx}40<>NEJElfy%|3Flu)C6CYM*Mg=~#b19IeEd%4 zxizK)CFw_mwL_e7T%-b7Zca^9gr`Sz6vG|nPf*&ZRZ}jJ@xdIUlJ&1)k|9tA-_Nsk#4iyBpy~4PJC)&Fi?bq_4ZSL5K z4sA1WT{!`F$s+}}EJ>+J{qcbS5RTzF&(bZMUiBK^s_|`STFnkjR3M%C{vlMJT2&Bc zqUTel?6?Bzy}$(#=58Ig&e!iZcyRNVnG|1ZqvjxK8J`!jMMT}`psP&%I{Z0_Y1KFL z3kjuEeMqXh$pe~Lg_NATM>IUqt1ngpK0~9S3T~QvyxXV zZj6OJ*Av5t_qI5!>4;3_Wi(vRz%Z+74ti4C-~gH+jmjt)XP)`!X1MQ3dAc;oL{(yD z;329x;Mn8l3~R5&7)xTz7`KjKA3%%s8@%WhgC_GmJt@M6n{3_%A2XSU$6VhXM8EkB z(N3LTiti@iH*4;5SYr@BDV8QOE%y4prNIJuA&uGj_4l!nYw%g^kv7`ME|GAvi3(~8 zSqb)Ad-yR_o6H14;>NCZn{9533$6(EGS*V}Q%& zaJWYImY)99qu|PxnMxYB7Q5AnFRh>6Z@T)$=tnd})a}XK*}od(V6X}<$15Yc{^CJrdlaZOwCut%e$O-eE9?2)uXsqBJUn&V1(WCNj@0y^$2;^ ze!ZsPt)E?$a1=LxwBEyB6WMZRJ>$q*s*Wla1Ejf>8yvww>uI;Na%d^ke*wY*aMt!m)zz-rQ>jHYT zM|RduUANj}E>h8rL~boI%G*%ebgp!~`Y_19vX!cd;ZxK@N*S6Zptqp0TdaSJY2|k0 z&maK~L}FmVZoJ%v)bP{&BaX5DXzI;9qG10t>UGJ)wydIO=8G@&HN!x*+LF6CZ4~f> zXH99_;eA=h(IC~%A^8|(HT+=kYr|yEr%rA{aj|A6d}Z9{-RDzW0tXpn`ogi*Rr!rP zD(Ov(#q-c)6`@C4S@x8x9$i$diIEvUhNVKB16meOd*y8D$v)ta=Bu$>hjDD>CQxm# zX-sZ_-@f0nj*tto$`KsVnfN5VocXD>d}W6H# z0rkZN9L}Z4Q2HOap0PHSiutnp?Qz7^%DsH6$BEWCK{)t;GxPA6iTu&Y*aIsp49)I0 z9z{;HwbxcY4jiuD{n&e9NI>t%Yb6l3NZ6-!_3gSNlWKEtd4Y+LDs-AhRFTZwVjPSUsdPKN*0?3xwaao&$K@id=Nn9?_-~UH-R1~Qj$(; zu|^Y`D%C4t((z@0cNF2m0~!i&Lbe^Npc%*-yNN4MMKA4e#>H#{XoQ#a|R*^xLC1 zIWm?2C#4j4pVAB!#&TfVEE*Wia%@%!)yGx%RiQ2B_%NsnPzyaQD0K9A^SCT}{^~ni z&9^1VL+Z7!Lq=I3ZSfDQX$6}E5u3AzU81fIo%a6Wu+VYJ)Li~|wOfsrzU}E(vSh0W zn!xRK?vIpbtu3PiIltlsH|jkbxt-12BoUD3fx(x>97<~+Ua^a@h={f2faU@0)XKo#*o^q?k?UV9nRVDWQUeTPuFds8 zoT=7UrZ})t_RPjD2ygq{6XZXFRE*|=9n_mo_Q$lFw4&_rI{c)&hFHg>_gsl*1-4AC ze4DU;b(hO`X__8Z8hz2|^|c!uQ7_K6m5{3H@u9F`(^e^~wDFY2u}ET;2;a`#*2mY| z&dTjq*2CJF2p=pA2C;%zf8|YBK_;w1AQ3ZGR;URp1R~5TBq9bJfJFeapdhOd6bc+c z1Wj0l!9qxos0pi}kQl2l6bu{*BJB_)LJ+`1gdjjp7y=+5AY=}}iV6v_3JVGYN1{lN zU}QyN$SMVqrNAH{D=3OILWGc>f7J~U`Q_qQ#iBq~2!gB`A}S03#6*EC6pGA>AX@+x z1hRhspn_sR8=%OVp~A@K{E-y_c!`K1IYC9iNG!58L1e{Hv0og($Sf3Dg&-8!#$UAw z3jQLBBqS&VuoQ$sfef-qqC&`y5<*rZDgsmv75&8y*-$75nH7To?GOPW9RmD-vOxF# zcmA8Pnz8;PNZ|hp5;8Ezm|gw{v;T*3(&2|=pm0pz`u@w`1~q`Oo0Fac_22w zEC3fVFft~8l=@ZwUjg_>7a?2r>pYf}6wnLuv=h*@^R=?I^0gAs^|SUx8jLhG1T^e` 
z9}{zAg@VN(($Ylue~t%9nLtk~8y7oYR%<&4CwD2fowhDERwr92HsDucK$@Nkc8*Re z!QOVd!CHDY!7espwrn!el7ZrZZk}#HHLQVduI@hKfl_Q%wjS1Y;y@m07GPuj1@U!} zVuSL7`9wfqHfelGZ(DnDotsL(3js$`Y>vLZp5g)m0RaL00Z@JqZwCRen3$LV2qFN1 z@Bs)upCET%t3WIHX z#=}hjctHTn4;4T<0`lUwy*+IGZ0x+HHJxm{J$yXueS51|6$5STT!GZy!<+RlJ204p zB$4I+tBj-ovhKgN?&|}%2mfMkYa?z0Tn~Lcyrq8yMc2;jkBqC6w2GCxkDdDsBzZ?GJ6Bev zKbQiq{E_*E75KY{|HH-tr_f)m{YOs(T6qF31^sF%01{XDqp5lx9c`g~dTa;t((&NK_mIk`(wC@>fYs4_hbup#M(EzajtQGvv>~ zWOH@81KM^jV$riYCmGQ6rcrTzSzY{iAegcTrQurQyJki00LAVe63D=CUWfs+}a`OmUQqJTx!-N)C;-Nx?!#M|G>q9D)uf8l}uf1wW6|Ec6Z z>gE6GuK($-|ELH4Bk=#6UH{Wv|4|S8N8tZCyZ*E8`a|}Cdwk$#@vljO^nXn;WCfJOw2ghq(x=!rQpmMtwfH@ddN{mkiVwz)B%PW117K^ za?PahvD6-oY5}u+8NkS7A+VCW5gAj9K|N$`M6Pb>bR)*DmN0z?-gk$B*GjP>@^LMX zbkrMbp^Av9+N;uW4e+3fWIs4|MVwzP`t(pn4OqChcoyr44b(M(53cAM^At`6qcd48 zpQWC+3~`NPs>M7UoT-8O5Ae?tSqw4HGKE;(V!U^^mgI0KRFo8Vi9r-EZpo4{uFs41 zem@z4^L{T_lzy?d7IC#RdDV*z*s8wE1Y=q3Bk?33A57V0&xuU+qSqRr6J>H();PnE z=@CP4j}HVP==i&j5n}v|Z#}2ZacEZAOz)|Cit+b7ZpP>utoCFNO)Q6poDt6~Ih)40 zcwtrzNj1}nEE_CD&U-Oz_cJv!1T2FW;zD-Owue~YtjvQ7fs`prJAsU0{kgkbk_lXJ z!!zu#KHWg7H%m^EaSpo}&xX_k$*Py^+M{N6=|6SLz^Om=cfoI+F?g&}w8sVPvb3lC zT)~9J|6I0#*|s?8_YsC}s{DM{bs%uA{yQnxnV1Cqb@aXjuV^Z$2AA0fU`Z^l+oZ^m^_Sc3KM1S1{J zQF+CpAQR4U@6QB))&C8GL|R_4c$fV*2v8uL1E%WopF5kGy?#H-q2?pKopp5W`z7r} zqQRDlLN5@`s+Flm_%4`?v)?z9GS$*sBbFPCT|B6lN&VhZYczZVOkv-DBa^Pq%3w5N zt`_ghplK$}zGdiW9Cj@l^`N{FbE1{@jfjC-%JhDS5&ekOor(ymS`33hH3XqSkFya~ z`D)!Pnv0cYMe>_v0)*uBfVdIMx8=lHJO*nD*QBu;?#Avfv-l>~$}l>MUc+4~*Qm-` zrp5u{S>_Nchwss5a3L5|n{~kVSgBfE;vVT)hEppG#_)45QsVv|1Wsp6pXc@a-OHl* z$}5gAilG(SGqj;q(VTFPRS8kD#l8S9^3IexIQtnX*XoccRmqZXPAt?5C-lCLr||f| zEjaXya(z|OG@by|s8z_oLD0HPbiiwxl<3LL-?P?CXt;uJnz*<$gW#*`-Px5ejVkwI zs2cKXCKFk*Ux+sGBG?{cYGw*p)>?>a^J3W^VA*A5?p8oxGWYZa5~i$pASjrJC=k$# zNUmMVi+dSx!HfG2yM)h%d;-Y?miXEuGj{3C2V~&lpZXaPB(aqzTo=hryZoPeP6OX; z(&2pXLZGp%gdu2ImUUq(zGCW#cZ1Oa+8PM*0c0{x-bpFEtWHVMc@qpIsdcs>It|35 z>EDpgH|R8QQhRJc%j(8Rh?x4XqhH2o0Vk$W=fZ$m@il+7DV@GakdOwEE|8q7TfHCr zeH1&xAT5c^5B&`Q;NI5?3c39O|5eKO#<@Df`^kFnZy!v+^UsnT+bQtHoKd%i-mN^UoU#!|w>!Jm~n!kGzNIu_KR@eC5lYjIUg8cnI zJG)=-@16a>E8Zh2a)M6yo*kCp)GNZcMBdqRTQpdnR-*gq$;d-8dS3Jkf(V=%7$kZC5xzNPzUTu?0*bw7p? zEdem(A_r<|4f=JA$VUK;i?gf6NFSm%qT^kL8ex}P=~vu;V=Yw?BUCFa9Vf7~Z$$IW z(#JLYM=hCfKj$p%6DwWUWWHML&iG2W9qfw5fFWGSHPNj`s

EV_gZ*Y{+zS_1#R znOa=`A@f-ZTpNq;QRlTR#Qo?99OB*yQOdX_PsUhwFRadlLO2s+qKzkGXN<>cU`{-R zC!{m3_7(j{b)uPmRfKeCJj>D$f^58}R1~{p75Lh6C@}DPTlifRh1(ay?&q#Jh_b9N z%bSMt!qv|({!8bk5L8wD9&nd4YQyCtTAc6Hrim_|QdND)&FCVloC}Hbp1j-LTg?Ok zE6fYY%3dh-6I3Zh&N>16=Z1A4A|3 zffcIuq!~C*?5kj=on^lEgs|mYTF!57?TG>KD;H5myB4r;l3f{CQc8sZlgn(^EvDp% zo5iuER?eC9#m))Vkj#6<$;cz==Yhy%rKrHk<|N~yD4E+F2?;E9{E&H36y{`eFN3}~ zmfM^sTevuE!{P&g1iakK0E)Nty&tp%Y5#2;{dr#3EY}UC3j98u|G!IcF&D1?ul00e z*3z3{%@of9f+Wfi57XjG_Z$77qbh2etAajlBb=`O?pb)H?SY;m2SWq}GulwJwqae5U+)#Rx~; zrgTE&n7(hXp*p3g(1X_VI}hf42G~H;>Ji;YrY7!U7E|4E3}$0E) z<)kIh+s>W{AB3E52m$Hu2UL(%SV#z1y+OYA`+;09LOxglM}L}xFWVr(!0OY#a6-Tm z;=gU8U|_xIpEyA=5nx*LPn!@3xo-9^8(8#GSs_8>s@K18g2KRR)xT^az>?@cZD61Z zU}p6X92f)@{dZX~NDzG4CUmLIV34rjzxxFY5*7P5H!$!n_rH07p~9d`ZGno4UGgU? zEPTnIs3;JzfAJ6liCiix2E4z12?r6pY!d`sszXc=g+Gx?u>_XEME~6u2uK7tum0IS z2pEK1$^J**Lj-}KU$Tja{JZagLIB@O=P6Veg-ry7O%#OZ3jDjTgi!Dh zLcv1_1rOvlg@5}KM!^FZzn5_+cmQMOG7bd~;2L%rhk^%i{kx1q!2=lQmvAB|cmV6_ zmvJa~0N0$$I21fYQ1B2z!2=l6m;IsO0qh01gcC)|wZ!L%{>MwqM4f;30;BhZqVT!1eyJKNLKGJs1DR0UK6O z*e>$`R1XF2G7muYP~a}}08|eJ?lKQR2~prs@Bnn-5;s7%F56J>0HfdmM!^HPXSnPS z1rOlf=Q0ij4?qVm;eb6imu)C`Kv3`icHLYqi-HHRGv_i61rK0%&Se}59uO2ffIU8! z%0f}_fTG|5Y`wZ%76lI!dJfzXp(u-j2NVSlC<-1Z^c)P_`&??DAPOG9ZmG*S6g*Jq zIT(eWgMs_R%l=UCK%wVg6nYLuq32)}dJYys!2^Y!gHh-?7=@mLQRq1sg`R^^=s6gL zo&%GS%l#sZA|5F894w3?9w_u2ER5p3LZRniU=DF9Mkw?gj6%=BDD)gGf`SJMJqL@R z;DJKV!6@_`j6%=B!2RpwM#&=(2u5Q0O@@gF{irWgZaF<#+%yy~|}$=s5(8f(HschoI1N2nsz1 z7DrLki-HF*x4+b=MWTn4nd*k$oCj8`$I84Q0O@Xg`PuD=s5(1o~kJQ%yo;P^j;CX}R4W2i6 z-r#wI=MA1Wc;4W7gXayNH+bIQ`F%f6nM3fr!Se>s8$56DyutGZ&l@~%@VsGvp4-rB zZScIo^W3*q`@-`E&l@~%*q=9e-r#wI=MA1Wc;2u-&j(VqHhA9Pd4uN-p6C7~JO;)C zo;P^j;CX}R4W2i6-r#wI=MA1Wc;4W7gXayNH+Y`gBJ1l1&l@~%@VvqE``*N|_Tl;c zxoK^O^#RZCdj?Bic;4W7gXayNH+bIQd4uN-p65Q)@|-(7@9_NoeD%5B4$nJ0@9?~1 zf8ODFhvyxhcX;06d57m6p5LF*mwMrOhv)Za@3n88SNA=xr5!x)@ch2Zwf3$1!4A)J zryBa!_g5XB=S$nQFFfz?yuu4tU<-`GDsGp5Nb5dVZ}2JRk6U z!1Dpm@7Rg@7u@9$VYkK2Uj6P{0aKH>R< z=M$b!?9V4WzZ1X8bB5;=o=iKHI^9j!n0T%QX(zq72$YXHwD zJfHArPk27z`Gn^ao=R<=M$b!cs}9zgy$2UPk27z`Gn^ao=R<=M$b! 
zcs}9zgy$2UPk5d@uR15;`Gn^ao-cTwfWGn=3!X1{zTkPHAMy8U9Sfc>c)sBIg69jK zFL=J-`GV&Q`|}0Q7d&6^{JziY`TVlr`GV&Qo-cU5;Q4~*3!X1{zTo+S=L?=Mc)sBI zg6H?WZsoOt=L?=Mc)sBIg69jKFL=J-`GV*7{gR~)c)sBIg69jKFL=J-`GV&Qo-cU5 z;Q4~*3!X1{zTo+S=L?=Mc)sBIg69jKFL=J-`GV&Qo-cU5;Q4~*3!X1{zTo+S=L?=M zcz)j>TIMM{U+{dv^99crJYVp9!Se;r7d&6^e8KYt&lfyj@O;7Z1(`G)5ko^N=*;rWK=8=h}?zTx?X=Nq1Hc)sEJeGh&a z4|u-e`G)5ko^N=*;rWK=8=h}?zTx?X=Nq1Hc)sEJhUfSF_T}}1=Nq1Hc)sEJhUXie zZ+O1p`G)5ko^N=*;rWK=8=h}?zTx?X=Lu}DuQNRV8dd-FdH06r8=h}?zTx?X=Nq1H zc)sEJhUXieC-9-Z2Jn2t^9|28Jm2tq!}AT#H$30)Jc;|Y4tT!d`G)5ko^N=*;dufx z>T`kT8=h}?zTx?X=Nq1Hc)sEJhUa&-NI4I|^9|28Jiqfs>ffvHuQohC@ch8@1J4gU zKk)p(^8?Qlep2c<@ch8@1J4gUKk)p(^8?QhJU{UK!1Dvo4?I8cJOMEEb%Eyxo*#IA z;Q4{)2c92zp5Nc7&kvp-cz)pdf#(OFCt4nlThCVqo*#IA;Q4{)2c92ze&G3m=Leo2 zc%CqyS}#05@ch8@1J4gUKk)p(^Mn2Qf#(OFA9#M?`GMz&(5ZqQl9qOo_Cv*>WRL|26byUyO4t3Q1dD@|l>Ur9sj_Uax6!^Th zat!OEdY*PzAJy};!}_S6rybTu_56BMJZqQl9qOo_ryc63o?maO=Q)OXRXtBT%&Y2o+F@Q*&(jX`s(PNd zFZfYCPdn66Jx@E-Q9VyP)KNW8JJeAUd-lp!Sf5AU-0~b=NCM`;Q0m5FL;i8xC}4G1D;>- z{DSAmhs$)q`he#bJip*M^5OCrr~{r~@ErMYna)@r@ErMY>5Fv^&yf$8zNiD9BOfly z4d*L(o}a;~>k^(LA1;r9^A$Y5;5qW)zUDeV?=_GQcOf4x$5016M?PHoq7HbDe7N+* zc^94|AMQdvT#lg*c#eFy^hF);{DSAmhs)oCAMhOca5-h84tS1yxb#IG@ErMY>5K7z z=g5Z(DRrGAA1>{x=g5akyIKeG;nJ?I^9T0l$cGDgb)DZi<mc>cit9Qknhdv%>7A1-g4^O!E@xp5F*<&yf$8xBOV=@ErMY>5K7z=g5akU#xTN&yf$8EeX~K_UFilOJA%H z?9Y)8m%dmZ*q|AFJJbQskq?(G zVqG7|hfBLUuaFOyc6B_E50`dzeIOq$+toU+kPnx3r~~_R!P_UFil%cUFC0nd>Sm%gY2o+BSFeNhKI zM?PFGEn%I*bL7LNFV;CcM?PHoVtv4K|>-h@#aA{Z92lC<4uGWEkxU{S1E9Aqa zT^$eP!{xFl>VW6ShfCjj-bFrK+SPTAe7Ll$5Fv^&yf$8k5e!n@ErMY>5K7z=g5akUyKJl@9-S?a2N97atz}E&yf$8z8DX9 zj(oWE#dyGTiR%FT-w#~Kt5dB)jE(5 zmv+@bSm%jCV3G(66uFfmu!=+svj{(n-50{^}LLKlN`Ecor zI^g+$=g5c4-$Nbn9Qknh$Q5;9e~x^(^hF);9QknRi+Kglkq?)?^?f4p;quXOUFQRy zBOfk(u|D8A^5N1K;{nf+50{@l!+5}R9dC1<#QWm#-CIUcqzZ!=*3g6+A~iT>4^O!E@xpA<|g{v7#m>5F-V{Wp!p6uiD)*W=%gIyEB8IkKI`ePsb8z_w-z{Qs3k?{ZG3upMUu2=Z~LXzIglg z_4hyhzssFh-~R6we}47tw=ZA(?aL3Jzx?vM^%>=SzVY(KH{ZPb{ri_M{{3V2 tZ(hFmzdXCwfB5|I@2`G&Qm_B`;p30rfB8%P_kP`;pLlNRo8w#A^70#?rsV0?(Q%^a0m{ATX1&`5?q7qeBZsh&+c>g z$Ns;2o@cuHbk)?%IbBt6_uDG+uyD9g2vA5+P*9XmZoE2WKG0B5-{GO4aG;Q2bR``f z+{_)^4As4z%w6@_yzK2Lir`=v3ZP&h_W%F-Z~O*8>XS+^pD??#AJMZ7&~z`2VvKi! 
zeAKZop=|_zpl!7LMn!VGuoE{yl|a&KS!ua>Rj@1*)#Izx7MCFLqxCV@!rgY~Se$0Q z`s9Bu6a^#eU95Mh5{Xtoi0AF2N$tSh~k~V6+Cbw<5?>gH_`v21?yt@Srqkr^u5-@ z`Qn9il$c?NX(rodxkFFz32%0n>${XU>HhlsK5lAk!`qh!tT#U*a|0hO{MqO6UV1mo zCBl)HEVM<>{NElOl%%LUz=0ZeA@dSG^02UxfGHvG zDK67NsU8p{yN=NuTTDZ;(L+y)sZRV2_Df(_z{}X$hEVLu2<81QHz*n#SCG2JvnDM4 z+1VYDiOwZW+8MM1#`0LWTe#2sBInKE(G|~B-BwX3Ke|ONJ$LoF9&?&aix?TFlr#)q zI5SvhND;Vc@>mPKAfa|%6ZWH(uizwkIy-PFt?W0NXaqpvY&H{b#MQ)drPgo6j`Hq_ zSVPSUVEw}+&qaXB+tAFe_vUkU56-I}t5VL0Dit^OBj@y&VcLSbK%FM;tMOcKFdywGHz8}tC>~k_=^iLiqhx>*=K!TczNgwOK3@+VOj&GEH8J9h&(*6 zj}NBnYuK=V|1fq^=@6Sxnp6YgFtB}@^R3hP z3^NBSWzQ1F$p8q_P?~-$E0YWiP>4n9(2PlG&7;q<=G5fmBF=o zrj?)bzxX}sA`$!YBd00FRauQ?8nu%`H?1U#Ty05|nfoL4{ozJ9qHL$5XIJ0YNyK{) z9UT)jd2%$uf41iSP_kf6`+eIVds5J)@oqdiCvCqly*4cS5MS%0OirJEdF%t*l($ml0W{SI?YxSD5q@1#yz8$$+*;;mY*;wMV4C+y0pE%yVv?YtM zSVnvr$qkGbgGSn@r`rrRvM*F&@(;HHWl(aeWU?_&L`Yx%J{X5rTsmRCbI8pg0Buy#J;nG37US)aq?EFQ`%?ouQa0%6RE2X?DK}4ki&rw2M(*8l_G{(OC zr?Ijh!OIE;PRnVp(;BPjtO7p8SHw!v)1v_T+e)aZv^`&@tG1dp#k(bM6JJ-s2-K9v z$Q^;v{)qM75FB*p0Hre9fAj42;jvtTin6sa6>LMq zfZeuU&bwZw%z5mc`>`Mw(RZ_h4-*c2eGQ>i1t=1H=o;Ci7V*j6qRwp<7~TYIrUxU& z4?RSi-yUcIbNt@?b4~%ijXwc%_dfMHqI5^Z(+)*m9^J2Y?K!W_IxhfH<2R0i*U;*g zU)ZV&jP@otG<=fje;|wOSPS&5 zd88pHw!|}&MF|0Un48+dUkr{BaO>PqW{+znkc4tT;GI( z8qbG^B7|7{CtA2#nVY-0vi~bS{R%YfgHCE^0@A3yDYQ((gfPL(TeQT z4GO~&ZqBsk$#ye=7cnK2L^LXlfL;st-H3rqR|_v9!vG()>p{=2d_av_))p5~D8rmh ze(HnmY5Z2}_=pa-z3QLjPNutzY9-CcPB*`hp0hLZSUe&!oHQfXURa ztX_;*^5;qPuw75fTO@Cv`-mf-{Ly{-S}DO(>-cF60c-o|mffE1JMa3Bz?$lDe*_d$ z^=KW^#YLWuWe06io%;pzbcsNV-O8EEB1I2RDgtX}BVr5I)stH((p8egBU{@Wc#)^C z8iV>-`@4J-a*rv1go}*Pjw&CqZ~4X*u*vyJsw@V7loBu;?e$^#WLGV=2DQBG?a!-P zYEW{jt5fks1m+hozIt?fuo)9t-Q10LCHD0>vRoU#Z0R>;|KUSgNUJ}|*xiJzcA5>& z1}VKK*0P!2nb%r}oAGKwdR<`%6wm|zY6`zHHTczl~X+Ij`w;yK2;UpNQ+kj7c2$gH&p@8*M9x@K5 zrFf{zPn29q8HzI*i6cm5VTznwYPm$nvD@GJ=_*%nYltFajZBu=D$il|q-{D1WXgGA zPz3d!x(Pm0I(bgq`_5>+Y9lAB*CwH)VYIgq>QK%&39D|BpwqqTm6FG6<#eE1TK)Qk zYJCVpAwq`;VGmG7)N5pSV z3&Xj~{OJpJmKreyVVF;Fnxd(xtbc8#BY(cPn3yPb4v}cVaH+QA+HE*oPRi9I0$UUMzZpTyTZFcx7>LN;2+MvK$*ROdnM+-WY8F!^)oO`LjVtA+ zmv^ov2UhMWW)2pTGdbg*uHV668l({GU~mai$kTtcQUCO1Gx3rskko0FJ3omGI1N>G=urcZ&# z5Hq(gbTN5`b0X!#LV&m%0>N@Ok&dJf_3$=azdF~K)5GIf$${MO#*&to;_^zSOlBT7 z_IX5qp-edivCwj)nW>q~Y;61sf$Agk^Ey%!DJUzUJn04;6GZvk8b(RK}6KuG8PJ%yw#cZg}wziX{`@ShK>li+i zW+v^Fiq#mB{JT%XM@-&E+DX-;Mm*H;enkGKvy%(-?6Zbz$jz}fn&Eauvz3xlLn0V+ zhSF+CKmBQz{&SOvOFK9_X@4A5%q{Ip!$>naGAPZM(BL3Odb?}wc{>Pv`}_X-J22qg z{$?%z?J4m6-tax;{aG}7&avn9H~D+gyU^3Q^852};LEG%-}krYo~^g%^q#=nHUHd~ zVdec}(Szgnx4SjNx0jy4*YW)KTFcuTqP49z5Mr2@Q4R(vtEIwC@ZLdhoCdGK0hPfm zCs|t*lW5TxPqkgwm}2L#y^s@u#-HLb*lJSm)M2lga<6SbRp;l_C9iFNk;{cBs|r*0 zzpf&CZk2cOUfZrBPAxez*1Ej4tJbG~*w?IYvZz`PuzMyk{P`(x-+EWTv^PvTr14^q0F`enWt9cnbfOpB(CQ6or|f>cN6YS zPZL=7qOJ!9zU5ul-p4(wLw`?iS4$2fcq_Xa?qz%rE_TLNM-Bz%%eq=ByIf{NvNPN& zkbQ0X^Rj;^XF1_;Dk|GAtC8HVhxyNjWhWT;*nsnh>yM;Kn?mh093_;9Dkc%^8~x8Q zWEy?1!%D~7J}Kd4ZE~kfX&>ht)t`-Npt;Vv-@__ z(&Az3(&D1W5_*?o$vt`d>z9w>=gl}YErow$zk@4emfQNiS0=Z5>$y3Hzwt5gl_{lQ z``U;0>oeT8$Xe?C{h&A&+y1LWrR_s#H_zAi{I+*x+4rrmJZ?yz-&$END4O9mTQ-qH zs`yRR`tMr5jctc~cmS96O}4z6sq*@=+G0Z?z2bZW&fUB0-=0XH@^hbU4*hn19RE9p6Yr`!n|;YMT9LyHOOyA0%}E0umtIa zP=xJgUGLR;^~GO9=GkRmnba`+uMjH@3FaMTUeo3;IPZQ1U&y`|7QD%vD3`qDRE8V8 zOPtiX?RQ~LD@PrD+kKkYjdZn#qB$Z1*TSm#{t=*PLac+8jO(mGG|TcS7&(R}E=IJ; zG03@sO)4X6`{dILJ`TnyC&1Q6WXbZO7(9kXEk@+bF&GKZhfj;M{!N(9ws*j+3_2^K0Y-*3L-_{!;%ip%E`MFL&SYbd*NNSKK#p7FAHD zm|jrlp6&%zB3{Ojad7z%%H|r}F5{KaS=hj+Nn-%&G`c?`*`zummfat$)swHA11`++ 
zdvnc&pHpkf>yiWC?U~JoJ4{uh!jJ<#R9{_Y%jz>fgyt~bTH;qUog{AnFu}b&;=3dnpI8C^gD>KyX%X;IdB{IPZL!DbJXjE&Y080p+ez2KjFO}W6pgNRjsVrq^^=Ur?;A>7L>d0&3yyqwRI=G zy(%|f46`05c2p!1V$~|%9towuN3Bw}hGmwL{{=RVu`044sp1QK&&S}9i&F{*+8eO@ z=OTy2;Icm+aI1oTaFlDYt9V(9wzEPTDcNpiI|oH8(uWSI4zm**Y2z+W-nXl%dLq_Z zR^#S4u{Uj}HS6R&@`!(Q)pC)AjYStSQ+y43oL%)29lk(~WMffuWFx7T?2-MvUWbB!OZD;+TYJyBd8YSgvadx20kS7rKDk^S>)7dhGOtZrS{B8jM>0XYCJEF2+y2Vun@klCe;o>DqZ1#8~OWC-0)%6g|#t{kiO{K88e*?St z!eAL|E6c$@uXB_2%c7NY$!zrfPR^~|m1;BUsw!(%Xyyp2#xuqb)9a$1ytx+HDIT=K6g3`F!@)qc>Yl!QJuMz2Twk28-NyJ!Z)@hA%fh!uQ>$?e;}? zJ$#8g!`}LcDTO z=?gzW<@uSO8Rez77NSj2Xy2L+Am8OtAG)*qe5OgvT7M!Kj=XBp-ZaE zU`9LD<5hx1Tn~PegY)H|gAzS?3G*ObyLeFYtXU}st3&AyvS|@d6tk*aT&k)|mRd40 zY{6ZGePwQlX^BUateQ=D<*b2C`7O=2wzcLI-O|&k%Tu!p)FWy({?XJocgk;hW}qFv z&!&P!E88VK9GeLx>`b%iH2w!A@v_f|vWCgGic-&MB^Noj$cn6gWS@ge$eCxAXXAfJGs0gk|+D{24`b9X&-pa?w zxe_~eCbFn@+WE&|-lMJlDc^EJ{$g!xShPh(fqK)HJB2S={l%st0vJfqcfy6s)T7J$ z%9$02(u=ImNm#L6bi|ef%nQr5Rn7r3b0fSN(7$=?n_TUtgKo2gxyN0)e-kAb;0#6) z!p_=u2N7z$W-qRYls`-;7@Cw>TKG&)M5gefO5|#GHOGFRQ5oZcJ<2p@okq>EM1Yz# z^Vk;RI!-7_?b-m>igHLxA{PgLA{LSku>%1IM*v^!?M_L(4GIVWrxl_(HG4D2sS26S zi}kZ4h!z%?6Lyt+aXFUKF^Y>^Dx=baNI0ZL&gyzOzUstlTADHFO1aWv=I7MTA||SH z_;Zm{J0?a+rmxh|jOtT*PPsMCye5B4(P*Nr5p=QCZdDpCuuc+>M2jsrjws~q%Vg0^ z%4y0d1U>-UuSaevjEc+3ouAGWwiX<#Xp#S`a(Ymrk{h$Pwfx?1Ez5 zVAlO+!P%w$oiOL>wEVtDiDXh#5}NrYiiwzRp+pCZrQdR+|~s9H>WT5g`sz%GaD zCF{sKp2>#vzI*_3qNyY*ulle?b?JkFc1RzozP7PsWZ8|@K>(qgIdSgcvr-*L zv#S8q$T^*s$YGsggDQjQ2B)F!Xqu)gPg`ppv$()&jwj~tM| zioNp^{9PtiaTvx12q(`m1I{VJl)mwA^=RbI{Zf&Wge@cD3$Ay>}I{Cr<%2?r`T0gY7e_yD|Bjf%cwJvoW7GSGhDH@kS+l=Ra*i;lzne1K- zsu+PMz^UdaPsjskK!ME7r4;>`qfidzh`cpHs*T%!2hoB|=8rihQy%}%tqx4MIu9C40^B)fRHReAlb>H?{Zd$J^YTo9{ytfy!k;mVtp7p4nf7VBBjwo<$XwUqn!#?p)q%|LWT28{Y zsux*g9MrO9#GC3#OX&^0t5ae`UC3?exv8P0#WeD_ZB&Sn)Y>gHr4Ug5?6>)Yp6a^9^g3)S~_M$~UJ?&e+gd zvpqfQQc3iQ@$F>ATY(%qX+>URx{vt{YHmL$aqFss_(`f>ywwQns*jJ|Td$^jN1l~; zeZY2QtfgF}sZn*LA3wF$4<(o47N@RWn9|e0Q3Y&YApSN?ep8fsPrJQfdVVN*7kiG!d!?TZtUc~Yc!O+b$|C|ash$fWMA9SP zyfEqg=3FQ;A$7vdXoNpcUriS198g~ZrvoyyV}e8n`@hiWa{cE&nSQXvbqVCk0Nfj!fm?>UD1;I zjy{P$YRc#_4-bfe6A~}sc!PX30;f3bjA~XD%czPy;X`!yk1juzviqJtqnI$5X9ROi zY;!5#9-(pt;fm@<7Smg1h$GTW^!59LC*~5aZ&{f>DG%h8o6QO`e3_!LksKEt7Zl_z zDIo?;+s@^q;adLPukHB0wd`woeR zjtpBVukfyq!z-H~PYUl#OnQMsV%vClzHs5_Us8A{;7vO z59U6|RMFmeBAKPJ! 
zFU95T$5>YOpd*V*3ev|yJf+V}tYQ2#ZpqaB0=0L~FynB}3|l6Np7M7CDFU?0DQHt| zJmfYKN@rs>#WHN}rOFxzZjB#o)eFSA&9r>z`I9o5ogw@6n{as8?LLrPPl+NT-MCy~Zob0;B zZ^^kB&9EPD!!5#iPYN+DUf3`LtUj{BI|nzuAdz`>?o-rWQ|$UTkb-%Z{vYFjWsr6% zt>9Qpn?%vMMefIyX-5`Q#5H>BEagw~NTvcC&q#D6?{ZyLeF_&P&$r)EOc_^kttCHI z_DCJ8hdB;Zacw18Dtx2`Y@5>AD)durB`7DIl8X&K;R<{g zMcK>K#YI{BC_{R~8W{T=7b)^QIS`-is+!IdpOg75gb#D+Hs*EJpo}&F>9|n(z6UmX z{7uFOw5u+f-Sbk( z`&j0-FmE8|B;$Ct2mkO~#&ZE1YaQn&@YTR1nROh3tCo$dizKFuo2ibHpQwvd-dgH|-qYpnv3sg$(Kn0R zXNyq0J1+PG;?-FACZIOIv`vJ+c3M?$ zw=+_~c@vL&&>JVap(bhe-ry?(wv8zRvC3<^X3JoI&6M#g|_)SmGinh5mz%G+;I-?mjoOs zESk@ea}H7z=(_J`q`b|I{AVb0yFV(22932=f%iE;_r5aO^fN%C4XYNKY%O-UK9j9P zI{%p}P(0Ifx$5Mjho2@<%9F6wYo$U<7P_yib!`;5660RA?8(b;5TGuRdkb5U#IUaUj1kPWJ`| zrsF2CYk|->?xu9sM$PxN-{+DfavKe7*oZ!uv-`d^dd|}}n+JKs{j5Z2=}cy;Wc^m} zI;+ImSuaP*ovL_W%WSh-aL0XAfRQpOX!(SC&dz@XDHE=F7|rFv1JctCQ{Eyax$FUW zc%nai7ia>48?$os8y-{1RIh$aJboLjyGs~_=cB#S#qY6=+G>1+jTTJ<=WD;TGrslj z{tFjRKnPpD_jG#&ez=DrJzW|~O5-;%Q6jDX##R-Kd-0t)Xq=3@8k)E?=AwbFK=8-R zAEn(OAX*Bk3hal?r?jyam7V(>eZ`>$+K7lf)=Juhh&}tt$hm{grji^B2h4JL9!8~B zsYrf3Ob!;3zq&gOJ&b3}#P z09ov0#p5KMB(NC{R{RGsmg2P+ODg^e%r*Z&T0fZH!!xE+fuhFTE71F)Ad*{ zm&U%)Y*TOfPEp@qYv)YOE^A^E%wx6nG+<^8r+^rP8g!LCUJ8K`SS^D#m$Y;%f^l8d zfs~V=RnBym>`@THkC#K5m7ZRmHeFSg+LorZ&lWs1op?N#o&|QJ-15Bf8j5d#oabS4 z&|L!fn&Kh%v3l?TSLHQ56;$r*FB%{(;3n@@=#ky zxCh@&l@n<=ccl{JFIATVma;3!&bi7wdsH)fwgY;m-d_1SPI@vinyW2)fs3zGqPNk^ zUyO-`cfuFp)i4Q4DgLc3xFJZ&^sn0#wRZ`Xk=d%4t)yUm#t2{Z-QaItEbv!gaiR1O zuN?}5r(c4@ca6A9h{Jd7@wRW@U+#tWQC7IgvS4EgEp6mJO<8M3D`6v#)6LQfH){|~-SaT>Sz1=Dar_#YT+z|I zQG;^Z?WT3Y&F4W7qaSi64w_I2_{=V^q%J-a#l+B1HNsce2=1ud`o%v*<@zMPBu7&) zE^Tt$a8TY#SL~caPIm&v(BdZ$RhW+?SE`J@AXb`9d)!U3NooNTbZjgolj0a9Hl?I`0zTMV`D9! zXK;9kYJGaVM$~1T*_g(E@Fdq4IvC#s#kCa}S6qmoN+&sL;VcWC0Uz47)0J?P!*%z; zd5Ts;p(n%!)NcNEK+0VW)04V75h)ax+O062lWd`(^0NpB5%`dQ6UV?!`P!wH^hTj&Y!ZxoPua*$*MVum(wFCZDRw}G7IDGqGUcI4!i4D)n%*dwURFBGuHaXofhIQnKIVW$DIP= znPVu~Ntc+(eb_^v8*xt%!=E$5iSz$_l>0g|+dE;L?FS_OHMc%1HWq*i=hos|03u9* z0VePS2d&gGpM~!xTWtsL$x(>8l}h7FZ%|3-P#c5)^lr&f7`f0Uz{|C^6{P-dfl}Jl z`5LPZbYbp449-LA+aG0tYmd~g2G@L6=T8}olnYN7M79Y4 z=4cc5<40_+>7QUW7<@&ckgK!lDM6#G-D=SKP9tCs`p-3Wxy`}4uc|BgXv^x*0SrL+N#GyhQ02!zrO zEKq&FW@}Aiz&?(Ge3qmXd*V)UK5RK?l;9#g8X&`Qt)z;jRf2RG7~x?KwO}gUVcS25 zQ@Y#Imx}%3(S#9S(ymN2+hrbSbfpLY&=y~E*G{o)46>Hz3~3PD#@;`&Ftgy@nunH# zbPHV?R2sIc_0Z^$K<|@d1E=DH<=zLhEZ- zmFQF$xjDxQgqg4a=cBe)>V-g7piT+V4^=E)S+kU$;c?g&e;a}%LQ@u!nv}j(fzniR z$82O*lfpIS%>ai44pxr>g6mVFie;|w2GgFm7K=@3B zWPu&{Du+?|Z1I;BWbs zbkzS>cS-)C!2@ z^$Z?cwxKA&BX)dA`1=>=AS+NaLGzZMrTj<9X|Q;nW-3smPw-vBer)oEg}@3B^&{fS z5@41rwe_a$Pw)xZi=< zzfFx%)3_WW_kqFNB6pC(iLv`Di>%1*A~UVL6L#Y!3z5XcO7_-Yb zgY|yGT8eqrYZPVaY$PS?NF;gb7_R6E^Bli$k7MAyywdIInUn3S-^j(QipAD3a4~8s z99{S8Yh-f~O|15y+7r3xbJCq7exV0yAO|(-7grRcWtJ3 zoKsOm|Ni&TJ3|N}hu=2h&LW225=pFrnXp9Vpd!d?nt}*bsZ>xaC7lLBpoX_34Y6Mm zkzfiWC!%Q&U_w$nFmRN|;Ndt}mB&KFOd%Ky&Qvul&SW(2BmQ))&(3CS76vz%npDs> zD~B@{#)CW-cBd-QDCvUlDUz7}(se_tVMh0U`6SmBC7e6WFWf4~2VsA`-#T$6VUo`@ zItW`bEm;Ru_T>FomP9x)gUX~nuJkP$0;qMPs>2F1bmN#!yolA(+jMJ$j~tK{om*28 zuzlslBf8wt!1rkj_={Ns<&V0WAKGJHS*sVeNw6Qm+q~mp(0{-i3U%Tgtd?1sP+S*I zcje)vwb%Q#q<>eT@}|}|SAgqn2+Ed+SKoTonkZzIaE1jD4?J^BoCmL4m{eKVd9v5P z8vf)Q8vB-~bvo~Dpb4wRe9&f{Zkp)~m>ah-vX`of&2KYcTMOCm1`gh(*ygv57()QTdU_(o@0DGTz=K z3cVK-c0z0uJjs>XhV~+s{`vsk%!l0RA0gvS-K=dfty3?x$RP~O5i0?{77X#3uE>c8 z94>R5d5?xng^z|^s*T@Gf@D=tf3WXV%7$)2kH`EbxHsvU!bdzyOR~yp&Yad6ec>NB zLlRC=Yn|(9`A_O-0k?-Oaw`Sw2z;q!d{e;=WQi25e{@xQ(SP?jp@^}6YovMW+s#*W zzUt(Xw|B4a!`lZVoLc)Hf)S>NU5@v=_MVfA54ttlnf2nIm?AO0z4ux>=(be06d2F< 
z4gqz~MZUuFBaR{9Ycdh|HT`vcNK*e}+u_ou*&#pPVqfWw^V4jrMPth(ULF|@wm7+g?^3Md}u!%Y(w)nbWnc|wDZ(+@@rZ0^8Ql>`SI?&zK6#hx0;@0jy}aU z;uH`sqox9et3%9)!?v=U(-fPLA5jqKH!NVrLy7(uSdltZ2u-|Sngy)S2p84W5Y zGF(t{Z-CW9F$xCRpQW8y!t^p^H8MDIu*3hL0ee8nOK?Ls;q3lEjpOuQ-GWkB^pK4h za->epJjbr@)gfyKOzE+8`$LgF`5UJ5#{HfoZ@AE{Wcf67W4$Q{?q(TU{(V(^Nc+jG z5${uw$1i4E180MFTqr1AWT=}aSm>aEn#So;D5woND0}q5udZl8llL-|bRF}XBSnac zyof<(zZE^Ih76JC#%_eudayMsO|LdmsiH0%C|SEZXu^b69aQRf73+0u@H`EefW9&W zo+5?^pms6(=^9%Sz9U?di#8o*l)?N>Ggy(z62z%RmUE|YX)zpdyn z>0QD;`JB_ytxfyWbwkFLrN3K1xhS`|9)x80mw-B6aKQ!hAUdm|?52Hd8~=u6aZP5q z^6F$0H>`@OObT{oj37YYO~vRsrM!_?{ZH;esP;z$b2Xf+2B9nvyP|+m9>!-iPZ5ay z3;)4MdLgvpa4Z6ZKcRI4q`k-TKF!HH)cgQhGYy=5&^Q+pTZJsZCIj0*xJV-n=TJ?n zj1rm{Z6ER5`~)xQ2P&yYlEf?TaQ4kcDOhd&UOno{SL~3`*LYY?;pXQX8uO~8_hR%c zHTrgng_;8eqg5egsSP1?Ju)gbOMGx=_{fL( zt1@ECax@eZ?AYp5iBd4aldRYi!)I&VwwX$ra24UBW!}WdJvF%+!EdxXKxFW4R$J{ z`qw%F4%0e+lA2*{lz|Kx@!rKa;fz2AnaId#8CaG%kA`FeRWX4NZkKAyS=#GN(CHt3 z)TDg4X)PemWy!dehXtWPX{lpy5NUj>YpmLykAX9JljrH2*n4Rsw)GNUn&)UJHiR2d z-#g{l-*6i{Z57Sy7Fzi%+-sYGRcFYU==F+{y8BdFx=(hK)E}%sw3UG1WlxmAxW+QF zaR?;p9(GiMp1*m;Pt^84|KI>a__tQuTX+)q)O7fW=XPVQi3iv6MeOPvq(kilW3P#K zZzAPRl$R@8m!hE{kg7rIT^cr+s0JKNEU)@2@`v zNVNS7rtPRNSy5zJJ-4Z%7<*QxIJl8I!`#5oa3V*Z*4Pt(!fdEAf{*Bl{a-~YK+NHtjd#3tFe;k6YIees0YKjoc1#$#;dkjw>bTpdKS z;!sza##c_`sI=`a1?}xxirTvXaBR|Y#ph(56F=>g2e`n5PkBQQh$tt#_&;uQ1Q7gD z#LKy39DS?Eob!%5% z-JJ3eu$Sqr_*ca8wJoNsN16b=wJ)CJSi z8`Ib%U|L+r2{XQwB9A}fNEMTv6b#u8E=fXISdORdHj$gQ-4BC^X}CxFGq^{Jc2PG< zJ28;J5xjURuRX$!-tD~O?zqaW9`a0srM&2ruL>Fwyl4X5mfe#2_UT4aUYPPK(cn+5}A2-mARu-jRw?~94As{g{dELDWkzsh|#V*2Ol3R_Xw3i5@2 zDe}$KCbp@6=t_te$e^G`Y+XvAfQCaK?skVIM8{+R!Ij?fDpBK^WI0fRYP}yDo44ksz*6|_8)@; zE;{eZuctSgh8G5miO_Rzl*WF>W`C1}knY~)=CR#`fQe_5n4~@;aIDt;507qQ=nT(I zS0`o^8Czx)c^%iOY@gAH|A-%Po?2j#;zcJAkD|6>k+X>3F?2KquhZv9ziP+4?9ifx z>(Zl@e+vKM&Ld|)HLhhQij#q;#6cfHDNlbJ;n`-4%SJ_@4 z6fKU(xSK6`<~UK;HqM&ROo4-H(|z8``i#|HHdV$~^UUUVc{*gc@9@eHX8m&hhElX< z^ZPw%i%;6!{7qqyp&j;X;mtfQ7@q9{M+Rv(>04X>!7Ii9#m%>GZHtn0Ya9PTu3u9~ zDE?pR^}B?dc|3)G_;sOL4)igHdq4kEu3=UEh;lmKLA7!xX${N+;ky^Le0Gy?Ol8On zD(6-@46wApN^Gj7t3xL~%$GKV5cOp1TWeg+>kWM1tv#~;M=ke{8Ycs+R&H*ttjJh~ zXngL0r>uC+lqX=n)V2gUeerB$hTLUoq=OD@`8R1XvO3Elv(Ft_&bGi|b)EOK1m)k6 z?!v5`^1Q$k&qn#gAdZVl=ER8d#W~1Oyti`hk8VITNtrA?y(QGsssl5RQ6Fpp`Eoc{em~ zL9-JK^c`f@+dL}W9t?yW+4_tCPvq(y0TCTSNDcEkfWKK@+Z1H-JpYqJ&GLe5|Nk{Q z@Pi%ncEpoJx4Zg_}ffkIY`d5pB%-rjJj6oLM}6>NagL0;dg`^>>N$NUfg2`VKp zYk8G(d4r{f^LT0)UaJBcvjad)8099440){MAc;veWh+=y;Vj3>7k4WgJ*!jq-?-9M z8Yk955KHzPr-h)~hD#OXZWF?vvO=cWp&b_)>BgZQJF*S(_u9?vIc}~wwwpK==!oRk zU5I5f$|FrJG`5?HqG%33y2|p58t-Nu<=0k(2wY^TEf{@7fRw4mmS)$frge;yPS?f! 
zv3u~YK+6~BawzIt@4AbGqaV~hiX>IwfIMc$rMB1@(nHUJqo7>*LVw3kbJq4lxjImX zlWwxmzn(rFF)*=MPi9*0YD~Dv-qEoxIp2C3&1G9_!bdPSXIg^yyhOR_%r_OLt2PzV z$9^jGub13qT)b+2-{{%`p@enyl+xb?H}@Rf=j8o7%%MjAsmkX3izs{eYDj~^&a*#} zX6>97T|gt^f3WqHQE_a+*0>WOxDyEO4#7eQ5Q4i~2sSXdy9Cz+cZb1s26sy!xDW2` z?r-kB@BR7K_hVLfubDaBeY$#A?b_AVF9LruHge_7bt;1FGn(e!7_Ah_g%uzAC2zdV5i?$WJc=y7vlSajT{Bc9{OoyXsD`ZWHb|lp!hv^d7Z|?^ zLl*?vPUS4OJ`je*Kb|Itth~i!)0^s4eGJ1dr{!OrxK&Nd^B^52Z~h~3;w!@3q2@}7 zg(mLoiC~@bNSXZX1wVzRWPIu6no+-3)XoogG zA}(hEPB7mHn~_~J)?QY$db_KSZ=%K|7=;y1o?!>T1=Ck=c1x<7D@ahahhP%%KSu>~ zbKuJM%UFH5+))wxXP6Sp=mQR-`EUQ`wQ7Ylfz>|j?rB+ z9HIdugfob8zl{8nK?tVy1M@pw=ht4wh(34 z_95n12oc<**PO?I?WByP;yHMpUXyFLv7WW;+raMBlj0_!CikXB@3#SMjds$M?0Za$ zr^BOFf02sFzn*7kn96$FpwTorLdRUmlL<#7%risb2tst*qw;i5tyZ9#!e* zPir1k*D8`I%H?ITyQ0)CJW}N zrvF+#BQmeU90g#Cvr<#aPjevvHo}!ZFtua))nTePCb3DuwjT06)de252?D7C zI8NKIOZ;x|Z|t^-oT^k|G{7lww`w;B>Kd9@c+BiulA+meHI|Y)fm8P&&xfp zk&V}Ey*(6g*p<|s2KN1;KD&WXM#%rS{0%5jth05*9MOsuU>d$<2n^{L(}sn8GA+|G z!{1p(R_Na52#!~v2(-7yR4e`xOl!_`>^uE{n6x6I9Z9>=oD|>3tDX>U$zj->b+A=r z?qg0fZ+BEG<}t*+N|xiRC^V_(d`1u_`Z|qeHpP>WB9WHy;Mog{BXqs=T}jPicm=~r z)u~t6Buvk)sB|$WvcDPoI(6(QStHeJDqXwxPsnjkt~aNh zY1tF@yHb8h_lb7*v-L-z*FIl(pcir+?a9qg()$-FLc=So?FK8ez9iID*I_3XCxss~ zJkJ$J8-#u)D#fnEv77{04W^YB&==*nD z76Q0s@N^|so8~p2bFe*oScJJb%nyF6`Ix#YuA~=qaSeB1 ztNXK8OG7eiRs0|HvNr`k9~0Lme2|l^VOJFSRY5n#UNX_osP@yfLYMrigjMh_?ctr_ zxX{rlRB2r7Q&f~Hsi~aD!0!C80k+ZcBK_Ces~J| z3hlDEp)j%yqSpJxbtQ8Y!_Qq%b$I~wZC_@wZD_4EQqM}B))4(0e%1NDjOPB*xAYqt z@)*{|iaf+^5nDa17|n)@sCp6jR4C zYBh}N4FR0dMJvt8LoHa}FETgXCBc~@?8vV_%suyQP&0Dc$k-2(frz&Gh)ZGu% zdZcTGmQyy6U8#5NQ^{4?=8Z1Q;g|+1>|vi9J^x4=?B}`RN;u4lhr_fKHhV4n388-o zo-XbcM*M%Ww1N#S%Krsb{|M8qR8At0q@jN~9?`dI1(k5^Dyv9sSXj~pu;o&Gf-))M}|MxNp$-rI#08C!_j(^VTu>Y6WIgypoBu!<;%v##qOJ(xUf{{dwSmfsczsGT=QH zl5gh|gS9B1AkJj{4s$t)=v-zYrF&y9aB?<4u{cQ`Br%~_m^AZCaJt}0W`GRqQ1g)y z>(bb1?ts5h-!(&^GGo2PdRs51!D*baOs~7f2pptDF}*Fy$`uWE!XPw_2RoHjno!zo z6R`M@*lb_y9r5f#dtK7_tkgIXZqyefNg75*B+27C4t6Xk$CPRF?Cdz=>NYsc6EtgS z7Bw7M5qN6Q5q=BA^0)4cesN@FsjERha2*nv@7F@^ez6{up{uIfP`b7l#fP3lBQdgJ z8+j0{n6{2i!P(+qLc*1wgzf=liyM+Q7O}Q^s3E$JmQeFZ5szMaIe~t5HtzncPiW5K z(r8abl_g5=`tTMk^#rsH{9*{*DXUFC^Yj~*|AY-s;qm*hR2{_bFlFF~L6~U<`xj6$ zoxmMshM@7PW`xjEMbS|nCu1W!w{8Oh7U<;G_5mFexHE4we_QzFtYLHQ++P%KJgF}H zqjmOxu#vgMZbU*hYh{Hd$oE(nsspKgV6Xe}{oID`_q1Cf~RgdHd0`1 z#g?a^5;EgI1()IB_929<1~x!=8L_@5lG+l~kIQ!Q(1e8bB${48#IStg$c1@xVP9>h zp_;CO`9v>g`ooG@^3Lf3nXhxGB*dNWaO z&}b;>?!H(KwYqUJ+e^0(fe$}y(<^wgvNZh}5}C;mj-VZ`{Ac4Yp%8jQ47opP{HcluP z&+vI|aoccp?M2llf959FHq3G^Vxfcv{zag*bs;LKL1X@a=#_m-<)oH|gIy3bh@bgX zL0{1mvkbPx$+$>{q+h&Il@Li?L>RSaI+X&Jw8{U}FFKa`ZMr$WepYJ)3tDe~4H>vs z&am2ZZ(vC%+YJoXI`ePj6UD?deXca}B&Pe~DJ!;a{yT=uk({jdSA9znZE zXc?X|GhH^A(Wqe$O!V_&bpE=^&6mv$xpfifUKigDGv#4|Mu$lPcRKSQC@8g)9C0=N zXjQFcGgg!yR~SfhhSucWY3K)rKpfU%G}rCw8w6P$>oDMTNm4LLBb4io=;jA7yysfSs)>JqdDDTn427J|jJ zn{NyT{bpA;kl$QTK=Oke5+DOqoK44H7cu)PX?BR05ac%HrH2*oPPN z-%@H?nV>;BdR1k-6Hp&iC9Nd+xUCGDfsOQ?)3OzcA)RHQ)mKlj(M)T^v{6KBG?Q;c z9v1#W@jx!!W{B%8xDKu*bF7vTEsXZipvzX15iQ5#)VnI{J=cm_(%;|e5dp9lJmW!& z>L)RpSBkjiypauGf9C~>7bZD_)(A$va)LG&tAYlCK5xD;T1AdQjxqSQY0mn^`N4N# z8+(?k>{X{71^9(6%C}>W0A>&REh%IpKZrktj;n!jn3j#ZmGWJn%ZPj82 zR%ElelhdMR07hiYpetha%&>sUQQr+Rd)Y_gAZ?Ae!_6?@f%e&cNDf(p2mED_sLBXX z%-9KGaurQptQxN}Vna>dTAE=v)6PgWUHNrPc6Hic*D_HVt;rSF)ty1?(W`ZXt3wAtrIoY~Hp zp^C{+RB>B!9$VL~MoxPd1NTL}ffCVsL0iVmmq%A{yq$cp6p7otsFK)_QTFQ)E zqcFCK#j37wKS_@O3|b`>vp7(rAu@=4hok#DDyOg{u$0X6bNeyN$7l2oH&A$Qg5pUo zKj+olVpW6S?OFJy+wngxY{iX`rb|us-bR+GjF8?z*cWRsgx7Xpnh+qNVilK4Lb;a25JNPbce!W z(1Rc$!MC9n(-ed^{El(%CtjV!j!aCGVKmwf{t3a8Q8%^PS{%j;ACkY;88N$8rSvnO 
zQMS{sNH<4ALm#Q$)$)j62peEMW?(9q!G~}lmVed(%>g&aA52BDJD;BWssyBk2~K-T zr6=eNYj92CMX(tp6rb2Boo!!FmGS$rT#YX0qs$V5CMG$X2pkLT8?uAjrUC2dof7=e z?9G|M^MWQS>WPo)etFEhjnjL`^~4J zA$8}-gtmQ8;;X$QzboR?T*w*#N_iho3jhFr+_qg^JX|mQ;o|OfVD9nw8*I|500gWC zG!uX;BYBQJa5!+T_7x(Ra5MF`SX#} zx3Y|~9Ht%3F!nW>N8iOJjTg^HmLi_^h}c-n%Y~kC?@ZRWh{AIwjHb}Ik!Xoj^t^Knvo4$dZAsGzos=B>{d+~!J}B!n+PX`WX!%g0#R*`d zar1UHM|i`1`E8&@1V2grB zM=OKIm_B>9=`%ka*{iP{f%Or`#j!rR{k5VQ1mDM~h>b!ejcoDoV+z@rZV$xfE4y6f zWKvVyGz21I6twz>lY^66i;{&#pnnPjFE>d76ANvb{X-xe!WUG=(6=kCY08aHd3&xc zXV|%?{p1)xm`kZ0M4NgJ{uy97%r!_Oa}iw$F7cqb<}9rj!7Tiiv%PT?aP?aSU9l1z z#zR!|*K89i&->CgTE9`K{=rYX@T6yK8g6oH=BW%!N6u@=2YYG#gQQbV~&w&co zUDu*zqm*pj0-vg7oT=sLdAV6di>%RAhr?pzEAPF6o|jS-TVxSRsp;Bng@wf^@`Ilx z=WqI7WD>VD>@rH47(j8={`DqGV*li*f|39b9V*s(ax9nh+b3>aM&N!lWzR86asq!& zKNmRs|JiqV#bGge-opxP_Xu>v6Ko>_8YAXxVMRADsc)9L&|v(bSMpu6$|W8YaT4(8 z>cMxm0iWWgg93L9j7Oemf;STe+s&iD7IHJt3*6?7PYFDWs^5`dk>G+&xrSb53!SuT z40-0SA=NnzV2x+8iGC#QnQJht4~4fQob#eVY~2+?Ek5Ty8o0T5R3 z;%^P}-sp@cd+L?({?n};tqFWK&j)0XQ1&(1uK`e>wvXm(Gj7eNYq@#eE;s8yjUwby z9I9d7aZ$32i?0LeCBcjf!o4pmGw<+MEx5mp$V+4OZA5zFvq=!3A@=g7lor>6GQ;7| zrd4K_Z1-cgne74kl0ym{&k*yDA69al%#O&(#kn@IwX{f1HYYNY2bWCqLx3+*+qhbL zwWh{HVXfOi+6wk;{<0Iw1NNHR-kgjfed*A-uSuu>sOZ1d!>rx0nKS2S27MAbe zA1D5q;_ZVsW3$UmUe4jgI*YSr+N;W!%&RSbTsAyN}k3lE2c62R|- zRIaysNbeQ)$=`TeRHTu^w;SdV|F!R#@{;O!9hX1Mc>?n Vfs(-ozHD}Y8zn)FgejQ{_z#vDWySyi literal 0 HcmV?d00001 diff --git a/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json b/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json index 7576fd96148554..4d482804d30a5a 100644 --- a/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json +++ b/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json @@ -1,18 +1,22 @@ [ { - "Platform": "Intel® Celeron® 6305E CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 11.75, - "fp16": "", - "fp32": 4.32, - "bf16": "" + "int8": 312.06, + "fp16": 345.49, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -22,7 +26,7 @@ "Precisions": [ { "int4": "", - "int8": 87.69, + "int8": 4.83, "fp16": "", "fp32": "", "bf16": "" @@ -34,19 +38,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "efficientdet-d0", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 21.17, - "fp16": "", - "fp32": 15.03, - "bf16": "" + "int8": 328.55, + "fp16": 285.3, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -56,7 +64,7 @@ "Precisions": [ { "int4": "", - "int8": 49.24, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -68,19 +76,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "gemma-2-9b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": 
"", - "int8": 32.5, + "int8": "", "fp16": "", - "fp32": 21.9, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 20.07, + "token_int8": 17.42, + "token_fp16": "" } ], "Unit": "FPS", @@ -89,8 +101,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 35.99, + "int4": 49.81, + "int8": 57.4, "fp16": "", "fp32": "", "bf16": "" @@ -102,19 +114,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "glm-4-9b-chat", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 33.02, + "int8": "", "fp16": "", - "fp32": 12.59, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 36.48, + "token_int8": 27.59, + "token_fp16": "" } ], "Unit": "FPS", @@ -123,8 +139,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 45.4, + "int4": 27.41, + "int8": 36.24, "fp16": "", "fp32": "", "bf16": "" @@ -136,19 +152,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 39.93, + "int8": "", "fp16": "", - "fp32": 15.99, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 42.82, + "token_int8": 33.97, + "token_fp16": 22.23 } ], "Unit": "FPS", @@ -157,9 +177,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 40.15, - "fp16": "", + "int4": 23.35, + "int8": 29.43, + "fp16": 44.97, "fp32": "", "bf16": "" } @@ -170,19 +190,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "llama-3-8b", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 119.95, + "int8": "", "fp16": "", - "fp32": 47.19, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 39.6, + "token_int8": 30.59, + "token_fp16": "" } ], "Unit": "FPS", @@ -191,8 +215,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 13.31, + "int4": 25.25, + "int8": 32.69, "fp16": "", "fp32": "", "bf16": "" @@ -204,19 +228,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 33.13, + "int8": "", "fp16": "", - "fp32": 22.73, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 55.37, + "token_int8": 51.62, + "token_fp16": 35.82 } ], "Unit": "FPS", @@ -225,9 +253,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 31.1, - "fp16": "", + "int4": 18.06, + "int8": 19.37, + "fp16": 27.91, "fp32": "", "bf16": "" } @@ -238,19 +266,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, 
CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 50.91, - "fp16": "", - "fp32": 18.37, - "bf16": "" + "int8": 34.84, + "fp16": 19.43, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -260,7 +292,7 @@ "Precisions": [ { "int4": "", - "int8": 22.52, + "int8": 48.51, "fp16": "", "fp32": "", "bf16": "" @@ -272,19 +304,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "mistral-7b-v0.1", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 38.31, + "int8": "", "fp16": "", - "fp32": 13.71, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 43.4, + "token_int8": 32.32, + "token_fp16": 20.91 } ], "Unit": "FPS", @@ -293,9 +329,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 28.51, - "fp16": "", + "int4": 23.04, + "int8": 30.94, + "fp16": 47.82, "fp32": "", "bf16": "" } @@ -306,19 +342,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 86.13, - "fp16": "", - "fp32": 33.75, - "bf16": "" + "int8": 2348.6, + "fp16": 2074.34, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -328,7 +368,7 @@ "Precisions": [ { "int4": "", - "int8": 16.59, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -340,19 +380,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 44.68, + "int8": "", "fp16": "", - "fp32": 17.96, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 59.06, + "token_int8": 47.96, + "token_fp16": 29.29 } ], "Unit": "FPS", @@ -361,9 +405,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 37.63, - "fp16": "", + "int4": 16.93, + "int8": 20.85, + "fp16": 34.14, "fp32": "", "bf16": "" } @@ -374,19 +418,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "qwen2-7b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 60.67, + "int8": "", "fp16": "", - "fp32": 23.99, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 40.48, + "token_int8": 32.79, + "token_fp16": 20.67 } ], "Unit": "FPS", @@ -395,9 +443,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 24.02, - 
"fp16": "", + "int4": 24.7, + "int8": 30.49, + "fp16": 48.37, "fp32": "", "bf16": "" } @@ -408,19 +456,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 27.34, - "fp16": "", - "fp32": 18.04, - "bf16": "" + "int8": 1401.85, + "fp16": 1046.9, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -430,7 +482,7 @@ "Precisions": [ { "int4": "", - "int8": 30.86, + "int8": 1.42, "fp16": "", "fp32": "", "bf16": "" @@ -442,19 +494,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 32.78, - "fp16": "", - "fp32": 21.32, - "bf16": "" + "int8": 112.21, + "fp16": 73.01, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -464,7 +520,7 @@ "Precisions": [ { "int4": "", - "int8": 38.38, + "int8": 14.86, "fp16": "", "fp32": "", "bf16": "" @@ -476,19 +532,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 168.92, - "fp16": "", - "fp32": 67.23, - "bf16": "" + "int8": 1308.1, + "fp16": 1201.69, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -498,7 +558,7 @@ "Precisions": [ { "int4": "", - "int8": 10.73, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -510,19 +570,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "stable-diffusion-v1-5", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 52.48, + "int8": "", "fp16": "", - "fp32": 35.73, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -532,7 +596,7 @@ "Precisions": [ { "int4": "", - "int8": 26.63, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -544,19 +608,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 20.68, - "fp16": "", - "fp32": 14.76, - "bf16": "" + "int8": 517.1, + "fp16": 550.33, + "fp32": "", + "bf16": "", + "token_int4": "", + 
"token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -566,7 +634,7 @@ "Precisions": [ { "int4": "", - "int8": 49.95, + "int8": 3.21, "fp16": "", "fp32": "", "bf16": "" @@ -578,19 +646,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Atom® X6425E CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 218.51, + "int8": 23.3, "fp16": "", - "fp32": 80.07, - "bf16": "" + "fp32": 23.72, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -600,7 +672,7 @@ "Precisions": [ { "int4": "", - "int8": 14.63, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -612,19 +684,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Atom® X6425E CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 590.15, + "int8": 228.97, "fp16": "", - "fp32": 224.94, - "bf16": "" + "fp32": 219.37, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -634,7 +710,7 @@ "Precisions": [ { "int4": "", - "int8": 9.22, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -646,19 +722,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Atom® X6425E CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 881.6, + "int8": 59.38, "fp16": "", - "fp32": 338.79, - "bf16": "" + "fp32": 54.24, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -668,7 +748,7 @@ "Precisions": [ { "int4": "", - "int8": 5.08, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -680,19 +760,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Atom® X6425E CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3032.01, + "int8": 1.26, "fp16": "", - "fp32": 488.41, - "bf16": 1975.07 + "fp32": 1.08, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -702,10 +786,10 @@ "Precisions": [ { "int4": "", - "int8": 3.74, + "int8": "", "fp16": "", "fp32": "", - "bf16": 4.78 + "bf16": "" } ], "Unit": "ms", @@ -714,19 +798,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Atom® X6425E CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4693.95, + 
"int8": 111.92, "fp16": "", - "fp32": 562.05, - "bf16": 3202.49 + "fp32": 98.44, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -736,10 +824,10 @@ "Precisions": [ { "int4": "", - "int8": 3.77, + "int8": "", "fp16": "", "fp32": "", - "bf16": 4.61 + "bf16": "" } ], "Unit": "ms", @@ -748,19 +836,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Atom® X6425E CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 427.23, + "int8": "", "fp16": "", - "fp32": 164.01, - "bf16": "" + "fp32": 34.99, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -770,7 +862,7 @@ "Precisions": [ { "int4": "", - "int8": 11.12, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -782,19 +874,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Atom® X6425E CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 429.84, + "int8": 36.35, "fp16": "", - "fp32": 167.86, - "bf16": "" + "fp32": 33.97, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -804,7 +900,7 @@ "Precisions": [ { "int4": "", - "int8": 8.0, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -816,19 +912,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Atom® X6425E CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 76.06, + "int8": 7.26, "fp16": "", - "fp32": 30.37, - "bf16": "" + "fp32": 5.01, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -838,7 +938,7 @@ "Precisions": [ { "int4": "", - "int8": 25.96, + "int8": 139.68, "fp16": "", "fp32": "", "bf16": "" @@ -850,19 +950,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Atom® X6425E CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 52.79, + "int8": 134.16, "fp16": "", - "fp32": 21.03, - "bf16": "" + "fp32": 80.45, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -872,7 +976,7 @@ "Precisions": [ { "int4": "", - "int8": 32.08, + "int8": 7.8, "fp16": "", "fp32": "", "bf16": "" @@ -884,19 +988,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Atom® X6425E CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile 
Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 73.09, + "int8": 19.87, "fp16": "", - "fp32": 26.35, - "bf16": "" + "fp32": 8.15, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -906,7 +1014,7 @@ "Precisions": [ { "int4": "", - "int8": 19.84, + "int8": 51.33, "fp16": "", "fp32": "", "bf16": "" @@ -918,19 +1026,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Atom® X6425E CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 404.72, - "fp16": 444.29, - "fp32": "", - "bf16": "" + "int8": 0.33, + "fp16": "", + "fp32": 0.13, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -940,7 +1052,7 @@ "Precisions": [ { "int4": "", - "int8": 3.24, + "int8": 2995.1, "fp16": "", "fp32": "", "bf16": "" @@ -952,19 +1064,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Atom® X6425E CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 45.84, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 21.63, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -974,7 +1090,7 @@ "Precisions": [ { "int4": "", - "int8": 4.76, + "int8": 22.72, "fp16": "", "fp32": "", "bf16": "" @@ -986,19 +1102,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Atom® X6425E CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 187.77, - "fp16": 149.65, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 5.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1008,7 +1128,7 @@ "Precisions": [ { "int4": "", - "int8": 5.71, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1020,19 +1140,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Atom® X6425E CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 89.12, - "fp16": 74.2, - "fp32": "", - "bf16": "" + "int8": 10.31, + "fp16": "", + "fp32": 5.12, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1042,7 +1166,7 @@ "Precisions": [ { "int4": "", - "int8": 12.51, + "int8": 99.61, "fp16": "", "fp32": "", "bf16": "" @@ -1054,19 +1178,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® 
Atom® X6425E iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 89.38, - "fp16": 74.26, + "int8": 22.02, + "fp16": 25.05, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1076,7 +1204,7 @@ "Precisions": [ { "int4": "", - "int8": 12.3, + "int8": 60.1, "fp16": "", "fp32": "", "bf16": "" @@ -1088,19 +1216,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Atom® X6425E iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 177.17, - "fp16": 139.65, + "int8": 187.37, + "fp16": 222.58, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1110,7 +1242,7 @@ "Precisions": [ { "int4": "", - "int8": 6.02, + "int8": 7.71, "fp16": "", "fp32": "", "bf16": "" @@ -1122,19 +1254,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Atom® X6425E iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 42.88, - "fp16": 33.61, + "int8": 48.1, + "fp16": 51.68, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1144,7 +1280,7 @@ "Precisions": [ { "int4": "", - "int8": 26.85, + "int8": 22.89, "fp16": "", "fp32": "", "bf16": "" @@ -1156,19 +1292,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® X6425E iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 47.07, - "fp16": 39.06, + "int8": 1.16, + "fp16": 1.16, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1178,7 +1318,7 @@ "Precisions": [ { "int4": "", - "int8": 19.89, + "int8": 870.65, "fp16": "", "fp32": "", "bf16": "" @@ -1190,19 +1330,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® X6425E iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 47.0, - "fp16": 39.61, + "int8": 93.36, + "fp16": 95.62, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1212,7 +1356,7 @@ "Precisions": [ { "int4": "", - "int8": 18.15, + "int8": 13.54, "fp16": "", "fp32": "", "bf16": "" @@ -1224,19 +1368,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - 
"Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® X6425E iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 67.79, - "fp16": 52.83, + "int8": 31.79, + "fp16": 33.13, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1246,7 +1394,7 @@ "Precisions": [ { "int4": "", - "int8": 17.21, + "int8": 35.83, "fp16": "", "fp32": "", "bf16": "" @@ -1258,19 +1406,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 47.29, - "fp16": 40.73, - "fp32": "", - "bf16": "" + "int8": 39.3, + "fp16": "", + "fp32": 28.97, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1280,7 +1432,7 @@ "Precisions": [ { "int4": "", - "int8": 21.63, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1292,19 +1444,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 88.92, - "fp16": 69.92, - "fp32": "", - "bf16": "" + "int8": 480.45, + "fp16": "", + "fp32": 302.75, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1314,7 +1470,7 @@ "Precisions": [ { "int4": "", - "int8": 12.82, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1326,19 +1482,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 66.18, - "fp16": 52.8, - "fp32": "", - "bf16": "" + "int8": 129.7, + "fp16": "", + "fp32": 54.69, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1348,7 +1508,7 @@ "Precisions": [ { "int4": "", - "int8": 14.46, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1360,19 +1520,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 2.49, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 0.86, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1382,7 +1546,7 @@ "Precisions": [ { "int4": "", - "int8": 12.42, + "int8": "", "fp16": 
"", "fp32": "", "bf16": "" @@ -1394,19 +1558,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 159.37, - "fp16": 100.87, - "fp32": "", - "bf16": "" + "int8": 233.16, + "fp16": "", + "fp32": 114.81, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1416,7 +1584,7 @@ "Precisions": [ { "int4": "", - "int8": 6.11, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1428,19 +1596,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 152.5, - "fp16": 108.49, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 41.37, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1450,7 +1622,7 @@ "Precisions": [ { "int4": "", - "int8": 7.14, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1462,19 +1634,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 316.53, - "fp16": 268.1, - "fp32": "", - "bf16": "" + "int8": 67.73, + "fp16": "", + "fp32": 36.05, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1484,7 +1660,7 @@ "Precisions": [ { "int4": "", - "int8": 4.61, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1496,19 +1672,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Atom® x7425E CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 44.78, + "int8": 14.29, "fp16": "", - "fp32": 33.39, - "bf16": "" + "fp32": 11.18, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1518,7 +1698,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 71.84, "fp16": "", "fp32": "", "bf16": "" @@ -1530,19 +1710,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 273.98, "fp16": "", - "fp32": 16.3, - "bf16": "" + "fp32": 169.54, + "bf16": "", + "token_int4": "", + 
"token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1552,7 +1736,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 4.05, "fp16": "", "fp32": "", "bf16": "" @@ -1564,19 +1748,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 49.58, + "int8": 45.27, "fp16": "", - "fp32": 26.72, - "bf16": "" + "fp32": 18.84, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1586,7 +1774,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 23.76, "fp16": "", "fp32": "", "bf16": "" @@ -1598,19 +1786,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 85.59, + "int8": 0.76, "fp16": "", - "fp32": 51.66, - "bf16": "" + "fp32": 0.31, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1620,7 +1812,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1317.43, "fp16": "", "fp32": "", "bf16": "" @@ -1632,19 +1824,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 48.88, + "int8": 98.2, "fp16": "", - "fp32": 25.61, - "bf16": "" + "fp32": 45.36, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1654,7 +1850,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 10.52, "fp16": "", "fp32": "", "bf16": "" @@ -1666,10 +1862,11 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ @@ -1677,8 +1874,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 13.77, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1700,19 +1900,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 61.03, + "int8": 21.58, "fp16": "", - "fp32": 32.25, - "bf16": "" + "fp32": 11.78, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": 
"FPS", @@ -1722,7 +1926,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 47.39, "fp16": "", "fp32": "", "bf16": "" @@ -1734,19 +1938,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", - "fp32": 46.74, - "bf16": "" + "int8": 40.0, + "fp16": 34.31, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1756,7 +1964,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 34.13, "fp16": "", "fp32": "", "bf16": "" @@ -1768,19 +1976,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 240.55, - "fp16": "", - "fp32": 157.84, - "bf16": "" + "int8": 414.66, + "fp16": 324.8, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1790,7 +2002,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 3.49, "fp16": "", "fp32": "", "bf16": "" @@ -1802,19 +2014,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Atom® x7425E iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", - "fp32": 73.76, - "bf16": "" + "int8": 106.34, + "fp16": 64.69, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1824,7 +2040,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 10.56, "fp16": "", "fp32": "", "bf16": "" @@ -1836,19 +2052,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Atom® x7425E iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.18, - "fp16": "", - "fp32": 0.38, - "bf16": "" + "int8": 2.16, + "fp16": 1.32, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1858,7 +2078,7 @@ "Precisions": [ { "int4": "", - "int8": 853.85, + "int8": 472.59, "fp16": "", "fp32": "", "bf16": "" @@ -1870,19 +2090,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Atom® x7425E iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { 
"throughput": { "Precisions": [ { "int4": "", - "int8": 2.0, - "fp16": "", - "fp32": 1.27, - "bf16": "" + "int8": 211.07, + "fp16": 137.13, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1892,7 +2116,7 @@ "Precisions": [ { "int4": "", - "int8": 510.0, + "int8": 6.2, "fp16": "", "fp32": "", "bf16": "" @@ -1904,19 +2128,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Atom® x7425E iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.05, - "fp16": "", - "fp32": 1.87, - "bf16": "" + "int8": 60.92, + "fp16": 44.64, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1926,7 +2154,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 18.51, "fp16": "", "fp32": "", "bf16": "" @@ -1938,19 +2166,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 45.34, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 33.5, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -1960,7 +2192,7 @@ "Precisions": [ { "int4": "", - "int8": 424.59, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -1972,19 +2204,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 57.78, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 48.75, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2006,19 +2242,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.56, "fp16": "", - "fp32": 3.91, - "bf16": "" + "fp32": 0.51, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2028,7 +2268,7 @@ "Precisions": [ { "int4": "", - "int8": 128.89, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2040,19 +2280,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E 
CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.01, + "int8": 525.47, "fp16": "", - "fp32": 1.89, - "bf16": "" + "fp32": 392.65, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2062,7 +2306,7 @@ "Precisions": [ { "int4": "", - "int8": 324.87, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2074,19 +2318,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.03, + "int8": 197.41, "fp16": "", - "fp32": 1.64, - "bf16": "" + "fp32": 115.71, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2096,7 +2344,7 @@ "Precisions": [ { "int4": "", - "int8": 198.73, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2108,19 +2356,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.69, + "int8": 5.38, "fp16": "", - "fp32": 1.2, - "bf16": "" + "fp32": 2.71, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2130,7 +2382,7 @@ "Precisions": [ { "int4": "", - "int8": 268.0, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2142,19 +2394,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 7.52, + "int8": 316.13, "fp16": "", - "fp32": 2.85, - "bf16": "" + "fp32": 194.29, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2176,10 +2432,11 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ @@ -2187,8 +2444,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 80.2, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2198,7 +2458,7 @@ "Precisions": [ { "int4": "", - "int8": 360.08, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2210,19 +2470,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": 
"bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.26, + "int8": 114.67, "fp16": "", - "fp32": 2.05, - "bf16": "" + "fp32": 78.26, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2244,19 +2508,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 11.77, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 4.32, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2266,7 +2534,7 @@ "Precisions": [ { "int4": "", - "int8": 329.86, + "int8": 87.73, "fp16": "", "fp32": "", "bf16": "" @@ -2278,19 +2546,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.14, + "int8": 18.94, "fp16": "", - "fp32": 1.9, - "bf16": "" + "fp32": 11.49, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2300,7 +2572,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 55.76, "fp16": "", "fp32": "", "bf16": "" @@ -2312,19 +2584,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 16.05, + "int8": 0.17, "fp16": "", - "fp32": 5.99, - "bf16": "" + "fp32": 0.04, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2334,7 +2610,7 @@ "Precisions": [ { "int4": "", - "int8": 94.97, + "int8": 5772.15, "fp16": "", "fp32": "", "bf16": "" @@ -2346,19 +2622,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.17, + "int8": 301.05, "fp16": "", - "fp32": 3.33, - "bf16": "" + "fp32": 132.91, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2368,7 +2648,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 3.6, "fp16": "", 
"fp32": "", "bf16": "" @@ -2380,19 +2660,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 51.66, "fp16": "", - "fp32": 1.27, - "bf16": "" + "fp32": 14.45, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2402,7 +2686,7 @@ "Precisions": [ { "int4": "", - "int8": 513.28, + "int8": 19.8, "fp16": "", "fp32": "", "bf16": "" @@ -2414,19 +2698,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 21.82, + "int8": 0.89, "fp16": "", - "fp32": 6.98, - "bf16": "" + "fp32": 0.23, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2436,7 +2724,7 @@ "Precisions": [ { "int4": "", - "int8": 102.33, + "int8": 1118.71, "fp16": "", "fp32": "", "bf16": "" @@ -2448,19 +2736,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 51.17, + "int8": 115.03, "fp16": "", - "fp32": 18.46, - "bf16": "" + "fp32": 36.99, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2470,7 +2762,7 @@ "Precisions": [ { "int4": "", - "int8": 48.78, + "int8": 9.06, "fp16": "", "fp32": "", "bf16": "" @@ -2482,19 +2774,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 78.31, + "int8": "", "fp16": "", - "fp32": 29.72, - "bf16": "" + "fp32": 11.94, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2504,7 +2800,7 @@ "Precisions": [ { "int4": "", - "int8": 38.13, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2516,19 +2812,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 244.06, + "int8": 25.97, "fp16": 
"", - "fp32": 41.97, - "bf16": 211.62 + "fp32": 9.66, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2538,10 +2838,10 @@ "Precisions": [ { "int4": "", - "int8": 25.21, + "int8": 40.21, "fp16": "", "fp32": "", - "bf16": 27.63 + "bf16": "" } ], "Unit": "ms", @@ -2550,19 +2850,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", + "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 305.38, - "fp16": "", - "fp32": 55.37, - "bf16": 289.16 + "int8": 43.69, + "fp16": 33.8, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2572,10 +2876,10 @@ "Precisions": [ { "int4": "", - "int8": 19.29, + "int8": 26.56, "fp16": "", "fp32": "", - "bf16": 25.79 + "bf16": "" } ], "Unit": "ms", @@ -2584,19 +2888,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 38.28, - "fp16": "", - "fp32": 13.45, - "bf16": "" + "int8": 73.58, + "fp16": 58.53, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2606,7 +2914,7 @@ "Precisions": [ { "int4": "", - "int8": 69.24, + "int8": 25.45, "fp16": "", "fp32": "", "bf16": "" @@ -2618,19 +2926,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 38.81, - "fp16": "", - "fp32": 15.09, - "bf16": "" + "int8": 0.48, + "fp16": 0.52, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2640,7 +2952,7 @@ "Precisions": [ { "int4": "", - "int8": 62.64, + "int8": 2110.65, "fp16": "", "fp32": "", "bf16": "" @@ -2652,19 +2964,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 7.34, - "fp16": "", - "fp32": 2.49, - "bf16": "" + "int8": 671.35, + "fp16": 504.8, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2674,7 +2990,7 @@ "Precisions": [ { "int4": "", - "int8": 194.83, + "int8": 2.72, "fp16": "", "fp32": "", "bf16": "" @@ -2686,19 +3002,23 @@ } }, { - 
"Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.5, - "fp16": "", - "fp32": 1.72, - "bf16": "" + "int8": 203.17, + "fp16": 118.59, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2708,7 +3028,7 @@ "Precisions": [ { "int4": "", - "int8": 246.14, + "int8": 6.3, "fp16": "", "fp32": "", "bf16": "" @@ -2720,19 +3040,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 7.21, - "fp16": "", - "fp32": 2.31, - "bf16": "" + "int8": 5.09, + "fp16": 2.78, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2742,7 +3066,7 @@ "Precisions": [ { "int4": "", - "int8": 195.21, + "int8": 210.41, "fp16": "", "fp32": "", "bf16": "" @@ -2754,19 +3078,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 121.51, - "fp16": 110.4, + "int8": 396.07, + "fp16": 221.18, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2776,7 +3104,7 @@ "Precisions": [ { "int4": "", - "int8": 9.1, + "int8": 4.3, "fp16": "", "fp32": "", "bf16": "" @@ -2788,19 +3116,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Celeron® 6305E iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 121.77, + "fp16": 81.6, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2810,7 +3142,7 @@ "Precisions": [ { "int4": "", - "int8": 12.35, + "int8": 10.34, "fp16": "", "fp32": "", "bf16": "" @@ -2822,19 +3154,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 30.58, - 
"fp16": 21.73, - "fp32": "", - "bf16": "" + "int8": 243.99, + "fp16": "", + "fp32": 157.96, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2844,7 +3180,7 @@ "Precisions": [ { "int4": "", - "int8": 32.8, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2856,19 +3192,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 10.38, - "fp16": 6.72, - "fp32": "", - "bf16": "" + "int8": 189.52, + "fp16": "", + "fp32": 154.61, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2878,7 +3218,7 @@ "Precisions": [ { "int4": "", - "int8": 97.03, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2890,19 +3230,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 10.38, - "fp16": 6.74, - "fp32": "", - "bf16": "" + "int8": 2.45, + "fp16": "", + "fp32": 1.19, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2912,7 +3256,7 @@ "Precisions": [ { "int4": "", - "int8": 97.28, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2924,19 +3268,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 32.32, + "int8": 4485.9, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 2415.8, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2946,7 +3294,7 @@ "Precisions": [ { "int4": "", - "int8": 43.5, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2958,19 +3306,23 @@ } }, { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.91, + "int8": 1097.16, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 475.61, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -2980,7 +3332,7 @@ "Precisions": [ { "int4": "", - "int8": 329.17, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -2992,19 +3344,23 @@ } }, { - "Platform": "Intel® Atom® X6425E iGPU-only", - "Model": 
"bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.4, + "int8": 18.81, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 9.71, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3014,7 +3370,7 @@ "Precisions": [ { "int4": "", - "int8": 720.67, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -3026,19 +3382,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.47, - "fp16": 3.64, - "fp32": "", - "bf16": "" + "int8": 1120.99, + "fp16": "", + "fp32": 624.14, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3048,7 +3408,7 @@ "Precisions": [ { "int4": "", - "int8": 192.94, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -3060,19 +3420,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.41, - "fp16": 4.03, - "fp32": "", - "bf16": "" + "int8": 374.74, + "fp16": "", + "fp32": 236.96, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3082,7 +3446,7 @@ "Precisions": [ { "int4": "", - "int8": 173.57, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -3094,19 +3458,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.17, - "fp16": 4.28, - "fp32": "", - "bf16": "" + "int8": 76.15, + "fp16": "", + "fp32": 30.19, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3116,7 +3484,7 @@ "Precisions": [ { "int4": "", - "int8": 137.83, + "int8": 25.21, "fp16": "", "fp32": "", "bf16": "" @@ -3128,19 +3496,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.85, - 
"fp16": 6.59, - "fp32": "", - "bf16": "" + "int8": 97.68, + "fp16": "", + "fp32": 66.63, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3150,7 +3522,7 @@ "Precisions": [ { "int4": "", - "int8": 106.93, + "int8": 22.16, "fp16": "", "fp32": "", "bf16": "" @@ -3162,19 +3534,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.41, - "fp16": 4.47, - "fp32": "", - "bf16": "" + "int8": 1.2, + "fp16": "", + "fp32": 0.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3184,7 +3560,7 @@ "Precisions": [ { "int4": "", - "int8": 179.75, + "int8": 1025.52, "fp16": "", "fp32": "", "bf16": "" @@ -3196,19 +3572,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 11.76, - "fp16": 8.39, - "fp32": "", - "bf16": "" + "int8": 1969.75, + "fp16": "", + "fp32": 815.83, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3218,7 +3598,7 @@ "Precisions": [ { "int4": "", - "int8": 87.43, + "int8": 1.36, "fp16": "", "fp32": "", "bf16": "" @@ -3230,19 +3610,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.08, - "fp16": 5.89, - "fp32": "", - "bf16": "" + "int8": 390.17, + "fp16": "", + "fp32": 94.82, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3252,7 +3636,7 @@ "Precisions": [ { "int4": "", - "int8": 110.63, + "int8": 6.23, "fp16": "", "fp32": "", "bf16": "" @@ -3264,19 +3648,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 6.38, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 1.6, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3286,7 +3674,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 209.14, "fp16": "", "fp32": "", "bf16": "" @@ -3298,19 +3686,23 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": 
"bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.39, + "int8": 685.79, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 242.78, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3320,7 +3712,7 @@ "Precisions": [ { "int4": "", - "int8": 408.09, + "int8": 2.71, "fp16": "", "fp32": "", "bf16": "" @@ -3332,19 +3724,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 23.39, - "fp16": 14.13, - "fp32": "", - "bf16": "" + "int8": 166.55, + "fp16": "", + "fp32": 64.31, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3354,7 +3750,7 @@ "Precisions": [ { "int4": "", - "int8": 39.66, + "int8": 12.75, "fp16": "", "fp32": "", "bf16": "" @@ -3366,19 +3762,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 88.41, + "fp16": 74.04, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3388,7 +3788,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 12.15, "fp16": "", "fp32": "", "bf16": "" @@ -3400,19 +3800,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "efficientdet-d0", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 54.9, - "fp16": 49.62, + "int8": 37.81, + "fp16": 34.74, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3422,7 +3826,7 @@ "Precisions": [ { "int4": "", - "int8": 20.12, + "int8": 27.47, "fp16": "", "fp32": "", "bf16": "" @@ -3434,19 +3838,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.85, + "int8": "", 
"fp16": "", - "fp32": 3.73, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": 0.27, + "token_fp16": 2.55 } ], "Unit": "FPS", @@ -3456,8 +3864,8 @@ "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 3688.24, + "fp16": 390.94, "fp32": "", "bf16": "" } @@ -3468,19 +3876,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", - "fp32": 1.5, - "bf16": "" + "int8": 1966.11, + "fp16": 1346.18, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3490,7 +3902,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.79, "fp16": "", "fp32": "", "bf16": "" @@ -3502,19 +3914,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.12, + "int8": "", "fp16": "", - "fp32": 2.56, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 3.7, + "token_int8": 0.49, + "token_fp16": 3.91 } ], "Unit": "FPS", @@ -3523,9 +3939,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 269.82, + "int8": 2003.58, + "fp16": 255.57, "fp32": "", "bf16": "" } @@ -3536,19 +3952,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 9.46, - "fp16": "", - "fp32": 5.5, - "bf16": "" + "int8": 771.23, + "fp16": 382.83, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3558,7 +3978,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.58, "fp16": "", "fp32": "", "bf16": "" @@ -3570,19 +3990,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.05, - "fp16": "", - "fp32": 2.57, - "bf16": "" + "int8": 705.76, + "fp16": 453.35, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3592,7 +4016,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.67, "fp16": "", "fp32": "", "bf16": "" @@ -3604,19 +4028,23 @@ } }, { - "Platform": "Intel® 
Core™ i7-12700H CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", - "fp32": 5.4, - "bf16": "" + "int8": 126.18, + "fp16": 129.18, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3626,7 +4054,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 8.71, "fp16": "", "fp32": "", "bf16": "" @@ -3638,19 +4066,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.41, - "fp16": "", - "fp32": 3.19, - "bf16": "" + "int8": 164.18, + "fp16": 107.12, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3672,19 +4104,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.01, - "fp16": "", - "fp32": 4.49, - "bf16": "" + "int8": 195.27, + "fp16": 164.33, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3706,19 +4142,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "gemma-2-9b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 22.64, + "int8": "", "fp16": "", - "fp32": 12.18, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 8.94, + "token_int8": "", + "token_fp16": 0.94 } ], "Unit": "FPS", @@ -3727,9 +4167,9 @@ "latency": { "Precisions": [ { - "int4": "", + "int4": 111.74, "int8": "", - "fp16": "", + "fp16": 1056.4, "fp32": "", "bf16": "" } @@ -3740,19 +4180,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 12.39, + "int8": "", "fp16": "", - "fp32": 6.33, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 10.82, + "token_int8": 6.3, + "token_fp16": 1.1 } ], "Unit": "FPS", @@ 
-3761,9 +4205,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 92.41, + "int8": 158.68, + "fp16": 906.89, "fp32": "", "bf16": "" } @@ -3774,19 +4218,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 14.28, + "int8": "", "fp16": "", - "fp32": 11.21, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 14.62, + "token_int8": 8.53, + "token_fp16": "" } ], "Unit": "FPS", @@ -3795,8 +4243,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 71.92, + "int4": 68.39, + "int8": 117.1, "fp16": "", "fp32": "", "bf16": "" @@ -3808,19 +4256,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "llama-3-8b", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 7.25, + "int8": "", "fp16": "", - "fp32": 4.96, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 14.82, + "token_int8": 7.84, + "token_fp16": 4.04 } ], "Unit": "FPS", @@ -3829,9 +4281,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 139.91, - "fp16": "", + "int4": 67.44, + "int8": 127.51, + "fp16": 247.29, "fp32": "", "bf16": "" } @@ -3842,19 +4294,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 18.99, + "int8": "", "fp16": "", - "fp32": 11.5, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 26.17, + "token_int8": 20.38, + "token_fp16": 10.76 } ], "Unit": "FPS", @@ -3863,9 +4319,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 55.68, - "fp16": "", + "int4": 38.21, + "int8": 49.06, + "fp16": 92.92, "fp32": "", "bf16": "" } @@ -3876,19 +4332,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 37.48, - "fp16": "", - "fp32": 27.9, - "bf16": "" + "int8": 2.35, + "fp16": 1.58, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3898,7 +4358,7 @@ "Precisions": [ { "int4": "", - "int8": 27.49, + "int8": 421.72, "fp16": "", "fp32": "", "bf16": "" @@ -3910,19 +4370,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": 
"mistral-7b-v0.1", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 59.12, + "int8": "", "fp16": "", - "fp32": 32.99, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 15.03, + "token_int8": 8.94, + "token_fp16": "" } ], "Unit": "FPS", @@ -3931,8 +4395,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 20.56, + "int4": 66.52, + "int8": 111.8, "fp16": "", "fp32": "", "bf16": "" @@ -3944,19 +4408,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 44.97, - "fp16": "", - "fp32": 24.31, - "bf16": "" + "int8": 1293.98, + "fp16": 1371.59, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -3966,7 +4434,7 @@ "Precisions": [ { "int4": "", - "int8": 32.17, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -3978,19 +4446,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 56.87, + "int8": "", "fp16": "", - "fp32": 36.06, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 23.61, + "token_int8": 18.01, + "token_fp16": 9.36 } ], "Unit": "FPS", @@ -3999,9 +4471,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 28.54, - "fp16": "", + "int4": 42.34, + "int8": 55.51, + "fp16": 106.82, "fp32": "", "bf16": "" } @@ -4012,19 +4484,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "qwen2-7b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 152.17, + "int8": "", "fp16": "", - "fp32": 93.19, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 16.68, + "token_int8": 9.5, + "token_fp16": "" } ], "Unit": "FPS", @@ -4033,8 +4509,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 9.14, + "int4": 59.95, + "int8": 105.26, "fp16": "", "fp32": "", "bf16": "" @@ -4046,19 +4522,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 59.7, - "fp16": "", - "fp32": 42.45, - "bf16": "" + "int8": 563.96, + "fp16": 416.13, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4068,7 +4548,7 @@ "Precisions": [ { "int4": "", - 
"int8": 17.62, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4080,19 +4560,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 75.02, - "fp16": "", - "fp32": 41.47, - "bf16": "" + "int8": 21.26, + "fp16": 12.84, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4102,7 +4586,7 @@ "Precisions": [ { "int4": "", - "int8": 14.65, + "int8": 47.61, "fp16": "", "fp32": "", "bf16": "" @@ -4114,19 +4598,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 52.75, - "fp16": "", - "fp32": 21.83, - "bf16": "" + "int8": 1030.66, + "fp16": 811.13, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4136,7 +4624,7 @@ "Precisions": [ { "int4": "", - "int8": 20.19, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4148,19 +4636,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 112.1, - "fp16": "", - "fp32": 61.82, - "bf16": "" + "int8": 403.44, + "fp16": 306.22, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4170,7 +4662,7 @@ "Precisions": [ { "int4": "", - "int8": 11.47, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4182,19 +4674,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 62.08, + "int8": 223.99, "fp16": "", - "fp32": 39.8, - "bf16": "" + "fp32": 189.97, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4204,7 +4700,7 @@ "Precisions": [ { "int4": "", - "int8": 26.84, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4216,19 +4712,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 83.44, + "int8": 174.87, "fp16": "", - "fp32": 43.76, - 
"bf16": "" + "fp32": 149.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4238,7 +4738,7 @@ "Precisions": [ { "int4": "", - "int8": 16.75, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4250,19 +4750,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 52.39, + "int8": 7.24, "fp16": "", - "fp32": 38.12, - "bf16": "" + "fp32": 3.52, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4272,7 +4776,7 @@ "Precisions": [ { "int4": "", - "int8": 17.79, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4284,19 +4788,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 65.23, + "int8": 4846.91, "fp16": "", - "fp32": 41.09, - "bf16": "" + "fp32": 2888.98, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4306,7 +4814,7 @@ "Precisions": [ { "int4": "", - "int8": 18.79, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4318,19 +4826,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 220.01, + "int8": 1975.45, "fp16": "", - "fp32": 126.72, - "bf16": "" + "fp32": 922.35, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4340,7 +4852,7 @@ "Precisions": [ { "int4": "", - "int8": 7.33, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4352,19 +4864,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 15.45, + "int8": "", "fp16": "", - "fp32": 12.76, - "bf16": "" + "fp32": 20.97, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4374,7 +4890,7 @@ "Precisions": [ { "int4": "", - "int8": 66.3, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4386,19 +4902,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client 
Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 97.27, + "int8": "", "fp16": "", - "fp32": 48.02, - "bf16": "" + "fp32": 585.46, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4408,7 +4928,7 @@ "Precisions": [ { "int4": "", - "int8": 13.26, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4420,19 +4940,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 35.92, + "int8": 343.07, "fp16": "", - "fp32": 30.28, - "bf16": "" + "fp32": 274.85, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4442,7 +4966,7 @@ "Precisions": [ { "int4": "", - "int8": 28.54, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -4454,19 +4978,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 271.61, + "int8": 44.06, "fp16": "", - "fp32": 166.53, - "bf16": "" + "fp32": 16.03, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4476,7 +5004,7 @@ "Precisions": [ { "int4": "", - "int8": 11.3, + "int8": 41.27, "fp16": "", "fp32": "", "bf16": "" @@ -4488,19 +5016,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 576.89, + "int8": 53.32, "fp16": "", - "fp32": 325.0, - "bf16": "" + "fp32": 38.06, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4510,7 +5042,7 @@ "Precisions": [ { "int4": "", - "int8": 7.03, + "int8": 28.44, "fp16": "", "fp32": "", "bf16": "" @@ -4522,19 +5054,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1014.6, + "int8": 0.65, "fp16": "", - "fp32": 582.63, - "bf16": "" + "fp32": 0.16, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4544,7 +5080,7 @@ "Precisions": [ { "int4": "", - "int8": 4.2, + "int8": 2598.78, "fp16": "", "fp32": "", "bf16": "" @@ -4556,19 +5092,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 
processor 268V CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1454.4, + "int8": 917.84, "fp16": "", - "fp32": 872.52, - "bf16": 1037.91 + "fp32": 490.87, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4578,10 +5118,10 @@ "Precisions": [ { "int4": "", - "int8": 4.6, + "int8": 2.07, "fp16": "", "fp32": "", - "bf16": 4.96 + "bf16": "" } ], "Unit": "ms", @@ -4590,19 +5130,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1736.65, + "int8": 194.09, "fp16": "", - "fp32": 1132.21, - "bf16": 1408.76 + "fp32": 52.09, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4612,10 +5156,10 @@ "Precisions": [ { "int4": "", - "int8": 4.6, + "int8": 9.58, "fp16": "", "fp32": "", - "bf16": 4.68 + "bf16": "" } ], "Unit": "ms", @@ -4624,19 +5168,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 413.78, + "int8": 3.52, "fp16": "", - "fp32": 257.07, - "bf16": "" + "fp32": 0.87, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4646,7 +5194,7 @@ "Precisions": [ { "int4": "", - "int8": 8.54, + "int8": 493.86, "fp16": "", "fp32": "", "bf16": "" @@ -4658,19 +5206,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 497.16, + "int8": 380.37, "fp16": "", - "fp32": 303.25, - "bf16": "" + "fp32": 135.96, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4680,7 +5232,7 @@ "Precisions": [ { "int4": "", - "int8": 5.86, + "int8": 4.64, "fp16": "", "fp32": "", "bf16": "" @@ -4692,19 +5244,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 97.68, + "int8": 80.52, "fp16": "", - "fp32": 67.18, - "bf16": "" + "fp32": 34.88, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4714,7 +5270,7 @@ "Precisions": [ { "int4": "", - "int8": 21.46, + "int8": 
20.34, "fp16": "", "fp32": "", "bf16": "" @@ -4726,19 +5282,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 70.93, - "fp16": "", - "fp32": 46.39, - "bf16": "" + "int8": 265.97, + "fp16": 198.16, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4748,7 +5308,7 @@ "Precisions": [ { "int4": "", - "int8": 25.46, + "int8": 5.25, "fp16": "", "fp32": "", "bf16": "" @@ -4760,19 +5320,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 92.57, - "fp16": "", - "fp32": 61.6, - "bf16": "" + "int8": 13.69, + "fp16": 13.65, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4782,7 +5346,7 @@ "Precisions": [ { "int4": "", - "int8": 13.58, + "int8": 119.56, "fp16": "", "fp32": "", "bf16": "" @@ -4794,19 +5358,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 456.95, - "fp16": 402.8, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": 0.24, + "token_fp16": 4.4 } ], "Unit": "FPS", @@ -4816,8 +5384,8 @@ "Precisions": [ { "int4": "", - "int8": 2.93, - "fp16": "", + "int8": 4094.9, + "fp16": 226.87, "fp32": "", "bf16": "" } @@ -4828,19 +5396,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 3799.36, + "fp16": 3178.95, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4850,7 +5422,7 @@ "Precisions": [ { "int4": "", - "int8": 4.8, + "int8": 0.46, "fp16": "", "fp32": "", "bf16": "" @@ -4862,19 +5434,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 147.83, - "fp16": 120.48, + "int8": "", + 
"fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 3.31, + "token_int8": 0.72, + "token_fp16": 6.86 } ], "Unit": "FPS", @@ -4883,9 +5459,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 9.67, - "fp16": "", + "int4": 301.49, + "int8": 1378.29, + "fp16": 145.76, "fp32": "", "bf16": "" } @@ -4896,19 +5472,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 37.71, - "fp16": 34.85, + "int8": 2161.26, + "fp16": 948.32, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4918,7 +5498,7 @@ "Precisions": [ { "int4": "", - "int8": 27.94, + "int8": 0.79, "fp16": "", "fp32": "", "bf16": "" @@ -4930,19 +5510,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 37.99, - "fp16": 34.96, + "int8": 230.18, + "fp16": 192.78, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4952,7 +5536,7 @@ "Precisions": [ { "int4": "", - "int8": 27.51, + "int8": 8.29, "fp16": "", "fp32": "", "bf16": "" @@ -4964,19 +5548,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V NPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 13.38, - "fp16": 13.66, + "int8": 401.12, + "fp16": 497.56, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -4986,7 +5574,7 @@ "Precisions": [ { "int4": "", - "int8": 124.82, + "int8": 3.97, "fp16": "", "fp32": "", "bf16": "" @@ -4998,19 +5586,23 @@ } }, { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 40.02, - "fp16": 34.39, + "int8": 225.83, + "fp16": 298.39, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5020,7 +5612,7 @@ "Precisions": [ { "int4": "", - "int8": 34.08, + "int8": 3.93, "fp16": "", "fp32": "", "bf16": "" @@ -5032,19 +5624,23 @@ } }, { - "Platform": "Intel® Atom® X6425E iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + 
"featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 21.7, - "fp16": 25.09, + "int8": 114.57, + "fp16": 121.87, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5054,7 +5650,7 @@ "Precisions": [ { "int4": "", - "int8": 63.02, + "int8": 10.22, "fp16": "", "fp32": "", "bf16": "" @@ -5066,19 +5662,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "gemma-2-9b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 73.6, - "fp16": 58.55, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 14.49, + "token_int8": 8.34, + "token_fp16": 0.59 } ], "Unit": "FPS", @@ -5087,9 +5687,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 24.96, - "fp16": "", + "int4": 68.99, + "int8": 119.77, + "fp16": 1691.52, "fp32": "", "bf16": "" } @@ -5100,19 +5700,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 68.83, - "fp16": 51.74, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 17.63, + "token_int8": 9.8, + "token_fp16": 0.71 } ], "Unit": "FPS", @@ -5121,9 +5725,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 19.37, - "fp16": "", + "int4": 56.72, + "int8": 102.04, + "fp16": 1402.74, "fp32": "", "bf16": "" } @@ -5134,19 +5738,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 80.63, - "fp16": 60.04, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 18.48, + "token_int8": 11.87, + "token_fp16": 6.44 } ], "Unit": "FPS", @@ -5155,9 +5763,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 14.48, - "fp16": "", + "int4": 54.09, + "int8": 84.18, + "fp16": 155.17, "fp32": "", "bf16": "" } @@ -5168,19 +5776,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "llama-3-8b", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 91.41, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 20.41, + "token_int8": 11.07, + "token_fp16": 5.81 } ], "Unit": "FPS", @@ -5189,9 +5801,9 @@ "latency": { "Precisions": [ { - "int4": "", - 
"int8": "", - "fp16": "", + "int4": 48.98, + "int8": 90.29, + "fp16": 171.98, "fp32": "", "bf16": "" } @@ -5202,19 +5814,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 56.47, - "fp16": 42.15, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 36.58, + "token_int8": 23.94, + "token_fp16": 12.86 } ], "Unit": "FPS", @@ -5223,9 +5839,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 24.14, - "fp16": "", + "int4": 27.33, + "int8": 41.77, + "fp16": 77.71, "fp32": "", "bf16": "" } @@ -5236,19 +5852,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 128.8, - "fp16": 97.86, + "int8": 10.4, + "fp16": 5.7, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5258,7 +5878,7 @@ "Precisions": [ { "int4": "", - "int8": 12.63, + "int8": 109.21, "fp16": "", "fp32": "", "bf16": "" @@ -5270,19 +5890,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "mistral-7b-v0.1", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 100.05, - "fp16": 74.24, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 20.06, + "token_int8": 11.6, + "token_fp16": 6.05 } ], "Unit": "FPS", @@ -5291,9 +5915,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 13.33, - "fp16": "", + "int4": 49.85, + "int8": 86.18, + "fp16": 165.15, "fp32": "", "bf16": "" } @@ -5304,19 +5928,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 1007.75, + "fp16": 862.8, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5326,7 +5954,7 @@ "Precisions": [ { "int4": "", - "int8": 14.0, + "int8": 1.2, "fp16": "", "fp32": "", "bf16": "" @@ -5338,19 +5966,23 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": 
"Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 33.67, - "fp16": 30.88, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 31.27, + "token_int8": 20.55, + "token_fp16": 11.04 } ], "Unit": "FPS", @@ -5359,9 +5991,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 38.07, - "fp16": "", + "int4": 31.97, + "int8": 48.66, + "fp16": 90.57, "fp32": "", "bf16": "" } @@ -5372,19 +6004,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "qwen2-7b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 201.37, - "fp16": 162.08, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 20.99, + "token_int8": 12.69, + "token_fp16": 6.07 } ], "Unit": "FPS", @@ -5393,9 +6029,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 6.86, - "fp16": "", + "int4": 47.64, + "int8": 78.78, + "fp16": 164.54, "fp32": "", "bf16": "" } @@ -5406,19 +6042,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 169.74, - "fp16": 143.49, + "int8": 830.46, + "fp16": 585.38, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5428,7 +6068,7 @@ "Precisions": [ { "int4": "", - "int8": 7.91, + "int8": 1.23, "fp16": "", "fp32": "", "bf16": "" @@ -5440,19 +6080,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 163.17, - "fp16": 185.29, + "int8": 57.99, + "fp16": 32.18, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5462,7 +6106,7 @@ "Precisions": [ { "int4": "", - "int8": 8.23, + "int8": 26.21, "fp16": "", "fp32": "", "bf16": "" @@ -5474,19 +6118,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 39.25, - "fp16": "", - "fp32": 29.57, - "bf16": "" + "int8": 485.85, + "fp16": 555.71, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5496,7 +6144,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.75, "fp16": "", "fp32": "", "bf16": "" @@ -5508,19 
+6156,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 22.93, - "fp16": "", - "fp32": 23.89, - "bf16": "" + "int8": 362.75, + "fp16": 375.06, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5530,7 +6182,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 3.3, "fp16": "", "fp32": "", "bf16": "" @@ -5542,19 +6194,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 57.64, + "int8": 34.21, "fp16": "", - "fp32": 48.78, - "bf16": "" + "fp32": 15.71, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5578,17 +6234,21 @@ { "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 47.95, "fp16": "", - "fp32": 30.78, - "bf16": "" + "fp32": 29.38, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5610,19 +6270,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 74.05, + "int8": 0.5, "fp16": "", - "fp32": 48.82, - "bf16": "" + "fp32": 0.18, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5644,19 +6308,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 100.6, + "int8": 742.67, "fp16": "", - "fp32": 65.57, - "bf16": "" + "fp32": 331.98, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5678,19 +6346,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": 
"", - "int8": 57.55, + "int8": 162.84, "fp16": "", - "fp32": 28.28, - "bf16": "" + "fp32": 51.66, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5712,10 +6384,11 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ @@ -5723,8 +6396,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 75.36, - "bf16": "" + "fp32": 1.03, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5746,19 +6422,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 88.7, + "int8": 328.29, "fp16": "", - "fp32": 59.09, - "bf16": "" + "fp32": 115.41, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5780,10 +6460,11 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ @@ -5791,8 +6472,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 59.29, - "bf16": "" + "fp32": 41.68, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5814,19 +6498,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 37.02, + "int8": 79.4, "fp16": "", - "fp32": 28.15, - "bf16": "" + "fp32": 35.44, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5848,19 +6536,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 191.27, + "int8": 31.55, "fp16": "", - "fp32": 150.88, - "bf16": "" + "fp32": 12.38, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5870,7 +6562,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 46.55, "fp16": "", "fp32": "", "bf16": "" @@ -5882,19 +6574,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", + "Platform": "Intel® Core™ i5-1235U Processor 
CPU-only", "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 123.24, + "int8": 43.39, "fp16": "", - "fp32": 97.13, - "bf16": "" + "fp32": 23.14, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5904,7 +6600,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 33.09, "fp16": "", "fp32": "", "bf16": "" @@ -5916,19 +6612,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU-only", "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.17, + "int8": 0.45, "fp16": "", - "fp32": 0.04, - "bf16": "" + "fp32": 0.12, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5938,7 +6638,7 @@ "Precisions": [ { "int4": "", - "int8": 5769.81, + "int8": 2440.72, "fp16": "", "fp32": "", "bf16": "" @@ -5950,19 +6650,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.3, + "int8": 789.02, "fp16": "", - "fp32": 0.14, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -5972,7 +6676,7 @@ "Precisions": [ { "int4": "", - "int8": 3268.87, + "int8": 1.9, "fp16": "", "fp32": "", "bf16": "" @@ -5984,19 +6688,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.44, + "int8": 147.74, "fp16": "", - "fp32": 0.18, - "bf16": "" + "fp32": 38.84, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6006,7 +6714,7 @@ "Precisions": [ { "int4": "", - "int8": 2406.55, + "int8": 11.4, "fp16": "", "fp32": "", "bf16": "" @@ -6019,18 +6727,22 @@ }, { "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.47, + "int8": 2.66, "fp16": "", - "fp32": 0.12, - "bf16": "" + "fp32": 0.77, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6040,7 +6752,7 @@ "Precisions": [ { "int4": "", - "int8": 2337.51, + "int8": 511.09, "fp16": "", "fp32": "", "bf16": "" @@ -6052,19 +6764,23 @@ } }, { - "Platform": "Intel® Core™ 
i5-1335U Processor CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.58, + "int8": 313.17, "fp16": "", - "fp32": 0.16, - "bf16": "" + "fp32": 95.81, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6074,7 +6790,7 @@ "Precisions": [ { "int4": "", - "int8": 2064.41, + "int8": 4.81, "fp16": "", "fp32": "", "bf16": "" @@ -6086,19 +6802,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.71, + "int8": "", "fp16": "", - "fp32": 0.5, - "bf16": "" + "fp32": 31.84, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6108,7 +6828,7 @@ "Precisions": [ { "int4": "", - "int8": 708.93, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6120,19 +6840,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.48, + "int8": 67.43, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 26.68, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6142,7 +6866,7 @@ "Precisions": [ { "int4": "", - "int8": 2022.46, + "int8": 20.62, "fp16": "", "fp32": "", "bf16": "" @@ -6154,19 +6878,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.72, - "fp16": "", - "fp32": 0.19, - "bf16": "" + "int8": 46.15, + "fp16": 38.3, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6176,7 +6904,7 @@ "Precisions": [ { "int4": "", - "int8": 1351.31, + "int8": 19.82, "fp16": "", "fp32": "", "bf16": "" @@ -6188,19 +6916,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.52, - "fp16": "", - "fp32": 0.14, - "bf16": "" + "int8": 64.24, + "fp16": 50.43, + "fp32": "", + "bf16": "", + "token_int4": "", + 
"token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6210,7 +6942,7 @@ "Precisions": [ { "int4": "", - "int8": 1804.22, + "int8": 20.17, "fp16": "", "fp32": "", "bf16": "" @@ -6222,19 +6954,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.24, - "fp16": "", - "fp32": 0.35, - "bf16": "" + "int8": 0.5, + "fp16": 0.51, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6244,7 +6980,7 @@ "Precisions": [ { "int4": "", - "int8": 936.49, + "int8": 1499.27, "fp16": "", "fp32": "", "bf16": "" @@ -6256,19 +6992,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.64, - "fp16": "", - "fp32": 0.18, - "bf16": "" + "int8": 768.31, + "fp16": 485.7, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6278,7 +7018,7 @@ "Precisions": [ { "int4": "", - "int8": 1922.82, + "int8": 1.7, "fp16": "", "fp32": "", "bf16": "" @@ -6290,19 +7030,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.91, - "fp16": "", + "int8": 208.55, + "fp16": 117.84, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6312,7 +7056,7 @@ "Precisions": [ { "int4": "", - "int8": 1366.6, + "int8": 5.0, "fp16": "", "fp32": "", "bf16": "" @@ -6324,19 +7068,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.37, - "fp16": "", - "fp32": 0.15, - "bf16": "" + "int8": 5.64, + "fp16": 2.72, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6346,7 +7094,7 @@ "Precisions": [ { "int4": "", - "int8": 2090.62, + "int8": 172.69, "fp16": "", "fp32": "", "bf16": "" @@ -6358,19 +7106,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + 
"PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.47, - "fp16": "", - "fp32": 0.17, - "bf16": "" + "int8": 382.92, + "fp16": 223.39, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6380,7 +7132,7 @@ "Precisions": [ { "int4": "", - "int8": 2235.88, + "int8": 3.11, "fp16": "", "fp32": "", "bf16": "" @@ -6392,19 +7144,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.48, - "fp16": "", - "fp32": 0.71, - "bf16": "" + "int8": 126.83, + "fp16": 77.91, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6414,7 +7170,7 @@ "Precisions": [ { "int4": "", - "int8": 558.42, + "int8": 8.1, "fp16": "", "fp32": "", "bf16": "" @@ -6426,19 +7182,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.75, + "int8": 49.68, "fp16": "", - "fp32": 0.29, - "bf16": "" + "fp32": 26.85, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6448,7 +7208,7 @@ "Precisions": [ { "int4": "", - "int8": 1441.19, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6460,19 +7220,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.29, + "int8": 73.94, "fp16": "", - "fp32": 0.15, - "bf16": "" + "fp32": 48.63, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6482,7 +7246,7 @@ "Precisions": [ { "int4": "", - "int8": 3400.61, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6494,19 +7258,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.25, + "int8": 0.69, "fp16": "", - "fp32": 0.89, - "bf16": "" + "fp32": 0.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6516,7 +7284,7 @@ "Precisions": [ { "int4": "", - "int8": 650.12, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6528,19 +7296,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": 
"true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.7, + "int8": 1050.26, "fp16": "", - "fp32": 2.25, - "bf16": "" + "fp32": 535.0, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6550,7 +7322,7 @@ "Precisions": [ { "int4": "", - "int8": 251.14, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6562,19 +7334,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 14.54, + "int8": 234.19, "fp16": "", - "fp32": 3.46, - "bf16": "" + "fp32": 87.89, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6584,7 +7360,7 @@ "Precisions": [ { "int4": "", - "int8": 160.14, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6596,19 +7372,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 63.19, + "int8": 4.74, "fp16": "", - "fp32": 5.23, - "bf16": 37.95 + "fp32": 1.74, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6618,10 +7398,10 @@ "Precisions": [ { "int4": "", - "int8": 60.21, + "int8": "", "fp16": "", "fp32": "", - "bf16": 83.04 + "bf16": "" } ], "Unit": "ms", @@ -6630,19 +7410,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 75.35, + "int8": 466.65, "fp16": "", - "fp32": 6.47, - "bf16": 48.43 + "fp32": 188.83, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6652,10 +7436,10 @@ "Precisions": [ { "int4": "", - "int8": 55.97, + "int8": "", "fp16": "", "fp32": "", - "bf16": 73.23 + "bf16": "" } ], "Unit": "ms", @@ -6664,19 +7448,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.51, + "int8": "", "fp16": "", - "fp32": 1.65, - "bf16": "" + "fp32": 65.34, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": 
"" } ], "Unit": "FPS", @@ -6686,7 +7474,7 @@ "Precisions": [ { "int4": "", - "int8": 322.06, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6698,19 +7486,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { + "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 7.4, + "int8": 125.18, "fp16": "", - "fp32": 1.76, - "bf16": "" + "fp32": 58.13, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6720,7 +7512,7 @@ "Precisions": [ { "int4": "", - "int8": 286.63, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -6732,19 +7524,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.18, + "int8": 39.97, "fp16": "", - "fp32": 0.32, - "bf16": "" + "fp32": 15.97, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6754,7 +7550,7 @@ "Precisions": [ { "int4": "", - "int8": 999.99, + "int8": 40.14, "fp16": "", "fp32": "", "bf16": "" @@ -6766,19 +7562,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.9, + "int8": 56.15, "fp16": "", - "fp32": 0.21, - "bf16": "" + "fp32": 35.76, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6788,7 +7588,7 @@ "Precisions": [ { "int4": "", - "int8": 1330.81, + "int8": 28.73, "fp16": "", "fp32": "", "bf16": "" @@ -6800,19 +7600,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.12, + "int8": 0.57, "fp16": "", - "fp32": 0.27, - "bf16": "" + "fp32": 0.16, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6822,7 +7626,7 @@ "Precisions": [ { "int4": "", - "int8": 1255.21, + "int8": 2069.28, "fp16": "", "fp32": "", "bf16": "" @@ -6834,19 +7638,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", 
"Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 39.0, - "fp16": 21.24, - "fp32": "", - "bf16": "" + "int8": 951.93, + "fp16": "", + "fp32": 463.06, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6856,7 +7664,7 @@ "Precisions": [ { "int4": "", - "int8": 52.08, + "int8": 1.74, "fp16": "", "fp32": "", "bf16": "" @@ -6868,19 +7676,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 184.54, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 52.88, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6890,7 +7702,7 @@ "Precisions": [ { "int4": "", - "int8": 48.01, + "int8": 9.61, "fp16": "", "fp32": "", "bf16": "" @@ -6902,19 +7714,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.33, + "int8": 3.16, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 0.92, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6924,7 +7740,7 @@ "Precisions": [ { "int4": "", - "int8": 179.63, + "int8": 466.34, "fp16": "", "fp32": "", "bf16": "" @@ -6936,19 +7752,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.48, - "fp16": 0.52, - "fp32": "", - "bf16": "" + "int8": 383.62, + "fp16": "", + "fp32": 134.93, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6958,7 +7778,7 @@ "Precisions": [ { "int4": "", - "int8": 2109.61, + "int8": 4.16, "fp16": "", "fp32": "", "bf16": "" @@ -6970,19 +7790,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.53, - "fp16": 0.52, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 43.64, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -6992,7 +7816,7 @@ "Precisions": [ { "int4": "", - "int8": 1494.17, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7004,19 +7828,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": 
"mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.54, - "fp16": 0.58, - "fp32": "", - "bf16": "" + "int8": 91.3, + "fp16": "", + "fp32": 36.39, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7026,7 +7854,7 @@ "Precisions": [ { "int4": "", - "int8": 1486.87, + "int8": 18.15, "fp16": "", "fp32": "", "bf16": "" @@ -7038,19 +7866,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.83, - "fp16": 0.89, + "int8": 47.17, + "fp16": 39.79, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7060,7 +7892,7 @@ "Precisions": [ { "int4": "", - "int8": 1111.35, + "int8": 18.45, "fp16": "", "fp32": "", "bf16": "" @@ -7072,19 +7904,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.57, - "fp16": 0.55, + "int8": 80.6, + "fp16": 59.92, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7094,7 +7930,7 @@ "Precisions": [ { "int4": "", - "int8": 1643.33, + "int8": 14.61, "fp16": "", "fp32": "", "bf16": "" @@ -7106,19 +7942,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.03, - "fp16": 1.14, + "int8": 0.52, + "fp16": 0.58, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7128,7 +7968,7 @@ "Precisions": [ { "int4": "", - "int8": 973.29, + "int8": 1506.76, "fp16": "", "fp32": "", "bf16": "" @@ -7140,19 +7980,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.73, - "fp16": 0.77, + "int8": 778.4, + "fp16": 509.56, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ 
-7162,7 +8006,7 @@ "Precisions": [ { "int4": "", - "int8": 1184.14, + "int8": 1.48, "fp16": "", "fp32": "", "bf16": "" @@ -7174,19 +8018,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 225.12, + "fp16": 127.27, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7196,7 +8044,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 4.31, "fp16": "", "fp32": "", "bf16": "" @@ -7208,19 +8056,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.97, - "fp16": 1.64, + "int8": 5.79, + "fp16": 2.86, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7230,7 +8082,7 @@ "Precisions": [ { "int4": "", - "int8": 481.56, + "int8": 144.71, "fp16": "", "fp32": "", "bf16": "" @@ -7242,19 +8094,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 404.76, + "fp16": 237.61, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7264,7 +8120,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 2.75, "fp16": "", "fp32": "", "bf16": "" @@ -7276,19 +8132,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 12.69, - "fp16": 7.44, + "int8": 131.89, + "fp16": 83.17, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7298,7 +8158,7 @@ "Precisions": [ { "int4": "", - "int8": 91.66, + "int8": 7.11, "fp16": "", "fp32": "", "bf16": "" @@ -7310,19 +8170,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { 
"throughput": { "Precisions": [ { "int4": "", - "int8": 0.56, + "int8": 120.44, "fp16": "", - "fp32": 0.51, - "bf16": "" + "fp32": 47.21, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7332,7 +8196,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 13.32, "fp16": "", "fp32": "", "bf16": "" @@ -7344,19 +8208,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 148.91, "fp16": "", - "fp32": 0.19, - "bf16": "" + "fp32": 93.08, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7366,7 +8234,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 9.22, "fp16": "", "fp32": "", "bf16": "" @@ -7378,19 +8246,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Platform": "Intel® Core™ i5-13600K CPU-only", "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.7, + "int8": "", "fp16": "", - "fp32": 0.31, - "bf16": "" + "fp32": 0.49, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7400,7 +8272,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 733.91, "fp16": "", "fp32": "", "bf16": "" @@ -7412,19 +8284,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.17, + "int8": 2974.41, "fp16": "", - "fp32": 0.65, - "bf16": "" + "fp32": 1317.04, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7434,7 +8310,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.69, "fp16": "", "fp32": "", "bf16": "" @@ -7446,19 +8322,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.67, + "int8": 537.98, "fp16": "", - "fp32": 0.33, - "bf16": "" + "fp32": 148.85, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7468,7 +8348,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 2.82, "fp16": "", "fp32": "", "bf16": "" @@ -7480,19 +8360,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": 
"ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 8.8, "fp16": "", - "fp32": 0.71, - "bf16": "" + "fp32": 2.47, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7502,7 +8386,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 133.73, "fp16": "", "fp32": "", "bf16": "" @@ -7514,19 +8398,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.81, + "int8": 1068.19, "fp16": "", - "fp32": 0.43, - "bf16": "" + "fp32": 379.85, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7536,7 +8424,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.33, "fp16": "", "fp32": "", "bf16": "" @@ -7548,10 +8436,11 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ @@ -7559,8 +8448,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 0.46, - "bf16": "" + "fp32": 122.62, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7582,19 +8474,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i5-13600K CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.37, + "int8": 266.57, "fp16": "", - "fp32": 1.2, - "bf16": "" + "fp32": 102.14, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7604,7 +8500,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 5.27, "fp16": "", "fp32": "", "bf16": "" @@ -7616,19 +8512,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 84.71, "fp16": "", - "fp32": 0.58, - "bf16": "" + "fp32": 51.06, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7650,19 +8550,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": 
"Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 274.01, + "int8": 98.02, "fp16": "", - "fp32": 168.87, - "bf16": "" + "fp32": 65.51, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7672,7 +8576,7 @@ "Precisions": [ { "int4": "", - "int8": 4.07, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7684,19 +8588,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 133.76, + "int8": 1.16, "fp16": "", - "fp32": 80.44, - "bf16": "" + "fp32": 0.64, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7706,7 +8614,7 @@ "Precisions": [ { "int4": "", - "int8": 7.82, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7718,19 +8626,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 300.95, + "int8": 1353.32, "fp16": "", - "fp32": 133.55, - "bf16": "" + "fp32": 683.15, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7740,7 +8652,7 @@ "Precisions": [ { "int4": "", - "int8": 3.62, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7752,19 +8664,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 534.91, + "int8": 365.63, "fp16": "", - "fp32": 406.53, - "bf16": "" + "fp32": 164.12, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7774,7 +8690,7 @@ "Precisions": [ { "int4": "", - "int8": 2.01, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7786,19 +8702,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 893.76, + "int8": 8.65, "fp16": "", - "fp32": 462.2, - "bf16": "" + "fp32": 3.77, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7808,7 +8728,7 @@ "Precisions": [ { "int4": "", - "int8": 1.62, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7820,19 +8740,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": 
false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 826.85, + "int8": 657.26, "fp16": "", - "fp32": 315.6, - "bf16": "" + "fp32": 293.93, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7842,7 +8766,7 @@ "Precisions": [ { "int4": "", - "int8": 1.9, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7854,10 +8778,11 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ @@ -7865,8 +8790,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 462.14, - "bf16": "" + "fp32": 107.24, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7876,7 +8804,7 @@ "Precisions": [ { "int4": "", - "int8": 1.72, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7888,19 +8816,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2989.92, + "int8": 182.9, "fp16": "", - "fp32": 1328.99, - "bf16": "" + "fp32": 101.97, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7910,7 +8842,7 @@ "Precisions": [ { "int4": "", - "int8": 0.69, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -7922,19 +8854,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 848.04, + "int8": 50.21, "fp16": "", - "fp32": 626.94, - "bf16": "" + "fp32": 18.33, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7944,7 +8880,7 @@ "Precisions": [ { "int4": "", - "int8": 1.4, + "int8": 22.66, "fp16": "", "fp32": "", "bf16": "" @@ -7957,18 +8893,22 @@ }, { "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1284.93, + "int8": 71.27, "fp16": "", - "fp32": 509.36, - "bf16": "" + "fp32": 41.39, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -7978,7 +8918,7 @@ "Precisions": [ { "int4": "", - "int8": 0.96, + "int8": 14.62, "fp16": "", "fp32": "", "bf16": "" @@ -7990,19 +8930,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", + "Model": 
"mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 975.3, + "int8": 0.71, "fp16": "", - "fp32": 312.11, - "bf16": "" + "fp32": 0.19, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8012,7 +8956,7 @@ "Precisions": [ { "int4": "", - "int8": 1.21, + "int8": 1361.21, "fp16": "", "fp32": "", "bf16": "" @@ -8024,19 +8968,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1966.6, + "int8": 1291.06, "fp16": "", - "fp32": 958.97, - "bf16": "" + "fp32": 507.09, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8046,7 +8994,7 @@ "Precisions": [ { "int4": "", - "int8": 0.96, + "int8": 0.95, "fp16": "", "fp32": "", "bf16": "" @@ -8058,19 +9006,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1039.53, + "int8": 224.68, "fp16": "", - "fp32": 513.13, - "bf16": "" + "fp32": 60.81, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8080,7 +9032,7 @@ "Precisions": [ { "int4": "", - "int8": 1.61, + "int8": 4.95, "fp16": "", "fp32": "", "bf16": "" @@ -8092,19 +9044,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 3.84, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 1.01, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8114,7 +9070,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 250.45, "fp16": "", "fp32": "", "bf16": "" @@ -8126,19 +9082,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 737.89, + "int8": 491.99, "fp16": "", - "fp32": 488.9, - "bf16": "" + "fp32": 146.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8148,7 +9108,7 @@ "Precisions": [ { "int4": "", - "int8": 1.44, + "int8": 2.2, "fp16": "", "fp32": "", "bf16": "" @@ -8160,19 +9120,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, 
CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 904.6, + "int8": "", "fp16": "", - "fp32": 570.78, - "bf16": "" + "fp32": 48.0, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8182,7 +9146,7 @@ "Precisions": [ { "int4": "", - "int8": 1.56, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8194,19 +9158,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4254.47, + "int8": 106.45, "fp16": "", - "fp32": 2047.25, - "bf16": "" + "fp32": 40.14, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8216,7 +9184,7 @@ "Precisions": [ { "int4": "", - "int8": 0.6, + "int8": 10.2, "fp16": "", "fp32": "", "bf16": "" @@ -8228,19 +9196,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 296.47, - "fp16": "", - "fp32": 183.87, - "bf16": "" + "int8": 68.4, + "fp16": 53.22, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8250,7 +9222,7 @@ "Precisions": [ { "int4": "", - "int8": 3.81, + "int8": 17.09, "fp16": "", "fp32": "", "bf16": "" @@ -8262,19 +9234,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1451.27, - "fp16": "", - "fp32": 666.39, - "bf16": "" + "int8": 91.46, + "fp16": 72.22, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8284,7 +9260,7 @@ "Precisions": [ { "int4": "", - "int8": 1.2, + "int8": 17.92, "fp16": "", "fp32": "", "bf16": "" @@ -8296,19 +9272,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 519.78, - "fp16": "", - "fp32": 425.23, - "bf16": "" + "int8": 0.82, + "fp16": 0.88, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8318,7 +9298,7 @@ "Precisions": [ { "int4": "", - "int8": 2.07, + "int8": 1113.84, "fp16": "", "fp32": "", "bf16": "" @@ -8330,19 +9310,23 @@ } }, { - "Platform": 
"Intel® Xeon® Gold 5218T CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5407.63, - "fp16": "", - "fp32": 1924.43, - "bf16": "" + "int8": 729.72, + "fp16": 569.2, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8352,7 +9336,7 @@ "Precisions": [ { "int4": "", - "int8": 1.51, + "int8": 2.05, "fp16": "", "fp32": "", "bf16": "" @@ -8364,19 +9348,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 15016.47, - "fp16": "", - "fp32": 4645.46, - "bf16": "" + "int8": 262.94, + "fp16": 174.98, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8386,7 +9374,7 @@ "Precisions": [ { "int4": "", - "int8": 0.94, + "int8": 4.82, "fp16": "", "fp32": "", "bf16": "" @@ -8398,19 +9386,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 22842.63, - "fp16": "", - "fp32": 7144.4, - "bf16": "" + "int8": 8.29, + "fp16": 4.67, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8420,7 +9412,7 @@ "Precisions": [ { "int4": "", - "int8": 0.58, + "int8": 118.28, "fp16": "", "fp32": "", "bf16": "" @@ -8432,19 +9424,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 38642.1, - "fp16": "", - "fp32": 10319.56, - "bf16": 25708.49 + "int8": 447.59, + "fp16": 299.29, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8454,10 +9450,10 @@ "Precisions": [ { "int4": "", - "int8": 0.64, + "int8": 3.33, "fp16": "", "fp32": "", - "bf16": 0.65 + "bf16": "" } ], "Unit": "ms", @@ -8466,19 +9462,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 39913.42, - "fp16": "", - "fp32": 15945.06, - "bf16": "" + "int8": 161.26, + "fp16": 111.45, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": 
"", + "token_fp16": "" } ], "Unit": "FPS", @@ -8488,10 +9488,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 8.1, "fp16": "", "fp32": "", - "bf16": 0.75 + "bf16": "" } ], "Unit": "ms", @@ -8500,19 +9500,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 10312.71, + "int8": 50.01, "fp16": "", - "fp32": 3331.01, - "bf16": "" + "fp32": 25.82, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8522,7 +9526,7 @@ "Precisions": [ { "int4": "", - "int8": 1.24, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8534,19 +9538,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 11955.69, + "int8": 57.69, "fp16": "", - "fp32": 3563.39, - "bf16": "" + "fp32": 28.41, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8568,19 +9576,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1984.45, + "int8": 0.69, "fp16": "", - "fp32": 814.84, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8590,7 +9602,7 @@ "Precisions": [ { "int4": "", - "int8": 1.35, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8602,19 +9614,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1516.8, + "int8": 958.94, "fp16": "", - "fp32": 588.51, - "bf16": "" + "fp32": 350.53, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8624,7 +9640,7 @@ "Precisions": [ { "int4": "", - "int8": 1.61, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8636,19 +9652,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1651.84, + "int8": 230.4, "fp16": "", - "fp32": 775.78, - "bf16": "" + "fp32": 85.03, + "bf16": "", + "token_int4": "", 
+ "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8658,7 +9678,7 @@ "Precisions": [ { "int4": "", - "int8": 1.02, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8670,19 +9690,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2423.58, - "fp16": 2444.62, - "fp32": "", - "bf16": "" + "int8": 4.44, + "fp16": "", + "fp32": 1.75, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8692,7 +9716,7 @@ "Precisions": [ { "int4": "", - "int8": 0.64, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8704,19 +9728,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 456.16, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 162.16, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8726,7 +9754,7 @@ "Precisions": [ { "int4": "", - "int8": 0.99, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8738,19 +9766,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1862.03, - "fp16": 1564.35, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 55.98, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8760,7 +9792,7 @@ "Precisions": [ { "int4": "", - "int8": 0.78, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8772,19 +9804,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1956.69, - "fp16": 1343.46, - "fp32": "", - "bf16": "" + "int8": 103.63, + "fp16": "", + "fp32": 53.56, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8794,7 +9830,7 @@ "Precisions": [ { "int4": "", - "int8": 0.82, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -8806,19 +9842,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ 
{ "int4": "", - "int8": 1999.97, - "fp16": 1359.27, - "fp32": "", - "bf16": "" + "int8": 38.28, + "fp16": "", + "fp32": 13.87, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8828,7 +9868,7 @@ "Precisions": [ { "int4": "", - "int8": 0.74, + "int8": 28.41, "fp16": "", "fp32": "", "bf16": "" @@ -8840,19 +9880,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3751.95, - "fp16": 2877.38, - "fp32": "", - "bf16": "" + "int8": 53.34, + "fp16": "", + "fp32": 22.26, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8862,7 +9906,7 @@ "Precisions": [ { "int4": "", - "int8": 0.4, + "int8": 20.12, "fp16": "", "fp32": "", "bf16": "" @@ -8874,19 +9918,23 @@ } }, { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 415.36, - "fp16": 324.74, - "fp32": "", - "bf16": "" + "int8": 0.52, + "fp16": "", + "fp32": 0.14, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8896,7 +9944,7 @@ "Precisions": [ { "int4": "", - "int8": 3.52, + "int8": 1805.69, "fp16": "", "fp32": "", "bf16": "" @@ -8908,19 +9956,23 @@ } }, { - "Platform": "Intel® Atom® X6425E iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 188.4, - "fp16": 223.08, - "fp32": "", - "bf16": "" + "int8": 972.25, + "fp16": "", + "fp32": 311.82, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8930,7 +9982,7 @@ "Precisions": [ { "int4": "", - "int8": 7.66, + "int8": 1.2, "fp16": "", "fp32": "", "bf16": "" @@ -8942,19 +9994,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 673.52, - "fp16": 505.05, - "fp32": "", - "bf16": "" + "int8": 174.69, + "fp16": "", + "fp32": 45.52, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8964,7 +10020,7 @@ "Precisions": [ { "int4": "", - "int8": 2.74, + "int8": 6.4, "fp16": "", "fp32": "", "bf16": "" @@ -8976,19 +10032,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": 
"ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 779.69, - "fp16": 496.63, - "fp32": "", - "bf16": "" + "int8": 2.72, + "fp16": "", + "fp32": 0.78, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -8998,7 +10058,7 @@ "Precisions": [ { "int4": "", - "int8": 1.63, + "int8": 335.04, "fp16": "", "fp32": "", "bf16": "" @@ -9010,19 +10070,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 386.67, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 99.8, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9032,7 +10096,7 @@ "Precisions": [ { "int4": "", - "int8": 1.46, + "int8": 2.82, "fp16": "", "fp32": "", "bf16": "" @@ -9044,19 +10108,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", "int8": "", - "fp16": 574.04, - "fp32": "", - "bf16": "" + "fp16": "", + "fp32": 32.19, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9078,19 +10146,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 630.96, - "fp16": 442.65, - "fp32": "", - "bf16": "" + "int8": 76.54, + "fp16": "", + "fp32": 27.6, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9100,7 +10172,7 @@ "Precisions": [ { "int4": "", - "int8": 1.74, + "int8": 13.2, "fp16": "", "fp32": "", "bf16": "" @@ -9112,19 +10184,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1281.44, - "fp16": 911.94, + "int8": 45.77, + "fp16": 40.93, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9134,7 +10210,7 @@ "Precisions": [ { "int4": "", - "int8": 1.08, + "int8": 21.21, "fp16": "", "fp32": "", "bf16": "" @@ -9146,19 +10222,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", 
+ "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 890.07, - "fp16": 624.4, + "int8": 56.2, + "fp16": 41.8, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9168,7 +10248,7 @@ "Precisions": [ { "int4": "", - "int8": 1.41, + "int8": 23.38, "fp16": "", "fp32": "", "bf16": "" @@ -9180,19 +10260,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 0.56, + "fp16": 0.54, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9202,7 +10286,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1606.31, "fp16": "", "fp32": "", "bf16": "" @@ -9214,19 +10298,23 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 339.71, - "fp16": 267.18, + "int8": 648.66, + "fp16": 431.47, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9236,7 +10324,7 @@ "Precisions": [ { "int4": "", - "int8": 3.83, + "int8": 1.76, "fp16": "", "fp32": "", "bf16": "" @@ -9248,19 +10336,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1298.67, - "fp16": 1337.24, + "int8": 208.21, + "fp16": 122.24, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9270,7 +10362,7 @@ "Precisions": [ { "int4": "", - "int8": 0.78, + "int8": 5.47, "fp16": "", "fp32": "", "bf16": "" @@ -9282,19 +10374,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1422.4, - "fp16": 1361.15, + "int8": 5.71, + "fp16": 3.09, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9304,7 +10400,7 @@ "Precisions": [ { "int4": "", - "int8": 1.27, + "int8": 173.5, "fp16": "", "fp32": "", "bf16": "" @@ -9316,19 +10412,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "mobilenet-v2", 
- "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1200.66, - "fp16": 1554.15, + "int8": 348.95, + "fp16": 224.45, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9338,7 +10438,7 @@ "Precisions": [ { "int4": "", - "int8": 1.23, + "int8": 3.56, "fp16": "", "fp32": "", "bf16": "" @@ -9350,19 +10450,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i7-1185GRE iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 478.43, - "fp16": "", - "fp32": 307.02, - "bf16": "" + "int8": 113.89, + "fp16": 78.71, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9372,7 +10476,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 9.49, "fp16": "", "fp32": "", "bf16": "" @@ -9384,19 +10488,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 228.63, + "int8": 111.58, "fp16": "", - "fp32": 220.31, - "bf16": "" + "fp32": 57.55, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9418,19 +10526,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 524.29, + "int8": 141.13, "fp16": "", - "fp32": 394.1, - "bf16": "" + "fp32": 75.23, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9452,19 +10564,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.63, "fp16": "", - "fp32": 350.27, - "bf16": "" + "fp32": 0.68, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9486,19 +10602,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", 
"Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1049.71, + "int8": 2287.47, "fp16": "", - "fp32": 538.46, - "bf16": "" + "fp32": 1150.08, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9520,19 +10640,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 532.56, "fp16": "", - "fp32": 680.55, - "bf16": "" + "fp32": 180.65, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9554,19 +10678,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 10.33, "fp16": "", - "fp32": 347.8, - "bf16": "" + "fp32": 3.81, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9589,18 +10717,22 @@ }, { "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2306.0, + "int8": 1013.57, "fp16": "", - "fp32": 1096.89, - "bf16": "" + "fp32": 403.5, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9622,19 +10754,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1219.99, + "int8": "", "fp16": "", - "fp32": 644.18, - "bf16": "" + "fp32": 133.88, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9656,19 +10792,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-12700H CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1610.28, + "int8": 268.57, "fp16": "", - "fp32": 845.71, - "bf16": "" + "fp32": 120.55, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9690,19 +10830,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + 
"PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 484.88, + "int8": 87.88, "fp16": "", - "fp32": 280.8, - "bf16": "" + "fp32": 34.76, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9712,7 +10856,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 16.26, "fp16": "", "fp32": "", "bf16": "" @@ -9724,19 +10868,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4622.78, + "int8": 113.82, "fp16": "", - "fp32": 2338.78, - "bf16": "" + "fp32": 62.45, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9746,7 +10894,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 11.46, "fp16": "", "fp32": "", "bf16": "" @@ -9758,19 +10906,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.27, "fp16": "", - "fp32": 1684.08, - "bf16": "" + "fp32": 0.36, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9780,7 +10932,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 886.78, "fp16": "", "fp32": "", "bf16": "" @@ -9792,19 +10944,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 45.28, + "int8": 1982.75, "fp16": "", - "fp32": 18.84, - "bf16": "" + "fp32": 968.72, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9814,7 +10970,7 @@ "Precisions": [ { "int4": "", - "int8": 23.81, + "int8": 0.89, "fp16": "", "fp32": "", "bf16": "" @@ -9826,19 +10982,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 19.87, + "int8": 429.58, "fp16": "", - "fp32": 8.15, - "bf16": "" + "fp32": 107.58, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9848,7 +11008,7 @@ "Precisions": [ { "int4": "", - "int8": 51.41, + "int8": 3.47, "fp16": "", "fp32": "", "bf16": "" @@ -9860,19 +11020,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": 
"ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 51.65, + "int8": 7.11, "fp16": "", - "fp32": 14.46, - "bf16": "" + "fp32": 1.96, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9882,7 +11046,7 @@ "Precisions": [ { "int4": "", - "int8": 19.81, + "int8": 159.25, "fp16": "", "fp32": "", "bf16": "" @@ -9894,19 +11058,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 96.85, + "int8": 854.13, "fp16": "", - "fp32": 50.34, - "bf16": "" + "fp32": 289.32, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9916,7 +11084,7 @@ "Precisions": [ { "int4": "", - "int8": 10.76, + "int8": 1.72, "fp16": "", "fp32": "", "bf16": "" @@ -9928,19 +11096,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 144.67, + "int8": "", "fp16": "", - "fp32": 72.98, - "bf16": "" + "fp32": 90.72, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9950,7 +11122,7 @@ "Precisions": [ { "int4": "", - "int8": 8.21, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -9962,19 +11134,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 154.38, + "int8": 206.32, "fp16": "", - "fp32": 40.41, - "bf16": "" + "fp32": 78.09, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -9984,7 +11160,7 @@ "Precisions": [ { "int4": "", - "int8": 11.07, + "int8": 6.49, "fp16": "", "fp32": "", "bf16": "" @@ -9996,19 +11172,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 185.28, - "fp16": "", - "fp32": 53.47, - "bf16": "" + "int8": 89.81, + "fp16": 69.99, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10018,7 +11198,7 @@ "Precisions": [ { "int4": "", - "int8": 9.56, + "int8": 12.71, "fp16": "", "fp32": "", "bf16": "" @@ -10030,19 +11210,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "resnet-50", - "Checked": "true", - 
"PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 543.91, - "fp16": "", - "fp32": 151.19, - "bf16": "" + "int8": 128.07, + "fp16": 97.39, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10052,7 +11236,7 @@ "Precisions": [ { "int4": "", - "int8": 2.82, + "int8": 12.87, "fp16": "", "fp32": "", "bf16": "" @@ -10064,19 +11248,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 151.17, - "fp16": "", - "fp32": 75.67, - "bf16": "" + "int8": 1.04, + "fp16": 1.15, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10086,7 +11274,7 @@ "Precisions": [ { "int4": "", - "int8": 7.12, + "int8": 972.87, "fp16": "", "fp32": "", "bf16": "" @@ -10098,19 +11286,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 225.84, - "fp16": "", - "fp32": 61.28, - "bf16": "" + "int8": 1281.93, + "fp16": 912.69, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10120,7 +11312,7 @@ "Precisions": [ { "int4": "", - "int8": 4.95, + "int8": 1.08, "fp16": "", "fp32": "", "bf16": "" @@ -10132,19 +11324,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 174.35, - "fp16": "", - "fp32": 45.46, - "bf16": "" + "int8": 381.27, + "fp16": 226.42, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10154,7 +11350,7 @@ "Precisions": [ { "int4": "", - "int8": 6.41, + "int8": 3.22, "fp16": "", "fp32": "", "bf16": "" @@ -10166,19 +11362,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 413.95, - "fp16": "", - "fp32": 107.82, - "bf16": "" + "int8": 10.47, + "fp16": 6.14, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10188,7 +11388,7 @@ "Precisions": [ { "int4": "", - "int8": 3.45, + "int8": 100.17, "fp16": "", 
"fp32": "", "bf16": "" @@ -10200,19 +11400,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 203.93, - "fp16": "", - "fp32": 59.04, - "bf16": "" + "int8": 744.92, + "fp16": 407.72, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10222,7 +11426,7 @@ "Precisions": [ { "int4": "", - "int8": 8.97, + "int8": 1.87, "fp16": "", "fp32": "", "bf16": "" @@ -10234,19 +11438,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-12700H iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 298.27, - "fp16": "", + "int8": 215.67, + "fp16": 148.01, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10256,7 +11464,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 5.58, "fp16": "", "fp32": "", "bf16": "" @@ -10268,19 +11476,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 122.15, + "int8": 61.33, "fp16": "", - "fp32": 60.34, - "bf16": "" + "fp32": 32.27, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10290,7 +11502,7 @@ "Precisions": [ { "int4": "", - "int8": 7.21, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10302,19 +11514,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 152.19, + "int8": 88.48, "fp16": "", - "fp32": 71.16, - "bf16": "" + "fp32": 59.03, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10324,7 +11540,7 @@ "Precisions": [ { "int4": "", - "int8": 7.72, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10336,19 +11552,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 766.25, + "int8": 0.81, "fp16": "", - "fp32": 233.37, - "bf16": "" + "fp32": 0.43, + "bf16": "", + "token_int4": "", + "token_int8": "", 
+ "token_fp16": "" } ], "Unit": "FPS", @@ -10358,7 +11578,7 @@ "Precisions": [ { "int4": "", - "int8": 2.16, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10370,19 +11590,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 48.79, + "int8": 1218.37, "fp16": "", - "fp32": 20.22, - "bf16": "" + "fp32": 644.91, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10392,7 +11616,7 @@ "Precisions": [ { "int4": "", - "int8": 21.9, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10404,19 +11628,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 245.58, + "int8": 284.91, "fp16": "", - "fp32": 121.38, - "bf16": "" + "fp32": 109.93, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10426,7 +11654,7 @@ "Precisions": [ { "int4": "", - "int8": 5.17, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10438,19 +11666,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 92.24, + "int8": 5.67, "fp16": "", - "fp32": 49.79, - "bf16": "" + "fp32": 2.15, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10460,7 +11692,7 @@ "Precisions": [ { "int4": "", - "int8": 11.19, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10472,19 +11704,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 971.95, + "int8": 554.73, "fp16": "", - "fp32": 269.81, - "bf16": "" + "fp32": 228.8, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10494,7 +11730,7 @@ "Precisions": [ { "int4": "", - "int8": 3.12, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10506,19 +11742,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2968.31, + "int8": "", "fp16": "", - "fp32": 754.35, - 
"bf16": "" + "fp32": 80.32, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10528,7 +11768,7 @@ "Precisions": [ { "int4": "", - "int8": 1.61, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10540,19 +11780,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4948.09, + "int8": 154.56, "fp16": "", - "fp32": 1155.67, - "bf16": "" + "fp32": 72.19, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10562,7 +11806,7 @@ "Precisions": [ { "int4": "", - "int8": 1.06, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10574,19 +11818,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 19506.31, + "int8": 44.62, "fp16": "", - "fp32": 1609.79, - "bf16": 7600.31 + "fp32": 17.96, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10596,10 +11844,10 @@ "Precisions": [ { "int4": "", - "int8": 0.99, + "int8": 37.64, "fp16": "", "fp32": "", - "bf16": 1.24 + "bf16": "" } ], "Unit": "ms", @@ -10608,19 +11856,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 21749.45, + "int8": 61.85, "fp16": "", - "fp32": 2011.71, - "bf16": 13645.99 + "fp32": 39.52, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10630,10 +11882,10 @@ "Precisions": [ { "int4": "", - "int8": 0.98, + "int8": 26.95, "fp16": "", "fp32": "", - "bf16": 1.33 + "bf16": "" } ], "Unit": "ms", @@ -10642,19 +11894,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2122.71, + "int8": 0.64, "fp16": "", - "fp32": 565.52, - "bf16": "" + "fp32": 0.17, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10664,7 +11920,7 @@ "Precisions": [ { "int4": "", - "int8": 1.88, + "int8": 1935.64, "fp16": "", "fp32": "", "bf16": "" @@ -10676,19 +11932,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": 
"mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2249.41, + "int8": 1042.94, "fp16": "", - "fp32": 563.79, - "bf16": "" + "fp32": 515.99, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10698,7 +11958,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.61, "fp16": "", "fp32": "", "bf16": "" @@ -10710,19 +11970,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 392.72, + "int8": 203.02, "fp16": "", - "fp32": 95.29, - "bf16": "" + "fp32": 59.12, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10732,7 +11996,7 @@ "Precisions": [ { "int4": "", - "int8": 6.4, + "int8": 9.0, "fp16": "", "fp32": "", "bf16": "" @@ -10744,19 +12008,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 290.74, + "int8": 3.48, "fp16": "", - "fp32": 70.82, - "bf16": "" + "fp32": 1.03, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10766,7 +12034,7 @@ "Precisions": [ { "int4": "", - "int8": 7.55, + "int8": 439.19, "fp16": "", "fp32": "", "bf16": "" @@ -10778,19 +12046,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 318.23, + "int8": 422.9, "fp16": "", - "fp32": 86.35, - "bf16": "" + "fp32": 151.69, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10800,7 +12072,7 @@ "Precisions": [ { "int4": "", - "int8": 4.59, + "int8": 3.87, "fp16": "", "fp32": "", "bf16": "" @@ -10812,19 +12084,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2318.55, - "fp16": 1375.72, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 48.93, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10834,7 +12110,7 @@ "Precisions": [ { "int4": "", - "int8": 0.87, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -10846,19 +12122,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": 
"resnet-50", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i7-1355U Processor CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 101.73, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 40.76, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10868,7 +12148,7 @@ "Precisions": [ { "int4": "", - "int8": 1.42, + "int8": 16.99, "fp16": "", "fp32": "", "bf16": "" @@ -10880,19 +12160,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 854.09, - "fp16": 539.78, + "int8": 67.08, + "fp16": 52.9, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10902,7 +12186,7 @@ "Precisions": [ { "int4": "", - "int8": 1.4, + "int8": 14.38, "fp16": "", "fp32": "", "bf16": "" @@ -10914,19 +12198,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 768.87, - "fp16": 382.94, + "int8": 98.8, + "fp16": 73.53, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -10936,7 +12224,7 @@ "Precisions": [ { "int4": "", - "int8": 1.57, + "int8": 13.41, "fp16": "", "fp32": "", "bf16": "" @@ -10948,19 +12236,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "gemma-2-9b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 772.54, - "fp16": 383.99, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 6.21, + "token_int8": 3.88, + "token_fp16": "" } ], "Unit": "FPS", @@ -10969,8 +12261,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 1.54, + "int4": 160.82, + "int8": 257.32, "fp16": "", "fp32": "", "bf16": "" @@ -10982,19 +12274,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2022.98, - "fp16": 975.93, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 7.25, + "token_int8": 4.27, + "token_fp16": "" } ], "Unit": "FPS", @@ -11003,8 +12299,8 @@ 
"latency": { "Precisions": [ { - "int4": "", - "int8": 0.75, + "int4": 137.82, + "int8": 233.92, "fp16": "", "fp32": "", "bf16": "" @@ -11016,19 +12312,23 @@ } }, { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 106.37, - "fp16": 64.66, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 8.53, + "token_int8": 5.74, + "token_fp16": "" } ], "Unit": "FPS", @@ -11037,8 +12337,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 10.57, + "int4": 117.18, + "int8": 174.01, "fp16": "", "fp32": "", "bf16": "" @@ -11050,19 +12350,23 @@ } }, { - "Platform": "Intel® Atom® X6425E iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "llama-3-8b", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 48.1, - "fp16": 51.69, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 8.49, + "token_int8": 5.06, + "token_fp16": "" } ], "Unit": "FPS", @@ -11071,8 +12375,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 23.01, + "int4": 117.69, + "int8": 197.3, "fp16": "", "fp32": "", "bf16": "" @@ -11084,19 +12388,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 203.75, - "fp16": 118.64, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 16.79, + "token_int8": 11.89, + "token_fp16": 6.7 } ], "Unit": "FPS", @@ -11105,9 +12413,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 6.33, - "fp16": "", + "int4": 59.54, + "int8": 84.05, + "fp16": 149.13, "fp32": "", "bf16": "" } @@ -11118,19 +12426,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 213.35, - "fp16": 119.51, + "int8": 0.73, + "fp16": 0.77, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11140,7 +12452,7 @@ "Precisions": [ { "int4": "", - "int8": 4.89, + "int8": 1191.59, "fp16": "", "fp32": "", "bf16": "" @@ -11152,19 +12464,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "mistral-7b-v0.1", + "featured_SKU": false, + "whats_new_model": false, + 
"PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 225.63, - "fp16": 128.49, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 8.86, + "token_int8": 5.44, + "token_fp16": "" } ], "Unit": "FPS", @@ -11173,8 +12489,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 4.26, + "int4": 112.76, + "int8": 183.5, "fp16": "", "fp32": "", "bf16": "" @@ -11186,19 +12502,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": 174.47, + "int8": 869.88, + "fp16": 621.94, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11208,7 +12528,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.38, "fp16": "", "fp32": "", "bf16": "" @@ -11220,19 +12540,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 203.92, - "fp16": 125.66, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 14.78, + "token_int8": 9.98, + "token_fp16": 5.45 } ], "Unit": "FPS", @@ -11241,9 +12565,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 5.53, - "fp16": "", + "int4": 67.65, + "int8": 100.19, + "fp16": 183.48, "fp32": "", "bf16": "" } @@ -11254,19 +12578,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "qwen2-7b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 387.63, - "fp16": 228.73, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 9.11, + "token_int8": 5.39, + "token_fp16": "" } ], "Unit": "FPS", @@ -11275,8 +12603,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 3.23, + "int4": 109.74, + "int8": 185.49, "fp16": "", "fp32": "", "bf16": "" @@ -11290,17 +12618,21 @@ { "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 278.1, - "fp16": 165.78, + "int8": 277.06, + "fp16": 164.27, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11310,7 +12642,7 @@ "Precisions": [ { "int4": "", - "int8": 3.86, + "int8": 3.85, "fp16": "", "fp32": "", "bf16": "" @@ -11322,19 +12654,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": 
"Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 7.1, + "fp16": 3.99, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11344,7 +12680,7 @@ "Precisions": [ { "int4": "", - "int8": 3.5, + "int8": 126.73, "fp16": "", "fp32": "", "bf16": "" @@ -11356,19 +12692,23 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 81.72, - "fp16": 49.75, + "int8": 484.13, + "fp16": 298.47, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11378,7 +12718,7 @@ "Precisions": [ { "int4": "", - "int8": 13.15, + "int8": 2.49, "fp16": "", "fp32": "", "bf16": "" @@ -11390,19 +12730,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "stable-diffusion-v1-5", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 556.79, - "fp16": 393.72, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11412,8 +12756,8 @@ "Precisions": [ { "int4": "", - "int8": 1.7, - "fp16": "", + "int8": 29.54, + "fp16": 29.97, "fp32": "", "bf16": "" } @@ -11424,19 +12768,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 568.08, - "fp16": 375.74, + "int8": 162.35, + "fp16": 106.83, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11446,7 +12794,7 @@ "Precisions": [ { "int4": "", - "int8": 2.26, + "int8": 6.38, "fp16": "", "fp32": "", "bf16": "" @@ -11458,19 +12806,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1167.79, - "fp16": 621.08, - "fp32": "", - "bf16": "" + "int8": 170.14, + "fp16": "", + "fp32": 67.07, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11480,7 +12832,7 @@ "Precisions": [ { "int4": "", - "int8": 
1.49, + "int8": 10.73, "fp16": "", "fp32": "", "bf16": "" @@ -11492,19 +12844,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 129.36, + "int8": 219.8, "fp16": "", - "fp32": 55.58, - "bf16": "" + "fp32": 126.91, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11514,7 +12870,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 7.34, "fp16": "", "fp32": "", "bf16": "" @@ -11526,19 +12882,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "gemma-2-9b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 59.45, + "int8": "", "fp16": "", - "fp32": 54.27, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 9.42, + "token_int8": 6.89, + "token_fp16": 3.59 } ], "Unit": "FPS", @@ -11547,9 +12907,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 106.06, + "int8": 144.95, + "fp16": 278.42, "fp32": "", "bf16": "" } @@ -11560,19 +12920,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 196.98, + "int8": "", "fp16": "", - "fp32": 115.77, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 10.65, + "token_int8": 7.46, + "token_fp16": 3.83 } ], "Unit": "FPS", @@ -11581,9 +12945,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 93.82, + "int8": 133.88, + "fp16": 260.66, "fp32": "", "bf16": "" } @@ -11594,10 +12958,11 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ @@ -11605,8 +12970,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 53.71, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 13.44, + "token_int8": 9.29, + "token_fp16": 4.94 } ], "Unit": "FPS", @@ -11615,9 +12983,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 74.39, + "int8": 107.62, + "fp16": 202.32, "fp32": "", "bf16": "" } @@ -11628,19 +12996,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "llama-3-8b", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - 
"int8": 235.76, + "int8": "", "fp16": "", - "fp32": 88.15, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 11.91, + "token_int8": 8.65, + "token_fp16": 4.48 } ], "Unit": "FPS", @@ -11649,9 +13021,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 83.93, + "int8": 115.48, + "fp16": 223.15, "fp32": "", "bf16": "" } @@ -11662,19 +13034,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 368.58, + "int8": "", "fp16": "", - "fp32": 166.93, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 25.41, + "token_int8": 18.99, + "token_fp16": 10.18 } ], "Unit": "FPS", @@ -11683,9 +13059,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 39.35, + "int8": 52.64, + "fp16": 98.23, "fp32": "", "bf16": "" } @@ -11696,19 +13072,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 224.44, + "int8": 2.49, "fp16": "", - "fp32": 81.26, - "bf16": "" + "fp32": 0.71, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11718,7 +13098,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 562.6, "fp16": "", "fp32": "", "bf16": "" @@ -11730,10 +13110,11 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "mistral-7b-v0.1", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ @@ -11742,7 +13123,10 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 14.41, + "token_int8": 9.12, + "token_fp16": 4.71 } ], "Unit": "FPS", @@ -11751,9 +13135,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 69.39, + "int8": 109.54, + "fp16": 211.91, "fp32": "", "bf16": "" } @@ -11764,19 +13148,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 285.59, + "int8": 4239.14, "fp16": "", - "fp32": 110.56, - "bf16": "" + "fp32": 2047.2, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11786,7 +13174,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.6, "fp16": "", "fp32": "", "bf16": "" @@ -11798,19 +13186,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + 
"Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 433.32, + "int8": "", "fp16": "", - "fp32": 147.73, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": 15.66, + "token_fp16": 8.52 } ], "Unit": "FPS", @@ -11820,8 +13212,8 @@ "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 63.84, + "fp16": 117.37, "fp32": "", "bf16": "" } @@ -11832,19 +13224,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "qwen2-7b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 112.49, + "int8": "", "fp16": "", - "fp32": 42.68, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 13.1, + "token_int8": 9.24, + "token_fp16": 4.75 } ], "Unit": "FPS", @@ -11853,9 +13249,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 76.33, + "int8": 108.16, + "fp16": 210.38, "fp32": "", "bf16": "" } @@ -11866,19 +13262,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Platform": "Intel® Core™ i9-13900K CPU-only", "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1107.14, + "int8": 762.32, "fp16": "", - "fp32": 471.86, - "bf16": "" + "fp32": 234.53, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11888,7 +13288,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 2.17, "fp16": "", "fp32": "", "bf16": "" @@ -11900,19 +13300,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 12.97, "fp16": "", - "fp32": 257.94, - "bf16": "" + "fp32": 3.84, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11922,7 +13326,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 102.02, "fp16": "", "fp32": "", "bf16": "" @@ -11934,19 +13338,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.76, + "int8": 1606.89, "fp16": "", - "fp32": 0.31, - "bf16": "" + "fp32": 589.62, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11956,7 +13364,7 @@ "Precisions": [ { "int4": "", - "int8": 1318.07, + "int8": 1.08, "fp16": "", "fp32": "", "bf16": "" @@ -11968,19 
+13376,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "stable-diffusion-v1-5", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.33, + "int8": "", "fp16": "", - "fp32": 0.13, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -11990,8 +13402,8 @@ "Precisions": [ { "int4": "", - "int8": 2998.35, - "fp16": "", + "int8": 40.27, + "fp16": 39.61, "fp32": "", "bf16": "" } @@ -12002,19 +13414,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.89, + "int8": "", "fp16": "", - "fp32": 0.23, - "bf16": "" + "fp32": 187.66, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12024,7 +13440,7 @@ "Precisions": [ { "int4": "", - "int8": 1117.77, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -12036,19 +13452,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Core™ i9-13900K CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.67, + "int8": 389.04, "fp16": "", - "fp32": 0.89, - "bf16": "" + "fp32": 154.4, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12058,7 +13478,7 @@ "Precisions": [ { "int4": "", - "int8": 598.98, + "int8": 4.13, "fp16": "", "fp32": "", "bf16": "" @@ -12070,19 +13490,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.42, - "fp16": "", - "fp32": 1.29, - "bf16": "" + "int8": 187.26, + "fp16": 147.66, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12092,7 +13516,7 @@ "Precisions": [ { "int4": "", - "int8": 428.32, + "int8": 5.66, "fp16": "", "fp32": "", "bf16": "" @@ -12104,19 +13528,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.76, - "fp16": "", - "fp32": 0.79, - "bf16": "" + "int8": 147.24, + "fp16": 121.24, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], 
"Unit": "FPS", @@ -12126,7 +13554,7 @@ "Precisions": [ { "int4": "", - "int8": 494.95, + "int8": 9.93, "fp16": "", "fp32": "", "bf16": "" @@ -12138,19 +13566,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "llama-2-7b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.17, + "int8": "", "fp16": "", - "fp32": 0.92, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 9.15, + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12159,8 +13591,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 463.7, + "int4": 109.23, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -12172,19 +13604,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "llama-3-8b", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.97, + "int8": "", "fp16": "", - "fp32": 2.5, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 10.18, + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12193,8 +13629,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 128.87, + "int4": 98.23, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -12206,19 +13642,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.61, + "int8": "", "fp16": "", - "fp32": 1.34, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 22.57, + "token_int8": 16.65, + "token_fp16": "" } ], "Unit": "FPS", @@ -12227,8 +13667,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 399.72, + "int4": 44.29, + "int8": 60.05, "fp16": "", "fp32": "", "bf16": "" @@ -12240,19 +13680,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.96, + "int8": "", "fp16": "", - "fp32": 1.02, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12262,7 +13706,7 @@ "Precisions": [ { "int4": "", - "int8": 250.0, + "int8": 187.74, "fp16": "", "fp32": "", "bf16": "" @@ -12274,10 +13718,11 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "mistral-7b-v0.1", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { 
"Precisions": [ @@ -12286,7 +13731,10 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12296,7 +13744,7 @@ "Precisions": [ { "int4": "", - "int8": 321.18, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -12308,19 +13756,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.82, - "fp16": "", - "fp32": 1.9, - "bf16": "" + "int8": 1879.14, + "fp16": 1565.12, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12330,7 +13782,7 @@ "Precisions": [ { "int4": "", - "int8": 169.83, + "int8": 0.78, "fp16": "", "fp32": "", "bf16": "" @@ -12342,19 +13794,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.5, + "int8": "", "fp16": "", - "fp32": 1.02, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 19.33, + "token_int8": 10.23, + "token_fp16": "" } ], "Unit": "FPS", @@ -12363,8 +13819,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 436.59, + "int4": 51.73, + "int8": 97.67, "fp16": "", "fp32": "", "bf16": "" @@ -12376,19 +13832,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "qwen2-7b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.14, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 10.59, + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12397,8 +13857,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 247.47, + "int4": 94.38, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -12410,19 +13870,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.0, - "fp16": "", - "fp32": 1.06, - "bf16": "" + "int8": 820.74, + "fp16": 519.4, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12432,7 +13896,7 @@ "Precisions": [ { "int4": "", - "int8": 374.78, + "int8": 1.39, "fp16": "", "fp32": "", "bf16": "" @@ -12444,19 +13908,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + 
"whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.57, + "int8": "", "fp16": "", - "fp32": 1.28, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12466,7 +13934,7 @@ "Precisions": [ { "int4": "", - "int8": 413.46, + "int8": 37.02, "fp16": "", "fp32": "", "bf16": "" @@ -12478,19 +13946,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 12.97, - "fp16": "", - "fp32": 3.83, - "bf16": "" + "int8": 993.56, + "fp16": 762.67, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12500,7 +13972,7 @@ "Precisions": [ { "int4": "", - "int8": 101.21, + "int8": 1.33, "fp16": "", "fp32": "", "bf16": "" @@ -12512,19 +13984,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 0.82, - "fp16": "", - "fp32": 0.32, - "bf16": "" + "int8": 319.11, + "fp16": 290.1, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12534,7 +14010,7 @@ "Precisions": [ { "int4": "", - "int8": 1223.85, + "int8": 3.76, "fp16": "", "fp32": "", "bf16": "" @@ -12546,19 +14022,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4.34, - "fp16": "", - "fp32": 2.29, - "bf16": "" + "int8": 385.87, + "fp16": 420.99, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12568,7 +14048,7 @@ "Precisions": [ { "int4": "", - "int8": 239.97, + "int8": 2.99, "fp16": "", "fp32": "", "bf16": "" @@ -12580,19 +14060,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.59, - "fp16": "", - "fp32": 0.85, - "bf16": "" + "int8": 426.56, + "fp16": 362.73, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12602,7 +14086,7 @@ "Precisions": [ { "int4": "", - "int8": 628.98, + "int8": 2.8, "fp16": "", "fp32": "", "bf16": "" @@ -12614,19 +14098,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - 
"PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "gemma-2-9b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 17.67, + "int8": "", "fp16": "", - "fp32": 4.59, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 22.66, + "token_int8": 18.13, + "token_fp16": "" } ], "Unit": "FPS", @@ -12635,8 +14123,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 115.61, + "int4": 44.13, + "int8": 55.13, "fp16": "", "fp32": "", "bf16": "" @@ -12648,19 +14136,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "glm-4-9b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 58.24, + "int8": "", "fp16": "", - "fp32": 15.05, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 40.04, + "token_int8": 26.95, + "token_fp16": "" } ], "Unit": "FPS", @@ -12669,8 +14161,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 36.32, + "int4": 24.97, + "int8": 37.1, "fp16": "", "fp32": "", "bf16": "" @@ -12682,19 +14174,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "llama-2-7b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 85.63, + "int8": "", "fp16": "", - "fp32": 21.01, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 45.22, + "token_int8": 33.88, + "token_fp16": 21.45 } ], "Unit": "FPS", @@ -12703,9 +14199,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 25.38, - "fp16": "", + "int4": 22.11, + "int8": 29.51, + "fp16": 46.62, "fp32": "", "bf16": "" } @@ -12716,19 +14212,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "llama-3-8b", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 438.33, + "int8": "", "fp16": "", - "fp32": 30.86, - "bf16": 213.33 + "fp32": "", + "bf16": "", + "token_int4": 45.55, + "token_int8": 30.8, + "token_fp16": "" } ], "Unit": "FPS", @@ -12737,11 +14237,11 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 7.94, + "int4": 21.95, + "int8": 32.46, "fp16": "", "fp32": "", - "bf16": 13.88 + "bf16": "" } ], "Unit": "ms", @@ -12750,19 +14250,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 512.76, + "int8": "", "fp16": "", - "fp32": 35.43, - "bf16": 276.38 + "fp32": "", + "bf16": "", + "token_int4": 69.44, + "token_int8": 
57.9, + "token_fp16": 37.69 } ], "Unit": "FPS", @@ -12771,11 +14275,11 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 7.12, - "fp16": "", + "int4": 14.4, + "int8": 17.27, + "fp16": 26.53, "fp32": "", - "bf16": 11.56 + "bf16": "" } ], "Unit": "ms", @@ -12784,19 +14288,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 41.98, - "fp16": "", - "fp32": 10.9, - "bf16": "" + "int8": 33.38, + "fp16": 19.04, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12806,7 +14314,7 @@ "Precisions": [ { "int4": "", - "int8": 48.76, + "int8": 48.67, "fp16": "", "fp32": "", "bf16": "" @@ -12818,19 +14326,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "mistral-7b-v0.1", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 42.23, + "int8": "", "fp16": "", - "fp32": 10.5, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 45.53, + "token_int8": 32.37, + "token_fp16": 20.21 } ], "Unit": "FPS", @@ -12839,9 +14351,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 49.04, - "fp16": "", + "int4": 21.96, + "int8": 30.89, + "fp16": 49.48, "fp32": "", "bf16": "" } @@ -12852,19 +14364,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.45, - "fp16": "", - "fp32": 1.66, - "bf16": "" + "int8": 3134.27, + "fp16": 3004.5, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12874,7 +14390,7 @@ "Precisions": [ { "int4": "", - "int8": 208.68, + "int8": 0.57, "fp16": "", "fp32": "", "bf16": "" @@ -12886,19 +14402,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4.88, + "int8": "", "fp16": "", - "fp32": 1.23, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 69.93, + "token_int8": 51.51, + "token_fp16": 32.84 } ], "Unit": "FPS", @@ -12907,9 +14427,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 259.24, - "fp16": "", + "int4": 14.3, + "int8": 19.41, + "fp16": 30.45, "fp32": "", "bf16": "" } @@ -12920,19 +14440,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, 
CPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "qwen2-7b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.08, + "int8": "", "fp16": "", - "fp32": 1.48, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 45.8, + "token_int8": 32.78, + "token_fp16": "" } ], "Unit": "FPS", @@ -12941,8 +14465,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 234.74, + "int4": 21.83, + "int8": 30.5, "fp16": "", "fp32": "", "bf16": "" @@ -12955,18 +14479,22 @@ }, { "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 140.65, - "fp16": 92.11, + "int8": 1921.18, + "fp16": 1329.28, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -12976,7 +14504,7 @@ "Precisions": [ { "int4": "", - "int8": 15.32, + "int8": 0.78, "fp16": "", "fp32": "", "bf16": "" @@ -12988,19 +14516,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", "Model": "ssd-resnet34-1200", - "Checked": "true", + "featured_SKU": false, + "whats_new_model": false, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 133.77, "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13010,7 +14542,7 @@ "Precisions": [ { "int4": "", - "int8": 14.78, + "int8": 13.93, "fp16": "", "fp32": "", "bf16": "" @@ -13022,19 +14554,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 35.4, - "fp16": 17.45, + "int8": 2200.83, + "fp16": 1665.15, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13044,7 +14580,7 @@ "Precisions": [ { "int4": "", - "int8": 35.98, + "int8": 0.78, "fp16": "", "fp32": "", "bf16": "" @@ -13056,19 +14592,23 @@ } }, { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "stable-diffusion-v1-5", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.16, - "fp16": 1.32, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13078,8 +14618,8 @@ "Precisions": [ { "int4": "", - "int8": 472.12, - "fp16": "", + "int8": 2.33, + "fp16": 2.36, "fp32": "", "bf16": "" } @@ -13090,19 +14630,23 @@ } }, { - "Platform": "Intel® Atom® X6425E iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "yolo_v8n", + 
"featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.16, - "fp16": 1.16, + "int8": 759.93, + "fp16": 694.57, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13112,7 +14656,7 @@ "Precisions": [ { "int4": "", - "int8": 870.37, + "int8": 1.96, "fp16": "", "fp32": "", "bf16": "" @@ -13124,19 +14668,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.09, - "fp16": 2.78, - "fp32": "", - "bf16": "" + "int8": 36.93, + "fp16": "", + "fp32": 27.64, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13146,7 +14694,7 @@ "Precisions": [ { "int4": "", - "int8": 210.29, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -13158,19 +14706,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.8, - "fp16": 2.81, - "fp32": "", - "bf16": "" + "int8": 484.32, + "fp16": "", + "fp32": 278.4, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13180,7 +14732,7 @@ "Precisions": [ { "int4": "", - "int8": 164.94, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -13192,19 +14744,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.92, - "fp16": 2.89, - "fp32": "", - "bf16": "" + "int8": 112.23, + "fp16": "", + "fp32": 42.14, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13214,7 +14770,7 @@ "Precisions": [ { "int4": "", - "int8": 143.88, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -13226,19 +14782,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", + "Platform": "Intel® Processor N100 CPU+iGPU", "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.32, - "fp16": 4.74, - "fp32": "", - "bf16": "" + "int8": 2.04, + "fp16": "", + "fp32": 0.6, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13248,7 +14808,7 @@ "Precisions": [ { "int4": "", - "int8": 117.76, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -13260,19 +14820,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "ssd-resnet34-1200", - 
"Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.43, - "fp16": 3.17, - "fp32": "", - "bf16": "" + "int8": 216.96, + "fp16": "", + "fp32": 94.92, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13282,7 +14846,7 @@ "Precisions": [ { "int4": "", - "int8": 170.95, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -13294,19 +14858,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 10.46, - "fp16": 6.1, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 34.52, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13316,7 +14884,7 @@ "Precisions": [ { "int4": "", - "int8": 100.05, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -13328,19 +14896,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 7.12, - "fp16": 4.0, - "fp32": "", - "bf16": "" + "int8": 61.06, + "fp16": "", + "fp32": 28.61, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13350,7 +14922,7 @@ "Precisions": [ { "int4": "", - "int8": 126.18, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -13362,19 +14934,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 15.44, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 12.75, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13384,7 +14960,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 66.23, "fp16": "", "fp32": "", "bf16": "" @@ -13396,19 +14972,23 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.62, - "fp16": 1.01, - "fp32": "", - "bf16": "" + "int8": 296.53, + "fp16": "", + "fp32": 183.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13418,7 +14998,7 @@ "Precisions": [ { "int4": "", - "int8": 623.25, + "int8": 
3.8, "fp16": "", "fp32": "", "bf16": "" @@ -13430,19 +15010,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 21.37, - "fp16": 12.68, - "fp32": "", - "bf16": "" + "int8": 48.77, + "fp16": "", + "fp32": 20.13, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13452,7 +15036,7 @@ "Precisions": [ { "int4": "", - "int8": 47.25, + "int8": 21.88, "fp16": "", "fp32": "", "bf16": "" @@ -13464,19 +15048,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", + "Platform": "Intel® Processor N100 CPU-only", "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": 10.09, - "fp32": "", - "bf16": "" + "int8": 0.82, + "fp16": "", + "fp32": 0.31, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13486,7 +15074,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1224.62, "fp16": "", "fp32": "", "bf16": "" @@ -13498,19 +15086,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 65.86, - "fp16": 39.71, - "fp32": "", - "bf16": "" + "int8": 106.12, + "fp16": "", + "fp32": 49.52, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13520,7 +15112,7 @@ "Precisions": [ { "int4": "", - "int8": 21.36, + "int8": 9.72, "fp16": "", "fp32": "", "bf16": "" @@ -13532,19 +15124,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.53, + "int8": "", "fp16": "", - "fp32": 0.87, - "bf16": "" + "fp32": 15.36, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13566,19 +15162,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.25, + "int8": 23.65, "fp16": "", - "fp32": 1.08, - "bf16": "" + "fp32": 12.86, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13588,7 +15188,7 @@ "Precisions": [ { "int4": "", - "int8": 
"", + "int8": 43.43, "fp16": "", "fp32": "", "bf16": "" @@ -13600,19 +15200,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.37, - "fp16": "", - "fp32": 2.71, - "bf16": "" + "int8": 33.69, + "fp16": 30.91, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13622,7 +15226,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 38.02, "fp16": "", "fp32": "", "bf16": "" @@ -13634,19 +15238,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.52, - "fp16": "", - "fp32": 1.13, - "bf16": "" + "int8": 337.95, + "fp16": 267.38, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13656,7 +15264,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 3.84, "fp16": "", "fp32": "", "bf16": "" @@ -13668,19 +15276,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4.77, - "fp16": "", - "fp32": 1.75, - "bf16": "" + "int8": 81.72, + "fp16": 49.76, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13690,7 +15302,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 13.15, "fp16": "", "fp32": "", "bf16": "" @@ -13702,19 +15314,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", + "Platform": "Intel® Processor N100 iGPU-only", "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.81, - "fp16": "", - "fp32": 3.81, - "bf16": "" + "int8": 1.62, + "fp16": 1.01, + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13724,7 +15340,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 622.97, "fp16": "", "fp32": "", "bf16": "" @@ -13736,19 +15352,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4.33, - "fp16": "", - "fp32": 1.68, - "bf16": "" + "int8": 164.31, + "fp16": 106.85, + 
"fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13758,7 +15378,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 7.35, "fp16": "", "fp32": "", "bf16": "" @@ -13770,19 +15390,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 47.04, + "fp16": 34.97, "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13792,7 +15416,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 23.03, "fp16": "", "fp32": "", "bf16": "" @@ -13804,19 +15428,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.71, + "int8": "", "fp16": "", - "fp32": 2.16, - "bf16": "" + "fp32": "", + "bf16": 2314.52, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13829,7 +15457,7 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 6.96 } ], "Unit": "ms", @@ -13838,10 +15466,11 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ @@ -13849,8 +15478,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 3.05, - "bf16": "" + "fp32": "", + "bf16": 1153.0, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -13863,7 +15495,7 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 6.38 } ], "Unit": "ms", @@ -13872,19 +15504,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "gemma-2-9b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.05, + "int8": "", "fp16": "", - "fp32": 0.61, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 15.13, + "token_int8": 11.3, + "token_fp16": 7.91 } ], "Unit": "FPS", @@ -13893,9 +15529,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 66.06, + "int8": 88.43, + "fp16": 126.41, "fp32": "", "bf16": "" } @@ -13906,19 +15542,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { 
"throughput": { "Precisions": [ { "int4": "", - "int8": 18.73, + "int8": "", "fp16": "", - "fp32": 9.5, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 18.48, + "token_int8": 12.91, + "token_fp16": 8.64 } ], "Unit": "FPS", @@ -13927,9 +15567,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 54.09, + "int8": 77.42, + "fp16": 115.66, "fp32": "", "bf16": "" } @@ -13940,19 +15580,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 12.04, + "int8": "", "fp16": "", - "fp32": 4.95, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 21.97, + "token_int8": 15.54, + "token_fp16": 11.29 } ], "Unit": "FPS", @@ -13961,9 +15605,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 45.5, + "int8": 64.31, + "fp16": 88.54, "fp32": "", "bf16": "" } @@ -13974,19 +15618,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "llama-3-8b", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 97.87, + "int8": "", "fp16": "", - "fp32": 45.25, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 20.0, + "token_int8": 14.57, + "token_fp16": 9.82 } ], "Unit": "FPS", @@ -13995,9 +15643,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 10.56, - "fp16": "", + "int4": 49.98, + "int8": 68.59, + "fp16": 101.73, "fp32": "", "bf16": "" } @@ -14008,19 +15656,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 45.67, + "int8": "", "fp16": "", - "fp32": 21.53, - "bf16": "" + "fp32": "", + "bf16": 35.29, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14030,10 +15682,10 @@ "Precisions": [ { "int4": "", - "int8": 22.79, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 100.04 } ], "Unit": "ms", @@ -14042,19 +15694,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "mistral-7b-v0.1", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 115.01, + "int8": "", "fp16": "", - "fp32": 36.97, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 22.73, + "token_int8": 15.18, + "token_fp16": 10.61 } ], "Unit": "FPS", @@ -14063,9 +15719,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 9.06, - "fp16": "", + "int4": 43.98, + "int8": 65.87, + "fp16": 94.18, "fp32": "", "bf16": "" } 
@@ -14076,19 +15732,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 211.59, + "int8": "", "fp16": "", - "fp32": 119.52, - "bf16": "" + "fp32": "", + "bf16": 13124.67, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14098,10 +15758,10 @@ "Precisions": [ { "int4": "", - "int8": 4.95, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 0.83 } ], "Unit": "ms", @@ -14110,19 +15770,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 327.96, + "int8": "", "fp16": "", - "fp32": 163.65, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 31.5, + "token_int8": 25.32, + "token_fp16": 18.29 } ], "Unit": "FPS", @@ -14131,9 +15795,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 3.63, - "fp16": "", + "int4": 31.74, + "int8": 39.49, + "fp16": 54.66, "fp32": "", "bf16": "" } @@ -14144,19 +15808,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "qwen2-7b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 326.48, + "int8": "", "fp16": "", - "fp32": 98.82, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 21.22, + "token_int8": 15.5, + "token_fp16": 10.78 } ], "Unit": "FPS", @@ -14165,9 +15833,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 4.67, - "fp16": "", + "int4": 47.12, + "int8": 64.49, + "fp16": 92.7, "fp32": "", "bf16": "" } @@ -14178,19 +15846,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 383.99, + "int8": "", "fp16": "", - "fp32": 136.38, - "bf16": "" + "fp32": "", + "bf16": 10591.04, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14200,10 +15872,10 @@ "Precisions": [ { "int4": "", - "int8": 4.16, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 1.8 } ], "Unit": "ms", @@ -14212,19 +15884,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", 
- "int8": 1073.43, + "int8": "", "fp16": "", - "fp32": 385.05, - "bf16": "" + "fp32": "", + "bf16": 204.32, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14234,10 +15910,10 @@ "Precisions": [ { "int4": "", - "int8": 1.31, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 17.56 } ], "Unit": "ms", @@ -14246,19 +15922,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 335.03, + "int8": "", "fp16": "", - "fp32": 182.29, - "bf16": "" + "fp32": "", + "bf16": 7286.92, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14268,10 +15948,10 @@ "Precisions": [ { "int4": "", - "int8": 3.15, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 1.08 } ], "Unit": "ms", @@ -14280,19 +15960,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 495.53, + "int8": "", "fp16": "", - "fp32": 147.22, - "bf16": "" + "fp32": "", + "bf16": 2130.01, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14302,7 +15986,7 @@ "Precisions": [ { "int4": "", - "int8": 2.19, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -14314,19 +15998,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® GNR29 CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 385.05, + "int8": "", "fp16": "", - "fp32": 100.34, - "bf16": "" + "fp32": "", + "bf16": 2528.52, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14336,10 +16024,10 @@ "Precisions": [ { "int4": "", - "int8": 2.83, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 4.55 } ], "Unit": "ms", @@ -14348,19 +16036,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 832.22, + "int8": 218.18, "fp16": "", - "fp32": 284.89, - "bf16": "" + "fp32": 80.36, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14370,7 +16062,7 @@ "Precisions": [ { "int4": "", - "int8": 1.73, + "int8": 14.4, "fp16": "", "fp32": "", "bf16": "" @@ -14382,19 +16074,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + 
"Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 423.45, + "int8": 271.94, "fp16": "", - "fp32": 153.0, - "bf16": "" + "fp32": 167.25, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14404,7 +16100,7 @@ "Precisions": [ { "int4": "", - "int8": 3.88, + "int8": 11.07, "fp16": "", "fp32": "", "bf16": "" @@ -14416,19 +16112,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 3.26, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 0.9, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14438,7 +16138,7 @@ "Precisions": [ { "int4": "", - "int8": 2.45, + "int8": 637.88, "fp16": "", "fp32": "", "bf16": "" @@ -14450,19 +16150,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 275.79, + "int8": 5417.98, "fp16": "", - "fp32": 151.19, - "bf16": "" + "fp32": 1926.0, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14472,7 +16176,7 @@ "Precisions": [ { "int4": "", - "int8": 3.22, + "int8": 1.45, "fp16": "", "fp32": "", "bf16": "" @@ -14484,19 +16188,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 351.76, + "int8": 979.5, "fp16": "", - "fp32": 183.93, - "bf16": "" + "fp32": 267.16, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14506,7 +16214,7 @@ "Precisions": [ { "int4": "", - "int8": 3.41, + "int8": 3.06, "fp16": "", "fp32": "", "bf16": "" @@ -14518,19 +16226,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1607.16, + "int8": 17.65, "fp16": "", - "fp32": 587.94, - "bf16": "" + "fp32": 4.58, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14540,7 +16252,7 @@ "Precisions": [ { "int4": "", - "int8": 1.09, + "int8": 116.19, "fp16": "", "fp32": "", "bf16": "" @@ -14552,19 +16264,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU-only", + 
"Platform": "Intel® Xeon® Gold 5218T CPU-only", "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 105.81, + "int8": 2104.85, "fp16": "", - "fp32": 49.62, - "bf16": "" + "fp32": 639.65, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14574,7 +16290,7 @@ "Precisions": [ { "int4": "", - "int8": 9.76, + "int8": 1.56, "fp16": "", "fp32": "", "bf16": "" @@ -14586,19 +16302,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 575.93, + "int8": "", "fp16": "", - "fp32": 272.53, - "bf16": "" + "fp32": 206.18, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14608,7 +16328,7 @@ "Precisions": [ { "int4": "", - "int8": 2.33, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -14620,19 +16340,23 @@ } }, { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Gold 5218T CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 202.32, + "int8": 440.56, "fp16": "", - "fp32": 120.89, - "bf16": "" + "fp32": 173.57, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14642,7 +16366,7 @@ "Precisions": [ { "int4": "", - "int8": 5.11, + "int8": 5.93, "fp16": "", "fp32": "", "bf16": "" @@ -14654,19 +16378,23 @@ } }, { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2068.2, + "int8": 426.19, "fp16": "", - "fp32": 637.61, - "bf16": "" + "fp32": 162.63, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14676,7 +16404,7 @@ "Precisions": [ { "int4": "", - "int8": 1.66, + "int8": 11.09, "fp16": "", "fp32": "", "bf16": "" @@ -14688,19 +16416,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6032.65, + "int8": 411.51, "fp16": "", - "fp32": 1652.7, - "bf16": "" + "fp32": 254.65, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14710,7 +16442,7 @@ "Precisions": [ { "int4": "", - "int8": 1.23, + "int8": 8.51, "fp16": "", 
"fp32": "", "bf16": "" @@ -14722,19 +16454,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 6.45, "fp16": "", - "fp32": 2525.11, - "bf16": "" + "fp32": 1.65, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14744,7 +16480,7 @@ "Precisions": [ { "int4": "", - "int8": 0.69, + "int8": 321.85, "fp16": "", "fp32": "", "bf16": "" @@ -14756,19 +16492,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 23599.57, + "int8": 10273.19, "fp16": "", - "fp32": 3405.56, - "bf16": 12205.1 + "fp32": 3342.96, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14778,10 +16518,10 @@ "Precisions": [ { "int4": "", - "int8": 0.74, + "int8": 1.21, "fp16": "", "fp32": "", - "bf16": 0.87 + "bf16": "" } ], "Unit": "ms", @@ -14790,19 +16530,23 @@ } }, { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 2125.81, "fp16": "", - "fp32": 4698.93, - "bf16": 16520.05 + "fp32": 570.61, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14812,10 +16556,10 @@ "Precisions": [ { "int4": "", - "int8": 0.83, + "int8": 1.84, "fp16": "", "fp32": "", - "bf16": 1.2 + "bf16": "" } ], "Unit": "ms", @@ -14825,18 +16569,22 @@ }, { "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4267.65, + "int8": 41.83, "fp16": "", - "fp32": 1238.3, - "bf16": "" + "fp32": 10.91, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14846,7 +16594,7 @@ "Precisions": [ { "int4": "", - "int8": 1.29, + "int8": 49.53, "fp16": "", "fp32": "", "bf16": "" @@ -14858,19 +16606,23 @@ } }, { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4642.89, + "int8": 4376.71, "fp16": "", - "fp32": 1227.04, - "bf16": "" + "fp32": 1244.57, + 
"bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14880,7 +16632,7 @@ "Precisions": [ { "int4": "", - "int8": 0.93, + "int8": 1.22, "fp16": "", "fp32": "", "bf16": "" @@ -14892,19 +16644,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 682.74, + "int8": "", "fp16": "", - "fp32": 243.93, - "bf16": "" + "fp32": 383.86, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14914,7 +16670,7 @@ "Precisions": [ { "int4": "", - "int8": 2.71, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -14926,19 +16682,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 6238L CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 533.14, + "int8": 749.14, "fp16": "", - "fp32": 179.15, - "bf16": "" + "fp32": 338.04, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14948,7 +16708,7 @@ "Precisions": [ { "int4": "", - "int8": 3.35, + "int8": 4.21, "fp16": "", "fp32": "", "bf16": "" @@ -14960,19 +16720,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 663.32, + "int8": 622.71, "fp16": "", - "fp32": 226.12, - "bf16": "" + "fp32": 240.52, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -14982,7 +16746,7 @@ "Precisions": [ { "int4": "", - "int8": 2.25, + "int8": 6.4, "fp16": "", "fp32": "", "bf16": "" @@ -14994,19 +16758,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2300.75, - "fp16": 1706.66, - "fp32": "", - "bf16": "" + "int8": 721.9, + "fp16": "", + "fp32": 423.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15016,7 +16784,7 @@ "Precisions": [ { "int4": "", - "int8": 0.91, + "int8": 4.83, "fp16": "", "fp32": "", "bf16": "" @@ -15028,19 +16796,23 @@ } }, { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + 
"PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 10.46, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 2.45, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15050,7 +16822,7 @@ "Precisions": [ { "int4": "", - "int8": 1.28, + "int8": 221.46, "fp16": "", "fp32": "", "bf16": "" @@ -15062,19 +16834,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Accelerator Platforms", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1024.85, - "fp16": 777.67, - "fp32": "", - "bf16": "" + "int8": 16509.95, + "fp16": "", + "fp32": 5201.56, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15084,7 +16860,7 @@ "Precisions": [ { "int4": "", - "int8": 1.33, + "int8": 0.59, "fp16": "", "fp32": "", "bf16": "" @@ -15096,19 +16872,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 704.76, - "fp16": 451.92, - "fp32": "", - "bf16": "" + "int8": 3352.09, + "fp16": "", + "fp32": 825.5, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15118,7 +16898,7 @@ "Precisions": [ { "int4": "", - "int8": 1.65, + "int8": 1.34, "fp16": "", "fp32": "", "bf16": "" @@ -15130,19 +16910,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 705.61, - "fp16": 458.2, - "fp32": "", - "bf16": "" + "int8": 60.91, + "fp16": "", + "fp32": 15.11, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15152,7 +16936,7 @@ "Precisions": [ { "int4": "", - "int8": 1.66, + "int8": 36.91, "fp16": "", "fp32": "", "bf16": "" @@ -15164,19 +16948,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 229.58, - "fp16": 198.86, - "fp32": "", - "bf16": "" + "int8": 6975.09, + "fp16": "", + "fp32": 1755.62, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15186,7 +16974,7 @@ "Precisions": [ { "int4": "", - "int8": 7.67, + "int8": 0.77, "fp16": "", "fp32": "", "bf16": "" @@ -15198,19 +16986,23 @@ } }, { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": 
"ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 210.98, - "fp16": 137.08, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 571.3, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15220,7 +17012,7 @@ "Precisions": [ { "int4": "", - "int8": 6.21, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -15232,19 +17024,23 @@ } }, { - "Platform": "Intel® Atom® X6425E iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Xeon® Gold 6338N CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 92.82, - "fp16": 95.83, - "fp32": "", - "bf16": "" + "int8": 1224.86, + "fp16": "", + "fp32": 495.73, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15254,7 +17050,7 @@ "Precisions": [ { "int4": "", - "int8": 13.65, + "int8": 2.98, "fp16": "", "fp32": "", "bf16": "" @@ -15266,19 +17062,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 396.49, - "fp16": 222.23, - "fp32": "", - "bf16": "" + "int8": 587.54, + "fp16": "", + "fp32": 225.64, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15288,7 +17088,7 @@ "Precisions": [ { "int4": "", - "int8": 4.31, + "int8": 9.18, "fp16": "", "fp32": "", "bf16": "" @@ -15300,19 +17100,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 392.98, - "fp16": 229.59, - "fp32": "", - "bf16": "" + "int8": 580.8, + "fp16": "", + "fp32": 343.39, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15322,7 +17126,7 @@ "Precisions": [ { "int4": "", - "int8": 3.07, + "int8": 6.9, "fp16": "", "fp32": "", "bf16": "" @@ -15334,19 +17138,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 405.21, - "fp16": 238.92, - "fp32": "", - "bf16": "" + "int8": 8.58, + "fp16": "", + "fp32": 2.26, + "bf16": "", + "token_int4": "", + "token_int8": "", 
+ "token_fp16": "" } ], "Unit": "FPS", @@ -15356,7 +17164,7 @@ "Precisions": [ { "int4": "", - "int8": 2.72, + "int8": 248.72, "fp16": "", "fp32": "", "bf16": "" @@ -15368,19 +17176,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": 298.42, - "fp32": "", - "bf16": "" + "int8": 14930.31, + "fp16": "", + "fp32": 4646.16, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15390,7 +17202,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.93, "fp16": "", "fp32": "", "bf16": "" @@ -15402,19 +17214,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 360.04, - "fp16": 225.51, - "fp32": "", - "bf16": "" + "int8": 2965.31, + "fp16": "", + "fp32": 761.01, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15424,7 +17240,7 @@ "Precisions": [ { "int4": "", - "int8": 3.42, + "int8": 1.59, "fp16": "", "fp32": "", "bf16": "" @@ -15436,19 +17252,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 748.06, - "fp16": 416.28, - "fp32": "", - "bf16": "" + "int8": 58.15, + "fp16": "", + "fp32": 15.0, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15458,7 +17278,7 @@ "Precisions": [ { "int4": "", - "int8": 1.86, + "int8": 37.18, "fp16": "", "fp32": "", "bf16": "" @@ -15470,19 +17290,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 488.8, - "fp16": 302.47, - "fp32": "", - "bf16": "" + "int8": 6130.48, + "fp16": "", + "fp32": 1654.84, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15492,7 +17316,7 @@ "Precisions": [ { "int4": "", - "int8": 2.51, + "int8": 1.2, "fp16": "", "fp32": "", "bf16": "" @@ -15504,10 +17328,11 @@ } }, { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", 
"Parameters": { "throughput": { "Precisions": [ @@ -15515,8 +17340,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 512.57, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15538,19 +17366,23 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8280 CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 164.35, - "fp16": 106.86, - "fp32": "", - "bf16": "" + "int8": 996.59, + "fp16": "", + "fp32": 452.05, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15560,7 +17392,7 @@ "Precisions": [ { "int4": "", - "int8": 7.36, + "int8": 3.6, "fp16": "", "fp32": "", "bf16": "" @@ -15572,19 +17404,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 979.63, - "fp16": 802.16, - "fp32": "", - "bf16": "" + "int8": 881.04, + "fp16": "", + "fp32": 338.12, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15594,7 +17430,7 @@ "Precisions": [ { "int4": "", - "int8": 1.15, + "int8": 5.18, "fp16": "", "fp32": "", "bf16": "" @@ -15606,19 +17442,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 974.05, - "fp16": 701.6, - "fp32": "", - "bf16": "" + "int8": 1009.71, + "fp16": "", + "fp32": 562.38, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15628,7 +17468,7 @@ "Precisions": [ { "int4": "", - "int8": 1.35, + "int8": 4.28, "fp16": "", "fp32": "", "bf16": "" @@ -15640,19 +17480,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "gemma-2-9b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 985.08, - "fp16": 990.93, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 20.78, + "token_int8": 14.18, + "token_fp16": 7.72 } ], "Unit": "FPS", @@ -15661,9 +17505,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 1.87, - "fp16": "", + "int4": 48.12, + "int8": 70.5, + "fp16": 129.51, "fp32": "", "bf16": "" } @@ -15674,19 +17518,23 @@ } }, { - "Platform": "Intel® Atom® x7425E CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": 
"Intel® Xeon® Platinum 8380 CPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 234.86, + "int8": "", "fp16": "", - "fp32": 116.72, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 22.79, + "token_int8": 15.56, + "token_fp16": 8.48 } ], "Unit": "FPS", @@ -15695,9 +17543,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 43.86, + "int8": 64.26, + "fp16": 117.92, "fp32": "", "bf16": "" } @@ -15708,19 +17556,23 @@ } }, { - "Platform": "Intel® Atom® X6425E CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 111.52, + "int8": "", "fp16": "", - "fp32": 98.51, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 25.41, + "token_int8": 18.68, + "token_fp16": 10.61 } ], "Unit": "FPS", @@ -15729,9 +17581,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 39.34, + "int8": 53.51, + "fp16": 94.17, "fp32": "", "bf16": "" } @@ -15742,19 +17594,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "llama-3-8b", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 318.36, + "int8": "", "fp16": "", - "fp32": 194.83, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 26.07, + "token_int8": 17.66, + "token_fp16": 9.72 } ], "Unit": "FPS", @@ -15763,9 +17619,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 38.35, + "int8": 56.62, + "fp16": 102.88, "fp32": "", "bf16": "" } @@ -15776,10 +17632,11 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ @@ -15787,8 +17644,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 117.54, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 46.81, + "token_int8": 33.54, + "token_fp16": 19.32 } ], "Unit": "FPS", @@ -15797,9 +17657,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 21.36, + "int8": 29.81, + "fp16": 51.74, "fp32": "", "bf16": "" } @@ -15810,19 +17670,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 471.49, + "int8": 14.73, "fp16": "", - "fp32": 189.58, - "bf16": "" + "fp32": 3.42, 
+ "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15844,19 +17708,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "mistral-7b-v0.1", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 650.89, + "int8": "", "fp16": "", - "fp32": 298.25, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 26.89, + "token_int8": 18.54, + "token_fp16": 10.22 } ], "Unit": "FPS", @@ -15865,9 +17733,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 37.18, + "int8": 53.93, + "fp16": 97.8, "fp32": "", "bf16": "" } @@ -15878,19 +17746,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 22703.47, "fp16": "", - "fp32": 160.27, - "bf16": "" + "fp32": 6937.71, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15900,7 +17772,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.58, "fp16": "", "fp32": "", "bf16": "" @@ -15912,10 +17784,11 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ @@ -15923,8 +17796,11 @@ "int4": "", "int8": "", "fp16": "", - "fp32": 380.11, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 39.41, + "token_int8": 29.28, + "token_fp16": 17.35 } ], "Unit": "FPS", @@ -15933,9 +17809,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 25.37, + "int8": 34.15, + "fp16": 57.61, "fp32": "", "bf16": "" } @@ -15946,19 +17822,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "qwen2-7b", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 554.71, + "int8": "", "fp16": "", - "fp32": 229.14, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 28.26, + "token_int8": 19.32, + "token_fp16": 10.27 } ], "Unit": "FPS", @@ -15967,9 +17847,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 35.38, + "int8": 51.74, + "fp16": 97.35, "fp32": "", "bf16": "" } @@ -15980,19 +17860,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + 
"PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 4874.95, "fp16": "", - "fp32": 350.12, - "bf16": "" + "fp32": 1144.73, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16002,7 +17886,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1.07, "fp16": "", "fp32": "", "bf16": "" @@ -16014,19 +17898,23 @@ } }, { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 217.21, + "int8": 84.6, "fp16": "", - "fp32": 95.95, - "bf16": "" + "fp32": 20.95, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16048,19 +17936,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1045.56, + "int8": 10174.18, "fp16": "", - "fp32": 608.3, - "bf16": "" + "fp32": 2524.59, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16070,7 +17962,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.7, "fp16": "", "fp32": "", "bf16": "" @@ -16082,19 +17974,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "stable-diffusion-v1-5", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 748.16, + "int8": "", "fp16": "", - "fp32": 419.13, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16104,8 +18000,8 @@ "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 13.34, + "fp16": 13.66, "fp32": "", "bf16": "" } @@ -16116,19 +18012,23 @@ } }, { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "yolo11", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1.49, + "int8": "", "fp16": "", - "fp32": 0.37, - "bf16": "" + "fp32": 803.12, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16138,7 +18038,7 @@ "Precisions": [ { "int4": "", - "int8": 672.94, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -16150,19 +18050,23 @@ } }, { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8380 CPU-only", + "Model": "yolo_v8n", + "featured_SKU": false, 
+ "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2.54, + "int8": 1704.08, "fp16": "", - "fp32": 1.37, - "bf16": "" + "fp32": 697.23, + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16172,7 +18076,7 @@ "Precisions": [ { "int4": "", - "int8": 403.86, + "int8": 2.36, "fp16": "", "fp32": "", "bf16": "" @@ -16184,19 +18088,23 @@ } }, { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.62, + "int8": 3023.92, "fp16": "", - "fp32": 2.01, - "bf16": "" + "fp32": 483.11, + "bf16": 1976.63, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16206,10 +18114,10 @@ "Precisions": [ { "int4": "", - "int8": 306.85, + "int8": 3.79, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 4.84 } ], "Unit": "ms", @@ -16218,19 +18126,23 @@ } }, { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.8, + "int8": 1445.78, "fp16": "", - "fp32": 0.82, - "bf16": "" + "fp32": 861.51, + "bf16": 1021.75, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16240,10 +18152,10 @@ "Precisions": [ { "int4": "", - "int8": 348.07, + "int8": 4.69, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 5.16 } ], "Unit": "ms", @@ -16252,19 +18164,23 @@ } }, { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "gemma-2-9b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4.66, + "int8": "", "fp16": "", - "fp32": 0.83, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 22.71, + "token_int8": 16.83, + "token_fp16": 10.76 } ], "Unit": "FPS", @@ -16273,9 +18189,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 310.23, - "fp16": "", + "int4": 44.03, + "int8": 59.39, + "fp16": 92.87, "fp32": "", "bf16": "" } @@ -16286,19 +18202,23 @@ } }, { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 12.67, + "int8": "", "fp16": "", - "fp32": 3.3, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 23.7, + "token_int8": 16.93, + "token_fp16": 11.27 } ], "Unit": "FPS", @@ -16307,9 +18227,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 93.38, - "fp16": "", 
+ "int4": 42.19, + "int8": 59.04, + "fp16": 88.67, "fp32": "", "bf16": "" } @@ -16320,19 +18240,23 @@ } }, { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.95, + "int8": "", "fp16": "", - "fp32": 2.06, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 26.11, + "token_int8": 20.1, + "token_fp16": 14.19 } ], "Unit": "FPS", @@ -16341,9 +18265,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 247.13, - "fp16": "", + "int4": 38.29, + "int8": 49.73, + "fp16": 70.45, "fp32": "", "bf16": "" } @@ -16354,19 +18278,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "llama-3-8b", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.54, + "int8": "", "fp16": "", - "fp32": 1.66, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 26.02, + "token_int8": 18.97, + "token_fp16": 13.23 } ], "Unit": "FPS", @@ -16375,9 +18303,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 154.95, - "fp16": "", + "int4": 38.42, + "int8": 52.71, + "fp16": 75.57, "fp32": "", "bf16": "" } @@ -16388,19 +18316,23 @@ } }, { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4.97, + "int8": "", "fp16": "", - "fp32": 1.24, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 45.68, + "token_int8": 36.96, + "token_fp16": 27.27 } ], "Unit": "FPS", @@ -16409,9 +18341,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 206.27, - "fp16": "", + "int4": 21.89, + "int8": 27.05, + "fp16": 36.67, "fp32": "", "bf16": "" } @@ -16422,19 +18354,23 @@ } }, { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 9.59, + "int8": 62.13, "fp16": "", - "fp32": 2.4, - "bf16": "" + "fp32": 5.19, + "bf16": 37.54, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16444,10 +18380,10 @@ "Precisions": [ { "int4": "", - "int8": 121.76, + "int8": 58.49, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 81.95 } ], "Unit": "ms", @@ -16456,19 +18392,23 @@ } }, { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "mistral-7b-v0.1", + 
"featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 5.14, + "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 27.42, + "token_int8": 19.9, + "token_fp16": 13.72 } ], "Unit": "FPS", @@ -16477,9 +18417,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 292.14, - "fp16": "", + "int4": 36.46, + "int8": 50.24, + "fp16": 72.84, "fp32": "", "bf16": "" } @@ -16490,19 +18430,23 @@ } }, { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 7.22, + "int8": 38538.65, "fp16": "", - "fp32": 1.55, - "bf16": "" + "fp32": 10274.08, + "bf16": 25608.67, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16512,10 +18456,10 @@ "Precisions": [ { "int4": "", - "int8": 178.96, + "int8": 0.65, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 0.66 } ], "Unit": "ms", @@ -16524,19 +18468,23 @@ } }, { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.06, + "int8": "", "fp16": "", - "fp32": 1.66, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": 33.53, + "token_fp16": 23.1 } ], "Unit": "FPS", @@ -16545,9 +18493,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 254.86, - "fp16": "", + "int4": 24.06, + "int8": 29.82, + "fp16": 43.29, "fp32": "", "bf16": "" } @@ -16558,19 +18506,23 @@ } }, { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "qwen2-7b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 3.79, + "int8": "", "fp16": "", - "fp32": 1.91, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 30.03, + "token_int8": 22.14, + "token_fp16": 13.95 } ], "Unit": "FPS", @@ -16579,9 +18531,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 282.74, - "fp16": "", + "int4": 33.3, + "int8": 45.16, + "fp16": 71.68, "fp32": "", "bf16": "" } @@ -16592,19 +18544,23 @@ } }, { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 18.74, + "int8": 19226.96, "fp16": "", - "fp32": 4.12, - "bf16": "" + "fp32": 1597.37, + "bf16": 7480.12, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -16614,10 +18570,10 @@ "Precisions": [ { 
"int4": "", - "int8": 72.32, + "int8": 1.01, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 1.25 } ], "Unit": "ms", @@ -16626,8485 +18582,23 @@ } }, { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "ssd-resnet34-1200", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { - "int4": "", - "int8": 6.13, - "fp16": "", - "fp32": 3.29, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 174.28, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 2.45, - "fp16": "", - "fp32": 1.35, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 415.75, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 28.98, - "fp16": "", - "fp32": 7.41, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 69.96, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 96.65, - "fp16": "", - "fp32": 22.64, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 23.08, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 139.91, - "fp16": "", - "fp32": 33.26, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 15.34, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 511.53, - "fp16": "", - "fp32": 48.78, - "bf16": 276.34 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 6.66, - "fp16": "", - "fp32": "", - "bf16": 9.97 - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® 
Platinum 8580 CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 617.57, - "fp16": "", - "fp32": 56.93, - "bf16": 334.64 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.71, - "fp16": "", - "fp32": "", - "bf16": 7.81 - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 69.09, - "fp16": "", - "fp32": 16.42, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 29.59, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 69.98, - "fp16": "", - "fp32": 16.46, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 30.05, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 8.9, - "fp16": "", - "fp32": 1.98, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 152.81, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 6.79, - "fp16": "", - "fp32": 1.6, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 179.19, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 7.92, - "fp16": "", - "fp32": 2.48, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 185.72, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 276.55, - "fp16": 194.61, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - 
"latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.38, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 6.54, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 55.61, - "fp16": 34.5, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 19.05, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": 9.86, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": 9.91, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": 46.18, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 3.4, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 294.79, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® X6425E iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": 
{ - "Precisions": [ - { - "int4": "", - "int8": 1.98, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 505.59, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 8.29, - "fp16": 4.48, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 121.49, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 10.02, - "fp16": 4.68, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 79.13, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 9.86, - "fp16": 4.73, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 82.71, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 14.62, - "fp16": 7.64, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 63.94, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": 5.13, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 93.71, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 18.44, - "fp16": 9.72, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 54.75, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - 
"Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 12.84, - "fp16": 6.49, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 68.04, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 2.56, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 390.36, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 36.35, - "fp16": 23.49, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 25.29, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 31.67, - "fp16": 18.53, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 105.25, - "fp16": 74.67, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 10.37, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 8.85, - "fp16": "", - "fp32": 4.55, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is 
better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 1.47, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 7.38, - "fp16": "", - "fp32": 2.41, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 15.79, - "fp16": "", - "fp32": 6.34, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 7.73, - "fp16": "", - "fp32": 2.82, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 6.37, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 9.45, - "fp16": "", - "fp32": 3.58, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - 
"throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 5.04, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 31.21, - "fp16": "", - "fp32": 16.3, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 7.99, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® x7425E CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 5.65, - "fp16": "", - "fp32": 2.51, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 182.75, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® X6425E CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 2.58, - "fp16": "", - "fp32": 1.04, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 393.57, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 6.38, - "fp16": "", - "fp32": 1.86, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 158.93, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 12.59, - "fp16": "", - "fp32": 6.77, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 80.57, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": 
"yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 18.57, - "fp16": "", - "fp32": 9.33, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 57.66, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 18.83, - "fp16": "", - "fp32": 5.89, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 76.97, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 23.43, - "fp16": "", - "fp32": 7.32, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 70.0, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 66.59, - "fp16": "", - "fp32": 20.16, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 19.62, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 19.4, - "fp16": "", - "fp32": 10.16, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 52.75, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 27.81, - "fp16": "", - "fp32": 7.85, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 37.86, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 20.29, - "fp16": "", - "fp32": 5.69, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 49.78, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - 
} - } - }, - { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 49.8, - "fp16": "", - "fp32": 14.8, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 25.53, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 25.87, - "fp16": "", - "fp32": 8.21, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 65.38, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 37.03, - "fp16": "", - "fp32": 10.74, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 36.87, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 15.32, - "fp16": "", - "fp32": 7.94, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 50.38, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 19.78, - "fp16": "", - "fp32": 9.0, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 54.97, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 95.49, - "fp16": "", - "fp32": 30.36, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 15.47, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 6.13, - "fp16": "", - "fp32": 2.62, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 167.91, - "fp16": "", - "fp32": "", - 
"bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 32.67, - "fp16": "", - "fp32": 14.74, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 34.71, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 12.06, - "fp16": "", - "fp32": 6.75, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 83.73, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 117.81, - "fp16": "", - "fp32": 35.12, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 19.72, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 334.59, - "fp16": "", - "fp32": 102.89, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 8.17, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 533.18, - "fp16": "", - "fp32": 154.64, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.76, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 1419.98, - "fp16": "", - "fp32": 214.44, - "bf16": 830.31 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 4.62, - "fp16": "", - "fp32": "", - "bf16": 5.98 - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 1755.21, - "fp16": "", - "fp32": 265.47, - "bf16": 1055.23 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - 
"latency": { - "Precisions": [ - { - "int4": "", - "int8": 4.46, - "fp16": "", - "fp32": "", - "bf16": 5.21 - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 251.62, - "fp16": "", - "fp32": 74.96, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 11.14, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 265.17, - "fp16": "", - "fp32": 76.67, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 9.63, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 44.74, - "fp16": "", - "fp32": 12.8, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 36.67, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 33.1, - "fp16": "", - "fp32": 9.15, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 45.86, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 288.05, - "fp16": 333.83, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.35, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 6.92, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 105.43, - "fp16": 
93.59, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 11.01, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 50.31, - "fp16": 40.99, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 21.31, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 50.34, - "fp16": 41.31, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 21.03, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 14.53, - "fp16": 9.52, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 71.11, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® X6425E iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 7.37, - "fp16": 7.57, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 141.78, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 31.36, - "fp16": 17.91, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 34.6, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 33.47, - "fp16": 18.2, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 24.97, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, 
iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 35.33, - "fp16": 18.84, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 24.68, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 48.16, - "fp16": 28.25, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 22.07, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 33.73, - "fp16": 18.76, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 29.57, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 63.37, - "fp16": 36.52, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 17.02, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 44.64, - "fp16": 24.85, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 20.43, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 53.52, - "fp16": 30.53, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 17.5, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 11.12, - "fp16": 7.41, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 91.66, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 
processor 155H iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 132.43, - "fp16": 70.84, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 7.23, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 120.91, - "fp16": 58.45, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 9.06, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® x7425E CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 17.32, - "fp16": "", - "fp32": 7.01, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® X6425E CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 8.67, - "fp16": "", - "fp32": 7.46, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 32.84, - "fp16": "", - "fp32": 18.05, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 7.54, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 32.16, - "fp16": "", - "fp32": 12.68, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", 
- "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 55.36, - "fp16": "", - "fp32": 25.15, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 29.16, - "fp16": "", - "fp32": 11.65, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 24.74, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 39.77, - "fp16": "", - "fp32": 15.13, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 52.61, - "fp16": "", - "fp32": 20.01, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 14.47, - "fp16": "", - "fp32": 4.84, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 120.56, - "fp16": "", - "fp32": 58.53, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 
"", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 70.85, - "fp16": "", - "fp32": 32.17, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® x7425E CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 21.65, - "fp16": "", - "fp32": 11.8, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 47.52, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® X6425E CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 10.28, - "fp16": "", - "fp32": 5.11, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 100.13, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 25.98, - "fp16": "", - "fp32": 9.65, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 40.09, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-8100 CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 53.64, - "fp16": "", - "fp32": 32.13, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 19.04, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-10500TE CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 81.71, - "fp16": "", - "fp32": 45.23, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 13.68, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 70.36, - "fp16": "", - "fp32": 27.96, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is 
better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 19.88, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 91.48, - "fp16": "", - "fp32": 36.51, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 18.13, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-13600K CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 272.16, - "fp16": "", - "fp32": 103.27, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.12, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-8500 CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 84.24, - "fp16": "", - "fp32": 49.32, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 11.97, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 111.42, - "fp16": "", - "fp32": 40.37, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 10.1, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 76.56, - "fp16": "", - "fp32": 27.61, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 13.37, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-12700H CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 200.97, - "fp16": "", - "fp32": 75.16, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 6.52, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1355U Processor CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 101.43, - "fp16": 
"", - "fp32": 40.83, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 16.95, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 9.34, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-8700T CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 70.91, - "fp16": "", - "fp32": 40.94, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 11.95, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-10900TE CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 89.73, - "fp16": "", - "fp32": 48.88, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 12.74, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 387.96, - "fp16": "", - "fp32": 154.46, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 4.11, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 23.66, - "fp16": "", - "fp32": 12.89, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 43.5, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® W1290P CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 136.65, - "fp16": "", - "fp32": 71.98, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 8.7, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® E-2124G CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - 
"Precisions": [ - { - "int4": "", - "int8": 51.95, - "fp16": "", - "fp32": 32.5, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 19.42, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Gold 5218T CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 442.04, - "fp16": "", - "fp32": 173.4, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 6.02, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8280 CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 997.47, - "fp16": "", - "fp32": 452.11, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 1718.36, - "fp16": "", - "fp32": 701.27, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 2.39, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 2411.67, - "fp16": "", - "fp32": 959.63, - "bf16": 2381.88 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.11, - "fp16": "", - "fp32": "", - "bf16": 2.53 - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 1262.79, - "bf16": 3461.09 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.15, - "fp16": "", - "fp32": "", - "bf16": 2.62 - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Gold 6238L CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 757.45, - "fp16": "", - "fp32": 337.98, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 4.31, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Silver 4316 CPU-only", - "Model": "yolo_v8n", - 
"Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 859.9, - "fp16": "", - "fp32": 340.51, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.38, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 166.62, - "fp16": "", - "fp32": 64.56, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 12.93, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 127.76, - "fp16": "", - "fp32": 46.89, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 14.67, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 139.58, - "fp16": "", - "fp32": 59.12, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 9.46, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 803.05, - "fp16": 733.02, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 2.32, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.14, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 322.1, - "fp16": 299.84, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.75, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": 
"lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H NPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 126.74, - "fp16": 129.8, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 9.1, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H NPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 127.62, - "fp16": 131.24, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 8.95, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V NPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 390.98, - "fp16": 506.71, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.51, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® x7425E iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 60.91, - "fp16": 44.63, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 18.54, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® X6425E iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 31.72, - "fp16": 33.09, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 35.85, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 121.95, - "fp16": 81.83, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 10.32, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 130.79, - "fp16": 80.07, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": 
[ - { - "int4": "", - "int8": 8.12, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 133.28, - "fp16": 83.52, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 6.92, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 164.13, - "fp16": 112.15, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 117.62, - "fp16": 77.67, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 9.6, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-12700H iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 218.62, - "fp16": 149.58, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.56, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1355U Processor iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 164.07, - "fp16": 107.64, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 6.35, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.83, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 47.04, - "fp16": 34.98, - "fp32": "", - "bf16": "" - } - 
], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 23.03, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 388.85, - "fp16": 291.51, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 2.61, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 386.42, - "fp16": 271.08, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.16, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 442.05, - "fp16": 412.46, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.33, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® x7425E CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 68.47, - "fp16": "", - "fp32": 36.85, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Atom® X6425E CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 36.31, - "fp16": "", - "fp32": 34.01, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Celeron® 6305E CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 114.66, - "fp16": "", - "fp32": 78.23, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1235U Processor CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - 
"throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 36.38, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i5-1335U Processor CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 126.5, - "fp16": "", - "fp32": 58.24, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185G7 CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 184.9, - "fp16": "", - "fp32": 103.88, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1185GRE CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 100.37, - "fp16": "", - "fp32": 51.71, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-12700H CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 117.6, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1355U Processor CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 154.41, - "fp16": "", - "fp32": 72.66, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 227.78, - "fp16": "", - "fp32": 103.62, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "yolo_v8n", - "Checked": 
"true", - "PlatformType": "Intel® Atom™, CPU+iGPU", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 61.09, - "fp16": "", - "fp32": 28.95, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 155H CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 369.1, - "fp16": "", - "fp32": 228.3, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 253.52, - "fp16": "", - "fp32": 149.21, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 15.4, - "int8": 11.0, - "fp16": 5.7, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 64.94, - "int8": 90.95, - "fp16": 176.6, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 30.9, - "int8": 22.8, - "fp16": 12.7, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 32.39, - "int8": 43.95, - "fp16": 78.58, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 33.7, - "int8": 25.7, - "fp16": 16.2, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 29.69, - "int8": 38.85, - "fp16": 61.67, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 39.5, - "int8": 29.1, - "fp16": 18.6, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 25.3, - "int8": 34.31, - "fp16": 53.7, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - 
"UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 9.4, - "int8": 6.2, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 106.9, - "int8": 162.58, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 56.8, - "int8": 45.2, - "fp16": 26.5, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 17.61, - "int8": 22.14, - "fp16": 37.67, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 45.5, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 21.99, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 11.4, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 87.86, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 15.8, - "int8": 11.3, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 63.49, - "int8": 88.47, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 18.0, - "int8": 12.6, - "fp16": 7.0, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - 
"latency": { - "Precisions": [ - { - "int4": 55.41, - "int8": 79.46, - "fp16": 142.26, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 14.8, - "int8": 9.4, - "fp16": 4.9, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 67.35, - "int8": 106.1, - "fp16": 205.6, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 28.4, - "int8": 19.7, - "fp16": 10.9, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 35.25, - "int8": 50.64, - "fp16": 91.76, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 30.6, - "int8": 21.6, - "fp16": 14.1, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 32.72, - "int8": 46.36, - "fp16": 71.16, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 35.7, - "int8": 24.3, - "fp16": 15.9, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 28.02, - "int8": 41.12, - "fp16": 62.78, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 9.8, - "int8": 5.4, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 102.44, - "int8": 186.87, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 44.7, - "int8": 32.1, - "fp16": 21.3, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 22.37, - "int8": 31.17, - "fp16": 47.04, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Accelerator 
Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 34.7, - "int8": 25.8, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 28.79, - "int8": 38.77, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 5.7, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 174.81, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 6.5, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 153.92, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 14.2, - "int8": 9.7, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 70.34, - "int8": 102.9, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 17.6, - "int8": 10.8, - "fp16": 6.1, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 56.96, - "int8": 92.92, - "fp16": 164.05, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 13.4, - "int8": 9.3, - "fp16": 5.0, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 74.6, - "int8": 107.69, - "fp16": 201.87, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 25.2, - "int8": 18.4, - "fp16": 10.6, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 39.73, - "int8": 54.31, - "fp16": 94.63, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - 
"UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 26.7, - "int8": 20.3, - "fp16": 14.2, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 37.4, - "int8": 49.27, - "fp16": 70.22, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 32.5, - "int8": 23.1, - "fp16": 16.4, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 30.75, - "int8": 43.23, - "fp16": 60.97, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 8.6, - "int8": 5.3, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 116.29, - "int8": 189.6, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 45.3, - "int8": 35.9, - "fp16": 21.8, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 22.08, - "int8": 27.87, - "fp16": 45.77, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 38.1, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 26.27, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 5.4, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 184.58, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 13.3, - "int8": 9.1, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - 
"UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 75.47, - "int8": 109.71, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 15.6, - "int8": 10.4, - "fp16": 5.8, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 64.18, - "int8": 95.84, - "fp16": 171.66, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 11.8, - "int8": 8.7, - "fp16": 4.5, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 84.87, - "int8": 115.0, - "fp16": 223.46, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 26.1, - "int8": 17.6, - "fp16": 9.8, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 38.25, - "int8": 56.78, - "fp16": 102.16, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 25.3, - "int8": 19.3, - "fp16": 13.3, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 39.49, - "int8": 51.8, - "fp16": 75.23, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 28.9, - "int8": 21.2, - "fp16": 15.0, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 34.63, - "int8": 47.15, - "fp16": 66.83, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 4.8, - "int8": 4.9, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 209.01, - "int8": 205.41, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Accelerator Platforms", 
- "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 45.6, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 21.92, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 35.3, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 28.33, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 5.7, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 175.65, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 14.3, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 70.04, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 17.0, - "int8": 9.7, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 58.82, - "int8": 102.99, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 14.4, - "int8": 9.1, - "fp16": 4.7, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 69.6, - "int8": 109.3, - "fp16": 212.27, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 
8380 CPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 27.1, - "int8": 18.3, - "fp16": 10.3, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 36.86, - "int8": 54.59, - "fp16": 97.42, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 27.8, - "int8": 20.0, - "fp16": 13.9, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 35.91, - "int8": 49.99, - "fp16": 72.1, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 33.0, - "int8": 22.7, - "fp16": 15.8, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 30.3, - "int8": 44.07, - "fp16": 63.31, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 9.0, - "int8": 5.0, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 111.21, - "int8": 201.82, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 46.0, - "int8": 34.4, - "fp16": 21.0, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 21.76, - "int8": 29.06, - "fp16": 47.66, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 38.0, - "int8": "", - "fp16": 17.9, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 26.33, - "int8": "", - "fp16": 55.81, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 11.1, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 
90.09, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 5.8, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 171.03, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 14.6, - "int8": 9.3, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 68.27, - "int8": 108.02, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 16.7, - "int8": 10.2, - "fp16": 5.7, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 60.02, - "int8": 98.33, - "fp16": 176.33, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 23.3, - "int8": 15.6, - "fp16": 8.5, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 42.95, - "int8": 64.22, - "fp16": 117.2, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 40.8, - "int8": 29.1, - "fp16": 17.1, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 24.53, - "int8": 34.38, - "fp16": 58.33, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 43.6, - "int8": 33.3, - "fp16": 22.0, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 22.94, - "int8": 30.03, - "fp16": 45.44, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - 
"Precisions": [ - { - "int4": 52.2, - "int8": 39.0, - "fp16": 25.9, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 19.15, - "int8": 25.66, - "fp16": 38.58, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H CPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 11.6, - "int8": 8.6, - "fp16": 6.9, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 86.42, - "int8": 116.68, - "fp16": 145.89, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 55.6, - "int8": 46.6, - "fp16": 31.6, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 17.98, - "int8": 21.47, - "fp16": 31.63, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 41.9, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 23.88, - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 18.8, - "int8": 10.7, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 53.33, - "int8": 93.53, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 9.7, - "int8": 6.1, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 102.87, - "int8": 165.24, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 165H iGPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 23.1, - "int8": 15.7, - "fp16": 9.1, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 43.28, - "int8": 63.82, - "fp16": 109.36, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": 
"lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": 25.6, - "int8": 16.7, - "fp16": 10.3, - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 38.99, - "int8": 59.98, - "fp16": 96.89, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-13900K CPU-only", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 42.87, - "fp16": 41.99, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8380 CPU-only", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 14.24, - "fp16": 14.44, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 4.67, - "fp16": 4.55, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Xeon® Platinum 8580 CPU-only", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Intel® Xeon®, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 4.13, - "fp16": 4.04, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Arc™ A-Series Graphics dGPU", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 2.55, - "fp16": 2.47, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": 
"higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 10.8, - "fp16": 10.89, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ i7-1360P iGPU-only", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 33.5, - "fp16": 32.47, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 9 processor 288V iGPU-only", - "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.67, - "fp16": 5.33, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 43.86, - "fp16": "", - "fp32": 19.46, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 30.78, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 4.63, - "fp16": "", - "fp32": 1.74, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 281.45, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 67.33, - "fp16": "", - "fp32": 45.22, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 18.61, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 0.83, - "fp16": "", - "fp32": 0.2, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 1662.21, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": 
"mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 1206.23, - "fp16": "", - "fp32": 587.85, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 1.27, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 248.53, - "fp16": "", - "fp32": 60.14, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 5.96, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 510.87, - "fp16": "", - "fp32": 166.45, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 2.76, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 4.55, - "fp16": "", - "fp32": 1.1, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 310.48, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 6.44, - "fp16": "", - "fp32": 1.82, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 213.98, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 31.79, - "fp16": "", - "fp32": 8.85, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 42.2, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU-only", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, CPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 116.4, - "fp16": "", - "fp32": 42.84, - "bf16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 
10.51, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 279.89, - "fp16": 242.6, - "fp32": "", - "bf16": "" + "int4": "", + "int8": 434.12, + "fp16": "", + "fp32": 30.6, + "bf16": 209.11, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25114,7 +18608,7 @@ "Precisions": [ { "int4": "", - "int8": 4.17, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -25126,19 +18620,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 43.58, - "fp16": 40.08, - "fp32": "", - "bf16": "" + "int8": 24134.02, + "fp16": "", + "fp32": 3392.4, + "bf16": 12168.49, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25148,10 +18646,10 @@ "Precisions": [ { "int4": "", - "int8": 20.29, + "int8": 0.74, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 0.89 } ], "Unit": "ms", @@ -25160,19 +18658,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "stable-diffusion-v1-5", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 184.73, - "fp16": 167.81, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25182,8 +18684,8 @@ "Precisions": [ { "int4": "", - "int8": 7.43, - "fp16": "", + "int8": 4.62, + "fp16": 4.55, "fp32": "", "bf16": "" } @@ -25194,19 +18696,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "yolo11", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 8.7, - "fp16": 5.04, - "fp32": "", - "bf16": "" + "int8": "", + "fp16": "", + "fp32": 1034.68, + "bf16": 2068.81, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25216,7 +18722,7 @@ "Precisions": [ { "int4": "", - "int8": 106.68, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -25228,19 +18734,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8480+ CPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1530.29, - "fp16": 1450.39, - "fp32": "", - 
"bf16": "" + "int8": 2380.51, + "fp16": "", + "fp32": 950.6, + "bf16": 2374.89, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25250,10 +18760,10 @@ "Precisions": [ { "int4": "", - "int8": 1.42, + "int8": 3.13, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 2.54 } ], "Unit": "ms", @@ -25262,19 +18772,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "bert-base-cased", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1023.16, - "fp16": 466.9, - "fp32": "", - "bf16": "" + "int8": 4671.04, + "fp16": "", + "fp32": 560.3, + "bf16": 3211.93, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25284,10 +18798,10 @@ "Precisions": [ { "int4": "", - "int8": 1.4, + "int8": 3.66, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 4.77 } ], "Unit": "ms", @@ -25296,19 +18810,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "efficientdet-d0", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1114.16, - "fp16": 946.07, - "fp32": "", - "bf16": "" + "int8": 1725.13, + "fp16": "", + "fp32": 1123.04, + "bf16": 1407.69, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25318,10 +18836,10 @@ "Precisions": [ { "int4": "", - "int8": 2.05, + "int8": 4.71, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 4.84 } ], "Unit": "ms", @@ -25330,19 +18848,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "gemma-2-9b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 45.06, - "fp16": 25.61, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 25.46, + "token_int8": 18.96, + "token_fp16": 12.14 } ], "Unit": "FPS", @@ -25351,9 +18873,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 21.18, - "fp16": "", + "int4": 39.27, + "int8": 52.74, + "fp16": 82.36, "fp32": "", "bf16": "" } @@ -25364,19 +18886,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 68.61, - "fp16": 48.46, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 27.1, + "token_int8": 19.33, + "token_fp16": 12.69 } ], "Unit": "FPS", @@ -25385,9 +18911,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 12.01, - "fp16": "", + "int4": 36.9, + "int8": 51.72, + "fp16": 78.77, "fp32": "", "bf16": "" } @@ -25398,19 
+18924,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 112.66, - "fp16": 119.27, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 31.71, + "token_int8": 23.05, + "token_fp16": 16.64 } ], "Unit": "FPS", @@ -25419,9 +18949,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 7.63, - "fp16": "", + "int4": 31.53, + "int8": 43.37, + "fp16": 60.07, "fp32": "", "bf16": "" } @@ -25432,19 +18962,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "llama-3-8b", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 326.29, - "fp16": 320.34, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 30.06, + "token_int8": 21.73, + "token_fp16": 14.93 } ], "Unit": "FPS", @@ -25453,9 +18987,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 4.17, - "fp16": "", + "int4": 33.26, + "int8": 46.01, + "fp16": 66.97, "fp32": "", "bf16": "" } @@ -25466,19 +19000,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 211.04, + "int8": "", "fp16": "", - "fp32": 165.57, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 54.73, + "token_int8": 42.58, + "token_fp16": 31.51 } ], "Unit": "FPS", @@ -25487,9 +19025,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 18.27, + "int8": 23.48, + "fp16": 31.73, "fp32": "", "bf16": "" } @@ -25500,19 +19038,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 34.98, + "int8": 74.86, "fp16": "", - "fp32": 28.58, - "bf16": "" + "fp32": 6.39, + "bf16": 48.32, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25522,10 +19064,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 55.7, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 73.74 } ], "Unit": "ms", @@ -25534,19 +19076,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "mistral-7b-v0.1", + "featured_SKU": true, + "whats_new_model": false, + 
"PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 166.87, + "int8": "", "fp16": "", - "fp32": 144.55, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 33.27, + "token_int8": 22.24, + "token_fp16": 15.74 } ], "Unit": "FPS", @@ -25555,9 +19101,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 30.05, + "int8": 44.96, + "fp16": 63.51, "fp32": "", "bf16": "" } @@ -25568,19 +19114,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "mobilenet-v2", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 6.73, + "int8": 39894.55, "fp16": "", - "fp32": 3.15, - "bf16": "" + "fp32": 15839.75, + "bf16": 29419.55, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25590,10 +19140,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.84, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 0.72 } ], "Unit": "ms", @@ -25602,19 +19152,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 1609.77, + "int8": "", "fp16": "", - "fp32": 1160.68, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": 40.45, + "token_fp16": 26.95 } ], "Unit": "FPS", @@ -25623,8 +19177,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", + "int4": 21.43, + "int8": 24.72, "fp16": "", "fp32": "", "bf16": "" @@ -25636,19 +19190,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "qwen2-7b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 738.31, + "int8": "", "fp16": "", - "fp32": 365.53, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": 35.48, + "token_int8": 25.7, + "token_fp16": 16.1 } ], "Unit": "FPS", @@ -25657,9 +19215,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 28.18, + "int8": 38.91, + "fp16": 62.09, "fp32": "", "bf16": "" } @@ -25670,19 +19228,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "resnet-50", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 911.79, + "int8": 21612.82, "fp16": "", - "fp32": 627.75, - "bf16": "" + "fp32": 2002.36, + "bf16": 13669.05, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25692,10 +19254,10 @@ "Precisions": [ { "int4": "", 
- "int8": "", + "int8": 1.0, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 1.37 } ], "Unit": "ms", @@ -25704,19 +19266,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 31.73, + "int8": 513.09, "fp16": "", - "fp32": 16.1, - "bf16": "" + "fp32": 35.2, + "bf16": 275.94, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25738,19 +19304,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 51.44, + "int8": 26748.89, "fp16": "", - "fp32": 30.42, - "bf16": "" + "fp32": 4718.18, + "bf16": 16684.87, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25760,10 +19330,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.72, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 1.15 } ], "Unit": "ms", @@ -25772,19 +19342,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "stable-diffusion-v1-5", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 102.09, + "int8": "", "fp16": "", - "fp32": 85.74, - "bf16": "" + "fp32": "", + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25794,8 +19368,8 @@ "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 4.09, + "fp16": 3.99, "fp32": "", "bf16": "" } @@ -25806,19 +19380,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V CPU+iGPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "yolo11", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 315.12, + "int8": "", "fp16": "", - "fp32": 248.34, - "bf16": "" + "fp32": 1455.5, + "bf16": 2962.49, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25831,7 +19409,7 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": 3.19 } ], "Unit": "ms", @@ -25840,19 +19418,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "bert-base-cased", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® Platinum 8580 CPU-only", + "Model": "yolo_v8n", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 179.86, - "fp16": 141.28, - "fp32": "", - "bf16": "" + "int8": 3043.23, + "fp16": "", + "fp32": 1258.2, + "bf16": 3444.22, + 
"token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25862,10 +19444,10 @@ "Precisions": [ { "int4": "", - "int8": 6.54, + "int8": 3.08, "fp16": "", "fp32": "", - "bf16": "" + "bf16": 2.56 } ], "Unit": "ms", @@ -25874,19 +19456,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "bert-base-cased ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 31.04, + "int8": 8897.30, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 1217.03, + "bf16": 6414.49, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25896,9 +19482,9 @@ "Precisions": [ { "int4": "", - "int8": 42.23, + "int8": 7.74, "fp16": "", - "fp32": "", + "fp32": 14.8, "bf16": "" } ], @@ -25908,19 +19494,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "efficientdet-d0", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "efficientdet-d0 ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 12.96, - "fp16": 13.4, - "fp32": "", - "bf16": "" + "int8": 3384.23, + "fp16": "", + "fp32": 2295.4, + "bf16": 2872.84, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25930,9 +19520,9 @@ "Precisions": [ { "int4": "", - "int8": 125.1, + "int8": 9.71, "fp16": "", - "fp32": "", + "fp32": 9.43, "bf16": "" } ], @@ -25942,19 +19532,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "mask_rcnn_resnet50_atrous_coco ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 149.52, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 11.97, + "bf16": 91.85, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25964,9 +19558,9 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 74.6, "fp16": "", - "fp32": "", + "fp32": 248.21, "bf16": "" } ], @@ -25976,19 +19570,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "mobilenet-v2", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "mobilenet-v2 ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 4151.21, - "fp16": 3149.73, - "fp32": "", - "bf16": "" + "int8": 32737.09, + "fp16": "", + "fp32": 25621.92, + "bf16": 26297.21, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -25998,9 +19596,9 @@ "Precisions": [ { "int4": "", - "int8": 0.42, + "int8": 1.65, "fp16": "", - "fp32": "", + "fp32": 1.34, "bf16": "" } ], @@ -26010,19 +19608,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "resnet-50", - "Checked": "true", - "PlatformType": "Intel® Core™, 
NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "resnet-50 ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 2038.58, - "fp16": 835.87, - "fp32": "", - "bf16": "" + "int8": 27670.82, + "fp16": "", + "fp32": 4254.94, + "bf16": 22432.74, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -26032,9 +19634,9 @@ "Precisions": [ { "int4": "", - "int8": 0.77, + "int8": 2.28, "fp16": "", - "fp32": "", + "fp32": 3.69, "bf16": "" } ], @@ -26044,19 +19646,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "ssd_mobilenet_v1_coco", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "ssd-resnet34-1200 ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 229.76, - "fp16": 200.09, - "fp32": "", - "bf16": "" + "int8": 1009.62, + "fp16": "", + "fp32": 77.99, + "bf16": 532.90, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -26066,9 +19672,9 @@ "Precisions": [ { "int4": "", - "int8": 8.16, + "int8": 9.73, "fp16": "", - "fp32": "", + "fp32": 34.1, "bf16": "" } ], @@ -26078,19 +19684,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "ssd-resnet34-1200", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "ssd_mobilenet_v1_coco ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 29674.40, "fp16": "", - "fp32": "", - "bf16": "" + "fp32": 9800.83, + "bf16": 19479.18, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -26100,9 +19710,9 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 2.08, "fp16": "", - "fp32": "", + "fp32": 2.45, "bf16": "" } ], @@ -26112,19 +19722,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "unet-camvid-onnx-0001", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "yolo_v8n ", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": 37.1, - "fp32": "", - "bf16": "" + "int8": 5590.87, + "fp16": "", + "fp32": 2699.0, + "bf16": 6003.66, + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -26134,9 +19748,9 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 6.56, "fp16": "", - "fp32": "", + "fp32": 5.59, "bf16": "" } ], @@ -26146,30 +19760,34 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "yolo_v5m", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "gemma-2-9b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 129.88, - "fp16": 105.29, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 136.4, + "token_int8": "", + "token_fp16": 53.6 } ], - "Unit": "FPS", + "Unit": 
"Tokens/sec", "UnitDesc": "higher is better" }, "latency": { "Precisions": [ { - "int4": "", - "int8": 9.44, - "fp16": "", + "int4": 7.3, + "int8": "", + "fp16": 18.7, "fp32": "", "bf16": "" } @@ -26180,30 +19798,34 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V NPU", - "Model": "yolo_v8n", - "Checked": "true", - "PlatformType": "Intel® Core™, NPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "glm-4-9b-chat", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 399.28, - "fp16": 469.58, + "int8": "", + "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 116.5, + "token_int8": "", + "token_fp16": 51.9 } ], - "Unit": "FPS", + "Unit": "Tokens/sec", "UnitDesc": "higher is better" }, "latency": { "Precisions": [ { - "int4": "", - "int8": 3.94, - "fp16": "", + "int4": 8.6, + "int8": "", + "fp16": 19, "fp32": "", "bf16": "" } @@ -26214,19 +19836,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "chatglm2-6b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "llama-2-7b-chat", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { - "int4": 20.6, + "int4": "", "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 139.5, + "token_int8": "", + "token_fp16": 132 } ], "Unit": "Tokens/sec", @@ -26235,9 +19861,9 @@ "latency": { "Precisions": [ { - "int4": 48, + "int4": 7.2, "int8": "", - "fp16": "", + "fp16": 7.6, "fp32": "", "bf16": "" } @@ -26248,19 +19874,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "falcon-7b-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "llama-3.2-3b-instruct", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { - "int4": 18.8, + "int4": "", "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 272.7, + "token_int8": 65, + "token_fp16": "" } ], "Unit": "Tokens/sec", @@ -26269,10 +19899,10 @@ "latency": { "Precisions": [ { - "int4": 53, + "int4": 3.7, "int8": "", "fp16": "", - "fp32": "", + "fp32": 15.4, "bf16": "" } ], @@ -26282,19 +19912,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "llama-2-7b-chat", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "llama-3-8b", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { - "int4": 17.5, + "int4": "", "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 148.2, + "token_int8": "", + "token_fp16": 57.2 } ], "Unit": "Tokens/sec", @@ -26303,9 +19937,9 @@ "latency": { "Precisions": [ { - "int4": 57, + "int4": 6.7, "int8": "", - "fp16": "", + "fp16": 17.5, "fp32": "", "bf16": "" } @@ -26316,19 +19950,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "llama-3-8b", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "mistral-7b-v0.1", + "featured_SKU": true, + 
"whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { - "int4": 19.2, + "int4": "", "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 126.4, + "token_int8": "", + "token_fp16": 61.4 } ], "Unit": "Tokens/sec", @@ -26337,9 +19975,9 @@ "latency": { "Precisions": [ { - "int4": 52, + "int4": 7.9, "int8": "", - "fp16": "", + "fp16": 16.3, "fp32": "", "bf16": "" } @@ -26350,19 +19988,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "mistral-7b-v0.1", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "phi-3-mini-4k-instruct", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { - "int4": 18.2, + "int4": "", "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 176.6, + "token_int8": "", + "token_fp16": 111.9 } ], "Unit": "Tokens/sec", @@ -26371,9 +20013,9 @@ "latency": { "Precisions": [ { - "int4": 54, + "int4": 5.7, "int8": "", - "fp16": "", + "fp16": 8.9, "fp32": "", "bf16": "" } @@ -26384,19 +20026,23 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", - "Model": "phi-3-mini-4k-instruct", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "Platform": "Intel® Xeon® 6979P CPU-only", + "Model": "qwen2-7b", + "featured_SKU": true, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ { - "int4": 29.5, + "int4": "", "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": 164.4, + "token_int8": "", + "token_fp16": 62.2 } ], "Unit": "Tokens/sec", @@ -26405,9 +20051,9 @@ "latency": { "Precisions": [ { - "int4": 34, + "int4": 6.1, "int8": "", - "fp16": "", + "fp16": 16.1, "fp32": "", "bf16": "" } @@ -26418,10 +20064,11 @@ } }, { - "Platform": "Intel® Core™ Ultra 7 processor 268V iGPU", + "Platform": "Intel® Xeon® 6979P CPU-only", "Model": "stable-diffusion-v1-5", - "Checked": "true", - "PlatformType": "Intel® Core™, iGPU-only", + "featured_SKU": true, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", "Parameters": { "throughput": { "Precisions": [ @@ -26430,7 +20077,10 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": "" + "bf16": "", + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "n/a", @@ -26440,8 +20090,8 @@ "Precisions": [ { "int4": "", - "int8": 7.2, - "fp16": 6.9, + "int8": 4.0, + "fp16": 4.1, "fp32": "", "bf16": "" } diff --git a/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-155H.csv b/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-155H.csv index d2c68a3619620e..0d16c5c4998329 100644 --- a/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-155H.csv +++ b/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-155H.csv @@ -1,156 +1,156 @@ -Topology,Precision,Input Size,max rss memory,1st latency (ms),2nd latency (ms),2nd tok/sec,,, -opt-125m-gptq,INT4-MIXED,32,965.9,29,7.7,129.87,,, -opt-125m-gptq,INT4-MIXED,1024,1507.9,113.1,7.8,128.21,,, -tiny-llama-1.1b-chat,INT4-MIXED,32,1831.8,46.5,16.7,59.88,,, -tiny-llama-1.1b-chat,INT4-MIXED,1024,1806.3,635,17.8,56.18,,, -qwen2-0.5b,INT4-MIXED,32,2551.7,61.4,18.3,54.64,,, -qwen2-0.5b,INT4-MIXED,1024,2976.6,356.1,19.2,52.08,,, -tiny-llama-1.1b-chat,INT8-CW,32,1987.4,56,21.6,46.30,,, 
-tiny-llama-1.1b-chat,INT8-CW,1024,2209.1,772.7,22.6,44.25,,, -qwen2-0.5b,INT8-CW,32,2484.9,57.3,22.8,43.86,,, -qwen2-0.5b,INT8-CW,1024,3102.5,407.1,23.9,41.84,,, -qwen2-1.5b,INT4-MIXED,32,4265.2,71.7,25.5,39.22,,, -qwen2-1.5b,INT4-MIXED,1024,4884.5,862.4,26.8,37.31,,, -dolly-v2-3b,INT4-MIXED,32,2401.3,89.6,27.5,36.36,,, -red-pajama-incite-chat-3b-v1,INT4-MIXED,32,2511.5,78.6,28.2,35.46,,, -phi-2,INT4-MIXED,32,2279.5,95.7,29.1,34.36,,, -minicpm-1b-sft,INT4-MIXED,31,2759.9,104.4,30.9,32.36,,, -phi-2,INT4-MIXED,32,2620.1,100.8,31,32.26,,, -stable-zephyr-3b-dpo,INT4-MIXED,30,2636.5,86.8,31.7,31.55,,, -dolly-v2-3b,INT4-MIXED,1024,3137.1,1782.9,32.2,31.06,,, -red-pajama-incite-chat-3b-v1,INT4-MIXED,1020,3118.5,1831.7,33.3,30.03,,, -red-pajama-incite-chat-3b-v1,INT4-MIXED,1024,2862.7,1821.1,33.5,29.85,,, -qwen2-1.5b,INT8-CW,32,4831.2,87,33.8,29.59,,, -opt-2.7b,INT4-MIXED,31,2898.3,73.2,33.9,29.50,,, -phi-2,INT4-MIXED,1024,2797.4,1887,34,29.41,,, -orca-mini-3b,INT4-MIXED,32,2877.8,100.3,35,28.57,,, -stablelm-3b-4e1t,INT4-MIXED,32,2669.4,94.7,35.3,28.33,,, -qwen2-1.5b,INT8-CW,1024,5455.8,1047.6,35.3,28.33,,, -minicpm-1b-sft,INT8-CW,31,3104.1,103.5,35.3,28.33,,, -phi-2,INT4-MIXED,1024,3039.8,1917.4,35.9,27.86,,, -stable-zephyr-3b-dpo,INT4-MIXED,946,3411.4,1695,37,27.03,,, -gemma-2b-it,INT4-MIXED,32,3991.7,116.1,37.9,26.39,,, -opt-2.7b,INT4-MIXED,937,3617.5,1764.9,38.2,26.18,,, -phi-3-mini-4k-instruct,INT4-MIXED,31,2935.3,111.6,38.2,26.18,,, -phi-3-mini-4k-instruct,INT4-MIXED,38,3102.4,134,38.4,26.04,,, -phi-3-mini-4k-instruct,INT4-MIXED,31,2986.1,114.1,38.9,25.71,,, -phi-3-mini-4k-instruct,INT4-MIXED,38,2977.4,131.1,39,25.64,,, -gemma-2b-it,INT4-MIXED,1024,4973.3,1249.2,39.7,25.19,,, -stablelm-3b-4e1t,INT4-MIXED,1024,3196.9,2045.4,39.9,25.06,,, -dolly-v2-3b,INT8-CW,32,3490.2,107.4,41.5,24.10,,, -red-pajama-incite-chat-3b-v1,INT8-CW,32,3457.9,105,42.5,23.53,,, -opt-2.7b,INT8-CW,31,3686.8,107.5,44.1,22.68,,, -phi-2,INT8-CW,32,3554.9,116.6,44.1,22.68,,, -phi-3-mini-4k-instruct,INT4-MIXED,1023,3390.7,2277.1,44.2,22.62,,, -phi-3-mini-4k-instruct,INT4-MIXED,1061,3643.6,2485,44.4,22.52,,, -phi-3-mini-4k-instruct,INT4-MIXED,1023,3516.4,2280.9,44.5,22.47,,, -phi-3-mini-4k-instruct,INT4-MIXED,1061,3537.2,2522.4,44.7,22.37,,, -orca-mini-3b,INT4-MIXED,1024,3557.3,1898.9,45,22.22,,, -minicpm-1b-sft,FP16,31,3814.4,97.9,45.4,22.03,,, -stablelm-3b-4e1t,INT8-CW,32,3486.9,100.5,46.1,21.69,,, -stable-zephyr-3b-dpo,INT8-CW,30,3516.7,101.9,46.1,21.69,,, -dolly-v2-3b,INT8-CW,1024,4265.9,2178.6,46.2,21.65,,, -red-pajama-incite-chat-3b-v1,INT8-CW,1020,3979.1,2219.7,47.2,21.19,,, -red-pajama-incite-chat-3b-v1,INT8-CW,1024,3975.5,2199.7,47.3,21.14,,, -opt-2.7b,INT8-CW,937,4358.6,1981.8,48.4,20.66,,, -phi-2,INT8-CW,1024,4058.1,2280.1,48.9,20.45,,, -gemma-2b-it,INT8-CW,32,4786.8,119.8,49.4,20.24,,, -chatglm3-6b,INT4-MIXED,32,4141.5,166.6,49.7,20.12,,, -stablelm-3b-4e1t,INT8-CW,1024,4054.8,2243.5,50.7,19.72,,, -stable-zephyr-3b-dpo,INT8-CW,946,4521.8,1816.4,51.3,19.49,,, -gemma-2b-it,INT8-CW,1024,5810.7,1580,51.3,19.49,,, -chatglm3-6b,INT4-MIXED,32,4651.4,164.7,51.6,19.38,,, -chatglm3-6b,INT4-MIXED,1024,4235.1,2818.7,52.3,19.12,,, -orca-mini-3b,INT8-CW,32,4162,109.2,53.3,18.76,,, -chatglm3-6b,INT4-MIXED,1024,4783.8,2869,54.4,18.38,,, -gpt-j-6b,INT4-MIXED,32,4667.3,176.7,56.3,17.76,,, -chatglm3-6b-gptq,INT4-MIXED,32,5369.4,173.9,58.9,16.98,,, -llama-2-7b-chat-hf,INT4-MIXED,32,4280,173.2,60.1,16.64,,, -phi-3-mini-4k-instruct,INT8-CW,31,4585.1,123,60.5,16.53,,, -phi-3-mini-4k-instruct,INT8-CW,38,4597,152,60.5,16.53,,, 
-chatglm2-6b,INT4-MIXED,32,4847.8,158.7,60.6,16.50,,, -vicuna-7b-v1.5,INT4-MIXED,32,4476.9,178.2,61.2,16.34,,, -chatglm3-6b-gptq,INT4-MIXED,1024,5217.6,2863.7,61.3,16.31,,, -mistral-7b-v0.1,INT4-MIXED,31,4413.6,194,61.7,16.21,,, -qwen2-7b,INT4-MIXED,32,7044.7,184.4,61.7,16.21,,, -mistral-7b-v0.1,INT4-MIXED,32,4427.6,193.3,61.8,16.18,,, -orca-mini-3b,INT8-CW,1024,4821.6,2239.1,62,16.13,,, -codegen25-7b,INT4-MIXED,32,4687.2,176.2,62.7,15.95,,, -chatglm2-6b,INT4-MIXED,1024,5165.9,3148,63,15.87,,, -llama-2-7b-gptq,INT4-MIXED,32,4632.8,175.2,63.4,15.77,,, -stablelm-7b,INT4-MIXED,32,5219.5,206.3,63.4,15.77,,, -qwen-7b-chat,INT4-MIXED,32,7805.6,193.8,63.6,15.72,,, -gpt-j-6b,INT4-MIXED,1024,5314.9,3111.8,63.6,15.72,,, -qwen2-7b,INT4-MIXED,1024,7716.2,3548.3,64.1,15.60,,, -llama-3-8b,INT4-MIXED,32,4910.9,204.8,64.7,15.46,,, -mistral-7b-v0.1,INT4-MIXED,1024,4720.8,3667.1,64.8,15.43,,, -mistral-7b-v0.1,INT4-MIXED,1007,4704.7,3685.4,64.9,15.41,,, -llama-3.1-8b,INT4-MIXED,31,4850.3,211.5,64.9,15.41,,, -phi-3-mini-4k-instruct,INT8-CW,1023,5128.6,2815.2,65.7,15.22,,, -phi-3-mini-4k-instruct,INT8-CW,1061,5155,3407.9,65.9,15.17,,, -mistral-7b-v0.1,INT4-MIXED,32,4939.3,192,66.5,15.04,,, -llama-3-8b,INT4-MIXED,33,4919.4,261.9,67.2,14.88,,, -llama-2-7b-chat-hf,INT4-MIXED,1024,4948.2,3811,67.3,14.86,,, -qwen1.5-7b-chat,INT4-MIXED,32,5943.1,180.5,67.7,14.77,,, -qwen-7b-chat-gptq,INT4-MIXED,32,8057,187,68.1,14.68,,, -llama-3-8b,INT4-MIXED,32,5503.5,198.4,68.1,14.68,,, -qwen-7b-chat,INT4-MIXED,32,8091.6,185.9,68.1,14.68,,, -llama-3-8b,INT4-MIXED,1024,5569.1,3920.5,68.2,14.66,,, -llama-3.1-8b,INT4-MIXED,31,5358.6,201,68.2,14.66,,, -stablelm-7b,INT4-MIXED,1020,5804.4,3726.6,68.8,14.53,,, -llama-3.1-8b,INT4-MIXED,31,5452.6,202.9,68.8,14.53,,, -llama-2-7b-chat-hf,INT4-MIXED,32,5023,165.7,69,14.49,,, -llama-3-8b,INT4-MIXED,32,5413.6,202,69.1,14.47,,, -llama-3-8b,INT4-MIXED,33,5440.4,262.1,69.2,14.45,,, -codegen25-7b,INT4-MIXED,1024,5434.6,3513.2,69.9,14.31,,, -mistral-7b-v0.1,INT4-MIXED,1024,5614.9,3819.1,70,14.29,,, -mistral-7b-v0.1,INT4-MIXED,31,4927.8,205,70.5,14.18,,, -llama-3-8b,INT4-MIXED,33,5498.9,270.7,70.6,14.16,,, -llama-3-8b,INT4-MIXED,1025,5577.4,4271.2,70.6,14.16,,, -llama-2-7b-gptq,INT4-MIXED,1024,5302.2,3529.4,70.7,14.14,,, -zephyr-7b-beta,INT4-MIXED,32,5212.4,190.6,71.2,14.04,,, -llama-3-8b,INT4-MIXED,1024,6161.1,3918,71.5,13.99,,, -llama-3-8b,INT4-MIXED,1025,6098,4441.8,72.3,13.83,,, -llama-3-8b,INT4-MIXED,1024,6071.7,3972.2,72.4,13.81,,, -mistral-7b-v0.1,INT4-MIXED,1007,5224.1,4153.4,73.8,13.55,,, -llama-3-8b,INT4-MIXED,1025,6156.9,4357,73.9,13.53,,, -zephyr-7b-beta,INT4-MIXED,1024,5511.6,3978,74.4,13.44,,, -opt-2.7b,FP16,31,9220.3,107.8,74.7,13.39,,, -dolly-v2-3b,FP16,32,6058.9,109.9,74.7,13.39,,, -qwen1.5-7b-chat,INT4-MIXED,1024,7063.2,3791.7,75,13.33,,, -qwen-7b-chat,INT4-MIXED,1024,8919.5,3763.9,75,13.33,,, -red-pajama-incite-chat-3b-v1,FP16,32,6036.5,107.5,75.9,13.18,,, -llama-2-7b-chat-hf,INT4-MIXED,1024,5716.8,4231.7,76.2,13.12,,, -phi-2,FP16,32,6090.1,115.2,77.1,12.97,,, -stable-zephyr-3b-dpo,FP16,30,6113.1,112.1,78.6,12.72,,, -qwen-7b-chat,INT4-MIXED,1024,9212.9,3857.4,78.6,12.72,,, -stablelm-3b-4e1t,FP16,32,6065.4,110.2,78.7,12.71,,, -opt-2.7b,FP16,937,9733.8,3750.8,78.8,12.69,,, -dolly-v2-3b,FP16,1024,6615.2,2230.9,79.1,12.64,,, -red-pajama-incite-chat-3b-v1,FP16,1020,6588.3,2259.4,80.2,12.47,,, -glm-4-9b,INT4-MIXED,33,6386.2,328,80.4,12.44,,, -red-pajama-incite-chat-3b-v1,FP16,1024,6570.3,2268.7,80.4,12.44,,, -baichuan2-7b-chat,INT4-MIXED,32,5977.9,201.7,81,12.35,,, 
-glm-4-9b,INT4-MIXED,32,6389.7,248.1,81,12.35,,, -phi-2,FP16,1024,6646.2,2406.7,81.4,12.29,,, -stable-zephyr-3b-dpo,FP16,946,6875.7,1868.2,82.9,12.06,,, -stablelm-3b-4e1t,FP16,1024,6636.1,2036.9,83,12.05,,, -chatglm2-6b,INT8-CW,32,6731.8,159.2,84.4,11.85,,, -glm-4-9b,INT4-MIXED,1025,7061.4,4939.2,85.2,11.74,,, -qwen-7b-chat-gptq,INT4-MIXED,1024,9175.3,3898,85.3,11.72,,, -gemma-7b-it,INT4-MIXED,32,7883.9,230.5,86,11.63,,, -gemma-7b-it,INT4-MIXED,32,8002.6,235,86.1,11.61,,, -glm-4-9b,INT4-MIXED,1024,7064.9,4411.2,86.2,11.60,,, -gpt-j-6b,INT8-CW,32,7009.2,176.8,86.4,11.57,,, -chatglm2-6b,INT8-CW,1024,7050.5,3871.6,86.8,11.52,,, -chatglm3-6b,INT8-CW,32,6755.9,159,86.8,11.52,,, -baichuan2-7b-chat,INT4-MIXED,1024,7033.3,4049,88.8,11.26,,, -chatglm3-6b,INT8-CW,1024,7076.5,3865.9,89.2,11.21,,, -qwen-7b-chat,INT4-MIXED,32,9245.7,176.3,90,11.11,,, -gemma-7b-it,INT4-MIXED,1024,9449.4,4305.8,93.2,10.73,,, -gpt-j-6b,INT8-CW,1024,7672.3,4181.1,93.5,10.70,,, -gemma-7b-it,INT4-MIXED,1024,9330.5,4222.5,93.7,10.67,,, -orca-mini-3b,FP16,32,7416.5,122.3,94.7,10.56,,, -codegen25-7b,INT8-CW,32,7557.6,170.7,98.4,10.16,,, -qwen-7b-chat,INT4-MIXED,1024,10371.1,4271.7,98.9,10.11,,, -llama-2-7b-chat-hf,INT8-CW,32,7390.6,171.6,99.9,10.01,,, +Topology,Precision,Input Size,max rss memory,1st latency (ms),2nd latency (ms),2nd tok/sec +opt-125m-gptq,INT4-MIXED,32,965.9,29,7.7,129.87 +opt-125m-gptq,INT4-MIXED,1024,1507.9,113.1,7.8,128.21 +tiny-llama-1.1b-chat,INT4-MIXED,32,1831.8,46.5,16.7,59.88 +tiny-llama-1.1b-chat,INT4-MIXED,1024,1806.3,635,17.8,56.18 +qwen2-0.5b,INT4-MIXED,32,2551.7,61.4,18.3,54.64 +qwen2-0.5b,INT4-MIXED,1024,2976.6,356.1,19.2,52.08 +tiny-llama-1.1b-chat,INT8-CW,32,1987.4,56,21.6,46.30 +tiny-llama-1.1b-chat,INT8-CW,1024,2209.1,772.7,22.6,44.25 +qwen2-0.5b,INT8-CW,32,2484.9,57.3,22.8,43.86 +qwen2-0.5b,INT8-CW,1024,3102.5,407.1,23.9,41.84 +qwen2-1.5b,INT4-MIXED,32,4265.2,71.7,25.5,39.22 +qwen2-1.5b,INT4-MIXED,1024,4884.5,862.4,26.8,37.31 +dolly-v2-3b,INT4-MIXED,32,2401.3,89.6,27.5,36.36 +red-pajama-incite-chat-3b-v1,INT4-MIXED,32,2511.5,78.6,28.2,35.46 +phi-2,INT4-MIXED,32,2279.5,95.7,29.1,34.36 +minicpm-1b-sft,INT4-MIXED,31,2759.9,104.4,30.9,32.36 +phi-2,INT4-MIXED,32,2620.1,100.8,31,32.26 +stable-zephyr-3b-dpo,INT4-MIXED,30,2636.5,86.8,31.7,31.55 +dolly-v2-3b,INT4-MIXED,1024,3137.1,1782.9,32.2,31.06 +red-pajama-incite-chat-3b-v1,INT4-MIXED,1020,3118.5,1831.7,33.3,30.03 +red-pajama-incite-chat-3b-v1,INT4-MIXED,1024,2862.7,1821.1,33.5,29.85 +qwen2-1.5b,INT8-CW,32,4831.2,87,33.8,29.59 +opt-2.7b,INT4-MIXED,31,2898.3,73.2,33.9,29.50 +phi-2,INT4-MIXED,1024,2797.4,1887,34,29.41 +orca-mini-3b,INT4-MIXED,32,2877.8,100.3,35,28.57 +stablelm-3b-4e1t,INT4-MIXED,32,2669.4,94.7,35.3,28.33 +qwen2-1.5b,INT8-CW,1024,5455.8,1047.6,35.3,28.33 +minicpm-1b-sft,INT8-CW,31,3104.1,103.5,35.3,28.33 +phi-2,INT4-MIXED,1024,3039.8,1917.4,35.9,27.86 +stable-zephyr-3b-dpo,INT4-MIXED,946,3411.4,1695,37,27.03 +gemma-2b-it,INT4-MIXED,32,3991.7,116.1,37.9,26.39 +opt-2.7b,INT4-MIXED,937,3617.5,1764.9,38.2,26.18 +phi-3-mini-4k-instruct,INT4-MIXED,31,2935.3,111.6,38.2,26.18 +phi-3-mini-4k-instruct,INT4-MIXED,38,3102.4,134,38.4,26.04 +phi-3-mini-4k-instruct,INT4-MIXED,31,2986.1,114.1,38.9,25.71 +phi-3-mini-4k-instruct,INT4-MIXED,38,2977.4,131.1,39,25.64 +gemma-2b-it,INT4-MIXED,1024,4973.3,1249.2,39.7,25.19 +stablelm-3b-4e1t,INT4-MIXED,1024,3196.9,2045.4,39.9,25.06 +dolly-v2-3b,INT8-CW,32,3490.2,107.4,41.5,24.10 +red-pajama-incite-chat-3b-v1,INT8-CW,32,3457.9,105,42.5,23.53 +opt-2.7b,INT8-CW,31,3686.8,107.5,44.1,22.68 
+phi-2,INT8-CW,32,3554.9,116.6,44.1,22.68 +phi-3-mini-4k-instruct,INT4-MIXED,1023,3390.7,2277.1,44.2,22.62 +phi-3-mini-4k-instruct,INT4-MIXED,1061,3643.6,2485,44.4,22.52 +phi-3-mini-4k-instruct,INT4-MIXED,1023,3516.4,2280.9,44.5,22.47 +phi-3-mini-4k-instruct,INT4-MIXED,1061,3537.2,2522.4,44.7,22.37 +orca-mini-3b,INT4-MIXED,1024,3557.3,1898.9,45,22.22 +minicpm-1b-sft,FP16,31,3814.4,97.9,45.4,22.03 +stablelm-3b-4e1t,INT8-CW,32,3486.9,100.5,46.1,21.69 +stable-zephyr-3b-dpo,INT8-CW,30,3516.7,101.9,46.1,21.69 +dolly-v2-3b,INT8-CW,1024,4265.9,2178.6,46.2,21.65 +red-pajama-incite-chat-3b-v1,INT8-CW,1020,3979.1,2219.7,47.2,21.19 +red-pajama-incite-chat-3b-v1,INT8-CW,1024,3975.5,2199.7,47.3,21.14 +opt-2.7b,INT8-CW,937,4358.6,1981.8,48.4,20.66 +phi-2,INT8-CW,1024,4058.1,2280.1,48.9,20.45 +gemma-2b-it,INT8-CW,32,4786.8,119.8,49.4,20.24 +chatglm3-6b,INT4-MIXED,32,4141.5,166.6,49.7,20.12 +stablelm-3b-4e1t,INT8-CW,1024,4054.8,2243.5,50.7,19.72 +stable-zephyr-3b-dpo,INT8-CW,946,4521.8,1816.4,51.3,19.49 +gemma-2b-it,INT8-CW,1024,5810.7,1580,51.3,19.49 +chatglm3-6b,INT4-MIXED,32,4651.4,164.7,51.6,19.38 +chatglm3-6b,INT4-MIXED,1024,4235.1,2818.7,52.3,19.12 +orca-mini-3b,INT8-CW,32,4162,109.2,53.3,18.76 +chatglm3-6b,INT4-MIXED,1024,4783.8,2869,54.4,18.38 +gpt-j-6b,INT4-MIXED,32,4667.3,176.7,56.3,17.76 +chatglm3-6b-gptq,INT4-MIXED,32,5369.4,173.9,58.9,16.98 +llama-2-7b-chat-hf,INT4-MIXED,32,4280,173.2,60.1,16.64 +phi-3-mini-4k-instruct,INT8-CW,31,4585.1,123,60.5,16.53 +phi-3-mini-4k-instruct,INT8-CW,38,4597,152,60.5,16.53 +chatglm2-6b,INT4-MIXED,32,4847.8,158.7,60.6,16.50 +vicuna-7b-v1.5,INT4-MIXED,32,4476.9,178.2,61.2,16.34 +chatglm3-6b-gptq,INT4-MIXED,1024,5217.6,2863.7,61.3,16.31 +mistral-7b-v0.1,INT4-MIXED,31,4413.6,194,61.7,16.21 +qwen2-7b,INT4-MIXED,32,7044.7,184.4,61.7,16.21 +mistral-7b-v0.1,INT4-MIXED,32,4427.6,193.3,61.8,16.18 +orca-mini-3b,INT8-CW,1024,4821.6,2239.1,62,16.13 +codegen25-7b,INT4-MIXED,32,4687.2,176.2,62.7,15.95 +chatglm2-6b,INT4-MIXED,1024,5165.9,3148,63,15.87 +llama-2-7b-gptq,INT4-MIXED,32,4632.8,175.2,63.4,15.77 +stablelm-7b,INT4-MIXED,32,5219.5,206.3,63.4,15.77 +qwen-7b-chat,INT4-MIXED,32,7805.6,193.8,63.6,15.72 +gpt-j-6b,INT4-MIXED,1024,5314.9,3111.8,63.6,15.72 +qwen2-7b,INT4-MIXED,1024,7716.2,3548.3,64.1,15.60 +llama-3-8b,INT4-MIXED,32,4910.9,204.8,64.7,15.46 +mistral-7b-v0.1,INT4-MIXED,1024,4720.8,3667.1,64.8,15.43 +mistral-7b-v0.1,INT4-MIXED,1007,4704.7,3685.4,64.9,15.41 +llama-3.1-8b,INT4-MIXED,31,4850.3,211.5,64.9,15.41 +phi-3-mini-4k-instruct,INT8-CW,1023,5128.6,2815.2,65.7,15.22 +phi-3-mini-4k-instruct,INT8-CW,1061,5155,3407.9,65.9,15.17 +mistral-7b-v0.1,INT4-MIXED,32,4939.3,192,66.5,15.04 +llama-3-8b,INT4-MIXED,33,4919.4,261.9,67.2,14.88 +llama-2-7b-chat-hf,INT4-MIXED,1024,4948.2,3811,67.3,14.86 +qwen1.5-7b-chat,INT4-MIXED,32,5943.1,180.5,67.7,14.77 +qwen-7b-chat-gptq,INT4-MIXED,32,8057,187,68.1,14.68 +llama-3-8b,INT4-MIXED,32,5503.5,198.4,68.1,14.68 +qwen-7b-chat,INT4-MIXED,32,8091.6,185.9,68.1,14.68 +llama-3-8b,INT4-MIXED,1024,5569.1,3920.5,68.2,14.66 +llama-3.1-8b,INT4-MIXED,31,5358.6,201,68.2,14.66 +stablelm-7b,INT4-MIXED,1020,5804.4,3726.6,68.8,14.53 +llama-3.1-8b,INT4-MIXED,31,5452.6,202.9,68.8,14.53 +llama-2-7b-chat-hf,INT4-MIXED,32,5023,165.7,69,14.49 +llama-3-8b,INT4-MIXED,32,5413.6,202,69.1,14.47 +llama-3-8b,INT4-MIXED,33,5440.4,262.1,69.2,14.45 +codegen25-7b,INT4-MIXED,1024,5434.6,3513.2,69.9,14.31 +mistral-7b-v0.1,INT4-MIXED,1024,5614.9,3819.1,70,14.29 +mistral-7b-v0.1,INT4-MIXED,31,4927.8,205,70.5,14.18 +llama-3-8b,INT4-MIXED,33,5498.9,270.7,70.6,14.16 
+llama-3-8b,INT4-MIXED,1025,5577.4,4271.2,70.6,14.16 +llama-2-7b-gptq,INT4-MIXED,1024,5302.2,3529.4,70.7,14.14 +zephyr-7b-beta,INT4-MIXED,32,5212.4,190.6,71.2,14.04 +llama-3-8b,INT4-MIXED,1024,6161.1,3918,71.5,13.99 +llama-3-8b,INT4-MIXED,1025,6098,4441.8,72.3,13.83 +llama-3-8b,INT4-MIXED,1024,6071.7,3972.2,72.4,13.81 +mistral-7b-v0.1,INT4-MIXED,1007,5224.1,4153.4,73.8,13.55 +llama-3-8b,INT4-MIXED,1025,6156.9,4357,73.9,13.53 +zephyr-7b-beta,INT4-MIXED,1024,5511.6,3978,74.4,13.44 +opt-2.7b,FP16,31,9220.3,107.8,74.7,13.39 +dolly-v2-3b,FP16,32,6058.9,109.9,74.7,13.39 +qwen1.5-7b-chat,INT4-MIXED,1024,7063.2,3791.7,75,13.33 +qwen-7b-chat,INT4-MIXED,1024,8919.5,3763.9,75,13.33 +red-pajama-incite-chat-3b-v1,FP16,32,6036.5,107.5,75.9,13.18 +llama-2-7b-chat-hf,INT4-MIXED,1024,5716.8,4231.7,76.2,13.12 +phi-2,FP16,32,6090.1,115.2,77.1,12.97 +stable-zephyr-3b-dpo,FP16,30,6113.1,112.1,78.6,12.72 +qwen-7b-chat,INT4-MIXED,1024,9212.9,3857.4,78.6,12.72 +stablelm-3b-4e1t,FP16,32,6065.4,110.2,78.7,12.71 +opt-2.7b,FP16,937,9733.8,3750.8,78.8,12.69 +dolly-v2-3b,FP16,1024,6615.2,2230.9,79.1,12.64 +red-pajama-incite-chat-3b-v1,FP16,1020,6588.3,2259.4,80.2,12.47 +glm-4-9b,INT4-MIXED,33,6386.2,328,80.4,12.44 +red-pajama-incite-chat-3b-v1,FP16,1024,6570.3,2268.7,80.4,12.44 +baichuan2-7b-chat,INT4-MIXED,32,5977.9,201.7,81,12.35 +glm-4-9b,INT4-MIXED,32,6389.7,248.1,81,12.35 +phi-2,FP16,1024,6646.2,2406.7,81.4,12.29 +stable-zephyr-3b-dpo,FP16,946,6875.7,1868.2,82.9,12.06 +stablelm-3b-4e1t,FP16,1024,6636.1,2036.9,83,12.05 +chatglm2-6b,INT8-CW,32,6731.8,159.2,84.4,11.85 +glm-4-9b,INT4-MIXED,1025,7061.4,4939.2,85.2,11.74 +qwen-7b-chat-gptq,INT4-MIXED,1024,9175.3,3898,85.3,11.72 +gemma-7b-it,INT4-MIXED,32,7883.9,230.5,86,11.63 +gemma-7b-it,INT4-MIXED,32,8002.6,235,86.1,11.61 +glm-4-9b,INT4-MIXED,1024,7064.9,4411.2,86.2,11.60 +gpt-j-6b,INT8-CW,32,7009.2,176.8,86.4,11.57 +chatglm2-6b,INT8-CW,1024,7050.5,3871.6,86.8,11.52 +chatglm3-6b,INT8-CW,32,6755.9,159,86.8,11.52 +baichuan2-7b-chat,INT4-MIXED,1024,7033.3,4049,88.8,11.26 +chatglm3-6b,INT8-CW,1024,7076.5,3865.9,89.2,11.21 +qwen-7b-chat,INT4-MIXED,32,9245.7,176.3,90,11.11 +gemma-7b-it,INT4-MIXED,1024,9449.4,4305.8,93.2,10.73 +gpt-j-6b,INT8-CW,1024,7672.3,4181.1,93.5,10.70 +gemma-7b-it,INT4-MIXED,1024,9330.5,4222.5,93.7,10.67 +orca-mini-3b,FP16,32,7416.5,122.3,94.7,10.56 +codegen25-7b,INT8-CW,32,7557.6,170.7,98.4,10.16 +qwen-7b-chat,INT4-MIXED,1024,10371.1,4271.7,98.9,10.11 +llama-2-7b-chat-hf,INT8-CW,32,7390.6,171.6,99.9,10.01 diff --git a/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-258V.csv b/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-258V.csv index efbf0cee8e4a80..09799a2de31fe6 100644 --- a/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-258V.csv +++ b/docs/sphinx_setup/_static/benchmarks_files/llm_models_7-258V.csv @@ -1,182 +1,182 @@ -Topology,Precision,Input Size,max rss memory,1st latency (ms),2nd latency (ms),2nd tok/sec,,, -opt-125m-gptq,INT4-MIXED,1024,1513.6,81.9,7.8,128.21,,, -opt-125m-gptq,INT4-MIXED,32,979.9,50.4,7.9,126.58,,, -tiny-llama-1.1b-chat,INT4-MIXED,1024,1943.3,176.3,16.8,59.52,,, -tiny-llama-1.1b-chat,INT4-MIXED,32,1982.2,59.5,17.1,58.48,,, -qwen2-0.5b,INT4-MIXED,32,2678,117.3,18.7,53.48,,, -tiny-llama-1.1b-chat,INT8-CW,32,2080.9,59.4,19,52.63,,, -qwen2-0.5b,INT4-MIXED,1024,3036.1,165.5,19.2,52.08,,, -tiny-llama-1.1b-chat,INT8-CW,1024,2287,241.4,19.6,51.02,,, -qwen2-0.5b,INT8-CW,1024,3084.9,172.1,20,50.00,,, -qwen2-0.5b,INT8-CW,32,2518,105.5,21.4,46.73,,, 
-red-pajama-incite-chat-3b-v1,INT4-MIXED,32,2793.6,141.8,23.9,41.84,,, -qwen2-1.5b,INT4-MIXED,32,4515.4,118.7,24,41.67,,, -qwen2-1.5b,INT4-MIXED,1024,4930.1,229.6,24.3,41.15,,, -dolly-v2-3b,INT4-MIXED,32,2486.1,174,25.4,39.37,,, -phi-2,INT4-MIXED,32,2552.9,210.6,26.9,37.17,,, -red-pajama-incite-chat-3b-v1,INT4-MIXED,1020,2934.1,464.5,27.5,36.36,,, -qwen2-1.5b,INT8-CW,32,4813.4,119.1,27.8,35.97,,, -opt-2.7b,INT4-MIXED,31,3172.5,131.9,28.5,35.09,,, -red-pajama-incite-chat-3b-v1,INT4-MIXED,1024,3038.2,447.1,28.6,34.97,,, -dolly-v2-3b,INT4-MIXED,1024,2947.4,409,28.8,34.72,,, -qwen2-1.5b,INT8-CW,1024,5394.8,327.9,29.3,34.13,,, -stable-zephyr-3b-dpo,INT4-MIXED,30,2728.1,131.2,29.8,33.56,,, -phi-2,INT4-MIXED,32,2805.1,208.3,30.2,33.11,,, -minicpm-1b-sft,INT8-CW,31,3104.2,147.8,30.9,32.36,,, -phi-2,INT4-MIXED,1024,3058.9,602.9,31.1,32.15,,, -minicpm-1b-sft,INT4-MIXED,31,2970.1,183.7,31.1,32.15,,, -stablelm-3b-4e1t,INT4-MIXED,32,3077.1,183.2,31.6,31.65,,, -opt-2.7b,INT4-MIXED,937,3416.7,429.4,31.6,31.65,,, -stable-zephyr-3b-dpo,INT4-MIXED,946,3211.8,428.8,32.3,30.96,,, -phi-3-mini-4k-instruct,INT4-MIXED,31,3014.5,116,32.5,30.77,,, -phi-3-mini-4k-instruct,INT4-MIXED,38,2957.4,153.9,32.5,30.77,,, -phi-2,INT4-MIXED,1024,3278.9,613.3,33.4,29.94,,, -phi-3-mini-4k-instruct,INT4-MIXED,38,3288.5,152.9,33.4,29.94,,, -phi-3-mini-4k-instruct,INT4-MIXED,31,3265.1,123.6,34.1,29.33,,, -gemma-2b-it,INT4-MIXED,32,4162.1,208.8,34.2,29.24,,, -stablelm-3b-4e1t,INT4-MIXED,1024,3525.8,524.5,35,28.57,,, -phi-3-mini-4k-instruct,INT4-MIXED,1061,3427.8,777.5,36.5,27.40,,, -phi-3-mini-4k-instruct,INT4-MIXED,1023,3405.4,554.1,36.7,27.25,,, -gemma-2b-it,INT4-MIXED,1024,5053.1,354.8,36.9,27.10,,, -minicpm-1b-sft,FP16,31,3595.5,124.9,36.9,27.10,,, -phi-3-mini-4k-instruct,INT4-MIXED,1061,3547.2,755.8,37.1,26.95,,, -phi-3-mini-4k-instruct,INT4-MIXED,1023,3528.4,536.4,37.4,26.74,,, -red-pajama-incite-chat-3b-v1,INT8-CW,32,3747.7,189.9,38.1,26.25,,, -opt-2.7b,INT8-CW,31,3810.7,145.7,38.5,25.97,,, -chatglm3-6b,INT4-MIXED,32,4120.7,67.3,38.7,25.84,,, -dolly-v2-3b,INT8-CW,32,3747,188.4,39.2,25.51,,, -chatglm3-6b,INT4-MIXED,32,4482.9,69.9,40.7,24.57,,, -chatglm3-6b,INT4-MIXED,1024,4146,606.8,41,24.39,,, -opt-2.7b,INT8-CW,937,4458.9,587.8,41.8,23.92,,, -red-pajama-incite-chat-3b-v1,INT8-CW,1024,4088.4,634.1,41.9,23.87,,, -red-pajama-incite-chat-3b-v1,INT8-CW,1020,4086.8,653.4,42,23.81,,, -phi-2,INT8-CW,32,3794.6,202.7,42.1,23.75,,, -chatglm3-6b,INT4-MIXED,1024,4446.7,598.6,42.3,23.64,,, -stablelm-3b-4e1t,INT8-CW,32,3652.5,146,42.6,23.47,,, -stable-zephyr-3b-dpo,INT8-CW,30,3768.6,151.9,42.6,23.47,,, -dolly-v2-3b,INT8-CW,1024,4092,603.1,42.9,23.31,,, -stablelm-3b-4e1t,INT8-CW,1024,4143.2,671.7,45.2,22.12,,, -gemma-2b-it,INT8-CW,32,4878.4,221.6,45.6,21.93,,, -phi-2,INT8-CW,1024,4153.6,810.3,46,21.74,,, -llama-2-7b-chat-hf,INT4-MIXED,32,4394.6,109.7,46.2,21.65,,, -chatglm3-6b-gptq,INT4-MIXED,32,5218.9,79.7,46.7,21.41,,, -stable-zephyr-3b-dpo,INT8-CW,946,4360.1,627.8,46.8,21.37,,, -vicuna-7b-v1.5,INT4-MIXED,32,4482.3,101.2,47.2,21.19,,, -gemma-2b-it,INT8-CW,1024,5837.1,507.1,48,20.83,,, -llama-2-7b-gptq,INT4-MIXED,32,4734.3,102.8,48.1,20.79,,, -orca-mini-3b,INT4-MIXED,32,2720.1,132,48.1,20.79,,, -qwen-7b-chat,INT4-MIXED,32,7803.7,178.5,48.3,20.70,,, -mistral-7b-v0.1,INT4-MIXED,31,4537.5,99,48.5,20.62,,, -codegen25-7b,INT4-MIXED,32,4723.3,108.5,48.5,20.62,,, -chatglm3-6b-gptq,INT4-MIXED,1024,5150.8,614.2,48.8,20.49,,, -mistral-7b-v0.1,INT4-MIXED,32,4572,102.9,48.8,20.49,,, -llama-3-8b,INT4-MIXED,33,4991.2,252.2,50.9,19.65,,, 
-qwen-7b-chat-gptq,INT4-MIXED,32,8088.4,212.6,51,19.61,,, -chatglm2-6b,INT4-MIXED,32,4960.6,105.5,51.2,19.53,,, -gpt-j-6b,INT4-MIXED,32,4699.5,259.2,51.4,19.46,,, -llama-3.1-8b,INT4-MIXED,31,4897.8,106.9,51.5,19.42,,, -llama-3-8b,INT4-MIXED,32,4999.7,105.9,51.6,19.38,,, -qwen-7b-chat,INT4-MIXED,32,8085.9,193.5,51.7,19.34,,, -falcon-7b-instruct,INT4-MIXED,32,5416.2,175,52.5,19.05,,, -mistral-7b-v0.1,INT4-MIXED,1007,4772.6,803,52.6,19.01,,, -qwen1.5-7b-chat,INT4-MIXED,32,6027.3,174.9,53,18.87,,, -mistral-7b-v0.1,INT4-MIXED,1024,4775,717.6,53,18.87,,, -llama-2-7b-chat-hf,INT4-MIXED,1024,4976.5,992.1,53.1,18.83,,, -qwen2-7b,INT4-MIXED,32,7087.1,138.1,53.3,18.76,,, -llama-2-7b-gptq,INT4-MIXED,1024,5351.2,711.6,53.7,18.62,,, -llama-3-8b,INT4-MIXED,32,5472.8,109.4,53.7,18.62,,, -phi-3-mini-4k-instruct,INT8-CW,38,4575.3,115.9,53.7,18.62,,, -stablelm-7b,INT4-MIXED,32,5213.7,128.5,53.8,18.59,,, -phi-3-mini-4k-instruct,INT8-CW,31,4571.8,118.9,53.8,18.59,,, -llama-3-8b,INT4-MIXED,33,5480.4,246.8,53.9,18.55,,, -llama-3-8b,INT4-MIXED,32,5528.2,144.9,54.3,18.42,,, -llama-3.1-8b,INT4-MIXED,31,5377.3,112.8,54.3,18.42,,, -chatglm2-6b,INT4-MIXED,1024,5232.3,759.6,54.6,18.32,,, -llama-3.1-8b,INT4-MIXED,31,5440.4,126.4,54.8,18.25,,, -llama-3-8b,INT4-MIXED,33,5532.8,248.2,54.9,18.21,,, -codegen25-7b,INT4-MIXED,1024,5412.9,714.8,55,18.18,,, -mistral-7b-v0.1,INT4-MIXED,32,4998.5,117.3,55.2,18.12,,, -mistral-7b-v0.1,INT4-MIXED,31,5000.2,122.4,55.6,17.99,,, -llama-3-8b,INT4-MIXED,1024,5594,953.5,56.6,17.67,,, -gpt-j-6b,INT4-MIXED,1024,5323.8,1254,56.8,17.61,,, -llama-3-8b,INT4-MIXED,1025,5596.7,1192.3,56.8,17.61,,, -qwen2-7b,INT4-MIXED,1024,7722.1,714.2,57,17.54,,, -phi-3-mini-4k-instruct,INT8-CW,1023,5067.1,818.5,57.4,17.42,,, -phi-3-mini-4k-instruct,INT8-CW,1061,5086.1,975.1,57.4,17.42,,, -llama-2-7b-chat-hf,INT4-MIXED,32,5087.7,126.2,57.9,17.27,,, -stablelm-7b,INT4-MIXED,1020,5780.5,1248.4,59,16.95,,, -llama-3-8b,INT4-MIXED,1025,6088.9,1381.5,59,16.95,,, -llama-3-8b,INT4-MIXED,1024,6084.8,931.2,59.2,16.89,,, -llama-3-8b,INT4-MIXED,1025,6141.2,1494.3,59.4,16.84,,, -llama-3-8b,INT4-MIXED,1024,6133.8,1075.2,59.6,16.78,,, -mistral-7b-v0.1,INT4-MIXED,1024,5472.6,794.3,59.7,16.75,,, -zephyr-7b-beta,INT4-MIXED,32,5328.5,103.5,59.8,16.72,,, -falcon-7b-instruct,INT4-MIXED,1024,5677.5,686.2,59.8,16.72,,, -mistral-7b-v0.1,INT4-MIXED,1007,5243.5,1074,59.9,16.69,,, -qwen1.5-7b-chat,INT4-MIXED,1024,7096.7,1132.7,60,16.67,,, -qwen-7b-chat,INT4-MIXED,1024,8872.6,792.8,61,16.39,,, -qwen-7b-chat,INT4-MIXED,1024,9164.4,822.6,63.3,15.80,,, -orca-mini-3b,INT8-CW,32,4221.7,170.6,63.5,15.75,,, -llama-2-7b-chat-hf,INT4-MIXED,1024,5708.1,1397.9,63.6,15.72,,, -glm-4-9b,INT4-MIXED,33,6402.9,307.1,63.8,15.67,,, -zephyr-7b-beta,INT4-MIXED,1024,5572.4,1156.4,64.3,15.55,,, -glm-4-9b,INT4-MIXED,32,6383.1,256.2,64.5,15.50,,, -baichuan2-7b-chat,INT4-MIXED,32,5926.3,191.8,65.8,15.20,,, -opt-2.7b,FP16,31,5886,112.2,68,14.71,,, -dolly-v2-3b,FP16,32,6161.5,147.5,69.5,14.39,,, -red-pajama-incite-chat-3b-v1,FP16,32,6265.4,146.2,69.6,14.37,,, -glm-4-9b,INT4-MIXED,1024,6994.5,1013.7,69.8,14.33,,, -opt-2.7b,FP16,937,6345,379.5,71.6,13.97,,, -glm-4-9b,INT4-MIXED,1025,7014.9,1416.8,72.5,13.79,,, -phi-2,FP16,32,6204.7,189.2,72.9,13.72,,, -stable-zephyr-3b-dpo,FP16,30,6221.4,159.7,73,13.70,,, -dolly-v2-3b,FP16,1024,6669.9,424.3,73.3,13.64,,, -red-pajama-incite-chat-3b-v1,FP16,1020,6658.8,484.7,73.4,13.62,,, -stablelm-3b-4e1t,FP16,32,6216.3,145.4,73.5,13.61,,, -qwen-7b-chat,INT4-MIXED,32,9294.9,144.4,73.8,13.55,,, 
-red-pajama-incite-chat-3b-v1,FP16,1024,6755.1,469.1,73.9,13.53,,, -qwen-7b-chat-gptq,INT4-MIXED,1024,9152.1,827.2,75.1,13.32,,, -gemma-7b-it,INT4-MIXED,32,7991.4,128.6,75.8,13.19,,, -chatglm2-6b,INT8-CW,32,6854.4,110.2,76.3,13.11,,, -chatglm3-6b,INT8-CW,32,6754.8,112.3,76.4,13.09,,, -stable-zephyr-3b-dpo,FP16,946,6940,428.6,76.7,13.04,,, -baichuan2-7b-chat,INT4-MIXED,1024,6930.2,1229.5,76.7,13.04,,, -gemma-7b-it,INT4-MIXED,32,8061.5,125.6,76.7,13.04,,, -stablelm-3b-4e1t,FP16,1024,6722.9,480.8,77,12.99,,, -phi-2,FP16,1024,6709.4,624.1,77.2,12.95,,, -chatglm2-6b,INT8-CW,1024,7132.9,1361.9,78.7,12.71,,, -chatglm3-6b,INT8-CW,1024,7037.5,1389.2,78.7,12.71,,, -qwen-7b-chat,INT4-MIXED,1024,10374.1,1357.5,81.1,12.33,,, -gemma-7b-it,INT4-MIXED,1024,9398,1268.5,82.7,12.09,,, -gemma-7b-it,INT4-MIXED,1024,9469.5,1268,83.2,12.02,,, -gpt-j-6b,INT8-CW,32,7126.5,255.2,87.2,11.47,,, -falcon-7b-instruct,INT8-CW,32,8287.6,131.1,88.4,11.31,,, -llama-2-7b-chat-hf,INT8-CW,32,7474.9,139.5,89.7,11.15,,, -codegen25-7b,INT8-CW,32,7559.4,138,90.8,11.01,,, -vicuna-7b-v1.5,INT8-CW,32,7390.8,136.6,90.8,11.01,,, -falcon-7b-instruct,INT8-CW,1024,8546.8,1205.9,92.2,10.85,,, -stablelm-7b,INT8-CW,32,8356.4,143,92.4,10.82,,, -qwen2-7b,INT8-CW,32,9940.7,132,92.5,10.81,,, -baichuan2-13b-chat,INT4-MIXED,32,9879.2,184.9,93.3,10.72,,, -phi-3-mini-4k-instruct,FP16,38,8290,125.2,93.4,10.71,,, -phi-3-mini-4k-instruct,FP16,31,8290.5,109.5,93.5,10.70,,, -gpt-j-6b,INT8-CW,1024,7759,1996.8,93.9,10.65,,, -llama-2-7b-chat-hf,INT8-CW,1024,8097.8,1701.6,94.7,10.56,,, -phi-3-medium-4k-instruct,INT4-MIXED,38,8210.4,527,95.1,10.52,,, -mistral-7b-v0.1,INT8-CW,31,7882.4,128.6,95.1,10.52,,, -vicuna-7b-v1.5,INT8-CW,1024,8013.2,1558.1,95.1,10.52,,, -mistral-7b-v0.1,INT8-CW,32,7886.9,140.6,95.2,10.50,,, -qwen2-7b,INT8-CW,1024,10573.1,1564.5,95.3,10.49,,, -codegen25-7b,INT8-CW,1024,8253.1,1526.3,95.7,10.45,,, -zephyr-7b-beta,INT8-CW,32,7785.3,144.4,95.8,10.44,,, -stablelm-7b,INT8-CW,1020,8921.9,1845,96.9,10.32,,, -mistral-7b-v0.1,INT8-CW,1007,8127.4,1648.4,97.4,10.27,,, -qwen-7b-chat,INT8-CW,32,11083.2,140.6,97.7,10.24,,, -qwen1.5-7b-chat,INT8-CW,32,8870,156.4,98.1,10.19,,, -llama-3.1-8b,INT8-CW,31,8600.3,189.2,98.4,10.16,,, -mistral-7b-v0.1,INT8-CW,1024,8134.7,1554.1,98.4,10.16,,, -qwen-14b-chat,INT4-MIXED,32,9876.2,192.3,98.6,10.14,,, -zephyr-7b-beta,INT8-CW,1024,8035.2,1580.4,98.8,10.12,,, -llama-3-8b,INT8-CW,32,8694.2,150.7,99.5,10.05,,, -llama-3-8b,INT8-CW,33,8700.4,175.4,99.8,10.02,,, -phi-3-mini-4k-instruct,FP16,1023,8795.2,601.3,99.9,10.01,,, +Topology,Precision,Input Size,max rss memory,1st latency (ms),2nd latency (ms),2nd tok/sec +opt-125m-gptq,INT4-MIXED,1024,1513.6,81.9,7.8,128.21 +opt-125m-gptq,INT4-MIXED,32,979.9,50.4,7.9,126.58 +tiny-llama-1.1b-chat,INT4-MIXED,1024,1943.3,176.3,16.8,59.52 +tiny-llama-1.1b-chat,INT4-MIXED,32,1982.2,59.5,17.1,58.48 +qwen2-0.5b,INT4-MIXED,32,2678,117.3,18.7,53.48 +tiny-llama-1.1b-chat,INT8-CW,32,2080.9,59.4,19,52.63 +qwen2-0.5b,INT4-MIXED,1024,3036.1,165.5,19.2,52.08 +tiny-llama-1.1b-chat,INT8-CW,1024,2287,241.4,19.6,51.02 +qwen2-0.5b,INT8-CW,1024,3084.9,172.1,20,50.00 +qwen2-0.5b,INT8-CW,32,2518,105.5,21.4,46.73 +red-pajama-incite-chat-3b-v1,INT4-MIXED,32,2793.6,141.8,23.9,41.84 +qwen2-1.5b,INT4-MIXED,32,4515.4,118.7,24,41.67 +qwen2-1.5b,INT4-MIXED,1024,4930.1,229.6,24.3,41.15 +dolly-v2-3b,INT4-MIXED,32,2486.1,174,25.4,39.37 +phi-2,INT4-MIXED,32,2552.9,210.6,26.9,37.17 +red-pajama-incite-chat-3b-v1,INT4-MIXED,1020,2934.1,464.5,27.5,36.36 +qwen2-1.5b,INT8-CW,32,4813.4,119.1,27.8,35.97 
+opt-2.7b,INT4-MIXED,31,3172.5,131.9,28.5,35.09 +red-pajama-incite-chat-3b-v1,INT4-MIXED,1024,3038.2,447.1,28.6,34.97 +dolly-v2-3b,INT4-MIXED,1024,2947.4,409,28.8,34.72 +qwen2-1.5b,INT8-CW,1024,5394.8,327.9,29.3,34.13 +stable-zephyr-3b-dpo,INT4-MIXED,30,2728.1,131.2,29.8,33.56 +phi-2,INT4-MIXED,32,2805.1,208.3,30.2,33.11 +minicpm-1b-sft,INT8-CW,31,3104.2,147.8,30.9,32.36 +phi-2,INT4-MIXED,1024,3058.9,602.9,31.1,32.15 +minicpm-1b-sft,INT4-MIXED,31,2970.1,183.7,31.1,32.15 +stablelm-3b-4e1t,INT4-MIXED,32,3077.1,183.2,31.6,31.65 +opt-2.7b,INT4-MIXED,937,3416.7,429.4,31.6,31.65 +stable-zephyr-3b-dpo,INT4-MIXED,946,3211.8,428.8,32.3,30.96 +phi-3-mini-4k-instruct,INT4-MIXED,31,3014.5,116,32.5,30.77 +phi-3-mini-4k-instruct,INT4-MIXED,38,2957.4,153.9,32.5,30.77 +phi-2,INT4-MIXED,1024,3278.9,613.3,33.4,29.94 +phi-3-mini-4k-instruct,INT4-MIXED,38,3288.5,152.9,33.4,29.94 +phi-3-mini-4k-instruct,INT4-MIXED,31,3265.1,123.6,34.1,29.33 +gemma-2b-it,INT4-MIXED,32,4162.1,208.8,34.2,29.24 +stablelm-3b-4e1t,INT4-MIXED,1024,3525.8,524.5,35,28.57 +phi-3-mini-4k-instruct,INT4-MIXED,1061,3427.8,777.5,36.5,27.40 +phi-3-mini-4k-instruct,INT4-MIXED,1023,3405.4,554.1,36.7,27.25 +gemma-2b-it,INT4-MIXED,1024,5053.1,354.8,36.9,27.10 +minicpm-1b-sft,FP16,31,3595.5,124.9,36.9,27.10 +phi-3-mini-4k-instruct,INT4-MIXED,1061,3547.2,755.8,37.1,26.95 +phi-3-mini-4k-instruct,INT4-MIXED,1023,3528.4,536.4,37.4,26.74 +red-pajama-incite-chat-3b-v1,INT8-CW,32,3747.7,189.9,38.1,26.25 +opt-2.7b,INT8-CW,31,3810.7,145.7,38.5,25.97 +chatglm3-6b,INT4-MIXED,32,4120.7,67.3,38.7,25.84 +dolly-v2-3b,INT8-CW,32,3747,188.4,39.2,25.51 +chatglm3-6b,INT4-MIXED,32,4482.9,69.9,40.7,24.57 +chatglm3-6b,INT4-MIXED,1024,4146,606.8,41,24.39 +opt-2.7b,INT8-CW,937,4458.9,587.8,41.8,23.92 +red-pajama-incite-chat-3b-v1,INT8-CW,1024,4088.4,634.1,41.9,23.87 +red-pajama-incite-chat-3b-v1,INT8-CW,1020,4086.8,653.4,42,23.81 +phi-2,INT8-CW,32,3794.6,202.7,42.1,23.75 +chatglm3-6b,INT4-MIXED,1024,4446.7,598.6,42.3,23.64 +stablelm-3b-4e1t,INT8-CW,32,3652.5,146,42.6,23.47 +stable-zephyr-3b-dpo,INT8-CW,30,3768.6,151.9,42.6,23.47 +dolly-v2-3b,INT8-CW,1024,4092,603.1,42.9,23.31 +stablelm-3b-4e1t,INT8-CW,1024,4143.2,671.7,45.2,22.12 +gemma-2b-it,INT8-CW,32,4878.4,221.6,45.6,21.93 +phi-2,INT8-CW,1024,4153.6,810.3,46,21.74 +llama-2-7b-chat-hf,INT4-MIXED,32,4394.6,109.7,46.2,21.65 +chatglm3-6b-gptq,INT4-MIXED,32,5218.9,79.7,46.7,21.41 +stable-zephyr-3b-dpo,INT8-CW,946,4360.1,627.8,46.8,21.37 +vicuna-7b-v1.5,INT4-MIXED,32,4482.3,101.2,47.2,21.19 +gemma-2b-it,INT8-CW,1024,5837.1,507.1,48,20.83 +llama-2-7b-gptq,INT4-MIXED,32,4734.3,102.8,48.1,20.79 +orca-mini-3b,INT4-MIXED,32,2720.1,132,48.1,20.79 +qwen-7b-chat,INT4-MIXED,32,7803.7,178.5,48.3,20.70 +mistral-7b-v0.1,INT4-MIXED,31,4537.5,99,48.5,20.62 +codegen25-7b,INT4-MIXED,32,4723.3,108.5,48.5,20.62 +chatglm3-6b-gptq,INT4-MIXED,1024,5150.8,614.2,48.8,20.49 +mistral-7b-v0.1,INT4-MIXED,32,4572,102.9,48.8,20.49 +llama-3-8b,INT4-MIXED,33,4991.2,252.2,50.9,19.65 +qwen-7b-chat-gptq,INT4-MIXED,32,8088.4,212.6,51,19.61 +chatglm2-6b,INT4-MIXED,32,4960.6,105.5,51.2,19.53 +gpt-j-6b,INT4-MIXED,32,4699.5,259.2,51.4,19.46 +llama-3.1-8b,INT4-MIXED,31,4897.8,106.9,51.5,19.42 +llama-3-8b,INT4-MIXED,32,4999.7,105.9,51.6,19.38 +qwen-7b-chat,INT4-MIXED,32,8085.9,193.5,51.7,19.34 +falcon-7b-instruct,INT4-MIXED,32,5416.2,175,52.5,19.05 +mistral-7b-v0.1,INT4-MIXED,1007,4772.6,803,52.6,19.01 +qwen1.5-7b-chat,INT4-MIXED,32,6027.3,174.9,53,18.87 +mistral-7b-v0.1,INT4-MIXED,1024,4775,717.6,53,18.87 
+llama-2-7b-chat-hf,INT4-MIXED,1024,4976.5,992.1,53.1,18.83 +qwen2-7b,INT4-MIXED,32,7087.1,138.1,53.3,18.76 +llama-2-7b-gptq,INT4-MIXED,1024,5351.2,711.6,53.7,18.62 +llama-3-8b,INT4-MIXED,32,5472.8,109.4,53.7,18.62 +phi-3-mini-4k-instruct,INT8-CW,38,4575.3,115.9,53.7,18.62 +stablelm-7b,INT4-MIXED,32,5213.7,128.5,53.8,18.59 +phi-3-mini-4k-instruct,INT8-CW,31,4571.8,118.9,53.8,18.59 +llama-3-8b,INT4-MIXED,33,5480.4,246.8,53.9,18.55 +llama-3-8b,INT4-MIXED,32,5528.2,144.9,54.3,18.42 +llama-3.1-8b,INT4-MIXED,31,5377.3,112.8,54.3,18.42 +chatglm2-6b,INT4-MIXED,1024,5232.3,759.6,54.6,18.32 +llama-3.1-8b,INT4-MIXED,31,5440.4,126.4,54.8,18.25 +llama-3-8b,INT4-MIXED,33,5532.8,248.2,54.9,18.21 +codegen25-7b,INT4-MIXED,1024,5412.9,714.8,55,18.18 +mistral-7b-v0.1,INT4-MIXED,32,4998.5,117.3,55.2,18.12 +mistral-7b-v0.1,INT4-MIXED,31,5000.2,122.4,55.6,17.99 +llama-3-8b,INT4-MIXED,1024,5594,953.5,56.6,17.67 +gpt-j-6b,INT4-MIXED,1024,5323.8,1254,56.8,17.61 +llama-3-8b,INT4-MIXED,1025,5596.7,1192.3,56.8,17.61 +qwen2-7b,INT4-MIXED,1024,7722.1,714.2,57,17.54 +phi-3-mini-4k-instruct,INT8-CW,1023,5067.1,818.5,57.4,17.42 +phi-3-mini-4k-instruct,INT8-CW,1061,5086.1,975.1,57.4,17.42 +llama-2-7b-chat-hf,INT4-MIXED,32,5087.7,126.2,57.9,17.27 +stablelm-7b,INT4-MIXED,1020,5780.5,1248.4,59,16.95 +llama-3-8b,INT4-MIXED,1025,6088.9,1381.5,59,16.95 +llama-3-8b,INT4-MIXED,1024,6084.8,931.2,59.2,16.89 +llama-3-8b,INT4-MIXED,1025,6141.2,1494.3,59.4,16.84 +llama-3-8b,INT4-MIXED,1024,6133.8,1075.2,59.6,16.78 +mistral-7b-v0.1,INT4-MIXED,1024,5472.6,794.3,59.7,16.75 +zephyr-7b-beta,INT4-MIXED,32,5328.5,103.5,59.8,16.72 +falcon-7b-instruct,INT4-MIXED,1024,5677.5,686.2,59.8,16.72 +mistral-7b-v0.1,INT4-MIXED,1007,5243.5,1074,59.9,16.69 +qwen1.5-7b-chat,INT4-MIXED,1024,7096.7,1132.7,60,16.67 +qwen-7b-chat,INT4-MIXED,1024,8872.6,792.8,61,16.39 +qwen-7b-chat,INT4-MIXED,1024,9164.4,822.6,63.3,15.80 +orca-mini-3b,INT8-CW,32,4221.7,170.6,63.5,15.75 +llama-2-7b-chat-hf,INT4-MIXED,1024,5708.1,1397.9,63.6,15.72 +glm-4-9b,INT4-MIXED,33,6402.9,307.1,63.8,15.67 +zephyr-7b-beta,INT4-MIXED,1024,5572.4,1156.4,64.3,15.55 +glm-4-9b,INT4-MIXED,32,6383.1,256.2,64.5,15.50 +baichuan2-7b-chat,INT4-MIXED,32,5926.3,191.8,65.8,15.20 +opt-2.7b,FP16,31,5886,112.2,68,14.71 +dolly-v2-3b,FP16,32,6161.5,147.5,69.5,14.39 +red-pajama-incite-chat-3b-v1,FP16,32,6265.4,146.2,69.6,14.37 +glm-4-9b,INT4-MIXED,1024,6994.5,1013.7,69.8,14.33 +opt-2.7b,FP16,937,6345,379.5,71.6,13.97 +glm-4-9b,INT4-MIXED,1025,7014.9,1416.8,72.5,13.79 +phi-2,FP16,32,6204.7,189.2,72.9,13.72 +stable-zephyr-3b-dpo,FP16,30,6221.4,159.7,73,13.70 +dolly-v2-3b,FP16,1024,6669.9,424.3,73.3,13.64 +red-pajama-incite-chat-3b-v1,FP16,1020,6658.8,484.7,73.4,13.62 +stablelm-3b-4e1t,FP16,32,6216.3,145.4,73.5,13.61 +qwen-7b-chat,INT4-MIXED,32,9294.9,144.4,73.8,13.55 +red-pajama-incite-chat-3b-v1,FP16,1024,6755.1,469.1,73.9,13.53 +qwen-7b-chat-gptq,INT4-MIXED,1024,9152.1,827.2,75.1,13.32 +gemma-7b-it,INT4-MIXED,32,7991.4,128.6,75.8,13.19 +chatglm2-6b,INT8-CW,32,6854.4,110.2,76.3,13.11 +chatglm3-6b,INT8-CW,32,6754.8,112.3,76.4,13.09 +stable-zephyr-3b-dpo,FP16,946,6940,428.6,76.7,13.04 +baichuan2-7b-chat,INT4-MIXED,1024,6930.2,1229.5,76.7,13.04 +gemma-7b-it,INT4-MIXED,32,8061.5,125.6,76.7,13.04 +stablelm-3b-4e1t,FP16,1024,6722.9,480.8,77,12.99 +phi-2,FP16,1024,6709.4,624.1,77.2,12.95 +chatglm2-6b,INT8-CW,1024,7132.9,1361.9,78.7,12.71 +chatglm3-6b,INT8-CW,1024,7037.5,1389.2,78.7,12.71 +qwen-7b-chat,INT4-MIXED,1024,10374.1,1357.5,81.1,12.33 +gemma-7b-it,INT4-MIXED,1024,9398,1268.5,82.7,12.09 
+gemma-7b-it,INT4-MIXED,1024,9469.5,1268,83.2,12.02 +gpt-j-6b,INT8-CW,32,7126.5,255.2,87.2,11.47 +falcon-7b-instruct,INT8-CW,32,8287.6,131.1,88.4,11.31 +llama-2-7b-chat-hf,INT8-CW,32,7474.9,139.5,89.7,11.15 +codegen25-7b,INT8-CW,32,7559.4,138,90.8,11.01 +vicuna-7b-v1.5,INT8-CW,32,7390.8,136.6,90.8,11.01 +falcon-7b-instruct,INT8-CW,1024,8546.8,1205.9,92.2,10.85 +stablelm-7b,INT8-CW,32,8356.4,143,92.4,10.82 +qwen2-7b,INT8-CW,32,9940.7,132,92.5,10.81 +baichuan2-13b-chat,INT4-MIXED,32,9879.2,184.9,93.3,10.72 +phi-3-mini-4k-instruct,FP16,38,8290,125.2,93.4,10.71 +phi-3-mini-4k-instruct,FP16,31,8290.5,109.5,93.5,10.70 +gpt-j-6b,INT8-CW,1024,7759,1996.8,93.9,10.65 +llama-2-7b-chat-hf,INT8-CW,1024,8097.8,1701.6,94.7,10.56 +phi-3-medium-4k-instruct,INT4-MIXED,38,8210.4,527,95.1,10.52 +mistral-7b-v0.1,INT8-CW,31,7882.4,128.6,95.1,10.52 +vicuna-7b-v1.5,INT8-CW,1024,8013.2,1558.1,95.1,10.52 +mistral-7b-v0.1,INT8-CW,32,7886.9,140.6,95.2,10.50 +qwen2-7b,INT8-CW,1024,10573.1,1564.5,95.3,10.49 +codegen25-7b,INT8-CW,1024,8253.1,1526.3,95.7,10.45 +zephyr-7b-beta,INT8-CW,32,7785.3,144.4,95.8,10.44 +stablelm-7b,INT8-CW,1020,8921.9,1845,96.9,10.32 +mistral-7b-v0.1,INT8-CW,1007,8127.4,1648.4,97.4,10.27 +qwen-7b-chat,INT8-CW,32,11083.2,140.6,97.7,10.24 +qwen1.5-7b-chat,INT8-CW,32,8870,156.4,98.1,10.19 +llama-3.1-8b,INT8-CW,31,8600.3,189.2,98.4,10.16 +mistral-7b-v0.1,INT8-CW,1024,8134.7,1554.1,98.4,10.16 +qwen-14b-chat,INT4-MIXED,32,9876.2,192.3,98.6,10.14 +zephyr-7b-beta,INT8-CW,1024,8035.2,1580.4,98.8,10.12 +llama-3-8b,INT8-CW,32,8694.2,150.7,99.5,10.05 +llama-3-8b,INT8-CW,33,8700.4,175.4,99.8,10.02 +phi-3-mini-4k-instruct,FP16,1023,8795.2,601.3,99.9,10.01 From cbdaba3c032bd3743ceceacb9b14365a4db2cb08 Mon Sep 17 00:00:00 2001 From: Karol Blaszczak Date: Tue, 19 Nov 2024 14:42:39 +0100 Subject: [PATCH 08/62] [DOCS] release notes 2024.5 (#27612) port: https://github.com/openvinotoolkit/openvino/pull/27595 --- .../about-openvino/release-notes-openvino.rst | 783 +++++++++++++----- .../get-started/install-openvino.rst | 6 +- .../llm_inference_guide/genai-guide-npu.rst | 4 + 3 files changed, 568 insertions(+), 225 deletions(-) diff --git a/docs/articles_en/about-openvino/release-notes-openvino.rst b/docs/articles_en/about-openvino/release-notes-openvino.rst index 6685a4325d57fe..dc82009b402593 100644 --- a/docs/articles_en/about-openvino/release-notes-openvino.rst +++ b/docs/articles_en/about-openvino/release-notes-openvino.rst @@ -1,3 +1,4 @@ +============================= OpenVINO Release Notes ============================= @@ -15,115 +16,464 @@ OpenVINO Release Notes -2024.4 - 19 September 2024 +2024.5 - 20 November 2024 ############################# :doc:`System Requirements <./release-notes-openvino/system-requirements>` | :doc:`Release policy <./release-notes-openvino/release-policy>` | :doc:`Installation Guides <./../get-started/install-openvino>` + + What's new +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ * More Gen AI coverage and framework integrations to minimize code changes. - * Support for GLM-4-9B Chat, MiniCPM-1B, Llama 3 and 3.1, Phi-3-Mini, Phi-3-Medium and - YOLOX-s models. - * Noteworthy notebooks added: Florence-2, NuExtract-tiny Structure Extraction, Flux.1 Image - Generation, PixArt-α: Photorealistic Text-to-Image Synthesis, and Phi-3-Vision Visual - Language Assistant. + * New models supported: Llama 3.2 (1B & 3B), Gemma 2 (2B & 9B), and YOLO11. + * LLM support on NPU: Llama 3 8B, Llama 2 7B, Mistral-v0.2-7B, Qwen2-7B-Instruct and Phi-3 Mini. 
+ * Noteworthy notebooks added: Sam2, Llama3.2, Llama3.2 - Vision, Wav2Lip, Whisper, and Llava. + Preview: Support for Flax, a high-performance Python neural network library based on JAX. + Its modular design allows for easy customization and accelerated inference on GPUs. * Broader Large Language Model (LLM) support and more model compression techniques. - * OpenVINO™ runtime optimized for Intel® Xe Matrix Extensions (Intel® XMX) systolic arrays on - built-in GPUs for efficient matrix multiplication resulting in significant LLM performance - boost with improved 1st and 2nd token latency, as well as a smaller memory footprint on - Intel® Core™ Ultra Processors (Series 2). - * Memory sharing enabled for NPUs on Intel® Core™ Ultra Processors (Series 2) for efficient - pipeline integration without memory copy overhead. - * Addition of the PagedAttention feature for discrete GPUs* enables a significant boost in - throughput for parallel inferencing when serving LLMs on Intel® Arc™ Graphics or Intel® - Data Center GPU Flex Series. + * Optimizations for built-in GPUs on Intel® Core Ultra Processors (Series 1) and Intel® Arc™ + Graphics include KV Cache compression for memory reduction along with improved usability, + and model load time optimizations to improve first token latency for LLMs. + * Dynamic quantization was enabled to improve first token latency for LLMs on built-in + Intel® GPUs without impacting accuracy on Intel Core Ultra Processors (Series 1). Second + token latency will also improve for large batch inference. + * A new method to generate synthetic text data is implemented in the Neural Network + Compression Framework (NNCF). This will allow LLMs to be compressed more accurately using + data-aware methods without datasets. Coming soon: This feature will soon be accessible via + Optimum Intel on Hugging Face. * More portability and performance to run AI at the edge, in the cloud, or locally. - * Support for Intel® Core Ultra Processors Series 2 (formerly codenamed Lunar Lake) on Windows. - * OpenVINO™ Model Server now comes with production-quality support for OpenAI-compatible API - which enables significantly higher throughput for parallel inferencing on Intel® Xeon® - processors when serving LLMs to many concurrent users. - * Improved performance and memory consumption with prefix caching, KV cache compression, and - other optimizations for serving LLMs using OpenVINO™ Model Server. - * Support for Python 3.12. - * Support for Red Hat Enterprise Linux (RHEL) version 9.3 - 9.4. + * Support for + `Intel® Xeon 6 Processors with P-cores `__ + (formerly codenamed Granite Rapids) and + `Intel® Core Ultra 200V series processors `__ + (formerly codenamed Arrow Lake-S). + * Preview: GenAI API enables multimodal AI deployment with support for multimodal pipelines + for improved contextual awareness, transcription pipelines for easy audio-to-text + conversions, and image generation pipelines for streamlined text-to-visual conversions. + * Speculative decoding feature added to the GenAI API for improved performance and efficient + text generation using a small draft model that is periodically corrected by the full-size + model. + * Preview: LoRA adapters are now supported in the GenAI API for developers to quickly and + efficiently customize image and text generation models for specialized tasks. 
+ * The GenAI API now also supports LLMs on NPU allowing developers to specify NPU as the + target device, specifically for WhisperPipeline (for whisper-base, whisper-medium, and + whisper-small) and LLMPipeline (for Llama 3 8B, Llama 2 7B, Mistral-v0.2-7B, + Qwen2-7B-Instruct and Phi-3 Mini-instruct). Use driver version 32.0.100.3104 or later for + best performance. Now deprecated +----------------------------------------------------------------------------------------------- + +* Python 3.8 is no longer supported: + + +OpenVINO™ Runtime +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -* The following will not be available beyond the 2024.4 OpenVINO version: +Common +----------------------------------------------------------------------------------------------- - * The macOS x86_64 debug bins - * Python 3.8 - * Discrete Keem Bay support +* Numpy 2.x has been adopted for all currently supported components, including NNCF. +* A new constant constructor has been added, enabling constants to be created from data pointer + as shared memory. Additionally, it can take ownership of a shared, or other, object, avoiding + a two-step process to wrap memory into ``ov::Tensor``. +* Files are now read via the async ReadFile API, reducing the bottleneck for LLM model load + times on GPU. +* CPU implementation of SliceScatter operator is now available, used for models such as Gemma, + supporting increased LLM performance. -* Intel® Streaming SIMD Extensions (Intel® SSE) will be supported in source code form, but not - enabled in the binary package by default, starting with OpenVINO 2025.0. -| Check the `deprecation section <#deprecation-and-support>`__ for more information. +CPU Device Plugin +----------------------------------------------------------------------------------------------- +* Gold support of the Intel Xeon 6 platform with P-cores (formerly code name Granite Rapids) + has been reached. +* Support of Intel® Core Ultra 200V series processors (formerly codenamed Arrow Lake-S) has + been implemented. +* LLM performance has been further improved with Rotary Position Embedding optimization; Query, + Key, and Value; and multi-layer perceptron fusion optimization. +* FP16 support has been extended with SDPA and PagedAttention, improving performance of LLM via + both native APIs and the vLLM integration. +* Models with LoRA adapters are now supported. -Common +GPU Device Plugin +----------------------------------------------------------------------------------------------- + +* The KV cache INT8 compression mechanism is now available for all supported GPUs. It enables a + significant reduction in memory consumption, increasing performance with a minimal impact to + accuracy (it affects systolic devices slightly more than non-systolic ones). The feature is + activated by default for non-systolic devices. +* LoRA adapters are now functionally supported on GPU. +* A new feature of GPU weightless blob caching enables caching model structure only and reusing + the weights from the original model file. Use the new OPTIMIZE_SIZE property to activate. +* Dynamic quantization with INT4 and INT8 precisions has been implemented and enabled by + default on Intel Core Ultra platforms, improving LLM first token latency. + + +NPU Device Plugin +----------------------------------------------------------------------------------------------- + +* Models retrieved from the OpenVINO cache have a smaller memory footprint now. 
The plugin + releases the cached model (blob) after weights are loaded in NPU regions. Model export is not + available in this scenario. Memory consumption is reduced during inference execution with one + blob size. This optimization requires the latest NPU driver: 32.0.100.3104. +* A driver bug for ``ov::intel_npu::device_total_mem_size`` has been fixed. The plugin will now + report 2GB as the maximum allocatable memory for any driver that does not support graph + extension 1.8. Even if older drivers report a larger amount of memory to be available, memory + allocation would fail when 2GB are exceeded. Plugin reports the number that driver exposes + for any driver that supports graph extension 1.8 (or newer). +* A new API is used to initialize the model (available in graph extension 1.8). +* Inference request set_tensors is now supported. +* ``ov::device::LUID`` is now exposed on Windows. +* LLM-related improvements have been implemented in terms of both memory usage and performance. +* AvgPool and MaxPool operator support has been extended, adding support for more PyTorch models. + +* NOTE: for systems based on Intel® Core Ultra Processors Series 2, more than 16GB of RAM may + be required to use larger models, such as Llama-2-7B, Mistral-0.2-7B, and Qwen-2-7B + (exceeding 4b parameters). + + + prompts longer then 1024 characters will not work with a model of 7B or more parameters, + such as . + +OpenVINO Python API +----------------------------------------------------------------------------------------------- + +* Constant now can be created from openvino.Tensor. +* The “release_memory” method has been added for a compiled model, improving control over + memory consumption. + + + +OpenVINO Node.js API +----------------------------------------------------------------------------------------------- + +* Querying the best device to perform inference of a model with specific operations + is now available in JavaScript API. +* Contribution guidelines have been improved to make it easier for developers to contribute. +* Testing scope has been extended by inference in end-to-end tests. +* JavaScript API samples have been improved for readability and ease of running. + + + +TensorFlow Framework Support +----------------------------------------------------------------------------------------------- + +* TensorFlow 2.18.0, Keras 3.6.0, NumPy 2.0.2 in Python 3.12, and NumPy 1.26.4 in other Python + versions have been added to validation. +* Out-of-the-box conversion with static ranks has been improved by devising a new shape for + Switch-Merge condition sub-graphs. +* Complex type for the following operations is now supported: ExpandDims, Pack, Prod, Rsqrt, + ScatterNd, Sub. +* The following issues have been fixed: + + * the corner case with one element in LinSpace to avoid division by zero, + * support FP16 and FP64 input types for LeakyRelu, + * support non-i32/i64 output index type for ArgMin/Max operations. + + + +PyTorch Framework Support +----------------------------------------------------------------------------------------------- + +* PyTorch version 2.5 is now supported. +* OpenVINO Model Converter (OVC) now supports TorchScript and ExportedProgram saved on a drive. +* The issue of aten.index.Tensor conversion for indices with “None” values has been fixed, + helping to support the HF Stable Diffusion model in ExportedProgram format. 
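For illustration only (not part of this patch): a minimal sketch of the ExportedProgram conversion path described above. It assumes ``torch`` 2.5 and ``torchvision`` are installed and uses the public ``openvino.convert_model`` / ``openvino.save_model`` APIs; the model and file names are placeholders.

.. code-block:: python

   import torch
   import torchvision
   import openvino as ov

   # Capture a small PyTorch model as a torch.export ExportedProgram.
   model = torchvision.models.resnet18(weights=None).eval()
   example_inputs = (torch.randn(1, 3, 224, 224),)
   exported_program = torch.export.export(model, example_inputs)

   # Convert the in-memory ExportedProgram; per the note above, TorchScript
   # modules and programs saved to disk are handled by OVC as well.
   ov_model = ov.convert_model(exported_program)
   ov.save_model(ov_model, "resnet18.xml")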
+ + + +ONNX Framework Support +----------------------------------------------------------------------------------------------- + +* ONNX version 1.17.0 is now used. +* Customers' models with DequantizeLinear-21, com.microsoft.MatMulNBits, and + com.microsoft.QuickGelu operations are now supported. + +JAX/Flax Framework Support +----------------------------------------------------------------------------------------------- + +* JAX 0.4.35 and Flax 0.10.0 has been added to validation. +* jax._src.core.ClosedJaxpr object conversion is now supported. +* Vision Transformer from google-research/vision_transformer is now supported + (with support for 37 new operations). + + +OpenVINO Model Server +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -* Encryption and decryption of topology in model cache is now supported with callback functions - provided by the user (CPU only for now; ov::cache_encryption_callbacks). -* The Ubuntu20 and Ubuntu22 Docker images now include the tokenizers and GenAI CPP modules, - including pre-installed Python modules, in development versions of these images. -* Python 3.12 is now supported. +* The OpenAI API text embedding endpoint has been added, enabling OVMS to be used as a building + block for AI applications like RAG. + `(read more) `__ +* The rerank endpoint has been added based on Cohere API, enabling easy similarity detection + between a query and a set of documents. It is one of the building blocks for AI applications + like RAG and makes integration with frameworks such as langchain easy. + `(read more) `__ +* The following improvements have been done to LLM text generation: -CPU Device Plugin + * The ``echo`` sampling parameter together with ``logprobs`` in the ``completions`` endpoint + is now supported. + * Performance has been increased on both CPU and GPU. + * Throughput in high-concurrency scenarios has been increased with dynamic_split_fuse for GPU. + * Testing coverage and stability has been improved. + * The procedure for service deployment and model repository preparation has been simplified. + +* An experimental version of a Windows binary package - native model server for Windows OS - is + available. This release includes a set of limitations and has limited tests coverage. It is + intended for testing, while the production-ready release is expected with 2025.0. All feedback + is welcome. + + +Neural Network Compression Framework +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -* The following is now supported: +* A new nncf.data.generate_text_data() method has been added for generating a synthetic dataset + for LLM compression. This approach helps to compress LLMs more accurately in situations when + the dataset is not available or not sufficient. + `See our example `__ + for more information about the usage. +* Support of data-free and data-aware weight compression methods - nncf.compress_weights() - + has been extended with NF4 per-channel quantization, making compressed LLMs more accurate and + faster on NPU. +* Caching of computed statistics in nncf.compress_weights() is now available, significantly + reducing compression time when performing compression of the same LLM multiple times, with + different compression parameters. To enable it, set the advanced ``statistics_path`` parameter + of nncf.compress_weights() to the desired file path location. 
+* The ``backup_mode`` optional parameter has been added to nncf.compress_weights(), for + specifying the data type for embeddings, convolutions, and last linear layers during 4-bit + weight compression. Available options are INT8_ASYM (default), INT8_SYM, and NONE (retains + the original floating-point precision of the model weights). In certain situations, + non-default value might give better accuracy of compressed LLMs. +* Preview support is now available for optimizing models in Torch + `FX format `__, nncf.quantize(), and + nncf.compress_weights() methods. After optimization such models can be directly executed + via torch.compile(compressed_model, backend="openvino"). For more details, see + `INT8 quantization example `__. +* Memory consumption of data-aware weight compression methods - nncf.compress_weights() – has + been reduced significantly, with some variation depending on the model and method. +* Support for the following has changed: + + * NumPy 2 added + * PyTorch upgraded to 2.5.1 + * ONNX upgraded to 1.17 + * Python 3.8 discontinued - * Tensor parallel feature for multi-socket CPU inference, with performance improvement for - LLMs with 6B+ parameters (enabled through model_distribution_policy hint configurations). - * RMSNorm operator, optimized with JIT kernel to improve both the 1st and 2nd token - performance of LLMs. -* The following has been improved: - * vLLM support, with PagedAttention exposing attention score as the second output. It can now - be used in the cache eviction algorithm to improve LLM serving performance. - * 1st token performance with Llama series of models, with additional CPU operator optimization - (such as MLP, SDPA) on BF16 precision. - * Default oneTBB version on Linux is now 2021.13.0, improving overall performance on latest - Intel XEON platforms. - * MXFP4 weight compression models (compressing weights to 4-bit with the e2m1 data type - without a zero point and with 8-bit e8m0 scales) have been optimized for Xeon platforms - thanks to fullyconnected compressed weight LLM support. +OpenVINO Tokenizers ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -* The following has been fixed: +* Several operations have been introduced and optimized. +* Conversion parameters and environment info have been added to ``rt_info``, improving + reproducibility and debugging. - * Memory leak when ov::num_streams value is 0. - * CPU affinity mask is changed after OpenVINO execution when OpenVINO is compiled - with -DTHREADING=SEQ. -GPU Device Plugin +OpenVINO.GenAI +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -* Dynamic quantization for LLMs is now supported on discrete GPU platforms. -* Stable Diffusion 3 is now supported with good accuracy on Intel GPU platforms. -* Both first and second token latency for LLMs have been improved on Intel GPU platforms. -* The issue of model cache not regenerating with the value changes of - ``ov::hint::performance_mode`` or ``ov::hint::dynamic_quantization_group_size`` has been - fixed. +* The following has been added: + * LoRA adapter for the LLMPipeline. + * Text2ImagePipeline with LoRA adapter and text2image samples. + * VLMPipeline and visual_language_chat sample for text generation models with text and image + inputs. + * WhisperPipeline and whisper_speech_recognition sample. -NPU Device Plugin +* speculative_decoding_lm has been moved to LLMPipeline based implementation and is now + installed as part of the package. 
+* On NPU, a set of pipelines has been enabled: WhisperPipeline (for whisper-base, + whisper-medium, and whisper-small), LLMPipeline (for Llama 3 8B, Llama 2 7B, Mistral-v0.2-7B, + Qwen2-7B-Instruct, and Phi-3 Mini-instruct). Use driver version 32.0.100.3104 or later for + best performance. + + + + + +Other Changes and Known Issues +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +Jupyter Notebooks +----------------------------- + +* `Text-to-Image generation using OpenVINO GenAI `__ +* `Multi LoRA Image Generation `__ +* `Virtual Try-on using OpenVINO and CatVTON `__ +* `Visual Language Assistant using OpenVINO GenAI `__ +* `Speech recognition using OpenVINO GenAI `__ +* `YoloV11 `__ +* `Llama-3.2-vision `__ +* `Pixtral `__ +* `Segment Anything 2 `__ +* `Video Lips-sync using Wav2Lip `__ +* `Convert JAX to OpenVINO tutorial `__ + + +Known Issues +----------------------------- + +| **Component: CPU Plugin** +| ID: 155898 +| Description: +| Description: When using new version of Transformer version to convert some of LLMs + (GPT-J/GPT-NeoX or falcon-7b), the inference accuracy may be impacted on 4th or 5th + generation of Xeon CPU platforms, due to model structure update triggering inference + precision difference in part of the model. The workaround is to use transformer version of + 4.44.2 or lower. + +| **Component: GPU Plugin** +| ID: 154583 +| Description: +| LLM accuracy can be low especially on non-systolic platform like Intel Core Ultra. When + facing the low accuracy issue, user needs to manually set a config ACTIVATION_SCALING_FACOTR + with a value 8.0 in compile_model() function. From the next release, scaling factor value + will be automatically applied through updated IR. + +| **Component: GenAI** +| ID: 156437, 148933 +| Description: +| When using Python GenAI APIs, if ONNX 17.0 and later is installed, it may encounter the + error “DLL load failed while importing onnx_cpp2py_export: A dynamic link library (DLL) + initialization routine failed.” It is due to the ONNX dependency issue + `onnx/onnx#6267 `__, + Install + `Microsoft Visual C++ Redistributable `__ + latest supported downloads to fix the issue. + +| **Component: GenAI** +| ID: 156944 +| Description: +| There were backward incompatible changes resulting in different text generated by LLMs like + Mistralai/Mistral-7B-Instruct-v0.2 and TinyLlama/TinyLlama-1.1B-Chat-v1.0 when using a + tokenizer converted by older openvino_tolenizers. A way to resolve the issue is to convert + tokenizer and detokenizer models using the latest openvino_tokenizers. + + + + + + + + +Previous 2024 releases ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +.. ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + + + +.. dropdown:: 2024.4 - 19 September 2024 + :animate: fade-in-slide-down + :color: secondary + + **What's new** + + * More Gen AI coverage and framework integrations to minimize code changes. + + * Support for GLM-4-9B Chat, MiniCPM-1B, Llama 3 and 3.1, Phi-3-Mini, Phi-3-Medium and + YOLOX-s models. + * Noteworthy notebooks added: Florence-2, NuExtract-tiny Structure Extraction, Flux.1 Image + Generation, PixArt-α: Photorealistic Text-to-Image Synthesis, and Phi-3-Vision Visual + Language Assistant. + + * Broader Large Language Model (LLM) support and more model compression techniques. 
+ + * OpenVINO™ runtime optimized for Intel® Xe Matrix Extensions (Intel® XMX) systolic arrays on + built-in GPUs for efficient matrix multiplication resulting in significant LLM performance + boost with improved 1st and 2nd token latency, as well as a smaller memory footprint on + Intel® Core™ Ultra Processors (Series 2). + * Memory sharing enabled for NPUs on Intel® Core™ Ultra Processors (Series 2) for efficient + pipeline integration without memory copy overhead. + * Addition of the PagedAttention feature for discrete GPUs* enables a significant boost in + throughput for parallel inferencing when serving LLMs on Intel® Arc™ Graphics or Intel® + Data Center GPU Flex Series. + + * More portability and performance to run AI at the edge, in the cloud, or locally. + + * Support for Intel® Core Ultra Processors Series 2 (formerly codenamed Lunar Lake) on Windows. + * OpenVINO™ Model Server now comes with production-quality support for OpenAI-compatible API + which enables significantly higher throughput for parallel inferencing on Intel® Xeon® + processors when serving LLMs to many concurrent users. + * Improved performance and memory consumption with prefix caching, KV cache compression, and + other optimizations for serving LLMs using OpenVINO™ Model Server. + * Support for Python 3.12. + * Support for Red Hat Enterprise Linux (RHEL) version 9.3 - 9.4. + + *Now deprecated* + + * The following will not be available beyond the 2024.4 OpenVINO version: + + * The macOS x86_64 debug bins + * Python 3.8 + * Discrete Keem Bay support + + * Intel® Streaming SIMD Extensions (Intel® SSE) will be supported in source code form, but not + enabled in the binary package by default, starting with OpenVINO 2025.0. + + Check the `deprecation section <#deprecation-and-support>`__ for more information. + + **OpenVINO™ Runtime** + + *Common* + + * Encryption and decryption of topology in model cache is now supported with callback functions + provided by the user (CPU only for now; ov::cache_encryption_callbacks). + * The Ubuntu20 and Ubuntu22 Docker images now include the tokenizers and GenAI CPP modules, + including pre-installed Python modules, in development versions of these images. + * Python 3.12 is now supported. + + *CPU Device Plugin* + + * The following is now supported: + + * Tensor parallel feature for multi-socket CPU inference, with performance improvement for + LLMs with 6B+ parameters (enabled through model_distribution_policy hint configurations). + * RMSNorm operator, optimized with JIT kernel to improve both the 1st and 2nd token + performance of LLMs. + + * The following has been improved: + + * vLLM support, with PagedAttention exposing attention score as the second output. It can now + be used in the cache eviction algorithm to improve LLM serving performance. + * 1st token performance with Llama series of models, with additional CPU operator optimization + (such as MLP, SDPA) on BF16 precision. + * Default oneTBB version on Linux is now 2021.13.0, improving overall performance on latest + Intel XEON platforms. + * MXFP4 weight compression models (compressing weights to 4-bit with the e2m1 data type + without a zero point and with 8-bit e8m0 scales) have been optimized for Xeon platforms + thanks to fullyconnected compressed weight LLM support. + + * The following has been fixed: + + * Memory leak when ov::num_streams value is 0. + * CPU affinity mask is changed after OpenVINO execution when OpenVINO is compiled + with -DTHREADING=SEQ. 
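A rough sketch of how the tensor-parallel hint mentioned above can be passed (illustrative only, not part of this patch). The property strings ``MODEL_DISTRIBUTION_POLICY`` and ``TENSOR_PARALLEL`` are taken from the note and should be treated as assumptions to verify against the documentation of the installed version; the model path is a placeholder.

.. code-block:: python

   import openvino as ov

   core = ov.Core()
   model = core.read_model("model.xml")  # placeholder path

   # Ask the CPU plugin to split the model across sockets (tensor parallel);
   # property names below follow the release note and are assumptions.
   compiled = core.compile_model(
       model,
       "CPU",
       {
           "PERFORMANCE_HINT": "LATENCY",
           "MODEL_DISTRIBUTION_POLICY": "TENSOR_PARALLEL",
       },
   )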
+ + + *GPU Device Plugin* + + * Dynamic quantization for LLMs is now supported on discrete GPU platforms. + * Stable Diffusion 3 is now supported with good accuracy on Intel GPU platforms. + * Both first and second token latency for LLMs have been improved on Intel GPU platforms. + * The issue of model cache not regenerating with the value changes of + ``ov::hint::performance_mode`` or ``ov::hint::dynamic_quantization_group_size`` has been + fixed. + + + *NPU Device Plugin* + * `Remote Tensor API `__ is now supported. * You can now query the available number of tiles (ov::intel_npu::max_tiles) and force a @@ -140,193 +490,178 @@ NPU Device Plugin only during the export method. -OpenVINO Python API -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + *OpenVINO Python API* -* Openvino.Tensor, when created in the shared memory mode, now prevents “garbage collection” of - numpy memory. -* The ``openvino.experimental`` submodule is now available, providing access to experimental - functionalities under development. -* New python-exclusive openvino.Model constructors have been added. -* Image padding in PreProcessor is now available. -* OpenVINO Runtime is now compatible with numpy 2.0. + * Openvino.Tensor, when created in the shared memory mode, now prevents “garbage collection” of + numpy memory. + * The ``openvino.experimental`` submodule is now available, providing access to experimental + functionalities under development. + * New python-exclusive openvino.Model constructors have been added. + * Image padding in PreProcessor is now available. + * OpenVINO Runtime is now compatible with numpy 2.0. -OpenVINO Node.js API -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + *OpenVINO Node.js API* -* The following has been improved + * The following has been improved - * Unit tests for increased efficiency and stability - * Security updates applied to dependencies + * Unit tests for increased efficiency and stability + * Security updates applied to dependencies -* `Electron `__ - compatibility is now confirmed with new end-to-end tests. -* `New API methods `__ added. + * `Electron `__ + compatibility is now confirmed with new end-to-end tests. + * `New API methods `__ added. -TensorFlow Framework Support -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + *TensorFlow Framework Support* -* TensorFlow 2.17.0 is now supported. -* JAX 0.4.31 is now supported via a path of jax2tf with native_serialization=False -* `8 NEW* operations `__ - have been added. -* Tensor lists with multiple undefined dimensions in element_shape are now supported, enabling - support for TF Hub lite0-detection/versions/1 model. + * TensorFlow 2.17.0 is now supported. + * JAX 0.4.31 is now supported via a path of jax2tf with native_serialization=False + * `8 NEW* operations `__ + have been added. + * Tensor lists with multiple undefined dimensions in element_shape are now supported, enabling + support for TF Hub lite0-detection/versions/1 model. -PyTorch Framework Support -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + *PyTorch Framework Support* -* Torch 2.4 is now supported. -* Inplace ops are now supported automatically if the regular version is supported. -* Symmetric GPTQ model from Hugging Face will now be automatically converted to the signed type - (INT4) and zero-points will be removed. + * Torch 2.4 is now supported. 
+ * Inplace ops are now supported automatically if the regular version is supported. + * Symmetric GPTQ model from Hugging Face will now be automatically converted to the signed type + (INT4) and zero-points will be removed. -ONNX Framework Support -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + *ONNX Framework Support* -* ONNX 1.16.0 is now supported -* models with constants/inputs of uint4/int4 types are now supported. -* 4 NEW operations have been added. + * ONNX 1.16.0 is now supported + * models with constants/inputs of uINT4/INT4 types are now supported. + * 4 NEW operations have been added. -OpenVINO Model Server -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + **OpenVINO Model Server** -* OpenAI API for text generation is now officially supported and recommended for production - usage. It comes with the following new features: + * OpenAI API for text generation is now officially supported and recommended for production + usage. It comes with the following new features: - * Prefix caching feature, caching the prompt evaluation to speed up text generation. - * Ability to compress the KV Cache to a lower precision, reducing memory consumption without - a significant loss of accuracy. - * ``stop`` sampling parameters, to define a sequence that stops text generation. - * ``logprobs`` sampling parameter, returning the probabilities to returned tokens. - * Generic metrics related to execution of the MediaPipe graph that can be used for autoscaling - based on the current load and the level of concurrency. - * `Demo of text generation horizontal scalability `__ - using basic docker containers and Kubernetes. - * Automatic cancelling of text generation for disconnected clients. - * Non-UTF-8 responses from the model can be now automatically changed to Unicode replacement - characters, due to their configurable handling. - * Intel GPU with paged attention is now supported. - * Support for Llama3.1 models. + * Prefix caching feature, caching the prompt evaluation to speed up text generation. + * Ability to compress the KV Cache to a lower precision, reducing memory consumption without + a significant loss of accuracy. + * ``stop`` sampling parameters, to define a sequence that stops text generation. + * ``logprobs`` sampling parameter, returning the probabilities to returned tokens. + * Generic metrics related to execution of the MediaPipe graph that can be used for autoscaling + based on the current load and the level of concurrency. + * `Demo of text generation horizontal scalability `__ + using basic docker containers and Kubernetes. + * Automatic cancelling of text generation for disconnected clients. + * Non-UTF-8 responses from the model can be now automatically changed to Unicode replacement + characters, due to their configurable handling. + * Intel GPU with paged attention is now supported. + * Support for Llama3.1 models. -* The following has been improved: + * The following has been improved: - * Handling of model templates without bos_token is now fixed. - * Performance of the multinomial sampling algorithm. - * ``finish_reason`` in the response correctly determines reaching max_tokens (length) and - completing the sequence (stop). - * Security and stability. + * Handling of model templates without bos_token is now fixed. + * Performance of the multinomial sampling algorithm. 
+ * ``finish_reason`` in the response correctly determines reaching max_tokens (length) and + completing the sequence (stop). + * Security and stability. -Neural Network Compression Framework -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + **Neural Network Compression Framework** -* The LoRA Correction algorithm is now included in the Weight Compression method, improving the - accuracy of INT4-compressed models on top of other data-aware algorithms, such as AWQ and - Scale Estimation. To enable it, set the lora_correction option to True in - nncf.compress_weights(). -* The GPTQ compression algorithm can now be combined with the Scale Estimation algorithm, - making it possible to run GPTQ, AWQ, and Scale Estimation together, for the optimum-accuracy - INT4-compressed models. -* INT8 quantization of LSTMSequence and Convolution operations for constant inputs is now - enabled, resulting in better performance and reduced model size. + * The LoRA Correction algorithm is now included in the Weight Compression method, improving the + accuracy of INT4-compressed models on top of other data-aware algorithms, such as AWQ and + Scale Estimation. To enable it, set the lora_correction option to True in + nncf.compress_weights(). + * The GPTQ compression algorithm can now be combined with the Scale Estimation algorithm, + making it possible to run GPTQ, AWQ, and Scale Estimation together, for the optimum-accuracy + INT4-compressed models. + * INT8 quantization of LSTMSequence and Convolution operations for constant inputs is now + enabled, resulting in better performance and reduced model size. -OpenVINO Tokenizers -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + **OpenVINO Tokenizers** -* Split and BPE tokenization operations have been reimplemented, resulting in improved - tokenization accuracy and performance. -* New building options are now available, offering up to a 12x reduction in binary size. -* An operation is now available to validate and skip/replace model-generated non-Unicode - bytecode sequences during detokenization. + * Split and BPE tokenization operations have been reimplemented, resulting in improved + tokenization accuracy and performance. + * New building options are now available, offering up to a 12x reduction in binary size. + * An operation is now available to validate and skip/replace model-generated non-Unicode + bytecode sequences during detokenization. -OpenVINO.GenAI -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + **OpenVINO.GenAI** -* New samples and pipelines are now available: + * New samples and pipelines are now available: - * An example IterableStreamer implementation in - `multinomial_causal_lm/python sample `__ + * An example IterableStreamer implementation in + `multinomial_causal_lm/python sample `__ -* GenAI compilation is now available as part of OpenVINO via the –DOPENVINO_EXTRA_MODULES CMake - option. + * GenAI compilation is now available as part of OpenVINO via the –DOPENVINO_EXTRA_MODULES CMake + option. 
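To make the streaming-sample reference above concrete, a minimal custom streamer sketch (illustrative only, not part of this patch). It assumes the ``openvino_genai`` package and an already converted model directory, and uses the public ``StreamerBase`` and ``LLMPipeline`` classes; the model path is a placeholder.

.. code-block:: python

   import openvino_genai as ov_genai

   class PrintStreamer(ov_genai.StreamerBase):
       """Prints tokens as they are produced instead of waiting for the full answer."""

       def __init__(self, tokenizer):
           super().__init__()
           self.tokenizer = tokenizer

       def put(self, token_id) -> bool:
           print(self.tokenizer.decode([token_id]), end="", flush=True)
           return False  # returning True would stop generation early

       def end(self):
           print()

   pipe = ov_genai.LLMPipeline("TinyLlama-1.1B-Chat-v1.0-ov", "CPU")  # placeholder path
   pipe.generate("What is OpenVINO?",
                 streamer=PrintStreamer(pipe.get_tokenizer()),
                 max_new_tokens=64)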
-Other Changes and Known Issues -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + **Other Changes and Known Issues** -Jupyter Notebooks ------------------------------ + *Jupyter Notebooks* -* `Florence-2 `__ -* `NuExtract: Structure Extraction `__ -* `Flux.1 Image Generation `__ -* `PixArt-α: Photorealistic Text-to-Image Synthesis `__ -* `Phi-3-Vision Visual Language Assistant `__ -* `MiniCPMV2.6 `__ -* `InternVL2 `__ -* The list of supported models in - `LLM chatbot `__ - now includes Phi3.5, Gemma2 support + * `Florence-2 `__ + * `NuExtract: Structure Extraction `__ + * `Flux.1 Image Generation `__ + * `PixArt-α: Photorealistic Text-to-Image Synthesis `__ + * `Phi-3-Vision Visual Language Assistant `__ + * `MiniCPMV2.6 `__ + * `InternVL2 `__ + * The list of supported models in + `LLM chatbot `__ + now includes Phi3.5, Gemma2 support -Known Issues ------------------------------ + *Known Issues* -| **Component: CPU** -| ID: CVS-150542, CVS-145996 -| Description: -| The upgrade of default oneTBB on Linux platforms to 2021.13.0 improves overall - performance on latest Intel XEON platform but causes regression in some cases. Limit the - threads usage of postprocessing done by Torch can mitigate the regression (For example: - torch.set_num_threads(n), n can be 1, beam search number, prompt batch size or other - numbers). - -| **Component: OpenVINO.Genai** -| ID: 149694 -| Description: -| Passing openvino.Tensor instance to LLMPipleine triggers incompatible arguments error if - OpenVINO and GenAI are installed from PyPI on Windows. + | **Component: CPU** + | ID: CVS-150542, CVS-145996 + | Description: + | The upgrade of default oneTBB on Linux platforms to 2021.13.0 improves overall + performance on latest Intel XEON platform but causes regression in some cases. Limit the + threads usage of postprocessing done by Torch can mitigate the regression (For example: + torch.set_num_threads(n), n can be 1, beam search number, prompt batch size or other + numbers). + + | **Component: OpenVINO.Genai** + | ID: 149694 + | Description: + | Passing openvino.Tensor instance to LLMPipleine triggers incompatible arguments error if + OpenVINO and GenAI are installed from PyPI on Windows. -| **Component: OpenVINO.Genai** -| ID: 148308 -| Description: -| OpenVINO.GenAI archive doesn't have debug libraries for OpenVINO Tokenizers and - OpenVINO.GenAI. + | **Component: OpenVINO.Genai** + | ID: 148308 + | Description: + | OpenVINO.GenAI archive doesn't have debug libraries for OpenVINO Tokenizers and + OpenVINO.GenAI. + + | **Component: ONNX for ARM** + | ID: n/a + | Description: + | For ARM binaries, the `1.16 ONNX library `__ + is not yet available. The ONNX library for ARM, version 1.15, does not include the latest + functional and security updates. Users should update to the latest version as it becomes + available. + | Currently, if an unverified AI model is supplied to the ONNX frontend, it could lead to a + directory traversal issue. Ensure that the file name and file path that a model contains + are verified and correct. To learn more about the vulnerability, see: + `CVE-2024-27318 `__ and + `CVE-2024-27319 `__. + + | **Component: Kaldi** + | ID: n/a + | Description: + | There is a known issue with the Kaldi DL framework support on the Python version 3.12 due + to the numpy version incompatibilities. As Kaldi support in OpenVINO is currently deprecated + and will be discontinued with version 2025.0, the issue will not be addressed. 
-| **Component: ONNX for ARM** -| ID: n/a -| Description: -| For ARM binaries, the `1.16 ONNX library `__ - is not yet available. The ONNX library for ARM, version 1.15, does not include the latest - functional and security updates. Users should update to the latest version as it becomes - available. -| Currently, if an unverified AI model is supplied to the ONNX frontend, it could lead to a - directory traversal issue. Ensure that the file name and file path that a model contains - are verified and correct. To learn more about the vulnerability, see: - `CVE-2024-27318 `__ and - `CVE-2024-27319 `__. - -| **Component: Kaldi** -| ID: n/a -| Description: -| There is a known issue with the Kaldi DL framework support on the Python version 3.12 due - to the numpy version incompatibilities. As Kaldi support in OpenVINO is currently deprecated - and will be discontinued with version 2025.0, the issue will not be addressed. -Previous 2024 releases -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -.. ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -.. ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. dropdown:: 2024.3 - 31 July 2024 :animate: fade-in-slide-down @@ -638,8 +973,8 @@ Previous 2024 releases *CPU Device Plugin* * Performance when using latency mode in FP32 precision has been improved on Intel client - platforms, including Core Ultra (codename Meteor Lake) and 13th Gen Core processors - (codename Raptor Lake). + platforms, including Core Ultra (formerly codenamed Meteor Lake) and 13th Gen Core processors + (formerly codenamed Raptor Lake). * 2nd token latency and memory footprint for FP16 LLMs have been improved significantly on AVX2 and AVX512 based CPU platforms, particularly for small batch sizes. * PagedAttention has been optimized on AVX2, AVX512 and AMX platforms together with INT8 KV cache @@ -1069,8 +1404,8 @@ Previous 2024 releases * More portability and performance to run AI at the edge, in the cloud, or locally. * A preview plugin architecture of the integrated Neural Processor Unit (NPU) as part of - Intel® Core™ Ultra processor (codename Meteor Lake) is now included in the main OpenVINO™ - package on PyPI. + Intel® Core™ Ultra processor (formerly codenamed Meteor Lake) is now included in the + main OpenVINO™ package on PyPI. * Improved performance on ARM by enabling the ARM threading library. In addition, we now support multi-core ARM processors and enabled FP16 precision by default on MacOS. * New and improved LLM serving samples from OpenVINO Model Server for multi-batch inputs and @@ -1315,22 +1650,26 @@ Discontinued in 2024 for applying NNCF optimization on top of models from Hugging Face. * Support for Apache MXNet, Caffe, and Kaldi model formats. Conversion to ONNX may be used as a solution. + * The macOS x86_64 debug bins are no longer provided with the OpenVINO toolkit, starting + with OpenVINO 2024.5. + * Python 3.8 is no longer supported, starting with OpenVINO 2024.5. -Deprecated and to be removed in the future --------------------------------------------- + * As MxNet doesn't support Python version higher than 3.8, according to the + `MxNet PyPI project `__, + it is no longer supported by OpenVINO, either. -* The macOS x86_64 debug bins will no longer be provided with the OpenVINO toolkit, starting - with OpenVINO 2024.5. 
-* Python 3.8 is now considered deprecated, and it will not be available beyond the 2024.4 - OpenVINO version. + * Discrete Keem Bay support is no longer supported, starting with OpenVINO 2024.5. + * Support for discrete devices (formerly codenamed Raptor Lake) is no longer available for + NPU. - * As MxNet doesn't support Python version higher than 3.8, according to the - `MxNet PyPI project `__, - it will no longer be supported in future versions, either. -* Discrete Keem Bay support is now considered deprecated and will be fully removed with OpenVINO 2024.5 +Deprecated and to be removed in the future +-------------------------------------------- + * Intel® Streaming SIMD Extensions (Intel® SSE) will be supported in source code form, but not - enabled in the binary package by default, starting with OpenVINO 2025.0 + enabled in the binary package by default, starting with OpenVINO 2025.0. +* Ubuntu 20.04 support will be deprecated in future OpenVINO releases due to the end of + standard support. * The openvino-nightly PyPI module will soon be discontinued. End-users should proceed with the Simple PyPI nightly repo instead. More information in `Release Policy `__. diff --git a/docs/articles_en/get-started/install-openvino.rst b/docs/articles_en/get-started/install-openvino.rst index 7f26ab9ec72c9f..22d889c18f71cd 100644 --- a/docs/articles_en/get-started/install-openvino.rst +++ b/docs/articles_en/get-started/install-openvino.rst @@ -23,12 +23,12 @@ Install OpenVINO™ 2024.4 -OpenVINO 2024.4, described here, is not a Long-Term-Support version! +OpenVINO 2024.5, described here, is not a Long-Term-Support version! All currently supported versions are: -* 2024.4 (development) +* 2024.5 (development) * 2023.3 (LTS) -* 2022.3 (LTS) + .. dropdown:: Effortless GenAI integration with OpenVINO GenAI Flavor diff --git a/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide-npu.rst b/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide-npu.rst index 6917d809c7e5d6..5a641300a68edb 100644 --- a/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide-npu.rst +++ b/docs/articles_en/learn-openvino/llm_inference_guide/genai-guide-npu.rst @@ -20,6 +20,10 @@ Install required dependencies: pip install nncf==2.12 onnx==1.16.1 optimum-intel==1.19.0 pip install --pre openvino openvino-tokenizers openvino-genai --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly +NOTE that for systems based on Intel® Core Ultra Processors Series 2 and 16 GB of RAM, +prompts longer then 1024 characters will not work with a model of 7B or more parameters, +such as Llama-2-7B, Mistral-0.2-7B, and Qwen-2-7B. 
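Once a model has been exported as described in the next section, running it through the GenAI API on NPU looks roughly like the sketch below (illustrative only, not part of this patch). The model directory is a placeholder, and the prompt-length limitation from the note above still applies.

.. code-block:: python

   import openvino_genai as ov_genai

   # Directory produced by the Optimum-Intel export step described below (placeholder).
   model_dir = "llama-2-7b-chat-ov-int4"

   pipe = ov_genai.LLMPipeline(model_dir, "NPU")
   print(pipe.generate("What is OpenVINO?", max_new_tokens=64))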
+ Export an LLM model via Hugging Face Optimum-Intel ################################################## From 4cd1512682fb2628580b2b5a881fc9cff7caeca7 Mon Sep 17 00:00:00 2001 From: Ekaterina Aidova Date: Tue, 19 Nov 2024 17:51:33 +0400 Subject: [PATCH 09/62] fix getting config in pt fe decoder (#27609) ### Details: - *item1* - *...* ### Tickets: - *ticket-id* --- .../python/src/openvino/frontend/pytorch/ts_decoder.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/bindings/python/src/openvino/frontend/pytorch/ts_decoder.py b/src/bindings/python/src/openvino/frontend/pytorch/ts_decoder.py index af8eafda8e9be7..eb32a0a93c669b 100644 --- a/src/bindings/python/src/openvino/frontend/pytorch/ts_decoder.py +++ b/src/bindings/python/src/openvino/frontend/pytorch/ts_decoder.py @@ -51,8 +51,10 @@ def __init__( self.out_debug_name_overwrites = {} if graph_element is None: if hasattr(pt_module, "config"): - self.config = pt_module.config.to_dict() if not isinstance( - pt_module.config, dict) else pt_module.config + if isinstance(pt_module.config, dict): + self.config = pt_module.config + elif hasattr(pt_module.config, "to_dict"): + self.config = pt_module.config.to_dict() try: pt_module = self._get_scripted_model( pt_module, example_input, skip_freeze) From 236a854be935bb219dd507f55e307ae3719f779a Mon Sep 17 00:00:00 2001 From: Oleg Pipikin Date: Tue, 19 Nov 2024 19:53:16 +0100 Subject: [PATCH 10/62] Add sdl required flags to debug build on Windows (#27592) ### Details: - Add sdl required flags to debug build on Windows ### Tickets: - CVS-157306 --- .../developer_package/compile_flags/sdl.cmake | 19 ++++++++++++++----- 1 file changed, 14 insertions(+), 5 deletions(-) diff --git a/cmake/developer_package/compile_flags/sdl.cmake b/cmake/developer_package/compile_flags/sdl.cmake index 34ad5904519e7f..35f59cb8970573 100644 --- a/cmake/developer_package/compile_flags/sdl.cmake +++ b/cmake/developer_package/compile_flags/sdl.cmake @@ -55,11 +55,20 @@ if(ENABLE_INTEGRITYCHECK) set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} /INTEGRITYCHECK") endif() -set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} ${OV_C_CXX_FLAGS}") -set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} ${OV_C_CXX_FLAGS}") -set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} ${OV_LINKER_FLAGS}") -set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${CMAKE_MODULE_LINKER_FLAGS_RELEASE} ${OV_LINKER_FLAGS}") -set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} ${OV_LINKER_FLAGS}") +if(CMAKE_CXX_COMPILER_ID STREQUAL "MSVC" OR (OV_COMPILER_IS_INTEL_LLVM AND WIN32)) + # add sdl required flags to both Debug and Release on Windows + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OV_C_CXX_FLAGS}") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${OV_C_CXX_FLAGS}") + set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${OV_LINKER_FLAGS}") + set(CMAKE_MODULE_LINKER_FLAGS "${CMAKE_MODULE_LINKER_FLAGS} ${OV_LINKER_FLAGS}") + set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${OV_LINKER_FLAGS}") +else() + set(CMAKE_C_FLAGS_RELEASE "${CMAKE_C_FLAGS_RELEASE} ${OV_C_CXX_FLAGS}") + set(CMAKE_CXX_FLAGS_RELEASE "${CMAKE_CXX_FLAGS_RELEASE} ${OV_C_CXX_FLAGS}") + set(CMAKE_SHARED_LINKER_FLAGS_RELEASE "${CMAKE_SHARED_LINKER_FLAGS_RELEASE} ${OV_LINKER_FLAGS}") + set(CMAKE_MODULE_LINKER_FLAGS_RELEASE "${CMAKE_MODULE_LINKER_FLAGS_RELEASE} ${OV_LINKER_FLAGS}") + set(CMAKE_EXE_LINKER_FLAGS_RELEASE "${CMAKE_EXE_LINKER_FLAGS_RELEASE} ${OV_LINKER_FLAGS}") +endif() unset(OV_C_CXX_FLAGS) 
unset(OV_LINKER_FLAGS) From 4f75ab9b00e9c600d585900f9a8c7dacfd340a9c Mon Sep 17 00:00:00 2001 From: Karol Blaszczak Date: Tue, 19 Nov 2024 23:24:53 +0100 Subject: [PATCH 11/62] [DOCS] relnotes 24.5 pass2 (#27621) port: https://github.com/openvinotoolkit/openvino/pull/27616 --- .../about-openvino/performance-benchmarks.rst | 6 +- .../about-openvino/release-notes-openvino.rst | 74 +- .../OV-2024.5-platform_list.pdf | Bin 191948 -> 193350 bytes .../benchmarks_files/data/graph-data-ov.json | 1724 +++-------------- .../benchmarks_files/graph-config.json | 18 + 5 files changed, 350 insertions(+), 1472 deletions(-) diff --git a/docs/articles_en/about-openvino/performance-benchmarks.rst b/docs/articles_en/about-openvino/performance-benchmarks.rst index d874d1808f7aaf..8a58dc27df1f83 100644 --- a/docs/articles_en/about-openvino/performance-benchmarks.rst +++ b/docs/articles_en/about-openvino/performance-benchmarks.rst @@ -13,7 +13,7 @@ Performance Benchmarks Efficient LLMs for AI PC Performance Information F.A.Q. OpenVINO Accuracy - Getting Performance Numbers + Getting Performance Numbers This page presents benchmark results for the @@ -160,10 +160,10 @@ For a listing of all platforms and configurations used for testing, refer to the **Disclaimers** * Intel® Distribution of OpenVINO™ toolkit performance results are based on release - 2024.3, as of July 31, 2024. + 2024.5, as of November 20, 2024. * OpenVINO Model Server performance results are based on release - 2024.3, as of Aug. 19, 2024. + 2024.4, as of Sept. 30, 2024. The results may not reflect all publicly available updates. Intel technologies' features and benefits depend on system configuration and may require enabled hardware, software, or service diff --git a/docs/articles_en/about-openvino/release-notes-openvino.rst b/docs/articles_en/about-openvino/release-notes-openvino.rst index dc82009b402593..343c9e780f05dc 100644 --- a/docs/articles_en/about-openvino/release-notes-openvino.rst +++ b/docs/articles_en/about-openvino/release-notes-openvino.rst @@ -28,21 +28,22 @@ OpenVINO Release Notes What's new +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -* More Gen AI coverage and framework integrations to minimize code changes. +* More GenAI coverage and framework integrations to minimize code changes. * New models supported: Llama 3.2 (1B & 3B), Gemma 2 (2B & 9B), and YOLO11. - * LLM support on NPU: Llama 3 8B, Llama 2 7B, Mistral-v0.2-7B, Qwen2-7B-Instruct and Phi-3 Mini. + * LLM support on NPU: Llama 3 8B, Llama 2 7B, Mistral-v0.2-7B, Qwen2-7B-Instruct and Phi-3 + Mini-Instruct. * Noteworthy notebooks added: Sam2, Llama3.2, Llama3.2 - Vision, Wav2Lip, Whisper, and Llava. - Preview: Support for Flax, a high-performance Python neural network library based on JAX. + * Preview: support for Flax, a high-performance Python neural network library based on JAX. Its modular design allows for easy customization and accelerated inference on GPUs. * Broader Large Language Model (LLM) support and more model compression techniques. - * Optimizations for built-in GPUs on Intel® Core Ultra Processors (Series 1) and Intel® Arc™ + * Optimizations for built-in GPUs on Intel® Core™ Ultra Processors (Series 1) and Intel® Arc™ Graphics include KV Cache compression for memory reduction along with improved usability, and model load time optimizations to improve first token latency for LLMs. 
* Dynamic quantization was enabled to improve first token latency for LLMs on built-in - Intel® GPUs without impacting accuracy on Intel Core Ultra Processors (Series 1). Second + Intel® GPUs without impacting accuracy on Intel® Core™ Ultra Processors (Series 1). Second token latency will also improve for large batch inference. * A new method to generate synthetic text data is implemented in the Neural Network Compression Framework (NNCF). This will allow LLMs to be compressed more accurately using @@ -52,9 +53,9 @@ What's new * More portability and performance to run AI at the edge, in the cloud, or locally. * Support for - `Intel® Xeon 6 Processors with P-cores `__ + `Intel® Xeon® 6 Processors with P-cores `__ (formerly codenamed Granite Rapids) and - `Intel® Core Ultra 200V series processors `__ + `Intel® Core™ Ultra 200V series processors `__ (formerly codenamed Arrow Lake-S). * Preview: GenAI API enables multimodal AI deployment with support for multimodal pipelines for improved contextual awareness, transcription pipelines for easy audio-to-text @@ -95,9 +96,9 @@ Common CPU Device Plugin ----------------------------------------------------------------------------------------------- -* Gold support of the Intel Xeon 6 platform with P-cores (formerly code name Granite Rapids) +* Gold support of the Intel® Xeon® 6 platform with P-cores (formerly code name Granite Rapids) has been reached. -* Support of Intel® Core Ultra 200V series processors (formerly codenamed Arrow Lake-S) has +* Support of Intel® Core™ Ultra 200V series processors (formerly codenamed Arrow Lake-S) has been implemented. * LLM performance has been further improved with Rotary Position Embedding optimization; Query, Key, and Value; and multi-layer perceptron fusion optimization. @@ -117,7 +118,7 @@ GPU Device Plugin * A new feature of GPU weightless blob caching enables caching model structure only and reusing the weights from the original model file. Use the new OPTIMIZE_SIZE property to activate. * Dynamic quantization with INT4 and INT8 precisions has been implemented and enabled by - default on Intel Core Ultra platforms, improving LLM first token latency. + default on Intel® Core™ Ultra platforms, improving LLM first token latency. NPU Device Plugin @@ -138,14 +139,11 @@ NPU Device Plugin * LLM-related improvements have been implemented in terms of both memory usage and performance. * AvgPool and MaxPool operator support has been extended, adding support for more PyTorch models. -* NOTE: for systems based on Intel® Core Ultra Processors Series 2, more than 16GB of RAM may +* NOTE: for systems based on Intel® Core™ Ultra Processors Series 2, more than 16GB of RAM may be required to use larger models, such as Llama-2-7B, Mistral-0.2-7B, and Qwen-2-7B - (exceeding 4b parameters). + (exceeding 4B parameters) with prompt sizes over 1024 tokens. - prompts longer then 1024 characters will not work with a model of 7B or more parameters, - such as . - OpenVINO Python API ----------------------------------------------------------------------------------------------- @@ -328,17 +326,17 @@ Known Issues | Description: | Description: When using new version of Transformer version to convert some of LLMs (GPT-J/GPT-NeoX or falcon-7b), the inference accuracy may be impacted on 4th or 5th - generation of Xeon CPU platforms, due to model structure update triggering inference + generation of Intel® Xeon® processors, due to model structure update triggering inference precision difference in part of the model. 
The workaround is to use transformer version of 4.44.2 or lower. | **Component: GPU Plugin** | ID: 154583 | Description: -| LLM accuracy can be low especially on non-systolic platform like Intel Core Ultra. When +| LLM accuracy can be low especially on non-systolic platforms like Intel® Core™ Ultra. When facing the low accuracy issue, user needs to manually set a config ACTIVATION_SCALING_FACOTR - with a value 8.0 in compile_model() function. From the next release, scaling factor value - will be automatically applied through updated IR. + with a value of 8.0 in the compile_model() function. From the next release, scaling factor + value will be automatically applied through updated IR. | **Component: GenAI** | ID: 156437, 148933 @@ -402,7 +400,7 @@ Previous 2024 releases * More portability and performance to run AI at the edge, in the cloud, or locally. - * Support for Intel® Core Ultra Processors Series 2 (formerly codenamed Lunar Lake) on Windows. + * Support for Intel® Core™ Ultra Processors Series 2 (formerly codenamed Lunar Lake) on Windows. * OpenVINO™ Model Server now comes with production-quality support for OpenAI-compatible API which enables significantly higher throughput for parallel inferencing on Intel® Xeon® processors when serving LLMs to many concurrent users. @@ -450,10 +448,10 @@ Previous 2024 releases * 1st token performance with Llama series of models, with additional CPU operator optimization (such as MLP, SDPA) on BF16 precision. * Default oneTBB version on Linux is now 2021.13.0, improving overall performance on latest - Intel XEON platforms. + Intel® Xeon® platforms. * MXFP4 weight compression models (compressing weights to 4-bit with the e2m1 data type - without a zero point and with 8-bit e8m0 scales) have been optimized for Xeon platforms - thanks to fullyconnected compressed weight LLM support. + without a zero point and with 8-bit e8m0 scales) have been optimized for Intel® Xeon® + platforms thanks to fullyconnected compressed weight LLM support. * The following has been fixed: @@ -622,7 +620,7 @@ Previous 2024 releases | ID: CVS-150542, CVS-145996 | Description: | The upgrade of default oneTBB on Linux platforms to 2021.13.0 improves overall - performance on latest Intel XEON platform but causes regression in some cases. Limit the + performance on latest Intel® Xeon® platform but causes regression in some cases. Limit the threads usage of postprocessing done by Torch can mitigate the regression (For example: torch.set_num_threads(n), n can be 1, beam search number, prompt batch size or other numbers). @@ -949,7 +947,7 @@ Previous 2024 releases * Preview: addition of the :doc:`Generate API <../learn-openvino/llm_inference_guide/genai-guide>`, a simplified API for text generation using large language models with only a few lines of code. The API is available through the newly launched OpenVINO GenAI package. - * Support for Intel Atom® Processor X Series. For more details, see :doc:`System Requirements <./release-notes-openvino/system-requirements>`. + * Support for Intel® Atom® Processor X Series. For more details, see :doc:`System Requirements <./release-notes-openvino/system-requirements>`. * Preview: Support for Intel® Xeon® 6 processor. **OpenVINO™ Runtime** @@ -973,8 +971,8 @@ Previous 2024 releases *CPU Device Plugin* * Performance when using latency mode in FP32 precision has been improved on Intel client - platforms, including Core Ultra (formerly codenamed Meteor Lake) and 13th Gen Core processors - (formerly codenamed Raptor Lake). 
+ platforms, including Intel® Core™ Ultra (formerly codenamed Meteor Lake) and 13th Gen Core + processors (formerly codenamed Raptor Lake). * 2nd token latency and memory footprint for FP16 LLMs have been improved significantly on AVX2 and AVX512 based CPU platforms, particularly for small batch sizes. * PagedAttention has been optimized on AVX2, AVX512 and AMX platforms together with INT8 KV cache @@ -988,9 +986,9 @@ Previous 2024 releases * Both first token and average token latency of LLMs is improved on all GPU platforms, most significantly on discrete GPUs. Memory usage of LLMs has been reduced as well. - * Stable Diffusion FP16 performance improved on Core Ultra platforms, with significant pipeline - improvement for models with dynamic-shaped input. Memory usage of the pipeline has been reduced, - as well. + * Stable Diffusion FP16 performance improved on Intel® Core™ Ultra platforms, with significant + pipeline improvement for models with dynamic-shaped input. Memory usage of the pipeline + has been reduced, as well. * Optimized permute_f_y kernel performance has been improved. *NPU Device Plugin* @@ -1045,7 +1043,7 @@ Previous 2024 releases * OpenVINO Model server can be now used for text generation use cases using OpenAI compatible API. * Added support for continuous batching and PagedAttention algorithms for text generation with - fast and efficient in high concurrency load especially on Intel Xeon processors. + fast and efficient in high concurrency load especially on Intel® Xeon® processors. `Learn more about it `__. **Neural Network Compression Framework** @@ -1088,8 +1086,9 @@ Previous 2024 releases | Description: | In 2024.2, oneTBB 2021.2.x is used for Intel Distribution of OpenVINO Ubuntu and Red Hat archives, instead of system TBB/oneTBB. This improves performance on the new generation of - Xeon platforms but may increase latency of some models on the previous generation. You can - build OpenVINO with **-DSYSTEM_TBB=ON** to get better latency performance for these models. + Intel® Xeon® platforms but may increase latency of some models on the previous generation. + You can build OpenVINO with **-DSYSTEM_TBB=ON** to get better latency performance for + these models. | **Component: python API** | ID: CVS-141744 @@ -1598,10 +1597,11 @@ Previous 2024 releases | **Component: CPU runtime** | *ID:* N/A | *Description:* - | Performance results (first token latency) may vary from those offered by the previous OpenVINO version, for - “latency” hint inference of LLMs with long prompts on Xeon platforms with 2 or more - sockets. The reason is that all CPU cores of just the single socket running the application - are employed, lowering the memory overhead for LLMs when numa control is not used. + | Performance results (first token latency) may vary from those offered by the previous + OpenVINO version, for “latency” hint inference of LLMs with long prompts on Intel® Xeon® + platforms with 2 or more sockets. The reason is that all CPU cores of just the single + socket running the application are employed, lowering the memory overhead for LLMs when + numa control is not used. | *Workaround:* | The behavior is expected but stream and thread configuration may be used to include cores from all sockets. 
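The CPU runtime note above only hints at what "stream and thread configuration" means in practice. As a minimal sketch of that workaround, assuming the standard OpenVINO Python API, a hypothetical IR path, and illustrative property values (the right thread count depends on the actual two-socket core topology):

    import openvino as ov

    core = ov.Core()
    # Illustrative values only: spread inference threads across both sockets
    # instead of relying on the default single-socket behavior of the latency hint.
    compiled = core.compile_model(
        "llm.xml",                          # hypothetical IR path
        device_name="CPU",
        config={
            "PERFORMANCE_HINT": "LATENCY",
            "NUM_STREAMS": "1",
            "INFERENCE_NUM_THREADS": "96",  # e.g. physical cores of both sockets combined
        },
    )

The same dictionary can also be applied once with core.set_property("CPU", {...}) before compiling, which may be more convenient when several models share the configuration.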
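Likewise, the GPU Plugin known issue (ID 154583) above asks the user to pass a scaling-factor config entry to compile_model(). A hedged sketch of what that could look like in Python, with the property key copied verbatim from the note (its exact spelling and availability should be verified against the OpenVINO release in use), the recommended value of 8.0, and a hypothetical model path:

    import openvino as ov

    core = ov.Core()
    # Key spelled exactly as in the known-issue text above; treat it as an
    # assumption and confirm it for your OpenVINO version before relying on it.
    compiled = core.compile_model(
        "llm.xml",                                   # hypothetical IR path
        device_name="GPU",
        config={"ACTIVATION_SCALING_FACOTR": 8.0},
    )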
diff --git a/docs/sphinx_setup/_static/benchmarks_files/OV-2024.5-platform_list.pdf b/docs/sphinx_setup/_static/benchmarks_files/OV-2024.5-platform_list.pdf
index 8e57fde69d60c3efde43d68c1001c19a499eb4ab..9cb20570020cc8fb73ccafb23ee817dfeab1c4ff 100644
GIT binary patch
delta 21848
delta 20612
z)S~(pwYAp~b3ngCXOlx|hxVDV3n!6J6q@L1CuCM`P%1X>* z(nLj<#2P=4l3J{fr7^JnB$klUkbv%(_J(!o(y$xW{1jy#MmS;ONkh(CbNl3@STw%9 z2O54)KY(tYCX-7Y#6D#OM(t~PY4?)VzqIAZ_Fm54t_!I8QG@0YEAQ3V<=-)xuAeHZLsf`5l!~UX%CYFY^yGHtOi5Ibrq7( z{+1qeDLlP6!C?Z!PFc&^6ikUvRr5hH=el7t%A6+s;7AeF`_nIOYA46}|J@ciwL;LO z5+^1P-R}+9VfPcL#;v2yq;tcMvDE_k843BE=}>P7*Sc@4cDKZT(nRT{RM<9+$*)ML zlLjibBzEuvr&sKdP8!f+FLe@QO++Vic2`XTMw)HZpsd;EJS^aZ>7X=(W=-m=(O}Vg z3~SL<_w;0Eu;T2Cgjw|^EcxEh&(a$|M)^z(I6bI#*cnN+rOWJP5YKtbSWDZ&cZM2V za6Po2#L=86B5+|^K4qk!y#iTvzx<;+TXnbTY<^%m4l_KF**M=)jFMART<4~@2sqD- zu-L_~=yO_SD`$9Q^X;#Jc^eeo%Ui}NS}{NUl_}G$fv*7<1&6dWa{7fS8eGhu=ism{ z$>sGI&UPLi$`q|GZJzQvKJx&HgPTde@FrR||K(Q88^81JJ;_Z)?Ut?t`Bfo98xW5J z;g~GG@pa28Y$I2|4@7;fJ+(9-ZN6)8_BsX8^wIPDB1MH^v&;KlCwpWL@Oy!7pE2`)kMsxK$`GTO2z6p|)R&k7CsboQaLpw@|QRRqZ`s+#^(zbe{n9;99u10^y3mnyNbf#9- zQ>`Xv$n~Mqp!X>UtF3_vr*9l?K55SbsXy4(39471ylKKO@69{TuyguS zv~W6nhwWLU&qTODRVZF_PN(kmtA7y9aC?Rtew=!r5?eyVCjNj z$9d<{Slvk!JTZ6@i%4mRK)ES(r?83?RUVph0@jhGi)i++zNS%hx#H>(%41tnUjuAA z85#emxKCAqE}E4;!?I-6!Pr_^Q%hF{rO-04nW8@WVW;A>s6ZRtS`{4GD{g=V%xow&zEk)GcFt#-Bp&d+JoM0Y9@_bmT{(+NM!`c`X)J8(0N8eIRf z&TQOE-||*Dy?(m8Z9S$|+WbTP2Lo-c*ngizye#&tIAp08E=>sq}XCu_(a)+~!Zv?we43pB2ABfR$wXRfRn+Flpu3Do9L zn0LCUGC5Q-b(ZY=>7CW>USqjbb-=(L$VarUNhtw5ji$Y>QrpU}IZcN;^`y*jGhTP{ zD8Q@Jgb!IvQTd2sClK*KALAObN>)}s2dJ(gYq4v{s)+-pS8e#-p6SEOIenA+JX-hU z!ught_R>i4#B#4@vdnPF@hs|^HqCeZb4t(iIb29jC z#}JGDjuVLW8wR-m^o~IYbPt9>=Q8oR_OFs-0Ca%h&$c zg)Gpmr*Ik9JwSks@3iL@j7tV-FpLn3uSXU?A;j$ZzYB@qv($L??Ll;)OnM-9U!Whr zhM4%8P4R?~Fz-h8hq$4Ad?2@c$t&w~%ck_QV!Vl`1iD860bC9{&@V1-{pLUY7g?l@ A6951J diff --git a/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json b/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json index 4d482804d30a5a..59e06ef51f812d 100644 --- a/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json +++ b/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ov.json @@ -13490,7 +13490,7 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", "Model": "bert-base-cased", "featured_SKU": false, "whats_new_model": false, @@ -13500,8 +13500,8 @@ "Precisions": [ { "int4": "", - "int8": 187.26, - "fp16": 147.66, + "int8": 385.87, + "fp16": 420.99, "fp32": "", "bf16": "", "token_int4": "", @@ -13516,7 +13516,7 @@ "Precisions": [ { "int4": "", - "int8": 5.66, + "int8": 2.99, "fp16": "", "fp32": "", "bf16": "" @@ -13528,7 +13528,7 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", "Model": "efficientdet-d0", "featured_SKU": false, "whats_new_model": false, @@ -13538,8 +13538,8 @@ "Precisions": [ { "int4": "", - "int8": 147.24, - "fp16": 121.24, + "int8": 426.56, + "fp16": 362.73, "fp32": "", "bf16": "", "token_int4": "", @@ -13554,7 +13554,7 @@ "Precisions": [ { "int4": "", - "int8": 9.93, + "int8": 2.8, "fp16": "", "fp32": "", "bf16": "" @@ -13566,10 +13566,10 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "llama-2-7b-chat", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "gemma-2-9b", "featured_SKU": false, - "whats_new_model": false, + "whats_new_model": true, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { @@ -13580,8 +13580,8 @@ "fp16": "", "fp32": "", "bf16": "", - "token_int4": 9.15, - "token_int8": "", + "token_int4": 22.66, + "token_int8": 18.13, "token_fp16": "" } ], @@ -13591,8 +13591,8 @@ "latency": { "Precisions": [ { - "int4": 109.23, - "int8": "", + "int4": 44.13, + "int8": 55.13, "fp16": 
"", "fp32": "", "bf16": "" @@ -13604,8 +13604,8 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "llama-3-8b", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "glm-4-9b-chat", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Accelerator Platforms", @@ -13618,8 +13618,8 @@ "fp16": "", "fp32": "", "bf16": "", - "token_int4": 10.18, - "token_int8": "", + "token_int4": 40.04, + "token_int8": 26.95, "token_fp16": "" } ], @@ -13629,8 +13629,8 @@ "latency": { "Precisions": [ { - "int4": 98.23, - "int8": "", + "int4": 24.97, + "int8": 37.1, "fp16": "", "fp32": "", "bf16": "" @@ -13642,10 +13642,10 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "llama-3.2-3b-instruct", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "llama-2-7b-chat", "featured_SKU": false, - "whats_new_model": true, + "whats_new_model": false, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { @@ -13656,9 +13656,9 @@ "fp16": "", "fp32": "", "bf16": "", - "token_int4": 22.57, - "token_int8": 16.65, - "token_fp16": "" + "token_int4": 45.22, + "token_int8": 33.88, + "token_fp16": 21.45 } ], "Unit": "FPS", @@ -13667,9 +13667,9 @@ "latency": { "Precisions": [ { - "int4": 44.29, - "int8": 60.05, - "fp16": "", + "int4": 22.11, + "int8": 29.51, + "fp16": 46.62, "fp32": "", "bf16": "" } @@ -13680,8 +13680,8 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "llama-3-8b", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Accelerator Platforms", @@ -13694,8 +13694,8 @@ "fp16": "", "fp32": "", "bf16": "", - "token_int4": "", - "token_int8": "", + "token_int4": 45.55, + "token_int8": 30.8, "token_fp16": "" } ], @@ -13705,8 +13705,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 187.74, + "int4": 21.95, + "int8": 32.46, "fp16": "", "fp32": "", "bf16": "" @@ -13718,10 +13718,10 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "mistral-7b-v0.1", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "llama-3.2-3b-instruct", "featured_SKU": false, - "whats_new_model": false, + "whats_new_model": true, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { @@ -13732,9 +13732,9 @@ "fp16": "", "fp32": "", "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" + "token_int4": 69.44, + "token_int8": 57.9, + "token_fp16": 37.69 } ], "Unit": "FPS", @@ -13743,9 +13743,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", + "int4": 14.4, + "int8": 17.27, + "fp16": 26.53, "fp32": "", "bf16": "" } @@ -13756,8 +13756,8 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "mobilenet-v2", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "mask_rcnn_resnet50_atrous_coco", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Accelerator Platforms", @@ -13766,8 +13766,8 @@ "Precisions": [ { "int4": "", - "int8": 1879.14, - "fp16": 1565.12, + "int8": 33.38, + "fp16": 19.04, "fp32": "", "bf16": "", "token_int4": "", @@ -13782,7 +13782,7 @@ "Precisions": [ { "int4": "", - "int8": 0.78, + "int8": 48.67, "fp16": "", "fp32": "", "bf16": "" @@ -13794,10 +13794,10 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "phi-3-mini-4k-instruct", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "mistral-7b-v0.1", 
"featured_SKU": false, - "whats_new_model": true, + "whats_new_model": false, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { @@ -13808,9 +13808,9 @@ "fp16": "", "fp32": "", "bf16": "", - "token_int4": 19.33, - "token_int8": 10.23, - "token_fp16": "" + "token_int4": 45.53, + "token_int8": 32.37, + "token_fp16": 20.21 } ], "Unit": "FPS", @@ -13819,9 +13819,9 @@ "latency": { "Precisions": [ { - "int4": 51.73, - "int8": 97.67, - "fp16": "", + "int4": 21.96, + "int8": 30.89, + "fp16": 49.48, "fp32": "", "bf16": "" } @@ -13832,21 +13832,21 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "qwen2-7b", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "mobilenet-v2", "featured_SKU": false, - "whats_new_model": true, + "whats_new_model": false, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 3134.27, + "fp16": 3004.5, "fp32": "", "bf16": "", - "token_int4": 10.59, + "token_int4": "", "token_int8": "", "token_fp16": "" } @@ -13857,8 +13857,8 @@ "latency": { "Precisions": [ { - "int4": 94.38, - "int8": "", + "int4": "", + "int8": 0.57, "fp16": "", "fp32": "", "bf16": "" @@ -13870,23 +13870,23 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "resnet-50", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "phi-3-mini-4k-instruct", "featured_SKU": false, - "whats_new_model": false, + "whats_new_model": true, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": 820.74, - "fp16": 519.4, + "int8": "", + "fp16": "", "fp32": "", "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" + "token_int4": 69.93, + "token_int8": 51.51, + "token_fp16": 32.84 } ], "Unit": "FPS", @@ -13895,9 +13895,9 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 1.39, - "fp16": "", + "int4": 14.3, + "int8": 19.41, + "fp16": 30.45, "fp32": "", "bf16": "" } @@ -13908,10 +13908,10 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "ssd-resnet34-1200", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "qwen2-7b", "featured_SKU": false, - "whats_new_model": false, + "whats_new_model": true, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { @@ -13922,8 +13922,8 @@ "fp16": "", "fp32": "", "bf16": "", - "token_int4": "", - "token_int8": "", + "token_int4": 45.8, + "token_int8": 32.78, "token_fp16": "" } ], @@ -13933,8 +13933,8 @@ "latency": { "Precisions": [ { - "int4": "", - "int8": 37.02, + "int4": 21.83, + "int8": 30.5, "fp16": "", "fp32": "", "bf16": "" @@ -13946,8 +13946,8 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "ssd_mobilenet_v1_coco", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "resnet-50", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Accelerator Platforms", @@ -13956,8 +13956,8 @@ "Precisions": [ { "int4": "", - "int8": 993.56, - "fp16": 762.67, + "int8": 1921.18, + "fp16": 1329.28, "fp32": "", "bf16": "", "token_int4": "", @@ -13972,7 +13972,7 @@ "Precisions": [ { "int4": "", - "int8": 1.33, + "int8": 0.78, "fp16": "", "fp32": "", "bf16": "" @@ -13984,8 +13984,8 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 140 dGPU", - "Model": "yolo_v8n", + "Platform": "Intel® Data Center GPU Flex 170 dGPU", + "Model": "ssd-resnet34-1200", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Accelerator 
Platforms", @@ -13994,8 +13994,8 @@ "Precisions": [ { "int4": "", - "int8": 319.11, - "fp16": 290.1, + "int8": 133.77, + "fp16": "", "fp32": "", "bf16": "", "token_int4": "", @@ -14010,7 +14010,7 @@ "Precisions": [ { "int4": "", - "int8": 3.76, + "int8": 13.93, "fp16": "", "fp32": "", "bf16": "" @@ -14023,7 +14023,7 @@ }, { "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "bert-base-cased", + "Model": "ssd_mobilenet_v1_coco", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Accelerator Platforms", @@ -14032,8 +14032,8 @@ "Precisions": [ { "int4": "", - "int8": 385.87, - "fp16": 420.99, + "int8": 2200.83, + "fp16": 1665.15, "fp32": "", "bf16": "", "token_int4": "", @@ -14048,7 +14048,7 @@ "Precisions": [ { "int4": "", - "int8": 2.99, + "int8": 0.78, "fp16": "", "fp32": "", "bf16": "" @@ -14061,7 +14061,7 @@ }, { "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "efficientdet-d0", + "Model": "stable-diffusion-v1-5", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Accelerator Platforms", @@ -14070,8 +14070,8 @@ "Precisions": [ { "int4": "", - "int8": 426.56, - "fp16": 362.73, + "int8": "", + "fp16": "", "fp32": "", "bf16": "", "token_int4": "", @@ -14086,8 +14086,8 @@ "Precisions": [ { "int4": "", - "int8": 2.8, - "fp16": "", + "int8": 2.33, + "fp16": 2.36, "fp32": "", "bf16": "" } @@ -14099,21 +14099,21 @@ }, { "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "gemma-2-9b", + "Model": "yolo_v8n", "featured_SKU": false, - "whats_new_model": true, + "whats_new_model": false, "PlatformType": "Accelerator Platforms", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 759.93, + "fp16": 694.57, "fp32": "", "bf16": "", - "token_int4": 22.66, - "token_int8": 18.13, + "token_int4": "", + "token_int8": "", "token_fp16": "" } ], @@ -14123,8 +14123,8 @@ "latency": { "Precisions": [ { - "int4": 44.13, - "int8": 55.13, + "int4": "", + "int8": 1.96, "fp16": "", "fp32": "", "bf16": "" @@ -14136,22 +14136,22 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "glm-4-9b-chat", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "efficientdet-d0", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Accelerator Platforms", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 36.93, "fp16": "", - "fp32": "", + "fp32": 27.64, "bf16": "", - "token_int4": 40.04, - "token_int8": 26.95, + "token_int4": "", + "token_int8": "", "token_fp16": "" } ], @@ -14159,50 +14159,12 @@ "UnitDesc": "higher is better" }, "latency": { - "Precisions": [ - { - "int4": 24.97, - "int8": 37.1, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "llama-2-7b-chat", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { "Precisions": [ { "int4": "", "int8": "", "fp16": "", "fp32": "", - "bf16": "", - "token_int4": 45.22, - "token_int8": 33.88, - "token_fp16": 21.45 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 22.11, - "int8": 29.51, - "fp16": 46.62, - "fp32": "", "bf16": "" } ], @@ -14212,1121 +14174,19 @@ } }, { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "llama-3-8b", + "Platform": "Intel® Processor 
N100 CPU+iGPU", + "Model": "mobilenet-v2", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Accelerator Platforms", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "", - "token_int4": 45.55, - "token_int8": 30.8, - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 21.95, - "int8": 32.46, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "llama-3.2-3b-instruct", - "featured_SKU": false, - "whats_new_model": true, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "", - "token_int4": 69.44, - "token_int8": 57.9, - "token_fp16": 37.69 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 14.4, - "int8": 17.27, - "fp16": 26.53, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "mask_rcnn_resnet50_atrous_coco", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 33.38, - "fp16": 19.04, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 48.67, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "mistral-7b-v0.1", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "", - "token_int4": 45.53, - "token_int8": 32.37, - "token_fp16": 20.21 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 21.96, - "int8": 30.89, - "fp16": 49.48, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "mobilenet-v2", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 3134.27, - "fp16": 3004.5, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 0.57, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "phi-3-mini-4k-instruct", - "featured_SKU": false, - "whats_new_model": true, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "", - "token_int4": 69.93, - "token_int8": 51.51, - "token_fp16": 32.84 - } - ], - "Unit": "FPS", - 
"UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 14.3, - "int8": 19.41, - "fp16": 30.45, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "qwen2-7b", - "featured_SKU": false, - "whats_new_model": true, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "", - "token_int4": 45.8, - "token_int8": 32.78, - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": 21.83, - "int8": 30.5, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "resnet-50", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 1921.18, - "fp16": 1329.28, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 0.78, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "ssd-resnet34-1200", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 133.77, - "fp16": "", - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 13.93, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "ssd_mobilenet_v1_coco", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 2200.83, - "fp16": 1665.15, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 0.78, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "stable-diffusion-v1-5", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 2.33, - "fp16": 2.36, - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Data Center GPU Flex 170 dGPU", - "Model": "yolo_v8n", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Accelerator Platforms", - "Parameters": { - "throughput": { - "Precisions": [ - { 
- "int4": "", - "int8": 759.93, - "fp16": 694.57, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 1.96, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "efficientdet-d0", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 36.93, - "fp16": "", - "fp32": 27.64, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "mobilenet-v2", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 484.32, - "fp16": "", - "fp32": 278.4, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "resnet-50", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 112.23, - "fp16": "", - "fp32": 42.14, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "ssd-resnet34-1200", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 2.04, - "fp16": "", - "fp32": 0.6, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "ssd_mobilenet_v1_coco", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 216.96, - "fp16": "", - "fp32": 94.92, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": 
"yolo11", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 34.52, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU+iGPU", - "Model": "yolo_v8n", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 61.06, - "fp16": "", - "fp32": 28.61, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "efficientdet-d0", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 15.44, - "fp16": "", - "fp32": 12.75, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 66.23, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "mobilenet-v2", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 296.53, - "fp16": "", - "fp32": 183.3, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.8, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "resnet-50", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 48.77, - "fp16": "", - "fp32": 20.13, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 21.88, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "ssd-resnet34-1200", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 0.82, - "fp16": "", - "fp32": 0.31, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 1224.62, - "fp16": "", - 
"fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "ssd_mobilenet_v1_coco", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 106.12, - "fp16": "", - "fp32": 49.52, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 9.72, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "yolo11", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": 15.36, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": "", - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 CPU-only", - "Model": "yolo_v8n", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 23.65, - "fp16": "", - "fp32": 12.86, - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 43.43, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "efficientdet-d0", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 33.69, - "fp16": 30.91, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 38.02, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "mobilenet-v2", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 337.95, - "fp16": 267.38, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": "", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 3.84, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "resnet-50", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 81.72, - "fp16": 49.76, - "fp32": "", - "bf16": "", - "token_int4": "", - "token_int8": 
"", - "token_fp16": "" - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - }, - "latency": { - "Precisions": [ - { - "int4": "", - "int8": 13.15, - "fp16": "", - "fp32": "", - "bf16": "" - } - ], - "Unit": "ms", - "UnitDesc": "lower is better" - } - } - }, - { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "ssd-resnet34-1200", - "featured_SKU": false, - "whats_new_model": false, - "PlatformType": "Mobile Platforms (Intel® Atom™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "int4": "", - "int8": 1.62, - "fp16": 1.01, - "fp32": "", + "int4": "", + "int8": 484.32, + "fp16": "", + "fp32": 278.4, "bf16": "", "token_int4": "", "token_int8": "", @@ -15340,7 +14200,7 @@ "Precisions": [ { "int4": "", - "int8": 622.97, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -15352,8 +14212,8 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "ssd_mobilenet_v1_coco", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "resnet-50", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Mobile Platforms (Intel® Atom™)", @@ -15362,9 +14222,9 @@ "Precisions": [ { "int4": "", - "int8": 164.31, - "fp16": 106.85, - "fp32": "", + "int8": 112.23, + "fp16": "", + "fp32": 42.14, "bf16": "", "token_int4": "", "token_int8": "", @@ -15378,7 +14238,7 @@ "Precisions": [ { "int4": "", - "int8": 7.35, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -15390,8 +14250,8 @@ } }, { - "Platform": "Intel® Processor N100 iGPU-only", - "Model": "yolo_v8n", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "ssd-resnet34-1200", "featured_SKU": false, "whats_new_model": false, "PlatformType": "Mobile Platforms (Intel® Atom™)", @@ -15400,9 +14260,9 @@ "Precisions": [ { "int4": "", - "int8": 47.04, - "fp16": 34.97, - "fp32": "", + "int8": 2.04, + "fp16": "", + "fp32": 0.6, "bf16": "", "token_int4": "", "token_int8": "", @@ -15416,7 +14276,7 @@ "Precisions": [ { "int4": "", - "int8": 23.03, + "int8": "", "fp16": "", "fp32": "", "bf16": "" @@ -15428,20 +14288,20 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "bert-base-cased", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "ssd_mobilenet_v1_coco", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 216.96, "fp16": "", - "fp32": "", - "bf16": 2314.52, + "fp32": 94.92, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15457,7 +14317,7 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": 6.96 + "bf16": "" } ], "Unit": "ms", @@ -15466,11 +14326,11 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "efficientdet-d0", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "yolo11", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ @@ -15478,8 +14338,8 @@ "int4": "", "int8": "", "fp16": "", - "fp32": "", - "bf16": 1153.0, + "fp32": 34.52, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15495,7 +14355,7 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": 6.38 + "bf16": "" } ], "Unit": "ms", @@ -15504,23 +14364,23 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "gemma-2-9b", + "Platform": "Intel® Processor N100 CPU+iGPU", + "Model": "yolo_v8n", "featured_SKU": false, - 
"whats_new_model": true, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 61.06, "fp16": "", - "fp32": "", + "fp32": 28.61, "bf16": "", - "token_int4": 15.13, - "token_int8": 11.3, - "token_fp16": 7.91 + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15529,9 +14389,9 @@ "latency": { "Precisions": [ { - "int4": 66.06, - "int8": 88.43, - "fp16": 126.41, + "int4": "", + "int8": "", + "fp16": "", "fp32": "", "bf16": "" } @@ -15542,23 +14402,23 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "glm-4-9b-chat", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "efficientdet-d0", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 15.44, "fp16": "", - "fp32": "", + "fp32": 12.75, "bf16": "", - "token_int4": 18.48, - "token_int8": 12.91, - "token_fp16": 8.64 + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15567,9 +14427,9 @@ "latency": { "Precisions": [ { - "int4": 54.09, - "int8": 77.42, - "fp16": 115.66, + "int4": "", + "int8": 66.23, + "fp16": "", "fp32": "", "bf16": "" } @@ -15580,23 +14440,23 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "llama-2-7b-chat", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "mobilenet-v2", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 296.53, "fp16": "", - "fp32": "", + "fp32": 183.3, "bf16": "", - "token_int4": 21.97, - "token_int8": 15.54, - "token_fp16": 11.29 + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15605,9 +14465,9 @@ "latency": { "Precisions": [ { - "int4": 45.5, - "int8": 64.31, - "fp16": 88.54, + "int4": "", + "int8": 3.8, + "fp16": "", "fp32": "", "bf16": "" } @@ -15618,23 +14478,23 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "llama-3-8b", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "resnet-50", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 48.77, "fp16": "", - "fp32": "", + "fp32": 20.13, "bf16": "", - "token_int4": 20.0, - "token_int8": 14.57, - "token_fp16": 9.82 + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15643,9 +14503,9 @@ "latency": { "Precisions": [ { - "int4": 49.98, - "int8": 68.59, - "fp16": 101.73, + "int4": "", + "int8": 21.88, + "fp16": "", "fp32": "", "bf16": "" } @@ -15656,20 +14516,20 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "ssd-resnet34-1200", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 0.82, "fp16": "", - "fp32": "", - "bf16": 35.29, + "fp32": 0.31, + 
"bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15682,10 +14542,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 1224.62, "fp16": "", "fp32": "", - "bf16": 100.04 + "bf16": "" } ], "Unit": "ms", @@ -15694,23 +14554,23 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "mistral-7b-v0.1", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "ssd_mobilenet_v1_coco", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 106.12, "fp16": "", - "fp32": "", + "fp32": 49.52, "bf16": "", - "token_int4": 22.73, - "token_int8": 15.18, - "token_fp16": 10.61 + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15719,9 +14579,9 @@ "latency": { "Precisions": [ { - "int4": 43.98, - "int8": 65.87, - "fp16": 94.18, + "int4": "", + "int8": 9.72, + "fp16": "", "fp32": "", "bf16": "" } @@ -15732,11 +14592,11 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "mobilenet-v2", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "yolo11", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ @@ -15744,8 +14604,8 @@ "int4": "", "int8": "", "fp16": "", - "fp32": "", - "bf16": 13124.67, + "fp32": 15.36, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15761,7 +14621,7 @@ "int8": "", "fp16": "", "fp32": "", - "bf16": 0.83 + "bf16": "" } ], "Unit": "ms", @@ -15770,23 +14630,23 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "phi-3-mini-4k-instruct", + "Platform": "Intel® Processor N100 CPU-only", + "Model": "yolo_v8n", "featured_SKU": false, - "whats_new_model": true, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", + "int8": 23.65, "fp16": "", - "fp32": "", + "fp32": 12.86, "bf16": "", - "token_int4": 31.5, - "token_int8": 25.32, - "token_fp16": 18.29 + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15795,9 +14655,9 @@ "latency": { "Precisions": [ { - "int4": 31.74, - "int8": 39.49, - "fp16": 54.66, + "int4": "", + "int8": 43.43, + "fp16": "", "fp32": "", "bf16": "" } @@ -15808,23 +14668,23 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "qwen2-7b", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "efficientdet-d0", "featured_SKU": false, - "whats_new_model": true, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "whats_new_model": false, + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 33.69, + "fp16": 30.91, "fp32": "", "bf16": "", - "token_int4": 21.22, - "token_int8": 15.5, - "token_fp16": 10.78 + "token_int4": "", + "token_int8": "", + "token_fp16": "" } ], "Unit": "FPS", @@ -15833,9 +14693,9 @@ "latency": { "Precisions": [ { - "int4": 47.12, - "int8": 64.49, - "fp16": 92.7, + "int4": "", + "int8": 38.02, + "fp16": "", "fp32": "", "bf16": "" } @@ -15846,20 +14706,20 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "resnet-50", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": 
"mobilenet-v2", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 337.95, + "fp16": 267.38, "fp32": "", - "bf16": 10591.04, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15872,10 +14732,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 3.84, "fp16": "", "fp32": "", - "bf16": 1.8 + "bf16": "" } ], "Unit": "ms", @@ -15884,20 +14744,20 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "ssd-resnet34-1200", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "resnet-50", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 81.72, + "fp16": 49.76, "fp32": "", - "bf16": 204.32, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15910,10 +14770,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 13.15, "fp16": "", "fp32": "", - "bf16": 17.56 + "bf16": "" } ], "Unit": "ms", @@ -15922,20 +14782,20 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "ssd_mobilenet_v1_coco", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "ssd-resnet34-1200", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 1.62, + "fp16": 1.01, "fp32": "", - "bf16": 7286.92, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15948,10 +14808,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 622.97, "fp16": "", "fp32": "", - "bf16": 1.08 + "bf16": "" } ], "Unit": "ms", @@ -15960,20 +14820,20 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", - "Model": "yolo11", + "Platform": "Intel® Processor N100 iGPU-only", + "Model": "ssd_mobilenet_v1_coco", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 164.31, + "fp16": 106.85, "fp32": "", - "bf16": 2130.01, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -15986,7 +14846,7 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 7.35, "fp16": "", "fp32": "", "bf16": "" @@ -15998,20 +14858,20 @@ } }, { - "Platform": "Intel® Xeon® GNR29 CPU-only", + "Platform": "Intel® Processor N100 iGPU-only", "Model": "yolo_v8n", "featured_SKU": false, "whats_new_model": false, - "PlatformType": "Server Platforms (Intel® Xeon®)", + "PlatformType": "Mobile Platforms (Intel® Atom™)", "Parameters": { "throughput": { "Precisions": [ { "int4": "", - "int8": "", - "fp16": "", + "int8": 47.04, + "fp16": 34.97, "fp32": "", - "bf16": 2528.52, + "bf16": "", "token_int4": "", "token_int8": "", "token_fp16": "" @@ -16024,10 +14884,10 @@ "Precisions": [ { "int4": "", - "int8": "", + "int8": 23.03, "fp16": "", "fp32": "", - "bf16": 4.55 + "bf16": "" } ], "Unit": "ms", @@ -19457,7 +18317,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "bert-base-cased ", + "Model": "bert-base-cased", "featured_SKU": true, "whats_new_model": false, 
"PlatformType": "Server Platforms (Intel® Xeon®)", @@ -19495,7 +18355,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "efficientdet-d0 ", + "Model": "efficientdet-d0", "featured_SKU": true, "whats_new_model": false, "PlatformType": "Server Platforms (Intel® Xeon®)", @@ -19533,7 +18393,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "mask_rcnn_resnet50_atrous_coco ", + "Model": "mask_rcnn_resnet50_atrous_coco", "featured_SKU": true, "whats_new_model": false, "PlatformType": "Server Platforms (Intel® Xeon®)", @@ -19571,7 +18431,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "mobilenet-v2 ", + "Model": "mobilenet-v2", "featured_SKU": true, "whats_new_model": false, "PlatformType": "Server Platforms (Intel® Xeon®)", @@ -19609,7 +18469,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "resnet-50 ", + "Model": "resnet-50", "featured_SKU": true, "whats_new_model": false, "PlatformType": "Server Platforms (Intel® Xeon®)", @@ -19647,7 +18507,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "ssd-resnet34-1200 ", + "Model": "ssd-resnet34-1200", "featured_SKU": true, "whats_new_model": false, "PlatformType": "Server Platforms (Intel® Xeon®)", @@ -19685,7 +18545,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "ssd_mobilenet_v1_coco ", + "Model": "ssd_mobilenet_v1_coco", "featured_SKU": true, "whats_new_model": false, "PlatformType": "Server Platforms (Intel® Xeon®)", @@ -19723,7 +18583,7 @@ }, { "Platform": "Intel® Xeon® 6979P CPU-only", - "Model": "yolo_v8n ", + "Model": "yolo_v8n", "featured_SKU": true, "whats_new_model": false, "PlatformType": "Server Platforms (Intel® Xeon®)", diff --git a/docs/sphinx_setup/_static/benchmarks_files/graph-config.json b/docs/sphinx_setup/_static/benchmarks_files/graph-config.json index 29fbe714094b74..e5fe953b72bca1 100644 --- a/docs/sphinx_setup/_static/benchmarks_files/graph-config.json +++ b/docs/sphinx_setup/_static/benchmarks_files/graph-config.json @@ -7,6 +7,9 @@ "BF16": "bf16", "FP32_OV": "fp32_ov", "FP32_OVMS": "fp32_ovms", + "TOKEN_INT4": "token_int4", + "TOKEN_INT8": "token_int8", + "TOKEN_FP16": "token_fp16", "INT8_OV": "int8_ov", "INT8_OVMS": "int8_ovms", "THROUGHPUT": "Throughput", @@ -73,6 +76,21 @@ "color": "#00536a", "label": "INT8 OVMS" }, + "token_int4": { + "data": null, + "color": "#00536a", + "label": "T INT4" + }, + "token_int8": { + "data": null, + "color": "#00636a", + "label": "T INT8" + }, + "token_fp16": { + "data": null, + "color": "#00736a", + "label": "T INT16" + }, "Vllm": { "Latency": { "data": null, From c6d8e8228dc105391f1d496d4b03bb16698e8c6b Mon Sep 17 00:00:00 2001 From: Karol Blaszczak Date: Tue, 19 Nov 2024 23:25:04 +0100 Subject: [PATCH 12/62] Add 2024.5 Selector Tool (#27619) (#27623) port: https://github.com/openvinotoolkit/openvino/pull/27619 Co-authored-by: Alexander Suvorov --- .../get-started/install-openvino.rst | 4 +- .../{index-f34d1fad.js => index-Codcw3jz.js} | 0 .../selector-tool/assets/selector-4f9c3c09.js | 61 ------------------- ...tor-b0e2854e.css => selector-BC2lpCQ9.css} | 0 .../selector-tool/assets/selector-Bu10eOtw.js | 61 +++++++++++++++++++ ...tor-8d4cf1d.html => selector-451bede.html} | 7 +-- 6 files changed, 66 insertions(+), 67 deletions(-) rename docs/sphinx_setup/_static/selector-tool/assets/{index-f34d1fad.js => index-Codcw3jz.js} (100%) delete mode 100644 docs/sphinx_setup/_static/selector-tool/assets/selector-4f9c3c09.js rename docs/sphinx_setup/_static/selector-tool/assets/{selector-b0e2854e.css => 
selector-BC2lpCQ9.css} (100%) create mode 100644 docs/sphinx_setup/_static/selector-tool/assets/selector-Bu10eOtw.js rename docs/sphinx_setup/_static/selector-tool/{selector-8d4cf1d.html => selector-451bede.html} (66%) diff --git a/docs/articles_en/get-started/install-openvino.rst b/docs/articles_en/get-started/install-openvino.rst index 22d889c18f71cd..71afff035b2c81 100644 --- a/docs/articles_en/get-started/install-openvino.rst +++ b/docs/articles_en/get-started/install-openvino.rst @@ -19,9 +19,9 @@ Install OpenVINO™ 2024.4 .. raw:: html - + - + OpenVINO 2024.5, described here, is not a Long-Term-Support version! All currently supported versions are: diff --git a/docs/sphinx_setup/_static/selector-tool/assets/index-f34d1fad.js b/docs/sphinx_setup/_static/selector-tool/assets/index-Codcw3jz.js similarity index 100% rename from docs/sphinx_setup/_static/selector-tool/assets/index-f34d1fad.js rename to docs/sphinx_setup/_static/selector-tool/assets/index-Codcw3jz.js diff --git a/docs/sphinx_setup/_static/selector-tool/assets/selector-4f9c3c09.js b/docs/sphinx_setup/_static/selector-tool/assets/selector-4f9c3c09.js deleted file mode 100644 index 0836ad1aed41f3..00000000000000 --- a/docs/sphinx_setup/_static/selector-tool/assets/selector-4f9c3c09.js +++ /dev/null @@ -1,61 +0,0 @@ -var Xf=Object.defineProperty;var Jf=(e,t,n)=>t in e?Xf(e,t,{enumerable:!0,configurable:!0,writable:!0,value:n}):e[t]=n;var Ve=(e,t,n)=>(Jf(e,typeof t!="symbol"?t+"":t,n),n);function uc(e){return e&&e.__esModule&&Object.prototype.hasOwnProperty.call(e,"default")?e.default:e}var cc={exports:{}},po={},dc={exports:{}},D={};/** - * @license React - * react.production.min.js - * - * Copyright (c) Facebook, Inc. and its affiliates. - * - * This source code is licensed under the MIT license found in the - * LICENSE file in the root directory of this source tree. 
- */var qr=Symbol.for("react.element"),Zf=Symbol.for("react.portal"),qf=Symbol.for("react.fragment"),ep=Symbol.for("react.strict_mode"),tp=Symbol.for("react.profiler"),np=Symbol.for("react.provider"),rp=Symbol.for("react.context"),ip=Symbol.for("react.forward_ref"),op=Symbol.for("react.suspense"),lp=Symbol.for("react.memo"),sp=Symbol.for("react.lazy"),Oa=Symbol.iterator;function ap(e){return e===null||typeof e!="object"?null:(e=Oa&&e[Oa]||e["@@iterator"],typeof e=="function"?e:null)}var fc={isMounted:function(){return!1},enqueueForceUpdate:function(){},enqueueReplaceState:function(){},enqueueSetState:function(){}},pc=Object.assign,hc={};function sr(e,t,n){this.props=e,this.context=t,this.refs=hc,this.updater=n||fc}sr.prototype.isReactComponent={};sr.prototype.setState=function(e,t){if(typeof e!="object"&&typeof e!="function"&&e!=null)throw Error("setState(...): takes an object of state variables to update or a function which returns an object of state variables.");this.updater.enqueueSetState(this,e,t,"setState")};sr.prototype.forceUpdate=function(e){this.updater.enqueueForceUpdate(this,e,"forceUpdate")};function mc(){}mc.prototype=sr.prototype;function hs(e,t,n){this.props=e,this.context=t,this.refs=hc,this.updater=n||fc}var ms=hs.prototype=new mc;ms.constructor=hs;pc(ms,sr.prototype);ms.isPureReactComponent=!0;var _a=Array.isArray,gc=Object.prototype.hasOwnProperty,gs={current:null},vc={key:!0,ref:!0,__self:!0,__source:!0};function yc(e,t,n){var r,i={},o=null,l=null;if(t!=null)for(r in t.ref!==void 0&&(l=t.ref),t.key!==void 0&&(o=""+t.key),t)gc.call(t,r)&&!vc.hasOwnProperty(r)&&(i[r]=t[r]);var s=arguments.length-2;if(s===1)i.children=n;else if(1{const e={type:"size",height:document.body.offsetHeight};window.parent.postMessage(e)};new ResizeObserver(kp).observe(document.body);function me(e){"@babel/helpers - typeof";return me=typeof Symbol=="function"&&typeof Symbol.iterator=="symbol"?function(t){return typeof t}:function(t){return t&&typeof Symbol=="function"&&t.constructor===Symbol&&t!==Symbol.prototype?"symbol":typeof t},me(e)}function pt(e,t){if(!(e instanceof t))throw new TypeError("Cannot call a class as a function")}function Sp(e,t){if(me(e)!=="object"||e===null)return e;var n=e[Symbol.toPrimitive];if(n!==void 0){var r=n.call(e,t||"default");if(me(r)!=="object")return r;throw new TypeError("@@toPrimitive must return a primitive value.")}return(t==="string"?String:Number)(e)}function kc(e){var t=Sp(e,"string");return me(t)==="symbol"?t:String(t)}function Pa(e,t){for(var n=0;ne.length)&&(t=e.length);for(var n=0,r=new Array(t);n1&&arguments[1]!==void 0?arguments[1]:{};pt(this,e),this.init(t,n)}return ht(e,[{key:"init",value:function(n){var r=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};this.prefix=r.prefix||"i18next:",this.logger=n||Np,this.options=r,this.debug=r.debug}},{key:"setDebug",value:function(n){this.debug=n}},{key:"log",value:function(){for(var n=arguments.length,r=new Array(n),i=0;i1?r-1:0),o=1;o-1?s.replace(/###/g,"."):s}function i(){return!e||typeof e=="string"}for(var o=typeof t!="string"?[].concat(t):t.split(".");o.length>1;){if(i())return{};var l=r(o.shift());!e[l]&&n&&(e[l]=new n),Object.prototype.hasOwnProperty.call(e,l)?e=e[l]:e={}}return i()?{}:{obj:e,k:r(o.shift())}}function Ta(e,t,n){var r=ws(e,t,Object),i=r.obj,o=r.k;i[o]=n}function xp(e,t,n,r){var i=ws(e,t,Object),o=i.obj,l=i.k;o[l]=o[l]||[],r&&(o[l]=o[l].concat(n)),r||o[l].push(n)}function bi(e,t){var n=ws(e,t),r=n.obj,i=n.k;if(r)return r[i]}function Ra(e,t,n){var r=bi(e,n);return r!==void 
0?r:bi(t,n)}function Nc(e,t,n){for(var r in t)r!=="__proto__"&&r!=="constructor"&&(r in e?typeof e[r]=="string"||e[r]instanceof String||typeof t[r]=="string"||t[r]instanceof String?n&&(e[r]=t[r]):Nc(e[r],t[r],n):e[r]=t[r]);return e}function Tn(e){return e.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g,"\\$&")}var Cp={"&":"&","<":"<",">":">",'"':""","'":"'","/":"/"};function Ip(e){return typeof e=="string"?e.replace(/[&<>"'\/]/g,function(t){return Cp[t]}):e}var mo=typeof window<"u"&&window.navigator&&typeof window.navigator.userAgentData>"u"&&window.navigator.userAgent&&window.navigator.userAgent.indexOf("MSIE")>-1,Tp=[" ",",","?","!",";"];function Rp(e,t,n){t=t||"",n=n||"";var r=Tp.filter(function(s){return t.indexOf(s)<0&&n.indexOf(s)<0});if(r.length===0)return!0;var i=new RegExp("(".concat(r.map(function(s){return s==="?"?"\\?":s}).join("|"),")")),o=!i.test(e);if(!o){var l=e.indexOf(n);l>0&&!i.test(e.substring(0,l))&&(o=!0)}return o}function La(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(i){return Object.getOwnPropertyDescriptor(e,i).enumerable})),n.push.apply(n,r)}return n}function si(e){for(var t=1;t"u"||!Reflect.construct||Reflect.construct.sham)return!1;if(typeof Proxy=="function")return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch{return!1}}function Pc(e,t){var n=arguments.length>2&&arguments[2]!==void 0?arguments[2]:".";if(e){if(e[t])return e[t];for(var r=t.split(n),i=e,o=0;oo+l;)l++,s=r.slice(o,o+l).join(n),a=i[s];if(a===void 0)return;if(a===null)return null;if(t.endsWith(s)){if(typeof a=="string")return a;if(s&&typeof a[s]=="string")return a[s]}var u=r.slice(o+l).join(n);return u?Pc(a,u,n):void 0}i=i[r[o]]}return i}}var jp=function(e){ho(n,e);var t=Lp(n);function n(r){var i,o=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{ns:["translation"],defaultNS:"translation"};return pt(this,n),i=t.call(this),mo&&nn.call(Gt(i)),i.data=r||{},i.options=o,i.options.keySeparator===void 0&&(i.options.keySeparator="."),i.options.ignoreJSONStructure===void 0&&(i.options.ignoreJSONStructure=!0),i}return ht(n,[{key:"addNamespaces",value:function(i){this.options.ns.indexOf(i)<0&&this.options.ns.push(i)}},{key:"removeNamespaces",value:function(i){var o=this.options.ns.indexOf(i);o>-1&&this.options.ns.splice(o,1)}},{key:"getResource",value:function(i,o,l){var s=arguments.length>3&&arguments[3]!==void 0?arguments[3]:{},a=s.keySeparator!==void 0?s.keySeparator:this.options.keySeparator,u=s.ignoreJSONStructure!==void 0?s.ignoreJSONStructure:this.options.ignoreJSONStructure,p=[i,o];l&&typeof l!="string"&&(p=p.concat(l)),l&&typeof l=="string"&&(p=p.concat(a?l.split(a):l)),i.indexOf(".")>-1&&(p=i.split("."));var d=bi(this.data,p);return d||!u||typeof l!="string"?d:Pc(this.data&&this.data[i]&&this.data[i][o],l,a)}},{key:"addResource",value:function(i,o,l,s){var a=arguments.length>4&&arguments[4]!==void 0?arguments[4]:{silent:!1},u=this.options.keySeparator;u===void 0&&(u=".");var p=[i,o];l&&(p=p.concat(u?l.split(u):l)),i.indexOf(".")>-1&&(p=i.split("."),s=o,o=p[1]),this.addNamespaces(o),Ta(this.data,p,s),a.silent||this.emit("added",i,o,l,s)}},{key:"addResources",value:function(i,o,l){var s=arguments.length>3&&arguments[3]!==void 0?arguments[3]:{silent:!1};for(var a in l)(typeof l[a]=="string"||Object.prototype.toString.apply(l[a])==="[object 
Array]")&&this.addResource(i,o,a,l[a],{silent:!0});s.silent||this.emit("added",i,o,l)}},{key:"addResourceBundle",value:function(i,o,l,s,a){var u=arguments.length>5&&arguments[5]!==void 0?arguments[5]:{silent:!1},p=[i,o];i.indexOf(".")>-1&&(p=i.split("."),s=l,l=o,o=p[1]),this.addNamespaces(o);var d=bi(this.data,p)||{};s?Nc(d,l,a):d=si(si({},d),l),Ta(this.data,p,d),u.silent||this.emit("added",i,o,l)}},{key:"removeResourceBundle",value:function(i,o){this.hasResourceBundle(i,o)&&delete this.data[i][o],this.removeNamespaces(o),this.emit("removed",i,o)}},{key:"hasResourceBundle",value:function(i,o){return this.getResource(i,o)!==void 0}},{key:"getResourceBundle",value:function(i,o){return o||(o=this.options.defaultNS),this.options.compatibilityAPI==="v1"?si(si({},{}),this.getResource(i,o)):this.getResource(i,o)}},{key:"getDataByLanguage",value:function(i){return this.data[i]}},{key:"hasLanguageSomeTranslations",value:function(i){var o=this.getDataByLanguage(i),l=o&&Object.keys(o)||[];return!!l.find(function(s){return o[s]&&Object.keys(o[s]).length>0})}},{key:"toJSON",value:function(){return this.data}}]),n}(nn),Ec={processors:{},addPostProcessor:function(t){this.processors[t.name]=t},handle:function(t,n,r,i,o){var l=this;return t.forEach(function(s){l.processors[s]&&(n=l.processors[s].process(n,r,i,o))}),n}};function Aa(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(i){return Object.getOwnPropertyDescriptor(e,i).enumerable})),n.push.apply(n,r)}return n}function Oe(e){for(var t=1;t"u"||!Reflect.construct||Reflect.construct.sham)return!1;if(typeof Proxy=="function")return!0;try{return Boolean.prototype.valueOf.call(Reflect.construct(Boolean,[],function(){})),!0}catch{return!1}}var ja={},Fa=function(e){ho(n,e);var t=Fp(n);function n(r){var i,o=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};return pt(this,n),i=t.call(this),mo&&nn.call(Gt(i)),Ep(["resourceStore","languageUtils","pluralResolver","interpolator","backendConnector","i18nFormat","utils"],r,Gt(i)),i.options=o,i.options.keySeparator===void 0&&(i.options.keySeparator="."),i.logger=kt.create("translator"),i}return ht(n,[{key:"changeLanguage",value:function(i){i&&(this.language=i)}},{key:"exists",value:function(i){var o=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{interpolation:{}};if(i==null)return!1;var l=this.resolve(i,o);return l&&l.res!==void 0}},{key:"extractFromKey",value:function(i,o){var l=o.nsSeparator!==void 0?o.nsSeparator:this.options.nsSeparator;l===void 0&&(l=":");var s=o.keySeparator!==void 0?o.keySeparator:this.options.keySeparator,a=o.ns||this.options.defaultNS||[],u=l&&i.indexOf(l)>-1,p=!this.options.userDefinedKeySeparator&&!o.keySeparator&&!this.options.userDefinedNsSeparator&&!o.nsSeparator&&!Rp(i,l,s);if(u&&!p){var d=i.match(this.interpolator.nestingRegexp);if(d&&d.length>0)return{key:i,namespaces:a};var h=i.split(l);(l!==s||l===s&&this.options.ns.indexOf(h[0])>-1)&&(a=h.shift()),i=h.join(s)}return typeof a=="string"&&(a=[a]),{key:i,namespaces:a}}},{key:"translate",value:function(i,o,l){var s=this;if(me(o)!=="object"&&this.options.overloadTranslationOptionHandler&&(o=this.options.overloadTranslationOptionHandler(arguments)),o||(o={}),i==null)return"";Array.isArray(i)||(i=[String(i)]);var a=o.returnDetails!==void 0?o.returnDetails:this.options.returnDetails,u=o.keySeparator!==void 
0?o.keySeparator:this.options.keySeparator,p=this.extractFromKey(i[i.length-1],o),d=p.key,h=p.namespaces,g=h[h.length-1],y=o.lng||this.language,k=o.appendNamespaceToCIMode||this.options.appendNamespaceToCIMode;if(y&&y.toLowerCase()==="cimode"){if(k){var _=o.nsSeparator||this.options.nsSeparator;return a?{res:"".concat(g).concat(_).concat(d),usedKey:d,exactUsedKey:d,usedLng:y,usedNS:g}:"".concat(g).concat(_).concat(d)}return a?{res:d,usedKey:d,exactUsedKey:d,usedLng:y,usedNS:g}:d}var f=this.resolve(i,o),c=f&&f.res,m=f&&f.usedKey||d,w=f&&f.exactUsedKey||d,O=Object.prototype.toString.apply(c),S=["[object Number]","[object Function]","[object RegExp]"],P=o.joinArrays!==void 0?o.joinArrays:this.options.joinArrays,E=!this.i18nFormat||this.i18nFormat.handleAsObject,A=typeof c!="string"&&typeof c!="boolean"&&typeof c!="number";if(E&&c&&A&&S.indexOf(O)<0&&!(typeof P=="string"&&O==="[object Array]")){if(!o.returnObjects&&!this.options.returnObjects){this.options.returnedObjectHandler||this.logger.warn("accessing an object - but returnObjects options is not enabled!");var x=this.options.returnedObjectHandler?this.options.returnedObjectHandler(m,c,Oe(Oe({},o),{},{ns:h})):"key '".concat(d," (").concat(this.language,")' returned an object instead of string.");return a?(f.res=x,f):x}if(u){var H=O==="[object Array]",Te=H?[]:{},Nt=H?w:m;for(var rt in c)if(Object.prototype.hasOwnProperty.call(c,rt)){var xn="".concat(Nt).concat(u).concat(rt);Te[rt]=this.translate(xn,Oe(Oe({},o),{joinArrays:!1,ns:h})),Te[rt]===xn&&(Te[rt]=c[rt])}c=Te}}else if(E&&typeof P=="string"&&O==="[object Array]")c=c.join(P),c&&(c=this.extendTranslation(c,i,o,l));else{var mt=!1,it=!1,C=o.count!==void 0&&typeof o.count!="string",L=n.hasDefaultValue(o),j=C?this.pluralResolver.getSuffix(y,o.count,o):"",V=o["defaultValue".concat(j)]||o.defaultValue;!this.isValidLookup(c)&&L&&(mt=!0,c=V),this.isValidLookup(c)||(it=!0,c=d);var Q=o.missingKeyNoValueFallbackToKey||this.options.missingKeyNoValueFallbackToKey,Pt=Q&&it?void 0:c,ze=L&&V!==c&&this.options.updateMissing;if(it||mt||ze){if(this.logger.log(ze?"updateKey":"missingKey",y,g,d,ze?V:c),u){var Cn=this.resolve(d,Oe(Oe({},o),{},{keySeparator:!1}));Cn&&Cn.res&&this.logger.warn("Seems the loaded translations were in flat JSON format instead of nested. Either set keySeparator: false on init or make sure your translations are published in nested format.")}var be=[],Et=this.languageUtils.getFallbackCodes(this.options.fallbackLng,o.lng||this.language);if(this.options.saveMissingTo==="fallback"&&Et&&Et[0])for(var Lo=0;Lo1&&arguments[1]!==void 0?arguments[1]:{},s,a,u,p,d;return typeof i=="string"&&(i=[i]),i.forEach(function(h){if(!o.isValidLookup(s)){var g=o.extractFromKey(h,l),y=g.key;a=y;var k=g.namespaces;o.options.fallbackNS&&(k=k.concat(o.options.fallbackNS));var _=l.count!==void 0&&typeof l.count!="string",f=_&&!l.ordinal&&l.count===0&&o.pluralResolver.shouldUseIntlApi(),c=l.context!==void 0&&(typeof l.context=="string"||typeof l.context=="number")&&l.context!=="",m=l.lngs?l.lngs:o.languageUtils.toResolveHierarchy(l.lng||o.language,l.fallbackLng);k.forEach(function(w){o.isValidLookup(s)||(d=w,!ja["".concat(m[0],"-").concat(w)]&&o.utils&&o.utils.hasLoadedNamespace&&!o.utils.hasLoadedNamespace(d)&&(ja["".concat(m[0],"-").concat(w)]=!0,o.logger.warn('key "'.concat(a,'" for languages "').concat(m.join(", "),`" won't get resolved as namespace "`).concat(d,'" was not yet loaded'),"This means something IS WRONG in your setup. 
You access the t function before i18next.init / i18next.loadNamespace / i18next.changeLanguage was done. Wait for the callback or Promise to resolve before accessing it!!!")),m.forEach(function(O){if(!o.isValidLookup(s)){p=O;var S=[y];if(o.i18nFormat&&o.i18nFormat.addLookupKeys)o.i18nFormat.addLookupKeys(S,y,O,w,l);else{var P;_&&(P=o.pluralResolver.getSuffix(O,l.count,l));var E="".concat(o.options.pluralSeparator,"zero");if(_&&(S.push(y+P),f&&S.push(y+E)),c){var A="".concat(y).concat(o.options.contextSeparator).concat(l.context);S.push(A),_&&(S.push(A+P),f&&S.push(A+E))}}for(var x;x=S.pop();)o.isValidLookup(s)||(u=x,s=o.getResource(O,w,x,l))}}))})}}),{res:s,usedKey:a,exactUsedKey:u,usedLng:p,usedNS:d}}},{key:"isValidLookup",value:function(i){return i!==void 0&&!(!this.options.returnNull&&i===null)&&!(!this.options.returnEmptyString&&i==="")}},{key:"getResource",value:function(i,o,l){var s=arguments.length>3&&arguments[3]!==void 0?arguments[3]:{};return this.i18nFormat&&this.i18nFormat.getResource?this.i18nFormat.getResource(i,o,l,s):this.resourceStore.getResource(i,o,l,s)}}],[{key:"hasDefaultValue",value:function(i){var o="defaultValue";for(var l in i)if(Object.prototype.hasOwnProperty.call(i,l)&&o===l.substring(0,o.length)&&i[l]!==void 0)return!0;return!1}}]),n}(nn);function Do(e){return e.charAt(0).toUpperCase()+e.slice(1)}var Da=function(){function e(t){pt(this,e),this.options=t,this.supportedLngs=this.options.supportedLngs||!1,this.logger=kt.create("languageUtils")}return ht(e,[{key:"getScriptPartFromCode",value:function(n){if(!n||n.indexOf("-")<0)return null;var r=n.split("-");return r.length===2||(r.pop(),r[r.length-1].toLowerCase()==="x")?null:this.formatLanguageCode(r.join("-"))}},{key:"getLanguagePartFromCode",value:function(n){if(!n||n.indexOf("-")<0)return n;var r=n.split("-");return this.formatLanguageCode(r[0])}},{key:"formatLanguageCode",value:function(n){if(typeof n=="string"&&n.indexOf("-")>-1){var r=["hans","hant","latn","cyrl","cans","mong","arab"],i=n.split("-");return this.options.lowerCaseLng?i=i.map(function(o){return o.toLowerCase()}):i.length===2?(i[0]=i[0].toLowerCase(),i[1]=i[1].toUpperCase(),r.indexOf(i[1].toLowerCase())>-1&&(i[1]=Do(i[1].toLowerCase()))):i.length===3&&(i[0]=i[0].toLowerCase(),i[1].length===2&&(i[1]=i[1].toUpperCase()),i[0]!=="sgn"&&i[2].length===2&&(i[2]=i[2].toUpperCase()),r.indexOf(i[1].toLowerCase())>-1&&(i[1]=Do(i[1].toLowerCase())),r.indexOf(i[2].toLowerCase())>-1&&(i[2]=Do(i[2].toLowerCase()))),i.join("-")}return this.options.cleanCode||this.options.lowerCaseLng?n.toLowerCase():n}},{key:"isSupportedCode",value:function(n){return(this.options.load==="languageOnly"||this.options.nonExplicitSupportedLngs)&&(n=this.getLanguagePartFromCode(n)),!this.supportedLngs||!this.supportedLngs.length||this.supportedLngs.indexOf(n)>-1}},{key:"getBestMatchFromCodes",value:function(n){var r=this;if(!n)return null;var i;return n.forEach(function(o){if(!i){var l=r.formatLanguageCode(o);(!r.options.supportedLngs||r.isSupportedCode(l))&&(i=l)}}),!i&&this.options.supportedLngs&&n.forEach(function(o){if(!i){var l=r.getLanguagePartFromCode(o);if(r.isSupportedCode(l))return i=l;i=r.options.supportedLngs.find(function(s){if(s.indexOf(l)===0)return s})}}),i||(i=this.getFallbackCodes(this.options.fallbackLng)[0]),i}},{key:"getFallbackCodes",value:function(n,r){if(!n)return[];if(typeof n=="function"&&(n=n(r)),typeof n=="string"&&(n=[n]),Object.prototype.toString.apply(n)==="[object Array]")return n;if(!r)return n.default||[];var i=n[r];return 
i||(i=n[this.getScriptPartFromCode(r)]),i||(i=n[this.formatLanguageCode(r)]),i||(i=n[this.getLanguagePartFromCode(r)]),i||(i=n.default),i||[]}},{key:"toResolveHierarchy",value:function(n,r){var i=this,o=this.getFallbackCodes(r||this.options.fallbackLng||[],n),l=[],s=function(u){u&&(i.isSupportedCode(u)?l.push(u):i.logger.warn("rejecting language code not found in supportedLngs: ".concat(u)))};return typeof n=="string"&&n.indexOf("-")>-1?(this.options.load!=="languageOnly"&&s(this.formatLanguageCode(n)),this.options.load!=="languageOnly"&&this.options.load!=="currentOnly"&&s(this.getScriptPartFromCode(n)),this.options.load!=="currentOnly"&&s(this.getLanguagePartFromCode(n))):typeof n=="string"&&s(this.formatLanguageCode(n)),o.forEach(function(a){l.indexOf(a)<0&&s(i.formatLanguageCode(a))}),l}}]),e}(),Up=[{lngs:["ach","ak","am","arn","br","fil","gun","ln","mfe","mg","mi","oc","pt","pt-BR","tg","tl","ti","tr","uz","wa"],nr:[1,2],fc:1},{lngs:["af","an","ast","az","bg","bn","ca","da","de","dev","el","en","eo","es","et","eu","fi","fo","fur","fy","gl","gu","ha","hi","hu","hy","ia","it","kk","kn","ku","lb","mai","ml","mn","mr","nah","nap","nb","ne","nl","nn","no","nso","pa","pap","pms","ps","pt-PT","rm","sco","se","si","so","son","sq","sv","sw","ta","te","tk","ur","yo"],nr:[1,2],fc:2},{lngs:["ay","bo","cgg","fa","ht","id","ja","jbo","ka","km","ko","ky","lo","ms","sah","su","th","tt","ug","vi","wo","zh"],nr:[1],fc:3},{lngs:["be","bs","cnr","dz","hr","ru","sr","uk"],nr:[1,2,5],fc:4},{lngs:["ar"],nr:[0,1,2,3,11,100],fc:5},{lngs:["cs","sk"],nr:[1,2,5],fc:6},{lngs:["csb","pl"],nr:[1,2,5],fc:7},{lngs:["cy"],nr:[1,2,3,8],fc:8},{lngs:["fr"],nr:[1,2],fc:9},{lngs:["ga"],nr:[1,2,3,7,11],fc:10},{lngs:["gd"],nr:[1,2,3,20],fc:11},{lngs:["is"],nr:[1,2],fc:12},{lngs:["jv"],nr:[0,1],fc:13},{lngs:["kw"],nr:[1,2,3,4],fc:14},{lngs:["lt"],nr:[1,2,10],fc:15},{lngs:["lv"],nr:[1,2,0],fc:16},{lngs:["mk"],nr:[1,2],fc:17},{lngs:["mnk"],nr:[0,1,2],fc:18},{lngs:["mt"],nr:[1,2,11,20],fc:19},{lngs:["or"],nr:[2,1],fc:2},{lngs:["ro"],nr:[1,2,20],fc:20},{lngs:["sl"],nr:[5,1,2,3],fc:21},{lngs:["he","iw"],nr:[1,2,20,21],fc:22}],zp={1:function(t){return+(t>1)},2:function(t){return+(t!=1)},3:function(t){return 0},4:function(t){return t%10==1&&t%100!=11?0:t%10>=2&&t%10<=4&&(t%100<10||t%100>=20)?1:2},5:function(t){return t==0?0:t==1?1:t==2?2:t%100>=3&&t%100<=10?3:t%100>=11?4:5},6:function(t){return t==1?0:t>=2&&t<=4?1:2},7:function(t){return t==1?0:t%10>=2&&t%10<=4&&(t%100<10||t%100>=20)?1:2},8:function(t){return t==1?0:t==2?1:t!=8&&t!=11?2:3},9:function(t){return+(t>=2)},10:function(t){return t==1?0:t==2?1:t<7?2:t<11?3:4},11:function(t){return t==1||t==11?0:t==2||t==12?1:t>2&&t<20?2:3},12:function(t){return+(t%10!=1||t%100==11)},13:function(t){return+(t!==0)},14:function(t){return t==1?0:t==2?1:t==3?2:3},15:function(t){return t%10==1&&t%100!=11?0:t%10>=2&&(t%100<10||t%100>=20)?1:2},16:function(t){return t%10==1&&t%100!=11?0:t!==0?1:2},17:function(t){return t==1||t%10==1&&t%100!=11?0:1},18:function(t){return t==0?0:t==1?1:2},19:function(t){return t==1?0:t==0||t%100>1&&t%100<11?1:t%100>10&&t%100<20?2:3},20:function(t){return t==1?0:t==0||t%100>0&&t%100<20?1:2},21:function(t){return t%100==1?1:t%100==2?2:t%100==3||t%100==4?3:0},22:function(t){return t==1?0:t==2?1:(t<0||t>10)&&t%10==0?2:3}},bp=["v1","v2","v3"],Ua={zero:0,one:1,two:2,few:3,many:4,other:5};function Vp(){var e={};return Up.forEach(function(t){t.lngs.forEach(function(n){e[n]={numbers:t.nr,plurals:zp[t.fc]}})}),e}var $p=function(){function e(t){var 
n=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};pt(this,e),this.languageUtils=t,this.options=n,this.logger=kt.create("pluralResolver"),(!this.options.compatibilityJSON||this.options.compatibilityJSON==="v4")&&(typeof Intl>"u"||!Intl.PluralRules)&&(this.options.compatibilityJSON="v3",this.logger.error("Your environment seems not to be Intl API compatible, use an Intl.PluralRules polyfill. Will fallback to the compatibilityJSON v3 format handling.")),this.rules=Vp()}return ht(e,[{key:"addRule",value:function(n,r){this.rules[n]=r}},{key:"getRule",value:function(n){var r=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{};if(this.shouldUseIntlApi())try{return new Intl.PluralRules(n,{type:r.ordinal?"ordinal":"cardinal"})}catch{return}return this.rules[n]||this.rules[this.languageUtils.getLanguagePartFromCode(n)]}},{key:"needsPlural",value:function(n){var r=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{},i=this.getRule(n,r);return this.shouldUseIntlApi()?i&&i.resolvedOptions().pluralCategories.length>1:i&&i.numbers.length>1}},{key:"getPluralFormsOfKey",value:function(n,r){var i=arguments.length>2&&arguments[2]!==void 0?arguments[2]:{};return this.getSuffixes(n,i).map(function(o){return"".concat(r).concat(o)})}},{key:"getSuffixes",value:function(n){var r=this,i=arguments.length>1&&arguments[1]!==void 0?arguments[1]:{},o=this.getRule(n,i);return o?this.shouldUseIntlApi()?o.resolvedOptions().pluralCategories.sort(function(l,s){return Ua[l]-Ua[s]}).map(function(l){return"".concat(r.options.prepend).concat(l)}):o.numbers.map(function(l){return r.getSuffix(n,l,i)}):[]}},{key:"getSuffix",value:function(n,r){var i=arguments.length>2&&arguments[2]!==void 0?arguments[2]:{},o=this.getRule(n,i);return o?this.shouldUseIntlApi()?"".concat(this.options.prepend).concat(o.select(r)):this.getSuffixRetroCompatible(o,r):(this.logger.warn("no plural rule found for: ".concat(n)),"")}},{key:"getSuffixRetroCompatible",value:function(n,r){var i=this,o=n.noAbs?n.plurals(r):n.plurals(Math.abs(r)),l=n.numbers[o];this.options.simplifyPluralSuffix&&n.numbers.length===2&&n.numbers[0]===1&&(l===2?l="plural":l===1&&(l=""));var s=function(){return i.options.prepend&&l.toString()?i.options.prepend+l.toString():l.toString()};return this.options.compatibilityJSON==="v1"?l===1?"":typeof l=="number"?"_plural_".concat(l.toString()):s():this.options.compatibilityJSON==="v2"||this.options.simplifyPluralSuffix&&n.numbers.length===2&&n.numbers[0]===1?s():this.options.prepend&&o.toString()?this.options.prepend+o.toString():o.toString()}},{key:"shouldUseIntlApi",value:function(){return!bp.includes(this.options.compatibilityJSON)}}]),e}();function za(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter(function(i){return Object.getOwnPropertyDescriptor(e,i).enumerable})),n.push.apply(n,r)}return n}function ot(e){for(var t=1;t0&&arguments[0]!==void 0?arguments[0]:{};pt(this,e),this.logger=kt.create("interpolator"),this.options=t,this.format=t.interpolation&&t.interpolation.format||function(n){return n},this.init(t)}return ht(e,[{key:"init",value:function(){var n=arguments.length>0&&arguments[0]!==void 0?arguments[0]:{};n.interpolation||(n.interpolation={escapeValue:!0});var r=n.interpolation;this.escape=r.escape!==void 0?r.escape:Ip,this.escapeValue=r.escapeValue!==void 0?r.escapeValue:!0,this.useRawValueToEscape=r.useRawValueToEscape!==void 
"+r.join(" "):""}(t.attrs):"")+(t.voidElement?"/>":">"),t.voidElement?e:e+t.children.reduce(_c,"")+"";case"comment":return e+""}}var nh={parse:function(e,t){t||(t={}),t.components||(t.components=th);var n,r=[],i=[],o=-1,s=!1;if(e.indexOf("<")!==0){var l=e.indexOf("<");r.push({type:"text",content:l===-1?e:e.substring(0,l)})}return e.replace(qp,function(a,u){if(s){if(a!=="")return;s=!1}var p,d=a.charAt(1)!=="/",h=a.startsWith(" " + << "Subgraph[" << subm_idx_to << "]/" << port_idx_to); + NPUW_ASSERT(m_subrequests[subm_idx_from]); // prod request is created + NPUW_ASSERT(m_subrequests[subm_idx_to]); // cons request is created + NPUW_ASSERT(m_subrequests[subm_idx_from]._ptr != m_subrequests[subm_idx_to]._ptr); + + const auto& iport = m_subrequests[subm_idx_to]->get_compiled_model()->inputs()[port_idx_to]; + const auto& oport = m_subrequests[subm_idx_from]->get_compiled_model()->outputs()[port_idx_from]; + const auto& tensor = m_subrequests[subm_idx_from]->get_tensor(oport); + LOG_DEBUG("Set Subgraph[" << subm_idx_to << "]/" << iport << " to Subgraph[" << subm_idx_from << "]/" << oport); + m_subrequests[subm_idx_to]->set_tensor(iport, tensor); + } // for(map) + LOG_INFO("Done"); + + init_gio(); + + for (size_t i = 0; i < m_num_submodels; i++) { + LOG_VERB("Trying to preemptively set tensors for Subgraph[" << i << "]..."); + LOG_BLOCK(); + auto& comp_model_desc = m_npuw_model->m_compiled_submodels[i]; + if (!comp_model_desc.compiled_model && !comp_model_desc.replaced_by) { + continue; // Optimized out + } + unpack_closure(i, m_subrequests[i]); + LOG_VERB("Done"); + } +} + +bool ov::npuw::UnfoldInferRequest::valid_subrequest(std::size_t idx) const { + return m_subrequests.at(idx) != nullptr; +} + +void ov::npuw::UnfoldInferRequest::infer() { + const bool do_async = m_npuw_model->m_cfg.get<::intel_npu::NPUW_FUNCALL_ASYNC>(); + + auto prepare = [&](std::size_t idx) { + if (idx >= m_subrequests.size()) { + return; + } + bind_global_params(idx, m_subrequests[idx]); + bind_global_results(idx, m_subrequests[idx]); + }; + auto wait_and_clear = [](RqPtrs& rqs) { + for (auto&& r : rqs) { + r->wait(); + } + rqs.clear(); + }; + + if (do_async) { + std::size_t past_repl_id = 0u; + RqPtrs previous_requests; + + prepare(0); + for (std::size_t idx = 0; idx < m_num_submodels; idx++) { + auto& subr = m_subrequests[idx]; + if (!subr) { + prepare(idx + 1); + continue; + } + auto& comp_model_desc = m_npuw_model->m_compiled_submodels[idx]; + const auto this_repl_id = comp_model_desc.replaced_by.value_or(idx); + if (this_repl_id != past_repl_id) { + // For non-repeating blocks, the above value_or returns idx + // For repeating blocks, it returns the function group id + // If either is not equal to the past_repl_id, make a barrier here + wait_and_clear(previous_requests); + past_repl_id = this_repl_id; + } + subr->start_async(); + previous_requests.push_back(subr); + prepare(idx + 1); + } + wait_and_clear(previous_requests); + } else { + prepare(0); + for (std::size_t idx = 0; idx < m_num_submodels; idx++) { + auto& subr = m_subrequests[idx]; + if (!subr) { + prepare(idx + 1); + continue; + } + subr->start_async(); + prepare(idx + 1); + subr->wait(); + } + } // (async) +} diff --git a/src/plugins/intel_npu/src/plugin/npuw/unfold_sync_infer_request.hpp b/src/plugins/intel_npu/src/plugin/npuw/unfold_sync_infer_request.hpp new file mode 100644 index 00000000000000..76b67571ec4c40 --- /dev/null +++ b/src/plugins/intel_npu/src/plugin/npuw/unfold_sync_infer_request.hpp @@ -0,0 +1,42 @@ +// Copyright (C) 2023 Intel 
Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include +#include +#include +#include +#include + +#include "base_sync_infer_request.hpp" + +namespace ov { +namespace npuw { + +class UnfoldInferRequest final : public IBaseInferRequest { +public: + explicit UnfoldInferRequest(const std::shared_ptr& compiled_model); + + //////////////////////////////////// + // implement IBaseInferRequest - neither of these is required here + // this hierarchy needs review + void prepare_for_infer() override {} + bool valid_subrequest(std::size_t idx) const override; + void start_subrequest(std::size_t) override {} + void run_subrequest_for_success(std::size_t, bool&) override {} + void subscribe_subrequest(std::size_t, Completed cb) override {} + void complete_subrequest(std::size_t) override {} + void cancel_subrequest(std::size_t) override {} + bool supports_async_pipeline() const override { + return false; + } + void update_subrequest_links(std::size_t) override {} + +private: + void infer() override; +}; + +} // namespace npuw +} // namespace ov From 0f149e39ed7dba9fae09fa6b56bb9f08f117aa9f Mon Sep 17 00:00:00 2001 From: Mingyu Kim Date: Fri, 22 Nov 2024 15:04:16 +0900 Subject: [PATCH 29/62] [GPU] Do not use usm_host when network output tensor size is large (#27513) ### Details: - For dGPU including A770, when the network output size is large, performance is better with an explicit copy than writing data to usm_host. - Allow usm_host access for input - Next-gen dGPU, write to usm_device and copy it to usm_host - For DG2, write to usm_device only for large output ### Tickets: - 157439 --- .../intel_gpu/plugin/sync_infer_request.hpp | 1 + .../intel_gpu/runtime/debug_configuration.hpp | 1 + .../src/plugin/sync_infer_request.cpp | 21 ++++++++++++++----- .../src/runtime/debug_configuration.cpp | 4 ++++ 4 files changed, 22 insertions(+), 5 deletions(-) diff --git a/src/plugins/intel_gpu/include/intel_gpu/plugin/sync_infer_request.hpp b/src/plugins/intel_gpu/include/intel_gpu/plugin/sync_infer_request.hpp index cdead8a816626d..916427c280310c 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/plugin/sync_infer_request.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/plugin/sync_infer_request.hpp @@ -118,6 +118,7 @@ class SyncInferRequest : public ov::ISyncInferRequest { void init_mappings(); bool is_batched_input(const ov::Output& port) const; + uint64_t total_output_bytes = 0; }; } // namespace intel_gpu diff --git a/src/plugins/intel_gpu/include/intel_gpu/runtime/debug_configuration.hpp b/src/plugins/intel_gpu/include/intel_gpu/runtime/debug_configuration.hpp index 465ed898ecb7ec..a020c5d1cd5ef6 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/runtime/debug_configuration.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/runtime/debug_configuration.hpp @@ -142,6 +142,7 @@ class debug_configuration { int disable_runtime_skip_reorder; // Disable runtime skip reorder int disable_primitive_fusing; // Disable primitive fusing int disable_fake_alignment; // Disable fake alignment + int use_usm_host; // Set explicit usm_host usage for network input and output std::vector dynamic_quantize_layers_without_onednn; // Specify Fully-connected layers which enable Dynamic quantization int use_kv_cache_compression; // Enable KV-cache compression int dynamic_quantize_group_size; // Enable Dynamic quantization for fully connected primitive by specified group size diff --git a/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp b/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp index
cc4681d2ac3387..00ec52f64750bc 100644 --- a/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp +++ b/src/plugins/intel_gpu/src/plugin/sync_infer_request.cpp @@ -32,12 +32,19 @@ namespace { -inline bool can_use_usm_host(const cldnn::engine& engine) { +inline bool can_use_usm_host(const cldnn::engine& engine, const uint64_t total_output_bytes) { + GPU_DEBUG_GET_INSTANCE(debug_config); + GPU_DEBUG_IF(debug_config->use_usm_host == 1) { return true; } + GPU_DEBUG_IF(debug_config->use_usm_host == 2) { return false; } + auto can_use_usm = engine.use_unified_shared_memory(); + // When output size is large, it is better not to write to usm_host directly + const uint64_t LARGE_OUTPUT_BYTES_THRESHOLD = 4 * 1048576; const auto& device_info = engine.get_device_info(); if ((device_info.gfx_ver.major == 12 && device_info.gfx_ver.minor == 60) || - (device_info.gfx_ver.major >= 20 && device_info.dev_type == cldnn::device_type::discrete_gpu)) { + (device_info.gfx_ver.major >= 20 && device_info.dev_type == cldnn::device_type::discrete_gpu) || + (device_info.dev_type == cldnn::device_type::discrete_gpu && total_output_bytes > LARGE_OUTPUT_BYTES_THRESHOLD)) { // WA: Disable USM host memory for infer request`s tensors for PVC and subsequent dGPUs, as kernel access // to system memory is slower than using an explicit memcpy (Host <-> Device) call with the copy engine // Driver tickets with additional details: 6155, 10054 @@ -544,7 +551,7 @@ std::shared_ptr SyncInferRequest::create_device_tensor(const ov::Pa } // Create OpenCL buffer for PVC if lockable memory is needed due to performance issue with usm host - if (!can_use_usm_host(m_graph->get_engine()) && need_lockable_memory) + if (!can_use_usm_host(m_graph->get_engine(), total_output_bytes) && need_lockable_memory) tensor_type = TensorType::BT_BUF_INTERNAL; return std::make_shared(m_context, @@ -573,7 +580,9 @@ TensorWrapper SyncInferRequest::create_or_share_device_tensor(const TensorWrappe auto usm_host_raw_ptr = engine.get_device_info().dev_type == cldnn::device_type::integrated_gpu && user_tensor_mem_type == cldnn::allocation_type::usm_host; - bool can_share = !is_convert_required(user_tensor->get_element_type(), element_type) && can_use_usm_host(engine) && !generic_remote_tensor; + bool can_share = !is_convert_required(user_tensor->get_element_type(), element_type) + && can_use_usm_host(engine, total_output_bytes) + && !generic_remote_tensor; if (usm_host_tensor && can_share && m_context == usm_host_tensor->get_impl()->get_context()) { return { usm_host_tensor->get_impl(), user_tensor_wrapper.owner }; @@ -662,6 +671,7 @@ void SyncInferRequest::allocate_inputs() { void SyncInferRequest::allocate_outputs() { OV_ITT_SCOPED_TASK(itt::domains::intel_gpu_plugin, "SyncInferRequest::allocate_outputs"); + total_output_bytes = 0; // allocate outputs for (const auto& it : m_output_ports_map) { size_t output_idx = it.first; @@ -669,6 +679,7 @@ void SyncInferRequest::allocate_outputs() { GPU_DEBUG_LOG << "[init output blob with index: " << output_idx << "]" << std::endl; allocate_output(port, output_idx); + total_output_bytes += ov::ISyncInferRequest::get_tensor(port)->get_byte_size(); } } @@ -817,7 +828,7 @@ std::vector SyncInferRequest::prepare_input(const std::string } else { m_plugin_inputs[input_idx] = user_tensor_wrapper; } - } else if (is_usm_host_tensor && !convert_needed && can_use_usm_host(engine)) { + } else if (is_usm_host_tensor && !convert_needed) { if (element_type != ::data_type_for_remote_tensor(element_type)) { m_plugin_inputs[input_idx] = { 
std::make_shared(m_context, user_tensor->get_shape(), diff --git a/src/plugins/intel_gpu/src/runtime/debug_configuration.cpp b/src/plugins/intel_gpu/src/runtime/debug_configuration.cpp index 5c3b3ee0c970f9..4a68355e1bc8ba 100644 --- a/src/plugins/intel_gpu/src/runtime/debug_configuration.cpp +++ b/src/plugins/intel_gpu/src/runtime/debug_configuration.cpp @@ -183,6 +183,8 @@ static void print_help_messages() { message_list.emplace_back("OV_GPU_DisableRuntimeSkipReorder", "Disable runtime skip reorder."); message_list.emplace_back("OV_GPU_DisablePrimitiveFusing", "Disable primitive fusing"); message_list.emplace_back("OV_GPU_DisableFakeAlignment", "Disable fake alignment"); + message_list.emplace_back("OV_GPU_UseUsmHost", "Set explicit policy for usm host usage for network input/output. " + "0: default, 1: use usm_host, 2: do not use usm_host"); message_list.emplace_back("OV_GPU_KVCacheCompression", "Enable/Disable KV-cache compression"); message_list.emplace_back("OV_GPU_DynamicQuantizeLayersWithoutOnednn", "Enable Dynamic quantization for specified Fully connected layers only, " "separated by space. Support case-insensitive and regular expression. For example .*fully_connected.*"); @@ -254,6 +256,7 @@ debug_configuration::debug_configuration() , disable_runtime_skip_reorder(0) , disable_primitive_fusing(0) , disable_fake_alignment(0) + , use_usm_host(0) , use_kv_cache_compression(-1) , dynamic_quantize_group_size(DYNAMIC_QUANTIZE_GROUP_SIZE_NOT_SET) , disable_horizontal_fc_fusion(0) { @@ -307,6 +310,7 @@ debug_configuration::debug_configuration() get_gpu_debug_env_var("DisableRuntimeSkipReorder", disable_runtime_skip_reorder); get_gpu_debug_env_var("DisablePrimitiveFusing", disable_primitive_fusing); get_gpu_debug_env_var("DisableFakeAlignment", disable_fake_alignment); + get_gpu_debug_env_var("UseUsmHost", use_usm_host); get_gpu_debug_env_var("KVCacheCompression", use_kv_cache_compression); get_gpu_debug_env_var("DynamicQuantizeGroupSize", dynamic_quantize_group_size); get_gpu_debug_env_var("DisableHorizontalFCFusion", disable_horizontal_fc_fusion); From c801f4ec1191c9c4967fe1b8aa1fea67441178fa Mon Sep 17 00:00:00 2001 From: cecilia peng Date: Fri, 22 Nov 2024 14:59:36 +0800 Subject: [PATCH 30/62] [GPU] Relax UnsqueezeBroadcastReshapeSDPAFusion (#27515) ### Details: - By relaxing UnsqueezeBroadcastReshapeSDPAFusion, GQA pattern is enabled and Broadcasting nodes overheads in paths of key and value are removed, thus improves performance of GLM4 model significantly. - Fix for GLM4V, which has initial state shape (-1, 0, 0, 0), and shape infer failed. 
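For context, the GQA pattern this fusion matches expands the key/value tensors to the query's head count through an Unsqueeze -> Broadcast -> Reshape chain feeding SDPA. Below is a minimal sketch of that subgraph built with the public opset rather than the plugin's internal SDPA op; the head counts (8 query / 2 KV), head size (64), and the use of opset13 with a bidirectional broadcast are illustrative assumptions, not values taken from this change.

```cpp
#include <openvino/openvino.hpp>
#include <openvino/opsets/opset13.hpp>

// Illustrative GQA-style subgraph: 8 query heads share 2 KV heads,
// so K/V are expanded via Unsqueeze -> Broadcast -> Reshape before SDPA.
std::shared_ptr<ov::Model> make_gqa_sdpa_sketch() {
    using namespace ov::opset13;
    auto q = std::make_shared<Parameter>(ov::element::f32, ov::PartialShape{-1, 8, -1, 64});
    auto k = std::make_shared<Parameter>(ov::element::f32, ov::PartialShape{-1, 2, -1, 64});
    auto v = std::make_shared<Parameter>(ov::element::f32, ov::PartialShape{-1, 2, -1, 64});

    // Expand a [-1, 2, -1, 64] KV tensor to [-1, 8, -1, 64] to match the query head count.
    auto expand = [](const ov::Output<ov::Node>& kv) -> ov::Output<ov::Node> {
        auto axis = Constant::create(ov::element::i64, ov::Shape{1}, {2});
        auto unsqueezed = std::make_shared<Unsqueeze>(kv, axis);  // rank-5: [-1, 2, 1, -1, 64]
        auto target = Constant::create(ov::element::i64, ov::Shape{5}, {1, 2, 4, 1, 1});
        auto bcast = std::make_shared<Broadcast>(unsqueezed, target, ov::op::BroadcastType::BIDIRECTIONAL);
        auto new_shape = Constant::create(ov::element::i64, ov::Shape{4}, {0, 8, -1, 64});
        return std::make_shared<Reshape>(bcast, new_shape, true);  // back to rank-4: [-1, 8, -1, 64]
    };

    auto sdpa = std::make_shared<ScaledDotProductAttention>(q, expand(k), expand(v), false);
    return std::make_shared<ov::Model>(ov::OutputVector{sdpa}, ov::ParameterVector{q, k, v});
}
```

With the relaxed pattern, the Broadcast nodes in the key and value paths no longer block fusing this subgraph, which is what removes their runtime overhead.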
### Tickets: - *CVS-157263* --------- Co-authored-by: Chen Peter --- .../src/plugin/transformations/op/kv_cache.cpp | 11 +++++++---- .../intel_gpu/src/plugin/transformations/op/sdpa.cpp | 5 ++++- .../unsqueeze_broadcast_reshape_sdpa_fusion.cpp | 6 +----- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/plugins/intel_gpu/src/plugin/transformations/op/kv_cache.cpp b/src/plugins/intel_gpu/src/plugin/transformations/op/kv_cache.cpp index 12d961be6d337a..6721d0f9ebd608 100644 --- a/src/plugins/intel_gpu/src/plugin/transformations/op/kv_cache.cpp +++ b/src/plugins/intel_gpu/src/plugin/transformations/op/kv_cache.cpp @@ -106,18 +106,21 @@ std::vector shape_infer(const KVCache* op, const std::vectorget_gather_axis(); const auto& concat_axis = ov::util::normalize(op->get_concat_axis(), input_shapes[0].size()); + // We update output shape with input1 shape by default, as input1 is always new, and in some situations, input0 shape + // has zeros in some dimensions. For example to concat input0 [-1, 0, 0, 0] + input1 [-1, 4, -1, 128] along axis 2, + // we could (and should) infer dim value of axis 1 and 3 in this case. if (op->get_output_size() >= 2) { - out_shapes[0] = input_shapes[0]; + out_shapes[0] = input_shapes[1]; out_shapes[0][gather_axis] = input_shapes[2][0]; - out_shapes[0][concat_axis] += input_shapes[1][concat_axis]; + out_shapes[0][concat_axis] += input_shapes[0][concat_axis]; std::vector dims(out_shapes[0].size(), 1); dims[gather_axis] = out_shapes[0][gather_axis]; dims[concat_axis] = out_shapes[0][concat_axis]; out_shapes[1] = dims; } else { - out_shapes[0] = input_shapes[0]; - out_shapes[0][concat_axis] += input_shapes[1][concat_axis]; + out_shapes[0] = input_shapes[1]; + out_shapes[0][concat_axis] += input_shapes[0][concat_axis]; } return out_shapes; diff --git a/src/plugins/intel_gpu/src/plugin/transformations/op/sdpa.cpp b/src/plugins/intel_gpu/src/plugin/transformations/op/sdpa.cpp index 09513d99153a1f..3988306ba5eff4 100644 --- a/src/plugins/intel_gpu/src/plugin/transformations/op/sdpa.cpp +++ b/src/plugins/intel_gpu/src/plugin/transformations/op/sdpa.cpp @@ -144,9 +144,12 @@ std::vector shape_infer(const SDPA* op, if (is_broadcastable) { size_t max_rank = shape_q_t.size(); for (size_t i = 0; i < max_rank; ++i) { - if (shape_q_t[i].is_static() && shape_k_t[i].is_static() && shape_v_t[i].is_static()) { + if (shape_q_t[i].is_static() && shape_k_t[i].is_static()) { auto broadcasted_dim = shape_q_t[i].get_length(); shape_k_t[i] = broadcasted_dim; + } + if (shape_q_t[i].is_static() && shape_v_t[i].is_static()) { + auto broadcasted_dim = shape_q_t[i].get_length(); shape_v_t[i] = broadcasted_dim; } } diff --git a/src/plugins/intel_gpu/src/plugin/transformations/unsqueeze_broadcast_reshape_sdpa_fusion.cpp b/src/plugins/intel_gpu/src/plugin/transformations/unsqueeze_broadcast_reshape_sdpa_fusion.cpp index d525792ccd8d06..2b0d2ed5eaf145 100644 --- a/src/plugins/intel_gpu/src/plugin/transformations/unsqueeze_broadcast_reshape_sdpa_fusion.cpp +++ b/src/plugins/intel_gpu/src/plugin/transformations/unsqueeze_broadcast_reshape_sdpa_fusion.cpp @@ -23,10 +23,6 @@ using ov::pass::pattern::op::Or; UnsqueezeBroadcastReshapeSDPAFusion::UnsqueezeBroadcastReshapeSDPAFusion() { using namespace ov::pass::pattern; - auto not_reshape = [](const ov::Output& output) -> bool { - return std::dynamic_pointer_cast(output.get_node_shared_ptr()) == nullptr; - }; - auto unsqueeze_predicate = [](const ov::Output& output) -> bool { return rank_equals(5)(output) && consumers_count(1); }; @@ -42,7 
+38,7 @@ UnsqueezeBroadcastReshapeSDPAFusion::UnsqueezeBroadcastReshapeSDPAFusion() { return rank_equals(4)(output) && consumers_count(1); }; - auto input_a_m = any_input(not_reshape); + auto input_a_m = any_input(); auto input_attn_mask = any_input(); auto input_scale = any_input(); auto input_b_m = wrap_type({any_input(), any_input()}); From 91a5518347a5e4e55f940b6e3c9b8f00626f09ff Mon Sep 17 00:00:00 2001 From: SYNC <125558396+xyz-harshal@users.noreply.github.com> Date: Fri, 22 Nov 2024 13:27:06 +0530 Subject: [PATCH 31/62] [CPU][ARM] JIT Ceiling Operation (#27527) ### Details: - Added JIT emitter for Eltwise Ceiling operation on ARM64 SIMD - Implemented fp32 optimization replacing C++ Math implementation - Modified ARM64 executor to support new JIT emitter - Updated kernel files to include Ceiling in Eltwise operations - Added test coverage for JIT implementation verification - Transitioned operation type from Math to Eltwise for better performance ### Tickets: - #27498 --- src/plugins/intel_cpu/src/cpu_types.cpp | 3 +- src/plugins/intel_cpu/src/cpu_types.h | 1 + .../plugin/aarch64/jit_eltwise_emitters.cpp | 46 ++++++++++++++++++- .../plugin/aarch64/jit_eltwise_emitters.hpp | 32 ++++++++++++- src/plugins/intel_cpu/src/nodes/eltwise.cpp | 7 +++ .../nodes/executors/aarch64/jit_eltwise.cpp | 1 + .../aarch64/jit_uni_eltwise_generic.cpp | 2 + .../single_layer_tests/classes/activation.cpp | 3 ++ .../skip_tests_config.cpp | 2 +- 9 files changed, 93 insertions(+), 4 deletions(-) diff --git a/src/plugins/intel_cpu/src/cpu_types.cpp b/src/plugins/intel_cpu/src/cpu_types.cpp index e20369c9cca215..3b6440e56c3272 100644 --- a/src/plugins/intel_cpu/src/cpu_types.cpp +++ b/src/plugins/intel_cpu/src/cpu_types.cpp @@ -191,7 +191,7 @@ static const TypeToNameMap& get_type_to_name_tbl() { {"Atan", Type::Math}, {"Atanh", Type::Math}, {"Ceil", Type::Math}, - {"Ceiling", Type::Math}, + {"Ceiling", Type::Eltwise}, {"Cos", Type::Math}, {"Cosh", Type::Math}, {"Floor", Type::Eltwise}, @@ -419,6 +419,7 @@ std::string algToString(const Algorithm alg) { CASE(EltwiseSubtract); CASE(EltwiseDivide); CASE(EltwiseFloor); + CASE(EltwiseCeiling); CASE(EltwiseFloorMod); CASE(EltwiseMod); CASE(EltwiseMaximum); diff --git a/src/plugins/intel_cpu/src/cpu_types.h b/src/plugins/intel_cpu/src/cpu_types.h index d6ac9947a8fb5d..9461526184b0bf 100644 --- a/src/plugins/intel_cpu/src/cpu_types.h +++ b/src/plugins/intel_cpu/src/cpu_types.h @@ -165,6 +165,7 @@ enum class Algorithm { EltwiseSubtract, EltwiseDivide, EltwiseFloor, + EltwiseCeiling, EltwiseFloorMod, EltwiseMod, EltwiseMaximum, diff --git a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp index 17ce08f7159379..355c8fb7f4c4d7 100644 --- a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp +++ b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.cpp @@ -516,6 +516,50 @@ std::set> jit_floor_emitter::get_supported_precisions return {{element::f32}}; } +/// CEILING /// +//Initialization of the emitter, taking node as input +jit_ceiling_emitter::jit_ceiling_emitter(dnnl::impl::cpu::aarch64::jit_generator* host, + dnnl::impl::cpu::aarch64::cpu_isa_t host_isa, + const std::shared_ptr& node) + : jit_emitter(host, host_isa, node, get_arithmetic_binary_exec_precision(node)) { +} + +//Initialization of emitter, without taking node as input +jit_ceiling_emitter::jit_ceiling_emitter(dnnl::impl::cpu::aarch64::jit_generator* host, + 
dnnl::impl::cpu::aarch64::cpu_isa_t host_isa, + const ov::element::Type exec_prc) : jit_emitter(host, host_isa, exec_prc) { +} + +//This will tell the JIT compiler that how many inputs the ceiling operation requires (here 1) +size_t jit_ceiling_emitter::get_inputs_count() const { return 1; } + +//Main implementation method that emits the JIT code +void jit_ceiling_emitter::emit_impl(const std::vector &in_vec_idxs, const std::vector &out_vec_idxs) const { + if (host_isa_ == dnnl::impl::cpu::aarch64::asimd) { + emit_isa(in_vec_idxs, out_vec_idxs); + } else { + OV_CPU_JIT_EMITTER_THROW("Can't create jit eltwise kernel"); + } +} + +// Template method that generates actual instruction sequence for ceiling operation +// The h->frintp() method rounds up the floating value to the nearest integer. +template +void jit_ceiling_emitter::emit_isa(const std::vector &in_vec_idxs, const std::vector &out_vec_idxs) const { + OV_CPU_JIT_EMITTER_ASSERT(exec_prc_ == ov::element::f32, "unsupported precision: " + exec_prc_.to_string()); + + using TReg = typename dnnl::impl::cpu::aarch64::cpu_isa_traits::TReg; + TReg src = TReg(in_vec_idxs[0]); + TReg dst = TReg(out_vec_idxs[0]); + h->frintp(dst.s, src.s); +} + +// Template method that generates actual instruction sequence for ceiling operation +// Currently only supports 32-bit floating point (f32) +std::set> jit_ceiling_emitter::get_supported_precisions(const std::shared_ptr& node) { + return {{element::f32}}; +} + /// GELU_ERF /// jit_gelu_erf_emitter::jit_gelu_erf_emitter(dnnl::impl::cpu::aarch64::jit_generator* host, dnnl::impl::cpu::aarch64::cpu_isa_t host_isa, @@ -2275,4 +2319,4 @@ std::set> jit_tanh_emitter::get_supported_precisions( } // namespace aarch64 } // namespace intel_cpu -} // namespace ov \ No newline at end of file +} // namespace ov diff --git a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp index e2aff7557f7365..a99e016c9c834a 100644 --- a/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp +++ b/src/plugins/intel_cpu/src/emitters/plugin/aarch64/jit_eltwise_emitters.hpp @@ -214,6 +214,36 @@ class jit_floor_emitter : public jit_emitter { void emit_isa(const std::vector &in_vec_idxs, const std::vector &out_vec_idxs) const; }; +class jit_ceiling_emitter : public jit_emitter { +public: + // Constructor with explicit precision + jit_ceiling_emitter(dnnl::impl::cpu::aarch64::jit_generator *host, + dnnl::impl::cpu::aarch64::cpu_isa_t host_isa, + const ov::element::Type exec_prc = ov::element::f32); + + // Constructor from node + jit_ceiling_emitter(dnnl::impl::cpu::aarch64::jit_generator *host, + dnnl::impl::cpu::aarch64::cpu_isa_t host_isa, + const std::shared_ptr& node); + + // Get number of inputs + size_t get_inputs_count() const override; + + // Get supported precisions + static std::set> get_supported_precisions( + const std::shared_ptr& node = nullptr); + +private: + // Implementation of JIT code emission + void emit_impl(const std::vector &in_vec_idxs, + const std::vector &out_vec_idxs) const override; + + // ISA-specific implementation + template + void emit_isa(const std::vector &in_vec_idxs, + const std::vector &out_vec_idxs) const; +}; + class jit_gelu_erf_emitter : public jit_emitter { public: jit_gelu_erf_emitter(dnnl::impl::cpu::aarch64::jit_generator* host, @@ -943,4 +973,4 @@ class jit_tanh_emitter : public jit_emitter { } // namespace aarch64 } // namespace intel_cpu -} // namespace ov \ No newline at end 
of file +} // namespace ov diff --git a/src/plugins/intel_cpu/src/nodes/eltwise.cpp b/src/plugins/intel_cpu/src/nodes/eltwise.cpp index c2d23bf9adc89e..54cf435009059d 100644 --- a/src/plugins/intel_cpu/src/nodes/eltwise.cpp +++ b/src/plugins/intel_cpu/src/nodes/eltwise.cpp @@ -257,6 +257,7 @@ std::set> eltwise_precision_helper::get_supported_pre OV_CASE(Algorithm::EltwiseMultiply, jit_multiply_emitter), OV_CASE(Algorithm::EltwiseDivide, jit_divide_emitter), OV_CASE(Algorithm::EltwiseFloor, jit_floor_emitter), + OV_CASE(Algorithm::EltwiseCeiling, jit_ceiling_emitter), OV_CASE(Algorithm::EltwiseFloorMod, jit_floor_mod_emitter), OV_CASE(Algorithm::EltwiseMod, jit_mod_emitter), OV_CASE(Algorithm::EltwiseMaximum, jit_maximum_emitter), @@ -636,6 +637,7 @@ struct jit_uni_eltwise_generic : public jit_uni_eltwise_kernel, public jit_gener OV_CASE(Algorithm::EltwiseMultiply, jit_multiply_emitter), OV_CASE(Algorithm::EltwiseDivide, jit_divide_emitter), OV_CASE(Algorithm::EltwiseFloor, jit_floor_emitter), + OV_CASE(Algorithm::EltwiseCeiling, jit_ceiling_emitter), OV_CASE(Algorithm::EltwiseFloorMod, jit_floor_mod_emitter), OV_CASE(Algorithm::EltwiseMod, jit_mod_emitter), OV_CASE(Algorithm::EltwiseMaximum, jit_maximum_emitter), @@ -1086,6 +1088,9 @@ const std::map& Eltwise::getIn {ov::op::v1::Mod::get_type_info_static(), [](const std::shared_ptr& op, Eltwise& node) { node.algorithm = Algorithm::EltwiseMod; }}, + {ov::op::v0::Ceiling::get_type_info_static(), [](const std::shared_ptr& op, Eltwise& node) { + node.algorithm = Algorithm::EltwiseCeiling; + }}, {ov::op::v0::Floor::get_type_info_static(), [](const std::shared_ptr& op, Eltwise& node) { node.algorithm = Algorithm::EltwiseFloor; }}, @@ -1891,6 +1896,7 @@ class EltwiseRefExecutor : public EltwiseRefBaseExecutor { case Algorithm::EltwiseSubtract: *dst_ptr_f = src_f[0] - src_f[1]; break; case Algorithm::EltwiseMultiply: *dst_ptr_f = src_f[0] * src_f[1]; break; case Algorithm::EltwiseDivide: *dst_ptr_f = src_f[0] / src_f[1]; break; + case Algorithm::EltwiseCeiling: *dst_ptr_f = ceilf(src_f[0]); break; case Algorithm::EltwiseFloor: *dst_ptr_f = floorf(src_f[0]); break; case Algorithm::EltwiseFloorMod: *dst_ptr_f = src_f[0] - floorf(src_f[0] / src_f[1]) * src_f[1]; break; case Algorithm::EltwiseMod: *dst_ptr_f = src_f[0] - truncf(src_f[0] / src_f[1]) * src_f[1]; break; @@ -2098,6 +2104,7 @@ size_t Eltwise::getOpInputsNum() const { case Algorithm::EltwiseRelu: case Algorithm::EltwiseGeluErf: case Algorithm::EltwiseGeluTanh: + case Algorithm::EltwiseCeiling: case Algorithm::EltwiseFloor: case Algorithm::EltwiseElu: case Algorithm::EltwiseTanh: diff --git a/src/plugins/intel_cpu/src/nodes/executors/aarch64/jit_eltwise.cpp b/src/plugins/intel_cpu/src/nodes/executors/aarch64/jit_eltwise.cpp index 3f1031255d1775..6da6b63eb94a72 100644 --- a/src/plugins/intel_cpu/src/nodes/executors/aarch64/jit_eltwise.cpp +++ b/src/plugins/intel_cpu/src/nodes/executors/aarch64/jit_eltwise.cpp @@ -26,6 +26,7 @@ bool JitEltwiseExecutor::isSupported( Algorithm::EltwiseEqual, Algorithm::EltwiseExp, Algorithm::EltwiseFloor, + Algorithm::EltwiseCeiling, Algorithm::EltwiseGeluErf, Algorithm::EltwiseGeluTanh, Algorithm::EltwiseGreater, diff --git a/src/plugins/intel_cpu/src/nodes/kernels/aarch64/jit_uni_eltwise_generic.cpp b/src/plugins/intel_cpu/src/nodes/kernels/aarch64/jit_uni_eltwise_generic.cpp index 04286a0c8aaf68..b7fbfaf16e1587 100644 --- a/src/plugins/intel_cpu/src/nodes/kernels/aarch64/jit_uni_eltwise_generic.cpp +++ 
b/src/plugins/intel_cpu/src/nodes/kernels/aarch64/jit_uni_eltwise_generic.cpp @@ -648,6 +648,7 @@ std::shared_ptr jit_uni_eltwise_generic::create_eltwise_emitte OV_CASE(Algorithm::EltwiseEqual, ov::intel_cpu::aarch64::jit_equal_emitter), OV_CASE(Algorithm::EltwiseExp, ov::intel_cpu::aarch64::jit_exp_emitter), OV_CASE(Algorithm::EltwiseFloor, ov::intel_cpu::aarch64::jit_floor_emitter), + OV_CASE(Algorithm::EltwiseCeiling, ov::intel_cpu::aarch64::jit_ceiling_emitter), OV_CASE(Algorithm::EltwiseHswish, ov::intel_cpu::aarch64::jit_hswish_emitter), OV_CASE(Algorithm::EltwiseIsFinite, ov::intel_cpu::aarch64::jit_is_finite_emitter), OV_CASE(Algorithm::EltwiseIsInf, ov::intel_cpu::aarch64::jit_is_inf_emitter), @@ -828,6 +829,7 @@ std::set> eltwise_precision_helper::get_supported_pre OV_CASE(Algorithm::EltwiseEqual, jit_equal_emitter), OV_CASE(Algorithm::EltwiseExp, jit_exp_emitter), OV_CASE(Algorithm::EltwiseFloor, jit_floor_emitter), + OV_CASE(Algorithm::EltwiseCeiling, jit_ceiling_emitter), OV_CASE(Algorithm::EltwiseGeluErf, jit_gelu_erf_emitter), OV_CASE(Algorithm::EltwiseGeluTanh, jit_gelu_tanh_emitter), OV_CASE(Algorithm::EltwiseGreater, jit_greater_emitter), diff --git a/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/classes/activation.cpp b/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/classes/activation.cpp index 307938fbfec17a..bd81bcf1a41c63 100644 --- a/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/classes/activation.cpp +++ b/src/plugins/intel_cpu/tests/functional/custom/single_layer_tests/classes/activation.cpp @@ -182,6 +182,7 @@ std::string ActivationLayerCPUTest::getPrimitiveType(const utils::ActivationType (activation_type == utils::ActivationTypes::Elu) || (activation_type == utils::ActivationTypes::Exp) || (activation_type == utils::ActivationTypes::Floor) || + (activation_type == utils::ActivationTypes::Ceiling) || (activation_type == utils::ActivationTypes::HSwish) || (activation_type == utils::ActivationTypes::IsInf) || (activation_type == utils::ActivationTypes::HardSigmoid) || @@ -206,6 +207,7 @@ std::string ActivationLayerCPUTest::getPrimitiveType(const utils::ActivationType } #endif if ((activation_type == utils::ActivationTypes::Floor) || + (activation_type == utils::ActivationTypes::Ceiling) || (activation_type == utils::ActivationTypes::IsNaN) || (activation_type == utils::ActivationTypes::IsFinite)) { return "ref"; @@ -246,6 +248,7 @@ const std::map>>& activat {Clamp, {{-2.0f, 2.0f}}}, {Elu, {{0.1f}}}, {Floor, {{}}}, + {Ceiling, {{}}}, {Swish, {{0.1f}}}, {HSwish, {{}}}, {PReLu, {{-0.01f}}}, diff --git a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/skip_tests_config.cpp b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/skip_tests_config.cpp index 6103ecf0dd11f9..764133d52a7fdd 100644 --- a/src/plugins/intel_cpu/tests/functional/shared_tests_instances/skip_tests_config.cpp +++ b/src/plugins/intel_cpu/tests/functional/shared_tests_instances/skip_tests_config.cpp @@ -310,7 +310,7 @@ std::vector disabledTestPatterns() { }; // fp32 floor for bf16 models: conversion issue - retVector.emplace_back(R"(.*smoke.*ActivationLayerCPUTest.*CompareWithRefs/Floor_.*netPRC=bf16.*)"); + retVector.emplace_back(R"(.*smoke.*ActivationLayerCPUTest.*CompareWithRefs/(Floor|Ceiling)_.*netPRC=bf16.*)"); #if defined(OPENVINO_ARCH_X86) retVector.emplace_back(R"(.*DetectionOutputLayerTest.*)"); From 2e3948583c15620258b66b415bc323f3b50ee47b Mon Sep 17 00:00:00 2001 From: "jag.Xu" Date: Fri, 22 Nov 2024 16:13:56 
+0800 Subject: [PATCH 32/62] bugfix for dynamic-shape backedge for static input shape in loop operator intel GPU (#26047)

### Fixed:
This PR includes a bugfix for the Loop operator for the case where inputs of different shapes are fed on different iterations to a loop operation whose input shape is static.

### Cause:
The offending commit made _set_memory_in_body_network_ aware of the shape predictor: when the memory buffer has been over-allocated by the shape predictor, the memory layout might have an unexpected shape, so when handling the backedge memory copy for the next iteration, the memory layout is re-interpreted according to the original layout. In this scenario in TF_Faster_RCNN_Inception_ResNet_v2 with batch size 2, the loop is not unrolled and each iteration handles one batch, as shown in the picture below: a broadcast creates an array and each iteration writes one part of that array. With the set_memory_in_body_network function, **the 2nd iteration's input holding the generated array is cut off, which loses the first batch of data.**

![image](https://github.com/user-attachments/assets/0bcf264c-bbee-4061-98e2-f123db293dfc)

### Solution:
The bugfix acts in two places: graph generation and runtime. In the graph generation phase, the shape of the input primitive takes the shape of the from-node's backedge into account and is marked dynamic according to the from-node's shape. At runtime, set_memory_in_body_network proceeds according to the shapes on both sides of the backedge and compares them with the pre-allocated memory that matches the SINGLE_SHARED type.

### Test:
A test case is added to cover this behavior. It fails on the offending commit (236e1062b290e2d2345f1d1c319e78f15e0a311d) and passes with the change in this PR.
### Tickets: - CVS-143684 --------- Co-authored-by: Ahn, Paul Y --- .../include/intel_gpu/graph/network.hpp | 2 +- .../src/graph/impls/ocl/non_zero.cpp | 5 ++ .../src/graph/include/input_layout_inst.h | 2 +- .../intel_gpu/src/graph/input_layout.cpp | 4 +- src/plugins/intel_gpu/src/graph/loop.cpp | 10 +-- src/plugins/intel_gpu/src/graph/network.cpp | 4 +- .../tests/functional/subgraph_tests/loop.cpp | 61 +++++++++++++------ 7 files changed, 59 insertions(+), 29 deletions(-) diff --git a/src/plugins/intel_gpu/include/intel_gpu/graph/network.hpp b/src/plugins/intel_gpu/include/intel_gpu/graph/network.hpp index f6207e25a5ca41..5b7873c1500638 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/graph/network.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/graph/network.hpp @@ -111,7 +111,7 @@ struct network { engine& get_engine() const { return _engine; } void reset_execution(bool wait = true); - event::ptr set_input_data(const primitive_id& id, memory::ptr data); + event::ptr set_input_data(const primitive_id& id, memory::ptr data, bool need_to_check_memory_to_set = true); std::vector set_output_memory(const primitive_id& id, memory::ptr mem); std::vector> const& get_outputs() { return _outputs; } diff --git a/src/plugins/intel_gpu/src/graph/impls/ocl/non_zero.cpp b/src/plugins/intel_gpu/src/graph/impls/ocl/non_zero.cpp index 44be7824d4b7dd..84f6020e517f28 100644 --- a/src/plugins/intel_gpu/src/graph/impls/ocl/non_zero.cpp +++ b/src/plugins/intel_gpu/src/graph/impls/ocl/non_zero.cpp @@ -95,6 +95,11 @@ struct gather_nonzero_impl : typed_primitive_impl_ocl { update_shapes(*_kernel_data.params, impl_param); (_kernel_data.update_dispatch_data_func)(*_kernel_data.params, _kernel_data); } + + static kernel_impl_params static_canonicalize_shapes(const kernel_impl_params& impl_params) { + auto updated_impl_params = canonicalize_fused_shapes(impl_params); + return updated_impl_params; + } }; namespace detail { diff --git a/src/plugins/intel_gpu/src/graph/include/input_layout_inst.h b/src/plugins/intel_gpu/src/graph/include/input_layout_inst.h index ee59d43bf116aa..a366980aa31e84 100644 --- a/src/plugins/intel_gpu/src/graph/include/input_layout_inst.h +++ b/src/plugins/intel_gpu/src/graph/include/input_layout_inst.h @@ -40,7 +40,7 @@ class typed_primitive_inst : public typed_primitive_inst_base; diff --git a/src/plugins/intel_gpu/src/graph/input_layout.cpp b/src/plugins/intel_gpu/src/graph/input_layout.cpp index 9c60cd187405d0..cc9e1b7d2787cb 100644 --- a/src/plugins/intel_gpu/src/graph/input_layout.cpp +++ b/src/plugins/intel_gpu/src/graph/input_layout.cpp @@ -35,11 +35,11 @@ input_layout_inst::typed_primitive_inst(network& network, input_layout_node cons _has_valid_input = false; // by default input for 'input_layout' is invalid as long as user doesn't call set_data } -event::ptr input_layout_inst::set_data(memory::ptr mem) { +event::ptr input_layout_inst::set_data(memory::ptr mem, bool need_to_check_memory_to_set) { auto ol = get_node_output_layout(); bool empty_mem = mem->size() == 0 && (ol.is_dynamic() || ol.count() == 0); - if (!empty_mem) { + if (!empty_mem && need_to_check_memory_to_set) { check_memory_to_set(*mem, ol); } diff --git a/src/plugins/intel_gpu/src/graph/loop.cpp b/src/plugins/intel_gpu/src/graph/loop.cpp index 15dc768b8248fe..5d842c2a863433 100644 --- a/src/plugins/intel_gpu/src/graph/loop.cpp +++ b/src/plugins/intel_gpu/src/graph/loop.cpp @@ -1002,16 +1002,16 @@ void loop_inst::set_memory_in_body_network(cldnn::network::ptr body_network, const std::shared_ptr& inst, memory::ptr 
mem) { if (inst->is_input()) { // in case where memory buffer has been over-allocated by shape predictor, memory layout might be unexpected shape. - // so memory layout needs to be re-interprete according to original layout. + // so memory layout needs to be re-interpret according to original layout. memory::ptr updated_mem = mem; layout impl_layout = inst->get_impl_params()->get_output_layout(); OPENVINO_ASSERT(impl_layout.bytes_count() <= updated_mem->get_layout().bytes_count(), "impl_params layout size(", impl_layout.to_short_string(), ") should not exceed memory size(", updated_mem->get_layout().to_short_string(), ")"); - if (impl_layout.bytes_count() < updated_mem->get_layout().bytes_count()) { - updated_mem = body_network->get_engine().reinterpret_buffer(*updated_mem, impl_layout); - } - body_network->set_input_data(inst->id(), updated_mem); + // Set need_to_check_memory_to_set to false to set output memory even if the input node has static shape, + body_network->set_input_data(inst->id(), updated_mem, false); + // Update impl_params.output_layouts[0] to updated_mem's layout + inst->update_shape(); } else if (inst->is_output()) { body_network->set_output_memory(inst->id(), mem); } else { diff --git a/src/plugins/intel_gpu/src/graph/network.cpp b/src/plugins/intel_gpu/src/graph/network.cpp index 37bceb53396179..37152b0d9e3b4f 100644 --- a/src/plugins/intel_gpu/src/graph/network.cpp +++ b/src/plugins/intel_gpu/src/graph/network.cpp @@ -312,7 +312,7 @@ void network::reset_execution(bool wait) { } } -event::ptr network::set_input_data(const primitive_id& id, memory::ptr data) { +event::ptr network::set_input_data(const primitive_id& id, memory::ptr data, bool need_to_check_memory_to_set) { GPU_DEBUG_TRACE_DETAIL << "Set input " << id << " " << data->get_layout().to_short_string() << std::endl; auto primitive_inst = find_primitive(id); @@ -322,7 +322,7 @@ event::ptr network::set_input_data(const primitive_id& id, memory::ptr data) { auto input = std::static_pointer_cast(primitive_inst); - return input->set_data(data); + return input->set_data(data, need_to_check_memory_to_set); } void network::add_default_output_chains() { diff --git a/src/plugins/intel_gpu/tests/functional/subgraph_tests/loop.cpp b/src/plugins/intel_gpu/tests/functional/subgraph_tests/loop.cpp index 39501c67e1bbb7..62952dadb57cc8 100644 --- a/src/plugins/intel_gpu/tests/functional/subgraph_tests/loop.cpp +++ b/src/plugins/intel_gpu/tests/functional/subgraph_tests/loop.cpp @@ -307,7 +307,8 @@ using DynamicShapeLoopDynamicInputParams = typename std::tuple< InputShape, InputShape, ov::element::Type, - std::string>; + std::string, + bool>; class DynamicShapeLoopDynamicInputTest : public testing::WithParamInterface, virtual public ov::test::SubgraphBaseTest { @@ -315,6 +316,7 @@ class DynamicShapeLoopDynamicInputTest : public testing::WithParamInterface &obj) { bool static_iter_num; bool static_continue_cond; + bool freeze_input; int64_t max_iter_num; int64_t dynamic_exit; int64_t axis; @@ -331,11 +333,13 @@ class DynamicShapeLoopDynamicInputTest : public testing::WithParamInterfaceset_friendly_name("start_add"); - auto start_mul = cond_input_create(model_type, inputShape, 1); - start_mul->set_friendly_name("start_mul"); + auto start_add2 = cond_input_create(model_type, inputShape, 1); + start_add2->set_friendly_name("start_add2"); auto count = cond_input_create(ov::element::i64, scalarShape, max_iter_num, static_iter_num); count->set_friendly_name("count"); auto skip = cond_input_create(ov::element::boolean, scalarShape, 
true, static_continue_cond); skip->set_friendly_name("skip"); - auto init_const = cond_input_create(model_type, constant_shapes.first, 1); + auto init_const = cond_input_create(model_type, constant_shapes.first, 1, freeze_input); init_const->set_friendly_name("init_const"); auto b_indx = std::make_shared(ov::element::i64, ov::Shape{}); b_indx->set_friendly_name("body_index"); auto b_data_add = std::make_shared(model_type, inputShape); b_data_add->set_friendly_name("b_data_add"); - auto b_data_mul = std::make_shared(model_type, inputShape); - b_data_mul->set_friendly_name("b_data_mul"); + auto b_data_add2 = std::make_shared(model_type, inputShape); + b_data_add2->set_friendly_name("b_data_add2"); auto b_data_broadcast = std::make_shared(model_type, constant_shapes.first); b_data_broadcast->set_friendly_name("b_data_broadcast"); auto b_indx_cast = std::make_shared(b_indx, model_type); b_indx_cast->set_friendly_name("body_index_cast"); auto b_add = std::make_shared(b_data_add, b_indx_cast); b_add->set_friendly_name("body_add"); - auto b_mul = std::make_shared(b_data_mul, b_indx_cast); - b_mul->set_friendly_name("body_mul"); - auto b_shapeof1 = std::make_shared(b_data_mul); + auto b_add2 = std::make_shared(b_data_add2, b_indx_cast); + b_add2->set_friendly_name("body_mul"); + auto b_shapeof1 = std::make_shared(b_data_add2); b_shapeof1->set_friendly_name("b_shapeof1"); auto b_shapeof2 = std::make_shared(b_data_broadcast); b_shapeof2->set_friendly_name("b_shapeof2"); @@ -432,7 +438,9 @@ class DynamicShapeLoopDynamicInputTest : public testing::WithParamInterfaceset_friendly_name("b_max"); auto b_broadcast = std::make_shared(b_data_broadcast, b_max); b_broadcast->set_friendly_name("b_broadcast"); - auto b_mul2 = std::make_shared(b_broadcast, b_mul); + auto b_reshape = std::make_shared(b_broadcast, b_shapeof1, false); + b_reshape->set_friendly_name("b_reshape"); + auto b_mul2 = std::make_shared(b_reshape, b_add2); b_mul2->set_friendly_name("b_mul2"); std::shared_ptr b_cond; @@ -447,8 +455,8 @@ class DynamicShapeLoopDynamicInputTest : public testing::WithParamInterface( - ov::OutputVector {b_cond, b_add, b_mul, b_mul2}, // TODO: check with reverse - ov::ParameterVector {b_indx, b_data_add, b_data_mul, b_data_broadcast}); // TODO: check with reverse + ov::OutputVector {b_cond, b_add, b_add2, b_mul2}, // TODO: check with reverse + ov::ParameterVector {b_indx, b_data_add, b_data_add2, b_data_broadcast}); // TODO: check with reverse body->set_friendly_name("body_network"); auto loop = std::make_shared(count, skip); @@ -456,15 +464,15 @@ class DynamicShapeLoopDynamicInputTest : public testing::WithParamInterfaceset_function(body); loop->set_special_body_ports({0, 0}); loop->set_merged_input(b_data_add, start_add, b_add); - loop->set_merged_input(b_data_mul, start_mul, b_mul); + loop->set_merged_input(b_data_add2, start_add2, b_add2); loop->set_merged_input(b_data_broadcast, init_const, b_mul2); if (axis == -1) { loop->get_iter_value(b_add, -1); - loop->get_iter_value(b_mul, -1); + loop->get_iter_value(b_add2, -1); loop->get_iter_value(b_mul2, -1); } else { loop->get_concatenated_slices(b_add, 0, 1, 1, -1, axis); - loop->get_concatenated_slices(b_mul, 0, 1, 1, -1, axis); + loop->get_concatenated_slices(b_add2, 0, 1, 1, -1, axis); } ov::ResultVector results; @@ -507,6 +515,23 @@ INSTANTIATE_TEST_SUITE_P(smoke_DynamicShapeLoop_dynamic, DynamicShapeLoopDynamic /* data_shape */ testing::ValuesIn(inputs_dynamic_shape), /* constant_shape */ testing::ValuesIn(constant_dynamic_shape), /* model_type */ 
testing::ValuesIn(model_types), - /* device */ testing::Values(ov::test::utils::DEVICE_GPU)), + /* device */ testing::Values(ov::test::utils::DEVICE_GPU), + /* freeze_input */ testing::Values(false)), + DynamicShapeLoopDynamicInputTest::getTestCaseName); + +std::vector constant_static_shape = { + InputShape({1, 1, 1}, {{1, 1, 1}, {1, 1, 1}, {1, 1, 1}}), +}; + +INSTANTIATE_TEST_SUITE_P(smoke_DynamicShapeLoop_conflict_dynamic, DynamicShapeLoopDynamicInputTest, + testing::Combine( + /* static_continue_cond */ testing::Values(true), + /* args_pack */ testing::ValuesIn(dynamic_loop_input), + /* start_value */ testing::Values(0), + /* data_shape */ testing::ValuesIn(inputs_dynamic_shape), + /* constant_shape */ testing::ValuesIn(constant_static_shape), + /* model_type */ testing::ValuesIn(model_types), + /* device */ testing::Values(ov::test::utils::DEVICE_GPU), + /* freeze_input */ testing::Values(true)), DynamicShapeLoopDynamicInputTest::getTestCaseName); } // namespace \ No newline at end of file From f6e0ba02a580cebfb4a4f9305b07a806c5e9509f Mon Sep 17 00:00:00 2001 From: Katarzyna Mitrus Date: Fri, 22 Nov 2024 10:07:15 +0100 Subject: [PATCH 33/62] [Op][Internal] Rename SwiGLU to GLU (#27683) ### Details: - Rename internal op SwiGLU to GLU (no naming changes for GPU swiglu kernel in this PR) Current SwiGLU can be also GeGLU, it depends on the glu_type member. It has been proposed by several people to rename this op and make the name more generic like GLU. Related comment: https://github.com/openvinotoolkit/openvino/pull/27579#discussion_r1846130138 ### Tickets: - 157623 --- .../include/ov_ops/{swiglu.hpp => glu.hpp} | 24 +++--- .../{swiglu_fusion.hpp => glu_fusion.hpp} | 6 +- .../src/ov_ops/{swiglu.cpp => glu.cpp} | 34 ++++----- .../{swiglu_fusion.cpp => glu_fusion.cpp} | 32 ++++---- ...lu_fusion_test.cpp => glu_fusion_test.cpp} | 74 +++++++++---------- .../intel_gpu/plugin/primitives_list.hpp | 2 +- .../include/intel_gpu/primitives/swiglu.hpp | 6 +- src/plugins/intel_gpu/src/graph/swiglu.cpp | 4 +- .../kernels/swiglu/swiglu_kernel_base.cpp | 4 +- .../kernels/swiglu/swiglu_kernel_base.h | 6 +- .../intel_gpu/src/plugin/ops/swiglu.cpp | 8 +- .../src/plugin/transformations_pipeline.cpp | 4 +- .../tests/unit/test_cases/swiglu_gpu_test.cpp | 4 +- 13 files changed, 104 insertions(+), 104 deletions(-) rename src/common/transformations/include/ov_ops/{swiglu.hpp => glu.hpp} (80%) rename src/common/transformations/include/transformations/common_optimizations/{swiglu_fusion.hpp => glu_fusion.hpp} (69%) rename src/common/transformations/src/ov_ops/{swiglu.cpp => glu.cpp} (67%) rename src/common/transformations/src/transformations/common_optimizations/{swiglu_fusion.cpp => glu_fusion.cpp} (83%) rename src/common/transformations/tests/common_optimizations/{swiglu_fusion_test.cpp => glu_fusion_test.cpp} (70%) diff --git a/src/common/transformations/include/ov_ops/swiglu.hpp b/src/common/transformations/include/ov_ops/glu.hpp similarity index 80% rename from src/common/transformations/include/ov_ops/swiglu.hpp rename to src/common/transformations/include/ov_ops/glu.hpp index f03c1ac1a26666..760641978b574d 100644 --- a/src/common/transformations/include/ov_ops/swiglu.hpp +++ b/src/common/transformations/include/ov_ops/glu.hpp @@ -11,16 +11,16 @@ namespace ov { namespace op { namespace internal { -/// \brief Operator performing Swish Gated Linear Unit Activation +/// \brief Operator performing Gated Linear Unit Activation /// This operation performs gated linear unit activation that combines swish or gelu 
activation function -class TRANSFORMATIONS_API SwiGLU : public ov::op::Op { +class TRANSFORMATIONS_API GLU : public ov::op::Op { public: - OPENVINO_OP("SwiGLU", "ie_internal_opset"); + OPENVINO_OP("GLU", "ie_internal_opset"); enum GluType { Swish = 0, Gelu, Gelu_Tanh }; - SwiGLU() = default; - /// \brief Constructs an SwiGLU operation. + GLU() = default; + /// \brief Constructs an GLU operation. /// /// \param data Input tensor with data /// \param axis The index of an axis in "data" along which to perform the split @@ -28,12 +28,12 @@ class TRANSFORMATIONS_API SwiGLU : public ov::op::Op { /// \param glu_type GLU type, one of Swish, Gelu and Gelu_Tanh /// \param split_to_glu_idx Output index of variadic split, which is connected to GLU /// \param output_type Output element type - SwiGLU(const Output& data, - int64_t axis, - int64_t split_lengths, - const GluType glu_type, - const size_t split_to_glu_idx, - const ov::element::Type output_type = ov::element::undefined); + GLU(const Output& data, + int64_t axis, + int64_t split_lengths, + const GluType glu_type, + const size_t split_to_glu_idx, + const ov::element::Type output_type = ov::element::undefined); bool visit_attributes(ov::AttributeVisitor& visitor) override; @@ -76,7 +76,7 @@ class TRANSFORMATIONS_API SwiGLU : public ov::op::Op { }; // TODO 157615: Move to shape_inference -TRANSFORMATIONS_API std::vector shape_infer(const SwiGLU* op, +TRANSFORMATIONS_API std::vector shape_infer(const GLU* op, std::vector input_shapes); } // namespace internal diff --git a/src/common/transformations/include/transformations/common_optimizations/swiglu_fusion.hpp b/src/common/transformations/include/transformations/common_optimizations/glu_fusion.hpp similarity index 69% rename from src/common/transformations/include/transformations/common_optimizations/swiglu_fusion.hpp rename to src/common/transformations/include/transformations/common_optimizations/glu_fusion.hpp index 18205bd1a1e8e2..7ec71a05027d80 100644 --- a/src/common/transformations/include/transformations/common_optimizations/swiglu_fusion.hpp +++ b/src/common/transformations/include/transformations/common_optimizations/glu_fusion.hpp @@ -11,10 +11,10 @@ namespace ov { namespace pass { -class TRANSFORMATIONS_API SwiGLUFusion : public ov::pass::MatcherPass { +class TRANSFORMATIONS_API GLUFusion : public ov::pass::MatcherPass { public: - OPENVINO_RTTI("SwiGLUFusion", "0"); - SwiGLUFusion(); + OPENVINO_RTTI("GLUFusion", "0"); + GLUFusion(); }; } // namespace pass diff --git a/src/common/transformations/src/ov_ops/swiglu.cpp b/src/common/transformations/src/ov_ops/glu.cpp similarity index 67% rename from src/common/transformations/src/ov_ops/swiglu.cpp rename to src/common/transformations/src/ov_ops/glu.cpp index b3b9e71076aee0..bc3dfb89ab8b9b 100644 --- a/src/common/transformations/src/ov_ops/swiglu.cpp +++ b/src/common/transformations/src/ov_ops/glu.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" #include "openvino/core/partial_shape.hpp" #include "openvino/core/validation_util.hpp" @@ -13,12 +13,12 @@ namespace ov { namespace op { namespace internal { -SwiGLU::SwiGLU(const Output& data, - int64_t axis, - int64_t split_lengths, - const GluType glu_type, - const size_t split_to_glu_idx, - const ov::element::Type output_type) +GLU::GLU(const Output& data, + int64_t axis, + int64_t split_lengths, + const GluType glu_type, + const size_t split_to_glu_idx, + const ov::element::Type output_type) : Op({data}), m_axis(axis), 
m_split_lengths(split_lengths), @@ -28,14 +28,14 @@ SwiGLU::SwiGLU(const Output& data, validate_and_infer_types(); } -bool SwiGLU::visit_attributes(ov::AttributeVisitor& visitor) { +bool GLU::visit_attributes(ov::AttributeVisitor& visitor) { visitor.on_attribute("axis", m_axis); visitor.on_attribute("split_lengths", m_split_lengths); visitor.on_attribute("output_type", m_output_type); return true; } -void SwiGLU::validate_and_infer_types() { +void GLU::validate_and_infer_types() { auto output_type = m_output_type == ov::element::undefined ? get_input_element_type(0) : m_output_type; std::vector input_shapes = {get_input_partial_shape(0), @@ -45,17 +45,17 @@ void SwiGLU::validate_and_infer_types() { set_output_type(0, output_type, shape_infer(this, input_shapes)[0]); } -std::shared_ptr SwiGLU::clone_with_new_inputs(const ov::OutputVector& new_args) const { +std::shared_ptr GLU::clone_with_new_inputs(const ov::OutputVector& new_args) const { check_new_args_count(this, new_args); - return std::make_shared(new_args.at(0), - m_axis, - m_split_lengths, - m_glu_type, - m_split_to_glu_idx, - m_output_type); + return std::make_shared(new_args.at(0), + m_axis, + m_split_lengths, + m_glu_type, + m_split_to_glu_idx, + m_output_type); } -std::vector shape_infer(const SwiGLU* op, std::vector input_shapes) { +std::vector shape_infer(const GLU* op, std::vector input_shapes) { ov::op::v1::VariadicSplit variadic_split; std::vector axis = {op->get_axis()}; std::vector split_lengths = {op->get_split_lengths(), -1}; diff --git a/src/common/transformations/src/transformations/common_optimizations/swiglu_fusion.cpp b/src/common/transformations/src/transformations/common_optimizations/glu_fusion.cpp similarity index 83% rename from src/common/transformations/src/transformations/common_optimizations/swiglu_fusion.cpp rename to src/common/transformations/src/transformations/common_optimizations/glu_fusion.cpp index 84c6dbceb39f2f..2b6c2092a054c2 100644 --- a/src/common/transformations/src/transformations/common_optimizations/swiglu_fusion.cpp +++ b/src/common/transformations/src/transformations/common_optimizations/glu_fusion.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "transformations/common_optimizations/swiglu_fusion.hpp" +#include "transformations/common_optimizations/glu_fusion.hpp" #include "openvino/core/rt_info.hpp" #include "openvino/op/constant.hpp" @@ -13,13 +13,13 @@ #include "openvino/pass/manager.hpp" #include "openvino/pass/pattern/op/or.hpp" #include "openvino/pass/pattern/op/wrap_type.hpp" -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" #include "transformations/utils/utils.hpp" namespace ov { namespace pass { -SwiGLUFusion::SwiGLUFusion() { +GLUFusion::GLUFusion() { using namespace ov::pass::pattern; using ov::pass::pattern::op::Or; @@ -28,8 +28,8 @@ SwiGLUFusion::SwiGLUFusion() { return out_ps.rank().is_static() && out_ps[out_ps.rank().get_length() - 1].is_static() && out_ps.size() <= 5; }; - // Detect SwiGLU decomposition pattern - // SwiGLU(Xw, Xv, beta) = (Xw * (1.0 + exp(-beta * Xw))) * Xv + // Detect GLU decomposition pattern + // GLU(Xw, Xv, beta) = (Xw * (1.0 + exp(-beta * Xw))) * Xv auto data_m = any_input(last_dim_static); // VariadicSplit(X, axis, split_lengths) = Xw, Xv @@ -60,11 +60,11 @@ SwiGLUFusion::SwiGLUFusion() { auto isSwiGLU = pattern_map.count(swish_m); auto isGeGLU = pattern_map.count(gelu_m); size_t split_to_glu_idx = 0; - ov::op::internal::SwiGLU::GluType glu_type = ov::op::internal::SwiGLU::GluType::Swish; + 
ov::op::internal::GLU::GluType glu_type = ov::op::internal::GLU::GluType::Swish; if (isSwiGLU) { auto swish = std::dynamic_pointer_cast(pattern_map.at(swish_m).get_node_shared_ptr()); - glu_type = ov::op::internal::SwiGLU::GluType::Swish; + glu_type = ov::op::internal::GLU::GluType::Swish; split_to_glu_idx = swish->input_value(0).get_index(); size_t split_in_idx = ov::is_type(mul->get_input_node_shared_ptr(0)) ? 1 : 0; @@ -73,8 +73,8 @@ SwiGLUFusion::SwiGLUFusion() { } else if (isGeGLU) { auto gelu = std::dynamic_pointer_cast(pattern_map.at(gelu_m).get_node_shared_ptr()); glu_type = (gelu->get_approximation_mode() == ov::op::GeluApproximationMode::ERF) - ? ov::op::internal::SwiGLU::GluType::Gelu - : ov::op::internal::SwiGLU::GluType::Gelu_Tanh; + ? ov::op::internal::GLU::GluType::Gelu + : ov::op::internal::GLU::GluType::Gelu_Tanh; split_to_glu_idx = gelu->input_value(0).get_index(); size_t split_in_idx = ov::is_type(mul->get_input_node_shared_ptr(0)) ? 1 : 0; @@ -107,12 +107,12 @@ SwiGLUFusion::SwiGLUFusion() { auto data = pattern_map.at(data_m); auto output_type = m.get_match_root()->get_output_element_type(0); - auto swiglu = std::make_shared(data, - axis_value, - split_lengths_value, - glu_type, - split_to_glu_idx, - output_type); + auto swiglu = std::make_shared(data, + axis_value, + split_lengths_value, + glu_type, + split_to_glu_idx, + output_type); swiglu->set_friendly_name(m.get_match_root()->get_friendly_name()); ov::copy_runtime_info(m.get_matched_nodes(), swiglu); ov::replace_node(m.get_match_root(), swiglu); @@ -120,7 +120,7 @@ SwiGLUFusion::SwiGLUFusion() { return true; }; - auto m = std::make_shared(mul_m, "SwiGLUFusion"); + auto m = std::make_shared(mul_m, "GLUFusion"); this->register_matcher(m, callback); } diff --git a/src/common/transformations/tests/common_optimizations/swiglu_fusion_test.cpp b/src/common/transformations/tests/common_optimizations/glu_fusion_test.cpp similarity index 70% rename from src/common/transformations/tests/common_optimizations/swiglu_fusion_test.cpp rename to src/common/transformations/tests/common_optimizations/glu_fusion_test.cpp index 75c8fba75024c3..4d879be57672cd 100644 --- a/src/common/transformations/tests/common_optimizations/swiglu_fusion_test.cpp +++ b/src/common/transformations/tests/common_optimizations/glu_fusion_test.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "transformations/common_optimizations/swiglu_fusion.hpp" +#include "transformations/common_optimizations/glu_fusion.hpp" #include @@ -18,13 +18,13 @@ #include "openvino/op/swish.hpp" #include "openvino/op/variadic_split.hpp" #include "openvino/pass/manager.hpp" -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" #include "transformations/utils/utils.hpp" using namespace testing; using namespace ov::pass; -TEST_F(TransformationTestsF, SwiGLUFusionTest1) { +TEST_F(TransformationTestsF, GLUFusionTest1) { { auto input = std::make_shared(ov::element::f16, ov::PartialShape{2, 1, 6}); auto axis_const = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{}, {-1}); @@ -34,24 +34,24 @@ TEST_F(TransformationTestsF, SwiGLUFusionTest1) { auto mul = std::make_shared(swish, variadic_split->output(1)); model = std::make_shared(ov::NodeVector{mul}, ov::ParameterVector{input}); - manager.register_pass(); + manager.register_pass(); } { int64_t axis = -1; int64_t split_lenghts = 3; auto input = std::make_shared(ov::element::f16, ov::PartialShape{2, 1, 6}); - auto swiglu = std::make_shared(input, - axis, - split_lenghts, - 
ov::op::internal::SwiGLU::GluType::Swish, - 0, - ov::element::f16); + auto swiglu = std::make_shared(input, + axis, + split_lenghts, + ov::op::internal::GLU::GluType::Swish, + 0, + ov::element::f16); model_ref = std::make_shared(ov::NodeVector{swiglu}, ov::ParameterVector{input}); } } -TEST_F(TransformationTestsF, SwiGLUFusionTest2) { +TEST_F(TransformationTestsF, GLUFusionTest2) { { auto input = std::make_shared(ov::element::f16, ov::PartialShape{-1, -1, 6}); auto axis_const = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{}, {0}); @@ -61,11 +61,11 @@ TEST_F(TransformationTestsF, SwiGLUFusionTest2) { auto mul = std::make_shared(swish, variadic_split->output(1)); model = std::make_shared(ov::NodeVector{mul}, ov::ParameterVector{input}); - manager.register_pass(); + manager.register_pass(); } } -TEST_F(TransformationTestsF, SwiGLUFusionTest3) { +TEST_F(TransformationTestsF, GLUFusionTest3) { { auto input = std::make_shared(ov::element::f16, ov::PartialShape{-1, -1, 6}); auto axis_const = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{}, {-1}); @@ -75,24 +75,24 @@ TEST_F(TransformationTestsF, SwiGLUFusionTest3) { auto mul = std::make_shared(swish, variadic_split->output(1)); model = std::make_shared(ov::NodeVector{mul}, ov::ParameterVector{input}); - manager.register_pass(); + manager.register_pass(); } { int64_t axis = -1; int64_t split_lenghts = 3; auto input = std::make_shared(ov::element::f16, ov::PartialShape{-1, -1, 6}); - auto swiglu = std::make_shared(input, - axis, - split_lenghts, - ov::op::internal::SwiGLU::GluType::Swish, - 0, - ov::element::f16); + auto swiglu = std::make_shared(input, + axis, + split_lenghts, + ov::op::internal::GLU::GluType::Swish, + 0, + ov::element::f16); model_ref = std::make_shared(ov::NodeVector{swiglu}, ov::ParameterVector{input}); } } -TEST_F(TransformationTestsF, SwiGLUFusionTest3ReverseOrder) { +TEST_F(TransformationTestsF, GLUFusionTest3ReverseOrder) { { auto input = std::make_shared(ov::element::f16, ov::PartialShape{-1, -1, 6}); auto axis_const = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{}, {-1}); @@ -102,24 +102,24 @@ TEST_F(TransformationTestsF, SwiGLUFusionTest3ReverseOrder) { auto mul = std::make_shared(variadic_split->output(1), swish); model = std::make_shared(ov::NodeVector{mul}, ov::ParameterVector{input}); - manager.register_pass(); + manager.register_pass(); } { int64_t axis = -1; int64_t split_lenghts = 3; auto input = std::make_shared(ov::element::f16, ov::PartialShape{-1, -1, 6}); - auto swiglu = std::make_shared(input, - axis, - split_lenghts, - ov::op::internal::SwiGLU::GluType::Swish, - 0, - ov::element::f16); + auto swiglu = std::make_shared(input, + axis, + split_lenghts, + ov::op::internal::GLU::GluType::Swish, + 0, + ov::element::f16); model_ref = std::make_shared(ov::NodeVector{swiglu}, ov::ParameterVector{input}); } } -TEST_F(TransformationTestsF, SwiGLUFusionTest4) { +TEST_F(TransformationTestsF, GLUFusionTest4) { { auto input = std::make_shared(ov::element::f16, ov::PartialShape{-1, -1, 6}); auto axis_const = ov::op::v0::Constant::create(ov::element::i64, ov::Shape{}, {-1}); @@ -129,7 +129,7 @@ TEST_F(TransformationTestsF, SwiGLUFusionTest4) { auto mul = std::make_shared(swish, variadic_split->output(0)); model = std::make_shared(ov::NodeVector{mul}, ov::ParameterVector{input}); - manager.register_pass(); + manager.register_pass(); } } @@ -143,18 +143,18 @@ TEST_F(TransformationTestsF, GeGLUFusionTest1) { auto mul = std::make_shared(variadic_split->output(0), gelu); model = 
std::make_shared(ov::NodeVector{mul}, ov::ParameterVector{input}); - manager.register_pass(); + manager.register_pass(); } { int64_t axis = -1; int64_t split_lenghts = 3; auto input = std::make_shared(ov::element::f16, ov::PartialShape{2, 1, 6}); - auto swiglu = std::make_shared(input, - axis, - split_lenghts, - ov::op::internal::SwiGLU::GluType::Gelu, - 1, - ov::element::f16); + auto swiglu = std::make_shared(input, + axis, + split_lenghts, + ov::op::internal::GLU::GluType::Gelu, + 1, + ov::element::f16); model_ref = std::make_shared(ov::NodeVector{swiglu}, ov::ParameterVector{input}); } diff --git a/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp b/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp index 27e5540a3786ab..ced915d25610e8 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp @@ -287,7 +287,7 @@ REGISTER_FACTORY(internal, KVCacheCompressed); REGISTER_FACTORY(internal, ReadValue); REGISTER_FACTORY(internal, ReadValues); REGISTER_FACTORY(internal, Gemm); -REGISTER_FACTORY(internal, SwiGLU); +REGISTER_FACTORY(internal, GLU); REGISTER_FACTORY(internal, IndirectGemm); REGISTER_FACTORY(internal, Convolution); REGISTER_FACTORY(internal, Placeholder); diff --git a/src/plugins/intel_gpu/include/intel_gpu/primitives/swiglu.hpp b/src/plugins/intel_gpu/include/intel_gpu/primitives/swiglu.hpp index 8e9ea5aff03902..1a72e36d471dfc 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/primitives/swiglu.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/primitives/swiglu.hpp @@ -3,7 +3,7 @@ // #pragma once -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" #include "primitive.hpp" namespace cldnn { @@ -25,7 +25,7 @@ struct swiglu : public primitive_base { const input_info& input, const int64_t& axis, const int64_t& split_lengths, - const ov::op::internal::SwiGLU::GluType glu_type, + const ov::op::internal::GLU::GluType glu_type, const size_t split_to_glu_idx, const tensor output_size) : primitive_base(id, {input}), @@ -37,7 +37,7 @@ struct swiglu : public primitive_base { int64_t axis = 0; int64_t split_lengths = 0; - ov::op::internal::SwiGLU::GluType glu_type = ov::op::internal::SwiGLU::GluType::Swish; + ov::op::internal::GLU::GluType glu_type = ov::op::internal::GLU::GluType::Swish; size_t split_to_glu_idx = 0; tensor output_size; diff --git a/src/plugins/intel_gpu/src/graph/swiglu.cpp b/src/plugins/intel_gpu/src/graph/swiglu.cpp index 127b8645870157..e82e4e974b1868 100644 --- a/src/plugins/intel_gpu/src/graph/swiglu.cpp +++ b/src/plugins/intel_gpu/src/graph/swiglu.cpp @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" #include "swiglu_inst.h" #include "primitive_type_base.h" @@ -28,7 +28,7 @@ std::vector swiglu_inst::calc_output_layouts(swiglu_node const& /*node*/ auto output_type = impl_param.desc->output_data_types[0].value_or(input_layout.data_type); auto output_format = input_layout.format; - ov::op::internal::SwiGLU op; + ov::op::internal::GLU op; op.set_axis(desc->axis); op.set_split_lengths(desc->split_lengths); diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.cpp b/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.cpp index b3c31f31128c49..b6b67bd4ed222d 100644 --- a/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.cpp +++ 
b/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.cpp @@ -25,10 +25,10 @@ JitConstants SwiGLUKernelBase::GetJitConstants(const swiglu_params& params, cons jit.AddConstants({MakeJitConstant("LWS1", dispatchData.lws[1])}); jit.AddConstants({MakeJitConstant("LWS2", dispatchData.lws[2])}); const std::string type_suffix = (GetAccumulatorType(params) == Datatype::F32) ? "f" : "h"; - if (params.glu_type == ov::op::internal::SwiGLU::GluType::Gelu) { + if (params.glu_type == ov::op::internal::GLU::GluType::Gelu) { jit.AddConstants({MakeJitConstant("GEGLU_HALF", "0.5" + type_suffix)}); jit.AddConstants({MakeJitConstant("GEGLU_MULT", "0.7071067811865475" + type_suffix)}); - } else if (params.glu_type == ov::op::internal::SwiGLU::GluType::Gelu_Tanh) { + } else if (params.glu_type == ov::op::internal::GLU::GluType::Gelu_Tanh) { jit.AddConstants({MakeJitConstant("GEGLU_HALF", "0.5" + type_suffix)}); jit.AddConstants({MakeJitConstant("GEGLU_MULT", "0.044715" + type_suffix)}); jit.AddConstants({MakeJitConstant("GEGLU_SQUARE_2_OVER_PI", "0.79788458347320556640625" + type_suffix)}); diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.h b/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.h index 73d679c8a643fb..2f5c046690f78d 100644 --- a/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.h +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/swiglu/swiglu_kernel_base.h @@ -6,7 +6,7 @@ #include "kernel_base_opencl.h" #include "kernel_selector_params.h" -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" namespace kernel_selector { //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -14,10 +14,10 @@ namespace kernel_selector { /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// struct swiglu_params : public base_params { swiglu_params() : base_params(KernelType::SWIGLU), axis(0), split_length(0), - glu_type(ov::op::internal::SwiGLU::GluType::Swish), split_to_glu_idx(0) {} + glu_type(ov::op::internal::GLU::GluType::Swish), split_to_glu_idx(0) {} int32_t axis; int32_t split_length; - ov::op::internal::SwiGLU::GluType glu_type; + ov::op::internal::GLU::GluType glu_type; int32_t split_to_glu_idx; }; diff --git a/src/plugins/intel_gpu/src/plugin/ops/swiglu.cpp b/src/plugins/intel_gpu/src/plugin/ops/swiglu.cpp index 32d2f296670a91..5df2cafd41a41f 100644 --- a/src/plugins/intel_gpu/src/plugin/ops/swiglu.cpp +++ b/src/plugins/intel_gpu/src/plugin/ops/swiglu.cpp @@ -6,14 +6,14 @@ #include "intel_gpu/plugin/common_utils.hpp" #include "intel_gpu/primitives/swiglu.hpp" -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" -using SwiGLU = ov::op::internal::SwiGLU; +using GLU = ov::op::internal::GLU; namespace ov { namespace intel_gpu { -static void CreateSwiGLUOp(ProgramBuilder& p, const std::shared_ptr& op) { +static void CreateGLUOp(ProgramBuilder& p, const std::shared_ptr& op) { validate_inputs_count(op, {1}); auto inputs = p.GetInputInfo(op); std::string primitive_name = layer_type_name_ID(op); @@ -41,7 +41,7 @@ static void CreateSwiGLUOp(ProgramBuilder& p, const std::shared_ptr& op) } } -REGISTER_FACTORY_IMPL(internal, SwiGLU); +REGISTER_FACTORY_IMPL(internal, GLU); } // namespace intel_gpu } // namespace ov diff --git a/src/plugins/intel_gpu/src/plugin/transformations_pipeline.cpp b/src/plugins/intel_gpu/src/plugin/transformations_pipeline.cpp index 
01696615e545f8..f4ec7afb5c3d1e 100644 --- a/src/plugins/intel_gpu/src/plugin/transformations_pipeline.cpp +++ b/src/plugins/intel_gpu/src/plugin/transformations_pipeline.cpp @@ -93,7 +93,7 @@ #include "transformations/common_optimizations/move_eltwise_up_data_movement.hpp" #include "transformations/common_optimizations/mvn_fusion.hpp" #include "transformations/common_optimizations/softmax_fusion.hpp" -#include "transformations/common_optimizations/swiglu_fusion.hpp" +#include "transformations/common_optimizations/glu_fusion.hpp" #include "transformations/common_optimizations/transpose_sinking.hpp" #include "transformations/common_optimizations/weights_dequantize_to_fake_quantize.hpp" #include "transformations/common_optimizations/wrap_interpolate_into_transposes.hpp" @@ -943,7 +943,7 @@ void TransformationsPipeline::apply(std::shared_ptr func) { } manager.register_pass(); - manager.register_pass(); + manager.register_pass(); manager.register_pass(); auto kv_cache_compression_dt = config.get_property(ov::hint::kv_cache_precision); diff --git a/src/plugins/intel_gpu/tests/unit/test_cases/swiglu_gpu_test.cpp b/src/plugins/intel_gpu/tests/unit/test_cases/swiglu_gpu_test.cpp index 0d96a165108972..11bca6e27ba942 100644 --- a/src/plugins/intel_gpu/tests/unit/test_cases/swiglu_gpu_test.cpp +++ b/src/plugins/intel_gpu/tests/unit/test_cases/swiglu_gpu_test.cpp @@ -7,7 +7,7 @@ #include #include #include -#include "ov_ops/swiglu.hpp" +#include "ov_ops/glu.hpp" #include "swiglu_inst.h" using namespace cldnn; @@ -64,7 +64,7 @@ TEST(swiglu_gpu_test, swiglu_test_bfyx_dyn) { topology topology; topology.add(input_layout("input", input_layout_dynamic)); - topology.add(swiglu("swiglu", input_info("input"), -1, 3, ov::op::internal::SwiGLU::GluType::Swish, 0, tensor())); + topology.add(swiglu("swiglu", input_info("input"), -1, 3, ov::op::internal::GLU::GluType::Swish, 0, tensor())); ExecutionConfig config = get_test_default_config(engine); config.set_property(ov::intel_gpu::allow_new_shape_infer(true)); From c542f21095e2ba43a40cfda6750bf5bcecc856b9 Mon Sep 17 00:00:00 2001 From: Alina Kladieva Date: Fri, 22 Nov 2024 16:01:16 +0100 Subject: [PATCH 34/62] [GHA] Add merge queue stub check for ci/jenkins (#27703) Allows to keep Jenkins running in pre-commits, but not in merge queue without paralyzing queue when Jenkins is offline Signed-off-by: Alina Kladieva --- .github/workflows/merge_queue_stub.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .github/workflows/merge_queue_stub.yml diff --git a/.github/workflows/merge_queue_stub.yml b/.github/workflows/merge_queue_stub.yml new file mode 100644 index 00000000000000..a3d2e0b456a106 --- /dev/null +++ b/.github/workflows/merge_queue_stub.yml @@ -0,0 +1,13 @@ +on: + merge_group: + +jobs: + merge_group_stub_check: + name: ci/jenkins + runs-on: ubuntu-latest + defaults: + run: + shell: bash + if: ${{ github.event_name == 'merge_group' }} + steps: + - run: echo "Just a stub check to keep Jenkins running in pre-commits but not in merge queue" From edcd4d85e9d70e6803159f49c44ae313aa2c2a62 Mon Sep 17 00:00:00 2001 From: zhaohongbo Date: Fri, 22 Nov 2024 20:28:06 +0800 Subject: [PATCH 35/62] [GPU] Support rope for glm4v (#27545) support rope kernel for glm4v **Tickets:** CVS-157422 --------- Co-authored-by: Chen Peter Co-authored-by: Xiake Sun --- .../fuse_rotary_positional_embeddings.cpp | 11 +- .../fuse_rotary_positional_embeddings.cpp | 108 ++++++++++++++++++ 2 files changed, 115 insertions(+), 4 deletions(-) diff --git 
a/src/common/transformations/src/transformations/common_optimizations/fuse_rotary_positional_embeddings.cpp b/src/common/transformations/src/transformations/common_optimizations/fuse_rotary_positional_embeddings.cpp index 143603f0415373..f002e0043a8744 100644 --- a/src/common/transformations/src/transformations/common_optimizations/fuse_rotary_positional_embeddings.cpp +++ b/src/common/transformations/src/transformations/common_optimizations/fuse_rotary_positional_embeddings.cpp @@ -549,7 +549,7 @@ ov::pass::RoPEFusionChatGLM::RoPEFusionChatGLM(int split_output_id, const bool s auto cat_Concat_505 = makePattern({flatten_Reshape_501, slice_Slice_443 | var_split_1->output(1)}, {{"axis", -1}}); - auto result = cat_Concat_505; + auto result = cat_Concat_505 | flatten_Reshape_501; matcher_pass_callback callback = [=](ov::pass::pattern::Matcher& m) { const auto& pattern_map = m.get_pattern_value_map(); @@ -577,6 +577,11 @@ ov::pass::RoPEFusionChatGLM::RoPEFusionChatGLM(int split_output_id, const bool s config.slice_stop = static_cast(config.slice_start + validator["total_size_k"]); } + if (ov::is_type(root)) { + if (config.rotary_ndims != config.head_size) + return false; + } + new_args.push_back(pattern_map.at(qkv_linear)); new_args.push_back(pattern_map.at(cos_sin_cache)); new_args.push_back(pattern_map.at(cos_sin_cache)); @@ -585,9 +590,7 @@ ov::pass::RoPEFusionChatGLM::RoPEFusionChatGLM(int split_output_id, const bool s auto new_node = std::make_shared(new_args, config); new_node->set_friendly_name(old_node->get_friendly_name()); - ov::copy_runtime_info({pattern_map.at(flatten_Reshape_501).get_node_shared_ptr(), - pattern_map.at(cat_Concat_505).get_node_shared_ptr()}, - new_node); + ov::copy_runtime_info({root->get_input_node_shared_ptr(0), root}, new_node); ov::replace_node(old_node, new_node); return true; }; diff --git a/src/common/transformations/tests/common_optimizations/fuse_rotary_positional_embeddings.cpp b/src/common/transformations/tests/common_optimizations/fuse_rotary_positional_embeddings.cpp index 6eb0add525c815..ea928de5c01702 100644 --- a/src/common/transformations/tests/common_optimizations/fuse_rotary_positional_embeddings.cpp +++ b/src/common/transformations/tests/common_optimizations/fuse_rotary_positional_embeddings.cpp @@ -854,4 +854,112 @@ TEST_F(TransformationTestsF, ConvertToROPE_chatGML_2d_rope) { model_ref = std::make_shared(ov::NodeVector{rope}, ov::ParameterVector{input, cos_sin_cache, position_ids}); } +} + +TEST_F(TransformationTestsF, ConvertToROPE_chatGML_nano_2d_rope) { + disable_rt_info_check(); + const int batch = 2; + const int seq_len = 7; + const int num_heads = 16; + const int ndims = 128; + const int rotary_ndims = 128; + const int max_pos_length = 2048; + { + auto input = std::make_shared(ov::element::f32, ov::PartialShape{batch, seq_len, 3072}); + auto cos_sin_cache = + std::make_shared(ov::element::f32, + ov::PartialShape{max_pos_length, (rotary_ndims / 2), 2}); + auto position_ids = std::make_shared(ov::element::i32, ov::PartialShape{batch, seq_len}); + + auto __module_transformer_index_67_Gather = + makeOP({cos_sin_cache, position_ids, 0}, {{"batch_dims", 0}}); + + auto ListUnpack_321 = makeOP({input, -1, {2048, 512, 512}}); + auto view_Reshape = makeOP({ListUnpack_321->output(0), {0, 0, num_heads, ndims}}, + {{"special_zero", true}}); + + auto permute_Transpose = makeOP({view_Reshape, {0, 2, 1, 3}}, {}); + + auto slice_Slice_357 = + makeOP({permute_Transpose, {0, 0, 0, 0}, {0, 0, 0, rotary_ndims}, {1, 1, 1, 1}}, + {{"begin_mask", {1, 1, 1, 0}}, 
+ {"end_mask", {1, 1, 1, 0}}, + {"new_axis_mask", {}}, + {"shrink_axis_mask", {}}, + {"ellipsis_mask", {}}}); + + auto aten_view_Reshape_1 = + makeOP({ListUnpack_321->output(1), {0, 0, 2, ndims}}, {{"special_zero", true}}); + auto aten_transpose_1 = makeOP({aten_view_Reshape_1, {0, 2, 1, 3}}); + auto shape_of_105249 = makeOP({aten_transpose_1}, {{"output_type", "i32"}}); + auto gather_105252 = makeOP({shape_of_105249, {2}, {0}}, {{"batch_dims", 0}}); + auto scatter_update_63441 = makeOP({{0, 0}, {1}, gather_105252, {0}}); + // connected to cos_sin_cache + auto slice_Slice_369 = makeOP( + {__module_transformer_index_67_Gather, {0, 0}, scatter_update_63441, {1, 1}}, + {{"begin_mask", {1, 0}}, + {"end_mask", {1, 0}}, + {"new_axis_mask", {}}, + {"shrink_axis_mask", {}}, + {"ellipsis_mask", {}}}); + auto list_construct_concat_1 = + makeOP({{-1}, {1}, gather_105252, {rotary_ndims / 2}, {2}}, {{"axis", 0}}); + + auto reshape_Reshape_373 = + makeOP({slice_Slice_357, {0, 16, 0, 64, 2}}, {{"special_zero", true}}); + auto select_Gather_384 = + makeOP({reshape_Reshape_373, 0, -1}, {{"batch_dims", 0}}); // x_even + auto select_Gather_381 = + makeOP({reshape_Reshape_373, 1, -1}, {{"batch_dims", 0}}); // x_odd + auto view_Reshape_380 = + makeOP({slice_Slice_369, list_construct_concat_1}, {{"special_zero", false}}); + auto select_Gather_385 = makeOP({view_Reshape_380, 0, -1}, {{"batch_dims", 0}}); // cos_tab + auto select_Gather_382 = makeOP({view_Reshape_380, 1, -1}, {{"batch_dims", 0}}); // sin_tab + + auto mul_Multiply_386 = makeOP({select_Gather_381, select_Gather_382}, + {{"auto_broadcast", "numpy"}}); // x_odd_sin + auto mul_Multiply_383 = makeOP({select_Gather_384, select_Gather_385}, + {{"auto_broadcast", "numpy"}}); // x_even_cos + auto Multiply_101315 = + makeOP({mul_Multiply_386, -1.000000f}, {{"auto_broadcast", "numpy"}}); + auto sub_Subtract_389 = + makeOP({mul_Multiply_383, Multiply_101315}, {{"auto_broadcast", "numpy"}}); + + auto mul_Multiply_391 = makeOP({select_Gather_381, select_Gather_385}, + {{"auto_broadcast", "numpy"}}); // x_odd_cos + auto mul_Multiply_393 = makeOP({select_Gather_384, select_Gather_382}, + {{"auto_broadcast", "numpy"}}); // x_even_sin + auto add_Add_396 = makeOP({mul_Multiply_391, mul_Multiply_393}, {{"auto_broadcast", "numpy"}}); + + auto Unsqueeze_62716 = makeOP({sub_Subtract_389, -1}, {}); + auto Unsqueeze_62717 = makeOP({add_Add_396, -1}, {}); + + auto stack_401 = makeOP({Unsqueeze_62716, Unsqueeze_62717}, {{"axis", -1}}); + auto flatten_Reshape_421 = + makeOP({stack_401, {0, num_heads, 0, rotary_ndims}}, {{"special_zero", true}}); + model = std::make_shared(ov::NodeVector{flatten_Reshape_421}, + ov::ParameterVector{input, cos_sin_cache, position_ids}); + } + manager.register_pass(true); + { + auto input = std::make_shared(ov::element::f32, ov::Shape{batch, seq_len, 3072}); + auto cos_sin_cache = + std::make_shared(ov::element::f32, ov::Shape{max_pos_length, (rotary_ndims / 2), 2}); + auto position_ids = std::make_shared(ov::element::i32, ov::PartialShape{batch, seq_len}); + auto gather_cos_sin = makeOP({cos_sin_cache, position_ids, 0}, {{"batch_dims", 0}}); + auto rope = makeOP({input, gather_cos_sin, gather_cos_sin}, + {{"config.slice_start", 0}, + {"config.slice_stop", 2048}, + {"config.input_trans0213", false}, + {"config.is_interleaved", false}, + {"config.rotary_ndims", rotary_ndims}, + {"config.is_chatglm", true}, + {"config.support_2d_rope", true}, + {"config.is_qwen", false}, + {"config.head_cnt", num_heads}, + {"config.head_size", ndims}, + 
{"config.gather_position_arg_id", 0}}); + model_ref = + std::make_shared(ov::NodeVector{rope}, ov::ParameterVector{input, cos_sin_cache, position_ids}); + } } \ No newline at end of file From 229ed28b9e86ac5a97c99b3b37e5e9931e0d3c61 Mon Sep 17 00:00:00 2001 From: Egor Duplenskii Date: Fri, 22 Nov 2024 14:05:15 +0100 Subject: [PATCH 36/62] [CPU][DEBUG_CAPS] Do not include all the nodes to executable ones (#27599) There is a case, when a node is a static one and it has 0 dim in one of its input shapes. In this case we must not execute this node, but performing additional isExecutable() check in runtime for all the static nodes would be to expensive --- src/plugins/intel_cpu/src/graph.cpp | 7 +------ src/plugins/intel_cpu/src/utils/debug_capabilities.h | 2 -- 2 files changed, 1 insertion(+), 8 deletions(-) diff --git a/src/plugins/intel_cpu/src/graph.cpp b/src/plugins/intel_cpu/src/graph.cpp index f3f3a379fc2af7..9b9357b5b2ff85 100644 --- a/src/plugins/intel_cpu/src/graph.cpp +++ b/src/plugins/intel_cpu/src/graph.cpp @@ -277,13 +277,8 @@ static std::tuple, std::vector> ExtractExecutableNo std::vector executableGraphNodes; for (size_t i = 0; i < graphNodes.size(); i++) { const auto& graphNode = graphNodes[i]; - if ((!graphNode->isConstant() && CPU_DEBUG_CAPS_ALWAYS_TRUE(graphNode->isExecutable())) || // non-constant executable or + if ((!graphNode->isConstant() && graphNode->isExecutable()) || // non-constant executable or (graphNode->isDynamicNode() && !one_of(graphNode->getType(), Type::Input, Type::Output))) { // dynamic, except inputs / outputs - /* @todo - * Revise implementation. - * With current way it is possible that with debug_caps enabled - * we execute a node, which is not ready to be executed - */ graphIdToExecutableId[i] = executableGraphNodes.size(); executableGraphNodes.emplace_back(graphNode); } diff --git a/src/plugins/intel_cpu/src/utils/debug_capabilities.h b/src/plugins/intel_cpu/src/utils/debug_capabilities.h index cea96c6cfdbd72..7faf02429b45eb 100644 --- a/src/plugins/intel_cpu/src/utils/debug_capabilities.h +++ b/src/plugins/intel_cpu/src/utils/debug_capabilities.h @@ -162,7 +162,6 @@ static inline std::ostream& _write_all_to_stream(std::ostream& os, const T& arg, } while (0) #define CPU_DEBUG_CAP_ENABLE(...) __VA_ARGS__ -#define CPU_DEBUG_CAPS_ALWAYS_TRUE(x) true #define DEBUG_LOG(...) DEBUG_LOG_EXT(nullptr, std::cout, "[ DEBUG ] ", __VA_ARGS__) #define ERROR_LOG(...) DEBUG_LOG_EXT(nullptr, std::cerr, "[ ERROR ] ", __VA_ARGS__) @@ -267,7 +266,6 @@ bool getEnvBool(const char* name); #else // !CPU_DEBUG_CAPS #define CPU_DEBUG_CAP_ENABLE(...) -#define CPU_DEBUG_CAPS_ALWAYS_TRUE(x) x #define DEBUG_LOG(...) #define ERROR_LOG(...) From 287ab9883acc0e2b8014c43cc543837f31abc088 Mon Sep 17 00:00:00 2001 From: Vladislav Golubev Date: Sat, 23 Nov 2024 18:55:48 +0100 Subject: [PATCH 37/62] [Snippets] Move BrgemmCopyB repacking logic outside the Subgraph (#27007) ### Details: Currently, CopyB repacking is always performed inside Subgraph. In the case when batch on B Matmul input is significantly smaller than batch on A Matmul input, and parallel work amount is big enough, this may lead to ineffective execution, since repacking for B input is performed in each parallel task whereas only one repacking iteration for each B batch is enough. Within this PR, CopyB repacking is moved outside the snippets kernel and performed via common reorder primitive just before the snippets kernel execution. 
### Tickets: - *CVS-154383* --- .../pass/mha_parallel_wa_optimizer.hpp | 53 ++++ .../include/snippets/lowered/pass/pass.hpp | 16 ++ .../lowered/pass/runtime_optimizer.hpp | 52 ++++ .../snippets/lowered/pass/serialize_base.hpp | 4 +- .../lowered/pass/serialize_control_flow.hpp | 7 +- .../lowered/pass/serialize_data_flow.hpp | 7 +- .../include/snippets/runtime_configurator.hpp | 123 ++++----- .../snippets/utils/linear_ir_pass_dumper.hpp | 4 +- .../pass/mha_parallel_wa_optimizer.cpp | 175 +++++++++++++ src/common/snippets/src/lowered/pass/pass.cpp | 19 ++ .../snippets/src/pass/collapse_subgraph.cpp | 5 +- .../snippets/src/pass/split_dimension_m.cpp | 15 ++ .../snippets/src/runtime_configurator.cpp | 246 +++--------------- .../tests/include/utils/split_dim_m.hpp | 37 +++ .../tests/src/pass/mha_tokenization.cpp | 2 +- .../snippets/tests/src/utils/split_dim_m.cpp | 72 +++++ .../snippets/cpu_runtime_configurator.cpp | 69 +---- .../snippets/cpu_runtime_configurator.hpp | 39 +-- src/plugins/intel_cpu/src/nodes/reorder.cpp | 6 - src/plugins/intel_cpu/src/nodes/subgraph.cpp | 79 ++++-- src/plugins/intel_cpu/src/nodes/subgraph.h | 14 +- .../snippets/x64/op/brgemm_cpu.cpp | 24 +- .../snippets/x64/op/brgemm_cpu.hpp | 1 - .../snippets/x64/op/brgemm_utils.cpp | 18 ++ .../snippets/x64/op/brgemm_utils.hpp | 8 +- .../x64/pass/eliminate_brgemm_copy_b.cpp | 46 ++++ .../x64/pass/eliminate_brgemm_copy_b.hpp | 29 +++ .../adjust_brgemm_copy_b_loop_ports.cpp | 57 ++-- .../adjust_brgemm_copy_b_loop_ports.hpp | 9 +- .../brgemm_copy_b_loop_ports_adjuster.cpp | 49 ++++ .../brgemm_copy_b_loop_ports_adjuster.hpp | 33 +++ .../x64/pass/lowered/brgemm_cpu_blocking.cpp | 12 +- .../lowered/external_repacking_adjuster.cpp | 72 +++++ .../lowered/external_repacking_adjuster.hpp | 32 +++ 34 files changed, 969 insertions(+), 465 deletions(-) create mode 100644 src/common/snippets/include/snippets/lowered/pass/mha_parallel_wa_optimizer.hpp create mode 100644 src/common/snippets/include/snippets/lowered/pass/runtime_optimizer.hpp create mode 100644 src/common/snippets/src/lowered/pass/mha_parallel_wa_optimizer.cpp create mode 100644 src/common/snippets/tests/include/utils/split_dim_m.hpp create mode 100644 src/common/snippets/tests/src/utils/split_dim_m.cpp create mode 100644 src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.cpp create mode 100644 src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.hpp create mode 100644 src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.cpp create mode 100644 src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.hpp create mode 100644 src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.cpp create mode 100644 src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.hpp diff --git a/src/common/snippets/include/snippets/lowered/pass/mha_parallel_wa_optimizer.hpp b/src/common/snippets/include/snippets/lowered/pass/mha_parallel_wa_optimizer.hpp new file mode 100644 index 00000000000000..9af247cd52ecab --- /dev/null +++ b/src/common/snippets/include/snippets/lowered/pass/mha_parallel_wa_optimizer.hpp @@ -0,0 +1,53 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "snippets/lowered/linear_ir.hpp" +#include "snippets/lowered/loop_info.hpp" +#include "snippets/lowered/pass/runtime_optimizer.hpp" + 
+namespace ov { +namespace snippets { +namespace lowered { +namespace pass { +/** + * @class MHAParallelWAOptimizer + * @brief Optimizes the dynamic MHA execution increasing parallel work amount dy dividing Brgemm's "M" dimension to "parallel_m" + * and "kernel_m". Uses heuristics from snippets::pass::SplitDimensionM for dimension splitting. + * The optimizer performs the following steps: + * - Identifies applicable Brgemm operations within the LinearIR. + * - Finds parameters whose shapes and layouts need to be adjusted after the split. + * - Determines loops that should be adjusted. + */ +class MHAParallelWAOptimizer : public lowered::pass::RuntimeOptimizer { +public: + MHAParallelWAOptimizer() = default; + MHAParallelWAOptimizer(const lowered::LinearIRCPtr& linear_ir, const RuntimeConfigurator* configurator); + + bool run(const lowered::LinearIR& linear_ir) override; + bool applicable() const override { return !m_loops_to_split.empty(); } + +private: + static std::unordered_set find_applicable_brgemms(const lowered::LinearIRCPtr& linear_ir); + static std::unordered_set find_unsqueezed_params( + const lowered::LinearIRCPtr& linear_ir, + const std::unordered_set& brgemms); + static std::vector find_loops_to_split( + const lowered::LinearIRCPtr& linear_ir, + const std::unordered_set& unsqueezed_params); + + std::vector m_loops_to_split{}; + std::unordered_set m_unsqueezed_params{}; + std::vector> m_optimized_layouts{}; + std::vector m_dim_M_idces{}; + size_t m_concurrency = 0; + + static const size_t m_dim_M_idx; +}; + +} // namespace pass +} // namespace lowered +} // namespace snippets +} // namespace ov \ No newline at end of file diff --git a/src/common/snippets/include/snippets/lowered/pass/pass.hpp b/src/common/snippets/include/snippets/lowered/pass/pass.hpp index 446f96d30a27cf..2758ab85070341 100644 --- a/src/common/snippets/include/snippets/lowered/pass/pass.hpp +++ b/src/common/snippets/include/snippets/lowered/pass/pass.hpp @@ -67,6 +67,21 @@ class Pass : public PassBase { virtual bool run(lowered::LinearIR& linear_ir) = 0; }; +/** + * @interface ConstPass + * @brief Base class for LIR passes which are performed on a full LIR body but doesn't change it + * @ingroup snippets + */ +class ConstPass : public PassBase { +public: + /** + * @brief Apply the pass to the Linear IR + * @param linear_ir the target Linear IR + * @return status of the pass + */ + virtual bool run(const lowered::LinearIR& linear_ir) = 0; +}; + /** * @interface RangedPass * @brief Base class for LIR passes which are performed on a range of a LIR body @@ -114,6 +129,7 @@ class PassPipeline { void register_positioned_passes(const std::vector& pos_passes); void run(lowered::LinearIR& linear_ir) const; + void run(const lowered::LinearIR& linear_ir) const; void run(lowered::LinearIR& linear_ir, lowered::LinearIR::constExprIt begin, lowered::LinearIR::constExprIt end) const; /** diff --git a/src/common/snippets/include/snippets/lowered/pass/runtime_optimizer.hpp b/src/common/snippets/include/snippets/lowered/pass/runtime_optimizer.hpp new file mode 100644 index 00000000000000..ed37a1c6c58bca --- /dev/null +++ b/src/common/snippets/include/snippets/lowered/pass/runtime_optimizer.hpp @@ -0,0 +1,52 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "snippets/lowered/linear_ir.hpp" +#include "snippets/lowered/pass/pass.hpp" +#include "snippets/runtime_configurator.hpp" + +namespace ov { +namespace snippets { +namespace lowered { +namespace pass { +/** + * 
@class RuntimeOptimizer + * @brief Base class for runtime optimizers that operate on LinearIR and RuntimeConfigurator during + * RuntimeConfigurator::update stage. + */ +class RuntimeOptimizer : public ConstPass { +public: + RuntimeOptimizer() = default; + RuntimeOptimizer(const RuntimeConfigurator* configurator) : m_configurator(configurator) { + OPENVINO_ASSERT(configurator, "RuntimeConfigurator musn't be nullptr"); + } + /** + * @brief Defines if this pass is applicable. If it is not applicable, its registration in pass pipeline can be skipped. + */ + virtual bool applicable() const = 0; + + /** + * @brief Creates an instance of the specified pass type and checks if it is applicable. + * If the pass is applicable, it is registered in the provided pipeline. + * @param pipeline The pipeline in which the pass should be registered. + * @param args The arguments to be forwarded to the pass constructor. + */ + template ::value>> + static void register_if_applicable(PassPipeline& pipeline, Args&&... args) { + auto pass = std::make_shared(std::forward(args)...); + if (pass->applicable()) { + pipeline.register_pass(pass); + } + } + +protected: + const RuntimeConfigurator* m_configurator = nullptr; +}; + +} // namespace pass +} // namespace lowered +} // namespace snippets +} // namespace ov diff --git a/src/common/snippets/include/snippets/lowered/pass/serialize_base.hpp b/src/common/snippets/include/snippets/lowered/pass/serialize_base.hpp index 51cc528a155a00..560744f4eb09d8 100644 --- a/src/common/snippets/include/snippets/lowered/pass/serialize_base.hpp +++ b/src/common/snippets/include/snippets/lowered/pass/serialize_base.hpp @@ -16,9 +16,9 @@ namespace pass { * @brief Base class for LinearIR serialization passes * @ingroup snippets */ -class SerializeBase : public Pass { +class SerializeBase : public ConstPass { public: - OPENVINO_RTTI("SerializeBase", "Pass") + OPENVINO_RTTI("SerializeBase", "ConstPass") SerializeBase(const std::string& xml_path); protected: diff --git a/src/common/snippets/include/snippets/lowered/pass/serialize_control_flow.hpp b/src/common/snippets/include/snippets/lowered/pass/serialize_control_flow.hpp index 602e9d9df7ce32..2e8f91aed6c08d 100644 --- a/src/common/snippets/include/snippets/lowered/pass/serialize_control_flow.hpp +++ b/src/common/snippets/include/snippets/lowered/pass/serialize_control_flow.hpp @@ -22,12 +22,7 @@ class SerializeControlFlow : public SerializeBase { OPENVINO_RTTI("SerializeControlFlow", "Pass", SerializeBase) SerializeControlFlow(const std::string& xml_path, bool update_dynamic_ops = false) : SerializeBase(xml_path), m_update_dynamic_ops{update_dynamic_ops} {} - - bool run(LinearIR& linear_ir) override { - return run(const_cast(linear_ir)); - } - // We need a const method to run from functions that can't change LIR - bool run(const LinearIR& linear_ir); + bool run(const LinearIR& linear_ir) override; private: const bool m_update_dynamic_ops = false; diff --git a/src/common/snippets/include/snippets/lowered/pass/serialize_data_flow.hpp b/src/common/snippets/include/snippets/lowered/pass/serialize_data_flow.hpp index ce5b3855400264..ecbc1a834ce388 100644 --- a/src/common/snippets/include/snippets/lowered/pass/serialize_data_flow.hpp +++ b/src/common/snippets/include/snippets/lowered/pass/serialize_data_flow.hpp @@ -23,12 +23,7 @@ class SerializeDataFlow : public SerializeBase { public: OPENVINO_RTTI("SerializeDataFlow", "Pass", SerializeBase) SerializeDataFlow(const std::string& xml_path) : SerializeBase(xml_path) {} - - bool run(LinearIR& 
linear_ir) override { - return run(const_cast(linear_ir)); - } - // We need a const method to run from functions that can't change LIR - bool run(const LinearIR& linear_ir); + bool run(const LinearIR& linear_ir) override; }; } // namespace pass diff --git a/src/common/snippets/include/snippets/runtime_configurator.hpp b/src/common/snippets/include/snippets/runtime_configurator.hpp index 10c15a4621a72a..866e98843fcd50 100644 --- a/src/common/snippets/include/snippets/runtime_configurator.hpp +++ b/src/common/snippets/include/snippets/runtime_configurator.hpp @@ -4,9 +4,9 @@ #pragma once +#include "snippets/kernel_executor_table.hpp" #include "snippets/lowered/linear_ir.hpp" #include "snippets/lowered/loop_info.hpp" -#include "snippets/kernel_executor_table.hpp" #include "snippets/lowered/pass/pass.hpp" namespace ov { @@ -44,12 +44,15 @@ class RuntimeConfig { size_t tensor_rank = 0; size_t tile_rank = 0; + std::vector io_shapes = {}; + std::vector io_layouts = {}; std::vector io_data_offsets = {}; ov::snippets::VectorDims master_shape = {}; size_t buffer_scratchpad_size = 0; std::vector buffer_cluster_offsets {}; KernelExecutorTablePtr kernel_executor_table = std::make_shared(); + std::vector latest_shapes = {}; }; /** @@ -83,18 +86,62 @@ class RuntimeConfigurator { */ void reset_kernel_executor_table() const; -protected: + // Getters for private members + std::shared_ptr get_config() const { return m_config; } + size_t get_io_num() const { return m_io_num; } + size_t get_in_num() const { return m_in_num; } + const std::vector& get_io_descs() const { return m_io_descs; } + const std::vector& get_io_data_sizes() const { return m_io_data_sizes; } + const std::map>& get_dynamic_buffer_clusters() const { return m_dynamic_buffer_clusters; } + /** - * @brief Update RuntimeConfig based on LinearIR + * @brief Computes the offsets for each dimension of a tensor shape. + * + * This function calculates the offsets for each dimension of a tensor shape, which represent the distance between + * consecutive elements of the corresponding dimension. If a dimension size is 1, the next dimension starts + * immediately, and the stride is 0. + * @param shape The shape for offset computation. + * @param idx The index to get the corresponding offsets and io_data_sizes. + * @param idx_stride Defines the number of dimensions that should be skipped in the offsets vector. + */ + void compute_offsets(const ov::snippets::VectorDims& shape, size_t idx, size_t idx_stride) const; + struct UnifiedLoopInfoRtParams { + size_t work_amount = 0; + std::vector ptr_increments; + std::vector finalization_offsets; + }; + /** + * @brief Retrieves the runtime parameters for a given UnifiedLoopInfo. + * @param unified_loop_info The UnifiedLoopInfo for which the runtime parameters are to be retrieved. + * @return A LoopInfoRuntimeParams object containing the runtime parameters. + */ + static UnifiedLoopInfoRtParams get_loop_runtime_params(const lowered::UnifiedLoopInfoPtr& unified_loop_info); + using LoopInfoRuntimeParamsMap = std::unordered_map; + /** + * @brief Update Loop information in LinearIR: Unified and ExpandedLoopInfo * @param linear_ir LinearIR - * @todo Ticket 148891: Rewrite on PassPipeline */ - virtual void update(const lowered::LinearIRCPtr& linear_ir); + static void update_loop_info(const lowered::LinearIRCPtr& linear_ir); + /** + * @brief Updates the ExpandedLoopInfo based on the initialized runtime parameters. + * @param expanded_loop_info The ExpandedLoopInfo to be updated. 
+ * @param initialized_info_map A map containing the initialized runtime parameters for UnifiedLoopInfo. + */ + static void update_expanded_loop_info(const lowered::ExpandedLoopInfoPtr& expanded_loop_info, + LoopInfoRuntimeParamsMap& initializated_info_map); /** * @brief Update tensor rank based on master shape * @param master_shape Master shape */ - virtual void update_tensor_rank(const ov::snippets::VectorDims& master_shape); + virtual void update_tensor_rank(const ov::snippets::VectorDims& master_shape) const; + +protected: + /** + * @brief Update RuntimeConfig based on LinearIR + * @param linear_ir LinearIR + * @todo Ticket 148891: Rewrite on PassPipeline + */ + virtual void update(const lowered::LinearIRCPtr& linear_ir); /** * @brief Allocate and intialize fields in RuntimeConfig and RuntimeConfigurator * @param linear_ir LinearIR @@ -120,21 +167,6 @@ class RuntimeConfigurator { * @param linear_ir LinearIR */ virtual void init_tensor_rank(const lowered::LinearIRCPtr& linear_ir) const; - - struct UnifiedLoopInfoRtParams { - size_t work_amount = 0; - std::vector ptr_increments; - std::vector finalization_offsets; - }; - static UnifiedLoopInfoRtParams get_loop_runtime_params(const lowered::UnifiedLoopInfoPtr& unified_loop_info); - using LoopInfoRuntimeParamsMap = std::unordered_map; - /** - * @brief Update Loop informations in LinearIR: Unified and ExpandedLoopInfo - * @param linear_ir LinearIR - */ - static void update_loop_info(const lowered::LinearIRCPtr& linear_ir); - static void update_expanded_loop_info(const lowered::ExpandedLoopInfoPtr& expanded_loop_info, - LoopInfoRuntimeParamsMap& initializated_info_map); /** * @brief Update Buffer scratchpad size and offsets if needed * Note: `update_loop_info` must be called before @@ -146,8 +178,7 @@ class RuntimeConfigurator { * @param shapes shapes used in offsets computation * @param layouts layouts used in offsets computation */ - void update_data_offsets(const std::vector& shapes, - const std::vector>& layouts) const; + void update_data_offsets() const; /** * @brief Extract shapes from m_io_descs */ @@ -157,43 +188,6 @@ class RuntimeConfigurator { */ std::vector> extract_layouts() const; - class MHAParallelWAOptimizer { - public: - MHAParallelWAOptimizer() = default; - MHAParallelWAOptimizer(const ov::snippets::lowered::LinearIRCPtr& linear_ir, RuntimeConfigurator* configurator); - /** - * @brief Checks if the current master shape can be optimized, and if yes, updates all the necessary runtime information - * @return status if the optimization is applied - */ - bool optimize(); - - private: - /** - * @brief Checks if optimizer is enabled - * @todo Ticket 148891: when RuntimeConfigurator::update will be rewritten on PassPipeline, this method should be removed - * We will not just register MHAParallelWAOptimizer in case if it is not needed - */ - bool enabled() const; - - static std::unordered_set find_applicable_brgemms(const ov::snippets::lowered::LinearIRCPtr& linear_ir); - static std::unordered_set find_unsqueezed_params( - const ov::snippets::lowered::LinearIRCPtr& linear_ir, - const std::unordered_set& brgemms); - static std::vector find_loops_to_split( - const ov::snippets::lowered::LinearIRCPtr& linear_ir, - const std::unordered_set& unsqueezed_params); - - RuntimeConfigurator* configurator = nullptr; - - std::vector loops_to_split{}; - std::unordered_set unsqueezed_params{}; - std::vector> optimized_layouts{}; - std::vector m_dim_idces{}; - size_t concurrency = 0; - - static const size_t m_dim_idx; - } m_optimizer; - 
std::shared_ptr m_config = nullptr; size_t m_io_num = 0; @@ -203,7 +197,14 @@ class RuntimeConfigurator { // [cluster_id -> buffer expressions ] std::map> m_dynamic_buffer_clusters = {}; - std::vector m_latest_shapes = {}; + // WA: until ticket 148891 is not implemented, 2 pass pipelines for runtime optimizers are necessary since different + // optimizers must be called at different pipeline stages. + // - Intermediate optimizers must be called right after `update_loop_info` + // - Final optimizers must be called after all other RuntimeConfigurator's update methods + // When all updates will be rewritten on PassPipeline, PositionedPasses can be used to precisely define the place of + // the additional optimizers + lowered::pass::PassPipeline m_intermediate_optimizers; + lowered::pass::PassPipeline m_final_optimizers; }; } // namespace snippets diff --git a/src/common/snippets/include/snippets/utils/linear_ir_pass_dumper.hpp b/src/common/snippets/include/snippets/utils/linear_ir_pass_dumper.hpp index 85abfc9a91ab31..c8c145d7eac075 100644 --- a/src/common/snippets/include/snippets/utils/linear_ir_pass_dumper.hpp +++ b/src/common/snippets/include/snippets/utils/linear_ir_pass_dumper.hpp @@ -16,7 +16,7 @@ namespace snippets { class LIRPassDump { public: - explicit LIRPassDump(lowered::LinearIR& linear_ir, std::string pass_name) + explicit LIRPassDump(const lowered::LinearIR& linear_ir, std::string pass_name) : linear_ir(linear_ir), pass_name(std::move(pass_name)), debug_config(linear_ir.get_config().debug_config) { dump("_in"); } @@ -44,7 +44,7 @@ class LIRPassDump { num++; } - lowered::LinearIR& linear_ir; + const lowered::LinearIR& linear_ir; const std::string pass_name; const DebugCapsConfig& debug_config; }; diff --git a/src/common/snippets/src/lowered/pass/mha_parallel_wa_optimizer.cpp b/src/common/snippets/src/lowered/pass/mha_parallel_wa_optimizer.cpp new file mode 100644 index 00000000000000..2f57d6422cf11d --- /dev/null +++ b/src/common/snippets/src/lowered/pass/mha_parallel_wa_optimizer.cpp @@ -0,0 +1,175 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "snippets/lowered/pass/mha_parallel_wa_optimizer.hpp" + +#include "snippets/itt.hpp" +#include "snippets/lowered/loop_info.hpp" +#include "snippets/lowered/loop_manager.hpp" +#include "snippets/pass/split_dimension_m.hpp" +#include "snippets/utils/loop_utils.hpp" +#include "snippets/utils/utils.hpp" + +namespace ov { +namespace snippets { +namespace lowered { +namespace pass { +using namespace ov::snippets::pass; + +const size_t MHAParallelWAOptimizer::m_dim_M_idx = 1; + +MHAParallelWAOptimizer::MHAParallelWAOptimizer(const lowered::LinearIRCPtr& linear_ir, const RuntimeConfigurator* configurator) + : lowered::pass::RuntimeOptimizer(configurator) { + if (linear_ir->get_config().m_enable_domain_optimization || !linear_ir->is_dynamic()) + return; + + const auto brgemms = find_applicable_brgemms(linear_ir); + if (brgemms.empty()) + return; + + m_concurrency = linear_ir->get_config().m_min_parallel_work_amount; + m_unsqueezed_params = find_unsqueezed_params(linear_ir, brgemms); + OPENVINO_ASSERT(!m_unsqueezed_params.empty(), "unsqueezed_params mustn't be empty after initialization"); + m_loops_to_split = find_loops_to_split(linear_ir, m_unsqueezed_params); + + m_dim_M_idces.resize(configurator->get_io_num()); + m_optimized_layouts.resize(configurator->get_io_num()); + for (size_t i = 0; i < configurator->get_io_num(); ++i) { + const auto& layout = 
configurator->get_io_descs()[i]->get_layout(); + const auto dim_idx = i < configurator->get_in_num() ? utils::get_input_dim_idx(layout, m_dim_M_idx) + : utils::get_output_dim_idx(layout, m_dim_M_idx); + m_dim_M_idces[i] = dim_idx; + const auto m_idx = i < configurator->get_in_num() ? dim_idx : layout.size() - 2; + m_optimized_layouts[i] = SplitDimensionM::get_updated_order(layout, m_idx); + } +} + +bool MHAParallelWAOptimizer::run(const lowered::LinearIR& linear_ir) { + OV_ITT_SCOPED_TASK(ov::pass::itt::domains::SnippetsTransform, "Snippets::MHAParallelWAOptimizer") + const auto& config = m_configurator->get_config(); + size_t new_batch_dim, new_kernel_dim; + if (!SplitDimensionM::split(config->master_shape, m_concurrency, new_batch_dim, new_kernel_dim)) + return false; + auto& master_shape = config->master_shape; + *++master_shape.rbegin() = new_kernel_dim; + master_shape.insert(master_shape.cbegin() + master_shape.size() - 2, new_batch_dim); + m_configurator->update_tensor_rank(master_shape); + + RuntimeConfigurator::LoopInfoRuntimeParamsMap initialized_info; + auto updater = [&](const lowered::LoopInfoPtr& loop_info) { + if (const auto unified_loop_info = ov::as_type_ptr(loop_info)) { + if (initialized_info.count(unified_loop_info) == 0) { + if (!ov::is_type(unified_loop_info)) + unified_loop_info->set_work_amount(new_kernel_dim); + snippets::utils::update_data_pointer_shifts(unified_loop_info); + initialized_info[unified_loop_info] = RuntimeConfigurator::get_loop_runtime_params(unified_loop_info); + } + } else if (const auto expanded_loop_info = ov::as_type_ptr(loop_info)) { + m_configurator->update_expanded_loop_info(expanded_loop_info, initialized_info); + } else { + OPENVINO_THROW("Failed to update loop info: unknown type!"); + } + }; + lowered::LoopInfoSet updated_loops; + for (const auto& loop : m_loops_to_split) { + loop->apply(updater, updated_loops); + } + + for (size_t i = 0; i < m_configurator->get_io_num(); ++i) { + config->io_shapes[i] = m_unsqueezed_params.count(i) + ? SplitDimensionM::unsqueeze_m_dim(config->io_shapes[i], m_dim_M_idces[i]) + : SplitDimensionM::reshape_m_dim(config->io_shapes[i], m_dim_M_idces[i], new_batch_dim, new_kernel_dim); + } + config->io_layouts = m_optimized_layouts; + return true; +} + +std::unordered_set MHAParallelWAOptimizer::find_applicable_brgemms(const lowered::LinearIRCPtr& linear_ir) { + auto is_brgemm = [](const lowered::ExpressionPtr& expr) { + return ov::is_type(expr->get_node()); + }; + auto brgemm_it = std::find_if(linear_ir->begin(), linear_ir->end(), is_brgemm); + std::unordered_set brgemms; + while (brgemm_it != linear_ir->end()) { + brgemms.insert(*brgemm_it); + brgemm_it = std::find_if(std::next(brgemm_it), linear_ir->end(), is_brgemm); + } + const auto& loop_manager = linear_ir->get_loop_manager(); + auto applicable_brgemm = [&loop_manager](const lowered::ExpressionPtr& expr) { + const auto& loop_idces = expr->get_loop_ids(); + if (loop_idces.empty()) + return false; + const auto& outermost_loop = loop_manager->get_loop_info(loop_idces[0]); + if (!snippets::utils::is_dynamic_value(outermost_loop->get_work_amount())) + return false; + bool loop_by_m = true; + outermost_loop->iterate_through_ports([&loop_by_m](const lowered::LoopPort& port) { + if (port.is_incremented && port.dim_idx != m_dim_M_idx) + loop_by_m = false; + }); + return loop_by_m; + }; + return std::all_of(brgemms.begin(), brgemms.end(), applicable_brgemm) ? 
brgemms : std::unordered_set{}; +} + +std::unordered_set MHAParallelWAOptimizer::find_unsqueezed_params( + const lowered::LinearIRCPtr& linear_ir, + const std::unordered_set& brgemms) { + const auto& params = linear_ir->get_parameters(); + std::unordered_set unsqueezed_params; + auto add_param = [¶ms, &unsqueezed_params](const lowered::ExpressionPtr& expr) { + if (ov::is_type(expr->get_node())) { + auto found_param = std::find(params.begin(), params.end(), expr); + OPENVINO_ASSERT(found_param != params.end(), "find_param didn't found parameter for expr"); + unsqueezed_params.insert(std::distance(params.begin(), found_param)); + } + }; + + std::unordered_set visited; + for (const auto& brgemm : brgemms) { + const auto& brgemm_b_input = brgemm->get_input_port_connector(1)->get_source().get_expr(); + utils::visit_path(brgemm_b_input, visited, add_param, true); + } + return unsqueezed_params; +} + +std::vector MHAParallelWAOptimizer::find_loops_to_split( + const lowered::LinearIRCPtr& linear_ir, + const std::unordered_set& unsqueezed_params) { + const auto loop_manager = linear_ir->get_loop_manager(); + std::set loop_idces_to_split; + std::vector prev_loop_idces; + + auto add_loop_idx_to_split = [&](const lowered::ExpressionPtr& expr) { + const auto& loop_idces = expr->get_loop_ids(); + if (loop_idces != prev_loop_idces) { + prev_loop_idces = loop_idces; + for (const auto& loop_id : loop_idces) { + const auto expanded_loop_info = loop_manager->get_loop_info(loop_id); + if (expanded_loop_info->get_dim_idx() == m_dim_M_idx) { + loop_idces_to_split.insert(loop_id); + } + } + } + }; + + size_t i = 0; + std::unordered_set visited; + for (const auto& param : linear_ir->get_parameters()) { + if (unsqueezed_params.count(i++)) + continue; + utils::visit_path(param, visited, add_loop_idx_to_split, false); + } + + const auto& loops_map = linear_ir->get_loop_manager()->get_map(); + std::vector loops_to_split; + for (const auto& id : loop_idces_to_split) + loops_to_split.push_back(ov::as_type_ptr(loops_map.at(id))); + return loops_to_split; +} + +} // namespace pass +} // namespace lowered +} // namespace snippets +} // namespace ov \ No newline at end of file diff --git a/src/common/snippets/src/lowered/pass/pass.cpp b/src/common/snippets/src/lowered/pass/pass.cpp index f5b902a1a17b8c..235d248d8e9838 100644 --- a/src/common/snippets/src/lowered/pass/pass.cpp +++ b/src/common/snippets/src/lowered/pass/pass.cpp @@ -27,6 +27,23 @@ void PassPipeline::register_pass(const std::shared_ptr& pass) { m_passes.push_back(pass); } +void PassPipeline::run(const lowered::LinearIR& linear_ir) const { + for (const auto& pass : m_passes) { + OPENVINO_ASSERT(pass != nullptr, "PassPipeline has empty pass!"); + SNIPPETS_DEBUG_LIR_PASS_DUMP(linear_ir, pass); + + if (m_pass_config->is_disabled(pass->get_type_info())) { + continue; + } + const auto const_pass = std::dynamic_pointer_cast(pass); + OPENVINO_ASSERT(const_pass != nullptr, + "Unexpected pass (", + pass->get_type_info(), + ") is registered in PassPipeline. 
Only ConstPass is allowed."); + const_pass->run(linear_ir); + } +} + void PassPipeline::run(LinearIR& linear_ir) const { run(linear_ir, linear_ir.cbegin(), linear_ir.cend()); } @@ -41,6 +58,8 @@ void PassPipeline::run(LinearIR& linear_ir, LinearIR::constExprIt begin, LinearI } if (auto lir_pass = std::dynamic_pointer_cast(pass)) { lir_pass->run(linear_ir); + } else if (auto const_pass = std::dynamic_pointer_cast(pass)) { + const_pass->run(linear_ir); } else if (auto ranged_pass = std::dynamic_pointer_cast(pass)) { ranged_pass->run(linear_ir, begin, end); } else { diff --git a/src/common/snippets/src/pass/collapse_subgraph.cpp b/src/common/snippets/src/pass/collapse_subgraph.cpp index 0f0cc225173479..6348f89598523d 100644 --- a/src/common/snippets/src/pass/collapse_subgraph.cpp +++ b/src/common/snippets/src/pass/collapse_subgraph.cpp @@ -51,9 +51,12 @@ auto is_supported_op(const std::shared_ptr &n) -> bool { const auto parent = transpose->get_input_node_shared_ptr(0); const auto child = transpose->get_output_target_inputs(0).begin()->get_node()->shared_from_this(); auto is_brgemm_case = ov::is_type(parent) || ov::is_type(child); + auto decomposition_case = true; // Check for Transpose parent is MatMul inside Subgraph if (const auto subgraph = ov::as_type_ptr(parent)) { if (GetSnippetsSubgraphType(subgraph) != SnippetsSubgraphType::Completed) { + // Transpose decomposition is supported only for Transpose nodes right after Subgraph's parameters + decomposition_case = false; const auto body = subgraph->body_ptr(); const auto subgraph_output = body->get_results()[transpose->input_value(0).get_index()]->get_input_node_shared_ptr(0); is_brgemm_case = is_brgemm_case || ov::is_type(subgraph_output); @@ -63,7 +66,7 @@ auto is_supported_op(const std::shared_ptr &n) -> bool { const auto& order = as_type_ptr(n->get_input_node_shared_ptr(1)); if (order) { const auto order_value = order->cast_vector(); - return (TransposeDecomposition::is_supported_transpose_order(order_value)) || + return (decomposition_case && TransposeDecomposition::is_supported_transpose_order(order_value)) || (is_brgemm_case && FuseTransposeBrgemm::is_supported_transpose_order(order_value)); } } diff --git a/src/common/snippets/src/pass/split_dimension_m.cpp b/src/common/snippets/src/pass/split_dimension_m.cpp index 0f50ad27931e04..ae95a371483163 100644 --- a/src/common/snippets/src/pass/split_dimension_m.cpp +++ b/src/common/snippets/src/pass/split_dimension_m.cpp @@ -34,6 +34,8 @@ bool SplitDimensionM::is_supported_matmul(const std::shared_ptr& std::pair SplitDimensionM::get_splited_dimensions(size_t batch_dim, size_t m_dim, size_t optimal_parallelism_work_amount) { std::pair splited = { 1, m_dim }; + // Ideal case #1: M can be split on the parts one of which complements the batch dimension to the optimal parallel work amount + // In this case, each thread will execute the Snippets kernel once const size_t lower_bound = optimal_parallelism_work_amount / batch_dim; if (lower_bound * batch_dim == optimal_parallelism_work_amount && m_dim % lower_bound == 0) { splited.first = lower_bound; @@ -42,6 +44,19 @@ std::pair SplitDimensionM::get_splited_dimensions(size_t batch_d return splited; } + // Ideal case #2: M is divisible by optimal parallel work amount, and the new_m_dim is big enough + // In this case, each thread will execute the Snippets kernel 'batch_dim' times + if (m_dim % optimal_parallelism_work_amount == 0) { + const auto new_m_dim = m_dim / optimal_parallelism_work_amount; + const size_t min_kernel_m = 64; + if (new_m_dim 
>= min_kernel_m) { + splited.first = optimal_parallelism_work_amount; + splited.second = new_m_dim; + OPENVINO_ASSERT(splited.first * splited.second == m_dim, "Incorrect dimension M splitting!"); + return splited; + } + } + const size_t upper_bound = utils::div_up(2 * optimal_parallelism_work_amount, batch_dim); for (size_t divisor_0 = upper_bound - 1; divisor_0 > 1; divisor_0--) { size_t divisor_1 = m_dim / divisor_0; diff --git a/src/common/snippets/src/runtime_configurator.cpp b/src/common/snippets/src/runtime_configurator.cpp index 9174d93eea3f98..96d13074d042ba 100644 --- a/src/common/snippets/src/runtime_configurator.cpp +++ b/src/common/snippets/src/runtime_configurator.cpp @@ -7,16 +7,17 @@ #include "snippets/lowered/pass/compute_buffer_allocation_size.hpp" #include "snippets/lowered/pass/init_loops.hpp" #include "snippets/lowered/pass/insert_specific_iterations.hpp" -#include "snippets/pass/split_dimension_m.hpp" +#include "snippets/lowered/pass/mha_parallel_wa_optimizer.hpp" #include "snippets/snippets_isa.hpp" -#include "snippets/utils/utils.hpp" #include "snippets/utils/loop_utils.hpp" +#include "snippets/utils/utils.hpp" namespace ov { namespace snippets { using namespace ov::snippets::pass; using namespace ov::snippets::lowered; +using namespace ov::snippets::lowered::pass; #ifdef SNIPPETS_DEBUG_CAPS std::string RuntimeConfig::to_string() const { @@ -51,6 +52,8 @@ const std::shared_ptr& RuntimeConfigurator::get_updated_config(co initialization(linear_ir); update(linear_ir); + // Note: after 'update' is finished, io_shapes can be corrupted, so we move it to latest_shapes to avoid copying + m_config->latest_shapes = std::move(m_config->io_shapes); return m_config; } @@ -60,30 +63,32 @@ void RuntimeConfigurator::initialization(const lowered::LinearIRCPtr& linear_ir) init_buffer_info(linear_ir); OPENVINO_ASSERT(m_io_num > 0, "LinearIR must have parameters and results"); - m_latest_shapes.resize(m_io_num); + m_config->latest_shapes.resize(m_io_num); m_config->io_data_offsets.resize(m_io_num); m_config->tile_rank = linear_ir->get_config().m_loop_depth; - m_optimizer = MHAParallelWAOptimizer(linear_ir, this); + + RuntimeOptimizer::register_if_applicable(m_intermediate_optimizers, linear_ir, this); } void RuntimeConfigurator::update(const lowered::LinearIRCPtr& linear_ir) { m_config->master_shape = linear_ir->get_master_shape(); - update_loop_info(linear_ir); + m_config->io_shapes = extract_shapes(); + m_config->io_layouts = extract_layouts(); + if (linear_ir->is_dynamic()) + update_loop_info(linear_ir); - if (!m_optimizer.optimize()) { - // If the optimization was not applied, offsets are updated using shapes from descriptors - auto shapes = extract_shapes(); - update_data_offsets(shapes, extract_layouts()); - m_latest_shapes = std::move(shapes); - } + m_intermediate_optimizers.run(*linear_ir); // Update KernelExecutor Table should be before `update_buffer_scratchpad_size` // because `ComputeAllocationSize` depends on subtensors which are updated in the table get_kernel_executor_table()->update_state(linear_ir); update_buffer_scratchpad_size(linear_ir); + + update_data_offsets(); + m_final_optimizers.run(*linear_ir); } -void RuntimeConfigurator::update_tensor_rank(const ov::snippets::VectorDims& master_shape) { +void RuntimeConfigurator::update_tensor_rank(const ov::snippets::VectorDims& master_shape) const { m_config->tensor_rank = master_shape.size(); } @@ -257,8 +262,9 @@ void RuntimeConfigurator::update_buffer_scratchpad_size(const lowered::LinearIRC 
OPENVINO_ASSERT(!utils::is_dynamic_value(m_config->buffer_scratchpad_size), "Buffer scratchpad size must be defined!"); } -void RuntimeConfigurator::update_data_offsets(const std::vector& shapes, - const std::vector>& layouts) const { +void RuntimeConfigurator::update_data_offsets() const { + const auto& shapes = m_config->io_shapes; + const auto& layouts = m_config->io_layouts; OPENVINO_ASSERT(shapes.size() == m_io_num, "Number of custom shapes must be 0 or be equal to m_io_num"); OPENVINO_ASSERT(layouts.size() == m_io_num, "Number of custom layouts must be 0 or be equal to m_io_num"); for (size_t i = 0; i < m_io_num; ++i) { @@ -271,26 +277,17 @@ void RuntimeConfigurator::update_data_offsets(const std::vector& sha // shape: s0, s1, s2 == 1, s3 // offsets: s1*s3, s3, 0, 1 const auto& shape = shapes[i]; - if (shape == m_latest_shapes[i]) + OPENVINO_ASSERT(m_config->tensor_rank >= shape.size(), "Incorrect tensor rank!"); + if (shape == m_config->latest_shapes[i]) continue; - - const auto& layout = layouts[i]; - auto& offsets = m_config->io_data_offsets[i]; - - offsets.resize(m_config->tensor_rank); - std::fill(offsets.begin(), offsets.end(), 0); if (utils::is_dynamic_vdims(shape)) return; - size_t dim_step = m_io_data_sizes[i]; - offsets[offsets.size() - 1] = dim_step; - - OPENVINO_ASSERT(m_config->tensor_rank >= shape.size(), "Incorrect tensor rank!"); const auto idx_stride = m_config->tensor_rank - shape.size(); - for (int i = static_cast(shape.size()) - 2; i >= 0; i--) { - dim_step *= shape[i + 1]; - offsets[i + idx_stride] = shape[i] != 1 ? dim_step : 0; - } + compute_offsets(shape, i, idx_stride); + + auto& offsets = m_config->io_data_offsets[i]; + const auto& layout = layouts[i]; if (!layout.empty()) { std::vector reordered_offsets(offsets.size()); const auto is_input = i < m_in_num; @@ -318,8 +315,21 @@ std::vector> RuntimeConfigurator::extract_layouts() const { return layouts; } +void RuntimeConfigurator::compute_offsets(const ov::snippets::VectorDims& shape, size_t idx, size_t idx_stride) const { + auto& offsets = m_config->io_data_offsets[idx]; + auto dim_step = m_io_data_sizes[idx]; + + offsets.resize(m_config->tensor_rank); + std::fill(offsets.begin(), offsets.end(), 0); + offsets[offsets.size() - 1] = dim_step; + for (int i = static_cast(shape.size()) - 2; i >= 0; i--) { + dim_step *= shape[i + 1]; + offsets[i + idx_stride] = shape[i] != 1 ? 
dim_step : 0; + } +} + void RuntimeConfigurator::set_kernel_executor_table(std::shared_ptr table) const { - OPENVINO_ASSERT(table, "Failed to update Kernel Executo Table: passed table is missed"); + OPENVINO_ASSERT(table, "Failed to update Kernel Executor Table: passed table is missed"); m_config->kernel_executor_table = std::move(table); } @@ -339,181 +349,5 @@ RuntimeConfigurator::UnifiedLoopInfoRtParams RuntimeConfigurator::get_loop_runti }); return rt_params; } - -const size_t RuntimeConfigurator::MHAParallelWAOptimizer::m_dim_idx = 1; - -RuntimeConfigurator::MHAParallelWAOptimizer::MHAParallelWAOptimizer( - const ov::snippets::lowered::LinearIRCPtr& linear_ir, - RuntimeConfigurator* configurator) - : configurator(configurator) { - OPENVINO_ASSERT(configurator != nullptr, "Configurator is nullptr"); - - if (linear_ir->get_config().m_enable_domain_optimization || !linear_ir->is_dynamic()) - return; - - const auto brgemms = find_applicable_brgemms(linear_ir); - // Parallel WA optimization is Brgemm related - if (brgemms.empty()) - return; - - concurrency = linear_ir->get_config().m_min_parallel_work_amount; - // At the moment this optimization is Brgemm related so there must be `unsqueezed_params` - unsqueezed_params = find_unsqueezed_params(linear_ir, brgemms); - OPENVINO_ASSERT(!unsqueezed_params.empty(), "unsqueezed_params mustn't be empty after initialization"); - loops_to_split = find_loops_to_split(linear_ir, unsqueezed_params); - - m_dim_idces.resize(configurator->m_io_num); - optimized_layouts.resize(configurator->m_io_num); - for (size_t i = 0; i < configurator->m_io_num; ++i) { - const auto& layout = configurator->m_io_descs[i]->get_layout(); - const auto dim_idx = i < configurator->m_in_num ? utils::get_input_dim_idx(layout, m_dim_idx) - : utils::get_output_dim_idx(layout, m_dim_idx); - m_dim_idces[i] = dim_idx; - optimized_layouts[i] = SplitDimensionM::get_updated_order(layout, i < configurator->m_in_num ? 
dim_idx : layout.size() - 2); - } -} - -bool RuntimeConfigurator::MHAParallelWAOptimizer::enabled() const { - return !loops_to_split.empty(); -} - -bool RuntimeConfigurator::MHAParallelWAOptimizer::optimize() { - OPENVINO_ASSERT(configurator != nullptr, "Configurator is nullptr"); - if (!enabled()) - return false; - - size_t new_batch_dim, new_kernel_dim; - if (!SplitDimensionM::split(configurator->m_config->master_shape, concurrency, new_batch_dim, new_kernel_dim)) - return false; - - auto& master_shape = configurator->m_config->master_shape; - *++master_shape.rbegin() = new_kernel_dim; - master_shape.insert(master_shape.cbegin() + master_shape.size() - 2, new_batch_dim); - configurator->update_tensor_rank(master_shape); - - LoopInfoRuntimeParamsMap initialized_info; - auto updater = [&](const lowered::LoopInfoPtr& loop_info) { - if (const auto unified_loop_info = ov::as_type_ptr(loop_info)) { - if (initialized_info.count(unified_loop_info) == 0) { - if (!ov::is_type(unified_loop_info)) - unified_loop_info->set_work_amount(new_kernel_dim); - utils::update_data_pointer_shifts(unified_loop_info); - initialized_info[unified_loop_info] = get_loop_runtime_params(unified_loop_info); - } - } else if (const auto expanded_loop_info = ov::as_type_ptr(loop_info)) { - configurator->update_expanded_loop_info(expanded_loop_info, initialized_info); - } else { - OPENVINO_THROW("Failed to update loop info: unknown type!"); - } - }; - lowered::LoopInfoSet updated_loops; - for (const auto& loop : loops_to_split) { - loop->apply(updater, updated_loops); - } - - auto shapes = configurator->extract_shapes(); - for (size_t i = 0; i < configurator->m_io_num; ++i) { - shapes[i] = unsqueezed_params.count(i) - ? SplitDimensionM::unsqueeze_m_dim(shapes[i], m_dim_idces[i]) - : SplitDimensionM::reshape_m_dim(shapes[i], m_dim_idces[i], new_batch_dim, new_kernel_dim); - } - configurator->update_data_offsets(shapes, optimized_layouts); - configurator->m_latest_shapes = std::move(shapes); - return true; -} - -std::unordered_set RuntimeConfigurator::MHAParallelWAOptimizer::find_applicable_brgemms( - const lowered::LinearIRCPtr& linear_ir) { - auto is_brgemm = [](const ExpressionPtr& expr) { - return ov::is_type(expr->get_node()); - }; - auto brgemm_it = std::find_if(linear_ir->begin(), linear_ir->end(), is_brgemm); - std::unordered_set brgemms; - while (brgemm_it != linear_ir->end()) { - brgemms.insert(*brgemm_it); - brgemm_it = std::find_if(std::next(brgemm_it), linear_ir->end(), is_brgemm); - } - const auto& loop_manager = linear_ir->get_loop_manager(); - // Brgemm is applicable if it has dynamic loop by M - // The loop by M is necessary since only in this case we can regulate BrgemmExecutor parameters (via loop's work amount) - // Only dynamic loops are applicable since in static case LoopEnd expressions are not updated during code generation and compiled as is - // Ticket: 148805 - auto applicable_brgemm = [&loop_manager](const ExpressionPtr& expr) { - const auto& loop_idces = expr->get_loop_ids(); - if (loop_idces.empty()) - return false; - const auto& outermost_loop = loop_manager->get_loop_info(loop_idces[0]); - if (!utils::is_dynamic_value(outermost_loop->get_work_amount())) - return false; - bool loop_by_m = true; - outermost_loop->iterate_through_ports([&loop_by_m](const LoopPort& port) { - if (port.is_incremented && port.dim_idx != m_dim_idx) - loop_by_m = false; - }); - return loop_by_m; - }; - // Note: if at least one brgemm is inapplicable, the parallel work amount optimization can't be applied - return 
std::all_of(brgemms.begin(), brgemms.end(), applicable_brgemm) ? brgemms : std::unordered_set{}; -} - -std::unordered_set RuntimeConfigurator::MHAParallelWAOptimizer::find_unsqueezed_params( - const lowered::LinearIRCPtr& linear_ir, - const std::unordered_set& brgemms) { - const auto& params = linear_ir->get_parameters(); - std::unordered_set unsqueezed_params; - auto add_param = [¶ms, &unsqueezed_params](const ExpressionPtr& expr) { - if (ov::is_type(expr->get_node())) { - auto found_param = std::find(params.begin(), params.end(), expr); - OPENVINO_ASSERT(found_param != params.end(), "find_param didn't found parameter for expr"); - unsqueezed_params.insert(std::distance(params.begin(), found_param)); - } - }; - - std::unordered_set visited; - for (const auto& brgemm : brgemms) { - const auto& brgemm_b_input = brgemm->get_input_port_connector(1)->get_source().get_expr(); - utils::visit_path(brgemm_b_input, visited, add_param, true); - } - return unsqueezed_params; -} - -std::vector RuntimeConfigurator::MHAParallelWAOptimizer::find_loops_to_split( - const lowered::LinearIRCPtr& linear_ir, - const std::unordered_set& unsqueezed_params) { - const auto loop_manager = linear_ir->get_loop_manager(); - std::set loop_idces_to_split; - std::vector prev_loop_idces; - - auto add_loop_idx_to_split = [&](const ExpressionPtr& expr) { - const auto& loop_idces = expr->get_loop_ids(); - if (loop_idces != prev_loop_idces) { - prev_loop_idces = loop_idces; - for (const auto& loop_id : loop_idces) { - const auto expanded_loop_info = loop_manager->get_loop_info(loop_id); - if (expanded_loop_info->get_dim_idx() == m_dim_idx) { - loop_idces_to_split.insert(loop_id); - } - } - } - }; - - size_t i = 0; - std::unordered_set visited; - // The idea is to traverse LIR down from the M dimension related parameters - // and find all the outermost loops: these loops will be split in runtime - for (const auto& param : linear_ir->get_parameters()) { - // Ops after non related params mustn't be traversed - if (unsqueezed_params.count(i++)) - continue; - utils::visit_path(param, visited, add_loop_idx_to_split, false); - } - - const auto& loops_map = linear_ir->get_loop_manager()->get_map(); - std::vector loops_to_split; - for (const auto& id : loop_idces_to_split) - loops_to_split.push_back(ov::as_type_ptr(loops_map.at(id))); - return loops_to_split; -} - } // namespace snippets } // namespace ov diff --git a/src/common/snippets/tests/include/utils/split_dim_m.hpp b/src/common/snippets/tests/include/utils/split_dim_m.hpp new file mode 100644 index 00000000000000..3e04c2a911d76a --- /dev/null +++ b/src/common/snippets/tests/include/utils/split_dim_m.hpp @@ -0,0 +1,37 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include + +namespace ov { +namespace test { +namespace snippets { + +struct InputData { + size_t cur_batch; + size_t cur_m; + size_t concurrency; +}; + +struct ReferenceData { + bool is_split; + size_t batch_m; + size_t kernel_m; +}; + +struct SplitDimensionMParams { + InputData input; + ReferenceData reference; +}; + +class SplitDimensionMTest : public testing::TestWithParam { +public: + static std::string getTestCaseName(testing::TestParamInfo obj); +}; + +} // namespace snippets +} // namespace test +} // namespace ov diff --git a/src/common/snippets/tests/src/pass/mha_tokenization.cpp b/src/common/snippets/tests/src/pass/mha_tokenization.cpp index c5932ed690d670..382257f935cc49 100644 --- a/src/common/snippets/tests/src/pass/mha_tokenization.cpp +++ 
b/src/common/snippets/tests/src/pass/mha_tokenization.cpp @@ -204,7 +204,7 @@ TEST_F(TokenizeMHASnippetsTests, smoke_Snippets_MHA4D_SplitM_withMul) { TEST_F(TokenizeMHASnippetsTests, smoke_Snippets_MHAWOTranspose_SplitM) { const auto& f = MHAWOTransposeSplitMFunction(std::vector{{10, 9216, 128}, {10, 128, 9216}, {10, 9216, 128}}, std::vector({ov::element::f32, ov::element::f32, ov::element::f32}), - std::vector{{10, 3, 3072, 128}, {10, 1, 128, 9216}, {10, 1, 9216, 128}, {10, 9216, 128}}); + std::vector{{10, 18, 512, 128}, {10, 1, 128, 9216}, {10, 1, 9216, 128}, {10, 9216, 128}}); model = f.getOriginal(); model_ref = f.getReference(); config.set_concurrency(18); diff --git a/src/common/snippets/tests/src/utils/split_dim_m.cpp b/src/common/snippets/tests/src/utils/split_dim_m.cpp new file mode 100644 index 00000000000000..9e801fceae02e9 --- /dev/null +++ b/src/common/snippets/tests/src/utils/split_dim_m.cpp @@ -0,0 +1,72 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "utils/split_dim_m.hpp" + +#include "common_test_utils/ov_test_utils.hpp" +#include "snippets/pass/split_dimension_m.hpp" +#include "snippets/utils/utils.hpp" + +namespace ov { +namespace test { +namespace snippets { + +std::string SplitDimensionMTest::getTestCaseName(testing::TestParamInfo obj) { + const auto& input = obj.param.input; + const auto& reference = obj.param.reference; + std::ostringstream result; + result << "Batch=" << input.cur_batch << "_"; + result << "CurM=" << input.cur_m << "_"; + result << "OptimalParallelWorkAmount=" << input.concurrency << "_"; + result << "IsSplit=" << reference.is_split << "_"; + result << "BatchM=" << reference.batch_m << "_"; + result << "KernelM=" << reference.kernel_m; + return result.str(); +} + +TEST_P(SplitDimensionMTest, SplitDimensionM) { + const auto& input = GetParam().input; + const auto& reference = GetParam().reference; + + // last_dim is fixed since it doesn't affect the SplitDimensionM result. 
+ static const size_t last_dim = 1024; + ov::Shape shape = {input.cur_batch, input.cur_m, last_dim}; + size_t batch_m_dim, new_m_dim; + bool result = ov::snippets::pass::SplitDimensionM::split(shape, + input.concurrency, + batch_m_dim, + new_m_dim); + + ASSERT_EQ(result, reference.is_split); + if (result) { + ASSERT_EQ(batch_m_dim, reference.batch_m); + ASSERT_EQ(new_m_dim, reference.kernel_m); + } +} + +namespace SplitDimensionMInstantiation { +const std::vector split_dimension_cases = { + // Negative test cases: split is not needed + {InputData{40 /*cur_batch*/, 32 /*cur_m*/, 40 /*concurrency*/}, ReferenceData{false /*is_split*/}}, + {InputData{65, 32, 40}, ReferenceData{false}}, + + // Positive test cases + {InputData{20 /*cur_batch*/, 32 /*cur_m*/, 40 /*concurrency*/}, ReferenceData{true /*is_split*/, 2 /*batch_m*/, 16 /*kernel_m*/}}, + {InputData{30, 60, 40}, ReferenceData{true, 2, 30}}, + {InputData{10, 100, 40}, ReferenceData{true, 4, 25}}, + {InputData{15, 45, 40}, ReferenceData{true, 5, 9}}, + {InputData{25, 50, 40}, ReferenceData{true, 2, 25}}, + {InputData{5, 16384, 40}, ReferenceData{true, 8, 2048}}, + {InputData{5, 16384, 32}, ReferenceData{true, 32, 512}}, +}; + +INSTANTIATE_TEST_SUITE_P(smoke_Snippets_SplitDimensionM, + SplitDimensionMTest, + ::testing::ValuesIn(split_dimension_cases), + SplitDimensionMTest::getTestCaseName); + +} // namespace SplitDimensionMInstantiation +} // namespace snippets +} // namespace test +} // namespace ov \ No newline at end of file diff --git a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp index 1c3d283ab673b1..b2758735b2d27a 100644 --- a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp +++ b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.cpp @@ -8,10 +8,12 @@ #include "snippets/utils/utils.hpp" #ifndef OPENVINO_ARCH_ARM64 -#include "transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.hpp" +#include "transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.hpp" +#include "transformations/snippets/x64/pass/lowered/external_repacking_adjuster.hpp" #endif namespace ov { namespace intel_cpu { +using namespace ov::snippets::lowered::pass; const size_t CPURuntimeConfigurator::rank6D = 6; @@ -41,37 +43,19 @@ CPURuntimeConfigurator::CPURuntimeConfigurator() : ov::snippets::RuntimeConfigur void CPURuntimeConfigurator::initialization(const ov::snippets::lowered::LinearIRCPtr& linear_ir) { RuntimeConfigurator::initialization(linear_ir); - if (linear_ir->is_dynamic()) { - loopPortsAdjuster = BrgemmCopyBLoopPortsAdjuster(linear_ir); - } +#ifndef OPENVINO_ARCH_ARM64 + RuntimeOptimizer::register_if_applicable(m_intermediate_optimizers, linear_ir, this); + RuntimeOptimizer::register_if_applicable(m_final_optimizers, linear_ir, this); +#endif } void CPURuntimeConfigurator::update(const ov::snippets::lowered::LinearIRCPtr& linear_ir) { - m_config->master_shape = linear_ir->get_master_shape(); - if (linear_ir->is_dynamic()) { - update_loop_info(linear_ir); - } - - if (!m_optimizer.optimize()) { - // If the optimization was not applied, offsets are updated using shapes from descriptors - auto shapes = extract_shapes(); - update_data_offsets(shapes, extract_layouts()); - m_latest_shapes = std::move(shapes); - } + RuntimeConfigurator::update(linear_ir); if (linear_ir->is_dynamic()) - loopPortsAdjuster.optimize(); - - // Update KernelExecutor Table should be before 
`update_buffer_scratchpad_size` - // because `ComputeAllocationSize` depends on subtensors which are updated in the table - get_kernel_executor_table()->update_state(linear_ir); - update_buffer_scratchpad_size(linear_ir); - - if (linear_ir->is_dynamic()) { update_loop_args(linear_ir); - } } -void CPURuntimeConfigurator::update_tensor_rank(const ov::snippets::VectorDims& master_shape) { +void CPURuntimeConfigurator::update_tensor_rank(const ov::snippets::VectorDims& master_shape) const { m_config->tensor_rank = std::max(master_shape.size(), rank6D); } @@ -101,40 +85,5 @@ void CPURuntimeConfigurator::update_loop_args(const ov::snippets::lowered::Linea } } } -#ifdef OPENVINO_ARCH_ARM64 -CPURuntimeConfigurator::BrgemmCopyBLoopPortsAdjuster::BrgemmCopyBLoopPortsAdjuster(const ov::snippets::lowered::LinearIRCPtr& linear_ir) { -} - -void CPURuntimeConfigurator::BrgemmCopyBLoopPortsAdjuster::optimize() { -} -#else -CPURuntimeConfigurator::BrgemmCopyBLoopPortsAdjuster::BrgemmCopyBLoopPortsAdjuster(const ov::snippets::lowered::LinearIRCPtr& linear_ir) { - const auto& pass = std::make_shared(); - pass->run(*linear_ir); - const auto& affected_uni_loops = pass->get_affected_loops(); - const auto& loop_map = linear_ir->get_loop_manager()->get_map(); - for (const auto& p : loop_map) { - if (const auto& exp_loop = ov::as_type_ptr(p.second)) { - const auto& uni_loop = exp_loop->get_unified_loop_info(); - if (affected_uni_loops.count(uni_loop)) - m_affected_uni2exp_map[uni_loop].push_back(exp_loop); - } - } -} - -void CPURuntimeConfigurator::BrgemmCopyBLoopPortsAdjuster::optimize() { - for (const auto& p : m_affected_uni2exp_map) { - const auto& uni_loop = p.first; - const auto& exp_loops = p.second; - snippets::RuntimeConfigurator::LoopInfoRuntimeParamsMap initialized_info; - if (intel_cpu::pass::AdjustBrgemmCopyBLoopPorts::update_loop_info(uni_loop)) { - initialized_info[uni_loop] = get_loop_runtime_params(uni_loop); - for (const auto& exp_loop : exp_loops) - update_expanded_loop_info(exp_loop, initialized_info); - } - } -} -#endif - } // namespace intel_cpu } // namespace ov diff --git a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp index d8ef9772e813ff..42ce35a3c66c2b 100644 --- a/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp +++ b/src/plugins/intel_cpu/src/emitters/snippets/cpu_runtime_configurator.hpp @@ -4,10 +4,10 @@ #pragma once -#include "snippets/runtime_configurator.hpp" - -#include "snippets/lowered/port_descriptor.hpp" #include "emitters/snippets/jit_snippets_call_args.hpp" +#include "memory_desc/cpu_blocked_memory_desc.h" +#include "snippets/lowered/port_descriptor.hpp" +#include "snippets/runtime_configurator.hpp" namespace ov { namespace intel_cpu { @@ -22,48 +22,25 @@ class CPURuntimeConfig : public ov::snippets::RuntimeConfig { #endif std::vector loop_args = {}; + std::unordered_map m_in_requested_descs = {}; }; class CPURuntimeConfigurator : public ov::snippets::RuntimeConfigurator { public: CPURuntimeConfigurator(); -protected: /** - * @brief Update RuntimeConfig based on LinearIR + * @brief Calculate Loop parameters of Loop emitters and update these values in CPURuntimeConfig * @param linear_ir LinearIR */ + void update_loop_args(const ov::snippets::lowered::LinearIRCPtr& linear_ir) const; +protected: void update(const ov::snippets::lowered::LinearIRCPtr& linear_ir) override; - /** - * @brief Update tensor rank based on master shape - * @param master_shape 
Master shape - */ - void update_tensor_rank(const ov::snippets::VectorDims& master_shape) override; - /** - * @brief Initializes tensor rank of config - * @param linear_ir LinearIR - */ + void update_tensor_rank(const ov::snippets::VectorDims& master_shape) const override; void init_tensor_rank(const ov::snippets::lowered::LinearIRCPtr& linear_ir) const override; void initialization(const ov::snippets::lowered::LinearIRCPtr& linear_ir) override; - /** - * @brief Calculate Loop parameters of Loop emitters and update these values in CPURuntimeConfig - * @param linear_ir LinearIR - */ - void update_loop_args(const ov::snippets::lowered::LinearIRCPtr& linear_ir) const; static const size_t rank6D; - - class BrgemmCopyBLoopPortsAdjuster { - public: - BrgemmCopyBLoopPortsAdjuster() = default; - BrgemmCopyBLoopPortsAdjuster(const ov::snippets::lowered::LinearIRCPtr& linear_ir); - - void optimize(); - - private: - std::unordered_map> m_affected_uni2exp_map; - } loopPortsAdjuster; }; } // namespace intel_cpu diff --git a/src/plugins/intel_cpu/src/nodes/reorder.cpp b/src/plugins/intel_cpu/src/nodes/reorder.cpp index 9b521cdb3b57c7..7257e31369bd66 100644 --- a/src/plugins/intel_cpu/src/nodes/reorder.cpp +++ b/src/plugins/intel_cpu/src/nodes/reorder.cpp @@ -17,13 +17,7 @@ #include #include -#include "convert.h" #include "cpu/x64/cpu_isa_traits.hpp" -#include "nodes/common/cpu_convert.h" -#include "nodes/common/cpu_memcpy.h" -#include "nodes/common/reorder_prim.h" -#include "openvino/core/parallel.hpp" -#include "shape_inference/shape_inference_pass_through.hpp" #include "utils/precision_support.h" #include "nodes/executors/executor.hpp" #include "nodes/executors/transpose_list.hpp" diff --git a/src/plugins/intel_cpu/src/nodes/subgraph.cpp b/src/plugins/intel_cpu/src/nodes/subgraph.cpp index ee24dd66493204..a23835d398cbe7 100644 --- a/src/plugins/intel_cpu/src/nodes/subgraph.cpp +++ b/src/plugins/intel_cpu/src/nodes/subgraph.cpp @@ -35,6 +35,7 @@ #include "transformations/snippets/x64/pass/lowered/insert_brgemm_copy_b_buffers.hpp" #include "transformations/snippets/x64/pass/remove_converts.hpp" #include "transformations/snippets/x64/pass/brgemm_to_brgemm_cpu.hpp" +#include "transformations/snippets/x64/pass/eliminate_brgemm_copy_b.hpp" #include "transformations/snippets/x64/pass/enforce_precision.hpp" #include "transformations/snippets/x64/shape_inference.hpp" #include "transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.hpp" @@ -79,14 +80,14 @@ class SubgraphStaticExecutor : public Subgraph::SubgraphExecutor { const BufferScratchpadAllocator& allocator) : SubgraphExecutor(snippet_attrs, snippet, start_offset_in, start_offset_out, snippet_config, allocator) {} - void exec(const std::vector& inMemPtrs, const std::vector& outMemPtrs) override { + void exec_impl(const std::vector& inMemPtrs, const std::vector& outMemPtrs) override { const auto& callable = m_schedule->get_callable(); auto initializer = [&](jit_snippets_call_args& call_args, size_t ithr) { init_call_args(call_args, inMemPtrs, outMemPtrs, ithr); }; - auto caller = [&](jit_snippets_call_args& call_args, const size_t* indexes) { - callable(&call_args, indexes); + auto caller = [&](jit_snippets_call_args& call_args, const std::vector& indexes) { + callable(&call_args, indexes.data()); }; if (m_parallel_exec_domain.size() == rank6D) { @@ -127,7 +128,7 @@ class SubgraphDynamicSpecializedExecutor : public Subgraph::SubgraphExecutor { reset_exec_table_state = snippet_config->kernel_executor_table->get_state_reset(); } - void 
exec(const std::vector& inMemPtrs, const std::vector& outMemPtrs) override { + void exec_impl(const std::vector& inMemPtrs, const std::vector& outMemPtrs) override { const auto& callable = m_schedule->get_callable(); OPENVINO_ASSERT(data_offsets.size() == inMemPtrs.size() + outMemPtrs.size(), "Incorrect data offset count!"); @@ -144,7 +145,7 @@ class SubgraphDynamicSpecializedExecutor : public Subgraph::SubgraphExecutor { auto initializer = [&](jit_snippets_call_args& call_args, size_t ithr) { init_call_args(call_args, ithr); }; - auto caller = [&](jit_snippets_call_args& call_args, const size_t* indexes) { + auto caller = [&](jit_snippets_call_args& call_args, const std::vector& indexes) { update_ptrs(call_args, src_ptrs, dst_ptrs, indexes); callable(&call_args); }; @@ -181,17 +182,17 @@ class SubgraphDynamicSpecializedExecutor : public Subgraph::SubgraphExecutor { } inline void update_ptrs(jit_snippets_call_args& call_args, const std::vector& src_ptrs, - const std::vector& dst_ptrs, const size_t* indexes) const { + const std::vector& dst_ptrs, const std::vector& indexes) const { for (size_t i = 0; i < src_ptrs.size(); i++) { auto i_ptr = src_ptrs[i]; - for (size_t j = 0; j < data_offsets[i].size() - 1; j++) { + for (size_t j = 0; j < indexes.size(); j++) { i_ptr += data_offsets[i][j] * indexes[j]; } call_args.src_ptrs[i] = i_ptr; } for (size_t i = 0; i < dst_ptrs.size(); i++) { auto i_ptr = dst_ptrs[i]; - for (size_t j = 0; j < data_offsets[i + src_ptrs.size()].size() - 1; j++) { + for (size_t j = 0; j < indexes.size(); j++) { i_ptr += data_offsets[i + src_ptrs.size()][j] * indexes[j]; } call_args.dst_ptrs[i] = i_ptr; @@ -648,6 +649,8 @@ Subgraph::DataFlowPasses Subgraph::getDataFlowPasses() { } SNIPPETS_REGISTER_PASS_RELATIVE_X86_64(Place::Before, ov::snippets::pass::PropagatePrecision, ov::intel_cpu::pass::BrgemmToBrgemmCPU); + SNIPPETS_REGISTER_PASS_RELATIVE_X86_64(Place::After, ov::intel_cpu::pass::BrgemmToBrgemmCPU, + ov::intel_cpu::pass::EliminateBrgemmCopyB); SNIPPETS_REGISTER_PASS_ABSOLUTE_X86_64(Place::PipelineEnd, ov::intel_cpu::pass::RemoveConverts); SNIPPETS_REGISTER_PASS_ABSOLUTE_COMMON(Place::PipelineEnd, ov::intel_cpu::pass::MulAddToFMA); @@ -782,7 +785,12 @@ void Subgraph::prepareParams() { snippet->get_runtime_configurator()->set_kernel_executor_table(code_gen->get()->lowering_result.kernel_executor_table); } const auto& snippet_config = ov::as_type_ptr(snippet->update_runtime_config()); - return std::make_shared(key.attrs, code_gen, start_offset_in, start_offset_out, snippet_config, allocator); + return std::make_shared(key.attrs, + code_gen, + start_offset_in, + start_offset_out, + snippet_config, + allocator); } else { // Static case: // 1. Update runtime config to get static scheduling data (io data offsets, parallel domain) which will be compiled in JIT code @@ -793,7 +801,12 @@ void Subgraph::prepareParams() { [&snippet_config](const SubgraphCodeGeneratorKey& key) -> std::shared_ptr { return std::make_shared(key.attrs, snippet_config); }); - return std::make_shared(key.attrs, code_gen_result.first, start_offset_in, start_offset_out, snippet_config, allocator); + return std::make_shared(key.attrs, + code_gen_result.first, + start_offset_in, + start_offset_out, + snippet_config, + allocator); } }; @@ -846,7 +859,7 @@ bool Subgraph::created() const { void Subgraph::execute(dnnl::stream strm) { OPENVINO_ASSERT(execPtr, "Can't execute Subgraph node. 
Primitive didn't created"); - execPtr->exec(srcMemPtrs, dstMemPtrs); + execPtr->execute(strm, srcMemPtrs, dstMemPtrs); } void Subgraph::executeDynamicImpl(dnnl::stream strm) { @@ -893,7 +906,16 @@ Subgraph::SubgraphExecutor::SubgraphExecutor(const std::shared_ptrbuffer_scratchpad_size; OPENVINO_ASSERT(!ov::snippets::utils::is_dynamic_value(m_buffer_scratchpad_size), "Undefined buffer scratchpad size!"); - m_buffer_scratchpad = allocator(static_cast(m_nthreads) * m_buffer_scratchpad_size); + m_internal_buffer_size = static_cast(m_nthreads) * m_buffer_scratchpad_size; + m_in_requested_descs = snippet_config->m_in_requested_descs; + const auto external_repacking_buffer_size = + std::accumulate(m_in_requested_descs.begin(), + m_in_requested_descs.end(), + size_t(0), + [](size_t sum, const std::pair& requested_desc_elem) { + return sum + requested_desc_elem.second->getCurrentMemSize(); + }); + m_buffer_scratchpad = allocator(m_internal_buffer_size + external_repacking_buffer_size); #if defined(__linux__) && defined(OPENVINO_ARCH_X86_64) && defined(SNIPPETS_DEBUG_CAPS) const auto target = std::dynamic_pointer_cast(snippet_attrs->snippet->get_generator()->get_target_machine()); @@ -919,7 +941,7 @@ void Subgraph::SubgraphExecutor::segfault_detector() { #endif void Subgraph::SubgraphExecutor::parallel_for6d(const std::function& initializer, - const std::function& caller) { + const std::function&)>& caller) { const auto& dom = m_parallel_exec_domain; #if defined(__linux__) && defined(OPENVINO_ARCH_X86_64) && defined(SNIPPETS_DEBUG_CAPS) @@ -933,7 +955,7 @@ void Subgraph::SubgraphExecutor::parallel_for6d(const std::function indexes{0, 0, 0, 0, 0}; parallel_it_init(start, indexes[0], dom[0], indexes[1], dom[1], indexes[2], dom[2], indexes[3], dom[3], indexes[4], dom[4]); for (size_t iwork = start; iwork < end; ++iwork) { caller(call_args, indexes); @@ -943,7 +965,7 @@ void Subgraph::SubgraphExecutor::parallel_for6d(const std::function& initializer, - const std::function& caller) { + const std::function&)>& caller) { const auto& dom = m_parallel_exec_domain; #if defined(__linux__) && defined(OPENVINO_ARCH_X86_64) && defined(SNIPPETS_DEBUG_CAPS) @@ -965,11 +987,36 @@ void Subgraph::SubgraphExecutor::parallel_forNd(const std::function& inMemPtrs, const std::vector& outMemPtrs) { + if (!m_in_requested_descs.empty()) { + auto reorderedInMemPtrs = reorder_inputs(strm, inMemPtrs); + exec_impl(reorderedInMemPtrs, outMemPtrs); + } else { + exec_impl(inMemPtrs, outMemPtrs); + } +} + +std::vector Subgraph::SubgraphExecutor::reorder_inputs(const dnnl::stream& strm, const std::vector& inMemPtrs) { + auto reordered_in_ptrs = inMemPtrs; + size_t offset = m_internal_buffer_size; + for (const auto& requested_descs_elem : m_in_requested_descs) { + const auto in_idx = requested_descs_elem.first; + const auto& requested_desc = requested_descs_elem.second; + + const void* data_ptr = m_buffer_scratchpad->getDataAs() + offset; + const auto scratch_mem = std::make_shared(strm.get_engine(), requested_desc, data_ptr, false); + scratch_mem->load(*reordered_in_ptrs[in_idx]); + reordered_in_ptrs[in_idx] = scratch_mem; + offset += requested_desc->getCurrentMemSize(); + } + return reordered_in_ptrs; +} + } // namespace node } // namespace intel_cpu } // namespace ov diff --git a/src/plugins/intel_cpu/src/nodes/subgraph.h b/src/plugins/intel_cpu/src/nodes/subgraph.h index ffd7944c59d48a..8040da0a98ef57 100644 --- a/src/plugins/intel_cpu/src/nodes/subgraph.h +++ b/src/plugins/intel_cpu/src/nodes/subgraph.h @@ -129,13 +129,15 @@ class 
Subgraph::SubgraphExecutor { const BufferScratchpadAllocator& allocator); virtual ~SubgraphExecutor() = default; - virtual void exec(const std::vector& inMemPtrs, const std::vector& outMemPtrs) = 0; + void execute(const dnnl::stream& strm, const std::vector& inMemPtrs, const std::vector& outMemPtrs); protected: + virtual void exec_impl(const std::vector& inMemPtrs, const std::vector& outMemPtrs) = 0; + void parallel_for6d(const std::function& initializer, - const std::function& caller); + const std::function&)>& caller); void parallel_forNd(const std::function& initializer, - const std::function& caller); + const std::function&)>& caller); inline void update_scratchpad_ptr(void*& scratchpad_ptr, size_t ithr) const { if (m_buffer_scratchpad_size > 0) @@ -151,6 +153,7 @@ class Subgraph::SubgraphExecutor { // Buffer scratchpad MemoryPtr m_buffer_scratchpad = nullptr; size_t m_buffer_scratchpad_size = 0; + size_t m_internal_buffer_size = 0; const size_t rank6D = 6; @@ -164,6 +167,11 @@ class Subgraph::SubgraphExecutor { bool enabled_segfault_detector = false; inline void segfault_detector(); #endif + +private: + std::vector reorder_inputs(const dnnl::stream& strm, const std::vector& inMemPtrs); + + std::unordered_map m_in_requested_descs = {}; }; } // namespace node diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.cpp index b40bd88f31726b..1c3e90bbccf34f 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.cpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.cpp @@ -68,13 +68,9 @@ void BrgemmCPU::custom_constructor_validate_and_infer_types(std::vector INTERNAL_OP_SCOPE(BrgemmCPU_constructor_validate_and_infer_types); validate_inputs(); - // During ctor call, BrgemmCPU doesn't know his port descriptors. - // So we use port descs from source inputs - const auto brgemm_copy = with_repacking(m_type) ? get_brgemm_copy() : nullptr; - const auto planar_input_shapes = - std::vector{ snippets::utils::get_planar_pshape(get_input_partial_shape(0), layout_a), - brgemm_copy ? 
snippets::utils::get_planar_pshape(brgemm_copy->input(0)) - : snippets::utils::get_planar_pshape(get_input_partial_shape(1), layout_b) }; + const std::vector planar_input_shapes{ + snippets::utils::get_planar_pshape(get_input_partial_shape(0), layout_a), + snippets::utils::get_planar_pshape(get_input_partial_shape(1), layout_b)}; auto output_shape = infer_output_partial_shape(planar_input_shapes); set_output_type(0, get_output_type(), snippets::utils::get_planar_pshape(output_shape, layout_c)); @@ -130,20 +126,6 @@ std::shared_ptr BrgemmCPU::clone_with_new_inputs(const OutputVector& new_a } } -std::shared_ptr BrgemmCPU::get_brgemm_copy() const { - OPENVINO_ASSERT(one_of(m_type, BRGEMM_TYPE::REPACKING_ONLY, BRGEMM_TYPE::WITH_COMPENSATIONS, BRGEMM_TYPE::WITH_AMX), "Brgemm doesn't need BrgemmCopyB"); - auto b_input_node = get_input_node_shared_ptr(1); - if (const auto brgemm_copy_b = ov::as_type_ptr(b_input_node)) { - return brgemm_copy_b; - } - if (ov::is_type(b_input_node)) { - if (const auto brgemm_copy_b = ov::as_type_ptr(b_input_node->get_input_node_shared_ptr(0))) { - return brgemm_copy_b; - } - } - OPENVINO_THROW("BrgemmCopyB hasn't been found!"); -} - size_t BrgemmCPU::get_offset_scratch() const { OPENVINO_ASSERT(with_scratchpad(m_type) && get_input_size() == 3, "Offset of scratchpad must be only in Brgemm with scratchpad on 3rd input"); return get_input_offset(2); diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.hpp index a646ffc792fd6d..a781bc7ddd4e15 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.hpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_cpu.hpp @@ -44,7 +44,6 @@ class BrgemmCPU : public snippets::op::Brgemm { BRGEMM_TYPE get_type() const { return m_type; } size_t get_offset_scratch() const; - std::shared_ptr get_brgemm_copy() const; bool visit_attributes(AttributeVisitor& visitor) override; diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.cpp index adc215ef1d9900..6a4fc83d409355 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.cpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.cpp @@ -6,7 +6,10 @@ #include "dnnl_extension_utils.h" #include "emitters/utils.hpp" +#include "snippets/lowered/expressions/buffer_expression.hpp" +#include "snippets/op/buffer.hpp" #include "transformations/snippets/x64/op/brgemm_copy_b.hpp" +#include "transformations/snippets/x64/op/brgemm_cpu.hpp" #include "utils/general_utils.h" using namespace Xbyak; @@ -83,6 +86,21 @@ size_t compute_inner_n_block(const ov::element::Type& precision) { default: OPENVINO_THROW("BrgemmCopyB doesn't support precision ", precision); } } + +ov::snippets::lowered::ExpressionPtr get_copy_b_expr(const ov::snippets::lowered::ExpressionPtr& brgemm_expr) { + OPENVINO_ASSERT(ov::is_type(brgemm_expr->get_node()), "get_copy_b_expr must be called only for BrgemmCPU node"); + const auto b_input_expr = brgemm_expr->get_input_port_connector(1)->get_source().get_expr(); + if (ov::is_type(b_input_expr->get_node())) { + return b_input_expr; + } else if (ov::is_type(b_input_expr)) { + OPENVINO_ASSERT(b_input_expr->get_input_count() >= 1, "BufferExpression on brgemm's B input must have at least one input"); + const auto input_buffer_expr = 
b_input_expr->get_input_port_connector(0)->get_source().get_expr(); + if (ov::is_type(input_buffer_expr->get_node())) { + return input_buffer_expr; + } + } + return nullptr; +} } // namespace repacking } // namespace brgemm_utils } // namespace intel_cpu diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.hpp index aeb5b22cd56129..0d8e3f5fb6fc9b 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.hpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/op/brgemm_utils.hpp @@ -18,7 +18,7 @@ enum class BRGEMM_TYPE { STAND_ALONE, // No extra requirements, used for f32|f32 WITH_AMX, // i8|i8 or bf16|bf16 on AMX system - needs BrgemmCopyB and scratchpad WITH_COMPENSATIONS, // i8|i8 (non-AMX system) - needs BrgemmCopyB for data repacking and compensations - REPACKING_ONLY // u8|i8 or bf16|bf16 (non-AMX system) - needs BrgemmCopyB on second input for data repacking + REPACKING_ONLY, // u8|i8, or bf16|bf16 (non-AMX system), or brgemm with transpose_b=true - needs BrgemmCopyB on second input for data repacking }; dnnl::impl::cpu::x64::cpu_isa_t get_primitive_isa(const ov::element::Type& dt_in0, bool is_with_amx); @@ -56,6 +56,12 @@ T compute_LDB(T n_block, const ov::element::Type& precision) { n_block : std::max(n_block, static_cast(compute_inner_n_block(precision))); } +/** + * @brief Retrieves the expression pointer for the brgemm_copy_b expression corresponding to the given BrgemmCPU expression. + * @param brgemm_expr The expression pointer for the BrgemmCPU operation. + * @return The expression pointer for the BrgemmCopyB operation. + */ +snippets::lowered::ExpressionPtr get_copy_b_expr(const snippets::lowered::ExpressionPtr& brgemm_expr); } // namespace repacking } // namespace brgemm_utils } // namespace intel_cpu diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.cpp new file mode 100644 index 00000000000000..4ad2bb8a11a667 --- /dev/null +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.cpp @@ -0,0 +1,46 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "eliminate_brgemm_copy_b.hpp" + +#include "cpu/x64/cpu_isa_traits.hpp" +#include "openvino/pass/pattern/matcher.hpp" +#include "openvino/pass/pattern/op/wrap_type.hpp" +#include "openvino/pass/pattern/op/optional.hpp" +#include "snippets/itt.hpp" +#include "snippets/op/rank_normalization.hpp" +#include "transformations/snippets/x64/op/brgemm_copy_b.hpp" + +namespace ov { +namespace intel_cpu { + +pass::EliminateBrgemmCopyB::EliminateBrgemmCopyB() { + MATCHER_SCOPE(EliminateBrgemmCopyB); + auto m_param = ov::pass::pattern::wrap_type(); + auto m_rank_norm = ov::pass::pattern::optional(m_param); + auto m_copy_b = ov::pass::pattern::wrap_type({m_param}); + + auto callback = [=](ov::pass::pattern::Matcher& m) { + OV_ITT_SCOPED_TASK(ov::pass::itt::domains::SnippetsTransform, "ov::intel_cpu::pass::EliminateBrgemmCopyB") + const auto& pattern_map = m.get_pattern_value_map(); + const auto& copy_b_out = pattern_map.at(m_copy_b); + const auto copy_b_node = ov::as_type_ptr(copy_b_out.get_node_shared_ptr()); + OPENVINO_ASSERT(copy_b_node, "BrgemmCopyB node is null in EliminateBrgemmCopyB transformation"); + + const auto& in_desc = 
snippets::lowered::PortDescriptorUtils::get_port_descriptor_ptr(copy_b_node->input(0)); + const auto& layout = in_desc->get_layout(); + // TODO: + // 1. Ticket 157340: support external repacking for copyB with compensations + // 2. Ticket 157339: support external repacking for non-planar layout + if (!ov::snippets::utils::is_planar_layout(layout) || + brgemm_utils::with_compensations(copy_b_node->get_type()) || transformation_callback(copy_b_node)) + return false; + return ov::replace_output_update_name(copy_b_out, copy_b_node->input_value(0)); + }; + + auto m = std::make_shared(m_copy_b, matcher_name); + register_matcher(m, callback); +} +} // namespace intel_cpu +} // namespace ov diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.hpp new file mode 100644 index 00000000000000..2cdeae53fab026 --- /dev/null +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/eliminate_brgemm_copy_b.hpp @@ -0,0 +1,29 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "openvino/pass/graph_rewrite.hpp" + +namespace ov { +namespace intel_cpu { +namespace pass { + +/** + * @interface EliminateBrgemmCopyB + * @brief EliminateBrgemmCopyB identifies BrgemmCopyB nodes which can be inferred outside the Subgraph. + * If this is possible, CopyB node is removed, and the external repacking is configured on the further pipeline stages in RuntimeConfigurator. + * + * @ingroup snippets + */ +class EliminateBrgemmCopyB: public ov::pass::MatcherPass { +public: + OPENVINO_RTTI("EliminateBrgemmCopyB", "0"); + EliminateBrgemmCopyB(); +}; + + +} // namespace pass +} // namespace intel_cpu +} // namespace ov diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.cpp index c421e5cc2a4805..7dfe711a5a5c67 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.cpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.cpp @@ -65,38 +65,35 @@ bool pass::AdjustBrgemmCopyBLoopPorts::run(const snippets::lowered::LinearIR& li bool modified = false; + auto get_repacking_loop_idces = [](const snippets::lowered::ExpressionPtr& brgemm_expr) { + // Repacking may be extracted outside the snippets kernel. In this case, brgemm parent expression is a parameter. 
+ if (is_type(brgemm_expr->get_input_port_connector(1)->get_source().get_expr()->get_node())) + return std::vector{}; + const auto repacking_expr = brgemm_utils::repacking::get_copy_b_expr(brgemm_expr); + OPENVINO_ASSERT(repacking_expr, "BrgemmCopyB expression is not found"); + return repacking_expr->get_loop_ids(); + }; + for (const auto& expr : linear_ir) { - const auto& node = expr->get_node(); - if (!is_type(node)) + const auto brgemm = ov::as_type_ptr(expr->get_node()); + if (!brgemm || !brgemm_utils::with_repacking(brgemm->get_type())) continue; - const auto& repacking_loop_ids = expr->get_loop_ids(); - const auto& child_ports = expr->get_output_port(0).get_connected_ports(); - OPENVINO_ASSERT(child_ports.size() == 1 && - is_type(child_ports.begin()->get_expr()), - "BrgemmCopyB should have one BufferExpression child"); - auto grandchild_ports = child_ports.begin()->get_expr()->get_output_port(0).get_connected_ports(); - for (const auto& target_port : grandchild_ports) { - const auto& port_node = target_port.get_expr()->get_node(); - if (!is_type(port_node)) { - OPENVINO_ASSERT(is_type(port_node), - "Invalid grandchild of BrgemmCopyB"); - continue; - } - const auto &brgemm_loop_ids = target_port.get_expr()->get_loop_ids(); - // Continue if there is no blocking loop - if (brgemm_loop_ids.empty() && repacking_loop_ids.empty()) - continue; - OPENVINO_ASSERT(brgemm_loop_ids.size() > repacking_loop_ids.size(), "Invalid BrgemmCopyB loop configuration"); - const auto &loop_manager = linear_ir.get_loop_manager(); - for (auto i = repacking_loop_ids.size(); i < brgemm_loop_ids.size(); i++) { - const auto &loop = loop_manager->get_loop_info(brgemm_loop_ids[i]); - auto uni_loop = ov::as_type_ptr(loop); - if (!uni_loop) - uni_loop = ov::as_type_ptr(loop)->get_unified_loop_info(); - if (!m_affected_loops.count(uni_loop) && update_loop_info(uni_loop)) { - m_affected_loops.insert(uni_loop); - modified = true; - } + const auto& brgemm_loop_ids = expr->get_loop_ids(); + const auto& repacking_loop_ids = get_repacking_loop_idces(expr); + // Continue if there is no blocking loop + if (brgemm_loop_ids.empty() && repacking_loop_ids.empty()) + continue; + + OPENVINO_ASSERT(brgemm_loop_ids.size() > repacking_loop_ids.size(), "Invalid BrgemmCopyB loop configuration"); + const auto &loop_manager = linear_ir.get_loop_manager(); + for (auto i = repacking_loop_ids.size(); i < brgemm_loop_ids.size(); i++) { + const auto &loop = loop_manager->get_loop_info(brgemm_loop_ids[i]); + auto uni_loop = ov::as_type_ptr(loop); + if (!uni_loop) + uni_loop = ov::as_type_ptr(loop)->get_unified_loop_info(); + if (!m_affected_loops.count(uni_loop) && update_loop_info(uni_loop)) { + m_affected_loops.insert(uni_loop); + modified = true; } } } diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.hpp index 5c65c7a0282823..794c55d868158a 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.hpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.hpp @@ -18,14 +18,11 @@ namespace pass { * Finds loop ports connected to BrgemmCopyB and sets appropriate pointer increments. 
* @ingroup snippets */ -class AdjustBrgemmCopyBLoopPorts: public snippets::lowered::pass::Pass { +class AdjustBrgemmCopyBLoopPorts: public snippets::lowered::pass::ConstPass { public: AdjustBrgemmCopyBLoopPorts() = default; - OPENVINO_RTTI("AdjustBrgemmCopyBLoopPorts", "Pass"); - bool run(const snippets::lowered::LinearIR& linear_ir); - bool run(snippets::lowered::LinearIR& linear_ir) override { - return run(const_cast(linear_ir)); - } + OPENVINO_RTTI("AdjustBrgemmCopyBLoopPorts", "ConstPass"); + bool run(const snippets::lowered::LinearIR& linear_ir) override; static bool update_loop_info(const snippets::lowered::UnifiedLoopInfoPtr& uni_loop_info); const std::unordered_set& get_affected_loops() { return m_affected_loops; } private: diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.cpp new file mode 100644 index 00000000000000..d88e0660e9e6fb --- /dev/null +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.cpp @@ -0,0 +1,49 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "brgemm_copy_b_loop_ports_adjuster.hpp" + +#include "snippets/itt.hpp" +#include "snippets/lowered/loop_manager.hpp" +#include "transformations/snippets/x64/pass/lowered/adjust_brgemm_copy_b_loop_ports.hpp" + +namespace ov { +namespace intel_cpu { + +BrgemmCopyBLoopPortsAdjuster::BrgemmCopyBLoopPortsAdjuster(const ov::snippets::lowered::LinearIRCPtr& linear_ir, + const CPURuntimeConfigurator* configurator) + : ov::snippets::lowered::pass::RuntimeOptimizer(configurator) { + if (!linear_ir->is_dynamic()) + return; + + const auto& pass = std::make_shared(); + pass->run(*linear_ir); + const auto& affected_uni_loops = pass->get_affected_loops(); + const auto& loop_map = linear_ir->get_loop_manager()->get_map(); + for (const auto& p : loop_map) { + if (const auto& exp_loop = ov::as_type_ptr(p.second)) { + const auto& uni_loop = exp_loop->get_unified_loop_info(); + if (affected_uni_loops.count(uni_loop)) + m_affected_uni2exp_map[uni_loop].push_back(exp_loop); + } + } +} + +bool BrgemmCopyBLoopPortsAdjuster::run(const snippets::lowered::LinearIR& linear_ir) { + OV_ITT_SCOPED_TASK(ov::pass::itt::domains::SnippetsTransform, "Snippets::BrgemmCopyBLoopPortsAdjuster") + for (const auto& p : m_affected_uni2exp_map) { + const auto& uni_loop = p.first; + const auto& exp_loops = p.second; + snippets::RuntimeConfigurator::LoopInfoRuntimeParamsMap initialized_info; + if (intel_cpu::pass::AdjustBrgemmCopyBLoopPorts::update_loop_info(uni_loop)) { + initialized_info[uni_loop] = snippets::RuntimeConfigurator::get_loop_runtime_params(uni_loop); + for (const auto& exp_loop : exp_loops) + snippets::RuntimeConfigurator::update_expanded_loop_info(exp_loop, initialized_info); + } + } + return true; +} + +} // namespace intel_cpu +} // namespace ov \ No newline at end of file diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.hpp new file mode 100644 index 00000000000000..7b9f30ac96e4b1 --- /dev/null +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_copy_b_loop_ports_adjuster.hpp @@ -0,0 +1,33 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 
+// + +#pragma once + +#include "emitters/snippets/cpu_runtime_configurator.hpp" +#include "snippets/lowered/linear_ir.hpp" +#include "snippets/lowered/loop_info.hpp" +#include "snippets/lowered/pass/runtime_optimizer.hpp" + +namespace ov { +namespace intel_cpu { + +/** + * @class BrgemmCopyBLoopPortsAdjuster + * @brief A runtime optimizer that adjusts blocked loops parameters for Brgemm operations which require repacking. + */ +class BrgemmCopyBLoopPortsAdjuster : public ov::snippets::lowered::pass::RuntimeOptimizer { +public: + BrgemmCopyBLoopPortsAdjuster() = default; + BrgemmCopyBLoopPortsAdjuster(const ov::snippets::lowered::LinearIRCPtr& linear_ir, const CPURuntimeConfigurator* configurator); + + bool run(const snippets::lowered::LinearIR& linear_ir) override; + bool applicable() const override { return !m_affected_uni2exp_map.empty(); } + +private: + std::unordered_map> m_affected_uni2exp_map; +}; + +} // namespace intel_cpu +} // namespace ov \ No newline at end of file diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_cpu_blocking.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_cpu_blocking.cpp index 9b3009284e09e8..66d6f4d223c90f 100644 --- a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_cpu_blocking.cpp +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/brgemm_cpu_blocking.cpp @@ -83,11 +83,12 @@ bool BrgemmCPUBlocking::mark_blocking_loops(LinearIR& linear_ir, if (stand_alone(type)) return res; - const auto copy_b_expr = linear_ir.get_expr_by_node(brgemm->get_brgemm_copy()); - const ov::snippets::VectorDims full_subtensor(2, get_full_dim_value()); - copy_b_expr->get_input_port_descriptor(0)->set_subtensor(full_subtensor); - copy_b_expr->get_output_port_descriptor(0)->set_subtensor(full_subtensor); - + const auto copy_b_expr = repacking::get_copy_b_expr(brgemm_expr); + if (copy_b_expr) { + const ov::snippets::VectorDims full_subtensor(2, get_full_dim_value()); + copy_b_expr->get_input_port_descriptor(0)->set_subtensor(full_subtensor); + copy_b_expr->get_output_port_descriptor(0)->set_subtensor(full_subtensor); + } if (with_amx(type)) { move_new_memory_buffer(linear_ir, brgemm_it); auto buffer_it = std::prev(brgemm_it); @@ -98,6 +99,7 @@ bool BrgemmCPUBlocking::mark_blocking_loops(LinearIR& linear_ir, if (with_compensations(type)) { const ov::snippets::VectorDims compensations_subtensor{1, get_full_dim_value()}; OPENVINO_ASSERT(brgemm_expr->get_input_count() == 3, "Brgemm must have 3 inputs in case of compensations."); + OPENVINO_ASSERT(copy_b_expr, "BrgemmCopyB must be present in case of compensations."); const auto& compens_port = brgemm_expr->get_input_port(2); compens_port.get_descriptor_ptr()->set_subtensor(compensations_subtensor); copy_b_expr->get_output_port_descriptor(1)->set_subtensor(compensations_subtensor); diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.cpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.cpp new file mode 100644 index 00000000000000..e98c8ebbecf49b --- /dev/null +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.cpp @@ -0,0 +1,72 @@ +// Copyright (C) 2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "external_repacking_adjuster.hpp" + +#include "emitters/snippets/cpu_runtime_configurator.hpp" +#include "memory_desc/cpu_blocked_memory_desc.h" 
+#include "snippets/itt.hpp" +#include "snippets/utils/utils.hpp" +#include "transformations/snippets/x64/op/brgemm_cpu.hpp" +#include "transformations/snippets/x64/op/brgemm_utils.hpp" + +namespace ov { +namespace intel_cpu { + +BrgemmExternalRepackingAdjuster::BrgemmExternalRepackingAdjuster(const ov::snippets::lowered::LinearIRCPtr& linear_ir, + const CPURuntimeConfigurator* configurator) + : snippets::lowered::pass::RuntimeOptimizer(configurator) { + const auto& params = linear_ir->get_parameters(); + for (size_t i = 0; i < params.size(); ++i) { + const auto& param = params[i]; + const auto consumers = param->get_output_port_connector(0)->get_consumers(); + const bool brgemm_with_extracted_repacking = + std::any_of(consumers.begin(), consumers.end(), [](const ov::snippets::lowered::ExpressionPort& port) { + auto brgemm = ov::as_type_ptr(port.get_expr()->get_node()); + return brgemm && brgemm_utils::with_repacking(brgemm->get_type()) && port.get_index() == 1; + }); + if (brgemm_with_extracted_repacking) { + m_param_idces_with_external_repacking.insert(i); + // Ticket 157339: Support non-planar layout + OPENVINO_ASSERT(ov::snippets::utils::is_planar_layout(configurator->get_io_descs()[i]->get_layout()), + "Non-planar layout is not supported for external repacking"); + } + } +} + +bool BrgemmExternalRepackingAdjuster::run(const snippets::lowered::LinearIR& linear_ir) { + OV_ITT_SCOPED_TASK(ov::pass::itt::domains::SnippetsTransform, "Snippets::BrgemmExternalRepackingAdjuster") + const auto& cpu_config = ov::as_type_ptr(m_configurator->get_config()); + auto& optimal_descs = cpu_config->m_in_requested_descs; + for (const auto& i : m_param_idces_with_external_repacking) { + const auto& shape = cpu_config->io_shapes[i]; + const auto& K = *++shape.rbegin(); + const auto& N = *shape.rbegin(); + + const auto& precision = linear_ir.get_parameters()[i]->get_node()->get_output_element_type(0); + const auto vnni_factor = brgemm_utils::compute_vnni_factor(precision); + const size_t brgemm_kernel_rank = 2; + // Firstly, batch dims are set + VectorDims requested_blocked_shape(shape.begin(), shape.end() - brgemm_kernel_rank); + // Then, the blocked dims are formed + requested_blocked_shape.insert( + requested_blocked_shape.end(), + {snippets::utils::div_up(K, vnni_factor), std::max(N, brgemm_utils::repacking::compute_inner_n_block(precision)), vnni_factor}); + + VectorDims requested_order(shape.size() - brgemm_kernel_rank); + std::iota(requested_order.begin(), requested_order.end(), 0); + const auto last_idx = shape.size() - 1; + requested_order.insert(requested_order.end(), {last_idx - 1, last_idx, last_idx - 1}); + + optimal_descs[i] = std::make_shared(precision, Shape(shape), requested_blocked_shape, requested_order); + + ov::snippets::VectorDims shape_for_offset(cpu_config->tensor_rank - shape.size(), 1); + shape_for_offset.insert(shape_for_offset.end(), requested_blocked_shape.begin(), requested_blocked_shape.end()); + m_configurator->compute_offsets(shape_for_offset, i, 0); + } + return true; +} + +} // namespace intel_cpu +} // namespace ov diff --git a/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.hpp b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.hpp new file mode 100644 index 00000000000000..f102af8f23fe5b --- /dev/null +++ b/src/plugins/intel_cpu/src/transformations/snippets/x64/pass/lowered/external_repacking_adjuster.hpp @@ -0,0 +1,32 @@ +// Copyright (C) 2024 Intel Corporation +// 
SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "emitters/snippets/cpu_runtime_configurator.hpp" +#include "snippets/lowered/pass/runtime_optimizer.hpp" +#include "snippets/runtime_configurator.hpp" + +namespace ov { +namespace intel_cpu { + +/** + * @class BrgemmExternalRepackingAdjuster + * @brief A runtime optimizer that creates the memory descs for BRGEMM inputs which require external repacking. + * The generated memory descs are stored in the CPU runtime config. + */ +class BrgemmExternalRepackingAdjuster : public ov::snippets::lowered::pass::RuntimeOptimizer { +public: + BrgemmExternalRepackingAdjuster() = default; + BrgemmExternalRepackingAdjuster(const ov::snippets::lowered::LinearIRCPtr& linear_ir, const CPURuntimeConfigurator* configurator); + + bool run(const snippets::lowered::LinearIR& linear_ir) override; + bool applicable() const override { return !m_param_idces_with_external_repacking.empty(); } + +private: + std::set m_param_idces_with_external_repacking; +}; + +} // namespace intel_cpu +} // namespace ov From 13f7b5b1a4a670ae019c17f42c152b0f672bb7d9 Mon Sep 17 00:00:00 2001 From: Andrzej Kopytko Date: Mon, 25 Nov 2024 08:24:31 +0100 Subject: [PATCH 38/62] [DOCS] Port for Fixed search mechanism with category to master (#27708) ### Details: - *item1* - *...* ### Tickets: - *ticket-id* --- docs/sphinx_setup/_static/js/custom.js | 2 -- docs/sphinx_setup/_templates/search.html | 34 +++++++++++------------- 2 files changed, 16 insertions(+), 20 deletions(-) diff --git a/docs/sphinx_setup/_static/js/custom.js b/docs/sphinx_setup/_static/js/custom.js index 739548d46da2f9..241f8895ee1c61 100644 --- a/docs/sphinx_setup/_static/js/custom.js +++ b/docs/sphinx_setup/_static/js/custom.js @@ -415,8 +415,6 @@ document.addEventListener('DOMContentLoaded', function () { element.innerHTML = element.innerHTML.replace('search.html', `/${version}/search.html#f-ovversion=${version}`); } - // preProd = "intelcorporationnonproduction2ybdyblf7" - // prod = "intelcorporationproductione78n25s6" await element.initialize({ accessToken: "xx1f2aebd3-4307-4632-aeea-17c13378b237", organizationId: "intelcorporationproductione78n25s6" diff --git a/docs/sphinx_setup/_templates/search.html b/docs/sphinx_setup/_templates/search.html index 5430f24f74aa8c..b77c97c073de99 100644 --- a/docs/sphinx_setup/_templates/search.html +++ b/docs/sphinx_setup/_templates/search.html @@ -9,7 +9,7 @@ {% block body %} + fields-to-include='["ovversion", "ovcategory", "filetype", "date", "source", "author", "sourcetype", "language", "description"]'> @@ -17,23 +17,21 @@ -
- - -
- - - - + +
+ + +
+ + + + +
From dc038e2cbf9b56c8adbe4c269ab812fe1dc38a7a Mon Sep 17 00:00:00 2001 From: Andrzej Kopytko Date: Mon, 25 Nov 2024 08:25:34 +0100 Subject: [PATCH 39/62] [DOCS] Port for Graph Title Fix to Master (#27677) ### Details: - *item1* - *...* ### Tickets: - *ticket-id* --- docs/sphinx_setup/_static/js/graphs.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/sphinx_setup/_static/js/graphs.js b/docs/sphinx_setup/_static/js/graphs.js index 7171aed374dd99..f29042de3e51b7 100644 --- a/docs/sphinx_setup/_static/js/graphs.js +++ b/docs/sphinx_setup/_static/js/graphs.js @@ -669,7 +669,7 @@ $(document).ready(function () { } else { var graphConfigs = setGraphConfigs(filteredGraphData, appConfig, kpis, precisions) - createChartWithNewData(labels, graphConfigs, chartContainer, display); + createChartWithNewData(labels, graphConfigs, appConfig, chartContainer, display); } } else { @@ -730,7 +730,7 @@ $(document).ready(function () { setChartsDisplayDirection(display.mode); adjustHeaderIcons(display.mode); } - function createChartWithNewData(labels, graphConfigs, chartContainer, display) { + function createChartWithNewData(labels, graphConfigs, appConfig, chartContainer, display) { var chartWrap = $('
'); chartWrap.addClass('chart-wrap'); @@ -759,6 +759,8 @@ $(document).ready(function () { columnHeaderContainer.append(columnIcon); var columnHeader = $('
'); columnHeader.append($('
' + graphConfig.chartTitle + '
')); + columnHeader.append($('
' + graphConfig.unit + '
')); + columnHeader.append($('
' + appConfig.UnitDescription[graphConfig.unit] + '
')); columnHeaderContainer.append(columnHeader); chartGraphsContainer.append(graphItem); var graphClass = $('
'); From 1b3550ecdc9ab9d33c4936d671d33de4ff1f7bd3 Mon Sep 17 00:00:00 2001 From: Sungeun Kim Date: Mon, 25 Nov 2024 16:29:10 +0900 Subject: [PATCH 40/62] [GPU] update onednn 3.7pc (#27689) commit 0f269193c7466313888d3338209d0d06a22cc6fa (HEAD, private/rls-v3.7-pc, origin/rls-v3.7-pc) Author: Roy Oursler Date: Thu Nov 7 10:45:39 2024 -0800 --- src/plugins/intel_gpu/thirdparty/onednn_gpu | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/plugins/intel_gpu/thirdparty/onednn_gpu b/src/plugins/intel_gpu/thirdparty/onednn_gpu index 1722066ad4c0f1..0f269193c74663 160000 --- a/src/plugins/intel_gpu/thirdparty/onednn_gpu +++ b/src/plugins/intel_gpu/thirdparty/onednn_gpu @@ -1 +1 @@ -Subproject commit 1722066ad4c0f15495f2d0fcbe9deb2bfd188c36 +Subproject commit 0f269193c7466313888d3338209d0d06a22cc6fa From f4a700d23392b5434349a51824c2adc4d946d0fe Mon Sep 17 00:00:00 2001 From: Sebastian Golebiewski Date: Mon, 25 Nov 2024 09:54:33 +0100 Subject: [PATCH 41/62] [DOCS] Updating Interactive Tutorials (#27704) Porting: #27702 Signed-off-by: sgolebiewski-intel --- docs/nbdoc/consts.py | 2 +- .../3D-pose-estimation-with-output.rst | 108 +- ...-segmentation-point-clouds-with-output.rst | 4 +- ...on-recognition-webcam-with-output_22_0.png | 4 +- docs/notebooks/all_notebooks_paths.txt | 9 +- ...-lightweight-text-to-image-with-output.rst | 984 ------------ ...tweight-text-to-image-with-output_29_0.jpg | 3 - ...tweight-text-to-image-with-output_29_0.png | 3 - ...tweight-text-to-image-with-output_38_2.jpg | 3 - ...tweight-text-to-image-with-output_38_2.png | 3 - ...htweight-text-to-image-with-output_7_0.jpg | 3 - ...htweight-text-to-image-with-output_7_0.png | 3 - docs/notebooks/animate-anyone-with-output.rst | 276 ++-- docs/notebooks/auto-device-with-output.rst | 48 +- .../auto-device-with-output_27_0.png | 4 +- .../auto-device-with-output_28_0.png | 4 +- ...visual-language-processing-with-output.rst | 7 +- docs/notebooks/catvton-with-output.rst | 338 ++-- ...clip-language-saliency-map-with-output.rst | 2 +- ...p-zero-shot-classification-with-output.rst | 1 + ...ontrolnet-stable-diffusion-with-output.rst | 49 +- .../convert-to-openvino-with-output.rst | 8 +- .../convnext-classification-with-output.rst | 2 +- ...segmentation-quantize-nncf-with-output.rst | 68 +- ...ntation-quantize-nncf-with-output_37_1.png | 4 +- ...ddcolor-image-colorization-with-output.rst | 196 +-- ...or-image-colorization-with-output_10_0.jpg | 3 + ...or-image-colorization-with-output_10_0.png | 3 + ...r-image-colorization-with-output_17_0.jpg} | 0 ...r-image-colorization-with-output_17_0.png} | 0 ...r-image-colorization-with-output_26_0.jpg} | 0 ...r-image-colorization-with-output_26_0.png} | 0 ...lor-image-colorization-with-output_8_0.jpg | 3 - ...lor-image-colorization-with-output_8_0.png | 3 - ...lor-image-colorization-with-output_9_0.jpg | 4 +- ...lor-image-colorization-with-output_9_0.png | 4 +- .../depth-anything-v2-with-output.rst | 133 +- ...=> depth-anything-v2-with-output_15_1.png} | 0 ...=> depth-anything-v2-with-output_25_1.png} | 0 ...=> depth-anything-v2-with-output_44_0.png} | 0 ... => depth-anything-v2-with-output_9_1.jpg} | 0 ... 
=> depth-anything-v2-with-output_9_1.png} | 0 docs/notebooks/depth-anything-with-output.rst | 105 +- ...pg => depth-anything-with-output_11_1.jpg} | 0 ...ng => depth-anything-with-output_11_1.png} | 0 ...ng => depth-anything-with-output_18_0.png} | 0 ...ng => depth-anything-with-output_27_0.png} | 0 ...ng => depth-anything-with-output_46_0.png} | 0 .../detectron2-to-openvino-with-output.rst | 154 +- ...etectron2-to-openvino-with-output_22_0.jpg | 4 +- ...etectron2-to-openvino-with-output_22_0.png | 4 +- ...etectron2-to-openvino-with-output_32_0.jpg | 4 +- ...etectron2-to-openvino-with-output_32_0.png | 4 +- ...rt-sequence-classification-with-output.rst | 338 ---- ...ly-2-instruction-following-with-output.rst | 923 ----------- ...micrafter-animating-images-with-output.rst | 342 ++--- docs/notebooks/efficient-sam-with-output.rst | 102 +- .../efficient-sam-with-output_17_1.png | 4 +- .../efficient-sam-with-output_25_1.png | 4 +- .../efficient-sam-with-output_36_1.png | 4 +- .../encodec-audio-compression-with-output.rst | 20 +- ...dec-audio-compression-with-output_19_1.png | 2 +- ...dec-audio-compression-with-output_38_1.png | 2 +- ...odec-audio-compression-with-output_6_2.png | 2 +- .../explainable-ai-1-basic-with-output.rst | 7 +- ...explainable-ai-2-deep-dive-with-output.rst | 5 +- ...le-ai-3-map-interpretation-with-output.rst | 5 +- .../fast-segment-anything-with-output.rst | 24 +- docs/notebooks/film-slowmo-with-output.rst | 6 +- docs/notebooks/florence2-with-output.rst | 44 +- .../florence2-with-output_18_0.png | 4 +- .../freevc-voice-conversion-with-output.rst | 67 +- docs/notebooks/gpu-device-with-output.rst | 8 +- .../grounded-segment-anything-with-output.rst | 97 +- ...ded-segment-anything-with-output_30_0.jpg} | 0 ...ded-segment-anything-with-output_30_0.png} | 0 ...ded-segment-anything-with-output_46_0.jpg} | 0 ...ded-segment-anything-with-output_46_0.png} | 0 .../handwritten-ocr-with-output_22_0.png | 2 +- .../handwritten-ocr-with-output_32_1.png | 2 +- .../hello-detection-with-output_11_1.png | 2 +- .../hello-detection-with-output_16_0.png | 2 +- .../hello-segmentation-with-output.rst | 4 +- .../hello-segmentation-with-output_11_2.png | 2 +- .../hello-segmentation-with-output_13_1.png | 2 +- .../hello-segmentation-with-output_17_0.png | 2 +- .../hello-world-with-output_11_1.png | 2 +- .../hugging-face-hub-with-output.rst | 40 +- ...nyuan-dit-image-generation-with-output.rst | 24 +- ...dit-image-generation-with-output_31_0.jpg} | 0 ...dit-image-generation-with-output_31_0.png} | 0 docs/notebooks/image-bind-with-output.rst | 1027 ------------- .../image-bind-with-output_20_0.png | 3 - .../image-bind-with-output_22_0.png | 3 - .../image-bind-with-output_24_0.png | 3 - .../image-bind-with-output_26_1.png | 3 - .../image-bind-with-output_27_1.png | 3 - .../image-bind-with-output_28_1.png | 3 - .../image-bind-with-output_52_0.png | 3 - .../image-bind-with-output_53_0.png | 3 - .../image-bind-with-output_54_0.png | 3 - ...lassification-quantization-with-output.rst | 96 +- ...fication-quantization-with-output_30_2.png | 3 - ...fication-quantization-with-output_31_2.png | 3 + docs/notebooks/instant-id-with-output.rst | 204 ++- .../instant-id-with-output_15_0.jpg | 3 - .../instant-id-with-output_15_0.png | 3 - .../instant-id-with-output_16_0.jpg | 4 +- .../instant-id-with-output_16_0.png | 4 +- .../instant-id-with-output_17_0.jpg | 3 + .../instant-id-with-output_17_0.png | 3 + ..._0.jpg => instant-id-with-output_40_0.jpg} | 0 ..._0.png => instant-id-with-output_40_0.png} | 0 
docs/notebooks/internvl2-with-output.rst | 518 +++---- ...6_0.jpg => internvl2-with-output_14_0.jpg} | 0 ...6_0.png => internvl2-with-output_14_0.png} | 0 docs/notebooks/jina-clip-with-output.rst | 252 +-- .../jina-clip-with-output_11_0.png | 2 +- .../jina-clip-with-output_21_0.png | 2 +- .../jina-clip-with-output_37_0.png | 3 + .../jina-clip-with-output_39_0.png | 3 - .../knowledge-graphs-conve-with-output.rst | 28 +- ...modal-large-language-model-with-output.rst | 32 +- ...-large-language-model-with-output_29_1.jpg | 4 +- ...-large-language-model-with-output_29_1.png | 4 +- ...-large-language-model-with-output_48_1.png | 4 +- ...l-large-language-model-with-output_8_0.jpg | 4 +- ...l-large-language-model-with-output_8_0.png | 4 +- .../language-quantize-bert-with-output.rst | 63 +- ...cy-models-image-generation-with-output.rst | 959 ++++-------- ...dels-image-generation-with-output_13_0.jpg | 3 + ...dels-image-generation-with-output_13_0.png | 3 + ...dels-image-generation-with-output_21_0.jpg | 3 - ...dels-image-generation-with-output_21_0.png | 3 - ...dels-image-generation-with-output_27_1.jpg | 3 + ...dels-image-generation-with-output_27_1.png | 3 + ...dels-image-generation-with-output_34_1.jpg | 3 - ...dels-image-generation-with-output_34_1.png | 3 - ...dels-image-generation-with-output_37_0.jpg | 3 + ...dels-image-generation-with-output_37_0.png | 3 + ...stency-models-optimum-demo-with-output.rst | 252 --- ...y-models-optimum-demo-with-output_15_1.jpg | 3 - ...y-models-optimum-demo-with-output_15_1.png | 3 - ...cy-models-optimum-demo-with-output_8_1.jpg | 3 - ...cy-models-optimum-demo-with-output_8_1.png | 3 - ...a-multimodal-chatbot-genai-with-output.rst | 18 +- ...multimodal-chatbot-optimum-with-output.rst | 8 +- ...va-next-multimodal-chatbot-with-output.rst | 1293 +++------------- ...t-multimodal-chatbot-with-output_17_0.jpg} | 0 ...t-multimodal-chatbot-with-output_17_0.png} | 0 .../notebooks/llm-agent-react-with-output.rst | 23 +- .../llm-chatbot-generate-api-with-output.rst | 6 +- docs/notebooks/llm-chatbot-with-output.rst | 15 +- .../llm-question-answering-with-output.rst | 7 +- ...a-content-type-recognition-with-output.rst | 8 +- docs/notebooks/meter-reader-with-output.rst | 2 +- .../meter-reader-with-output_16_1.png | 2 +- .../meter-reader-with-output_18_1.png | 2 +- .../meter-reader-with-output_20_1.png | 2 +- .../meter-reader-with-output_22_1.png | 2 +- .../meter-reader-with-output_24_1.png | 2 +- ...nicpm-v-multimodal-chatbot-with-output.rst | 437 ++---- ...v-multimodal-chatbot-with-output_12_1.jpg} | 0 ...v-multimodal-chatbot-with-output_12_1.png} | 0 docs/notebooks/mllama-3.2-with-output.rst | 4 +- .../mobileclip-video-search-with-output.rst | 395 ++++- ...bileclip-video-search-with-output_10_4.png | 3 - ...bileclip-video-search-with-output_12_4.png | 3 + ...bileclip-video-search-with-output_14_1.png | 3 - ...bileclip-video-search-with-output_17_1.png | 3 + ...bileclip-video-search-with-output_25_1.png | 3 - ...bileclip-video-search-with-output_28_1.png | 3 + ...bilevlm-language-assistant-with-output.rst | 765 ---------- ...lm-language-assistant-with-output_32_1.jpg | 3 - ...lm-language-assistant-with-output_32_1.png | 3 - ...multilora-image-generation-with-output.rst | 468 ++++++ ...lora-image-generation-with-output_15_0.jpg | 3 + ...lora-image-generation-with-output_15_0.png | 3 + ...lora-image-generation-with-output_18_0.jpg | 3 + ...lora-image-generation-with-output_18_0.png | 3 + ...lora-image-generation-with-output_21_0.jpg | 3 + 
...lora-image-generation-with-output_21_0.png | 3 + .../music-generation-with-output.rst | 150 +- ...o-llava-multimodal-chatbot-with-output.rst | 330 ++-- .../notebooks_with_colab_buttons.txt | 1 + ...tract-structure-extraction-with-output.rst | 4 +- .../object-detection-with-output.rst | 10 +- docs/notebooks/omniparser-with-output.rst | 663 ++++++++ .../omniparser-with-output_32_0.jpg | 3 + .../omniparser-with-output_32_0.png | 3 + docs/notebooks/openvino-api-with-output.rst | 22 +- docs/notebooks/openvoice-with-output.rst | 122 +- ...character-recognition-with-output_13_0.png | 2 +- ...character-recognition-with-output_23_0.png | 2 +- .../optimize-preprocessing-with-output.rst | 19 +- ...ptimize-preprocessing-with-output_14_1.png | 2 +- .../paddle-ocr-webcam-with-output.rst | 9 +- .../paddle-ocr-webcam-with-output_30_0.png | 4 +- ...to-openvino-classification-with-output.rst | 57 +- ...envino-classification-with-output_14_3.png | 3 + ...envino-classification-with-output_15_3.png | 3 - ...envino-classification-with-output_22_1.png | 3 + ...envino-classification-with-output_23_1.png | 3 - ...envino-classification-with-output_26_1.png | 3 + ...envino-classification-with-output_27_1.png | 3 - ...envino-classification-with-output_29_1.png | 3 + ...envino-classification-with-output_30_1.png | 3 - ...penvino-classification-with-output_7_1.png | 3 + ...penvino-classification-with-output_8_1.png | 3 - .../paint-by-example-with-output.rst | 1359 ----------------- .../paint-by-example-with-output_41_0.png | 3 - .../person-tracking-with-output_17_3.png | 2 +- .../person-tracking-with-output_25_0.png | 4 +- docs/notebooks/phi-3-vision-with-output.rst | 23 +- docs/notebooks/photo-maker-with-output.rst | 90 +- .../photo-maker-with-output_33_0.png | 3 - .../photo-maker-with-output_34_0.png | 3 + docs/notebooks/pixart-with-output.rst | 35 +- .../pixart-with-output_40_2.png | 2 +- docs/notebooks/pixtral-with-output.rst | 35 +- .../pose-estimation-with-output_22_0.png | 4 +- .../pytorch-onnx-to-openvino-with-output.rst | 6 +- ...orch-onnx-to-openvino-with-output_22_0.png | 2 +- ...orch-onnx-to-openvino-with-output_27_0.png | 2 +- ...orch-onnx-to-openvino-with-output_29_0.png | 2 +- ...training-quantization-nncf-with-output.rst | 95 +- ...uantization-aware-training-with-output.rst | 79 +- ...on-sparsity-aware-training-with-output.rst | 359 +++-- .../pytorch-to-openvino-with-output.rst | 14 +- docs/notebooks/qrcode-monster-with-output.rst | 2 +- docs/notebooks/qwen2-audio-with-output.rst | 78 +- docs/notebooks/qwen2-vl-with-output.rst | 71 +- .../rmbg-background-removal-with-output.rst | 9 +- ...bg-background-removal-with-output_14_0.png | 2 +- ...mbg-background-removal-with-output_8_0.png | 2 +- ...ce-text-to-video-retrieval-with-output.rst | 8 +- .../segment-anything-2-image-with-output.rst | 61 +- ...ment-anything-2-image-with-output_12_0.png | 2 +- ...ment-anything-2-image-with-output_16_0.png | 2 +- ...ment-anything-2-image-with-output_18_0.png | 2 +- ...ment-anything-2-image-with-output_33_0.png | 2 +- ...ment-anything-2-image-with-output_39_0.png | 2 +- ...ment-anything-2-image-with-output_47_0.png | 2 +- ...ment-anything-2-image-with-output_51_0.png | 2 +- ...ment-anything-2-image-with-output_56_0.png | 2 +- ...ment-anything-2-image-with-output_60_0.png | 2 +- ...ment-anything-2-image-with-output_65_0.png | 2 +- .../segment-anything-2-video-with-output.rst | 946 ++++++++++++ ...ment-anything-2-video-with-output_40_1.png | 3 + ...ment-anything-2-video-with-output_46_0.png | 3 + 
.../segment-anything-with-output.rst | 8 +- docs/notebooks/segmind-vegart-with-output.rst | 727 --------- .../segmind-vegart-with-output_12_1.jpg | 3 - .../segmind-vegart-with-output_12_1.png | 3 - .../segmind-vegart-with-output_23_2.jpg | 3 - .../segmind-vegart-with-output_23_2.png | 3 - ...-shot-image-classification-with-output.rst | 19 +- ...-image-classification-with-output_13_1.png | 2 +- ...-image-classification-with-output_24_1.png | 4 +- ...t-image-classification-with-output_6_1.png | 2 +- ...tch-to-image-pix2pix-turbo-with-output.rst | 146 +- ...-image-pix2pix-turbo-with-output_15_0.jpg} | 0 ...-image-pix2pix-turbo-with-output_15_0.png} | 0 ...o-image-pix2pix-turbo-with-output_18_0.jpg | 3 - ...o-image-pix2pix-turbo-with-output_18_0.png | 3 - ...o-image-pix2pix-turbo-with-output_19_0.jpg | 3 + ...o-image-pix2pix-turbo-with-output_19_0.png | 3 + .../softvc-voice-conversion-with-output.rst | 33 +- .../sparsity-optimization-with-output.rst | 212 +++ .../speculative-sampling-with-output.rst | 79 +- ...tion-quantization-wav2vec2-with-output.rst | 143 +- ...hbrain-emotion-recognition-with-output.rst | 106 +- docs/notebooks/stable-audio-with-output.rst | 4 +- ...e-cascade-image-generation-with-output.rst | 16 +- ...cade-image-generation-with-output_29_2.jpg | 4 +- ...cade-image-generation-with-output_29_2.png | 4 +- ...table-diffusion-ip-adapter-with-output.rst | 51 +- ...-diffusion-ip-adapter-with-output_22_1.png | 4 +- ...-diffusion-ip-adapter-with-output_25_0.png | 4 +- ...-diffusion-ip-adapter-with-output_28_0.png | 4 +- .../stable-diffusion-keras-cv-with-output.rst | 8 +- ...fusion-torchdynamo-backend-with-output.rst | 3 +- docs/notebooks/stable-fast-3d-with-output.rst | 47 +- docs/notebooks/style-transfer-with-output.rst | 4 +- .../style-transfer-with-output_25_0.png | 4 +- .../table-question-answering-with-output.rst | 57 +- ...classification-to-openvino-with-output.rst | 18 +- ...ification-to-openvino-with-output_19_1.png | 2 +- .../tensorflow-hub-with-output_26_0.png | 2 +- .../tensorflow-hub-with-output_43_0.png | 2 +- ...e-segmentation-to-openvino-with-output.rst | 11 +- ...mentation-to-openvino-with-output_25_1.png | 2 +- ...mentation-to-openvino-with-output_39_0.png | 4 +- ...ject-detection-to-openvino-with-output.rst | 17 +- ...detection-to-openvino-with-output_25_1.png | 2 +- ...detection-to-openvino-with-output_38_0.png | 4 +- ...uantization-aware-training-with-output.rst | 93 +- ...ization-aware-training-with-output_6_1.png | 2 +- .../text-to-image-genai-with-output.rst | 281 ++++ .../text-to-image-genai-with-output_13_0.jpg | 3 + .../text-to-image-genai-with-output_13_0.png | 3 + .../text-to-image-genai-with-output_9_0.jpg | 3 + .../text-to-image-genai-with-output_9_0.png | 3 + ...tflite-selfie-segmentation-with-output.rst | 87 +- ...e-selfie-segmentation-with-output_25_0.png | 2 +- ...e-selfie-segmentation-with-output_33_0.png | 4 +- .../tflite-to-openvino-with-output.rst | 24 +- docs/notebooks/typo-detector-with-output.rst | 621 -------- ...-detection-and-recognition-with-output.rst | 7 +- ...ction-and-recognition-with-output_14_0.png | 2 +- ...ction-and-recognition-with-output_21_0.png | 2 +- ...ction-and-recognition-with-output_27_0.png | 2 +- .../vision-background-removal-with-output.rst | 15 +- ...on-background-removal-with-output_22_0.png | 2 +- ...on-background-removal-with-output_24_0.png | 2 +- .../vision-monodepth-with-output.rst | 76 +- .../vision-monodepth-with-output_18_0.png | 2 +- docs/notebooks/wav2lip-with-output.rst | 98 +- 
.../whisper-asr-genai-with-output.rst | 19 +- ...isper-subtitles-generation-with-output.rst | 20 +- ...uerstchen-image-generation-with-output.rst | 1054 ------------- .../499b779a-61d1-4e68-a1c3-437122622ba7.png | 3 - ...chen-image-generation-with-output_11_0.png | 3 - ...chen-image-generation-with-output_45_0.png | 3 - ...chen-image-generation-with-output_61_0.png | 3 - ...ov11-instance-segmentation-with-output.rst | 126 +- ...instance-segmentation-with-output_46_0.png | 4 +- ...yolov11-keypoint-detection-with-output.rst | 60 +- ...11-keypoint-detection-with-output_43_0.png | 4 +- .../yolov11-object-detection-with-output.rst | 127 +- ...ov11-object-detection-with-output_43_0.png | 4 +- ...ion-with-accuracy-control-with-output.rst} | 269 ++-- .../yolov9-optimization-with-output.rst | 73 +- .../yolov9-optimization-with-output_36_0.png | 4 +- 334 files changed, 7472 insertions(+), 13896 deletions(-) delete mode 100644 docs/notebooks/amused-lightweight-text-to-image-with-output.rst delete mode 100644 docs/notebooks/amused-lightweight-text-to-image-with-output_files/amused-lightweight-text-to-image-with-output_29_0.jpg delete mode 100644 docs/notebooks/amused-lightweight-text-to-image-with-output_files/amused-lightweight-text-to-image-with-output_29_0.png delete mode 100644 docs/notebooks/amused-lightweight-text-to-image-with-output_files/amused-lightweight-text-to-image-with-output_38_2.jpg delete mode 100644 docs/notebooks/amused-lightweight-text-to-image-with-output_files/amused-lightweight-text-to-image-with-output_38_2.png delete mode 100644 docs/notebooks/amused-lightweight-text-to-image-with-output_files/amused-lightweight-text-to-image-with-output_7_0.jpg delete mode 100644 docs/notebooks/amused-lightweight-text-to-image-with-output_files/amused-lightweight-text-to-image-with-output_7_0.png create mode 100644 docs/notebooks/ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_10_0.jpg create mode 100644 docs/notebooks/ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_10_0.png rename docs/notebooks/ddcolor-image-colorization-with-output_files/{ddcolor-image-colorization-with-output_16_0.jpg => ddcolor-image-colorization-with-output_17_0.jpg} (100%) rename docs/notebooks/ddcolor-image-colorization-with-output_files/{ddcolor-image-colorization-with-output_16_0.png => ddcolor-image-colorization-with-output_17_0.png} (100%) rename docs/notebooks/ddcolor-image-colorization-with-output_files/{ddcolor-image-colorization-with-output_25_0.jpg => ddcolor-image-colorization-with-output_26_0.jpg} (100%) rename docs/notebooks/ddcolor-image-colorization-with-output_files/{ddcolor-image-colorization-with-output_25_0.png => ddcolor-image-colorization-with-output_26_0.png} (100%) delete mode 100644 docs/notebooks/ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_8_0.jpg delete mode 100644 docs/notebooks/ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_8_0.png rename docs/notebooks/depth-anything-v2-with-output_files/{depth-anything-v2-with-output_14_1.png => depth-anything-v2-with-output_15_1.png} (100%) rename docs/notebooks/depth-anything-v2-with-output_files/{depth-anything-v2-with-output_24_1.png => depth-anything-v2-with-output_25_1.png} (100%) rename docs/notebooks/depth-anything-v2-with-output_files/{depth-anything-v2-with-output_43_0.png => depth-anything-v2-with-output_44_0.png} (100%) rename 
docs/notebooks/depth-anything-v2-with-output_files/{depth-anything-v2-with-output_8_1.jpg => depth-anything-v2-with-output_9_1.jpg} (100%) rename docs/notebooks/depth-anything-v2-with-output_files/{depth-anything-v2-with-output_8_1.png => depth-anything-v2-with-output_9_1.png} (100%) rename docs/notebooks/depth-anything-with-output_files/{depth-anything-with-output_9_1.jpg => depth-anything-with-output_11_1.jpg} (100%) rename docs/notebooks/depth-anything-with-output_files/{depth-anything-with-output_9_1.png => depth-anything-with-output_11_1.png} (100%) rename docs/notebooks/depth-anything-with-output_files/{depth-anything-with-output_16_0.png => depth-anything-with-output_18_0.png} (100%) rename docs/notebooks/depth-anything-with-output_files/{depth-anything-with-output_25_0.png => depth-anything-with-output_27_0.png} (100%) rename docs/notebooks/depth-anything-with-output_files/{depth-anything-with-output_44_0.png => depth-anything-with-output_46_0.png} (100%) delete mode 100644 docs/notebooks/distilbert-sequence-classification-with-output.rst delete mode 100644 docs/notebooks/dolly-2-instruction-following-with-output.rst rename docs/notebooks/grounded-segment-anything-with-output_files/{grounded-segment-anything-with-output_29_0.jpg => grounded-segment-anything-with-output_30_0.jpg} (100%) rename docs/notebooks/grounded-segment-anything-with-output_files/{grounded-segment-anything-with-output_29_0.png => grounded-segment-anything-with-output_30_0.png} (100%) rename docs/notebooks/grounded-segment-anything-with-output_files/{grounded-segment-anything-with-output_45_0.jpg => grounded-segment-anything-with-output_46_0.jpg} (100%) rename docs/notebooks/grounded-segment-anything-with-output_files/{grounded-segment-anything-with-output_45_0.png => grounded-segment-anything-with-output_46_0.png} (100%) rename docs/notebooks/hunyuan-dit-image-generation-with-output_files/{hunyuan-dit-image-generation-with-output_30_0.jpg => hunyuan-dit-image-generation-with-output_31_0.jpg} (100%) rename docs/notebooks/hunyuan-dit-image-generation-with-output_files/{hunyuan-dit-image-generation-with-output_30_0.png => hunyuan-dit-image-generation-with-output_31_0.png} (100%) delete mode 100644 docs/notebooks/image-bind-with-output.rst delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_20_0.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_22_0.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_24_0.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_26_1.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_27_1.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_28_1.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_52_0.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_53_0.png delete mode 100644 docs/notebooks/image-bind-with-output_files/image-bind-with-output_54_0.png delete mode 100644 docs/notebooks/image-classification-quantization-with-output_files/image-classification-quantization-with-output_30_2.png create mode 100644 docs/notebooks/image-classification-quantization-with-output_files/image-classification-quantization-with-output_31_2.png delete mode 100644 docs/notebooks/instant-id-with-output_files/instant-id-with-output_15_0.jpg delete mode 100644 
docs/notebooks/instant-id-with-output_files/instant-id-with-output_15_0.png create mode 100644 docs/notebooks/instant-id-with-output_files/instant-id-with-output_17_0.jpg create mode 100644 docs/notebooks/instant-id-with-output_files/instant-id-with-output_17_0.png rename docs/notebooks/instant-id-with-output_files/{instant-id-with-output_41_0.jpg => instant-id-with-output_40_0.jpg} (100%) rename docs/notebooks/instant-id-with-output_files/{instant-id-with-output_41_0.png => instant-id-with-output_40_0.png} (100%) rename docs/notebooks/internvl2-with-output_files/{internvl2-with-output_16_0.jpg => internvl2-with-output_14_0.jpg} (100%) rename docs/notebooks/internvl2-with-output_files/{internvl2-with-output_16_0.png => internvl2-with-output_14_0.png} (100%) create mode 100644 docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_37_0.png delete mode 100644 docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_39_0.png create mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.jpg create mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.png delete mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.jpg delete mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.png create mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.jpg create mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.png delete mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.jpg delete mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.png create mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.jpg create mode 100644 docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.png delete mode 100644 docs/notebooks/latent-consistency-models-optimum-demo-with-output.rst delete mode 100644 docs/notebooks/latent-consistency-models-optimum-demo-with-output_files/latent-consistency-models-optimum-demo-with-output_15_1.jpg delete mode 100644 docs/notebooks/latent-consistency-models-optimum-demo-with-output_files/latent-consistency-models-optimum-demo-with-output_15_1.png delete mode 100644 docs/notebooks/latent-consistency-models-optimum-demo-with-output_files/latent-consistency-models-optimum-demo-with-output_8_1.jpg delete mode 100644 docs/notebooks/latent-consistency-models-optimum-demo-with-output_files/latent-consistency-models-optimum-demo-with-output_8_1.png rename docs/notebooks/llava-next-multimodal-chatbot-with-output_files/{llava-next-multimodal-chatbot-with-output_36_1.jpg => llava-next-multimodal-chatbot-with-output_17_0.jpg} (100%) rename docs/notebooks/llava-next-multimodal-chatbot-with-output_files/{llava-next-multimodal-chatbot-with-output_36_1.png => 
llava-next-multimodal-chatbot-with-output_17_0.png} (100%) rename docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/{minicpm-v-multimodal-chatbot-with-output_17_1.jpg => minicpm-v-multimodal-chatbot-with-output_12_1.jpg} (100%) rename docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/{minicpm-v-multimodal-chatbot-with-output_17_1.png => minicpm-v-multimodal-chatbot-with-output_12_1.png} (100%) delete mode 100644 docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_10_4.png create mode 100644 docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_12_4.png delete mode 100644 docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_14_1.png create mode 100644 docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_17_1.png delete mode 100644 docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_25_1.png create mode 100644 docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_28_1.png delete mode 100644 docs/notebooks/mobilevlm-language-assistant-with-output.rst delete mode 100644 docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.jpg delete mode 100644 docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.png create mode 100644 docs/notebooks/multilora-image-generation-with-output.rst create mode 100644 docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.jpg create mode 100644 docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.png create mode 100644 docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.jpg create mode 100644 docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.png create mode 100644 docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.jpg create mode 100644 docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.png create mode 100644 docs/notebooks/omniparser-with-output.rst create mode 100644 docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.jpg create mode 100644 docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.png create mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_14_3.png delete mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_15_3.png create mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_22_1.png delete mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_23_1.png create mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_26_1.png delete mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_27_1.png create mode 100644 
docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_29_1.png delete mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_30_1.png create mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_7_1.png delete mode 100644 docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_8_1.png delete mode 100644 docs/notebooks/paint-by-example-with-output.rst delete mode 100644 docs/notebooks/paint-by-example-with-output_files/paint-by-example-with-output_41_0.png delete mode 100644 docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_33_0.png create mode 100644 docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_34_0.png create mode 100644 docs/notebooks/segment-anything-2-video-with-output.rst create mode 100644 docs/notebooks/segment-anything-2-video-with-output_files/segment-anything-2-video-with-output_40_1.png create mode 100644 docs/notebooks/segment-anything-2-video-with-output_files/segment-anything-2-video-with-output_46_0.png delete mode 100644 docs/notebooks/segmind-vegart-with-output.rst delete mode 100644 docs/notebooks/segmind-vegart-with-output_files/segmind-vegart-with-output_12_1.jpg delete mode 100644 docs/notebooks/segmind-vegart-with-output_files/segmind-vegart-with-output_12_1.png delete mode 100644 docs/notebooks/segmind-vegart-with-output_files/segmind-vegart-with-output_23_2.jpg delete mode 100644 docs/notebooks/segmind-vegart-with-output_files/segmind-vegart-with-output_23_2.png rename docs/notebooks/sketch-to-image-pix2pix-turbo-with-output_files/{sketch-to-image-pix2pix-turbo-with-output_14_0.jpg => sketch-to-image-pix2pix-turbo-with-output_15_0.jpg} (100%) rename docs/notebooks/sketch-to-image-pix2pix-turbo-with-output_files/{sketch-to-image-pix2pix-turbo-with-output_14_0.png => sketch-to-image-pix2pix-turbo-with-output_15_0.png} (100%) delete mode 100644 docs/notebooks/sketch-to-image-pix2pix-turbo-with-output_files/sketch-to-image-pix2pix-turbo-with-output_18_0.jpg delete mode 100644 docs/notebooks/sketch-to-image-pix2pix-turbo-with-output_files/sketch-to-image-pix2pix-turbo-with-output_18_0.png create mode 100644 docs/notebooks/sketch-to-image-pix2pix-turbo-with-output_files/sketch-to-image-pix2pix-turbo-with-output_19_0.jpg create mode 100644 docs/notebooks/sketch-to-image-pix2pix-turbo-with-output_files/sketch-to-image-pix2pix-turbo-with-output_19_0.png create mode 100644 docs/notebooks/text-to-image-genai-with-output.rst create mode 100644 docs/notebooks/text-to-image-genai-with-output_files/text-to-image-genai-with-output_13_0.jpg create mode 100644 docs/notebooks/text-to-image-genai-with-output_files/text-to-image-genai-with-output_13_0.png create mode 100644 docs/notebooks/text-to-image-genai-with-output_files/text-to-image-genai-with-output_9_0.jpg create mode 100644 docs/notebooks/text-to-image-genai-with-output_files/text-to-image-genai-with-output_9_0.png delete mode 100644 docs/notebooks/typo-detector-with-output.rst delete mode 100644 docs/notebooks/wuerstchen-image-generation-with-output.rst delete mode 100644 docs/notebooks/wuerstchen-image-generation-with-output_files/499b779a-61d1-4e68-a1c3-437122622ba7.png delete mode 100644 docs/notebooks/wuerstchen-image-generation-with-output_files/wuerstchen-image-generation-with-output_11_0.png delete mode 100644 
docs/notebooks/wuerstchen-image-generation-with-output_files/wuerstchen-image-generation-with-output_45_0.png delete mode 100644 docs/notebooks/wuerstchen-image-generation-with-output_files/wuerstchen-image-generation-with-output_61_0.png rename docs/notebooks/{yolov8-quantization-with-accuracy-control-with-output.rst => yolov11-quantization-with-accuracy-control-with-output.rst} (74%) diff --git a/docs/nbdoc/consts.py b/docs/nbdoc/consts.py index bfad4b042e5359..1a4d3a13049041 100644 --- a/docs/nbdoc/consts.py +++ b/docs/nbdoc/consts.py @@ -6,7 +6,7 @@ repo_owner = "openvinotoolkit" repo_name = "openvino_notebooks" repo_branch = "tree/main" -artifacts_link = "http://repository.toolbox.iotg.sclab.intel.com/projects/ov-notebook/0.1.0-latest/20241104220807/dist/rst_files/" +artifacts_link = "http://repository.toolbox.iotg.sclab.intel.com/projects/ov-notebook/0.1.0-latest/20241120220806/dist/rst_files/" blacklisted_extensions = ['.xml', '.bin'] notebooks_repo = "https://github.com/openvinotoolkit/openvino_notebooks/blob/latest/" notebooks_binder = "https://mybinder.org/v2/gh/openvinotoolkit/openvino_notebooks/HEAD?filepath=" diff --git a/docs/notebooks/3D-pose-estimation-with-output.rst b/docs/notebooks/3D-pose-estimation-with-output.rst index f39aa93b36851d..9e09d96094fc78 100644 --- a/docs/notebooks/3D-pose-estimation-with-output.rst +++ b/docs/notebooks/3D-pose-estimation-with-output.rst @@ -93,6 +93,11 @@ Lab instead.** .. code:: ipython3 + import platform + + if platform.system() == "Darwin": + %pip install -q "numpy<2.0.0" + %pip install pythreejs "openvino>=2024.4.0" "opencv-python" "torch" "tqdm" --extra-index-url https://download.pytorch.org/whl/cpu @@ -108,68 +113,68 @@ Lab instead.** Collecting torch Using cached https://download.pytorch.org/whl/cpu/torch-2.4.1%2Bcpu-cp38-cp38-linux_x86_64.whl (194.9 MB) Collecting tqdm - Using cached tqdm-4.66.6-py3-none-any.whl.metadata (57 kB) - Requirement already satisfied: ipywidgets>=7.2.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pythreejs) (8.1.5) + Using cached tqdm-4.67.0-py3-none-any.whl.metadata (57 kB) + Requirement already satisfied: ipywidgets>=7.2.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pythreejs) (8.1.5) Collecting ipydatawidgets>=1.1.1 (from pythreejs) Using cached ipydatawidgets-4.3.5-py2.py3-none-any.whl.metadata (1.4 kB) Collecting numpy (from pythreejs) Using cached numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.6 kB) - Requirement already satisfied: traitlets in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pythreejs) (5.14.3) + Requirement already satisfied: traitlets in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pythreejs) (5.14.3) Collecting openvino-telemetry>=2023.2.1 (from openvino>=2024.4.0) - Using cached openvino_telemetry-2024.1.0-py3-none-any.whl.metadata (2.3 kB) - Requirement already satisfied: packaging in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2024.4.0) (24.1) + Using cached 
openvino_telemetry-2024.5.0-py3-none-any.whl.metadata (2.3 kB) + Requirement already satisfied: packaging in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2024.4.0) (24.2) Collecting filelock (from torch) Using cached filelock-3.16.1-py3-none-any.whl.metadata (2.9 kB) - Requirement already satisfied: typing-extensions>=4.8.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (4.12.2) + Requirement already satisfied: typing-extensions>=4.8.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (4.12.2) Collecting sympy (from torch) Using cached sympy-1.13.3-py3-none-any.whl.metadata (12 kB) Collecting networkx (from torch) Using cached https://download.pytorch.org/whl/networkx-3.2.1-py3-none-any.whl (1.6 MB) - Requirement already satisfied: jinja2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (3.1.4) + Requirement already satisfied: jinja2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (3.1.4) Collecting fsspec (from torch) Using cached fsspec-2024.10.0-py3-none-any.whl.metadata (11 kB) Collecting traittypes>=0.2.0 (from ipydatawidgets>=1.1.1->pythreejs) Using cached traittypes-0.2.1-py2.py3-none-any.whl.metadata (1.0 kB) - Requirement already satisfied: comm>=0.1.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (0.2.2) - Requirement already satisfied: ipython>=6.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (8.12.3) - Requirement already satisfied: widgetsnbextension~=4.0.12 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (4.0.13) - Requirement already satisfied: jupyterlab-widgets~=3.0.12 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (3.0.13) - Requirement already satisfied: MarkupSafe>=2.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from jinja2->torch) (2.1.5) + Requirement already satisfied: comm>=0.1.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (0.2.2) + Requirement already satisfied: ipython>=6.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (8.12.3) + Requirement already satisfied: widgetsnbextension~=4.0.12 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (4.0.13) + Requirement already satisfied: jupyterlab-widgets~=3.0.12 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipywidgets>=7.2.1->pythreejs) (3.0.13) + Requirement already satisfied: MarkupSafe>=2.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from jinja2->torch) (2.1.5) INFO: pip is looking at multiple versions of networkx to determine which version is compatible with other requirements. This could take a while. Collecting networkx (from torch) Using cached networkx-3.1-py3-none-any.whl.metadata (5.3 kB) Collecting mpmath<1.4,>=1.1.0 (from sympy->torch) Using cached https://download.pytorch.org/whl/mpmath-1.3.0-py3-none-any.whl (536 kB) - Requirement already satisfied: backcall in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.2.0) - Requirement already satisfied: decorator in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (5.1.1) - Requirement already satisfied: jedi>=0.16 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.19.1) - Requirement already satisfied: matplotlib-inline in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.1.7) - Requirement already satisfied: pickleshare in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.7.5) - Requirement already satisfied: prompt-toolkit!=3.0.37,<3.1.0,>=3.0.30 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (3.0.48) - Requirement already satisfied: pygments>=2.4.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (2.18.0) - Requirement already satisfied: stack-data in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.6.3) - Requirement already satisfied: pexpect>4.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (4.9.0) - Requirement already satisfied: parso<0.9.0,>=0.8.3 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from jedi>=0.16->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.8.4) - Requirement already satisfied: ptyprocess>=0.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pexpect>4.3->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.7.0) - Requirement already satisfied: wcwidth in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from prompt-toolkit!=3.0.37,<3.1.0,>=3.0.30->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.2.13) - Requirement already satisfied: executing>=1.2.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (2.1.0) - Requirement already satisfied: asttokens>=2.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (2.4.1) - Requirement already satisfied: pure-eval in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.2.3) - Requirement already satisfied: six>=1.12.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from asttokens>=2.1.0->stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (1.16.0) + Requirement already satisfied: backcall in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.2.0) + Requirement already satisfied: decorator in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (5.1.1) + Requirement already satisfied: jedi>=0.16 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.19.2) + Requirement already satisfied: matplotlib-inline in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.1.7) + Requirement already satisfied: pickleshare in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.7.5) + Requirement already satisfied: prompt-toolkit!=3.0.37,<3.1.0,>=3.0.30 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (3.0.48) + Requirement already satisfied: pygments>=2.4.0 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (2.18.0) + Requirement already satisfied: stack-data in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.6.3) + Requirement already satisfied: pexpect>4.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (4.9.0) + Requirement already satisfied: parso<0.9.0,>=0.8.4 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from jedi>=0.16->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.8.4) + Requirement already satisfied: ptyprocess>=0.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pexpect>4.3->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.7.0) + Requirement already satisfied: wcwidth in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from prompt-toolkit!=3.0.37,<3.1.0,>=3.0.30->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.2.13) + Requirement already satisfied: executing>=1.2.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (2.1.0) + Requirement already satisfied: asttokens>=2.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (2.4.1) + Requirement already satisfied: pure-eval in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (0.2.3) + Requirement already satisfied: six>=1.12.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from asttokens>=2.1.0->stack-data->ipython>=6.1.0->ipywidgets>=7.2.1->pythreejs) (1.16.0) Using cached pythreejs-2.4.2-py3-none-any.whl (3.4 MB) Using cached openvino-2024.4.0-16579-cp38-cp38-manylinux2014_x86_64.whl (42.6 MB) Using cached opencv_python-4.10.0.84-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (62.5 MB) - Using cached tqdm-4.66.6-py3-none-any.whl (78 kB) + Using cached tqdm-4.67.0-py3-none-any.whl (78 kB) Using cached ipydatawidgets-4.3.5-py2.py3-none-any.whl (271 kB) Using cached numpy-1.24.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.3 MB) - Using cached openvino_telemetry-2024.1.0-py3-none-any.whl (23 kB) + Using cached openvino_telemetry-2024.5.0-py3-none-any.whl (23 kB) Using cached filelock-3.16.1-py3-none-any.whl (16 kB) Using cached fsspec-2024.10.0-py3-none-any.whl (179 kB) Using cached networkx-3.1-py3-none-any.whl (2.1 MB) Using cached sympy-1.13.3-py3-none-any.whl (6.2 MB) Using 
cached traittypes-0.2.1-py2.py3-none-any.whl (8.6 kB) Installing collected packages: openvino-telemetry, mpmath, traittypes, tqdm, sympy, numpy, networkx, fsspec, filelock, torch, openvino, opencv-python, ipydatawidgets, pythreejs - Successfully installed filelock-3.16.1 fsspec-2024.10.0 ipydatawidgets-4.3.5 mpmath-1.3.0 networkx-3.1 numpy-1.24.4 opencv-python-4.10.0.84 openvino-2024.4.0 openvino-telemetry-2024.1.0 pythreejs-2.4.2 sympy-1.13.3 torch-2.4.1+cpu tqdm-4.66.6 traittypes-0.2.1 + Successfully installed filelock-3.16.1 fsspec-2024.10.0 ipydatawidgets-4.3.5 mpmath-1.3.0 networkx-3.1 numpy-1.24.4 opencv-python-4.10.0.84 openvino-2024.4.0 openvino-telemetry-2024.5.0 pythreejs-2.4.2 sympy-1.13.3 torch-2.4.1+cpu tqdm-4.67.0 traittypes-0.2.1 Note: you may need to restart the kernel to use updated packages. @@ -193,17 +198,19 @@ Imports # Fetch `notebook_utils` module import requests - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - with open("notebook_utils.py", "w") as f: - f.write(r.text) + if not Path("notebook_utils.py").exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + with open("notebook_utils.py", "w") as f: + f.write(r.text) - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/engine3js.py", - ) - with open("engine3js.py", "w") as f: - f.write(r.text) + if not Path("engine3js.py").exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/engine3js.py", + ) + with open("engine3js.py", "w") as f: + f.write(r.text) import notebook_utils as utils import engine3js as engine @@ -227,10 +234,11 @@ Download the model # directory where model will be downloaded base_model_dir = Path("model") - download_file( - "https://storage.openvinotoolkit.org/repositories/open_model_zoo/public/2022.1/human-pose-estimation-3d-0001/human-pose-estimation-3d.tar.gz", - directory=base_model_dir, - ) + if not base_model_dir.exists(): + download_file( + "https://storage.openvinotoolkit.org/repositories/open_model_zoo/public/2022.1/human-pose-estimation-3d-0001/human-pose-estimation-3d.tar.gz", + directory=base_model_dir, + ) ckpt_file = base_model_dir / "human-pose-estimation-3d-0001.pth" @@ -270,7 +278,7 @@ Convert Model to OpenVINO IR format .. parsed-literal:: - /tmp/ipykernel_496305/2723667668.py:9: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. 
+ /tmp/ipykernel_3496586/2723667668.py:9: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. pose_estimation_model.load_state_dict(torch.load(ckpt_file, map_location="cpu")) @@ -660,10 +668,16 @@ picture on the left to interact. .. code:: ipython3 + from notebook_utils import download_file + USE_WEBCAM = False cam_id = 0 - video_path = "https://storage.openvinotoolkit.org/data/test_data/videos/face-demographics-walking.mp4" + if not Path("face-demographics-walking.mp4").exists(): + download_file( + "https://storage.openvinotoolkit.org/data/test_data/videos/face-demographics-walking.mp4", + ) + video_path = "face-demographics-walking.mp4" source = cam_id if USE_WEBCAM else video_path diff --git a/docs/notebooks/3D-segmentation-point-clouds-with-output.rst b/docs/notebooks/3D-segmentation-point-clouds-with-output.rst index e60951d40c75f9..9ac414c9421193 100644 --- a/docs/notebooks/3D-segmentation-point-clouds-with-output.rst +++ b/docs/notebooks/3D-segmentation-point-clouds-with-output.rst @@ -219,7 +219,7 @@ chair for example. .. parsed-literal:: - /tmp/ipykernel_497205/2434168836.py:12: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored + /tmp/ipykernel_3496878/2434168836.py:12: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored ax.scatter3D(X, Y, Z, s=5, cmap="jet", marker="o", label="chair") @@ -313,7 +313,7 @@ select device from dropdown list for running inference using OpenVINO .. parsed-literal:: - /tmp/ipykernel_497205/2804603389.py:23: UserWarning: No data for colormapping provided via 'c'. Parameters 'cmap' will be ignored + /tmp/ipykernel_3496878/2804603389.py:23: UserWarning: No data for colormapping provided via 'c'. 
Parameters 'cmap' will be ignored ax.scatter(XCur, YCur, ZCur, s=5, cmap="jet", marker="o", label=classes[i]) diff --git a/docs/notebooks/action-recognition-webcam-with-output_files/action-recognition-webcam-with-output_22_0.png b/docs/notebooks/action-recognition-webcam-with-output_files/action-recognition-webcam-with-output_22_0.png index 1821f275db1019..0a8e2ecc3e82da 100644 --- a/docs/notebooks/action-recognition-webcam-with-output_files/action-recognition-webcam-with-output_22_0.png +++ b/docs/notebooks/action-recognition-webcam-with-output_files/action-recognition-webcam-with-output_22_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:31cb7026c28d1308b88f61a6939b7e11a54948c3cb3e4f7a1a1b8a038871150f -size 68999 +oid sha256:878b0e691f8cb979e806b7101df02610fc796f00bbea26a4f1541e4b958a7bd1 +size 69011 diff --git a/docs/notebooks/all_notebooks_paths.txt b/docs/notebooks/all_notebooks_paths.txt index 5cfa565a07d239..f93cf6e0dbe8d6 100644 --- a/docs/notebooks/all_notebooks_paths.txt +++ b/docs/notebooks/all_notebooks_paths.txt @@ -48,12 +48,12 @@ notebooks/image-classification-quantization/image-classification-quantization.ip notebooks/instant-id/instant-id.ipynb notebooks/instruct-pix2pix-image-editing/instruct-pix2pix-image-editing.ipynb notebooks/internvl2/internvl2.ipynb +notebooks/jax-to-openvino/jax-classification-to-openvino.ipynb notebooks/jina-clip/jina-clip.ipynb notebooks/knowledge-graphs-conve/knowledge-graphs-conve.ipynb notebooks/kosmos2-multimodal-large-language-model/kosmos2-multimodal-large-language-model.ipynb notebooks/language-quantize-bert/language-quantize-bert.ipynb notebooks/latent-consistency-models-image-generation/latent-consistency-models-image-generation.ipynb -notebooks/latent-consistency-models-image-generation/latent-consistency-models-optimum-demo.ipynb notebooks/latent-consistency-models-image-generation/lcm-lora-controlnet.ipynb notebooks/llava-multimodal-chatbot/llava-multimodal-chatbot-genai.ipynb notebooks/llava-multimodal-chatbot/llava-multimodal-chatbot-optimum.ipynb @@ -74,18 +74,22 @@ notebooks/mllama-3.2/mllama-3.2.ipynb notebooks/mms-massively-multilingual-speech/mms-massively-multilingual-speech.ipynb notebooks/mobileclip-video-search/mobileclip-video-search.ipynb notebooks/mobilevlm-language-assistant/mobilevlm-language-assistant.ipynb +notebooks/modelscope-to-openvino/modelscope-to-openvino.ipynb notebooks/model-server/model-server.ipynb +notebooks/multilora-image-generation/multilora-image-generation.ipynb notebooks/music-generation/music-generation.ipynb notebooks/named-entity-recognition/named-entity-recognition.ipynb notebooks/nano-llava-multimodal-chatbot/nano-llava-multimodal-chatbot.ipynb notebooks/nuextract-structure-extraction/nuextract-structure-extraction.ipynb notebooks/object-detection-webcam/object-detection.ipynb +notebooks/omniparser/omniparser.ipynb notebooks/oneformer-segmentation/oneformer-segmentation.ipynb notebooks/openvino-api/openvino-api.ipynb notebooks/openvino-tokenizers/openvino-tokenizers.ipynb notebooks/openvoice/openvoice.ipynb notebooks/optical-character-recognition/optical-character-recognition.ipynb notebooks/optimize-preprocessing/optimize-preprocessing.ipynb +notebooks/outetts-text-to-speech/outetts-text-to-speech.ipynb notebooks/paddle-ocr-webcam/paddle-ocr-webcam.ipynb notebooks/paddle-to-openvino/paddle-to-openvino-classification.ipynb notebooks/paint-by-example/paint-by-example.ipynb @@ -105,7 +109,7 @@ notebooks/pytorch-to-openvino/pytorch-onnx-to-openvino.ipynb 
notebooks/pytorch-to-openvino/pytorch-to-openvino.ipynb notebooks/qrcode-monster/qrcode-monster.ipynb notebooks/quantizing-model-with-accuracy-control/speech-recognition-quantization-wav2vec2.ipynb -notebooks/quantizing-model-with-accuracy-control/yolov8-quantization-with-accuracy-control.ipynb +notebooks/quantizing-model-with-accuracy-control/yolov11-quantization-with-accuracy-control.ipynb notebooks/qwen2-audio/qwen2-audio.ipynb notebooks/qwen2-vl/qwen2-vl.ipynb notebooks/riffusion-text-to-music/riffusion-text-to-music.ipynb @@ -134,6 +138,7 @@ notebooks/stable-diffusion-v2/stable-diffusion-v2-optimum-demo.ipynb notebooks/stable-diffusion-v2/stable-diffusion-v2-text-to-image-demo.ipynb notebooks/stable-diffusion-v2/stable-diffusion-v2-text-to-image.ipynb notebooks/stable-diffusion-v3/stable-diffusion-v3.ipynb +notebooks/stable-diffusion-v3/stable-diffusion-v3-torch-fx.ipynb notebooks/stable-diffusion-xl/segmind-vegart.ipynb notebooks/stable-diffusion-xl/stable-diffusion-xl.ipynb notebooks/stable-fast-3d/stable-fast-3d.ipynb diff --git a/docs/notebooks/amused-lightweight-text-to-image-with-output.rst b/docs/notebooks/amused-lightweight-text-to-image-with-output.rst deleted file mode 100644 index aafda311c34c45..00000000000000 --- a/docs/notebooks/amused-lightweight-text-to-image-with-output.rst +++ /dev/null @@ -1,984 +0,0 @@ -Lightweight image generation with aMUSEd and OpenVINO -===================================================== - -`Amused `__ -is a lightweight text to image model based off of the -`muse `__ architecture. Amused is -particularly useful in applications that require a lightweight and fast -model such as generating many images quickly at once. - -Amused is a VQVAE token based transformer that can generate an image in -fewer forward passes than many diffusion models. In contrast with muse, -it uses the smaller text encoder CLIP-L/14 instead of t5-xxl. Due to its -small parameter count and few forward pass generation process, amused -can generate many images quickly. This benefit is seen particularly at -larger batch sizes. - - -**Table of contents:** - - -- `Prerequisites <#prerequisites>`__ -- `Load and run the original - pipeline <#load-and-run-the-original-pipeline>`__ -- `Convert the model to OpenVINO - IR <#convert-the-model-to-openvino-ir>`__ - - - `Convert the Text Encoder <#convert-the-text-encoder>`__ - - `Convert the U-ViT transformer <#convert-the-u-vit-transformer>`__ - - `Convert VQ-GAN decoder - (VQVAE) <#convert-vq-gan-decoder-vqvae>`__ - -- `Compiling models and prepare - pipeline <#compiling-models-and-prepare-pipeline>`__ -- `Quantization <#quantization>`__ - - - `Prepare calibration dataset <#prepare-calibration-dataset>`__ - - `Run model quantization <#run-model-quantization>`__ - - `Compute Inception Scores and inference - time <#compute-inception-scores-and-inference-time>`__ - -- `Interactive inference <#interactive-inference>`__ - -Installation Instructions -~~~~~~~~~~~~~~~~~~~~~~~~~ - -This is a self-contained example that relies solely on its own code. - -We recommend running the notebook in a virtual environment. You only -need a Jupyter server to start. For details, please refer to -`Installation -Guide `__. - -Prerequisites -------------- - - - -.. code:: ipython3 - - %pip install -q transformers "diffusers>=0.25.0" "openvino>=2023.2.0" "accelerate>=0.20.3" "gradio>=4.19" "torch>=2.1" "pillow" "torchmetrics" "torch-fidelity" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "nncf>=2.9.0" datasets - - -.. 
parsed-literal:: - - Note: you may need to restart the kernel to use updated packages. - Note: you may need to restart the kernel to use updated packages. - - -.. code:: ipython3 - - # Fetch the notebook utils script from the openvino_notebooks repo - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) - - - - -.. parsed-literal:: - - 24692 - - - -Load and run the original pipeline ----------------------------------- - - - -.. code:: ipython3 - - import torch - from diffusers import AmusedPipeline - - - pipe = AmusedPipeline.from_pretrained( - "amused/amused-256", - ) - - prompt = "kind smiling ghost" - image = pipe(prompt, generator=torch.Generator("cpu").manual_seed(8)).images[0] - image.save("text2image_256.png") - - - -.. parsed-literal:: - - Loading pipeline components...: 0%| | 0/5 [00:00 1 or self.sliding_window is not None: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if past_key_values_length > 0: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:861: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - encoder_states = () if output_hidden_states else None - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:866: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if output_hidden_states: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:889: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if output_hidden_states: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:892: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
- if not return_dict: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:988: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if not return_dict: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:1486: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if not return_dict: - - -Convert the U-ViT transformer -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - class TransformerWrapper(torch.nn.Module): - def __init__(self, transformer): - super().__init__() - self.transformer = transformer - - def forward( - self, - latents=None, - micro_conds=None, - pooled_text_emb=None, - encoder_hidden_states=None, - ): - return self.transformer( - latents, - micro_conds=micro_conds, - pooled_text_emb=pooled_text_emb, - encoder_hidden_states=encoder_hidden_states, - ) - - - shape = (1, 16, 16) - latents = torch.full(shape, pipe.scheduler.config.mask_token_id, dtype=torch.long) - latents = torch.cat([latents] * 2) - - - example_input = { - "latents": latents, - "micro_conds": torch.rand([2, 5], dtype=torch.float32), - "pooled_text_emb": torch.rand([2, 768], dtype=torch.float32), - "encoder_hidden_states": torch.rand([2, 77, 768], dtype=torch.float32), - } - - - pipe.transformer.eval() - w_transformer = TransformerWrapper(pipe.transformer) - convert(w_transformer, TRANSFORMER_OV_PATH, example_input) - -Convert VQ-GAN decoder (VQVAE) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Function ``get_latents`` is -needed to return real latents for the conversion. Due to the VQVAE -implementation autogenerated tensor of the required shape is not -suitable. This function repeats part of ``AmusedPipeline``. - -.. 
code:: ipython3 - - def get_latents(): - shape = (1, 16, 16) - latents = torch.full(shape, pipe.scheduler.config.mask_token_id, dtype=torch.long) - model_input = torch.cat([latents] * 2) - - model_output = pipe.transformer( - model_input, - micro_conds=torch.rand([2, 5], dtype=torch.float32), - pooled_text_emb=torch.rand([2, 768], dtype=torch.float32), - encoder_hidden_states=torch.rand([2, 77, 768], dtype=torch.float32), - ) - guidance_scale = 10.0 - uncond_logits, cond_logits = model_output.chunk(2) - model_output = uncond_logits + guidance_scale * (cond_logits - uncond_logits) - - latents = pipe.scheduler.step( - model_output=model_output, - timestep=torch.tensor(0), - sample=latents, - ).prev_sample - - return latents - - - class VQVAEWrapper(torch.nn.Module): - def __init__(self, vqvae): - super().__init__() - self.vqvae = vqvae - - def forward(self, latents=None, force_not_quantize=True, shape=None): - outputs = self.vqvae.decode( - latents, - force_not_quantize=force_not_quantize, - shape=shape.tolist(), - ) - - return outputs - - - latents = get_latents() - example_vqvae_input = { - "latents": latents, - "force_not_quantize": torch.tensor(True), - "shape": torch.tensor((1, 16, 16, 64)), - } - - convert(VQVAEWrapper(pipe.vqvae), VQVAE_OV_PATH, example_vqvae_input) - - -.. parsed-literal:: - - /tmp/ipykernel_498025/3779428577.py:34: TracerWarning: Converting a tensor to a Python list might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - shape=shape.tolist(), - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/autoencoders/vq_model.py:144: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if not force_not_quantize: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:147: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - assert hidden_states.shape[1] == self.channels - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:162: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if hidden_states.shape[0] >= 64: - - -Compiling models and prepare pipeline -------------------------------------- - - - -Select device from dropdown list for running inference using OpenVINO. - -.. code:: ipython3 - - from notebook_utils import device_widget - - device = device_widget() - - device - - - - -.. parsed-literal:: - - Dropdown(description='Device:', index=1, options=('CPU', 'AUTO'), value='AUTO') - - - -.. 
code:: ipython3 - - core = ov.Core() - - ov_text_encoder = core.compile_model(TEXT_ENCODER_OV_PATH, device.value) - ov_transformer = core.compile_model(TRANSFORMER_OV_PATH, device.value) - ov_vqvae = core.compile_model(VQVAE_OV_PATH, device.value) - -Let’s create callable wrapper classes for compiled models to allow -interaction with original ``AmusedPipeline`` class. Note that all of -wrapper classes return ``torch.Tensor``\ s instead of ``np.array``\ s. - -.. code:: ipython3 - - from collections import namedtuple - - - class ConvTextEncoderWrapper(torch.nn.Module): - def __init__(self, text_encoder, config): - super().__init__() - self.config = config - self.text_encoder = text_encoder - - def forward(self, input_ids=None, return_dict=None, output_hidden_states=None): - inputs = { - "input_ids": input_ids, - "return_dict": return_dict, - "output_hidden_states": output_hidden_states, - } - - outs = self.text_encoder(inputs) - - outputs = namedtuple("CLIPTextModelOutput", ("text_embeds", "last_hidden_state", "hidden_states")) - - text_embeds = torch.from_numpy(outs[0]) - last_hidden_state = torch.from_numpy(outs[1]) - hidden_states = list(torch.from_numpy(out) for out in outs.values())[2:] - - return outputs(text_embeds, last_hidden_state, hidden_states) - -.. code:: ipython3 - - class ConvTransformerWrapper(torch.nn.Module): - def __init__(self, transformer, config): - super().__init__() - self.config = config - self.transformer = transformer - - def forward(self, latents=None, micro_conds=None, pooled_text_emb=None, encoder_hidden_states=None, **kwargs): - outputs = self.transformer( - { - "latents": latents, - "micro_conds": micro_conds, - "pooled_text_emb": pooled_text_emb, - "encoder_hidden_states": encoder_hidden_states, - }, - share_inputs=False, - ) - - return torch.from_numpy(outputs[0]) - -.. code:: ipython3 - - class ConvVQVAEWrapper(torch.nn.Module): - def __init__(self, vqvae, dtype, config): - super().__init__() - self.vqvae = vqvae - self.dtype = dtype - self.config = config - - def decode(self, latents=None, force_not_quantize=True, shape=None): - inputs = { - "latents": latents, - "force_not_quantize": force_not_quantize, - "shape": torch.tensor(shape), - } - - outs = self.vqvae(inputs) - outs = namedtuple("VQVAE", "sample")(torch.from_numpy(outs[0])) - - return outs - -And insert wrappers instances in the pipeline: - -.. code:: ipython3 - - prompt = "kind smiling ghost" - - transformer = pipe.transformer - vqvae = pipe.vqvae - text_encoder = pipe.text_encoder - - pipe.__dict__["_internal_dict"]["_execution_device"] = pipe._execution_device # this is to avoid some problem that can occur in the pipeline - pipe.register_modules( - text_encoder=ConvTextEncoderWrapper(ov_text_encoder, text_encoder.config), - transformer=ConvTransformerWrapper(ov_transformer, transformer.config), - vqvae=ConvVQVAEWrapper(ov_vqvae, vqvae.dtype, vqvae.config), - ) - - image = pipe(prompt, generator=torch.Generator("cpu").manual_seed(8)).images[0] - image.save("text2image_256.png") - - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/configuration_utils.py:140: FutureWarning: Accessing config attribute `_execution_device` directly via 'AmusedPipeline' object attribute is deprecated. Please access '_execution_device' over 'AmusedPipeline's config object instead, e.g. 'scheduler.config._execution_device'. 
- deprecate("direct config name access", "1.0.0", deprecation_message, standard_warn=False) - - - -.. parsed-literal:: - - 0%| | 0/12 [00:00`__ enables -post-training quantization by adding quantization layers into model -graph and then using a subset of the training dataset to initialize the -parameters of these additional quantization layers. Quantized operations -are executed in ``INT8`` instead of ``FP32``/``FP16`` making model -inference faster. - -According to ``Amused`` pipeline structure, the vision transformer model -takes up significant portion of the overall pipeline execution time. Now -we will show you how to optimize the UNet part using -`NNCF `__ to reduce -computation cost and speed up the pipeline. Quantizing the rest of the -pipeline does not significantly improve inference performance but can -lead to a substantial degradation of generations quality. - -We also estimate the quality of generations produced by optimized -pipeline with `Inception -Score `__ which is often -used to measure quality of text-to-image generation systems. - -The steps are the following: - -1. Create a calibration dataset for quantization. -2. Run ``nncf.quantize()`` on the model. -3. Save the quantized model using ``openvino.save_model()`` function. -4. Compare inference time and Inception score for original and quantized - pipelines. - -Please select below whether you would like to run quantization to -improve model inference speed. - - **NOTE**: Quantization is time and memory consuming operation. - Running quantization code below may take some time. - -.. code:: ipython3 - - from notebook_utils import quantization_widget - - QUANTIZED_TRANSFORMER_OV_PATH = Path(str(TRANSFORMER_OV_PATH).replace(".xml", "_quantized.xml")) - - skip_for_device = "GPU" in device.value - to_quantize = quantization_widget(not skip_for_device) - to_quantize - - - - -.. parsed-literal:: - - Checkbox(value=True, description='Quantization') - - - -.. code:: ipython3 - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", - ) - open("skip_kernel_extension.py", "w").write(r.text) - - %load_ext skip_kernel_extension - -Prepare calibration dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -We use a portion of -`conceptual_captions `__ -dataset from Hugging Face as calibration data. To collect intermediate -model inputs for calibration we customize ``CompiledModel``. - -.. 
code:: ipython3 - - %%skip not $to_quantize.value - - import datasets - from tqdm.auto import tqdm - from typing import Any, Dict, List - import pickle - import numpy as np - - - def disable_progress_bar(pipeline, disable=True): - if not hasattr(pipeline, "_progress_bar_config"): - pipeline._progress_bar_config = {'disable': disable} - else: - pipeline._progress_bar_config['disable'] = disable - - - class CompiledModelDecorator(ov.CompiledModel): - def __init__(self, compiled_model: ov.CompiledModel, data_cache: List[Any] = None, keep_prob: float = 0.5): - super().__init__(compiled_model) - self.data_cache = data_cache if data_cache is not None else [] - self.keep_prob = keep_prob - - def __call__(self, *args, **kwargs): - if np.random.rand() <= self.keep_prob: - self.data_cache.append(*args) - return super().__call__(*args, **kwargs) - - - def collect_calibration_data(ov_transformer_model, calibration_dataset_size: int) -> List[Dict]: - calibration_dataset_filepath = Path(f"calibration_data/{calibration_dataset_size}.pkl") - if not calibration_dataset_filepath.exists(): - calibration_data = [] - pipe.transformer.transformer = CompiledModelDecorator(ov_transformer_model, calibration_data, keep_prob=1.0) - disable_progress_bar(pipe) - - dataset = datasets.load_dataset("google-research-datasets/conceptual_captions", split="train", trust_remote_code=True).shuffle(seed=42) - - # Run inference for data collection - pbar = tqdm(total=calibration_dataset_size) - for batch in dataset: - prompt = batch["caption"] - if len(prompt) > pipe.tokenizer.model_max_length: - continue - pipe(prompt, generator=torch.Generator('cpu').manual_seed(0)) - pbar.update(len(calibration_data) - pbar.n) - if pbar.n >= calibration_dataset_size: - break - - pipe.transformer.transformer = ov_transformer_model - disable_progress_bar(pipe, disable=False) - - calibration_dataset_filepath.parent.mkdir(exist_ok=True, parents=True) - with open(calibration_dataset_filepath, 'wb') as f: - pickle.dump(calibration_data, f) - - with open(calibration_dataset_filepath, 'rb') as f: - calibration_data = pickle.load(f) - return calibration_data - -Run model quantization -~~~~~~~~~~~~~~~~~~~~~~ - - - -Run calibration data collection and quantize the vision transformer -model. - -.. 
code:: ipython3 - - %%skip not $to_quantize.value - - from nncf.quantization.advanced_parameters import AdvancedSmoothQuantParameters - from nncf.quantization.range_estimator import RangeEstimatorParameters, StatisticsCollectorParameters, StatisticsType, \ - AggregatorType - import nncf - - CALIBRATION_DATASET_SIZE = 12 * 25 - - if not QUANTIZED_TRANSFORMER_OV_PATH.exists(): - calibration_data = collect_calibration_data(ov_transformer, CALIBRATION_DATASET_SIZE) - quantized_model = nncf.quantize( - core.read_model(TRANSFORMER_OV_PATH), - nncf.Dataset(calibration_data), - model_type=nncf.ModelType.TRANSFORMER, - subset_size=len(calibration_data), - # We ignore convolutions to improve quality of generations without significant drop in inference speed - ignored_scope=nncf.IgnoredScope(types=["Convolution"]), - # Value of 0.85 was obtained using grid search based on Inception Score computed below - advanced_parameters=nncf.AdvancedQuantizationParameters( - smooth_quant_alphas=AdvancedSmoothQuantParameters(matmul=0.85), - # During activation statistics collection we ignore 1% of outliers which improves quantization quality - activations_range_estimator_params=RangeEstimatorParameters( - min=StatisticsCollectorParameters(statistics_type=StatisticsType.MIN, - aggregator_type=AggregatorType.MEAN_NO_OUTLIERS, - quantile_outlier_prob=0.01), - max=StatisticsCollectorParameters(statistics_type=StatisticsType.MAX, - aggregator_type=AggregatorType.MEAN_NO_OUTLIERS, - quantile_outlier_prob=0.01) - ) - ) - ) - ov.save_model(quantized_model, QUANTIZED_TRANSFORMER_OV_PATH) - - -.. parsed-literal:: - - INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, openvino - - - -.. parsed-literal:: - - 0%| | 0/300 [00:00`__ of original and -quantized pipelines on a small subset of images. Images are generated -from prompts of ``conceptual_captions`` validation set. We also measure -the time it took to generate the images for comparison reasons. - -Please note that the validation dataset size is small and serves only as -a rough estimate of generation quality. - -.. 
code:: ipython3 - - %%skip not $to_quantize.value - - from torchmetrics.image.inception import InceptionScore - from torchvision import transforms as transforms - from itertools import islice - import time - - VALIDATION_DATASET_SIZE = 100 - - def compute_inception_score(ov_transformer_model_path, validation_set_size, batch_size=100): - original_ov_transformer_model = pipe.transformer.transformer - pipe.transformer.transformer = core.compile_model(ov_transformer_model_path, device.value) - - disable_progress_bar(pipe) - dataset = datasets.load_dataset("google-research-datasets/conceptual_captions", "unlabeled", split="validation", trust_remote_code=True).shuffle(seed=42) - dataset = islice(dataset, validation_set_size) - - inception_score = InceptionScore(normalize=True, splits=1) - - images = [] - infer_times = [] - for batch in tqdm(dataset, total=validation_set_size, desc="Computing Inception Score"): - prompt = batch["caption"] - if len(prompt) > pipe.tokenizer.model_max_length: - continue - start_time = time.perf_counter() - image = pipe(prompt, generator=torch.Generator('cpu').manual_seed(0)).images[0] - infer_times.append(time.perf_counter() - start_time) - image = transforms.ToTensor()(image) - images.append(image) - - mean_perf_time = sum(infer_times) / len(infer_times) - - while len(images) > 0: - images_batch = torch.stack(images[-batch_size:]) - images = images[:-batch_size] - inception_score.update(images_batch) - kl_mean, kl_std = inception_score.compute() - - pipe.transformer.transformer = original_ov_transformer_model - disable_progress_bar(pipe, disable=False) - - return kl_mean, mean_perf_time - - - original_inception_score, original_time = compute_inception_score(TRANSFORMER_OV_PATH, VALIDATION_DATASET_SIZE) - print(f"Original pipeline Inception Score: {original_inception_score}") - quantized_inception_score, quantized_time = compute_inception_score(QUANTIZED_TRANSFORMER_OV_PATH, VALIDATION_DATASET_SIZE) - print(f"Quantized pipeline Inception Score: {quantized_inception_score}") - print(f"Quantization speed-up: {original_time / quantized_time:.2f}x") - - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torchmetrics/utilities/prints.py:43: UserWarning: Metric `InceptionScore` will save all extracted features in buffer. For large datasets this may lead to large memory footprint. - warnings.warn(\*args, \*\*kwargs) # noqa: B028 - - - -.. parsed-literal:: - - Computing Inception Score: 0%| | 0/100 [00:00`__ tackles the task of generating animation sequences from a single character image. It @@ -37,7 +36,8 @@ repo `__ and .. warning:: - This tutorial requires at least **96 GB** of RAM for model conversion and **40 GB** for inference. Changing the values of ``HEIGHT`` ``WIDTH`` and ``VIDEO_LENGTH`` variables will change the memory consumption but will also affect accuracy. + This tutorial requires at least **96 GB** of RAM for model conversion and **40 GB** for inference. Changing the values of ``HEIGHT``, ``WIDTH`` and ``VIDEO_LENGTH`` variables will change the memory consumption but will also affect accuracy. + **Table of contents:** @@ -70,6 +70,9 @@ need a Jupyter server to start. For details, please refer to `Installation Guide `__. +.. 
|image0| image:: https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/animate-anyone/animate-anyone.gif + + Prerequisites ------------- @@ -81,13 +84,10 @@ Prerequisites import requests - REPO_PATH = Path("Moore-AnimateAnyone") - if not REPO_PATH.exists(): - !git clone -q "https://github.com/itrushkin/Moore-AnimateAnyone.git" - %pip install -q "torch>=2.1" torchvision einops omegaconf "diffusers<=0.24" transformers av accelerate "openvino>=2024.0" "nncf>=2.9.0" "gradio>=4.19" --extra-index-url "https://download.pytorch.org/whl/cpu" - import sys + %pip install -q "torch>=2.1" torchvision einops omegaconf "diffusers<=0.24" "huggingface-hub<0.26.0" transformers av accelerate "gradio>=4.19" --extra-index-url "https://download.pytorch.org/whl/cpu" + %pip install -q "openvino>=2024.0" "nncf>=2.9.0" + - sys.path.insert(0, str(REPO_PATH.resolve())) r = requests.get( url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", ) @@ -98,8 +98,25 @@ Prerequisites ) open("notebook_utils.py", "w").write(r.text) + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) + + + from cmd_helper import clone_repo + + clone_repo("https://github.com/itrushkin/Moore-AnimateAnyone.git") + %load_ext skip_kernel_extension + +.. parsed-literal:: + + Note: you may need to restart the kernel to use updated packages. + Note: you may need to restart the kernel to use updated packages. + + Note that we clone a fork of original repo with tweaked forward methods. .. code:: ipython3 @@ -154,11 +171,9 @@ Note that we clone a fork of original repo with tweaked forward methods. .. parsed-literal:: - /home/itrushkin/.virtualenvs/test/lib/python3.10/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - /home/itrushkin/.virtualenvs/test/lib/python3.10/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/utils/outputs.py:63: FutureWarning: `torch.utils._pytree._register_pytree_node` is deprecated. Please use `torch.utils._pytree.register_pytree_node` instead. torch.utils._pytree._register_pytree_node( - /home/itrushkin/.virtualenvs/test/lib/python3.10/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/utils/outputs.py:63: FutureWarning: `torch.utils._pytree._register_pytree_node` is deprecated. Please use `torch.utils._pytree.register_pytree_node` instead. torch.utils._pytree._register_pytree_node( @@ -206,6 +221,13 @@ Prepare base model local_dir=local_dir, ) + + +.. parsed-literal:: + + diffusion_pytorch_model.bin: 0%| | 0.00/3.44G [00:00:2: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. 
It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. + :6: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. + :9: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. + + Convert model to OpenVINO IR ---------------------------- @@ -324,7 +423,7 @@ semantic features are extracted through the CLIP image encoder for Cross-Attention. Temporal-Attention operates in the temporal dimension. Finally, the VAE decoder decodes the result into a video clip. -.. image:: https://humanaigc.github.io/animate-anyone/static/images/f2_img.png +|image01| The pipeline contains 6 PyTorch modules: @@ -364,6 +463,8 @@ compression parameters. More details about weights compression can be found in `OpenVINO documentation `__. +.. |image01| image:: https://humanaigc.github.io/animate-anyone/static/images/f2_img.png + .. code:: ipython3 %%skip not $SHOULD_CONVERT @@ -421,14 +522,12 @@ of the pipeline, it will be better to convert them to separate models. .. parsed-literal:: - WARNING:nncf:NNCF provides best results with torch==2.1.2, while current torch version is 2.2.2+cpu. 
If you encounter issues, consider switching to torch==2.1.2 INFO:nncf:Statistics of the bitwidth distribution: - +--------------+---------------------------+-----------------------------------+ - | Num bits (N) | % all parameters (layers) | % ratio-defining parameters | - | | | (layers) | - +==============+===========================+===================================+ - | 8 | 100% (32 / 32) | 100% (32 / 32) | - +--------------+---------------------------+-----------------------------------+ + ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ + │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ + ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ + │ 8 │ 100% (32 / 32) │ 100% (32 / 32) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ @@ -444,14 +543,6 @@ of the pipeline, it will be better to convert them to separate models. - - - - - - - - .. code:: ipython3 %%skip not $SHOULD_CONVERT @@ -477,12 +568,11 @@ of the pipeline, it will be better to convert them to separate models. .. parsed-literal:: INFO:nncf:Statistics of the bitwidth distribution: - +--------------+---------------------------+-----------------------------------+ - | Num bits (N) | % all parameters (layers) | % ratio-defining parameters | - | | | (layers) | - +==============+===========================+===================================+ - | 8 | 100% (40 / 40) | 100% (40 / 40) | - +--------------+---------------------------+-----------------------------------+ + ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ + │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ + ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ + │ 8 │ 100% (40 / 40) │ 100% (40 / 40) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ @@ -498,14 +588,6 @@ of the pipeline, it will be better to convert them to separate models. - - - - - - - - Reference UNet ~~~~~~~~~~~~~~ @@ -552,12 +634,11 @@ step. .. parsed-literal:: INFO:nncf:Statistics of the bitwidth distribution: - +--------------+---------------------------+-----------------------------------+ - | Num bits (N) | % all parameters (layers) | % ratio-defining parameters | - | | | (layers) | - +==============+===========================+===================================+ - | 8 | 100% (270 / 270) | 100% (270 / 270) | - +--------------+---------------------------+-----------------------------------+ + ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ + │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ + ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ + │ 8 │ 100% (270 / 270) │ 100% (270 / 270) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ @@ -573,14 +654,6 @@ step. - - - - - - - - Denoising UNet ~~~~~~~~~~~~~~ @@ -654,12 +727,11 @@ step. .. 
parsed-literal:: INFO:nncf:Statistics of the bitwidth distribution: - +--------------+---------------------------+-----------------------------------+ - | Num bits (N) | % all parameters (layers) | % ratio-defining parameters | - | | | (layers) | - +==============+===========================+===================================+ - | 8 | 100% (534 / 534) | 100% (534 / 534) | - +--------------+---------------------------+-----------------------------------+ + ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ + │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ + ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ + │ 8 │ 100% (534 / 534) │ 100% (534 / 534) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ @@ -675,14 +747,6 @@ step. - - - - - - - - Pose Guider ~~~~~~~~~~~ @@ -709,12 +773,11 @@ efficiently integrate pose control signals into the denoising process. .. parsed-literal:: INFO:nncf:Statistics of the bitwidth distribution: - +--------------+---------------------------+-----------------------------------+ - | Num bits (N) | % all parameters (layers) | % ratio-defining parameters | - | | | (layers) | - +==============+===========================+===================================+ - | 8 | 100% (8 / 8) | 100% (8 / 8) | - +--------------+---------------------------+-----------------------------------+ + ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ + │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ + ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ + │ 8 │ 100% (8 / 8) │ 100% (8 / 8) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ @@ -730,14 +793,6 @@ efficiently integrate pose control signals into the denoising process. - - - - - - - - Image Encoder ~~~~~~~~~~~~~ @@ -763,19 +818,19 @@ required for both reference and denoising UNets. .. parsed-literal:: - /home/itrushkin/.virtualenvs/test/lib/python3.10/site-packages/transformers/modeling_utils.py:4225: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead warnings.warn( + `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. .. 
parsed-literal:: INFO:nncf:Statistics of the bitwidth distribution: - +--------------+---------------------------+-----------------------------------+ - | Num bits (N) | % all parameters (layers) | % ratio-defining parameters | - | | | (layers) | - +==============+===========================+===================================+ - | 8 | 100% (146 / 146) | 100% (146 / 146) | - +--------------+---------------------------+-----------------------------------+ + ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ + │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ + ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ + │ 8 │ 100% (146 / 146) │ 100% (146 / 146) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ @@ -791,14 +846,6 @@ required for both reference and denoising UNets. - - - - - - - - Inference --------- @@ -824,15 +871,6 @@ For starting work, please select inference device from dropdown list. device = device_widget() - - - -.. parsed-literal:: - - Dropdown(description='Device:', index=5, options=('CPU', 'GPU.0', 'GPU.1', 'GPU.2', 'GPU.3', 'AUTO'), value='A… - - - .. code:: ipython3 class OVPose2VideoPipeline(Pose2VideoPipeline): @@ -1130,7 +1168,7 @@ Video post-processing .. raw:: html @@ -1204,9 +1242,23 @@ Interactive inference demo = make_demo(fn=generate) try: - demo.queue().launch(debug=True) + demo.queue().launch(debug=False) except Exception: - demo.queue().launch(debug=True, share=True) + demo.queue().launch(debug=False, share=True) # if you are launching remotely, specify server_name and server_port # demo.launch(server_name='your server name', server_port='server port in int') # Read more in the docs: https://gradio.app/docs/" + + +.. parsed-literal:: + + Running on local URL: http://127.0.0.1:7860 + + To create a public link, set `share=True` in `launch()`. + + + + + + + diff --git a/docs/notebooks/auto-device-with-output.rst b/docs/notebooks/auto-device-with-output.rst index 2ebcbe7d80deb2..ad19853a06aea5 100644 --- a/docs/notebooks/auto-device-with-output.rst +++ b/docs/notebooks/auto-device-with-output.rst @@ -197,16 +197,16 @@ By default, ``compile_model`` API will select **AUTO** as .. 
parsed-literal:: - [22:41:57.1267]I[plugin.cpp:421][AUTO] device:CPU, config:LOG_LEVEL=LOG_INFO - [22:41:57.1268]I[plugin.cpp:421][AUTO] device:CPU, config:PERFORMANCE_HINT=LATENCY - [22:41:57.1268]I[plugin.cpp:421][AUTO] device:CPU, config:PERFORMANCE_HINT_NUM_REQUESTS=0 - [22:41:57.1268]I[plugin.cpp:421][AUTO] device:CPU, config:PERF_COUNT=NO - [22:41:57.1268]I[plugin.cpp:426][AUTO] device:CPU, priority:0 - [22:41:57.1268]I[schedule.cpp:17][AUTO] scheduler starting - [22:41:57.1269]I[auto_schedule.cpp:181][AUTO] select device:CPU - [22:41:57.2582]I[auto_schedule.cpp:346][AUTO] Device: [CPU]: Compile model took 131.300219 ms - [22:41:57.2583]I[auto_schedule.cpp:112][AUTO] device:CPU compiling model finished - [22:41:57.2584]I[plugin.cpp:454][AUTO] underlying hardware does not support hardware context + [23:30:35.1625]I[plugin.cpp:421][AUTO] device:CPU, config:LOG_LEVEL=LOG_INFO + [23:30:35.1626]I[plugin.cpp:421][AUTO] device:CPU, config:PERFORMANCE_HINT=LATENCY + [23:30:35.1626]I[plugin.cpp:421][AUTO] device:CPU, config:PERFORMANCE_HINT_NUM_REQUESTS=0 + [23:30:35.1626]I[plugin.cpp:421][AUTO] device:CPU, config:PERF_COUNT=NO + [23:30:35.1626]I[plugin.cpp:426][AUTO] device:CPU, priority:0 + [23:30:35.1626]I[schedule.cpp:17][AUTO] scheduler starting + [23:30:35.1626]I[auto_schedule.cpp:181][AUTO] select device:CPU + [23:30:35.2748]I[auto_schedule.cpp:346][AUTO] Device: [CPU]: Compile model took 112.194882 ms + [23:30:35.2749]I[auto_schedule.cpp:112][AUTO] device:CPU compiling model finished + [23:30:35.2750]I[plugin.cpp:454][AUTO] underlying hardware does not support hardware context Successfully compiled model without a device_name. @@ -220,7 +220,7 @@ By default, ``compile_model`` API will select **AUTO** as .. parsed-literal:: Deleted compiled_model - [22:41:57.2639]I[schedule.cpp:308][AUTO] scheduler ending + [23:30:35.2802]I[schedule.cpp:308][AUTO] scheduler ending Explicitly pass AUTO as device_name to Core::compile_model API @@ -378,7 +378,7 @@ executed on CPU until GPU is ready. .. parsed-literal:: - Time to load model using AUTO device and get first inference: 0.12 seconds. + Time to load model using AUTO device and get first inference: 0.13 seconds. .. 
code:: ipython3 @@ -553,12 +553,12 @@ Loop for inference and update the FPS/Latency every Compiling Model for AUTO device with THROUGHPUT hint Start inference, 6 groups of FPS/latency will be measured over 10s intervals - throughput: 179.70fps, latency: 32.12ms, time interval: 10.00s - throughput: 183.61fps, latency: 31.86ms, time interval: 10.01s - throughput: 183.96fps, latency: 31.88ms, time interval: 10.01s - throughput: 183.98fps, latency: 31.91ms, time interval: 10.00s - throughput: 183.26fps, latency: 31.98ms, time interval: 10.01s - throughput: 183.40fps, latency: 32.01ms, time interval: 10.00s + throughput: 185.58fps, latency: 30.99ms, time interval: 10.01s + throughput: 184.03fps, latency: 31.86ms, time interval: 10.01s + throughput: 178.79fps, latency: 32.85ms, time interval: 10.00s + throughput: 182.60fps, latency: 32.13ms, time interval: 10.01s + throughput: 184.75fps, latency: 31.76ms, time interval: 10.00s + throughput: 184.82fps, latency: 31.71ms, time interval: 10.03s Done @@ -604,12 +604,12 @@ Loop for inference and update the FPS/Latency for each Compiling Model for AUTO Device with LATENCY hint Start inference, 6 groups fps/latency will be out with 10s interval - throughput: 130.56fps, latency: 7.18ms, time interval: 10.00s - throughput: 142.51fps, latency: 6.61ms, time interval: 10.01s - throughput: 142.47fps, latency: 6.62ms, time interval: 10.00s - throughput: 142.46fps, latency: 6.61ms, time interval: 10.00s - throughput: 142.63fps, latency: 6.61ms, time interval: 10.00s - throughput: 142.73fps, latency: 6.60ms, time interval: 10.00s + throughput: 141.02fps, latency: 6.60ms, time interval: 10.01s + throughput: 142.78fps, latency: 6.59ms, time interval: 10.00s + throughput: 132.85fps, latency: 7.12ms, time interval: 10.00s + throughput: 142.85fps, latency: 6.59ms, time interval: 10.00s + throughput: 142.91fps, latency: 6.59ms, time interval: 10.01s + throughput: 142.93fps, latency: 6.59ms, time interval: 10.00s Done diff --git a/docs/notebooks/auto-device-with-output_files/auto-device-with-output_27_0.png b/docs/notebooks/auto-device-with-output_files/auto-device-with-output_27_0.png index cc037738f18096..ee0ced8554407f 100644 --- a/docs/notebooks/auto-device-with-output_files/auto-device-with-output_27_0.png +++ b/docs/notebooks/auto-device-with-output_files/auto-device-with-output_27_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:1bedd8ff3e65a23fb4af380958a261d0916d2e0134b9426652a2779bdc06d6de -size 26887 +oid sha256:39ace04fe6c27d34344fa99d5119ed623b69144df356a39d6ab7f99cb32a81e4 +size 26587 diff --git a/docs/notebooks/auto-device-with-output_files/auto-device-with-output_28_0.png b/docs/notebooks/auto-device-with-output_files/auto-device-with-output_28_0.png index 21be57ac89d68d..8f6ad87cc674ee 100644 --- a/docs/notebooks/auto-device-with-output_files/auto-device-with-output_28_0.png +++ b/docs/notebooks/auto-device-with-output_files/auto-device-with-output_28_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:ed1ab24c30040707a36155169f4aaa91a5bff6cb48a2c5d10401ecbd87ca6f54 -size 40117 +oid sha256:5af2ed1645ba8fbde80b2c7e3e5fdf053c80531cf5d11f311c762a9921e6f668 +size 39937 diff --git a/docs/notebooks/blip-visual-language-processing-with-output.rst b/docs/notebooks/blip-visual-language-processing-with-output.rst index 09d58ec75b4fd0..a2c688c88a16b6 100644 --- a/docs/notebooks/blip-visual-language-processing-with-output.rst +++ b/docs/notebooks/blip-visual-language-processing-with-output.rst @@ -278,13 +278,8 
@@ text and vision modalities and postprocessing of generation results. .. code:: ipython3 - import platform - %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu "torch>=2.1.0" torchvision "transformers>=4.26.0" "gradio>=4.19" "openvino>=2023.3.0" "datasets>=2.14.6" "nncf>=2.8.1" "tqdm" - if platform.system() != "Windows": - %pip install -q "matplotlib>=3.4" - else: - %pip install -q "matplotlib>=3.4,<3.7" + %pip install -q "matplotlib>=3.4" .. code:: ipython3 diff --git a/docs/notebooks/catvton-with-output.rst b/docs/notebooks/catvton-with-output.rst index a7a9a04359f338..f9b2a4c33a83e6 100644 --- a/docs/notebooks/catvton-with-output.rst +++ b/docs/notebooks/catvton-with-output.rst @@ -31,7 +31,9 @@ Teaser image from `CatVTON GitHub `__ |teaser| In this tutorial we consider how to convert and run this model using -OpenVINO. +OpenVINO. An additional part demonstrates how to run optimization with +`NNCF `__ to speed up +pipeline. **Table of contents:** @@ -41,6 +43,14 @@ OpenVINO. - `Convert the model to OpenVINO IR <#convert-the-model-to-openvino-ir>`__ - `Compiling models <#compiling-models>`__ +- `Optimize model using NNCF Post-Training Quantization + API <#optimize-model-using-nncf-post-training-quantization-api>`__ + + - `Run Post-Training + Quantization <#run-post-training-quantization>`__ + - `Run Weights Compression <#run-weights-compression>`__ + - `Compare model file sizes <#compare-model-file-sizes>`__ + - `Interactive demo <#interactive-demo>`__ Installation Instructions @@ -67,18 +77,10 @@ Prerequisites if platform.system() == "Darwin": %pip install -q "numpy<2.0.0" - %pip install -q "openvino>=2024.4" + %pip install -q "openvino>=2024.4" "nncf>=2.13.0" %pip install -q "torch>=2.1" "diffusers>=0.29.1" torchvision opencv_python --extra-index-url https://download.pytorch.org/whl/cpu %pip install -q fvcore "pillow" "tqdm" "gradio>=4.36" "omegaconf==2.4.0.dev3" av pycocotools cloudpickle scipy accelerate "transformers>=4.27.3" - -.. parsed-literal:: - - Note: you may need to restart the kernel to use updated packages. - Note: you may need to restart the kernel to use updated packages. - Note: you may need to restart the kernel to use updated packages. - - .. code:: ipython3 import requests @@ -90,19 +92,10 @@ Prerequisites open("notebook_utils.py", "w").write(r.text) r = requests.get( - url="https://raw.githubusercontent.com/aleksandr-mokrov/openvino_notebooks/refs/heads/catvton/utils/cmd_helper.py", + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", ) open("cmd_helper.py", "w").write(r.text) - - - -.. parsed-literal:: - - 741 - - - .. code:: ipython3 from cmd_helper import clone_repo @@ -110,15 +103,6 @@ Prerequisites clone_repo("https://github.com/Zheng-Chong/CatVTON.git", "3b795364a4d2f3b5adb365f39cdea376d20bc53c") - - - -.. parsed-literal:: - - PosixPath('CatVTON') - - - Convert the model to OpenVINO IR ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -153,177 +137,207 @@ version). .. 
code:: ipython3 - from pathlib import Path - from ov_catvton_helper import download_models, convert_pipeline_models, convert_automasker_models - - MODEL_DIR = Path("models") - VAE_ENCODER_PATH = MODEL_DIR / "vae_encoder.xml" - VAE_DECODER_PATH = MODEL_DIR / "vae_decoder.xml" - UNET_PATH = MODEL_DIR / "unet.xml" - DENSEPOSE_PROCESSOR_PATH = MODEL_DIR / "densepose_processor.xml" - SCHP_PROCESSOR_ATR = MODEL_DIR / "schp_processor_atr.xml" - SCHP_PROCESSOR_LIP = MODEL_DIR / "schp_processor_lip.xml" - - - pipeline, mask_processor, automasker = download_models(MODEL_DIR) - convert_pipeline_models(pipeline, VAE_ENCODER_PATH, VAE_DECODER_PATH, UNET_PATH) - convert_automasker_models(automasker, DENSEPOSE_PROCESSOR_PATH, SCHP_PROCESSOR_ATR, SCHP_PROCESSOR_LIP) + pipeline, mask_processor, automasker = download_models() + convert_pipeline_models(pipeline) + convert_automasker_models(automasker) + +Compiling models +---------------- -.. parsed-literal:: - Note: switching to '3b795364a4d2f3b5adb365f39cdea376d20bc53c'. - - You are in 'detached HEAD' state. You can look around, make experimental - changes and commit them, and you can discard any commits you make in this - state without impacting any branches by switching back to a branch. - - If you want to create a new branch to retain commits you create, you may - do so (now or later) by using -c with the switch command. Example: +Select device from dropdown list for running inference using OpenVINO. + +.. code:: ipython3 + + import openvino as ov - git switch -c + from notebook_utils import device_widget - Or undo this operation with: - git switch - + core = ov.Core() - Turn off this advice by setting config variable advice.detachedHead to false + device = device_widget() - HEAD is now at 3b79536 Update default base model path - - - -.. parsed-literal:: - - Fetching 10 files: 0%| | 0/10 [00:00`__ provides a suite of +advanced algorithms for Neural Networks inference optimization in +OpenVINO with minimal accuracy drop. We will use 8-bit quantization in +post-training mode (without the fine-tuning pipeline) for the UNet +model, and 4-bit weight compression for the remaining models. + **NOTE**: Quantization is time and memory consuming operation. + Running quantization code below may take some time. You can disable + it using widget below: +.. code:: ipython3 -.. parsed-literal:: + from notebook_utils import quantization_widget + + to_quantize = quantization_widget() + + to_quantize - (…)nsePose/densepose_rcnn_R_50_FPN_s1x.yaml: 0%| | 0.00/182 [00:00= 64: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/unets/unet_2d_condition.py:1111: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if dim % default_overall_up_factor != 0: +.. 
code:: ipython3 + %%skip not $to_quantize.value + + import nncf + from ov_catvton_helper import UNET_PATH + + if not UNET_INT8_PATH.exists(): + unet = core.read_model(UNET_PATH) + quantized_model = nncf.quantize( + model=unet, + calibration_dataset=nncf.Dataset(calibration_data), + subset_size=subset_size, + model_type=nncf.ModelType.TRANSFORMER, + ) + ov.save_model(quantized_model, UNET_INT8_PATH) -Compiling models ----------------- +Run Weights Compression +~~~~~~~~~~~~~~~~~~~~~~~ -Select device from dropdown list for running inference using OpenVINO. +Quantizing of the remaining components of the pipeline does not +significantly improve inference performance but can lead to a +substantial degradation of accuracy. The weight compression will be +applied to footprint reduction. .. code:: ipython3 - import openvino as ov - - from notebook_utils import device_widget - + %%skip not $to_quantize.value - core = ov.Core() - - device = device_widget() + from catvton_quantization_helper import compress_models - device + compress_models(core) +.. code:: ipython3 + %%skip not $to_quantize.value + + from catvton_quantization_helper import ( + VAE_ENCODER_INT4_PATH, + VAE_DECODER_INT4_PATH, + DENSEPOSE_PROCESSOR_INT4_PATH, + SCHP_PROCESSOR_ATR_INT4, + SCHP_PROCESSOR_LIP_INT4, + ) + + optimized_pipe, _, optimized_automasker = download_models() + optimized_pipe = get_compiled_pipeline(optimized_pipe, core, device, VAE_ENCODER_INT4_PATH, VAE_DECODER_INT4_PATH, UNET_INT8_PATH) + optimized_automasker = get_compiled_automasker(optimized_automasker, core, device, DENSEPOSE_PROCESSOR_INT4_PATH, SCHP_PROCESSOR_ATR_INT4, SCHP_PROCESSOR_LIP_INT4) +Compare model file sizes +~~~~~~~~~~~~~~~~~~~~~~~~ -.. parsed-literal:: - Dropdown(description='Device:', index=1, options=('CPU', 'AUTO'), value='AUTO') +.. code:: ipython3 + %%skip not $to_quantize.value + from catvton_quantization_helper import compare_models_size + + compare_models_size() -``get_compiled_pipeline`` and ``get_compiled_automasker`` functions -defined in ``ov_catvton_helper.py`` provides convenient way for getting -the pipeline and the ``automasker`` with compiled ov-models that are -compatible with the original interface. It accepts the original pipeline -and ``automasker``, inference device and directories with converted -models as arguments. Under the hood we create callable wrapper classes -for compiled models to allow interaction with original pipelines. Note -that all of wrapper classes return ``torch.Tensor``\ s instead of -``np.array``\ s. And then insert wrappers instances in the pipeline. -.. code:: ipython3 +.. parsed-literal:: - from ov_catvton_helper import get_compiled_pipeline, get_compiled_automasker - + vae_encoder compression rate: 2.011 + vae_decoder compression rate: 2.007 + unet compression rate: 1.995 + densepose_processor compression rate: 2.019 + schp_processor_atr compression rate: 1.993 + schp_processor_lip compression rate: 1.993 - pipeline = get_compiled_pipeline(pipeline, core, device, VAE_ENCODER_PATH, VAE_DECODER_PATH, UNET_PATH) - automasker = get_compiled_automasker(automasker, core, device, DENSEPOSE_PROCESSOR_PATH, SCHP_PROCESSOR_ATR, SCHP_PROCESSOR_LIP) Interactive inference --------------------- @@ -333,28 +347,24 @@ Interactive inference Please select below whether you would like to use the quantized models to launch the interactive demo. +.. 
code:: ipython3 + + from ov_catvton_helper import get_pipeline_selection_option + + use_quantized_models = get_pipeline_selection_option(optimized_pipe) + + use_quantized_models + .. code:: ipython3 from gradio_helper import make_demo + pipe = optimized_pipe if use_quantized_models.value else pipeline + masker = optimized_automasker if use_quantized_models.value else automasker output_dir = "output" - demo = make_demo(pipeline, mask_processor, automasker, output_dir) + demo = make_demo(pipe, mask_processor, masker, output_dir) try: - demo.launch(debug=False) + demo.launch(debug=True) except Exception: - demo.launch(debug=False, share=True) - - -.. parsed-literal:: - - Running on local URL: http://127.0.0.1:7860 - - To create a public link, set `share=True` in `launch()`. - - - - - - - + demo.launch(debug=True, share=True) diff --git a/docs/notebooks/clip-language-saliency-map-with-output.rst b/docs/notebooks/clip-language-saliency-map-with-output.rst index 3c19a581410863..dd6f608429e1c5 100644 --- a/docs/notebooks/clip-language-saliency-map-with-output.rst +++ b/docs/notebooks/clip-language-saliency-map-with-output.rst @@ -122,7 +122,7 @@ Initial Implementation with Transformers and Pytorch # Install requirements %pip install -q "openvino>=2023.1.0" - %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu transformers "numpy<2" "torch>=2.1" "gradio>=4.19" + %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu transformers "numpy<2" "torch>=2.1" "gradio>=4.19" "matplotlib>=3.4" .. code:: ipython3 diff --git a/docs/notebooks/clip-zero-shot-classification-with-output.rst b/docs/notebooks/clip-zero-shot-classification-with-output.rst index fd572a83ffb834..3da831e6d9d0dd 100644 --- a/docs/notebooks/clip-zero-shot-classification-with-output.rst +++ b/docs/notebooks/clip-zero-shot-classification-with-output.rst @@ -729,6 +729,7 @@ up of the dynamic quantized models. Interactive demo ---------------- + Now, it is your turn! You can provide your own image and comma-separated list of labels for zero-shot classification. diff --git a/docs/notebooks/controlnet-stable-diffusion-with-output.rst b/docs/notebooks/controlnet-stable-diffusion-with-output.rst index 3ab43d897ea658..f3988f276e1ccf 100644 --- a/docs/notebooks/controlnet-stable-diffusion-with-output.rst +++ b/docs/notebooks/controlnet-stable-diffusion-with-output.rst @@ -197,16 +197,31 @@ Prerequisites .. 
code:: ipython3 - %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu "torch>=2.1" "torchvision" - %pip install -q "diffusers>=0.14.0" "matplotlib>=3.4" "transformers>=4.30.2" "controlnet-aux>=0.0.6" "gradio>=3.36" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "openvino>=2023.1.0" "datasets>=2.14.6" "nncf>=2.7.0" - import requests + from pathlib import Path + + utility_files = ["notebook_utils.py", "pip_helper.py"] + + for utility in utility_files: + if not Path(utility).exists(): + r = requests.get(f"https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/{utility}") + with open(utility, "w") as f: + f.write(r.text) - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + + from pip_helper import pip_install + + pip_install("torch>=2.1", "torchvision", "--extra-index-url", "https://download.pytorch.org/whl/cpu") + pip_install( + "diffusers>=0.14.0", + "matplotlib>=3.4", + "transformers>=4.30.2", + "controlnet-aux>=0.0.6", + "gradio>=3.36", + "--extra-index-url", + "https://download.pytorch.org/whl/cpu", ) - open("notebook_utils.py", "w").write(r.text) + pip_install("openvino>=2023.1.0", "datasets>=2.14.6", "nncf>=2.7.0", "opencv-python") Instantiating Generation Pipeline --------------------------------- @@ -272,14 +287,18 @@ Now, let us check its result on example image: .. code:: ipython3 - import requests from PIL import Image import matplotlib.pyplot as plt import numpy as np - + from notebook_utils import download_file example_url = "https://user-images.githubusercontent.com/29454499/224540208-c172c92a-9714-4a7b-857a-b1e54b4d4791.jpg" - img = Image.open(requests.get(example_url, stream=True).raw) + + image_path = Path("example_image.jpg") + if not image_path.exists(): + download_file(image_path, filename="example_image.jpg") + + img = Image.open(image_path) pose = pose_estimator(img) @@ -1439,10 +1458,12 @@ Let’s load ``skip magic`` extension to skip quantization if # Fetch `skip_kernel_extension` module import requests - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", - ) - open("skip_kernel_extension.py", "w").write(r.text) + + if not Path("skip_kernel_extension.py").exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", + ) + open("skip_kernel_extension.py", "w").write(r.text) int8_pipe = None diff --git a/docs/notebooks/convert-to-openvino-with-output.rst b/docs/notebooks/convert-to-openvino-with-output.rst index 2baaf0043e7f04..507dd407eae739 100644 --- a/docs/notebooks/convert-to-openvino-with-output.rst +++ b/docs/notebooks/convert-to-openvino-with-output.rst @@ -184,10 +184,10 @@ NLP model from Hugging Face and export it in ONNX format: .. parsed-literal:: - 2024-11-04 22:48:30.842642: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-04 22:48:30.876775: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 00:16:16.864961: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. 
You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 00:16:16.903350: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-04 22:48:31.539454: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 00:16:17.575066: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT .. parsed-literal:: @@ -660,7 +660,7 @@ frameworks conversion guides. .. parsed-literal:: - 2024-11-04 22:48:47.716205: W tensorflow/core/common_runtime/gpu/gpu_device.cc:1956] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform. + 2024-11-22 00:16:33.997234: W tensorflow/core/common_runtime/gpu/gpu_device.cc:1956] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform. Skipping registering GPU devices... diff --git a/docs/notebooks/convnext-classification-with-output.rst b/docs/notebooks/convnext-classification-with-output.rst index 6e1c039f7013c6..9466f30c22898e 100644 --- a/docs/notebooks/convnext-classification-with-output.rst +++ b/docs/notebooks/convnext-classification-with-output.rst @@ -192,7 +192,7 @@ And print results Predicted Class: 281 Predicted Label: n02123045 tabby, tabby cat - Predicted Probability: 0.4661690592765808 + Predicted Probability: 0.5919997096061707 Convert the model to OpenVINO Intermediate representation format diff --git a/docs/notebooks/ct-segmentation-quantize-nncf-with-output.rst b/docs/notebooks/ct-segmentation-quantize-nncf-with-output.rst index 30778bafc8e884..c3d645f1841a17 100644 --- a/docs/notebooks/ct-segmentation-quantize-nncf-with-output.rst +++ b/docs/notebooks/ct-segmentation-quantize-nncf-with-output.rst @@ -154,10 +154,10 @@ Imports .. parsed-literal:: - 2024-11-04 22:49:10.827255: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-04 22:49:10.861330: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 00:16:56.689204: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 00:16:56.724390: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. 
To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-04 22:49:11.454332: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 00:16:57.319913: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT .. parsed-literal:: @@ -223,7 +223,7 @@ notebook `__. .. parsed-literal:: - /tmp/ipykernel_503635/1592321960.py:3: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. + /tmp/ipykernel_3514722/1592321960.py:3: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. state_dict = torch.load(state_dict_file, map_location=torch.device("cpu")) @@ -444,7 +444,7 @@ this notebook. .. parsed-literal:: [ WARNING ] Please fix your imports. Module %s has been moved to %s. The old module will be deleted in version %s. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/monai/networks/nets/basic_unet.py:168: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/monai/networks/nets/basic_unet.py:168: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
if x_e.shape[-i - 1] != x_0.shape[-i - 1]: @@ -526,18 +526,18 @@ Convert quantized model to OpenVINO IR model and save it. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/quantization/layers.py:340: TracerWarning: Converting a tensor to a Python number might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/quantization/layers.py:340: TracerWarning: Converting a tensor to a Python number might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! return self._level_low.item() - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/quantization/layers.py:348: TracerWarning: Converting a tensor to a Python number might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/quantization/layers.py:348: TracerWarning: Converting a tensor to a Python number might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! return self._level_high.item() - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/monai/networks/nets/basic_unet.py:168: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/monai/networks/nets/basic_unet.py:168: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if x_e.shape[-i - 1] != x_0.shape[-i - 1]: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 1. of the traced function does not match the corresponding output of the Python function. Detailed error: + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 1. 
of the traced function does not match the corresponding output of the Python function. Detailed error: Tensor-likes are not close! - Mismatched elements: 245783 / 262144 (93.8%) - Greatest absolute difference: 3.1180567741394043 at index (0, 0, 474, 435) (up to 1e-05 allowed) - Greatest relative difference: 16087.83647354372 at index (0, 0, 37, 224) (up to 1e-05 allowed) + Mismatched elements: 247214 / 262144 (94.3%) + Greatest absolute difference: 4.1846349239349365 at index (0, 0, 379, 430) (up to 1e-05 allowed) + Greatest relative difference: 15984.079041034269 at index (0, 0, 447, 390) (up to 1e-05 allowed) _check_trace( @@ -663,7 +663,7 @@ be run in the notebook with ``! benchmark_app`` or [ WARNING ] Performance hint was not explicitly specified in command line. Device(AUTO) performance hint will be set to PerformanceMode.LATENCY. [Step 4/11] Reading model files [ INFO ] Loading model files - [ INFO ] Read model took 8.85 ms + [ INFO ] Read model took 8.99 ms [ INFO ] Original model I/O parameters: [ INFO ] Model inputs: [ INFO ] x (node: x) : f32 / [...] / [1,1,512,512] @@ -677,7 +677,7 @@ be run in the notebook with ``! benchmark_app`` or [ INFO ] Model outputs: [ INFO ] ***NO_NAME*** (node: __module.final_conv/aten::_convolution/Add) : f32 / [...] / [1,1,512,512] [Step 7/11] Loading the model to the device - [ INFO ] Compile model took 253.47 ms + [ INFO ] Compile model took 240.78 ms [Step 8/11] Querying optimal runtime parameters [ INFO ] Model: [ INFO ] NETWORK_NAME: Model0 @@ -714,17 +714,17 @@ be run in the notebook with ``! benchmark_app`` or [ INFO ] Fill input 'x' with random values [Step 10/11] Measuring performance (Start inference synchronously, limits: 15000 ms duration) [ INFO ] Benchmarking in inference only mode (inputs filling are not included in measurement loop). - [ INFO ] First inference took 56.51 ms + [ INFO ] First inference took 49.70 ms [Step 11/11] Dumping statistics report [ INFO ] Execution Devices:['CPU'] - [ INFO ] Count: 406 iterations - [ INFO ] Duration: 15019.48 ms + [ INFO ] Count: 425 iterations + [ INFO ] Duration: 15023.51 ms [ INFO ] Latency: - [ INFO ] Median: 35.01 ms - [ INFO ] Average: 36.77 ms - [ INFO ] Min: 34.63 ms - [ INFO ] Max: 48.05 ms - [ INFO ] Throughput: 27.03 FPS + [ INFO ] Median: 34.55 ms + [ INFO ] Average: 35.13 ms + [ INFO ] Min: 34.21 ms + [ INFO ] Max: 47.23 ms + [ INFO ] Throughput: 28.29 FPS .. code:: ipython3 @@ -750,7 +750,7 @@ be run in the notebook with ``! benchmark_app`` or [ WARNING ] Performance hint was not explicitly specified in command line. Device(AUTO) performance hint will be set to PerformanceMode.LATENCY. [Step 4/11] Reading model files [ INFO ] Loading model files - [ INFO ] Read model took 10.78 ms + [ INFO ] Read model took 11.10 ms [ INFO ] Original model I/O parameters: [ INFO ] Model inputs: [ INFO ] x (node: x) : f32 / [...] / [1,1,512,512] @@ -764,7 +764,7 @@ be run in the notebook with ``! benchmark_app`` or [ INFO ] Model outputs: [ INFO ] ***NO_NAME*** (node: __module.final_conv/aten::_convolution/Add) : f32 / [...] / [1,1,512,512] [Step 7/11] Loading the model to the device - [ INFO ] Compile model took 250.08 ms + [ INFO ] Compile model took 251.41 ms [Step 8/11] Querying optimal runtime parameters [ INFO ] Model: [ INFO ] NETWORK_NAME: Model49 @@ -801,17 +801,17 @@ be run in the notebook with ``! 
benchmark_app`` or [ INFO ] Fill input 'x' with random values [Step 10/11] Measuring performance (Start inference synchronously, limits: 15000 ms duration) [ INFO ] Benchmarking in inference only mode (inputs filling are not included in measurement loop). - [ INFO ] First inference took 29.09 ms + [ INFO ] First inference took 29.68 ms [Step 11/11] Dumping statistics report [ INFO ] Execution Devices:['CPU'] - [ INFO ] Count: 938 iterations - [ INFO ] Duration: 15008.12 ms + [ INFO ] Count: 911 iterations + [ INFO ] Duration: 15009.49 ms [ INFO ] Latency: - [ INFO ] Median: 15.77 ms - [ INFO ] Average: 15.80 ms - [ INFO ] Min: 15.47 ms - [ INFO ] Max: 17.13 ms - [ INFO ] Throughput: 62.50 FPS + [ INFO ] Median: 15.73 ms + [ INFO ] Average: 16.27 ms + [ INFO ] Min: 15.41 ms + [ INFO ] Max: 24.40 ms + [ INFO ] Throughput: 60.69 FPS Visually Compare Inference Results @@ -905,7 +905,7 @@ seed is displayed to enable reproducing specific runs of this cell. .. parsed-literal:: - Visualizing results with seed 1730757034 + Visualizing results with seed 1732231099 @@ -989,7 +989,7 @@ performs inference, and displays the results on the frames loaded in .. parsed-literal:: Loaded model to AUTO in 0.15 seconds. - Total time for 68 frames: 2.36 seconds, fps:29.25 + Total time for 68 frames: 2.31 seconds, fps:29.91 References diff --git a/docs/notebooks/ct-segmentation-quantize-nncf-with-output_files/ct-segmentation-quantize-nncf-with-output_37_1.png b/docs/notebooks/ct-segmentation-quantize-nncf-with-output_files/ct-segmentation-quantize-nncf-with-output_37_1.png index 5aa37909b71cf7..a0c854d6dd33f6 100644 --- a/docs/notebooks/ct-segmentation-quantize-nncf-with-output_files/ct-segmentation-quantize-nncf-with-output_37_1.png +++ b/docs/notebooks/ct-segmentation-quantize-nncf-with-output_files/ct-segmentation-quantize-nncf-with-output_37_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:894600de56af211d4cc3e64ee092b5a62d1b0158c51048d17accadddea0f046e -size 382725 +oid sha256:588fb52eb7dcf0ede69419b9645ad6dc93526e8960af83679e12bac98e6817f6 +size 385527 diff --git a/docs/notebooks/ddcolor-image-colorization-with-output.rst b/docs/notebooks/ddcolor-image-colorization-with-output.rst index 409d2495e2fea6..cd3bf024065b55 100644 --- a/docs/notebooks/ddcolor-image-colorization-with-output.rst +++ b/docs/notebooks/ddcolor-image-colorization-with-output.rst @@ -25,8 +25,9 @@ In this tutorial we consider how to convert and run DDColor using OpenVINO. Additionally, we will demonstrate how to optimize this model using `NNCF `__. -🪄 Let’s start to explore magic of image colorization! #### Table of -contents: +🪄 Let’s start to explore magic of image colorization! + +**Table of contents:** - `Prerequisites <#prerequisites>`__ - `Load PyTorch model <#load-pytorch-model>`__ @@ -67,7 +68,7 @@ Prerequisites .. code:: ipython3 import platform - + %pip install -q "nncf>=2.11.0" "torch>=2.1" "torchvision" "timm" "opencv_python" "pillow" "PyYAML" "scipy" "scikit-image" "datasets" "gradio>=4.19" --extra-index-url https://download.pytorch.org/whl/cpu %pip install -Uq "openvino>=2024.3.0" if platform.python_version_tuple()[1] in ["8", "9"]: @@ -85,39 +86,42 @@ Prerequisites .. 
code:: ipython3 - import sys from pathlib import Path import requests - - repo_dir = Path("DDColor") - - if not repo_dir.exists(): - !git clone https://github.com/piddnad/DDColor.git - - sys.path.append(str(repo_dir)) - + + r = requests.get( url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", ) open("notebook_utils.py", "w").write(r.text) + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) + + + .. parsed-literal:: - Cloning into 'DDColor'... - remote: Enumerating objects: 241, done. - remote: Counting objects: 100% (84/84), done. - remote: Compressing objects: 100% (49/49), done. - remote: Total 241 (delta 57), reused 37 (delta 35), pack-reused 157 (from 1) - Receiving objects: 100% (241/241), 14.10 MiB | 21.95 MiB/s, done. - Resolving deltas: 100% (83/83), done. + 1491 + + + +.. code:: ipython3 + + from cmd_helper import clone_repo + + + clone_repo("https://github.com/piddnad/DDColor.git") .. parsed-literal:: - 24692 + PosixPath('DDColor') @@ -131,7 +135,7 @@ Prerequisites .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/timm/models/layers/__init__.py:48: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/timm/models/layers/__init__.py:48: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning) @@ -149,14 +153,14 @@ models from DDColor family. .. code:: ipython3 import torch - + model_name = "ddcolor_paper_tiny" - + ddcolor_model = DDColorHF.from_pretrained(f"piddnad/{model_name}") - - + + colorizer = ImageColorizationPipelineHF(model=ddcolor_model, input_size=512) - + ddcolor_model.to("cpu") colorizer.device = torch.device("cpu") @@ -169,18 +173,18 @@ Run PyTorch model inference import cv2 import PIL - + IMG_PATH = "DDColor/assets/test_images/Ansel Adams _ Moore Photography.jpeg" - - + + img = cv2.imread(IMG_PATH) - + PIL.Image.fromarray(img[:, :, ::-1]) -.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_8_0.png +.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_9_0.png @@ -192,7 +196,7 @@ Run PyTorch model inference -.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_9_0.png +.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_10_0.png @@ -213,9 +217,9 @@ loading on device using ``core.complie_model``. import openvino as ov import torch - + OV_COLORIZER_PATH = Path("ddcolor.xml") - + if not OV_COLORIZER_PATH.exists(): ov_model = ov.convert_model(ddcolor_model, example_input=torch.ones((1, 3, 512, 512)), input=[1, 3, 512, 512]) ov.save_model(ov_model, OV_COLORIZER_PATH) @@ -230,11 +234,11 @@ Select one of supported devices for inference using dropdown list. .. code:: ipython3 from notebook_utils import device_widget - + core = ov.Core() - + device = device_widget() - + device @@ -256,36 +260,36 @@ Select one of supported devices for inference using dropdown list. 
import numpy as np import torch import torch.nn.functional as F - - + + def process(img, compiled_model): # Preprocess input image height, width = img.shape[:2] - + # Normalize to [0, 1] range img = (img / 255.0).astype(np.float32) orig_l = cv2.cvtColor(img, cv2.COLOR_BGR2Lab)[:, :, :1] # (h, w, 1) - + # Resize rgb image -> lab -> get grey -> rgb img = cv2.resize(img, (512, 512)) img_l = cv2.cvtColor(img, cv2.COLOR_BGR2Lab)[:, :, :1] img_gray_lab = np.concatenate((img_l, np.zeros_like(img_l), np.zeros_like(img_l)), axis=-1) img_gray_rgb = cv2.cvtColor(img_gray_lab, cv2.COLOR_LAB2RGB) - + # Transpose HWC -> CHW and add batch dimension tensor_gray_rgb = torch.from_numpy(img_gray_rgb.transpose((2, 0, 1))).float().unsqueeze(0) - + # Run model inference output_ab = compiled_model(tensor_gray_rgb)[0] - + # Postprocess result # resize ab -> concat original l -> rgb output_ab_resize = F.interpolate(torch.from_numpy(output_ab), size=(height, width))[0].float().numpy().transpose(1, 2, 0) output_lab = np.concatenate((orig_l, output_ab_resize), axis=-1) output_bgr = cv2.cvtColor(output_lab, cv2.COLOR_LAB2BGR) - + output_img = (output_bgr * 255.0).round().astype(np.uint8) - + return output_img .. code:: ipython3 @@ -296,7 +300,7 @@ Select one of supported devices for inference using dropdown list. -.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_16_0.png +.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_17_0.png @@ -324,7 +328,7 @@ improve model inference speed. .. code:: ipython3 from notebook_utils import quantization_widget - + to_quantize = quantization_widget() to_quantize @@ -340,15 +344,15 @@ improve model inference speed. .. code:: ipython3 import requests - + OV_INT8_COLORIZER_PATH = Path("ddcolor_int8.xml") compiled_int8_model = None - + r = requests.get( url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", ) open("skip_kernel_extension.py", "w").write(r.text) - + %load_ext skip_kernel_extension Collect quantization dataset @@ -363,12 +367,12 @@ dataset from Hugging Face as calibration data. .. code:: ipython3 %%skip not $to_quantize.value - + from datasets import load_dataset - + subset_size = 300 calibration_data = [] - + if not OV_INT8_COLORIZER_PATH.exists(): dataset = load_dataset("ummagumm-a/colorization_dataset", split="train", streaming=True).shuffle(seed=42).take(subset_size) for idx, batch in enumerate(dataset): @@ -380,7 +384,7 @@ dataset from Hugging Face as calibration data. img_l = cv2.cvtColor(np.stack([img, img, img], axis=2), cv2.COLOR_BGR2Lab)[:, :, :1] img_gray_lab = np.concatenate((img_l, np.zeros_like(img_l), np.zeros_like(img_l)), axis=-1) img_gray_rgb = cv2.cvtColor(img_gray_lab, cv2.COLOR_LAB2RGB) - + image = np.expand_dims(img_gray_rgb.transpose((2, 0, 1)).astype(np.float32), axis=0) calibration_data.append(image) @@ -392,9 +396,9 @@ Perform model quantization .. code:: ipython3 %%skip not $to_quantize.value - + import nncf - + if not OV_INT8_COLORIZER_PATH.exists(): ov_model = core.read_model(OV_COLORIZER_PATH) quantized_model = nncf.quantize( @@ -412,10 +416,10 @@ Perform model quantization .. parsed-literal:: - 2024-11-04 22:52:53.152561: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. 
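As an aside, the ``nncf.quantize`` call above follows the same post-training flow used throughout these notebooks: read the FP16 IR, wrap the collected calibration samples in ``nncf.Dataset``, quantize, and save the INT8 IR. A minimal, self-contained sketch of that flow is given below; the model path and the random calibration tensors are placeholders for illustration only (the DDColor notebook itself uses ``OV_COLORIZER_PATH`` / ``ddcolor.xml`` and real calibration images).

.. code:: ipython3

    import numpy as np
    import openvino as ov
    import nncf

    core = ov.Core()
    # Placeholder FP16 IR path; substitute the model converted earlier in the notebook.
    fp16_model = core.read_model("model_fp16.xml")

    # Calibration samples must match the model input layout (NCHW float32 here).
    calibration_samples = [np.random.rand(1, 3, 512, 512).astype(np.float32) for _ in range(10)]

    # Post-training quantization: statistics are collected over the calibration dataset.
    quantized_model = nncf.quantize(
        model=fp16_model,
        calibration_dataset=nncf.Dataset(calibration_samples),
        subset_size=len(calibration_samples),
    )
    ov.save_model(quantized_model, "model_int8.xml")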
- 2024-11-04 22:52:53.191342: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 00:20:47.511999: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 00:20:47.551328: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-04 22:52:53.595160: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 00:20:47.960841: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT @@ -452,7 +456,7 @@ Run INT8 model inference .. code:: ipython3 from IPython.display import display - + if OV_INT8_COLORIZER_PATH.exists(): compiled_int8_model = core.compile_model(OV_INT8_COLORIZER_PATH, device.value) img = cv2.imread("DDColor/assets/test_images/Ansel Adams _ Moore Photography.jpeg") @@ -461,7 +465,7 @@ Run INT8 model inference -.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_25_0.png +.. image:: ddcolor-image-colorization-with-output_files/ddcolor-image-colorization-with-output_26_0.png Compare FP16 and INT8 model size @@ -472,9 +476,9 @@ Compare FP16 and INT8 model size .. code:: ipython3 fp16_ir_model_size = OV_COLORIZER_PATH.with_suffix(".bin").stat().st_size / 2**20 - + print(f"FP16 model size: {fp16_ir_model_size:.2f} MB") - + if OV_INT8_COLORIZER_PATH.exists(): quantized_model_size = OV_INT8_COLORIZER_PATH.with_suffix(".bin").stat().st_size / 2**20 print(f"INT8 model size: {quantized_model_size:.2f} MB") @@ -513,17 +517,17 @@ Tool =2024.2.0" "datasets>=2.14.6" "nncf>=2.11.0" "tqdm" "matplotlib>=3.4" - %pip install -q "typing-extensions>=4.9.0" eval-type-backport "gradio>=4.19" - %pip install -q -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "typing-extensions>=4.9.0" eval-type-backport "gradio>=4.19" gradio_imageslider + %pip install -q torch torchvision "opencv-python" huggingface_hub --extra-index-url https://download.pytorch.org/whl/cpu + if platform.system() == "Darwin": + %pip install -q "numpy<2.0.0" if platform.python_version_tuple()[1] in ["8", "9"]: %pip install -q "gradio-imageslider<=0.0.17" "typing-extensions>=4.9.0" @@ -131,7 +146,7 @@ attention optimizations first. .. 
code:: ipython3 - attention_file_path = Path("./depth_anything_v2/dinov2_layers/attention.py") + attention_file_path = Path("./Depth-Anything-V2/depth_anything_v2/dinov2_layers/attention.py") orig_attention_path = attention_file_path.parent / ("orig_" + attention_file_path.name) if not orig_attention_path.exists(): @@ -152,19 +167,14 @@ Prepare input data from PIL import Image - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - - open("notebook_utils.py", "w").write(r.text) from notebook_utils import download_file, device_widget, quantization_widget - download_file( - "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/3f779fc1-c1b2-4dec-915a-64dae510a2bb", - "furseal.png", - ) + + if not Path("furseal.png").exists(): + download_file( + "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/3f779fc1-c1b2-4dec-915a-64dae510a2bb", + "furseal.png", + ) Image.open("furseal.png").resize((600, 400)) @@ -177,7 +187,7 @@ Prepare input data -.. image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_8_1.png +.. image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_9_1.png @@ -238,7 +248,7 @@ is preprocessed image height, ``W`` is preprocessed image width. xFormers not available xFormers not available - /tmp/ipykernel_506168/1110356474.py:8: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. + /tmp/ipykernel_3517294/1110356474.py:8: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. model.load_state_dict(torch.load(model_path, map_location="cpu")) @@ -270,12 +280,12 @@ is preprocessed image height, ``W`` is preprocessed image width. .. parsed-literal:: - + -.. image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_14_1.png +.. 
image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_15_1.png Convert Model to OpenVINO IR format @@ -304,13 +314,13 @@ loading on device using ``core.complie_model``. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dinov2_layers/patch_embed.py:73: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dinov2_layers/patch_embed.py:73: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert H % patch_H == 0, f"Input image height {H} is not a multiple of patch height {patch_H}" - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dinov2_layers/patch_embed.py:74: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dinov2_layers/patch_embed.py:74: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert W % patch_W == 0, f"Input image width {W} is not a multiple of patch width: {patch_W}" - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dinov2.py:183: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dinov2.py:183: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if npatch == N and w == h: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dpt.py:147: TracerWarning: Converting a tensor to a Python integer might cause the trace to be incorrect. 
We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/depth_anything_v2/dpt.py:147: TracerWarning: Converting a tensor to a Python integer might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! out = F.interpolate(out, (int(patch_h * 14), int(patch_w * 14)), mode="bilinear", align_corners=True) @@ -402,12 +412,12 @@ range. .. parsed-literal:: - + -.. image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_24_1.png +.. image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_25_1.png Run inference on video @@ -417,12 +427,14 @@ Run inference on video .. code:: ipython3 - download_file( - "https://storage.openvinotoolkit.org/repositories/openvino_notebooks/data/data/video/Coco%20Walking%20in%20Berkeley.mp4", - "./Coco Walking in Berkeley.mp4", - ) - VIDEO_FILE = "./Coco Walking in Berkeley.mp4" + + if not Path(VIDEO_FILE).exists(): + download_file( + "https://storage.openvinotoolkit.org/repositories/openvino_notebooks/data/data/video/Coco%20Walking%20in%20Berkeley.mp4", + VIDEO_FILE, + ) + # Number of seconds of input video to process. Set `NUM_SECONDS` to 0 to process # the full video. NUM_SECONDS = 4 @@ -624,7 +636,7 @@ Run inference on video .. parsed-literal:: - Processed 60 frames in 13.24 seconds. Total FPS (including video processing): 4.53.Inference FPS: 10.68 + Processed 60 frames in 13.34 seconds. Total FPS (including video processing): 4.50.Inference FPS: 10.65 Video saved to 'output/Coco Walking in Berkeley_depth_anything.mp4'. @@ -651,7 +663,7 @@ Run inference on video .. parsed-literal:: Showing video saved at - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/output/Coco Walking in Berkeley_depth_anything.mp4 + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/output/Coco Walking in Berkeley_depth_anything.mp4 If you cannot see the video in your browser, please click on the following link to download the video @@ -709,10 +721,11 @@ improve model inference speed. .. code:: ipython3 # Fetch `skip_kernel_extension` module - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", - ) - open("skip_kernel_extension.py", "w").write(r.text) + if not Path("skip_kernel_extension.py").exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", + ) + open("skip_kernel_extension.py", "w").write(r.text) OV_DEPTH_ANYTHING_INT8_PATH = Path(f"{model_id}_int8.xml") @@ -784,10 +797,10 @@ quantization code below may take some time. .. parsed-literal:: - 2024-11-04 23:01:18.047102: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. 
- 2024-11-04 23:01:18.080343: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 00:29:02.540402: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 00:29:02.574640: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-04 23:01:18.654050: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 00:29:03.160362: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT @@ -899,7 +912,7 @@ data. -.. image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_43_0.png +.. image:: depth-anything-v2-with-output_files/depth-anything-v2-with-output_44_0.png .. code:: ipython3 @@ -913,10 +926,10 @@ data. .. parsed-literal:: - Processed 60 frames in 12.60 seconds. Total FPS (including video processing): 4.76.Inference FPS: 13.12 + Processed 60 frames in 12.91 seconds. Total FPS (including video processing): 4.65.Inference FPS: 12.59 Video saved to 'output/Coco Walking in Berkeley_depth_anything_int8.mp4'. Showing video saved at - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything-V2/output/Coco Walking in Berkeley_depth_anything.mp4 + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/output/Coco Walking in Berkeley_depth_anything.mp4 If you cannot see the video in your browser, please click on the following link to download the video @@ -996,9 +1009,9 @@ Tool =2023.3.0" "datasets>=2.14.6" "nncf" "tqdm" %pip install -q "typing-extensions>=4.9.0" eval-type-backport "gradio>=4.19" "matplotlib>=3.4" - %pip install -q -r requirements.txt --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q torch torchvision "opencv-python" huggingface_hub --extra-index-url https://download.pytorch.org/whl/cpu + if platform.system() == "Darwin": + %pip install -q "numpy<2.0.0" if platform.python_version_tuple()[1] in ["8", "9"]: %pip install -q "gradio-imageslider<=0.0.17" "typing-extensions>=4.9.0" .. parsed-literal:: - Cloning into 'Depth-Anything'... - remote: Enumerating objects: 441, done. - remote: Counting objects: 100% (161/161), done. - remote: Compressing objects: 100% (120/120), done. - remote: Total 441 (delta 115), reused 44 (delta 41), pack-reused 280 (from 1) - Receiving objects: 100% (441/441), 237.90 MiB | 24.22 MiB/s, done. - Resolving deltas: 100% (158/158), done. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything Note: you may need to restart the kernel to use updated packages. Note: you may need to restart the kernel to use updated packages. - WARNING: typer 0.12.5 does not provide the extra 'all' Note: you may need to restart the kernel to use updated packages. 
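The FP16 versus INT8 comparison reported earlier in this section comes down to checking the size of each IR's weights file and loading whichever variant is available. A minimal sketch of that check is given below, assuming hypothetical IR file names and the CPU device; for INT8 weight compression, a roughly 2x reduction of the ``.bin`` file relative to FP16 is the expected ballpark.

.. code:: ipython3

    # Minimal sketch: compare FP16 and INT8 OpenVINO IR artifacts.
    # The file names and the "CPU" device string are assumptions for illustration.
    from pathlib import Path

    import openvino as ov

    FP16_XML = Path("depth_anything_v2_vits.xml")       # hypothetical FP16 IR
    INT8_XML = Path("depth_anything_v2_vits_int8.xml")  # hypothetical INT8 IR

    for label, xml_path in (("FP16", FP16_XML), ("INT8", INT8_XML)):
        bin_path = xml_path.with_suffix(".bin")
        if bin_path.exists():
            # Weights live in the .bin file, so its size reflects the compression gain.
            print(f"{label} model size: {bin_path.stat().st_size / 2**20:.2f} MB")

    core = ov.Core()
    if INT8_XML.exists():
        compiled_int8 = core.compile_model(INT8_XML, "CPU")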
Note: you may need to restart the kernel to use updated packages. @@ -110,6 +131,9 @@ attention optimizations first. .. code:: ipython3 + from pathlib import Path + + attention_file_path = Path("./torchhub/facebookresearch_dinov2_main/dinov2/layers/attention.py") orig_attention_path = attention_file_path.parent / ("orig_" + attention_file_path.name) @@ -156,15 +180,16 @@ Prepare input data from PIL import Image - import requests r = requests.get( url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", ) - open("notebook_utils.py", "w").write(r.text) + + from notebook_utils import download_file, device_widget, quantization_widget + download_file( "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/3f779fc1-c1b2-4dec-915a-64dae510a2bb", "furseal.png", @@ -181,7 +206,7 @@ Prepare input data -.. image:: depth-anything-with-output_files/depth-anything-with-output_9_1.png +.. image:: depth-anything-with-output_files/depth-anything-with-output_11_1.png @@ -255,7 +280,7 @@ image size and prepare it for visualization. -.. image:: depth-anything-with-output_files/depth-anything-with-output_16_0.png +.. image:: depth-anything-with-output_files/depth-anything-with-output_18_0.png Convert Model to OpenVINO IR format @@ -284,13 +309,13 @@ loading on device using ``core.complie_model``. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/torchhub/facebookresearch_dinov2_main/dinov2/layers/patch_embed.py:73: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/torchhub/facebookresearch_dinov2_main/dinov2/layers/patch_embed.py:73: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert H % patch_H == 0, f"Input image height {H} is not a multiple of patch height {patch_H}" - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/torchhub/facebookresearch_dinov2_main/dinov2/layers/patch_embed.py:74: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/torchhub/facebookresearch_dinov2_main/dinov2/layers/patch_embed.py:74: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
assert W % patch_W == 0, f"Input image width {W} is not a multiple of patch width: {patch_W}" - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/torchhub/facebookresearch_dinov2_main/vision_transformer.py:183: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/torchhub/facebookresearch_dinov2_main/vision_transformer.py:183: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if npatch == N and w == h: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/depth_anything/dpt.py:133: TracerWarning: Converting a tensor to a Python integer might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/depth_anything/dpt.py:133: TracerWarning: Converting a tensor to a Python integer might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! out = F.interpolate(out, (int(patch_h * 14), int(patch_w * 14)), mode="bilinear", align_corners=True) @@ -356,7 +381,7 @@ Run inference on image -.. image:: depth-anything-with-output_files/depth-anything-with-output_25_0.png +.. image:: depth-anything-with-output_files/depth-anything-with-output_27_0.png Run inference on video @@ -573,7 +598,7 @@ Run inference on video .. parsed-literal:: - Processed 60 frames in 13.24 seconds. Total FPS (including video processing): 4.53.Inference FPS: 10.62 + Processed 60 frames in 13.63 seconds. Total FPS (including video processing): 4.40.Inference FPS: 10.11 Video saved to 'output/Coco Walking in Berkeley_depth_anything.mp4'. @@ -600,7 +625,7 @@ Run inference on video .. parsed-literal:: Showing video saved at - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/output/Coco Walking in Berkeley_depth_anything.mp4 + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/output/Coco Walking in Berkeley_depth_anything.mp4 If you cannot see the video in your browser, please click on the following link to download the video @@ -733,10 +758,10 @@ quantization code below may take some time. .. parsed-literal:: - 2024-11-04 23:10:13.897258: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. 
You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-04 23:10:13.929954: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 00:38:00.830321: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 00:38:00.863651: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-04 23:10:14.502746: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 00:38:01.436355: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT @@ -848,7 +873,7 @@ data. -.. image:: depth-anything-with-output_files/depth-anything-with-output_44_0.png +.. image:: depth-anything-with-output_files/depth-anything-with-output_46_0.png .. code:: ipython3 @@ -862,10 +887,10 @@ data. .. parsed-literal:: - Processed 60 frames in 12.75 seconds. Total FPS (including video processing): 4.70.Inference FPS: 12.76 + Processed 60 frames in 12.91 seconds. Total FPS (including video processing): 4.65.Inference FPS: 12.73 Video saved to 'output/Coco Walking in Berkeley_depth_anything_int8.mp4'. 
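The throughput lines above report two different rates: an end-to-end figure that also counts video decoding, preprocessing, and writing the output, and an inference-only figure. A sketch of how such a split measurement can be structured is shown below; the IR path, video path, fixed 518x518 input size, and simplified preprocessing are assumptions rather than the notebook's exact pipeline.

.. code:: ipython3

    # Sketch: measure end-to-end FPS vs. inference-only FPS on a short video clip.
    # The IR path, video path, input size, and preprocessing are illustrative assumptions.
    import time

    import cv2
    import numpy as np
    import openvino as ov

    compiled_model = ov.Core().compile_model("depth_anything_vits14_int8.xml", "CPU")
    output_key = compiled_model.output(0)

    cap = cv2.VideoCapture("Coco Walking in Berkeley.mp4")
    frames, inference_time = 0, 0.0
    total_start = time.perf_counter()

    while frames < 60:
        ok, frame = cap.read()
        if not ok:
            break
        # Simplified preprocessing: resize to the model input size and convert to NCHW float32.
        inp = cv2.resize(frame, (518, 518)).transpose(2, 0, 1)[None].astype(np.float32) / 255.0

        t0 = time.perf_counter()
        depth = compiled_model(inp)[output_key]
        inference_time += time.perf_counter() - t0
        frames += 1

    cap.release()
    total_time = time.perf_counter() - total_start
    print(
        f"Processed {frames} frames in {total_time:.2f} seconds. "
        f"Total FPS (including video processing): {frames / total_time:.2f}. "
        f"Inference FPS: {frames / inference_time:.2f}"
    )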
Showing video saved at - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/output/Coco Walking in Berkeley_depth_anything.mp4 + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/depth-anything/Depth-Anything/output/Coco Walking in Berkeley_depth_anything.mp4 If you cannot see the video in your browser, please click on the following link to download the video @@ -945,9 +970,9 @@ Tool =4.8.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (4.12.2) - Requirement already satisfied: sympy in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (1.13.3) - Requirement already satisfied: networkx in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (3.1) - Requirement already satisfied: jinja2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (3.1.4) - Requirement already satisfied: fsspec in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (2024.9.0) - Requirement already satisfied: numpy in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torchvision) (1.23.5) - Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torchvision) (10.4.0) - Requirement already satisfied: MarkupSafe>=2.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from jinja2->torch) (2.1.5) - Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from sympy->torch) (1.3.0) + Requirement already satisfied: torch in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (2.4.1+cpu) + Requirement already satisfied: torchvision in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (0.19.1+cpu) + Requirement already satisfied: opencv-python in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (4.10.0.84) + Requirement already satisfied: wheel in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (0.45.0) + Requirement already satisfied: filelock in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (3.16.1) + Requirement already satisfied: typing-extensions>=4.8.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (4.12.2) + Requirement already satisfied: sympy in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (1.13.3) + Requirement already satisfied: networkx in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (3.1) + Requirement already satisfied: jinja2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (3.1.4) + Requirement already satisfied: fsspec in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch) (2024.9.0) + Requirement already satisfied: numpy in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torchvision) (1.23.5) + Requirement already satisfied: pillow!=8.3.*,>=5.3.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torchvision) (10.4.0) + Requirement already satisfied: MarkupSafe>=2.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from jinja2->torch) (2.1.5) + Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from sympy->torch) (1.3.0) Looking in indexes: https://pypi.org/simple, https://download.pytorch.org/whl/cpu Collecting git+https://github.com/facebookresearch/detectron2.git - Cloning https://github.com/facebookresearch/detectron2.git to /tmp/pip-req-build-9ds1xx43 + Cloning https://github.com/facebookresearch/detectron2.git to /tmp/pip-req-build-4klmx21d .. parsed-literal:: - Running command git clone --filter=blob:none --quiet https://github.com/facebookresearch/detectron2.git /tmp/pip-req-build-9ds1xx43 + Running command git clone --filter=blob:none --quiet https://github.com/facebookresearch/detectron2.git /tmp/pip-req-build-4klmx21d .. 
parsed-literal:: - Resolved https://github.com/facebookresearch/detectron2.git to commit 8d85329aed8506ea3672e3e208971345973ea761 + Resolved https://github.com/facebookresearch/detectron2.git to commit c69939aa85460e8135f40bce908a6cddaa73065f Preparing metadata (setup.py): started Preparing metadata (setup.py): finished with status 'done' - Requirement already satisfied: Pillow>=7.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (10.4.0) - Requirement already satisfied: black in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (24.3.0) - Requirement already satisfied: cloudpickle in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (3.1.0) - Requirement already satisfied: fvcore<0.1.6,>=0.1.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (0.1.5.post20221221) + Requirement already satisfied: Pillow>=7.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (10.4.0) + Requirement already satisfied: black in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (24.3.0) + Requirement already satisfied: cloudpickle in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (3.1.0) + Requirement already satisfied: fvcore<0.1.6,>=0.1.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (0.1.5.post20221221) Collecting hydra-core>=1.1 (from detectron2==0.6) Using cached hydra_core-1.3.2-py3-none-any.whl.metadata (5.5 kB) Collecting iopath<0.1.10,>=0.1.7 (from detectron2==0.6) Using cached https://download.pytorch.org/whl/iopath-0.1.9-py3-none-any.whl (27 kB) - Requirement already satisfied: matplotlib in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (3.7.5) + Requirement already satisfied: matplotlib in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (3.7.5) Collecting omegaconf<2.4,>=2.1 (from detectron2==0.6) Using cached omegaconf-2.3.0-py3-none-any.whl.metadata (3.9 kB) - Requirement already satisfied: packaging in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (24.1) - Requirement already satisfied: pycocotools>=2.0.2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (2.0.7) - Requirement already satisfied: tabulate 
in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (0.9.0) - Requirement already satisfied: tensorboard in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (2.12.3) - Requirement already satisfied: termcolor>=1.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (2.4.0) - Requirement already satisfied: tqdm>4.29.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (4.66.6) - Requirement already satisfied: yacs>=0.1.8 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (0.1.8) - Requirement already satisfied: numpy in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from fvcore<0.1.6,>=0.1.5->detectron2==0.6) (1.23.5) - Requirement already satisfied: pyyaml>=5.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from fvcore<0.1.6,>=0.1.5->detectron2==0.6) (6.0.2) - Requirement already satisfied: antlr4-python3-runtime==4.9.* in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from hydra-core>=1.1->detectron2==0.6) (4.9.3) - Requirement already satisfied: importlib-resources in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from hydra-core>=1.1->detectron2==0.6) (6.4.5) - Requirement already satisfied: portalocker in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from iopath<0.1.10,>=0.1.7->detectron2==0.6) (2.10.1) - Requirement already satisfied: contourpy>=1.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (1.1.1) - Requirement already satisfied: cycler>=0.10 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (0.12.1) - Requirement already satisfied: fonttools>=4.22.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (4.54.1) - Requirement already satisfied: kiwisolver>=1.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (1.4.7) - Requirement already satisfied: pyparsing>=2.3.1 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (3.1.4) - Requirement already satisfied: python-dateutil>=2.7 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (2.9.0.post0) - Requirement already satisfied: click>=8.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (8.1.7) - Requirement already satisfied: mypy-extensions>=0.4.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (1.0.0) - Requirement already satisfied: pathspec>=0.9.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (0.12.1) - Requirement already satisfied: platformdirs>=2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (4.3.6) - Requirement already satisfied: tomli>=1.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (2.0.2) - Requirement already satisfied: typing-extensions>=4.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (4.12.2) - Requirement already satisfied: absl-py>=0.4 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (1.4.0) - Requirement already satisfied: grpcio>=1.48.2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (1.67.1) - Requirement already satisfied: google-auth<3,>=1.6.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (2.35.0) - Requirement already satisfied: google-auth-oauthlib<1.1,>=0.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (1.0.0) - Requirement already satisfied: markdown>=2.6.8 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (3.7) - Requirement already satisfied: protobuf>=3.19.6 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (3.20.3) - Requirement already satisfied: requests<3,>=2.21.0 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (2.32.3) - Requirement already satisfied: setuptools>=41.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (44.0.0) - Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (0.7.2) - Requirement already satisfied: werkzeug>=1.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (3.0.6) - Requirement already satisfied: wheel>=0.26 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (0.44.0) - Requirement already satisfied: cachetools<6.0,>=2.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (5.5.0) - Requirement already satisfied: pyasn1-modules>=0.2.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (0.4.1) - Requirement already satisfied: rsa<5,>=3.1.4 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (4.9) - Requirement already satisfied: requests-oauthlib>=0.7.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth-oauthlib<1.1,>=0.5->tensorboard->detectron2==0.6) (2.0.0) - Requirement already satisfied: zipp>=3.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from importlib-resources->hydra-core>=1.1->detectron2==0.6) (3.20.2) - Requirement already satisfied: importlib-metadata>=4.4 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from markdown>=2.6.8->tensorboard->detectron2==0.6) (8.5.0) - Requirement already satisfied: six>=1.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from python-dateutil>=2.7->matplotlib->detectron2==0.6) (1.16.0) - Requirement already satisfied: charset-normalizer<4,>=2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (3.4.0) - Requirement already satisfied: idna<4,>=2.5 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (3.10) - Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (2.2.3) - Requirement already satisfied: certifi>=2017.4.17 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (2024.8.30) - Requirement already satisfied: MarkupSafe>=2.1.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from werkzeug>=1.0.1->tensorboard->detectron2==0.6) (2.1.5) - Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (0.6.1) - Requirement already satisfied: oauthlib>=3.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<1.1,>=0.5->tensorboard->detectron2==0.6) (3.2.2) + Requirement already satisfied: packaging in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (24.2) + Requirement already satisfied: pycocotools>=2.0.2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (2.0.7) + Requirement already satisfied: tabulate in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (0.9.0) + Requirement already satisfied: tensorboard in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (2.12.3) + Requirement already satisfied: termcolor>=1.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (2.4.0) + Requirement already satisfied: tqdm>4.29.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (4.67.0) + Requirement already satisfied: yacs>=0.1.8 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from detectron2==0.6) (0.1.8) + Requirement already satisfied: numpy in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from fvcore<0.1.6,>=0.1.5->detectron2==0.6) (1.23.5) + Requirement already satisfied: pyyaml>=5.1 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from fvcore<0.1.6,>=0.1.5->detectron2==0.6) (6.0.2) + Requirement already satisfied: antlr4-python3-runtime==4.9.* in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from hydra-core>=1.1->detectron2==0.6) (4.9.3) + Requirement already satisfied: importlib-resources in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from hydra-core>=1.1->detectron2==0.6) (6.4.5) + Requirement already satisfied: portalocker in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from iopath<0.1.10,>=0.1.7->detectron2==0.6) (3.0.0) + Requirement already satisfied: contourpy>=1.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (1.1.1) + Requirement already satisfied: cycler>=0.10 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (0.12.1) + Requirement already satisfied: fonttools>=4.22.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (4.55.0) + Requirement already satisfied: kiwisolver>=1.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (1.4.7) + Requirement already satisfied: pyparsing>=2.3.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (3.1.4) + Requirement already satisfied: python-dateutil>=2.7 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from matplotlib->detectron2==0.6) (2.9.0.post0) + Requirement already satisfied: click>=8.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (8.1.7) + Requirement already satisfied: mypy-extensions>=0.4.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (1.0.0) + Requirement already satisfied: pathspec>=0.9.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (0.12.1) + Requirement already satisfied: platformdirs>=2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (4.3.6) + Requirement already satisfied: tomli>=1.1.0 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (2.1.0) + Requirement already satisfied: typing-extensions>=4.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from black->detectron2==0.6) (4.12.2) + Requirement already satisfied: absl-py>=0.4 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (1.4.0) + Requirement already satisfied: grpcio>=1.48.2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (1.68.0) + Requirement already satisfied: google-auth<3,>=1.6.3 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (2.36.0) + Requirement already satisfied: google-auth-oauthlib<1.1,>=0.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (1.0.0) + Requirement already satisfied: markdown>=2.6.8 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (3.7) + Requirement already satisfied: protobuf>=3.19.6 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (3.20.3) + Requirement already satisfied: requests<3,>=2.21.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (2.32.3) + Requirement already satisfied: setuptools>=41.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (44.0.0) + Requirement already satisfied: tensorboard-data-server<0.8.0,>=0.7.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (0.7.2) + Requirement already satisfied: werkzeug>=1.0.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (3.0.6) + Requirement already satisfied: wheel>=0.26 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from tensorboard->detectron2==0.6) (0.45.0) + Requirement already satisfied: cachetools<6.0,>=2.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (5.5.0) + Requirement already satisfied: pyasn1-modules>=0.2.1 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (0.4.1) + Requirement already satisfied: rsa<5,>=3.1.4 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (4.9) + Requirement already satisfied: requests-oauthlib>=0.7.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from google-auth-oauthlib<1.1,>=0.5->tensorboard->detectron2==0.6) (2.0.0) + Requirement already satisfied: zipp>=3.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from importlib-resources->hydra-core>=1.1->detectron2==0.6) (3.20.2) + Requirement already satisfied: importlib-metadata>=4.4 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from markdown>=2.6.8->tensorboard->detectron2==0.6) (8.5.0) + Requirement already satisfied: six>=1.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from python-dateutil>=2.7->matplotlib->detectron2==0.6) (1.16.0) + Requirement already satisfied: charset-normalizer<4,>=2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (3.4.0) + Requirement already satisfied: idna<4,>=2.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (3.10) + Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (2.2.3) + Requirement already satisfied: certifi>=2017.4.17 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests<3,>=2.21.0->tensorboard->detectron2==0.6) (2024.8.30) + Requirement already satisfied: MarkupSafe>=2.1.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from werkzeug>=1.0.1->tensorboard->detectron2==0.6) (2.1.5) + Requirement already satisfied: pyasn1<0.7.0,>=0.4.6 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from pyasn1-modules>=0.2.1->google-auth<3,>=1.6.3->tensorboard->detectron2==0.6) (0.6.1) + Requirement already satisfied: oauthlib>=3.0.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests-oauthlib>=0.7.0->google-auth-oauthlib<1.1,>=0.5->tensorboard->detectron2==0.6) (3.2.2) Using cached 
hydra_core-1.3.2-py3-none-any.whl (154 kB) Using cached omegaconf-2.3.0-py3-none-any.whl (79 kB) Building wheels for collected packages: detectron2 Building wheel for detectron2 (setup.py): started Building wheel for detectron2 (setup.py): finished with status 'done' - Created wheel for detectron2: filename=detectron2-0.6-cp38-cp38-linux_x86_64.whl size=8313237 sha256=7cd84a15a89de76a7ab5b648f2fb7ebff63b7e43ffc90c7f19a568d16858de8a - Stored in directory: /tmp/pip-ephem-wheel-cache-uvptv5zg/wheels/19/ac/65/e48e5e4ec2702274d927c5a6efb75709b24014371d3bb778f2 + Created wheel for detectron2: filename=detectron2-0.6-cp38-cp38-linux_x86_64.whl size=8313367 sha256=4eb79589c47d522c993509a8f16dfbf494af0f494c6a73577d9d3668c1ee4a05 + Stored in directory: /tmp/pip-ephem-wheel-cache-mkdcktsx/wheels/19/ac/65/e48e5e4ec2702274d927c5a6efb75709b24014371d3bb778f2 Successfully built detectron2 Installing collected packages: omegaconf, iopath, hydra-core, detectron2 Attempting uninstall: omegaconf @@ -203,10 +203,10 @@ Install required packages for running model Uninstalling iopath-0.1.10: Successfully uninstalled iopath-0.1.10 Successfully installed detectron2-0.6 hydra-core-1.3.2 iopath-0.1.9 omegaconf-2.3.0 - Requirement already satisfied: openvino>=2023.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (2024.4.0) - Requirement already satisfied: numpy<2.1.0,>=1.16.6 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (1.23.5) - Requirement already satisfied: openvino-telemetry>=2023.2.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (2024.1.0) - Requirement already satisfied: packaging in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (24.1) + Requirement already satisfied: openvino>=2023.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (2024.4.0) + Requirement already satisfied: numpy<2.1.0,>=1.16.6 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (1.23.5) + Requirement already satisfied: openvino-telemetry>=2023.2.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (2024.5.0) + Requirement already satisfied: packaging in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (24.2) Define helpers for PyTorch model initialization and conversion diff --git a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.jpg b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.jpg index f5b1d98eea3213..2c18ecdc61719a 100644 --- a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.jpg +++ 
b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.jpg @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0df4e94924f81aab66086702d85a461f463078f0d06f67b1fe5d46ad8480aa91 -size 58652 +oid sha256:edc1fd6c9bb94b1ff9dde163988de0d5635f35a9cb918138eb058de89fe36b6c +size 58029 diff --git a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.png b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.png index f676b44edd1d9a..0890e13959d7b2 100644 --- a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.png +++ b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_22_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:b5a857cd060d740290ccc65aec47252aad9f41c665dc2808195c3185248977e8 -size 509376 +oid sha256:b54cfa3647ce484120c2dac840789885273b1a61d0fdf6fd1fdb93e78753c114 +size 509016 diff --git a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.jpg b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.jpg index 67719cdcbd66b0..d2b1ec1ee92784 100644 --- a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.jpg +++ b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.jpg @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:ddc40900fddf1a115903c4e200899306060114348bf2ca82fbb4d7d92a885b09 -size 53897 +oid sha256:0ffdd1e786238678562e14aa201c2a602b1733bb7db8b1c175f7d86b3c011fa2 +size 54276 diff --git a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.png b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.png index af63ef41697b47..d970f117246904 100644 --- a/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.png +++ b/docs/notebooks/detectron2-to-openvino-with-output_files/detectron2-to-openvino-with-output_32_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d1276209027e5aac72e4bb6f39f4494d2a807ee4bd85054a1285b0832e4515b9 -size 460797 +oid sha256:6b8a9ccae3ca190acfaa9ddaa9be7641e02edae972b15c49f21cf9a8de9ae454 +size 456077 diff --git a/docs/notebooks/distilbert-sequence-classification-with-output.rst b/docs/notebooks/distilbert-sequence-classification-with-output.rst deleted file mode 100644 index 862079f68aeeb7..00000000000000 --- a/docs/notebooks/distilbert-sequence-classification-with-output.rst +++ /dev/null @@ -1,338 +0,0 @@ -Sentiment Analysis with OpenVINO™ -================================= - -**Sentiment analysis** is the use of natural language processing, text -analysis, computational linguistics, and biometrics to systematically -identify, extract, quantify, and study affective states and subjective -information. This notebook demonstrates how to convert and run a -sequence classification model using OpenVINO. 
- - -**Table of contents:** - - -- `Imports <#imports>`__ -- `Initializing the Model <#initializing-the-model>`__ -- `Initializing the Tokenizer <#initializing-the-tokenizer>`__ -- `Convert Model to OpenVINO Intermediate Representation - format <#convert-model-to-openvino-intermediate-representation-format>`__ - - - `Select inference device <#select-inference-device>`__ - -- `Inference <#inference>`__ - - - `For a single input sentence <#for-a-single-input-sentence>`__ - - `Read from a text file <#read-from-a-text-file>`__ - -Installation Instructions -~~~~~~~~~~~~~~~~~~~~~~~~~ - -This is a self-contained example that relies solely on its own code. - -We recommend running the notebook in a virtual environment. You only -need a Jupyter server to start. For details, please refer to -`Installation -Guide `__. - -Imports -------- - - - -.. code:: ipython3 - - %pip install "openvino>=2023.1.0" transformers "torch>=2.1" tqdm --extra-index-url https://download.pytorch.org/whl/cpu - - -.. parsed-literal:: - - Looking in indexes: https://pypi.org/simple, https://download.pytorch.org/whl/cpu - Requirement already satisfied: openvino>=2023.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (2024.4.0) - Requirement already satisfied: transformers in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (4.46.1) - Requirement already satisfied: torch>=2.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (2.4.1+cpu) - Requirement already satisfied: tqdm in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (4.66.6) - Requirement already satisfied: numpy<2.1.0,>=1.16.6 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (1.23.5) - Requirement already satisfied: openvino-telemetry>=2023.2.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (2024.1.0) - Requirement already satisfied: packaging in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from openvino>=2023.1.0) (24.1) - Requirement already satisfied: filelock in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from transformers) (3.16.1) - Requirement already satisfied: huggingface-hub<1.0,>=0.23.2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from transformers) (0.26.2) - Requirement already satisfied: pyyaml>=5.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from transformers) (6.0.2) - Requirement already satisfied: regex!=2019.12.17 in 
/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from transformers) (2024.9.11) - Requirement already satisfied: requests in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from transformers) (2.32.3) - Requirement already satisfied: safetensors>=0.4.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from transformers) (0.4.5) - Requirement already satisfied: tokenizers<0.21,>=0.20 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from transformers) (0.20.2) - Requirement already satisfied: typing-extensions>=4.8.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch>=2.1) (4.12.2) - Requirement already satisfied: sympy in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch>=2.1) (1.13.3) - Requirement already satisfied: networkx in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch>=2.1) (3.1) - Requirement already satisfied: jinja2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch>=2.1) (3.1.4) - Requirement already satisfied: fsspec in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from torch>=2.1) (2024.9.0) - Requirement already satisfied: MarkupSafe>=2.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from jinja2->torch>=2.1) (2.1.5) - Requirement already satisfied: charset-normalizer<4,>=2 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests->transformers) (3.4.0) - Requirement already satisfied: idna<4,>=2.5 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests->transformers) (3.10) - Requirement already satisfied: urllib3<3,>=1.21.1 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests->transformers) (2.2.3) - Requirement already satisfied: certifi>=2017.4.17 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from requests->transformers) (2024.8.30) - Requirement already satisfied: mpmath<1.4,>=1.1.0 in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (from sympy->torch>=2.1) (1.3.0) - Note: you may need to restart the kernel to use updated packages. - - -.. 
code:: ipython3 - - import warnings - from pathlib import Path - import time - from transformers import AutoModelForSequenceClassification, AutoTokenizer - import numpy as np - import openvino as ov - -.. code:: ipython3 - - # Fetch `notebook_utils` module - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - - open("notebook_utils.py", "w").write(r.text) - from notebook_utils import download_file, device_widget - -Initializing the Model ----------------------- - - - -We will use the transformer-based `DistilBERT base uncased finetuned -SST-2 `__ -model from Hugging Face. - -.. code:: ipython3 - - checkpoint = "distilbert-base-uncased-finetuned-sst-2-english" - model = AutoModelForSequenceClassification.from_pretrained(pretrained_model_name_or_path=checkpoint) - - -.. parsed-literal:: - - 2024-11-04 23:18:47.102633: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-04 23:18:47.135966: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. - To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-04 23:18:47.793551: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT - - -Initializing the Tokenizer --------------------------- - - - -Text Preprocessing cleans the text-based input data so it can be fed -into the model. -`Tokenization `__ -splits paragraphs and sentences into smaller units that can be more -easily assigned meaning. It involves cleaning the data and assigning -tokens or IDs to the words, so they are represented in a vector space -where similar words have similar vectors. This helps the model -understand the context of a sentence. Here, we will use -`AutoTokenizer `__ -- a pre-trained tokenizer from Hugging Face: - -.. code:: ipython3 - - tokenizer = AutoTokenizer.from_pretrained(pretrained_model_name_or_path=checkpoint) - -Convert Model to OpenVINO Intermediate Representation format ------------------------------------------------------------- - - - -`Model conversion -API `__ -facilitates the transition between training and deployment environments, -performs static model analysis, and adjusts deep learning models for -optimal execution on end-point target devices. - -.. code:: ipython3 - - import torch - - ir_xml_name = checkpoint + ".xml" - MODEL_DIR = "model/" - ir_xml_path = Path(MODEL_DIR) / ir_xml_name - - MAX_SEQ_LENGTH = 128 - input_info = [ - (ov.PartialShape([1, -1]), ov.Type.i64), - (ov.PartialShape([1, -1]), ov.Type.i64), - ] - default_input = torch.ones(1, MAX_SEQ_LENGTH, dtype=torch.int64) - inputs = { - "input_ids": default_input, - "attention_mask": default_input, - } - - ov_model = ov.convert_model(model, input=input_info, example_input=inputs) - ov.save_model(ov_model, ir_xml_path) - - -.. parsed-literal:: - - WARNING:tensorflow:Please fix your imports. Module tensorflow.python.training.tracking.base has been moved to tensorflow.python.trackable.base. The old module will be deleted in version 2.11. - - -.. parsed-literal:: - - [ WARNING ] Please fix your imports. Module %s has been moved to %s. 
The old module will be deleted in version %s. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead - warnings.warn( - `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. - - -OpenVINO™ Runtime uses the `Infer -Request `__ -mechanism which enables running models on different devices in -asynchronous or synchronous manners. The model graph is sent as an -argument to the OpenVINO API and an inference request is created. The -default inference mode is AUTO but it can be changed according to -requirements and hardware available. You can explore the different -inference modes and their usage `in -documentation. `__ - -.. code:: ipython3 - - core = ov.Core() - -Select inference device -~~~~~~~~~~~~~~~~~~~~~~~ - - - -select device from dropdown list for running inference using OpenVINO - -.. code:: ipython3 - - device = device_widget() - - device - - - - -.. parsed-literal:: - - Dropdown(description='Device:', index=1, options=('CPU', 'AUTO'), value='AUTO') - - - -.. code:: ipython3 - - warnings.filterwarnings("ignore") - compiled_model = core.compile_model(ov_model, device.value) - infer_request = compiled_model.create_infer_request() - -.. code:: ipython3 - - def softmax(x): - """ - Defining a softmax function to extract - the prediction from the output of the IR format - Parameters: Logits array - Returns: Probabilities - """ - - e_x = np.exp(x - np.max(x)) - return e_x / e_x.sum() - -Inference ---------- - - - -.. code:: ipython3 - - def infer(input_text): - """ - Creating a generic inference function - to read the input and infer the result - into 2 classes: Positive or Negative. - Parameters: Text to be processed - Returns: Label: Positive or Negative. - """ - - input_text = tokenizer( - input_text, - truncation=True, - return_tensors="np", - ) - inputs = dict(input_text) - label = {0: "NEGATIVE", 1: "POSITIVE"} - result = infer_request.infer(inputs=inputs) - for i in result.values(): - probability = np.argmax(softmax(i)) - return label[probability] - -For a single input sentence -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - input_text = "I had a wonderful day" - start_time = time.perf_counter() - result = infer(input_text) - end_time = time.perf_counter() - total_time = end_time - start_time - print("Label: ", result) - print("Total Time: ", "%.2f" % total_time, " seconds") - - -.. parsed-literal:: - - Label: POSITIVE - Total Time: 0.03 seconds - - -Read from a text file -~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - # Download the text from the openvino_notebooks storage - vocab_file_path = download_file( - "https://storage.openvinotoolkit.org/repositories/openvino_notebooks/data/data/text/food_reviews.txt", - directory="data", - ) - - - -.. parsed-literal:: - - data/food_reviews.txt: 0%| | 0.00/71.0 [00:00`__) and -techniques such as `InstructGPT `__ -has been the core foundation of breakthroughs such as ChatGPT and GPT-4. -However, these powerful models remain hidden behind APIs and we know -very little about their underlying architecture. Instruction-following -models are capable of generating text in response to prompts and are -often used for tasks like writing assistance, chatbots, and content -generation. 
Many users now interact with these models regularly and even -use them for work but the majority of such models remain closed-source -and require massive amounts of computational resources to experiment -with. - -`Dolly -2.0 `__ -is the first open-source, instruction-following LLM fine-tuned by -Databricks on a transparent and freely available dataset that is also -open-sourced to use for commercial purposes. That means Dolly 2.0 is -available for commercial applications without the need to pay for API -access or share data with third parties. Dolly 2.0 exhibits similar -characteristics so ChatGPT despite being much smaller. - -In this tutorial, we consider how to run an instruction-following text -generation pipeline using Dolly 2.0 and OpenVINO. We will use a -pre-trained model from the `Hugging Face -Transformers `__ -library. To simplify the user experience, the `Hugging Face Optimum -Intel `__ library is -used to convert the models to OpenVINO™ IR format. - -The tutorial consists of the following steps: - -- Install prerequisites -- Download and convert the model from a public source using the - `OpenVINO integration with Hugging Face - Optimum `__. -- Compress model weights to INT8 with `OpenVINO - NNCF `__ -- Create an instruction-following inference pipeline -- Run instruction-following pipeline - -About Dolly 2.0 ---------------- - -Dolly 2.0 is an instruction-following large language model trained on -the Databricks machine-learning platform that is licensed for commercial -use. It is based on `Pythia `__ -and is trained on ~15k instruction/response fine-tuning records -generated by Databricks employees in various capability domains, -including brainstorming, classification, closed QA, generation, -information extraction, open QA, and summarization. Dolly 2.0 works by -processing natural language instructions and generating responses that -follow the given instructions. It can be used for a wide range of -applications, including closed question-answering, summarization, and -generation. - -The model training process was inspired by -`InstructGPT `__. To train InstructGPT -models, the core technique is reinforcement learning from human feedback -(RLHF), This technique uses human preferences as a reward signal to -fine-tune models, which is important as the safety and alignment -problems required to be solved are complex and subjective, and aren’t -fully captured by simple automatic metrics. More details about the -InstructGPT approach can be found in OpenAI `blog -post `__ The -breakthrough discovered with InstructGPT is that language models don’t -need larger and larger training sets. By using human-evaluated -question-and-answer training, authors were able to train a better -language model using one hundred times fewer parameters than the -previous model. Databricks used a similar approach to create a prompt -and response dataset called they call -`databricks-dolly-15k `__, -a corpus of more than 15,000 records generated by thousands of -Databricks employees to enable large language models to exhibit the -magical interactivity of InstructGPT. 
More details about the model and -dataset can be found in `Databricks blog -post `__ -and `repo `__. - - -**Table of contents:** - - -- `Prerequisites <#prerequisites>`__ -- `Convert model using Optimum-CLI - tool <#convert-model-using-optimum-cli-tool>`__ -- `Compress model weights <#compress-model-weights>`__ - - - `Weights Compression using - Optimum-CLI <#weights-compression-using-optimum-cli>`__ - -- `Select model variant and inference - device <#select-model-variant-and-inference-device>`__ -- `Instantiate Model using Optimum - Intel <#instantiate-model-using-optimum-intel>`__ -- `Create an instruction-following inference - pipeline <#create-an-instruction-following-inference-pipeline>`__ - - - `Setup imports <#setup-imports>`__ - - `Prepare template for user - prompt <#prepare-template-for-user-prompt>`__ - - `Helpers for output parsing <#helpers-for-output-parsing>`__ - - `Main generation function <#main-generation-function>`__ - - `Helpers for application <#helpers-for-application>`__ - -- `Run instruction-following - pipeline <#run-instruction-following-pipeline>`__ - -Installation Instructions -~~~~~~~~~~~~~~~~~~~~~~~~~ - -This is a self-contained example that relies solely on its own code. - -We recommend running the notebook in a virtual environment. You only -need a Jupyter server to start. For details, please refer to -`Installation -Guide `__. - -Prerequisites -------------- - - - -First, we should install the `Hugging Face -Optimum `__ library -accelerated by OpenVINO integration. The Hugging Face Optimum Intel API -is a high-level API that enables us to convert and quantize models from -the Hugging Face Transformers library to the OpenVINO™ IR format. For -more details, refer to the `Hugging Face Optimum Intel -documentation `__. - -.. code:: ipython3 - - import os - from pathlib import Path - import requests - - os.environ["GIT_CLONE_PROTECTION_ACTIVE"] = "false" - - %pip uninstall -q -y optimum optimum-intel - %pip install --pre -Uq "openvino>=2024.2.0" openvino-tokenizers[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly - %pip install -q "diffusers>=0.16.1" "transformers>=4.33.0" "torch>=2.1" "nncf>=2.10.0" "onnx<1.16.2" "gradio>=4.19" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "git+https://github.com/huggingface/optimum-intel.git" - - - utility_files = ["notebook_utils.py", "cmd_helper.py"] - - for utility in utility_files: - local_path = Path(utility) - if not local_path.exists(): - r = requests.get( - url=f"https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/{local_path.name}", - ) - with local_path.open("w") as f: - f.write(r.text) - -Convert model using Optimum-CLI tool ------------------------------------- - - - -`Optimum Intel `__ is -the interface between the -`Transformers `__ and -`Diffusers `__ libraries -and OpenVINO to accelerate end-to-end pipelines on Intel architectures. -It provides an easy-to-use CLI interface for exporting models to `OpenVINO -Intermediate Representation -(IR) `__ -format. - -The command below demonstrates the basic usage of -``optimum-cli`` for model export: - -.. code:: bash - - optimum-cli export openvino --model --task - -where the ``--model`` argument is a model id from the HuggingFace Hub or a local -directory with the model (saved using the ``.save_pretrained`` method), and -``--task`` is one of the `supported -tasks `__ -that the exported model should solve. For LLMs it will be -``text-generation-with-past``. 
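As a concrete illustration, the same export can also be scripted from Python through the ``optimum_cli`` wrapper in the ``cmd_helper.py`` utility downloaded above. The sketch below is not part of the original notebook flow; it simply mirrors the CLI call for this notebook's model and assumes the helper forwards each ``additional_args`` entry as the corresponding ``--`` flag.

.. code:: ipython3

    # A minimal sketch, assuming cmd_helper.py (downloaded above) exposes optimum_cli
    # and turns each additional_args entry into the matching command-line flag.
    from pathlib import Path

    from cmd_helper import optimum_cli

    # Export the Dolly 2.0 checkpoint to OpenVINO IR with FP16 weights.
    optimum_cli(
        "databricks/dolly-v2-3b",        # model id on the Hugging Face Hub (or a local directory)
        Path("dolly-v2-3b") / "FP16",    # output directory for the exported IR
        additional_args={"weight-format": "fp16"},
    )

The same pattern is used later in this notebook to produce the FP16, INT8, and INT4 variants of the model.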
If model initialization requires -remote code, the ``--trust-remote-code`` flag should additionally be passed. - -Compress model weights ----------------------- - - - -The `Weights -Compression `__ -algorithm is aimed at compressing the weights of models and can be -used to optimize the model footprint and performance of large models -where the size of weights is relatively larger than the size of -activations, for example, Large Language Models (LLMs). Compared to INT8 -compression, INT4 compression improves performance even more, but -introduces a minor drop in prediction quality. - -Weights Compression using Optimum-CLI -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -You can also apply fp16, 8-bit or 4-bit weight compression on the -Linear, Convolutional and Embedding layers when exporting your model -with the CLI by setting ``--weight-format`` to respectively fp16, int8 -or int4. This type of optimization reduces the memory footprint -and inference latency. By default the quantization scheme for int8/int4 -will be -`asymmetric `__; -to make it -`symmetric `__ -you can add ``--sym``. - -For INT4 quantization you can also specify the following arguments: - -- The ``--group-size`` parameter defines the group size to use for - quantization; -1 results in per-column quantization. -- The ``--ratio`` parameter controls the ratio between 4-bit and 8-bit - quantization. If set to 0.9, it means that 90% of the layers will be - quantized to int4 while 10% will be quantized to int8. - -Smaller group_size and ratio values usually improve accuracy at the -expense of model size and inference latency. - - **Note**: There may be no speedup for INT4/INT8 compressed models on - dGPU. - -.. code:: ipython3 - - from IPython.display import display - import ipywidgets as widgets - - prepare_int4_model = widgets.Checkbox( - value=True, - description="Prepare INT4 model", - disabled=False, - ) - prepare_int8_model = widgets.Checkbox( - value=False, - description="Prepare INT8 model", - disabled=False, - ) - prepare_fp16_model = widgets.Checkbox( - value=False, - description="Prepare FP16 model", - disabled=False, - ) - - display(prepare_int4_model) - display(prepare_int8_model) - display(prepare_fp16_model) - - - -.. parsed-literal:: - - Checkbox(value=True, description='Prepare INT4 model') - - - -.. parsed-literal:: - - Checkbox(value=False, description='Prepare INT8 model') - - - -.. parsed-literal:: - - Checkbox(value=False, description='Prepare FP16 model') - - -.. 
code:: ipython3 - - from pathlib import Path - from cmd_helper import optimum_cli - - model_id = "databricks/dolly-v2-3b" - model_path = Path("dolly-v2-3b") - - fp16_model_dir = model_path / "FP16" - int8_model_dir = model_path / "INT8_compressed_weights" - int4_model_dir = model_path / "INT4_compressed_weights" - - - def convert_to_fp16(): - if (fp16_model_dir / "openvino_model.xml").exists(): - return - optimum_cli(model_id, fp16_model_dir, additional_args={"weight-format": "fp16"}) - - - def convert_to_int8(): - if (int8_model_dir / "openvino_model.xml").exists(): - return - optimum_cli(model_id, int8_model_dir, additional_args={"weight-format": "int8"}) - - - def convert_to_int4(): - if (int4_model_dir / "openvino_model.xml").exists(): - return - optimum_cli(model_id, int4_model_dir, additional_args={"weight-format": "int4"}) - - - if prepare_fp16_model.value: - convert_to_fp16() - if prepare_int8_model.value: - convert_to_int8() - if prepare_int4_model.value: - convert_to_int4() - - - -**Export command:** - - - -``optimum-cli export openvino --model databricks/dolly-v2-3b --task text-generation-with-past --weight-format int4 --ratio 1.0 --group-size 128 dolly-v2-3b/INT4_compressed_weights`` - - -.. parsed-literal:: - - 2024-07-24 11:40:56.083018: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-07-24 11:40:56.084962: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used. - 2024-07-24 11:40:56.121994: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used. - 2024-07-24 11:40:56.122347: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. - To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-07-24 11:40:56.845683: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: '/home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/torchvision/image.so: undefined symbol: _ZN3c1017RegisterOperatorsD1Ev'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? - warn( - WARNING[XFORMERS]: xFormers can't load C++/CUDA extensions. xFormers was built for: - PyTorch 2.0.1+cu118 with CUDA 1108 (you have 2.3.1+cpu) - Python 3.8.18 (you have 3.8.10) - Please reinstall xformers (see https://github.com/facebookresearch/xformers#installing-xformers) - Memory-efficient attention, SwiGLU, sparse and more won't be available. 
- Set XFORMERS_MORE_DETAILS=1 for more details - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/bitsandbytes/cextension.py:34: UserWarning: The installed version of bitsandbytes was compiled without GPU support. 8-bit optimizers, 8-bit multiplication, and GPU quantization are unavailable. - warn("The installed version of bitsandbytes was compiled without GPU support. " - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/bitsandbytes/libbitsandbytes_cpu.so: undefined symbol: cadam32bit_grad_fp32 - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - Framework not specified. Using pt to export the model. - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained. - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained. - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained. - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained. - Using framework PyTorch: 2.3.1+cpu - Overriding 1 configuration item(s) - - use_cache -> True - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/transformers/models/gpt_neox/modeling_gpt_neox.py:934: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - assert batch_size > 0, "batch_size has to be defined and > 0" - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:114: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if (input_shape[-1] > 1 or self.sliding_window is not None) and self.is_causal: - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/optimum/exporters/onnx/model_patcher.py:304: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if past_key_values_length > 0: - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/transformers/models/gpt_neox/modeling_gpt_neox.py:617: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
- if seq_len > self.max_seq_len_cached: - INFO:nncf:Statistics of the bitwidth distribution: - ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ - │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ - ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ - │ 8 │ 9% (2 / 130) │ 0% (0 / 128) │ - ├────────────────┼─────────────────────────────┼────────────────────────────────────────┤ - │ 4 │ 91% (128 / 130) │ 100% (128 / 128) │ - ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ - Applying Weight Compression ━━━━━━━━━━━━━━━━━━━ 100% 130/130 • 0:01:38 • 0:00:00;0;104;181m0:00:01181m0:00:04 - - -.. code:: ipython3 - - fp16_weights = fp16_model_dir / "openvino_model.bin" - int8_weights = int8_model_dir / "openvino_model.bin" - int4_weights = int4_model_dir / "openvino_model.bin" - - if fp16_weights.exists(): - print(f"Size of FP16 model is {fp16_weights.stat().st_size / 1024 / 1024:.2f} MB") - for precision, compressed_weights in zip([8, 4], [int8_weights, int4_weights]): - if compressed_weights.exists(): - print(f"Size of model with INT{precision} compressed weights is {compressed_weights.stat().st_size / 1024 / 1024:.2f} MB") - if compressed_weights.exists() and fp16_weights.exists(): - print(f"Compression rate for INT{precision} model: {fp16_weights.stat().st_size / compressed_weights.stat().st_size:.3f}") - - -.. parsed-literal:: - - Size of model with INT4 compressed weights is 1497.06 MB - - -Select model variant and inference device -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -select device from dropdown list for running inference using OpenVINO - -.. code:: ipython3 - - available_models = [] - if int4_model_dir.exists(): - available_models.append("INT4") - if int8_model_dir.exists(): - available_models.append("INT8") - if fp16_model_dir.exists(): - available_models.append("FP16") - - model_to_run = widgets.Dropdown( - options=available_models, - value=available_models[0], - description="Model to run:", - disabled=False, - ) - - model_to_run - - - - -.. parsed-literal:: - - Dropdown(description='Model to run:', options=('INT4',), value='INT4') - - - -.. code:: ipython3 - - from notebook_utils import device_widget - import openvino as ov - - core = ov.Core() - - device = device_widget("CPU", exclude=["NPU"]) - device - - - - -.. parsed-literal:: - - Dropdown(description='Device:', options=('CPU', 'AUTO'), value='CPU') - - - -Instantiate Model using Optimum Intel -------------------------------------- - - - -Optimum Intel can be used to load optimized models from the `Hugging -Face Hub `__ and -create pipelines to run an inference with OpenVINO Runtime using Hugging -Face APIs. The Optimum Inference models are API compatible with Hugging -Face Transformers models. This means we just need to replace -``AutoModelForXxx`` class with the corresponding ``OVModelForXxx`` -class. - -Below is an example of the Dolly model - -.. code:: diff - - -from transformers import AutoModelForCausalLM - +from optimum.intel.openvino import OVModelForCausalLM - from transformers import AutoTokenizer, pipeline - - model_id = "databricks/dolly-v2-3b" - -model = AutoModelForCausalLM.from_pretrained(model_id) - +model = OVModelForCausalLM.from_pretrained(model_id, export=True) - -Model class initialization starts with calling ``from_pretrained`` -method. 
When downloading and converting Transformers model, the -parameter ``export=True`` should be added (as we already converted model -before, we do not need to provide this parameter). We can save the -converted model for the next usage with the ``save_pretrained`` method. -Tokenizer class and pipelines API are compatible with Optimum models. - -You can find more details about OpenVINO LLM inference using HuggingFace -Optimum API in `LLM inference -guide `__. - -.. code:: ipython3 - - from pathlib import Path - - from transformers import AutoTokenizer - from optimum.intel.openvino import OVModelForCausalLM - - import openvino.properties as props - import openvino.properties.hint as hints - import openvino.properties.streams as streams - - - if model_to_run.value == "INT4": - model_dir = int4_model_dir - elif model_to_run.value == "INT8": - model_dir = int8_model_dir - else: - model_dir = fp16_model_dir - print(f"Loading model from {model_dir}") - - tokenizer = AutoTokenizer.from_pretrained(model_dir) - - current_device = device.value - - ov_config = {hints.performance_mode(): hints.PerformanceMode.LATENCY, streams.num(): "1", props.cache_dir(): ""} - - ov_model = OVModelForCausalLM.from_pretrained(model_dir, device=current_device, ov_config=ov_config) - - -.. parsed-literal:: - - 2024-07-24 11:43:17.404362: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-07-24 11:43:17.406313: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used. - 2024-07-24 11:43:17.443348: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used. - 2024-07-24 11:43:17.444995: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. - To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-07-24 11:43:18.193758: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/torchvision/io/image.py:13: UserWarning: Failed to load image Python extension: '/home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/torchvision/image.so: undefined symbol: _ZN3c1017RegisterOperatorsD1Ev'If you don't plan on using image functionality from `torchvision.io`, you can ignore this warning. Otherwise, there might be something wrong with your environment. Did you have `libjpeg` or `libpng` installed before building `torchvision` from source? - warn( - WARNING[XFORMERS]: xFormers can't load C++/CUDA extensions. xFormers was built for: - PyTorch 2.0.1+cu118 with CUDA 1108 (you have 2.3.1+cpu) - Python 3.8.18 (you have 3.8.10) - Please reinstall xformers (see https://github.com/facebookresearch/xformers#installing-xformers) - Memory-efficient attention, SwiGLU, sparse and more won't be available. 
- Set XFORMERS_MORE_DETAILS=1 for more details - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/diffusers/utils/outputs.py:63: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/bitsandbytes/cextension.py:34: UserWarning: The installed version of bitsandbytes was compiled without GPU support. 8-bit optimizers, 8-bit multiplication, and GPU quantization are unavailable. - warn("The installed version of bitsandbytes was compiled without GPU support. " - - -.. parsed-literal:: - - /home/ea/work/my_optimum_intel/optimum_env/lib/python3.8/site-packages/bitsandbytes/libbitsandbytes_cpu.so: undefined symbol: cadam32bit_grad_fp32 - Loading model from dolly-v2-3b/INT4_compressed_weights - - -.. parsed-literal:: - - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained. - Compiling the model to CPU ... - - -Create an instruction-following inference pipeline --------------------------------------------------- - - - -The ``run_generation`` function accepts user-provided text input, -tokenizes it, and runs the generation process. Text generation is an -iterative process, where each next token depends on the previously -generated tokens, and it continues until the maximum number of tokens or a -stop-generation condition is reached. To obtain intermediate generation results -without waiting until generation is finished, we will use -`TextIteratorStreamer `__, -provided as part of HuggingFace `Streaming -API `__. - -The diagram below illustrates how the instruction-following pipeline -works. - -.. figure:: https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/e881f4a4-fcc8-427a-afe1-7dd80aebd66e - :alt: generation pipeline - - generation pipeline - -As can be seen, on the first iteration the user-provided instruction is -converted to token ids using a tokenizer, and the prepared input is provided -to the model. The model generates probabilities for all tokens in logits -format. The way the next token is selected over the predicted -probabilities is driven by the selected decoding methodology. You can -find more information about the most popular decoding methods in this -`blog `__. - -There are several parameters that can control text generation quality: - -- | ``Temperature`` is a parameter used to control the level of - creativity in AI-generated text. By adjusting the ``temperature``, - you can influence the AI model’s probability distribution, making - the text more focused or diverse. 
- | Consider the following example: The AI model has to complete the - sentence “The cat is \____.” with the following token - probabilities: - - | playing: 0.5 - | sleeping: 0.25 - | eating: 0.15 - | driving: 0.05 - | flying: 0.05 - - - **Low temperature** (e.g., 0.2): The AI model becomes more focused - and deterministic, choosing tokens with the highest probability, - such as “playing.” - - **Medium temperature** (e.g., 1.0): The AI model maintains a - balance between creativity and focus, selecting tokens based on - their probabilities without significant bias, such as “playing,” - “sleeping,” or “eating.” - - **High temperature** (e.g., 2.0): The AI model becomes more - adventurous, increasing the chances of selecting less likely - tokens, such as “driving” and “flying.” - -- ``Top-p``, also known as nucleus sampling, is a parameter used to - control the range of tokens considered by the AI model based on their - cumulative probability. By adjusting the ``top-p`` value, you can - influence the AI model’s token selection, making it more focused or - diverse. Using the same example with the cat, consider the following - top_p settings: - - - **Low top_p** (e.g., 0.5): The AI model considers only tokens with - the highest cumulative probability, such as “playing.” - - **Medium top_p** (e.g., 0.8): The AI model considers tokens with a - higher cumulative probability, such as “playing,” “sleeping,” and - “eating.” - - **High top_p** (e.g., 1.0): The AI model considers all tokens, - including those with lower probabilities, such as “driving” and - “flying.” - -- ``Top-k`` is another popular sampling strategy. In comparison with - Top-P, which chooses from the smallest possible set of words whose - cumulative probability exceeds the probability P, in Top-K sampling K - most likely next words are filtered and the probability mass is - redistributed among only those K next words. In our example with cat, - if k=3, then only “playing”, “sleeping” and “eating” will be taken - into account as possible next word. - -To optimize the generation process and use memory more efficiently, the -``use_cache=True`` option is enabled. Since the output side is -auto-regressive, an output token hidden state remains the same once -computed for every further generation step. Therefore, recomputing it -every time you want to generate a new token seems wasteful. With the -cache, the model saves the hidden state once it has been computed. The -model only computes the one for the most recently generated output token -at each time step, re-using the saved ones for hidden tokens. This -reduces the generation complexity from O(n^3) to O(n^2) for a -transformer model. More details about how it works can be found in this -`article `__. -With this option, the model gets the previous step’s hidden states -(cached attention keys and values) as input and additionally provides -hidden states for the current step as output. It means for all next -iterations, it is enough to provide only a new token obtained from the -previous step and cached key values to get the next token prediction. - -The generation cycle repeats until the end of the sequence token is -reached or it also can be interrupted when maximum tokens will be -generated. As already mentioned before, we can enable printing current -generated tokens without waiting until when the whole generation is -finished using Streaming API, it adds a new token to the output queue -and then prints them when they are ready. - -Setup imports -~~~~~~~~~~~~~ - - - -.. 
code:: ipython3 - - from threading import Thread - from time import perf_counter - from typing import List - import gradio as gr - from transformers import AutoTokenizer, TextIteratorStreamer - import numpy as np - -Prepare template for user prompt -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -For effective generation, model expects to have input in specific -format. The code below prepare template for passing user instruction -into model with providing additional context. - -.. code:: ipython3 - - INSTRUCTION_KEY = "### Instruction:" - RESPONSE_KEY = "### Response:" - END_KEY = "### End" - INTRO_BLURB = "Below is an instruction that describes a task. Write a response that appropriately completes the request." - - # This is the prompt that is used for generating responses using an already trained model. It ends with the response - # key, where the job of the model is to provide the completion that follows it (i.e. the response itself). - PROMPT_FOR_GENERATION_FORMAT = """{intro} - - {instruction_key} - {instruction} - - {response_key} - """.format( - intro=INTRO_BLURB, - instruction_key=INSTRUCTION_KEY, - instruction="{instruction}", - response_key=RESPONSE_KEY, - ) - -Helpers for output parsing -~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -Model was retrained to finish generation using special token ``### End`` -the code below find its id for using it as generation stop-criteria. - -.. code:: ipython3 - - def get_special_token_id(tokenizer: AutoTokenizer, key: str) -> int: - """ - Gets the token ID for a given string that has been added to the tokenizer as a special token. - - When training, we configure the tokenizer so that the sequences like "### Instruction:" and "### End" are - treated specially and converted to a single, new token. This retrieves the token ID each of these keys map to. - - Args: - tokenizer (PreTrainedTokenizer): the tokenizer - key (str): the key to convert to a single token - - Raises: - RuntimeError: if more than one ID was generated - - Returns: - int: the token ID for the given key - """ - token_ids = tokenizer.encode(key) - if len(token_ids) > 1: - raise ValueError(f"Expected only a single token for '{key}' but found {token_ids}") - return token_ids[0] - - - tokenizer_response_key = next( - (token for token in tokenizer.additional_special_tokens if token.startswith(RESPONSE_KEY)), - None, - ) - - end_key_token_id = None - if tokenizer_response_key: - try: - end_key_token_id = get_special_token_id(tokenizer, END_KEY) - # Ensure generation stops once it generates "### End" - except ValueError: - pass - -Main generation function -~~~~~~~~~~~~~~~~~~~~~~~~ - - - -As it was discussed above, ``run_generation`` function is the entry -point for starting generation. It gets provided input instruction as -parameter and returns model response. - -.. code:: ipython3 - - def run_generation( - user_text: str, - top_p: float, - temperature: float, - top_k: int, - max_new_tokens: int, - perf_text: str, - ): - """ - Text generation function - - Parameters: - user_text (str): User-provided instruction for a generation. - top_p (float): Nucleus sampling. If set to < 1, only the smallest set of most probable tokens with probabilities that add up to top_p or higher are kept for a generation. - temperature (float): The value used to module the logits distribution. - top_k (int): The number of highest probability vocabulary tokens to keep for top-k-filtering. - max_new_tokens (int): Maximum length of generated sequence. - perf_text (str): Content of text field for printing performance results. 
- Returns: - model_output (str) - model-generated text - perf_text (str) - updated perf text filed content - """ - - # Prepare input prompt according to model expected template - prompt_text = PROMPT_FOR_GENERATION_FORMAT.format(instruction=user_text) - - # Tokenize the user text. - model_inputs = tokenizer(prompt_text, return_tensors="pt") - - # Start generation on a separate thread, so that we don't block the UI. The text is pulled from the streamer - # in the main thread. Adds timeout to the streamer to handle exceptions in the generation thread. - streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True) - generate_kwargs = dict( - model_inputs, - streamer=streamer, - max_new_tokens=max_new_tokens, - do_sample=True, - top_p=top_p, - temperature=float(temperature), - top_k=top_k, - eos_token_id=end_key_token_id, - ) - t = Thread(target=ov_model.generate, kwargs=generate_kwargs) - t.start() - - # Pull the generated text from the streamer, and update the model output. - model_output = "" - per_token_time = [] - num_tokens = 0 - start = perf_counter() - for new_text in streamer: - current_time = perf_counter() - start - model_output += new_text - perf_text, num_tokens = estimate_latency(current_time, perf_text, new_text, per_token_time, num_tokens) - yield model_output, perf_text - start = perf_counter() - return model_output, perf_text - -Helpers for application -~~~~~~~~~~~~~~~~~~~~~~~ - - - -For making interactive user interface we will use Gradio library. The -code bellow provides useful functions used for communication with UI -elements. - -.. code:: ipython3 - - def estimate_latency( - current_time: float, - current_perf_text: str, - new_gen_text: str, - per_token_time: List[float], - num_tokens: int, - ): - """ - Helper function for performance estimation - - Parameters: - current_time (float): This step time in seconds. - current_perf_text (str): Current content of performance UI field. - new_gen_text (str): New generated text. - per_token_time (List[float]): history of performance from previous steps. - num_tokens (int): Total number of generated tokens. - - Returns: - update for performance text field - update for a total number of tokens - """ - num_current_toks = len(tokenizer.encode(new_gen_text)) - num_tokens += num_current_toks - per_token_time.append(num_current_toks / current_time) - if len(per_token_time) > 10 and len(per_token_time) % 4 == 0: - current_bucket = per_token_time[:-10] - return ( - f"Average generation speed: {np.mean(current_bucket):.2f} tokens/s. Total generated tokens: {num_tokens}", - num_tokens, - ) - return current_perf_text, num_tokens - - - def select_device(device_str: str, current_text: str = "", progress: gr.Progress = gr.Progress()): - """ - Helper function for uploading model on the device. - - Parameters: - device_str (str): Device name. - current_text (str): Current content of user instruction field (used only for backup purposes, temporally replacing it on the progress bar during model loading). - progress (gr.Progress): gradio progress tracker - Returns: - current_text - """ - if device_str != ov_model._device: - ov_model.request = None - ov_model._device = device_str - - for i in progress.tqdm(range(1), desc=f"Model loading on {device_str}"): - ov_model.compile() - return current_text - -Run instruction-following pipeline ----------------------------------- - - - -Now, we are ready to explore model capabilities. 
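Before wiring the pipeline into a UI, it can be useful to sanity-check it directly. The fragment below is a minimal sketch, not part of the original notebook: it assumes ``run_generation`` and the objects it relies on are already defined above, and the instruction text and sampling values are arbitrary placeholders.

.. code:: ipython3

    # A minimal sketch: drain the run_generation generator without the Gradio UI.
    # The instruction text and sampling parameters below are illustrative only.
    instruction = "Explain in two sentences what weight compression does."

    final_output = ""
    for partial_output, perf in run_generation(
        user_text=instruction,
        top_p=0.92,
        temperature=0.8,
        top_k=50,
        max_new_tokens=128,
        perf_text="",
    ):
        final_output = partial_output  # each iteration yields the text generated so far

    print(final_output)

For interactive use, the Gradio demo described next provides the same controls through UI widgets.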
This demo provides a -simple interface that allows communication with a model using text -instruction. Type your instruction into the ``User instruction`` field -or select one from predefined examples and click on the ``Submit`` -button to start generation. Additionally, you can modify advanced -generation parameters: - -- ``Device`` - allows switching inference device. Please note, every - time when new device is selected, model will be recompiled and this - takes some time. -- ``Max New Tokens`` - maximum size of generated text. -- ``Top-p (nucleus sampling)`` - if set to < 1, only the smallest set - of most probable tokens with probabilities that add up to top_p or - higher are kept for a generation. -- ``Top-k`` - the number of highest probability vocabulary tokens to - keep for top-k-filtering. -- ``Temperature`` - the value used to module the logits distribution. - -.. code:: ipython3 - - import requests - - if not Path("gradio_helper.py").exists(): - r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/dolly-2-instruction-following/gradio_helper.py") - open("gradio_helper.py", "w").write(r.text) - - from gradio_helper import make_demo - - demo = make_demo(run_fn=run_generation, select_device_fn=select_device) - - try: - demo.queue().launch(debug=False, height=800) - except Exception: - demo.queue().launch(debug=False, share=True, height=800) - # If you are launching remotely, specify server_name and server_port - # EXAMPLE: `demo.launch(server_name='your server name', server_port='server port in int')` - # To learn more please refer to the Gradio docs: https://gradio.app/docs/ diff --git a/docs/notebooks/dynamicrafter-animating-images-with-output.rst b/docs/notebooks/dynamicrafter-animating-images-with-output.rst index 992c346194e31c..13b4c9475f7092 100644 --- a/docs/notebooks/dynamicrafter-animating-images-with-output.rst +++ b/docs/notebooks/dynamicrafter-animating-images-with-output.rst @@ -151,57 +151,29 @@ Prerequisites %pip install -q "openvino>=2024.2.0" "nncf>=2.11.0" "datasets>=2.20.0" %pip install -q "gradio>=4.19" omegaconf einops pytorch_lightning kornia "open_clip_torch==2.22.0" transformers av opencv-python "torch==2.2.2" --extra-index-url https://download.pytorch.org/whl/cpu - -.. parsed-literal:: - - Note: you may need to restart the kernel to use updated packages. - Note: you may need to restart the kernel to use updated packages. - - .. code:: ipython3 - import sys from pathlib import Path import requests - dynamicrafter_path = Path("dynamicrafter") - - if not dynamicrafter_path.exists(): - dynamicrafter_path.mkdir(parents=True, exist_ok=True) - !git clone https://github.com/Doubiiu/DynamiCrafter.git dynamicrafter - %cd dynamicrafter - !git checkout 26e665cd6c174234238d2ded661e2e56f875d360 -q # to avoid breaking changes - %cd .. - - sys.path.append(str(dynamicrafter_path)) + if not Path("cmd_helper.py").exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) r = requests.get( url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", ) open("notebook_utils.py", "w").write(r.text) +.. code:: ipython3 -.. parsed-literal:: - - Cloning into 'dynamicrafter'... - remote: Enumerating objects: 335, done. - remote: Counting objects: 100% (153/153), done. - remote: Compressing objects: 100% (99/99), done. 
- remote: Total 335 (delta 97), reused 54 (delta 54), pack-reused 182 (from 1) - Receiving objects: 100% (335/335), 72.41 MiB | 20.85 MiB/s, done. - Resolving deltas: 100% (123/123), done. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images/dynamicrafter - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images - - - - -.. parsed-literal:: - - 24692 - - + from cmd_helper import clone_repo + + + clone_repo("https://github.com/Doubiiu/DynamiCrafter.git", "26e665cd6c174234238d2ded661e2e56f875d360") Load and run the original pipeline ---------------------------------- @@ -221,7 +193,7 @@ We will use model for 256x256 resolution as example. Also, models for from huggingface_hub import hf_hub_download from omegaconf import OmegaConf - from dynamicrafter.utils.utils import instantiate_from_config + from utils.utils import instantiate_from_config def load_model_checkpoint(model, ckpt): @@ -280,38 +252,11 @@ We will use model for 256x256 resolution as example. Also, models for model = download_model() -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/huggingface_hub/file_download.py:834: UserWarning: `local_dir_use_symlinks` parameter is deprecated and will be ignored. The process to download files to a local folder has been updated and do not rely on symlinks anymore. You only need to pass a destination folder as`local_dir`. - For more details, check out https://huggingface.co/docs/huggingface_hub/main/en/guides/download#download-files-to-local-folder. - warnings.warn( - - - -.. parsed-literal:: - - model.ckpt: 0%| | 0.00/10.4G [00:00>> model checkpoint loaded. - + Convert the model to OpenVINO IR -------------------------------- @@ -363,7 +308,7 @@ Convert CLIP text encoder .. code:: ipython3 - from dynamicrafter.lvdm.modules.encoders.condition import FrozenOpenCLIPEmbedder + from lvdm.modules.encoders.condition import FrozenOpenCLIPEmbedder MODEL_DIR = Path("models") @@ -388,17 +333,6 @@ Convert CLIP text encoder del cond_stage_model gc.collect(); - -.. parsed-literal:: - - WARNING:tensorflow:Please fix your imports. Module tensorflow.python.training.tracking.base has been moved to tensorflow.python.trackable.base. The old module will be deleted in version 2.11. - - -.. parsed-literal:: - - WARNING:tensorflow:Please fix your imports. Module tensorflow.python.training.tracking.base has been moved to tensorflow.python.trackable.base. The old module will be deleted in version 2.11. - - Convert CLIP image encoder ~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -421,49 +355,6 @@ resolutions. del model.embedder gc.collect(); - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/utils/image.py:226: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
- if input.numel() == 0: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/geometry/transform/affwarp.py:573: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if size == input_size: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/geometry/transform/affwarp.py:579: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - antialias = antialias and (max(factors) > 1) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/geometry/transform/affwarp.py:581: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if antialias: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/geometry/transform/affwarp.py:584: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - sigmas = (max((factors[0] - 1.0) / 2.0, 0.001), max((factors[1] - 1.0) / 2.0, 0.001)) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/geometry/transform/affwarp.py:589: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - ks = int(max(2.0 * 2 * sigmas[0], 3)), int(max(2.0 * 2 * sigmas[1], 3)) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/geometry/transform/affwarp.py:589: TracerWarning: Converting a tensor to a Python integer might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - ks = int(max(2.0 * 2 * sigmas[0], 3)), int(max(2.0 * 2 * sigmas[1], 3)) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/filters/gaussian.py:55: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. 
In any other case, this might cause the trace to be incorrect. - sigma = tensor([sigma], device=input.device, dtype=input.dtype) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/filters/gaussian.py:55: TracerWarning: Converting a tensor to a Python float might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - sigma = tensor([sigma], device=input.device, dtype=input.dtype) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/core/check.py:78: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if x_shape_to_check[i] != dim: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/filters/kernels.py:92: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect. - mean = tensor([[mean]], device=sigma.device, dtype=sigma.dtype) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:101: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if len(mean.shape) == 0 or mean.shape[0] == 1: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:103: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if len(std.shape) == 0 or std.shape[0] == 1: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:107: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if mean.shape and mean.shape[0] != 1: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:108: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. 
This means that the trace might not generalize to other inputs! - if mean.shape[0] != data.shape[1] and mean.shape[:2] != data.shape[:2]: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:112: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if std.shape and std.shape[0] != 1: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:113: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if std.shape[0] != data.shape[1] and std.shape[:2] != data.shape[:2]: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:116: TracerWarning: torch.as_tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect. - mean = torch.as_tensor(mean, device=data.device, dtype=data.dtype) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/kornia/enhance/normalize.py:117: TracerWarning: torch.as_tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect. - std = torch.as_tensor(std, device=data.device, dtype=data.dtype) - - Convert AE encoder ~~~~~~~~~~~~~~~~~~ @@ -486,13 +377,6 @@ Convert AE encoder del model.first_stage_model.encoder gc.collect(); - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images/dynamicrafter/lvdm/modules/networks/ae_modules.py:67: TracerWarning: Converting a tensor to a Python integer might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - w_ = w_ * (int(c)**(-0.5)) - - Convert Diffusion U-Net model ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -530,21 +414,6 @@ Convert Diffusion U-Net model del model.model.diffusion_model gc.collect(); - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images/dynamicrafter/lvdm/modules/networks/openaimodel3d.py:556: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. 
This means that the trace might not generalize to other inputs! - if l_context == 77 + t*16: ## !!! HARD CODE here - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images/dynamicrafter/lvdm/modules/networks/openaimodel3d.py:205: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if batch_size: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images/dynamicrafter/lvdm/modules/networks/openaimodel3d.py:232: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if self.use_temporal_conv and batch_size: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images/dynamicrafter/lvdm/modules/networks/openaimodel3d.py:76: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - assert x.shape[1] == self.channels - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/dynamicrafter-animating-images/dynamicrafter/lvdm/modules/networks/openaimodel3d.py:99: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - assert x.shape[1] == self.channels - - Convert AE decoder ~~~~~~~~~~~~~~~~~~ @@ -928,15 +797,15 @@ Run OpenVINO pipeline inference .. parsed-literal:: Seed set to 234 - /tmp/ipykernel_511478/2451984876.py:25: UserWarning: The given NumPy array is not writable, and PyTorch does not support non-writable tensors. This means writing to this tensor will result in undefined behavior. You may want to copy the array to protect its data or make it writable before converting it to a tensor. This type of warning will be suppressed for the rest of this program. (Triggered internally at ../torch/csrc/utils/tensor_numpy.cpp:206.) + /tmp/ipykernel_971108/2451984876.py:25: UserWarning: The given NumPy array is not writable, and PyTorch does not support non-writable tensors. This means writing to this tensor will result in undefined behavior. You may want to copy the array to protect its data or make it writable before converting it to a tensor. This type of warning will be suppressed for the rest of this program. (Triggered internally at ../torch/csrc/utils/tensor_numpy.cpp:206.) img_tensor = torch.from_numpy(image).permute(2, 0, 1).float().to(model.device) - + .. parsed-literal:: - start: man fishing in a boat at sunset 2024-11-04 23:26:56 - Saved in man_fishing_in_a_boat_at_sunset.mp4. 
Time used: 206.55 seconds - + start: man fishing in a boat at sunset 2024-08-06 13:54:24 + Saved in man_fishing_in_a_boat_at_sunset.mp4. Time used: 164.28 seconds + .. code:: ipython3 @@ -959,7 +828,7 @@ Run OpenVINO pipeline inference - + @@ -1131,19 +1000,6 @@ To collect intermediate model inputs for calibration we should customize 0%| | 0/300 [00:00>> model checkpoint loaded. - + .. code:: ipython3 @@ -1360,13 +1317,13 @@ Let’s run the optimized pipeline .. parsed-literal:: Seed set to 234 - + .. parsed-literal:: - start: man fishing in a boat at sunset 2024-11-05 00:58:08 - Saved in man_fishing_in_a_boat_at_sunset.mp4. Time used: 97.78 seconds - + start: man fishing in a boat at sunset 2024-08-06 15:09:26 + Saved in man_fishing_in_a_boat_at_sunset.mp4. Time used: 81.47 seconds + .. code:: ipython3 @@ -1388,7 +1345,7 @@ Let’s run the optimized pipeline - + Compare model file sizes @@ -1416,7 +1373,7 @@ Compare model file sizes encoder_first_stage_ir compression rate: 3.986 embedder_ir compression rate: 3.977 model_ir compression rate: 3.981 - + Compare inference time of the FP32 and INT8 models ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -1470,10 +1427,10 @@ models, we use median inference time on calibration subset. .. parsed-literal:: - FP32 latency: 201.526 - INT8 latency: 96.036 - Performance speed up: 2.098 - + FP32 latency: 162.304 + INT8 latency: 79.590 + Performance speed up: 2.039 + Interactive inference --------------------- @@ -1497,15 +1454,6 @@ to launch the interactive demo. use_quantized_models - - - -.. parsed-literal:: - - Checkbox(value=True, description='Use quantized models') - - - .. code:: ipython3 from functools import partial @@ -1524,23 +1472,9 @@ to launch the interactive demo. demo = make_demo(fn=get_image_fn) try: - demo.queue().launch(debug=False) + demo.queue().launch(debug=True) except Exception: - demo.queue().launch(debug=False, share=True) + demo.queue().launch(debug=True, share=True) # if you are launching remotely, specify server_name and server_port # demo.launch(server_name='your server name', server_port='server port in int') # Read more in the docs: https://gradio.app/docs/ - - -.. parsed-literal:: - - Running on local URL: http://127.0.0.1:7860 - - To create a public link, set `share=True` in `launch()`. - - - - - - - diff --git a/docs/notebooks/efficient-sam-with-output.rst b/docs/notebooks/efficient-sam-with-output.rst index b50b82341f4af8..2341db94e22f68 100644 --- a/docs/notebooks/efficient-sam-with-output.rst +++ b/docs/notebooks/efficient-sam-with-output.rst @@ -92,39 +92,47 @@ Prerequisites .. code:: ipython3 + import requests from pathlib import Path - repo_dir = Path("EfficientSAM") - if not repo_dir.exists(): - !git clone https://github.com/yformer/EfficientSAM.git - %cd $repo_dir + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) + + .. parsed-literal:: - Cloning into 'EfficientSAM'... - remote: Enumerating objects: 424, done. - remote: Counting objects: 100% (85/85), done. - remote: Compressing objects: 100% (33/33), done. - remote: Total 424 (delta 76), reused 52 (delta 52), pack-reused 339 (from 1) - Receiving objects: 100% (424/424), 262.14 MiB | 23.37 MiB/s, done. - Resolving deltas: 100% (246/246), done. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM + 1491 + .. 
code:: ipython3 - import requests + from cmd_helper import clone_repo + + + repo_dir = clone_repo("https://github.com/yformer/EfficientSAM.git") + + %cd $repo_dir + r = requests.get( url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", ) - open("notebook_utils.py", "w").write(r.text) from notebook_utils import download_file, device_widget, quantization_widget # noqa: F401 + +.. parsed-literal:: + + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM + + Load PyTorch model ------------------ @@ -377,23 +385,23 @@ disk using ``openvino.save_model``. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:220: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:220: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if ( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:241: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:241: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert ( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:163: TracerWarning: Converting a tensor to a Python float might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:163: TracerWarning: Converting a tensor to a Python float might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
size = int(math.sqrt(xy_num)) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert size * size == xy_num - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:166: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:166: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if size != h or size != w: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:251: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam_encoder.py:251: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert x.shape[2] == num_patches - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:85: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:85: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if num_pts > self.decoder_max_num_input_points: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:92: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:92: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! elif num_pts < self.decoder_max_num_input_points: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:126: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam/EfficientSAM/efficient_sam/efficient_sam.py:126: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if output_w > 0 and output_h > 0: @@ -640,10 +648,10 @@ architecture type, we should specify ``transformer`` in ``model_type``. .. parsed-literal:: - 2024-11-05 01:15:40.935673: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 01:15:40.968460: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 00:51:57.265752: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 00:51:57.297997: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. 
To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 01:15:41.606156: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 00:51:57.938257: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT @@ -810,7 +818,7 @@ models, we use ``bencmark_app``. [ WARNING ] Performance hint was not explicitly specified in command line. Device(AUTO) performance hint will be set to PerformanceMode.THROUGHPUT. [Step 4/11] Reading model files [ INFO ] Loading model files - [ INFO ] Read model took 30.24 ms + [ INFO ] Read model took 29.71 ms [ INFO ] Original model I/O parameters: [ INFO ] Model inputs: [ INFO ] batched_images (node: batched_images) : f32 / [...] / [?,?,?,?] @@ -830,7 +838,7 @@ models, we use ``bencmark_app``. [ INFO ] ***NO_NAME*** (node: aten::reshape/Reshape_3) : f32 / [...] / [?,?,?,?,?] [ INFO ] ***NO_NAME*** (node: aten::reshape/Reshape_2) : f32 / [...] / [?,?,?] [Step 7/11] Loading the model to the device - [ INFO ] Compile model took 1388.43 ms + [ INFO ] Compile model took 1398.31 ms [Step 8/11] Querying optimal runtime parameters [ INFO ] Model: [ INFO ] NETWORK_NAME: Model0 @@ -871,17 +879,17 @@ models, we use ``bencmark_app``. [ INFO ] Fill input 'batched_point_labels' with random values [Step 10/11] Measuring performance (Start inference asynchronously, 6 inference requests, limits: 15000 ms duration) [ INFO ] Benchmarking in full mode (inputs filling are included in measurement loop). - [ INFO ] First inference took 798.46 ms + [ INFO ] First inference took 793.15 ms [Step 11/11] Dumping statistics report [ INFO ] Execution Devices:['CPU'] - [ INFO ] Count: 49 iterations - [ INFO ] Duration: 16827.30 ms + [ INFO ] Count: 55 iterations + [ INFO ] Duration: 17124.15 ms [ INFO ] Latency: - [ INFO ] Median: 2025.54 ms - [ INFO ] Average: 1991.09 ms - [ INFO ] Min: 816.09 ms - [ INFO ] Max: 2176.67 ms - [ INFO ] Throughput: 2.91 FPS + [ INFO ] Median: 1829.15 ms + [ INFO ] Average: 1806.67 ms + [ INFO ] Min: 872.57 ms + [ INFO ] Max: 2037.03 ms + [ INFO ] Throughput: 3.21 FPS .. code:: ipython3 @@ -907,7 +915,7 @@ models, we use ``bencmark_app``. [ WARNING ] Performance hint was not explicitly specified in command line. Device(AUTO) performance hint will be set to PerformanceMode.THROUGHPUT. [Step 4/11] Reading model files [ INFO ] Loading model files - [ INFO ] Read model took 43.95 ms + [ INFO ] Read model took 43.85 ms [ INFO ] Original model I/O parameters: [ INFO ] Model inputs: [ INFO ] batched_images (node: batched_images) : f32 / [...] / [?,?,?,?] @@ -927,7 +935,7 @@ models, we use ``bencmark_app``. [ INFO ] ***NO_NAME*** (node: aten::reshape/Reshape_3) : f32 / [...] / [?,?,?,?,?] [ INFO ] ***NO_NAME*** (node: aten::reshape/Reshape_2) : f32 / [...] / [?,?,?] [Step 7/11] Loading the model to the device - [ INFO ] Compile model took 1607.96 ms + [ INFO ] Compile model took 1631.76 ms [Step 8/11] Querying optimal runtime parameters [ INFO ] Model: [ INFO ] NETWORK_NAME: Model0 @@ -968,17 +976,17 @@ models, we use ``bencmark_app``. [ INFO ] Fill input 'batched_point_labels' with random values [Step 10/11] Measuring performance (Start inference asynchronously, 6 inference requests, limits: 15000 ms duration) [ INFO ] Benchmarking in full mode (inputs filling are included in measurement loop). 
- [ INFO ] First inference took 596.94 ms + [ INFO ] First inference took 583.55 ms [Step 11/11] Dumping statistics report [ INFO ] Execution Devices:['CPU'] - [ INFO ] Count: 55 iterations - [ INFO ] Duration: 15959.69 ms + [ INFO ] Count: 56 iterations + [ INFO ] Duration: 16266.69 ms [ INFO ] Latency: - [ INFO ] Median: 1701.74 ms - [ INFO ] Average: 1692.86 ms - [ INFO ] Min: 653.76 ms - [ INFO ] Max: 1817.85 ms - [ INFO ] Throughput: 3.45 FPS + [ INFO ] Median: 1710.59 ms + [ INFO ] Average: 1692.97 ms + [ INFO ] Min: 713.08 ms + [ INFO ] Max: 1952.47 ms + [ INFO ] Throughput: 3.44 FPS Interactive segmentation demo @@ -1308,7 +1316,7 @@ Interactive segmentation demo .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/efficient-sam Running on local URL: http://127.0.0.1:7860 To create a public link, set `share=True` in `launch()`. diff --git a/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_17_1.png b/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_17_1.png index 9f65fa9db4554a..f9dfb53e3b8796 100644 --- a/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_17_1.png +++ b/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_17_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9368b1fbd458d1e022a768f24e689af0fd6e5dacc98a920f45d3fc0f63062567 -size 1259373 +oid sha256:cffb9233e156bb558299a8c9bd3931dad6999f9bf7f358b208549949411460d1 +size 1259114 diff --git a/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_25_1.png b/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_25_1.png index 7c0716600906a1..108e6e0e4564e0 100644 --- a/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_25_1.png +++ b/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_25_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:22f0e5bfd74e7426218d2bd007f9219433556530ddb10f33b9706398eb7cd370 -size 1263404 +oid sha256:5760726cd720e435c5d3a85315e772a741d583553996d8cfe7833f5d941e79f3 +size 1260778 diff --git a/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_36_1.png b/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_36_1.png index 0a717e2c9aa38d..c767ab3d6193bd 100644 --- a/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_36_1.png +++ b/docs/notebooks/efficient-sam-with-output_files/efficient-sam-with-output_36_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d1863ccc9483f6cbd60768b311d104ee68692c3a7181e06da4bc751b52cf0ca1 -size 1262535 +oid sha256:3909739937c5c50e2b26b3cba0b8b30e98e13fee3eab6c4f382735ec82ae9250 +size 1261525 diff --git a/docs/notebooks/encodec-audio-compression-with-output.rst b/docs/notebooks/encodec-audio-compression-with-output.rst index 7f0e153ffa4a55..4cf2479f638656 100644 --- a/docs/notebooks/encodec-audio-compression-with-output.rst +++ b/docs/notebooks/encodec-audio-compression-with-output.rst @@ -67,7 +67,7 @@ Install required dependencies: .. 
code:: ipython3 - %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu "openvino>=2023.3.0" "torch>=2.1" "torchaudio>=2.1" "encodec>=0.1.1" "gradio>=4.19" "librosa>=0.8.1" "matplotlib<=3.7" tqdm + %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu "openvino>=2023.3.0" "torch>=2.1" "torchaudio>=2.1" "encodec>=0.1.1" "gradio>=4.19" "librosa>=0.8.1" "matplotlib>=3.4" tqdm .. parsed-literal:: @@ -142,7 +142,7 @@ bandwidth. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:134: FutureWarning: `torch.nn.utils.weight_norm` is deprecated in favor of `torch.nn.utils.parametrizations.weight_norm`. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:134: FutureWarning: `torch.nn.utils.weight_norm` is deprecated in favor of `torch.nn.utils.parametrizations.weight_norm`. WeightNorm.apply(module, name, dim) @@ -302,7 +302,7 @@ similar as possible to the original. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:134: FutureWarning: `torch.nn.utils.weight_norm` is deprecated in favor of `torch.nn.utils.parametrizations.weight_norm`. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:134: FutureWarning: `torch.nn.utils.weight_norm` is deprecated in favor of `torch.nn.utils.parametrizations.weight_norm`. WeightNorm.apply(module, name, dim) @@ -402,13 +402,13 @@ with ``ov.save_model``. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:60: TracerWarning: Converting a tensor to a Python float might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:60: TracerWarning: Converting a tensor to a Python float might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! ideal_length = (math.ceil(n_frames) - 1) * stride + (kernel_size - padding_total) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:85: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:85: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert padding_left >= 0 and padding_right >= 0, (padding_left, padding_right) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:87: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:87: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! max_pad = max(padding_left, padding_right) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:89: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:89: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if length <= max_pad: @@ -428,11 +428,11 @@ with ``ov.save_model``. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/quantization/core_vq.py:358: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/quantization/core_vq.py:358: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect. 
quantized_out = torch.tensor(0.0, device=q_indices.device) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/quantization/core_vq.py:359: TracerWarning: Iterating over a tensor might cause the trace to be incorrect. Passing a tensor of different shape won't change the number of iterations executed (and might lead to errors or silently give incorrect results). + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/quantization/core_vq.py:359: TracerWarning: Iterating over a tensor might cause the trace to be incorrect. Passing a tensor of different shape won't change the number of iterations executed (and might lead to errors or silently give incorrect results). for i, indices in enumerate(q_indices): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:103: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/encodec/modules/conv.py:103: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
assert (padding_left + padding_right) <= x.shape[-1] diff --git a/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_19_1.png b/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_19_1.png index 0aeedba5d00a83..9f01201bccd659 100644 --- a/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_19_1.png +++ b/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_19_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:160e17b680bd3d5e8ae8d05736f6c8794af22597097cc8481d0986915fe9d696 +oid sha256:a031358d39936f6ccdb1e4e8c9eb8ddda651384ecf7d95fbe6c2dc1f7e65be95 size 44175 diff --git a/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_38_1.png b/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_38_1.png index dfab67e44f9be0..d157f39a8fc143 100644 --- a/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_38_1.png +++ b/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_38_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:aea9089d7a4630b53481b1277bbf8e7f52f1c992ed61d1e998250980f59df5ab +oid sha256:f2800c74996f567b92758358b136cc2acab70b48ea628ac392e59cecc1c416a3 size 44186 diff --git a/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_6_2.png b/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_6_2.png index a8af4e5b6153b9..93baa1aa5eeea6 100644 --- a/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_6_2.png +++ b/docs/notebooks/encodec-audio-compression-with-output_files/encodec-audio-compression-with-output_6_2.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:2df9c2103837505ffcf5543e55a8d1589385ddb5e73b917d5efe9a6ebfd0368c +oid sha256:491264f7b803244b0230b7a7bebee6b81da547541ccf928fbae1c9c0af719451 size 44933 diff --git a/docs/notebooks/explainable-ai-1-basic-with-output.rst b/docs/notebooks/explainable-ai-1-basic-with-output.rst index 1df31312fd752f..4dd115d7983c64 100644 --- a/docs/notebooks/explainable-ai-1-basic-with-output.rst +++ b/docs/notebooks/explainable-ai-1-basic-with-output.rst @@ -66,8 +66,6 @@ Guide =2024.2.0" opencv-python tqdm @@ -76,10 +74,7 @@ Guide =3.4" - else: - %pip install -q "matplotlib>=3.4,<3.7" + %pip install -q "matplotlib>=3.4" Imports ------- diff --git a/docs/notebooks/explainable-ai-2-deep-dive-with-output.rst b/docs/notebooks/explainable-ai-2-deep-dive-with-output.rst index 4e2ad0970661d2..c0722b01a9c9b4 100644 --- a/docs/notebooks/explainable-ai-2-deep-dive-with-output.rst +++ b/docs/notebooks/explainable-ai-2-deep-dive-with-output.rst @@ -116,10 +116,7 @@ Install requirements %pip install -q -U "numpy==1.*" %pip install -q scipy - if platform.system() != "Windows": - %pip install -q "matplotlib>=3.4" - else: - %pip install -q "matplotlib>=3.4,<3.7" + %pip install -q "matplotlib>=3.4" Imports ~~~~~~~ diff --git a/docs/notebooks/explainable-ai-3-map-interpretation-with-output.rst b/docs/notebooks/explainable-ai-3-map-interpretation-with-output.rst index 537ae36f6a331c..b26064fcf12e27 100644 --- a/docs/notebooks/explainable-ai-3-map-interpretation-with-output.rst +++ 
b/docs/notebooks/explainable-ai-3-map-interpretation-with-output.rst @@ -115,10 +115,7 @@ Install requirements %pip install -q -U "numpy==1.*" %pip install -q scipy - if platform.system() != "Windows": - %pip install -q "matplotlib>=3.4" - else: - %pip install -q "matplotlib>=3.4,<3.7" + %pip install -q "matplotlib>=3.4" Imports ~~~~~~~ diff --git a/docs/notebooks/fast-segment-anything-with-output.rst b/docs/notebooks/fast-segment-anything-with-output.rst index e0f20e0f79974b..9becf2719559bc 100644 --- a/docs/notebooks/fast-segment-anything-with-output.rst +++ b/docs/notebooks/fast-segment-anything-with-output.rst @@ -158,7 +158,7 @@ model and generate a segmentation map. .. parsed-literal:: - 100%|██████████| 138M/138M [00:02<00:00, 67.7MB/s] + 100%|██████████| 138M/138M [00:02<00:00, 48.9MB/s] @@ -170,8 +170,8 @@ model and generate a segmentation map. .. parsed-literal:: - image 1/1 /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/fast-segment-anything/coco_bike.jpg: 768x1024 37 objects, 728.3ms - Speed: 3.1ms preprocess, 728.3ms inference, 768.2ms postprocess per image at shape (1, 3, 768, 1024) + image 1/1 /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/fast-segment-anything/coco_bike.jpg: 768x1024 37 objects, 642.9ms + Speed: 3.9ms preprocess, 642.9ms inference, 771.9ms postprocess per image at shape (1, 3, 768, 1024) The model returns segmentation maps for all the objects on the image. @@ -214,10 +214,10 @@ tracing. The FastSAM model itself is based on YOLOv8 model. PyTorch: starting from 'FastSAM-x.pt' with input shape (1, 3, 1024, 1024) BCHW and output shape(s) ((1, 37, 21504), (1, 32, 256, 256)) (138.3 MB) OpenVINO: starting export with openvino 2024.4.0-16579-c3152d32c9c-releases/2024/4... - OpenVINO: export success ✅ 6.2s, saved as 'FastSAM-x_openvino_model/' (276.1 MB) + OpenVINO: export success ✅ 6.1s, saved as 'FastSAM-x_openvino_model/' (276.1 MB) Export complete (9.1s) - Results saved to /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/fast-segment-anything + Results saved to /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/fast-segment-anything Predict: yolo predict task=segment model=FastSAM-x_openvino_model imgsz=1024 Validate: yolo val task=segment model=FastSAM-x_openvino_model imgsz=1024 data=ultralytics/datasets/sa.yaml Visualize: https://netron.app @@ -321,8 +321,8 @@ pipeline. .. parsed-literal:: - image 1/1 /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/fast-segment-anything/coco_bike.jpg: 1024x1024 42 objects, 504.9ms - Speed: 5.8ms preprocess, 504.9ms inference, 31.6ms postprocess per image at shape (1, 3, 1024, 1024) + image 1/1 /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/fast-segment-anything/coco_bike.jpg: 1024x1024 42 objects, 494.2ms + Speed: 6.6ms preprocess, 494.2ms inference, 30.3ms postprocess per image at shape (1, 3, 1024, 1024) One can observe the converted model outputs in the next cell, they is @@ -615,8 +615,8 @@ calibration dataset to measure the performance. .. parsed-literal:: - Segmented in 69 seconds. - Resulting in 1.86 fps + Segmented in 72 seconds. 
+ Resulting in 1.78 fps .. code:: ipython3 @@ -643,9 +643,9 @@ calibration dataset to measure the performance. .. parsed-literal:: - Segmented in 22 seconds - Resulting in 5.82 fps - That is 3.14 times faster! + Segmented in 23 seconds + Resulting in 5.57 fps + That is 3.13 times faster! Try out the converted pipeline diff --git a/docs/notebooks/film-slowmo-with-output.rst b/docs/notebooks/film-slowmo-with-output.rst index 0f5c9c7ba8c0d6..33d915ff72c326 100644 --- a/docs/notebooks/film-slowmo-with-output.rst +++ b/docs/notebooks/film-slowmo-with-output.rst @@ -79,7 +79,6 @@ Prerequisites .. code:: ipython3 - import platform import os %pip install -q "tensorflow-macos>=2.5; sys_platform == 'darwin' and platform_machine == 'arm64' and python_version > '3.8'" # macOS M1 and M2 @@ -92,10 +91,7 @@ Prerequisites %pip install -q tensorflow_hub tf_keras numpy "opencv-python" tqdm "gradio>=4.19" Pillow "openvino>=2023.2.0" - if platform.system() != "Windows": - %pip install -q "matplotlib>=3.4" - else: - %pip install -q "matplotlib>=3.4,<3.7" + %pip install -q "matplotlib>=3.4" .. code:: ipython3 diff --git a/docs/notebooks/florence2-with-output.rst b/docs/notebooks/florence2-with-output.rst index e929a95fb182c1..e4ab6fbcbd3a3b 100644 --- a/docs/notebooks/florence2-with-output.rst +++ b/docs/notebooks/florence2-with-output.rst @@ -100,10 +100,10 @@ available model. By default, we will use .. parsed-literal:: - 2024-11-05 01:28:54.034484: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 01:28:54.069316: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 01:05:34.426758: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:05:34.462006: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 01:28:54.728430: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 01:05:35.115966: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT @@ -193,96 +193,96 @@ pipeline. .. parsed-literal:: - SUPPORT.md: 0%| | 0.00/1.24k [00:00 1 or self.sliding_window is not None: /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/chkpt/modeling_florence2.py:1205: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
is_causal = True if self.is_causal and attention_mask is None and tgt_len > 1 else False @@ -382,7 +382,7 @@ Run model inference -``OvFlorence@Model`` class defined in ``ov_florence2_helper.py`` +``OvFlorence2Model`` class defined in ``ov_florence2_helper.py`` provides convenient way for running model. It accepts directory with converted model and inference device as arguments. For running model we will use ``generate`` method. diff --git a/docs/notebooks/florence2-with-output_files/florence2-with-output_18_0.png b/docs/notebooks/florence2-with-output_files/florence2-with-output_18_0.png index 37d11a47fd30c9..c233468fe95f4e 100644 --- a/docs/notebooks/florence2-with-output_files/florence2-with-output_18_0.png +++ b/docs/notebooks/florence2-with-output_files/florence2-with-output_18_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:d85b3df68708172ed849a9e182bdec6a94f0174643833bd8cc7184ac0d090fae -size 259636 +oid sha256:d15ed97d6e50919caff2aee785bc4c90f91dcfcc9bb248f70e9d79bb203be64f +size 259663 diff --git a/docs/notebooks/freevc-voice-conversion-with-output.rst b/docs/notebooks/freevc-voice-conversion-with-output.rst index fe2ac780f5cca6..eb1dffbcf5da08 100644 --- a/docs/notebooks/freevc-voice-conversion-with-output.rst +++ b/docs/notebooks/freevc-voice-conversion-with-output.rst @@ -82,44 +82,43 @@ Install extra requirements Note: you may need to restart the kernel to use updated packages. -Check if FreeVC is installed and append its path to ``sys.path`` - .. code:: ipython3 - from pathlib import Path - import sys + # Fetch `notebook_utils` module + import requests - free_vc_repo = "FreeVC" - if not Path(free_vc_repo).exists(): - !git clone https://github.com/OlaWod/FreeVC.git + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + open("notebook_utils.py", "w").write(r.text) + - sys.path.append(free_vc_repo) + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) + + .. parsed-literal:: - Cloning into 'FreeVC'... - remote: Enumerating objects: 131, done. - remote: Counting objects: 100% (74/74), done. - remote: Compressing objects: 100% (47/47), done. - remote: Total 131 (delta 43), reused 27 (delta 27), pack-reused 57 (from 1) - Receiving objects: 100% (131/131), 15.28 MiB | 17.50 MiB/s, done. - Resolving deltas: 100% (43/43), done. + 1491 + .. code:: ipython3 - # Fetch `notebook_utils` module - import requests + from pathlib import Path + import gdown + from cmd_helper import clone_repo + from notebook_utils import download_file, device_widget - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) - from notebook_utils import download_file, device_widget + clone_repo("https://github.com/OlaWod/FreeVC.git") + wavlm_large_dir_path = Path("FreeVC/wavlm") wavlm_large_path = wavlm_large_dir_path / "WavLM-Large.pt" @@ -134,8 +133,8 @@ Check if FreeVC is installed and append its path to ``sys.path`` Downloading... 
From: https://drive.google.com/uc?id=12-cB34qCTvByWT-QtOcZaqwwO21FLSqU&confirm=t&uuid=a703c43c-ccce-436c-8799-c11b88e9e7e4 - To: /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/WavLM-Large.pt - 100%|██████████| 1.26G/1.26G [00:32<00:00, 38.5MB/s] + To: /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/WavLM-Large.pt + 100%|██████████| 1.26G/1.26G [00:26<00:00, 47.5MB/s] .. code:: ipython3 @@ -239,7 +238,7 @@ Models initialization .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:28: UserWarning: torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:28: UserWarning: torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm. warnings.warn("torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm.") @@ -360,13 +359,13 @@ Converting to OpenVINO’s IR format. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:495: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:495: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert embed_dim == self.embed_dim - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:496: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:496: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
assert list(query.size()) == [tgt_len, bsz, embed_dim] - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:500: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:500: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert key_bsz == bsz - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:502: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/freevc-voice-conversion/FreeVC/wavlm/modules.py:502: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert src_len, bsz == value.shape[:2] @@ -581,12 +580,12 @@ function to OpenVINO IR format. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1102: TracerWarning: Output nr 1. of the traced function does not match the corresponding output of the Python function. Detailed error: + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1102: TracerWarning: Output nr 1. of the traced function does not match the corresponding output of the Python function. Detailed error: Tensor-likes are not close! - Mismatched elements: 25915 / 25920 (100.0%) - Greatest absolute difference: 1.3485908806324005 at index (0, 0, 24258) (up to 1e-05 allowed) - Greatest relative difference: 8204.075456053068 at index (0, 0, 5777) (up to 1e-05 allowed) + Mismatched elements: 25919 / 25920 (100.0%) + Greatest absolute difference: 0.4560253918170929 at index (0, 0, 20759) (up to 1e-05 allowed) + Greatest relative difference: 13178.603217158177 at index (0, 0, 10045) (up to 1e-05 allowed) _check_trace( @@ -707,7 +706,7 @@ Result audio: diff --git a/docs/notebooks/gpu-device-with-output.rst b/docs/notebooks/gpu-device-with-output.rst index 732cc297aa9531..5953608eae62e5 100644 --- a/docs/notebooks/gpu-device-with-output.rst +++ b/docs/notebooks/gpu-device-with-output.rst @@ -330,7 +330,7 @@ categories of object. 
For details, see the ov_model_path = base_model_dir / model_name / f"{model_name}.xml" if not (ov_model_path).exists(): - hf_hub.snapshot_download("katuni4ka/ssdlite_mobilenet_v2_fp16", local_dir=base_model_dir) + hf_hub.snapshot_download("katuni4ka/ssdlite_mobilenet_v2_fp16", local_dir=base_model_dir / model_name) model = core.read_model(ov_model_path) @@ -541,7 +541,7 @@ with a latency focus: .. code:: ipython3 - !benchmark_app -m {model_path} -d GPU -hint latency + !benchmark_app -m {ov_model_path} -d GPU -hint latency .. parsed-literal:: @@ -622,7 +622,7 @@ CPU vs GPU with Latency Hint .. code:: ipython3 - !benchmark_app -m {model_path} -d CPU -hint latency + !benchmark_app -m {ov_model_path} -d CPU -hint latency .. parsed-literal:: @@ -1071,7 +1071,7 @@ Compile the Model .. code:: ipython3 # Read model and compile it on GPU in THROUGHPUT mode - model = core.read_model(model=model_path) + model = core.read_model(model=ov_model_path) device_name = "GPU" compiled_model = core.compile_model(model=model, device_name=device_name, config={hints.performance_mode(): hints.PerformanceMode.THROUGHPUT}) diff --git a/docs/notebooks/grounded-segment-anything-with-output.rst b/docs/notebooks/grounded-segment-anything-with-output.rst index 232629422b14e0..a51ce8249239f9 100644 --- a/docs/notebooks/grounded-segment-anything-with-output.rst +++ b/docs/notebooks/grounded-segment-anything-with-output.rst @@ -64,7 +64,7 @@ Clone repositories and install requirements .. parsed-literal:: - WARNING: supervision 0.24.0 does not provide the extra 'desktop' + WARNING: supervision 0.25.0 does not provide the extra 'desktop' Note: you may need to restart the kernel to use updated packages. @@ -96,46 +96,51 @@ segmentation you can select vanilla ``SAM``. use_efficient_sam = sam_type_widget.value == "EfficientSAM" +.. code:: ipython3 + + import requests + + + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + open("notebook_utils.py", "w").write(r.text) + + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) + + + + +.. parsed-literal:: + + 1491 + + + .. code:: ipython3 from pathlib import Path import sys import os + from cmd_helper import clone_repo + + repo_dir = Path("Grounded-Segment-Anything") ground_dino_dir = Path("GroundingDINO") efficient_sam_dir = Path("EfficientSAM") - # we use grounding dino from a fork which contains modifications that allow conversion to OpenVINO IR format - if not ground_dino_dir.exists(): - !git clone https://github.com/wenyi5608/GroundingDINO/ - if use_efficient_sam and not efficient_sam_dir.exists(): - !git clone https://github.com/yformer/EfficientSAM - if not use_efficient_sam and not repo_dir.exists(): - !git clone https://github.com/IDEA-Research/Grounded-Segment-Anything + # we use grounding dino from a fork which contains modifications that allow conversion to OpenVINO IR + clone_repo("https://github.com/wenyi5608/GroundingDINO.git") - # append to sys.path so that modules from the repo could be imported - sys.path.append(str(ground_dino_dir)) - sys.path.append(str("EfficientSAM" if use_efficient_sam else repo_dir / "segment_anything")) - - -.. parsed-literal:: - - Cloning into 'GroundingDINO'... - remote: Enumerating objects: 379, done. - remote: Counting objects: 100% (190/190), done. - remote: Compressing objects: 100% (79/79), done. 
- remote: Total 379 (delta 136), reused 111 (delta 111), pack-reused 189 (from 1) - Receiving objects: 100% (379/379), 14.03 MiB | 20.95 MiB/s, done. - Resolving deltas: 100% (194/194), done. - Cloning into 'EfficientSAM'... - remote: Enumerating objects: 424, done. - remote: Counting objects: 100% (85/85), done. - remote: Compressing objects: 100% (33/33), done. - remote: Total 424 (delta 76), reused 52 (delta 52), pack-reused 339 (from 1) - Receiving objects: 100% (424/424), 262.14 MiB | 24.44 MiB/s, done. - Resolving deltas: 100% (246/246), done. - + if use_efficient_sam: + clone_repo("https://github.com/yformer/EfficientSAM.git") + if not use_efficient_sam: + clone_repo("https://github.com/IDEA-Research/Grounded-Segment-Anything.git", add_to_sys_path=False) + sys.path.append(repo_dir / "segment_anything") .. code:: ipython3 @@ -179,15 +184,9 @@ Download checkpoints and load PyTorch models .. code:: ipython3 - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - - open("notebook_utils.py", "w").write(r.text) from notebook_utils import download_file, device_widget + download_file( "https://github.com/IDEA-Research/GroundingDINO/releases/download/v0.1.0-alpha/groundingdino_swint_ogc.pth", directory=CKPT_BASE_PATH, @@ -222,10 +221,10 @@ GroundingDINO imports .. parsed-literal:: - 2024-11-05 01:34:53.765709: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 01:34:53.988314: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 01:12:47.444588: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:12:47.676832: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 01:34:54.760718: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT + 2024-11-22 01:12:48.469702: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers UserWarning: Failed to load custom C++ ops. Running on CPU mode Only! @@ -366,24 +365,10 @@ Convert GroundingDINO to OpenVINO IR format TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - TracerWarning: Converting a tensor to a Python number might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - TracerWarning: Converting a tensor to a Python number might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! .. parsed-literal:: - output layer_id 0 is nan - num_nan 230400, num_inf 0 - output layer_id 1 is nan - num_nan 230400, num_inf 0 - output layer_id 2 is nan - num_nan 230400, num_inf 0 - output layer_id 3 is nan - num_nan 230400, num_inf 0 - output layer_id 4 is nan - num_nan 230400, num_inf 0 - output layer_id 5 is nan - num_nan 230400, num_inf 0 WARNING:tensorflow:Please fix your imports. Module tensorflow.python.training.tracking.base has been moved to tensorflow.python.trackable.base. The old module will be deleted in version 2.11. @@ -557,7 +542,7 @@ Draw box detections -.. image:: grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_29_0.png +.. image:: grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_30_0.png @@ -805,7 +790,7 @@ Combine both boxes and segmentation masks and draw them. -.. image:: grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_45_0.png +.. image:: grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_46_0.png diff --git a/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_29_0.jpg b/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_30_0.jpg similarity index 100% rename from docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_29_0.jpg rename to docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_30_0.jpg diff --git a/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_29_0.png b/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_30_0.png similarity index 100% rename from docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_29_0.png rename to docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_30_0.png diff --git a/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_45_0.jpg b/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_46_0.jpg similarity index 100% rename from docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_45_0.jpg rename to docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_46_0.jpg diff --git a/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_45_0.png b/docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_46_0.png similarity 
index 100% rename from docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_45_0.png rename to docs/notebooks/grounded-segment-anything-with-output_files/grounded-segment-anything-with-output_46_0.png diff --git a/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_22_0.png b/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_22_0.png index c712a34d825650..7e0c09a703a97b 100644 --- a/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_22_0.png +++ b/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_22_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6b33a1c6c4f57f798ae3f4b31dcb638cb618363ef6108e7f60cf81f1c5bdb151 +oid sha256:9ce052db324821165a2b1bc5dea9d05588886c1794c0f217aaa47b8442c76aad size 53571 diff --git a/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_32_1.png b/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_32_1.png index c712a34d825650..7e0c09a703a97b 100644 --- a/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_32_1.png +++ b/docs/notebooks/handwritten-ocr-with-output_files/handwritten-ocr-with-output_32_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:6b33a1c6c4f57f798ae3f4b31dcb638cb618363ef6108e7f60cf81f1c5bdb151 +oid sha256:9ce052db324821165a2b1bc5dea9d05588886c1794c0f217aaa47b8442c76aad size 53571 diff --git a/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_11_1.png b/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_11_1.png index 435c1891121eb0..b696d287ded448 100644 --- a/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_11_1.png +++ b/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_11_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:82b893e29e948379dac42c19763842f7ade2ccf03853c2c07f0b28bf2d58fe17 +oid sha256:c7a830fedc5653fd506c656144decc048cad5a7651c8e498024f0eb0ab8c8e96 size 305482 diff --git a/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_16_0.png b/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_16_0.png index e452c525ef05c2..5e6438a788597e 100644 --- a/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_16_0.png +++ b/docs/notebooks/hello-detection-with-output_files/hello-detection-with-output_16_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:7caf8b68c4e2dfd446836e5049d842227ad718a4bbde287269617e324c7d0cef +oid sha256:edb00cb4f0e2c42cd9e0f90939afbd6352ca40c90866821898f2c42c1fd9df64 size 457214 diff --git a/docs/notebooks/hello-segmentation-with-output.rst b/docs/notebooks/hello-segmentation-with-output.rst index 6ddc0e3b0aa78b..2750c2d019a017 100644 --- a/docs/notebooks/hello-segmentation-with-output.rst +++ b/docs/notebooks/hello-segmentation-with-output.rst @@ -188,7 +188,7 @@ is provided. .. parsed-literal:: - + @@ -215,7 +215,7 @@ Do Inference .. 
parsed-literal:: - + diff --git a/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_11_2.png b/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_11_2.png index 12a0ec3dda0bf1..5023362b06be2d 100644 --- a/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_11_2.png +++ b/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_11_2.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:fb21264c96554435f8c9331a342b9c3a20d8129dc0725f6ff226d789779645be +oid sha256:96f0eb3a9535d57b8784be4b717dc9f280e4bf107e5b61d7cf51b36e142e4c7a size 249032 diff --git a/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_13_1.png b/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_13_1.png index ec01c58bdf8be1..fe6d042ef77d30 100644 --- a/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_13_1.png +++ b/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_13_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:492235a08c36c9afbebabcb01c8325ac99dccff84174e7074ca321aba2ac7aac +oid sha256:caef59a6c15a5a1d512f4dd22395b12fbd754bba264ea5f0deae323ff8edee39 size 20550 diff --git a/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_17_0.png b/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_17_0.png index f8d59545b65f8c..310b0d3545d48c 100644 --- a/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_17_0.png +++ b/docs/notebooks/hello-segmentation-with-output_files/hello-segmentation-with-output_17_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:62376a2e159eca912bff4ce975d169f8ed71f9d9b75c4fd09937e7552120b14d +oid sha256:6a3137d9359a44fb19e1900e6b808f9e7e7ded0ba209abe8c4bd90fcf37b1c6a size 260045 diff --git a/docs/notebooks/hello-world-with-output_files/hello-world-with-output_11_1.png b/docs/notebooks/hello-world-with-output_files/hello-world-with-output_11_1.png index cca7858e3bc4af..a142093f6e675c 100644 --- a/docs/notebooks/hello-world-with-output_files/hello-world-with-output_11_1.png +++ b/docs/notebooks/hello-world-with-output_files/hello-world-with-output_11_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:bbd7b81cc8e7a73ea9bcb8be0c0575134f50b6af8f7de23ee9feed645a4cf66c +oid sha256:5712bd24e962ae0e0267607554ebe1f2869c223b108876ce10e5d20fe6285126 size 387941 diff --git a/docs/notebooks/hugging-face-hub-with-output.rst b/docs/notebooks/hugging-face-hub-with-output.rst index a92f8cd18fba31..537c963ca405c4 100644 --- a/docs/notebooks/hugging-face-hub-with-output.rst +++ b/docs/notebooks/hugging-face-hub-with-output.rst @@ -132,10 +132,10 @@ tutorials `__ is the -first AI model capable of binding data from six modalities at once, -without the need for explicit supervision (the process of organizing and -labeling raw data). By recognizing the relationships between these -modalities — images and video, audio, text, depth, thermal, and inertial -measurement units (IMUs) — this breakthrough helps advance AI by -enabling machines to better analyze many different forms of information, -together. - -.. 
figure:: https://user-images.githubusercontent.com/8495451/236859695-ffa13364-3e39-4d99-a8da-fbfab17f9a6b.gif - :alt: ImageBind - - ImageBind - -In this tutorial, we consider how to convert and run ImageBind model -using OpenVINO. - -The tutorial consists of following steps: - -1. Download the pre-trained model. -2. Prepare input data examples. -3. Convert the model to OpenVINO Intermediate Representation format - (IR). -4. Run model inference and analyze results. - -About ImageBind ---------------- - -ImageBind, released in May 2023 by Meta Research, is an embedding model -that combines data from six modalities: images and video, text, audio, -thermal imaging, depth, and IMUs, which contain sensors including -accelerometers and orientation monitors. Using ImageBind, you can -provide data in one modality – for example, audio – and find related -documents in different modalities, such as video or images. - -ImageBind was trained with pairs of data. Each pair mapped image data – -including videos – to another modality, and the combined data was used -to train an embedding model. ImageBind found that features for different -modalities could be learned using the image data used in their training. -A notable conclusion from ImageBind is that pairing images with another -modality, then combining the results in the same embedding space is -sufficient to create a multi-modal embedding model. More details about -the model can be found in the model -`repository `__, -`paper `__, and Meta AI `blog -post `__. - -Like all embedding models, there are many potential use cases for -ImageBind, among them information retrieval, zero-shot classification, -and usage created by ImageBind representation as input for downstream -tasks (e.g. image generation). Some of the potential use-cases -represented on the image below: - -.. figure:: https://user-images.githubusercontent.com/29454499/256303836-c8e7b311-0b7b-407c-8610-fd8a803e4197.png - :alt: usecases - - usecases - -In this tutorial, we consider how to use ImageBind for multimodal -zero-shot classification. - - -**Table of contents:** - - -- `Prerequisites <#prerequisites>`__ -- `Instantiate PyTorch model <#instantiate-pytorch-model>`__ -- `Prepare input data <#prepare-input-data>`__ -- `Convert Model to OpenVINO Intermediate Representation (IR) - format <#convert-model-to-openvino-intermediate-representation-ir-format>`__ - - - `Select inference device <#select-inference-device>`__ - -- `Zero-shot classification using ImageBind and - OpenVINO <#zero-shot-classification-using-imagebind-and-openvino>`__ - - - `Text-Image classification <#text-image-classification>`__ - - `Text-Audio classification <#text-audio-classification>`__ - - `Image-Audio classification <#image-audio-classification>`__ - -- `Post-Training Quantization of ImageBind model with - NNCF <#post-training-quantization-of-imagebind-model-with-nncf>`__ - - - `Prepare datasets <#prepare-datasets>`__ - - `Apply quantization <#apply-quantization>`__ - - - `Quantize ImageBind model for vision - modality. 
<#quantize-imagebind-model-for-vision-modality->`__ - - `Quantize ImageBind model for text - modality <#quantize-imagebind-model-for-text-modality>`__ - - `Quantize ImageBind model for audio - modality <#quantize-imagebind-model-for-audio-modality>`__ - - - `Compare results for the OpenVINO FP16 model and the quantized - model <#compare-results-for-the-openvino-fp16-model-and-the-quantized-model>`__ - - - `Select inference device <#select-inference-device>`__ - - - `Compare File Size <#compare-file-size>`__ - - `Compare inference time of the FP16 IR and quantized - models <#compare-inference-time-of-the-fp16-ir-and-quantized-models>`__ - - - `Vision model <#vision-model>`__ - - `Text model <#text-model>`__ - - `Audio model <#audio-model>`__ - -Installation Instructions -~~~~~~~~~~~~~~~~~~~~~~~~~ - -This is a self-contained example that relies solely on its own code. - -We recommend running the notebook in a virtual environment. You only -need a Jupyter server to start. For details, please refer to -`Installation -Guide `__. - -Prerequisites -------------- - - - -.. code:: ipython3 - - import platform - - %pip install -q "torch>=2.0.1" "torchvision>=0.15.2,<0.17.0" "torchaudio>=2.0.2" "matplotlib>=3.4" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q datasets regex librosa soundfile pytorchvideo ftfy "timm>=0.6.7" einops fvcore "openvino>=2024.0.0" "nncf>=2.9.0" numpy scipy --extra-index-url https://download.pytorch.org/whl/cpu - - - if platform.system() != "Windows": - %pip install -q "matplotlib>=3.4" - else: - %pip install -q "matplotlib>=3.4,<3.7" - -.. code:: ipython3 - - from pathlib import Path - - repo_dir = Path("ImageBind") - - if not repo_dir.exists(): - !git clone https://github.com/facebookresearch/ImageBind.git - - %cd {repo_dir} - -Instantiate PyTorch model -------------------------- - - - -To start work with the model, we should instantiate the PyTorch model -class. ``imagebind_model.imagebind_huge(pretrained=True)`` downloads -model weights and creates a PyTorch model object for ImageBind. -Currently, there is only one ImageBind model available for downloading, -``imagebind_huge``, more details about it can be found in `model -card `__. - - Please note, depending on internet connection speed, the model - downloading process can take some time. It also requires at least 5 - GB of free space on disk for saving model checkpoint. - -.. code:: ipython3 - - import imagebind.data as data - import torch - from imagebind.models import imagebind_model - from imagebind.models.imagebind_model import ModalityType - - # Instantiate model - model = imagebind_model.imagebind_huge(pretrained=True) - model.eval(); - -Prepare input data ------------------- - - - -ImageBind works with data across 6 different modalities. Each of them -requires its steps for preprocessing. ``data`` module is responsible for -data reading and preprocessing for each modality. - -- ``data.load_and_transform_text`` accepts a list of text labels and - tokenizes them. -- ``data.load_and_transform_vision_data`` accepts paths to input - images, reads them, resizes to save aspect ratio with smaller side - size 224, performs center crop, and normalizes data into [0, 1] - floating point range. -- ``data.load_and_transofrm_audio_data`` reads audio files from - provided paths, splits it on samples, and computes - `mel `__ - spectrogram. - -.. 
code:: ipython3 - - # Prepare inputs - - text_list = ["A car", "A bird", "A dog"] - image_paths = [ - ".assets/dog_image.jpg", - ".assets/car_image.jpg", - ".assets/bird_image.jpg", - ] - audio_paths = [ - ".assets/dog_audio.wav", - ".assets/bird_audio.wav", - ".assets/car_audio.wav", - ] - - inputs = { - ModalityType.TEXT: data.load_and_transform_text(text_list, "cpu"), - ModalityType.VISION: data.load_and_transform_vision_data(image_paths, "cpu"), - ModalityType.AUDIO: data.load_and_transform_audio_data(audio_paths, "cpu"), - } - -Convert Model to OpenVINO Intermediate Representation (IR) format ------------------------------------------------------------------ - - - -OpenVINO supports PyTorch through Model Conversion API. You will use -`model conversion Python -API `__ -to convert model to IR format. The ``ov.convert_model`` function returns -OpenVINO Model class instance ready to load on a device or save on a -disk for next loading using ``ov.save_model``. - -ImageBind accepts data that represents different modalities -simultaneously in any combinations, however, their processing is -independent of each other. For avoiding losing flexibility passing data, -we will export each modality encoder as an independent model. The code -below defines wrappers for the model to get only single-modality -embeddings. - -.. code:: ipython3 - - class ModelExporter(torch.nn.Module): - def __init__(self, model, modality): - super().__init__() - self.model = model - self.modality = modality - - def forward(self, data): - return self.model({self.modality: data}) - -.. code:: ipython3 - - import openvino as ov - - core = ov.Core() - -Select inference device -~~~~~~~~~~~~~~~~~~~~~~~ - - - -select device from dropdown list for running inference using OpenVINO - -.. code:: ipython3 - - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) - - from notebook_utils import device_widget - - device = device_widget() - - device - - - - -.. parsed-literal:: - - Dropdown(description='Device:', index=3, options=('CPU', 'GPU.0', 'GPU.1', 'AUTO'), value='AUTO') - - - -.. code:: ipython3 - - import openvino as ov - - core = ov.Core() - - ov_modality_models = {} - - modalities = [ModalityType.TEXT, ModalityType.VISION, ModalityType.AUDIO] - for modality in modalities: - export_dir = Path(f"image-bind-{modality}") - file_name = f"image-bind-{modality}" - export_dir.mkdir(exist_ok=True) - ir_path = export_dir / f"{file_name}.xml" - if not ir_path.exists(): - exportable_model = ModelExporter(model, modality) - model_input = inputs[modality] - ov_model = ov.convert_model(exportable_model, example_input=model_input) - ov.save_model(ov_model, ir_path) - else: - ov_model = core.read_model(ir_path) - ov_modality_models[modality] = core.compile_model(ov_model, device.value) - -Zero-shot classification using ImageBind and OpenVINO ------------------------------------------------------ - - - -In zero-shot classification, a piece of data is embedded and fed to the -model to retrieve a label that corresponds with the contents of the -data. In the case of ImageBind, you can classify audio, images, and -information in the other supported modalities. 
We already discussed how -to perform zero-shot image classification using the CLIP model (please -check this -`notebook `__ -for details), capabilities of ImageBind for this task wider, because it -allows using any combinations of supported modalities for -classification. - -To perform zero-shot classification using ImageBind we should perform -the following steps: - -1. Preprocess data batch for requested modalities (one modality in our - case treated as a data source, other - as a label). -2. Calculate embeddings for each modality. -3. Find dot-product between embeddings vectors to get probabilities - matrix. -4. Obtain the label with the highest probability for mapping the source - into label space. - -We already preprocessed data in previous step, now, we should run model -inference for getting embeddings. - -.. code:: ipython3 - - embeddings = {} - for modality in modalities: - embeddings[modality] = ov_modality_models[modality](inputs[modality])[ov_modality_models[modality].output(0)] - -The probability matrix shows the correspondence between source -embeddings and label embeddings, it is a 2D matrix, where x-dimension -represents label-modality data and y-dimension - source-modality data. -It can be calculated as a dot-product between embeddings vectors and -normalized into the [0,1] range using softmax. Then a higher score on -the intersection between x and y labels, then higher confidence that -they represent the same object. - -.. code:: ipython3 - - import matplotlib.pyplot as plt - import numpy as np - from scipy.special import softmax - - - def visualize_prob_matrix(matrix, x_label, y_label): - fig, ax = plt.subplots() - ax.matshow(matrix, cmap="winter") - - for (i, j), z in np.ndenumerate(matrix): - ax.text(j, i, "{:0.3f}".format(z), ha="center", va="center") - ax.set_xticks(range(len(x_label)), x_label) - ax.set_yticks(range(len(y_label)), y_label) - - - image_list = [img.split("/")[-1] for img in image_paths] - audio_list = [audio.split("/")[-1] for audio in audio_paths] - -Text-Image classification -~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - text_vision_scores = softmax(embeddings[ModalityType.VISION] @ embeddings[ModalityType.TEXT].T, axis=-1) - - visualize_prob_matrix(text_vision_scores, text_list, image_list) - - - -.. image:: image-bind-with-output_files/image-bind-with-output_20_0.png - - -Text-Audio classification -~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - text_audio_scores = softmax(embeddings[ModalityType.AUDIO] @ embeddings[ModalityType.TEXT].T, axis=-1) - - visualize_prob_matrix(text_audio_scores, text_list, audio_list) - - - -.. image:: image-bind-with-output_files/image-bind-with-output_22_0.png - - -Image-Audio classification -~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - audio_vision_scores = softmax(embeddings[ModalityType.VISION] @ embeddings[ModalityType.AUDIO].T, axis=-1) - - visualize_prob_matrix(audio_vision_scores, image_list, audio_list) - - - -.. image:: image-bind-with-output_files/image-bind-with-output_24_0.png - - -Putting all together, we can match text, image, and sound for our data. - -.. 
code:: ipython3 - - import IPython.display as ipd - from PIL import Image - - text_image_ids = np.argmax(text_vision_scores, axis=0) - text_audio_ids = np.argmax(text_audio_scores, axis=0) - print( - f"Predicted label: {text_list[0]} \nprobability for image - {text_vision_scores[text_image_ids[0], 0]:.3f}\nprobability for audio - {text_audio_scores[0, text_audio_ids[0]]:.3f}" - ) - display(Image.open(image_paths[text_image_ids[0]])) - ipd.Audio(audio_paths[text_audio_ids[0]]) - - -.. parsed-literal:: - - Predicted label: A car - probability for image - 1.000 - probability for audio - 1.000 - - - -.. image:: image-bind-with-output_files/image-bind-with-output_26_1.png - - - - -.. raw:: html - - - - - - - -.. code:: ipython3 - - print( - f"Predicted label: {text_list[1]} \nprobability for image - {text_vision_scores[text_image_ids[1], 1]:.3f}\nprobability for audio - {text_audio_scores[1, text_audio_ids[1]]:.3f}" - ) - display(Image.open(image_paths[text_image_ids[1]])) - ipd.Audio(audio_paths[text_audio_ids[1]]) - - -.. parsed-literal:: - - Predicted label: A bird - probability for image - 0.986 - probability for audio - 1.000 - - - -.. image:: image-bind-with-output_files/image-bind-with-output_27_1.png - - - - -.. raw:: html - - - - - - - -.. code:: ipython3 - - print( - f"Predicted label: {text_list[2]} \nprobability for image - {text_vision_scores[text_image_ids[2], 2]:.3f}\nprobability for audio - {text_audio_scores[2, text_audio_ids[2]]:.3f}" - ) - display(Image.open(image_paths[text_image_ids[2]])) - ipd.Audio(audio_paths[text_audio_ids[2]]) - - -.. parsed-literal:: - - Predicted label: A dog - probability for image - 0.984 - probability for audio - 1.000 - - - -.. image:: image-bind-with-output_files/image-bind-with-output_28_1.png - - - - -.. raw:: html - - - - - - - -Post-Training Quantization of ImageBind model with NNCF -------------------------------------------------------- - - - -The goal of this part of tutorial is to demonstrate how to speed up the -model by applying 8-bit post-training quantization from -`NNCF `__ (Neural Network -Compression Framework) and infer quantized model via OpenVINO™ Toolkit. - -The optimization process contains the following steps: 1. Prepare -quantization dataset 2. Quantize OpenVINO model with NNCF. 3. Compare -probability matrices between converted and quantized models on input -data examples. 4. Compare model size of converted and quantized models. -5. Compare performance of converted and quantized models. - -.. code:: ipython3 - - modalities = [ModalityType.TEXT, ModalityType.VISION, ModalityType.AUDIO] - fp_model_paths = {modality: Path(f"image-bind-{modality}") / f"image-bind-{modality}.xml" for modality in modalities} - int8_model_paths = {modality: Path(f"image-bind-{modality}") / f"image-bind-{modality}_int8.xml" for modality in modalities} - -Prepare datasets -~~~~~~~~~~~~~~~~ - - - -The `Conceptual -Captions `__ dataset -consisting of ~3.3M images annotated with captions. Dataset is used to -quantize image and text models. - -.. code:: ipython3 - - import imagebind.data as data - import os - import requests - import tempfile - - from requests.packages.urllib3.exceptions import InsecureRequestWarning - - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) - - - def check_text_data(data): - """ - Check if the given data is text-based. 
- """ - if isinstance(data, str): - return True - if isinstance(data, list): - return all(isinstance(x, str) for x in data) - return False - - - def collate_fn(examples, image_column="image_url", text_column="caption"): - """ - Collates examples into a batch for processing. - Preprocesses each example by loading and transforming image and text data. - Checks if the text data in the example is valid by calling the `check_text_data` function. - Downloads the image specified by the URL in the image_column of the example dictionary. - Constructs and returns a dictionary representing the collated batch with the following keys: - - "pixel_values": The pixel values of the preprocessed example. - - "input_ids": The transformed text data of the preprocessed example. - """ - assert len(examples) == 1 - example = examples[0] - if not check_text_data(example[text_column]): - raise ValueError("Text data is not valid") - - url = example[image_column] - with tempfile.TemporaryDirectory() as tempdir: - f_name = os.path.join(tempdir, "image.jpg") - try: - response = requests.get(url, verify=False, timeout=20) - with open(f_name, "wb") as file: - file.write(response.content) - pixel_values = data.load_and_transform_vision_data([f_name], "cpu") - except Exception: - print(f"Can't load image from url: {url}") - return None - - text = data.load_and_transform_text([example[text_column]], "cpu") - - return {"pixel_values": pixel_values, "input_ids": text} - -.. code:: ipython3 - - from datasets import load_dataset - import itertools - import torch - from tqdm.notebook import tqdm - - - def collect_vision_text_data(dataloader, init_steps): - """ - This function collects vision and text data from a dataloader for a specified number of initialization steps. - It iterates over the dataloader, fetching batches and storing the relevant vision and text data. - Returns a tuple containing the collected vision_data and text_data lists. - """ - text_data = [] - vision_data = [] - print(f"Fetching {init_steps} for the initialization...") - counter = 0 - for batch in tqdm(dataloader): - if counter == init_steps: - break - with torch.no_grad(): - if batch: - counter += 1 - text_data.append(batch["input_ids"].to("cpu")) - vision_data.append(batch["pixel_values"].to("cpu")) - return vision_data, text_data - - - def prepare_vision_text_dataset(opt_init_steps=50): - """ - Prepares a vision-text dataset for quantization by collecting vision and text data. - """ - dataset = load_dataset("google-research-datasets/conceptual_captions", streaming=False, trust_remote_code=True) - train_dataset = dataset["train"].shuffle(seed=0) - dataloader = torch.utils.data.DataLoader(train_dataset, collate_fn=collate_fn, batch_size=1) - vision_data, text_data = collect_vision_text_data(dataloader, opt_init_steps) - return vision_data, text_data - -The `ESC-50 `__ dataset is -used to quantize the audio modality of the ImageBind model. Dataset is a -labeled collection of 2000 environmental audio recordings suitable for -benchmarking methods of environmental sound classification. The dataset -consists of 5-second-long recordings organized into 50 semantic classes. - -.. code:: ipython3 - - import numpy as np - import torchaudio - - - def collect_audio_data(dataloader, init_steps=300): - """ - This function collects audio data from a dataloader for a specified number of initialization steps. - It iterates over the dataloader, fetching batches and storing them in a list. 
- """ - audio_data = [] - for _, batch in tqdm(zip(range(init_steps), itertools.islice(dataloader, 0, init_steps))): - with torch.no_grad(): - audio_data.append(batch) - return audio_data - - - def prepare_audio_dataset(): - """ - Prepares an "ashraq/esc50" audio dataset for quantization by collecting audio data. - Collects audio data from the dataloader by calling the `collect_audio_data` function. - Returns a list containing the collected calibration audio data batches. - """ - audio_dataset = load_dataset("ashraq/esc50", streaming=True, trust_remote_code=True) - train_dataset = audio_dataset["train"].shuffle(seed=42, buffer_size=1000) - - def collate_fn(examples): - assert len(examples) == 1 - with tempfile.TemporaryDirectory() as tempdir: - f_name = os.path.join(tempdir, "audio.wav") - audio_data = examples[0]["audio"]["array"] - sample_rate = examples[0]["audio"]["sampling_rate"] - audio_data = torch.from_numpy(audio_data).to(torch.float32).unsqueeze(0) - torchaudio.save(f_name, audio_data, sample_rate) - return data.load_and_transform_audio_data([f_name], "cpu") - - dataloader = torch.utils.data.DataLoader(train_dataset, collate_fn=collate_fn, batch_size=1) - calibration_data = collect_audio_data(dataloader) - return calibration_data - -.. code:: ipython3 - - vision_data, text_data = [], [] - - if not int8_model_paths[ModalityType.TEXT].exists() or not int8_model_paths[ModalityType.VISION].exists(): - vision_data, text_data = prepare_vision_text_dataset() - -Apply quantization -~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - import logging - import nncf - import openvino as ov - - nncf.set_log_level(logging.ERROR) - - core = ov.Core() - - - def quantize_openvino_model(modality, calibration_data): - model_path = fp_model_paths[modality] - model = core.read_model(model_path) - quantized_model = nncf.quantize( - model=model, - calibration_dataset=calibration_data, - model_type=nncf.ModelType.TRANSFORMER, - ) - ov.save_model(quantized_model, int8_model_paths[modality]) - return quantized_model - - -.. parsed-literal:: - - INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, tensorflow, onnx, openvino - - -Quantize ImageBind model for vision modality. -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - - - **NOTE**: Quantization is time and memory consuming operation. - Running quantization code below may take a long time. - -.. code:: ipython3 - - if not int8_model_paths[ModalityType.VISION].exists(): - if len(vision_data) == 0: - raise RuntimeError("Calibration dataset is empty. Please check internet connection and try to download images manually from the URLs above.") - - vision_dataset = nncf.Dataset(vision_data) - vision_quantized_model = quantize_openvino_model(modality=ModalityType.VISION, calibration_data=vision_dataset) - -Quantize ImageBind model for text modality -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - - -.. code:: ipython3 - - if not int8_model_paths[ModalityType.TEXT].exists(): - text_dataset = nncf.Dataset(text_data) - text_quantized_model = quantize_openvino_model(modality=ModalityType.TEXT, calibration_data=text_dataset) - -Quantize ImageBind model for audio modality -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - - - -.. 
code:: ipython3 - - if not int8_model_paths[ModalityType.AUDIO].exists(): - audio_calibration_data = prepare_audio_dataset() - audio_dataset = nncf.Dataset(audio_calibration_data) - audio_quantized_model = quantize_openvino_model(modality=ModalityType.AUDIO, calibration_data=audio_dataset) - -NNCF also supports quantization-aware training, and other algorithms -than quantization. See the `NNCF -documentation `__ -in the NNCF repository for more information. - -Compare results for the OpenVINO FP16 model and the quantized model -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -Compare the probability matrices for ``FP16`` and ``INT8`` models. - -.. code:: ipython3 - - # Prepare inputs - - text_list = ["A car", "A bird", "A dog"] - image_paths = [ - ".assets/dog_image.jpg", - ".assets/car_image.jpg", - ".assets/bird_image.jpg", - ] - audio_paths = [ - ".assets/dog_audio.wav", - ".assets/bird_audio.wav", - ".assets/car_audio.wav", - ] - - inputs = { - ModalityType.TEXT: data.load_and_transform_text(text_list, "cpu"), - ModalityType.VISION: data.load_and_transform_vision_data(image_paths, "cpu"), - ModalityType.AUDIO: data.load_and_transform_audio_data(audio_paths, "cpu"), - } - -Select inference device -^^^^^^^^^^^^^^^^^^^^^^^ - - - -select device from dropdown list for running inference using OpenVINO - -.. code:: ipython3 - - device - - - - -.. parsed-literal:: - - Dropdown(description='Device:', index=3, options=('CPU', 'GPU.0', 'GPU.1', 'AUTO'), value='AUTO') - - - -.. code:: ipython3 - - embeddings = {} - for modality in modalities: - ov_model = core.compile_model(fp_model_paths[modality], device.value) - embeddings[modality] = ov_model(inputs[modality])[0] - - quantized_embeddings = {} - for modality in modalities: - model = core.compile_model(int8_model_paths[modality], device.value) - quantized_embeddings[modality] = model(inputs[modality])[0] - -.. code:: ipython3 - - def visualize_prob_matrices(fp_matrix, int_matrix, x_label, y_label): - fig, ax = plt.subplots(1, 2) - for i, matrix in enumerate([fp_matrix, int_matrix]): - ax[i].matshow(matrix, cmap="winter") - - for (k, j), z in np.ndenumerate(matrix): - ax[i].title.set_text("FP16 probs" if i == 0 else "INT8 probs") - ax[i].text(j, k, "{:0.3f}".format(z), ha="center", va="center") - ax[i].set_xticks(range(len(x_label)), x_label) - ax[i].set_yticks(range(len(y_label)), y_label) - fig.tight_layout() - - - image_list = [img.split("/")[-1] for img in image_paths] - audio_list = [audio.split("/")[-1] for audio in audio_paths] - -.. code:: ipython3 - - fp_text_vision_scores = softmax(embeddings[ModalityType.VISION] @ embeddings[ModalityType.TEXT].T, axis=-1) - int_text_vision_scores = softmax( - quantized_embeddings[ModalityType.VISION] @ quantized_embeddings[ModalityType.TEXT].T, - axis=-1, - ) - - visualize_prob_matrices(fp_text_vision_scores, int_text_vision_scores, text_list, image_list) - - - -.. image:: image-bind-with-output_files/image-bind-with-output_52_0.png - - -.. code:: ipython3 - - fp_text_audio_scores = softmax(embeddings[ModalityType.AUDIO] @ embeddings[ModalityType.TEXT].T, axis=-1) - int_text_audio_scores = softmax( - quantized_embeddings[ModalityType.AUDIO] @ quantized_embeddings[ModalityType.TEXT].T, - axis=-1, - ) - - visualize_prob_matrices(fp_text_audio_scores, int_text_audio_scores, text_list, image_list) - - - -.. image:: image-bind-with-output_files/image-bind-with-output_53_0.png - - -.. 
code:: ipython3 - - fp_audio_vision_scores = softmax(embeddings[ModalityType.VISION] @ embeddings[ModalityType.AUDIO].T, axis=-1) - int_audio_vision_scores = softmax( - quantized_embeddings[ModalityType.VISION] @ quantized_embeddings[ModalityType.AUDIO].T, - axis=-1, - ) - - visualize_prob_matrices(fp_audio_vision_scores, int_audio_vision_scores, text_list, image_list) - - - -.. image:: image-bind-with-output_files/image-bind-with-output_54_0.png - - -Compare File Size -~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - def calculate_compression_rate(modality): - fp16_ir_model_size = Path(fp_model_paths[modality]).with_suffix(".bin").stat().st_size / 1024 - quantized_model_size = Path(int8_model_paths[modality]).with_suffix(".bin").stat().st_size / 1024 - print(f"Modality: {modality}") - print(f" * FP16 IR model size: {fp16_ir_model_size:.2f} KB") - print(f" * INT8 model size: {quantized_model_size:.2f} KB") - print(f" * Model compression rate: {fp16_ir_model_size / quantized_model_size:.3f}") - - - for modality in modalities: - calculate_compression_rate(modality) - - -.. parsed-literal:: - - Modality: text - * FP16 IR model size: 691481.69 KB - * INT8 model size: 347006.66 KB - * Model compression rate: 1.993 - Modality: vision - * FP16 IR model size: 1235995.15 KB - * INT8 model size: 620132.79 KB - * Model compression rate: 1.993 - Modality: audio - * FP16 IR model size: 168429.15 KB - * INT8 model size: 84818.40 KB - * Model compression rate: 1.986 - - -Compare inference time of the FP16 IR and quantized models -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -To measure the inference performance of the ``FP16`` and ``INT8`` -models, we use median inference time on calibration dataset. So we can -approximately estimate the speed up of the dynamic quantized models. - - **NOTE**: For the most accurate performance estimation, it is - recommended to run ``benchmark_app`` in a terminal/command prompt - after closing other applications with static shapes. - -.. code:: ipython3 - - import time - - - def calculate_inference_time(model_path, calibration_data): - model = core.compile_model(model_path) - output_layer = model.output(0) - inference_time = [] - for batch in calibration_data: - start = time.perf_counter() - _ = model(batch)[output_layer] - end = time.perf_counter() - delta = end - start - inference_time.append(delta) - return np.median(inference_time) - -Vision model -^^^^^^^^^^^^ - - - -.. code:: ipython3 - - fp16_latency = calculate_inference_time(fp_model_paths[ModalityType.VISION], vision_data) - int8_latency = calculate_inference_time(int8_model_paths[ModalityType.VISION], vision_data) - print(f"Performance speed up: {fp16_latency / int8_latency:.3f}") - - -.. parsed-literal:: - - Performance speed up: 2.375 - - -Text model -^^^^^^^^^^ - - - -.. code:: ipython3 - - fp16_latency = calculate_inference_time(fp_model_paths[ModalityType.TEXT], text_data) - int8_latency = calculate_inference_time(int8_model_paths[ModalityType.TEXT], text_data) - print(f"Performance speed up: {fp16_latency / int8_latency:.3f}") - - -.. parsed-literal:: - - Performance speed up: 1.492 - - -Audio model -^^^^^^^^^^^ - - - -.. code:: ipython3 - - fp16_latency = calculate_inference_time(fp_model_paths[ModalityType.AUDIO], audio_calibration_data) - int8_latency = calculate_inference_time(int8_model_paths[ModalityType.AUDIO], audio_calibration_data) - print(f"Performance speed up: {fp16_latency / int8_latency:.3f}") - - -.. 
parsed-literal:: - - Performance speed up: 5.770 - diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_20_0.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_20_0.png deleted file mode 100644 index b61da5d71d0e90..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_20_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:407f4039d44322edd717fb1eba4c0e029205b2c691614606f1a5b33ed31aa047 -size 15474 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_22_0.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_22_0.png deleted file mode 100644 index bf96c415a07c15..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_22_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:098a56bdaf58b412fe6935d327bcd810942f01789ecd5c2efe834888eba3b819 -size 13795 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_24_0.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_24_0.png deleted file mode 100644 index 54a9a68752100b..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_24_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fe454683f2419970a93baaea6a5beb973dd832627217464d87c14bf2a61e8032 -size 18633 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_26_1.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_26_1.png deleted file mode 100644 index 6be4611dbc7a18..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_26_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:d09352f8474421fa78d601cc5afbe88df3d0403c157f91605d424b66a2f1809a -size 303014 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_27_1.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_27_1.png deleted file mode 100644 index 174dcfdcbe8079..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_27_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:609e506939d69a89fb59d36622d72005d5b162afccf70c1e2463cd51d544d4dd -size 777583 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_28_1.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_28_1.png deleted file mode 100644 index a4b0b02a4d7c0b..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_28_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:7509d532217e990ed721424c57aecbadfb634d397bd1c069852f873fee8741a9 -size 572170 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_52_0.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_52_0.png deleted file mode 100644 index 9274858833d2aa..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_52_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:41c01dd2ebbddd60573c560ddcb00f7671b63bf1e49ca68497be1d39fd5cb86c -size 19998 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_53_0.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_53_0.png deleted file 
mode 100644 index 76f09aa4eb803b..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_53_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:acb5ca8757899c94fa8fd68a647975ea031ffa3f4955214b9a39d097b179ad27 -size 17315 diff --git a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_54_0.png b/docs/notebooks/image-bind-with-output_files/image-bind-with-output_54_0.png deleted file mode 100644 index f2f53fccfbdd8c..00000000000000 --- a/docs/notebooks/image-bind-with-output_files/image-bind-with-output_54_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e30a62c61037f25fa771225ab71ab9ecb407a0589103a79de2c5e0374583adf1 -size 22314 diff --git a/docs/notebooks/image-classification-quantization-with-output.rst b/docs/notebooks/image-classification-quantization-with-output.rst index 7bf7172f720588..491ca0eed2881a 100644 --- a/docs/notebooks/image-classification-quantization-with-output.rst +++ b/docs/notebooks/image-classification-quantization-with-output.rst @@ -70,7 +70,6 @@ Guide 4.36,<4.45" "torch>=2.1" "torchvision" "einops" "timm" "Pillow" "gradio>=4.36" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "openvino>=2024.3.0" "nncf>=2.12.0" - - -.. parsed-literal:: - - Note: you may need to restart the kernel to use updated packages. - Note: you may need to restart the kernel to use updated packages. - + import platform + + %pip install -q "transformers>4.36" "torch>=2.1" "torchvision" "einops" "timm" "Pillow" "gradio>=4.36" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "nncf>=2.14.0" "datasets" + %pip install -q "git+https://github.com/huggingface/optimum-intel.git" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q -U "openvino>=2024.5" "openvino-tokenizers>=2024.5" "openvino-genai>=2024.5" + + if platform.system() == "Darwin": + %pip install -q "numpy<2.0.0" .. code:: ipython3 from pathlib import Path import requests - if not Path("conversation.py").exists(): - r = requests.get("https://huggingface.co/OpenGVLab/InternVL2-1B/raw/main/conversation.py") - open("conversation.py", "w", encoding="utf-8").write(r.text) - - if not Path("internvl2_helper.py").exists(): - r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/internvl2/internvl2_helper.py") - open("internvl2_helper.py", "w", encoding="utf-8").write(r.text) - if not Path("gradio_helper.py").exists(): r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/internvl2/gradio_helper.py") open("gradio_helper.py", "w", encoding="utf-8").write(r.text) @@ -87,6 +79,10 @@ Prerequisites if not Path("notebook_utils.py").exists(): r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py") open("notebook_utils.py", "w", encoding="utf-8").write(r.text) + + if not Path("cmd_helper.py").exists(): + r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py") + open("cmd_helper.py", "w", encoding="utf-8").write(r.text) Select model ------------ @@ -100,18 +96,25 @@ using widget bellow: .. 
code:: ipython3 - from internvl2_helper import model_selector + model_ids = ["OpenGVLab/InternVL2-1B", "OpenGVLab/InternVL2-2B", "OpenGVLab/InternVL2-4B", "OpenGVLab/InternVL2-8B"] + + + def model_selector(default=model_ids[0]): + import ipywidgets as widgets + + model_checkpoint = widgets.Dropdown( + options=model_ids, + default=default, + description="Model:", + ) + return model_checkpoint + model_id = model_selector() model_id -.. parsed-literal:: - - INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, tensorflow, onnx, openvino - - .. parsed-literal:: @@ -130,94 +133,84 @@ using widget bellow: .. parsed-literal:: Selected OpenGVLab/InternVL2-1B - + Convert and Optimize model -------------------------- -InternVL2 is PyTorch model. OpenVINO supports PyTorch models via -conversion to OpenVINO Intermediate Representation (IR). `OpenVINO model -conversion -API `__ -should be used for these purposes. ``ov.convert_model`` function accepts -original PyTorch model instance and example input for tracing and -returns ``ov.Model`` representing this model in OpenVINO framework. -Converted model can be used for saving on disk using ``ov.save_model`` -function or directly loading on device using ``core.complie_model``. -``internvl2_helper.py`` script contains helper function for model -conversion, please check its content if you interested in conversion -details. +Our model conversion and optimization consist of following steps: 1. +Download original PyTorch model. 2. Convert model to OpenVINO format. 3. +Compress model weights using NNCF. -.. raw:: html +Let’s consider each step more deeply. -
+Convert model to OpenVINO IR format using Optimum CLI +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -Click here for more detailed explanation of conversion steps InternVL2 -is autoregressive transformer generative model, it means that each next -model step depends from model output from previous step. The generation -approach is based on the assumption that the probability distribution of -a word sequence can be decomposed into the product of conditional next -word distributions. In other words, model predicts the next token in the -loop guided by previously generated tokens until the stop-condition will -be not reached (generated sequence of maximum length or end of string -token obtained). The way the next token will be selected over predicted -probabilities is driven by the selected decoding methodology. You can -find more information about the most popular decoding methods in this -blog. The entry point for the generation process for models from the -Hugging Face Transformers library is the ``generate`` method. You can -find more information about its parameters and configuration in the -documentation. To preserve flexibility in the selection decoding -methodology, we will convert only model inference for one step. - -The inference flow has difference on first step and for the next. On the -first step, model accept preprocessed input instruction and image, that -transformed to the unified embedding space using ``input_embedding`` and -``image_encoder`` models, after that ``language model``, LLM-based part -of model, runs on input embeddings to predict probability of next -generated tokens. On the next step, ``language_model`` accepts only next -token id selected based on sampling strategy and processed by -``input_embedding`` model and cached attention key and values. Since the -output side is auto-regressive, an output token hidden state remains the -same once computed for every further generation step. Therefore, -recomputing it every time you want to generate a new token seems -wasteful. With the cache, the model saves the hidden state once it has -been computed. The model only computes the one for the most recently -generated output token at each time step, re-using the saved ones for -hidden tokens. This reduces the generation complexity from -:math:`O(n^3)` to :math:`O(n^2)` for a transformer model. More details -about how it works can be found in this -`article `__. -To sum up above, model consists of 4 parts: - -- **Image encoder** for encoding input images into embedding space. -- **Input Embedding** for conversion input text tokens into embedding - space -- **Language Model** for generation answer based on input embeddings - provided by Image Encoder and Input Embedding models. -.. raw:: html -
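As a rough count behind the complexity claim above: without a cache, step :math:`t` re-runs attention over the full prefix, which costs on the order of :math:`t^2` operations, so producing :math:`n` tokens costs

.. math::

    \sum_{t=1}^{n} t^{2} = O(n^{3}),

while with cached keys and values each step only scores the single new query against the :math:`t` cached entries, giving :math:`\sum_{t=1}^{n} t = O(n^{2})` overall.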
+OpenVINO supports PyTorch models via conversion to OpenVINO Intermediate +Representation format. For convenience, we will use OpenVINO integration +with HuggingFace Optimum. `Optimum +Intel `__ is the +interface between the Transformers and Diffusers libraries and the +different tools and libraries provided by Intel to accelerate end-to-end +pipelines on Intel architectures. + +Among other use cases, Optimum Intel provides a simple interface to +optimize your Transformers and Diffusers models, convert them to the +OpenVINO Intermediate Representation (IR) format and run inference using +OpenVINO Runtime. ``optimum-cli`` provides command line interface for +model conversion and optimization. + +General command format: + +.. code:: bash + + optimum-cli export openvino --model --task + +where task is task to export the model for, if not specified, the task +will be auto-inferred based on the model. You can find a mapping between +tasks and model classes in Optimum TaskManager +`documentation `__. +Additionally, you can specify weights compression using +``--weight-format`` argument with one of following options: ``fp32``, +``fp16``, ``int8`` and ``int4``. Fro int8 and int4 +`nncf `__ will be used for +weight compression. More details about model export provided in `Optimum +Intel +documentation `__. Compress model weights to 4-bit ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ For reducing memory consumption, weights compression optimization can be applied using -`NNCF `__. +`NNCF `__ via ``optimum-cli`` +command. In this tutorial we will demonstrates how to apply accurate +int4 weight quantization using AWQ method. .. raw:: html
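For the model selected above, the export command could look like the sketch below. The ``--awq`` and ``--dataset`` options enable data-aware AWQ compression and require a recent ``optimum-intel``; the dataset name and the ``out_dir`` variable are illustrative assumptions rather than the exact command used later in this notebook.

.. code:: ipython3

    # Illustrative int4 + AWQ export via the Optimum CLI, run as a notebook
    # shell escape. The exact flag set may differ between optimum-intel versions.
    out_dir = model_id.value.split("/")[-1]
    !optimum-cli export openvino --model {model_id.value} --trust-remote-code --weight-format int4 --awq --dataset contextual {out_dir}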
-Click here for more details about weight compression Weight compression -aims to reduce the memory footprint of a model. It can also lead to -significant performance improvement for large memory-bound models, such -as Large Language Models (LLMs). LLMs and other models, which require -extensive memory to store the weights during inference, can benefit from -weight compression in the following ways: +.. raw:: html + + + +Click here for more details about weight compression + +.. raw:: html + + + +Weight compression aims to reduce the memory footprint of a model. It +can also lead to significant performance improvement for large +memory-bound models, such as Large Language Models (LLMs). LLMs and +other models, which require extensive memory to store the weights during +inference, can benefit from weight compression in the following ways: - enabling the inference of exceptionally large models that cannot be accommodated in the memory of the device; @@ -238,11 +231,13 @@ with the performance of the full model quantization. In addition, weight compression is data-free and does not require a calibration dataset, making it easy to use. -``nncf.compress_weights`` function can be used for performing weights -compression. The function accepts an OpenVINO model and other -compression parameters. Compared to INT8 compression, INT4 compression -improves performance even more, but introduces a minor drop in -prediction quality. +Usually 4-bit compression allows to get maximal speedup and minimal +memory footprint comparing with 8-bit compression, but in the same time +it may significantly drop model accuracy. `Activation-aware Weight +Quantization `__ (AWQ) is an algorithm +that tunes model weights for more accurate INT4 compression. It slightly +improves generation quality of compressed models, but requires +additional time for tuning weights on a calibration dataset. More details about weights compression, can be found in `OpenVINO documentation `__. @@ -253,151 +248,99 @@ documentation self.max_seq_len_cached: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/qwen2/modeling_qwen2.py:324: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/qwen2/modeling_qwen2.py:339: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): - - -.. 
parsed-literal:: - - ✅ Language model successfully converted - ⌛ Weights compression with int4_asym mode started - INFO:nncf:Statistics of the bitwidth distribution: + Applying AWQ ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% 24/24 • 0:01:54 • 0:00:0054 • 0:00:06;2;97;53;69m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ • 0:00:00 + Applying Weight Compression ━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% • 0:00:17 • 0:00:00;0;104;181m0:00:01181m0:00:01 + [?25hINFO:nncf:Statistics of the bitwidth distribution: ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ - │ 8 │ 28% (1 / 169) │ 0% (0 / 168) │ - ├────────────────┼─────────────────────────────┼────────────────────────────────────────┤ - │ 4 │ 72% (168 / 169) │ 100% (168 / 168) │ - ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ - - - -.. parsed-literal:: - - Output() - - - - - - - - + │ 8 │ 100% (99 / 99) │ 100% (99 / 99) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━���━━━━━━━━━━━━━━━┙ + Applying Weight Compression ━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% • 0:00:01 • 0:00:00• 0:00:01:01 + [?25hINFO:nncf:Statistics of the bitwidth distribution: + ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ + │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ + ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ + │ 8 │ 100% (1 / 1) │ 100% (1 / 1) │ + ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━���━━━━━━━━━━━━━━━┙ + Applying Weight Compression ━━━━━━━━━━━━━━━━━━━━━━━━━━━ 100% • 0:00:00 • 0:00:00 + [?25h .. parsed-literal:: - ✅ Weights compression finished - ✅ OpenGVLab/InternVL2-1B model conversion finished. You can find results in InternVL2-1B - + Attempt to save config using standard API has failed with 'architectures'. There may be an issue with model config, please check its correctness before usage. + Select inference device ----------------------- @@ -426,49 +369,76 @@ Prepare model inference pipeline -As discussed, the model comprises Image Encoder and LLM (with separated -text embedding part) that generates answer. In ``internvl2_helper.py`` -we defined LLM inference class ``OvModelForCausalLMWithEmb`` that will -represent generation cycle, It is based on `HuggingFace Transformers -GenerationMixin `__ -and looks similar to `Optimum -Intel `__ -``OVModelForCausalLM`` that is used for LLM inference with only -difference that it can accept input embedding. In own turn, general -multimodal model class ``OVInternVLChatModel`` handles chatbot -functionality including image processing and answer generation using -LLM. +`OpenVINO™ GenAI `__ +is a library of the most popular Generative AI model pipelines, +optimized execution methods, and samples that run on top of highly +performant `OpenVINO +Runtime `__. -.. code:: ipython3 +This library is friendly to PC and laptop execution, and optimized for +resource consumption. It requires no external dependencies to run +generative models as it already includes all the core functionality +(e.g. tokenization via openvino-tokenizers). OpenVINO™ GenAI is a flavor +of OpenVINO™, aiming to simplify running inference of generative AI +models. 
It hides the complexity of the generation process and minimizes +the amount of code required. - from internvl2_helper import OVInternVLChatModel - from transformers import AutoTokenizer - - # Uncomment below lines to see the model inference class code - - # OVInternVLChatModel?? +Inference Visual language models can be implemented using OpenVINO GenAI +``VLMPipeline`` class. Similarly to LLMPipeline, that we discussed in +this +`notebook `__. +It supports chat mode with preserving conversational history inside +pipeline, that allows us effectively implements chatbot that supports +conversation about provided images content. For pipeline initialization +we should provide path to model directory and inference device. .. code:: ipython3 - tokenizer = AutoTokenizer.from_pretrained(model_dir, trust_remote_code=True) - ov_model = OVInternVLChatModel(model_dir, device.value) + import openvino_genai as ov_genai + + ov_model = ov_genai.VLMPipeline(model_dir, device=device.value) Run model inference ------------------- -Our interface is fully compatible with Transformers interface for -InternVL2, you can try any of represented here `usage -examples `__. -Let’s check model capabilities in answering questions about image: +For preparing input data, ``VLMPipeline`` use tokenizer and image +processor inside, we just need to convert image to input OpenVINO tensor +and provide question as string. Additionally, we can provides options +for controlling generation process (e.g. number of maximum generated +tokens or using multinomial sampling for decoding instead of greedy +search approach) using ``GenerationConfig``. + +Generation process for long response may be time consuming, for +accessing partial result as soon as it is generated without waiting when +whole process finished, Streaming API can be used. Token streaming is +the mode in which the generative system returns the tokens one by one as +the model generates them. This enables showing progressive generations +to the user rather than waiting for the whole generation. Streaming is +an essential aspect of the end-user experience as it reduces latency, +one of the most critical aspects of a smooth experience. .. code:: ipython3 - import PIL - from internvl2_helper import load_image - from transformers import TextIteratorStreamer - from threading import Thread + import requests + from PIL import Image + from io import BytesIO + import numpy as np + import openvino as ov + + config = ov_genai.GenerationConfig() + config.max_new_tokens = 100 + + + def load_image(image_file): + if isinstance(image_file, str) and (image_file.startswith("http") or image_file.startswith("https")): + response = requests.get(image_file) + image = Image.open(BytesIO(response.content)).convert("RGB") + else: + image = Image.open(image_file).convert("RGB") + image_data = np.array(image.getdata()).reshape(1, image.size[1], image.size[0], 3).astype(np.byte) + return image, ov.Tensor(image_data) EXAMPLE_IMAGE = Path("examples_image1.jpg") @@ -479,59 +449,41 @@ Let’s check model capabilities in answering questions about image: with EXAMPLE_IMAGE.open("wb") as handler: handler.write(img_data) - pixel_values = load_image(EXAMPLE_IMAGE, max_num=12) - streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True) + def streamer(subword: str) -> bool: + """ - generation_config = dict(max_new_tokens=100, do_sample=True, streamer=streamer) - question = "\nPlease describe the image shortly." + Args: + subword: sub-word of the generated text. 
+ + Returns: Return flag corresponds whether generation should be stopped. + + """ + print(subword, end="", flush=True) - display(PIL.Image.open(EXAMPLE_IMAGE)) - print(f"User: {question}\n") - print("Assistant:") - thread = Thread( - target=ov_model.chat, - kwargs=dict( - tokenizer=tokenizer, - pixel_values=pixel_values, - question=question, - history=None, - return_history=False, - generation_config=generation_config, - ), - ) - thread.start() + question = "Please describe the image shortly" - generated_text = "" - # Loop through the streamer to get the new text as it is generated - for new_text in streamer: - if new_text == ov_model.conv_template.sep: - break - generated_text += new_text - print(new_text, end="", flush=True) # Print each new chunk of generated text on the same line + + image, image_tensor = load_image(EXAMPLE_IMAGE) + display(image) + print(f"User: {question}\n") + print("Assistant:") + output = ov_model.generate(question, image=image_tensor, generation_config=config, streamer=streamer) -.. image:: internvl2-with-output_files/internvl2-with-output_16_0.png +.. image:: internvl2-with-output_files/internvl2-with-output_14_0.png .. parsed-literal:: - User: - Please describe the image shortly. + User: Please describe the image shortly Assistant: - - -.. parsed-literal:: - - Setting `pad_token_id` to `eos_token_id`:151645 for open-end generation. - - -.. parsed-literal:: - - The image shows a red panda lying on its side, partially wrapped in a wooden structure, possibly a container or log. The red panda appears to be looking at the camera with large, expressive eyes, displaying an endearing and lively appearance. The background consists of a portion of the red panda's habitat environment, which appears to be a tree and some greenery. + . + + The image shows a red panda, a type of mammal known for its distinctive red fur and white markings. The animal is resting on a wooden structure, possibly a platform or a platform-like object, with its head turned slightly towards the camera. The background is a natural setting, with trees and foliage visible, suggesting that the red panda is in a forested or wooded area. The red panda's eyes are large and expressive, and its ears are perked up, indicating that it is alert Interactive demo ---------------- @@ -542,25 +494,11 @@ Interactive demo from gradio_helper import make_demo - demo = make_demo(ov_model, tokenizer) + demo = make_demo(ov_model) try: - demo.launch(debug=False, height=600) + demo.launch(debug=True, height=600) except Exception: - demo.launch(debug=False, share=True, height=600) + demo.launch(debug=True, share=True, height=600) # if you are launching remotely, specify server_name and server_port # demo.launch(server_name='your server name', server_port='server port in int') # Read more in the docs: https://gradio.app/docs/ - - -.. parsed-literal:: - - Running on local URL: http://127.0.0.1:7860 - - To create a public link, set `share=True` in `launch()`. 
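The Gradio demo above relies on the pipeline's built-in chat mode to keep the conversation history between turns. A minimal sketch of that usage with the ``ov_model``, ``image_tensor``, ``config`` and ``streamer`` objects defined earlier, assuming ``start_chat``/``finish_chat`` behave as in the OpenVINO GenAI ``LLMPipeline`` API:

.. code:: ipython3

    # Chat-mode sketch: the pipeline stores the dialogue internally, so the
    # follow-up question does not need to resend the image or previous turns.
    ov_model.start_chat()
    ov_model.generate("Please describe the image shortly", image=image_tensor, generation_config=config, streamer=streamer)
    ov_model.generate("What is unusual about this animal?", generation_config=config, streamer=streamer)
    ov_model.finish_chat()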
- - - - - - - diff --git a/docs/notebooks/internvl2-with-output_files/internvl2-with-output_16_0.jpg b/docs/notebooks/internvl2-with-output_files/internvl2-with-output_14_0.jpg similarity index 100% rename from docs/notebooks/internvl2-with-output_files/internvl2-with-output_16_0.jpg rename to docs/notebooks/internvl2-with-output_files/internvl2-with-output_14_0.jpg diff --git a/docs/notebooks/internvl2-with-output_files/internvl2-with-output_16_0.png b/docs/notebooks/internvl2-with-output_files/internvl2-with-output_14_0.png similarity index 100% rename from docs/notebooks/internvl2-with-output_files/internvl2-with-output_16_0.png rename to docs/notebooks/internvl2-with-output_files/internvl2-with-output_14_0.png diff --git a/docs/notebooks/jina-clip-with-output.rst b/docs/notebooks/jina-clip-with-output.rst index 1cdb2e1d286245..478d333d54d7e7 100644 --- a/docs/notebooks/jina-clip-with-output.rst +++ b/docs/notebooks/jina-clip-with-output.rst @@ -77,14 +77,7 @@ Prerequisites .. code:: ipython3 %pip install -q "openvino>=2024.2.0" "datasets>=2.20" "nncf>=2.11.0" - %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu "gradio>=4.19" "pillow" "einops" "timm" "transformers[torch]>=4.39" "torch>=2.1" "matplotlib>=3.4" - - -.. parsed-literal:: - - Note: you may need to restart the kernel to use updated packages. - Note: you may need to restart the kernel to use updated packages. - + %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu "gradio>=4.19" "pillow" "einops" "timm" "transformers[torch]>=4.39" "torch>=2.1" "matplotlib>=3.4" "typing_extensions>=4.9" Instantiate model ----------------- @@ -103,17 +96,6 @@ weights, using ``from_pretrained`` method. model = AutoModel.from_pretrained("jinaai/jina-clip-v1", trust_remote_code=True) - -.. parsed-literal:: - - 2024-11-05 01:41:58.578137: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 01:41:58.612620: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. - To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 01:41:59.276782: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/timm/models/layers/__init__.py:48: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers - warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning) - - Prepare input data ~~~~~~~~~~~~~~~~~~ @@ -127,28 +109,32 @@ passing in the PIL.Image objects. 
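Before the conversion steps below, it can be useful to check how the original PyTorch checkpoint produces embeddings, since the converted OpenVINO text and vision models are expected to reproduce them. A small sketch, assuming the ``encode_text`` helper exposed by the ``jina-clip-v1`` remote code returns one vector per input string (method names and return types depend on the checkpoint revision; an analogous ``encode_image`` helper exists for images):

.. code:: ipython3

    # Sketch: cosine similarity between two captions using the original
    # PyTorch model loaded above.
    import numpy as np

    embs = np.asarray(model.encode_text(["A photo of a fur seal", "A photo of a sleeping cat"]))
    cos = float(embs[0] @ embs[1] / (np.linalg.norm(embs[0]) * np.linalg.norm(embs[1])))
    print(f"caption-caption cosine similarity: {cos:.3f}")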
from PIL import Image import requests + from pathlib import Path - # image input data - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) + if not Path("notebook_utils.py").exists(): + # image input data + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) - open("notebook_utils.py", "w").write(r.text) + open("notebook_utils.py", "w").write(r.text) from notebook_utils import download_file, device_widget, quantization_widget - download_file( - "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/3f779fc1-c1b2-4dec-915a-64dae510a2bb", - "furseal.png", - directory="data", - ) + if not Path("data/furseal.png").exists(): + download_file( + "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/3f779fc1-c1b2-4dec-915a-64dae510a2bb", + "furseal.png", + directory="data", + ) img_furseal = Image.open("./data/furseal.png") - image_path = download_file( - "https://github.com/user-attachments/assets/1c66a05d-7442-45c2-a34c-bb08b95af7a6", - "coco.jpg", - directory="data", - ) + if not Path("data/coco.jpg").exists(): + image_path = download_file( + "https://github.com/user-attachments/assets/1c66a05d-7442-45c2-a34c-bb08b95af7a6", + "coco.jpg", + directory="data", + ) img_coco = Image.open("./data/coco.jpg") @@ -292,23 +278,6 @@ loading on device using ``core.complie_model``. ov_text_model = ov.convert_model(model.text_model, example_input=text_inputs["input_ids"]) ov.save_model(ov_text_model, fp16_text_model_path) - -.. parsed-literal:: - - WARNING:tensorflow:Please fix your imports. Module tensorflow.python.training.tracking.base has been moved to tensorflow.python.trackable.base. The old module will be deleted in version 2.11. - - -.. parsed-literal:: - - WARNING:tensorflow:Please fix your imports. Module tensorflow.python.training.tracking.base has been moved to tensorflow.python.trackable.base. The old module will be deleted in version 2.11. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:4713: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead - warnings.warn( - /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/jinaai/jina-bert-flash-implementation/b78d1595de294f13ffe7b19d6cd63892a6e4e7a4/mha.py:333: TracerWarning: Converting a tensor to a Python float might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - softmax_scale = self.softmax_scale or 1.0 / math.sqrt(q.shape[-1]) - /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/jinaai/jina-bert-flash-implementation/b78d1595de294f13ffe7b19d6cd63892a6e4e7a4/mha.py:343: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if seqlen > self.linear_biases.shape[-1]: - - .. code:: ipython3 fp16_vision_model_path = Path("jina-clip-vision_v1_fp16.xml") @@ -317,13 +286,6 @@ loading on device using ``core.complie_model``. 
ov_vision_model = ov.convert_model(model.vision_model, example_input=vision_inputs["pixel_values"]) ov.save_model(ov_vision_model, fp16_vision_model_path) - -.. parsed-literal:: - - /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/jinaai/jina-clip-implementation/96e41b892fe647a3c45bf921352f147184024aef/eva_model.py:468: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - assert H == self.img_size[0] and W == self.img_size[1], ( - - Select inference device ~~~~~~~~~~~~~~~~~~~~~~~ @@ -412,11 +374,15 @@ inference faster. The optimization process contains the following steps: .. code:: ipython3 - # Fetch `skip_kernel_extension` module - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", - ) - open("skip_kernel_extension.py", "w").write(r.text) + if not Path("skip_kernel_extension.py").exists(): + # Fetch `skip_kernel_extension` module + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", + ) + open("skip_kernel_extension.py", "w").write(r.text) + + int8_text_model_path = Path("jina-clip-text_v1_int8.xml") + int8_vision_model_path = Path("jina-clip-vision_v1_int8.xml") %load_ext skip_kernel_extension @@ -506,18 +472,19 @@ Dataset with text data import logging import nncf - dataset = load_dataset("google-research-datasets/conceptual_captions", trust_remote_code=True) - train_dataset = dataset["train"].shuffle(seed=42) + if not int8_text_model_path.exists(): + dataset = load_dataset("google-research-datasets/conceptual_captions", trust_remote_code=True) + train_dataset = dataset["train"].shuffle(seed=42) - dataloader_text = torch.utils.data.DataLoader(train_dataset, collate_fn=collate_fn_text, batch_size=1) - calibration_data_text = prepare_calibration_data_text(dataloader_text, 50) + dataloader_text = torch.utils.data.DataLoader(train_dataset, collate_fn=collate_fn_text, batch_size=1) + calibration_data_text = prepare_calibration_data_text(dataloader_text, 50) .. parsed-literal:: - INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, tensorflow, onnx, openvino + INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, openvino Fetching 50 samples for the initialization... - + .. parsed-literal:: @@ -588,17 +555,18 @@ Dataset with image data %%skip not $to_quantize.value - dataset = load_dataset("google-research-datasets/conceptual_captions", trust_remote_code=True) - train_dataset = dataset["train"].shuffle(seed=42) + if not int8_vision_model_path.exists(): + dataset = load_dataset("google-research-datasets/conceptual_captions", trust_remote_code=True) + train_dataset = dataset["train"].shuffle(seed=42) - dataloader_vis = torch.utils.data.DataLoader(train_dataset, collate_fn=collate_fn_vision, batch_size=1) - calibration_data_vision = prepare_calibration_data_vis(dataloader_vis, 50) + dataloader_vis = torch.utils.data.DataLoader(train_dataset, collate_fn=collate_fn_vision, batch_size=1) + calibration_data_vision = prepare_calibration_data_vis(dataloader_vis, 50) .. parsed-literal:: Fetching 50 samples for the initialization... - + .. parsed-literal:: @@ -621,108 +589,48 @@ Quantization of text model -.. 
code:: ipython3 - - int8_text_model_path = "jina-clip-text_v1_int8.xml" - .. code:: ipython3 %%skip not $to_quantize.value - if len(calibration_data_text) == 0: - raise RuntimeError( - 'Calibration dataset is empty. Please check internet connection and try to download images manually.' - ) + if not int8_text_model_path.exists(): + if len(calibration_data_text) == 0: + raise RuntimeError( + 'Calibration dataset is empty. Please check internet connection and try to download images manually.' + ) - ov_model_text = core.read_model(fp16_text_model_path) + ov_model_text = core.read_model(fp16_text_model_path) - calibration_dataset = nncf.Dataset(calibration_data_text) - quantized_model = nncf.quantize( - model=ov_model_text, - calibration_dataset=calibration_dataset - ) - ov.save_model(quantized_model, int8_text_model_path) - - - -.. parsed-literal:: - - Output() - - - - - - - - - - -.. parsed-literal:: - - Output() - - - - - - - - + calibration_dataset = nncf.Dataset(calibration_data_text) + quantized_model = nncf.quantize( + model=ov_model_text, + calibration_dataset=calibration_dataset + ) + ov.save_model(quantized_model, int8_text_model_path) Quantization of image model ^^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. code:: ipython3 - - int8_vision_model_path = "jina-clip-vision_v1_int8.xml" - .. code:: ipython3 %%skip not $to_quantize.value - if len(calibration_data_vision) == 0: - raise RuntimeError( - 'Calibration dataset is empty. Please check internet connection and try to download images manually.' - ) + if not int8_vision_model_path.exists(): + if len(calibration_data_vision) == 0: + raise RuntimeError( + 'Calibration dataset is empty. Please check internet connection and try to download images manually.' + ) - ov_model_vision = core.read_model(fp16_vision_model_path) + ov_model_vision = core.read_model(fp16_vision_model_path) - calibration_dataset = nncf.Dataset(calibration_data_vision) - quantized_model = nncf.quantize( - model=ov_model_vision, - calibration_dataset=calibration_dataset - ) - ov.save_model(quantized_model, int8_vision_model_path) - - - -.. parsed-literal:: - - Output() - - - - - - - - - - -.. parsed-literal:: - - Output() - - - - - - - - + calibration_dataset = nncf.Dataset(calibration_data_vision) + quantized_model = nncf.quantize( + model=ov_model_vision, + calibration_dataset=calibration_dataset + ) + ov.save_model(quantized_model, int8_vision_model_path) .. code:: ipython3 @@ -739,7 +647,7 @@ Quantization of image model -.. image:: jina-clip-with-output_files/jina-clip-with-output_39_0.png +.. image:: jina-clip-with-output_files/jina-clip-with-output_37_0.png Compare File Size @@ -771,7 +679,7 @@ Compare File Size Text model: FP16 model size - 266.88 MB; INT8 model size - 136.98 MB; Model compression rate: 1.948 Vision model: FP16 model size - 163.83 MB; INT8 model size - 82.64 MB; Model compression rate: 1.983 - + Compare inference time of the FP16 IR and quantized models ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -820,9 +728,9 @@ approximately estimate the speed up of the dynamic quantized models. .. 
parsed-literal:: - Performance speed up for text model: 1.978 - Performance speed up for vision model: 1.428 - + Performance speed up for text model: 1.610 + Performance speed up for vision model: 1.489 + Gradio demo ----------- @@ -906,23 +814,9 @@ example, ``cat,dog,bird``) demo = make_demo(image_text_fn=image_text_sim, text_text_fn=text_text_sim, image_image_fn=image_image_sim, model_choice_visible=model_choice_visible) try: - demo.queue().launch(debug=False) + demo.queue().launch(debug=True) except Exception: - demo.queue().launch(share=True, debug=False) + demo.queue().launch(share=True, debug=True) # if you are launching remotely, specify server_name and server_port # demo.launch(server_name='your server name', server_port='server port in int') # Read more in the docs: https://gradio.app/docs/ - - -.. parsed-literal:: - - Running on local URL: http://127.0.0.1:7860 - - To create a public link, set `share=True` in `launch()`. - - - - - - - diff --git a/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_11_0.png b/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_11_0.png index 83744f48df88cc..c5043ea82df122 100644 --- a/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_11_0.png +++ b/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_11_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9b913407ebaac94ee389f4ecd1b166dfbbb2b9bfd12ceaff8df783460cbd5e64 +oid sha256:d328ce0b22f2a80ed7640ac0a2b292df687aaf303427e56d954d30de439c0c56 size 427929 diff --git a/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_21_0.png b/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_21_0.png index 83744f48df88cc..c5043ea82df122 100644 --- a/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_21_0.png +++ b/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_21_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9b913407ebaac94ee389f4ecd1b166dfbbb2b9bfd12ceaff8df783460cbd5e64 +oid sha256:d328ce0b22f2a80ed7640ac0a2b292df687aaf303427e56d954d30de439c0c56 size 427929 diff --git a/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_37_0.png b/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_37_0.png new file mode 100644 index 00000000000000..71eaff4146ac7d --- /dev/null +++ b/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_37_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a8a465646b9f698e200a3934841843024767c8d0a559d0a1267f76c5bcf9b87e +size 428007 diff --git a/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_39_0.png b/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_39_0.png deleted file mode 100644 index dc44386559455c..00000000000000 --- a/docs/notebooks/jina-clip-with-output_files/jina-clip-with-output_39_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:9c3ee89570303f7037c893a1a6a2381569ec34fc5d9e29526f4ae1c94ead1f96 -size 428013 diff --git a/docs/notebooks/knowledge-graphs-conve-with-output.rst b/docs/notebooks/knowledge-graphs-conve-with-output.rst index de9115fd9ab4a8..aa8b1a20ea554f 100644 --- a/docs/notebooks/knowledge-graphs-conve-with-output.rst +++ b/docs/notebooks/knowledge-graphs-conve-with-output.rst @@ -233,7 +233,7 @@ Download Model Checkpoint .. 
parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/knowledge-graphs-conve/models/conve.pt') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/knowledge-graphs-conve/models/conve.pt') @@ -395,7 +395,7 @@ typical to use metrics such as Mean Reciprocal Rank, Hits@10 etc. .. parsed-literal:: - Average time taken for inference: 0.7430613040924072 ms + Average time taken for inference: 1.2128651142120361 ms Mean accuracy of the model on the test dataset: 0.875 @@ -534,7 +534,7 @@ select device from dropdown list for running inference using OpenVINO .. parsed-literal:: - Average time taken for inference: 1.0752081871032715 ms + Average time taken for inference: 0.8927186330159506 ms Mean accuracy of the model on the test dataset: 0.10416666666666667 @@ -553,7 +553,7 @@ Determine the platform specific speedup obtained through OpenVINO graph optimiza .. parsed-literal:: - Speedup with OpenVINO optimizations: 0.69 X + Speedup with OpenVINO optimizations: 1.36 X Benchmark the converted OpenVINO model using benchmark app @@ -598,7 +598,7 @@ inference can also be obtained by looking at the benchmark app results. [ WARNING ] Performance hint was not explicitly specified in command line. Device(AUTO) performance hint will be set to PerformanceMode.THROUGHPUT. [Step 4/11] Reading model files [ INFO ] Loading model files - [ INFO ] Read model took 4.36 ms + [ INFO ] Read model took 4.44 ms [ INFO ] Original model I/O parameters: [ INFO ] Model inputs: [ INFO ] e1 (node: e1) : i64 / [...] / [] @@ -614,7 +614,7 @@ inference can also be obtained by looking at the benchmark app results. [ INFO ] Model outputs: [ INFO ] ***NO_NAME*** (node: aten::softmax/Softmax) : f32 / [...] / [1,271] [Step 7/11] Loading the model to the device - [ INFO ] Compile model took 54.92 ms + [ INFO ] Compile model took 49.21 ms [Step 8/11] Querying optimal runtime parameters [ INFO ] Model: [ INFO ] NETWORK_NAME: Model0 @@ -653,17 +653,17 @@ inference can also be obtained by looking at the benchmark app results. [ INFO ] Fill input 'rel' with random values [Step 10/11] Measuring performance (Start inference asynchronously, 12 inference requests, limits: 10000 ms duration) [ INFO ] Benchmarking in inference only mode (inputs filling are not included in measurement loop). - [ INFO ] First inference took 1.66 ms + [ INFO ] First inference took 2.26 ms [Step 11/11] Dumping statistics report [ INFO ] Execution Devices:['CPU'] - [ INFO ] Count: 94596 iterations - [ INFO ] Duration: 10001.69 ms + [ INFO ] Count: 95532 iterations + [ INFO ] Duration: 10001.76 ms [ INFO ] Latency: - [ INFO ] Median: 1.07 ms - [ INFO ] Average: 1.09 ms - [ INFO ] Min: 0.79 ms - [ INFO ] Max: 8.57 ms - [ INFO ] Throughput: 9458.00 FPS + [ INFO ] Median: 1.06 ms + [ INFO ] Average: 1.08 ms + [ INFO ] Min: 0.73 ms + [ INFO ] Max: 29.28 ms + [ INFO ] Throughput: 9551.52 FPS Conclusions diff --git a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output.rst b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output.rst index b6a7a971fef3be..d90b461ac2023a 100644 --- a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output.rst +++ b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output.rst @@ -85,7 +85,7 @@ Install requirements .. 
parsed-literal:: - Requirement already satisfied: pip in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (24.3.1) + Requirement already satisfied: pip in /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages (24.3.1) Note: you may need to restart the kernel to use updated packages. Note: you may need to restart the kernel to use updated packages. Note: you may need to restart the kernel to use updated packages. @@ -152,10 +152,9 @@ example `__ .. parsed-literal:: - 2024-11-05 01:44:54.753766: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 01:44:54.788691: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 01:21:24.800927: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:21:24.825776: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 01:44:55.309895: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT .. parsed-literal:: @@ -374,11 +373,14 @@ Vision model accept ``pixel_values`` and returns ``image_embeds``. .. parsed-literal:: [ WARNING ] Please fix your imports. Module %s has been moved to %s. The old module will be deleted in version %s. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:4713: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead warnings.warn( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:465: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:452: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if not interpolate_pos_encoding and (height != self.image_size or width != self.image_size): + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:519: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:505: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:559: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): @@ -406,7 +408,7 @@ Convert Image To Text Projection model .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:165: UserWarning: The .grad attribute of a Tensor that is not a leaf Tensor is being accessed. Its .grad attribute won't be populated during autograd.backward(). If you indeed want the .grad field to be populated for a non-leaf Tensor, use .retain_grad() on the non-leaf Tensor. If you access the non-leaf Tensor by mistake, make sure you access the leaf Tensor instead. See github.com/pytorch/pytorch/pull/30531 for more informations. (Triggered internally at aten/src/ATen/core/TensorBody.h:489.) + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:165: UserWarning: The .grad attribute of a Tensor that is not a leaf Tensor is being accessed. Its .grad attribute won't be populated during autograd.backward(). If you indeed want the .grad field to be populated for a non-leaf Tensor, use .retain_grad() on the non-leaf Tensor. If you access the non-leaf Tensor by mistake, make sure you access the leaf Tensor instead. See github.com/pytorch/pytorch/pull/30531 for more informations. (Triggered internally at aten/src/ATen/core/TensorBody.h:489.) 
if a.grad is not None: @@ -541,13 +543,13 @@ generated text by ``AutoProcessor``. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:804: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:859: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if max_pos > self.weights.size(0): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:1113: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:1168: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if input_shape[-1] > 1: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:920: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:975: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if attention_mask.size() != (batch_size, 1, seq_length, src_len): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:1206: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/kosmos2/modeling_kosmos2.py:1261: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if past_key_values_length > 0: @@ -1389,9 +1391,9 @@ pipelines, we use mean inference time on 7 samples. .. parsed-literal:: - FP32 pipeline: 2.746 seconds - Optimized pipeline: 1.140 seconds - Performance speed-up: 2.409 + FP32 pipeline: 2.727 seconds + Optimized pipeline: 1.146 seconds + Performance speed-up: 2.380 Interactive inference diff --git a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.jpg b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.jpg index 2310cb001b0c6b..c4966e68a0f7c6 100644 --- a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.jpg +++ b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.jpg @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:9ca596f09c0f6c0dafa4aca0fbe7974941301cfcbc6bcb3a8c4255774c347d0b -size 123320 +oid sha256:d99c65937fed48b5c1ef214891a3ded6fc4acabbad731ecafdf30d897cd8807b +size 121119 diff --git a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.png b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.png index 91289c35d7c60c..717e205ccbaa23 100644 --- a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.png +++ b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_29_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:56d06f7d654939feda627f67196b813de9b38a718acba9f5daed59a43314829f -size 1150807 +oid sha256:4e416163b28e55e213c884e64462792c0cb5f9ae1389961c3a5467ef2c1ac101 +size 1150960 diff --git a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_48_1.png b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_48_1.png index d98f56141b1252..85633bcfcf04ae 100644 --- a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_48_1.png +++ b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_48_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:0d7f8506e5f1bd369debee273b45c601d05901af4937d8cc976f985cd4a81fed -size 1149292 +oid sha256:7561941945a717b6a4f6e6bda157e86c62c5ff638acad518558c176a0ba21be5 +size 1149449 diff --git a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.jpg 
b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.jpg index b53344f52b7396..5aed31c2359d29 100644 --- a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.jpg +++ b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.jpg @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:edd5a47baf47ae90532b47bc5ee05e8503b7d1deda59d956a354688ed949c8b5 -size 121605 +oid sha256:de647e8e1a39e8ee78c7c90a14f373b972e4f381f3348d6b28d0fe18a912eb51 +size 122484 diff --git a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.png b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.png index 2edc9a038ff8c3..5eb34946e278d0 100644 --- a/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.png +++ b/docs/notebooks/kosmos2-multimodal-large-language-model-with-output_files/kosmos2-multimodal-large-language-model-with-output_8_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:aa184084b598dac717e99fe9677f1fe9dd4f6b85ec123c075d4109c75b134841 -size 1150675 +oid sha256:77941b5ac0c4ca3379b3a66eb94aeaa24b8c68e225f6e9369ca1cb262feaab7a +size 1150730 diff --git a/docs/notebooks/language-quantize-bert-with-output.rst b/docs/notebooks/language-quantize-bert-with-output.rst index 21ecfe511f1b76..2ba6bca451ad0b 100644 --- a/docs/notebooks/language-quantize-bert-with-output.rst +++ b/docs/notebooks/language-quantize-bert-with-output.rst @@ -101,10 +101,9 @@ Imports .. parsed-literal:: - 2024-11-05 01:51:49.197259: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 01:51:49.231710: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 01:28:13.948145: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:28:13.973147: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 01:51:49.783615: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT .. parsed-literal:: @@ -211,8 +210,9 @@ PyTorch model formats are supported: .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:4713: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. 
Please use `model.hf_quantizer.is_trainable` instead + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead warnings.warn( + `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. Prepare the Dataset @@ -244,13 +244,6 @@ tokenizer from HuggingFace. data_source = create_data_source() - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/tokenization_utils_base.py:1601: FutureWarning: `clean_up_tokenization_spaces` was not set. It will be set to `True` by default. This behavior will be depracted in transformers v4.45, and will be then set to `False` by default. For more details check this issue: https://github.com/huggingface/transformers/issues/31884 - warnings.warn( - - Optimize model using NNCF Post-training Quantization API -------------------------------------------------------- @@ -505,9 +498,9 @@ Frames Per Second (FPS) for images. .. parsed-literal:: - PyTorch model on CPU: 0.068 seconds per sentence, SPS: 14.68 - IR FP32 model in OpenVINO Runtime/AUTO: 0.020 seconds per sentence, SPS: 49.24 - OpenVINO IR INT8 model in OpenVINO Runtime/AUTO: 0.009 seconds per sentence, SPS: 108.47 + PyTorch model on CPU: 0.068 seconds per sentence, SPS: 14.67 + IR FP32 model in OpenVINO Runtime/AUTO: 0.020 seconds per sentence, SPS: 48.80 + OpenVINO IR INT8 model in OpenVINO Runtime/AUTO: 0.009 seconds per sentence, SPS: 107.05 Finally, measure the inference performance of OpenVINO ``FP32`` and @@ -548,7 +541,7 @@ in OpenVINO. [ WARNING ] Performance hint was not explicitly specified in command line. Device(AUTO) performance hint will be set to PerformanceMode.LATENCY. [Step 4/11] Reading model files [ INFO ] Loading model files - [ INFO ] Read model took 19.11 ms + [ INFO ] Read model took 18.80 ms [ INFO ] Original model I/O parameters: [ INFO ] Model inputs: [ INFO ] input_ids (node: input_ids) : i64 / [...] / [1,?] @@ -559,7 +552,7 @@ in OpenVINO. [Step 5/11] Resizing model to match image sizes and given batch [ INFO ] Model batch size: 1 [ INFO ] Reshaping model: 'input_ids': [1,128], '63': [1,128], 'token_type_ids': [1,128] - [ INFO ] Reshape model took 5.55 ms + [ INFO ] Reshape model took 5.49 ms [Step 6/11] Configuring input of the model [ INFO ] Model inputs: [ INFO ] input_ids (node: input_ids) : i64 / [...] / [1,128] @@ -568,7 +561,7 @@ in OpenVINO. [ INFO ] Model outputs: [ INFO ] logits (node: __module.classifier/aten::linear/Add) : f32 / [...] / [1,2] [Step 7/11] Loading the model to the device - [ INFO ] Compile model took 344.20 ms + [ INFO ] Compile model took 351.45 ms [Step 8/11] Querying optimal runtime parameters [ INFO ] Model: [ INFO ] NETWORK_NAME: Model0 @@ -609,17 +602,17 @@ in OpenVINO. [ INFO ] Fill input 'token_type_ids' with random values [Step 10/11] Measuring performance (Start inference synchronously, limits: 120000 ms duration) [ INFO ] Benchmarking in inference only mode (inputs filling are not included in measurement loop). 
- [ INFO ] First inference took 22.90 ms + [ INFO ] First inference took 24.58 ms [Step 11/11] Dumping statistics report [ INFO ] Execution Devices:['CPU'] - [ INFO ] Count: 6485 iterations - [ INFO ] Duration: 120011.48 ms + [ INFO ] Count: 6143 iterations + [ INFO ] Duration: 120005.00 ms [ INFO ] Latency: - [ INFO ] Median: 18.09 ms - [ INFO ] Average: 18.41 ms + [ INFO ] Median: 18.11 ms + [ INFO ] Average: 19.44 ms [ INFO ] Min: 17.32 ms - [ INFO ] Max: 26.49 ms - [ INFO ] Throughput: 54.04 FPS + [ INFO ] Max: 31.44 ms + [ INFO ] Throughput: 51.19 FPS .. code:: ipython3 @@ -646,7 +639,7 @@ in OpenVINO. [ WARNING ] Performance hint was not explicitly specified in command line. Device(AUTO) performance hint will be set to PerformanceMode.LATENCY. [Step 4/11] Reading model files [ INFO ] Loading model files - [ INFO ] Read model took 24.93 ms + [ INFO ] Read model took 24.80 ms [ INFO ] Original model I/O parameters: [ INFO ] Model inputs: [ INFO ] input_ids (node: input_ids) : i64 / [...] / [1,?] @@ -657,7 +650,7 @@ in OpenVINO. [Step 5/11] Resizing model to match image sizes and given batch [ INFO ] Model batch size: 1 [ INFO ] Reshaping model: 'input_ids': [1,128], '63': [1,128], 'token_type_ids': [1,128] - [ INFO ] Reshape model took 7.14 ms + [ INFO ] Reshape model took 7.21 ms [Step 6/11] Configuring input of the model [ INFO ] Model inputs: [ INFO ] input_ids (node: input_ids) : i64 / [...] / [1,128] @@ -666,7 +659,7 @@ in OpenVINO. [ INFO ] Model outputs: [ INFO ] logits (node: __module.classifier/aten::linear/Add) : f32 / [...] / [1,2] [Step 7/11] Loading the model to the device - [ INFO ] Compile model took 1080.21 ms + [ INFO ] Compile model took 1047.83 ms [Step 8/11] Querying optimal runtime parameters [ INFO ] Model: [ INFO ] NETWORK_NAME: Model0 @@ -707,15 +700,15 @@ in OpenVINO. [ INFO ] Fill input 'token_type_ids' with random values [Step 10/11] Measuring performance (Start inference synchronously, limits: 120000 ms duration) [ INFO ] Benchmarking in inference only mode (inputs filling are not included in measurement loop). - [ INFO ] First inference took 16.00 ms + [ INFO ] First inference took 15.79 ms [Step 11/11] Dumping statistics report [ INFO ] Execution Devices:['CPU'] - [ INFO ] Count: 13181 iterations - [ INFO ] Duration: 120003.10 ms + [ INFO ] Count: 13290 iterations + [ INFO ] Duration: 120007.52 ms [ INFO ] Latency: - [ INFO ] Median: 8.93 ms - [ INFO ] Average: 9.01 ms - [ INFO ] Min: 7.68 ms - [ INFO ] Max: 12.00 ms - [ INFO ] Throughput: 109.84 FPS + [ INFO ] Median: 8.89 ms + [ INFO ] Average: 8.94 ms + [ INFO ] Min: 7.64 ms + [ INFO ] Max: 13.94 ms + [ INFO ] Throughput: 110.74 FPS diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output.rst b/docs/notebooks/latent-consistency-models-image-generation-with-output.rst index fa0fdbd9718831..523afca76dd660 100644 --- a/docs/notebooks/latent-consistency-models-image-generation-with-output.rst +++ b/docs/notebooks/latent-consistency-models-image-generation-with-output.rst @@ -41,23 +41,17 @@ repository `__. In this tutorial, we consider how to convert and run LCM using OpenVINO. An additional part demonstrates how to run quantization with -`NNCF `__ to speed up -pipeline. +`NNCF `__ to speed up pipeline +and generate images using `OpenVINO +GenAI `__. 
**Table of contents:** - `Prerequisites <#prerequisites>`__ -- `Prepare models for OpenVINO format - conversion <#prepare-models-for-openvino-format-conversion>`__ - `Convert models to OpenVINO format <#convert-models-to-openvino-format>`__ - - - `Text Encoder <#text-encoder>`__ - - `U-Net <#u-net>`__ - - `VAE <#vae>`__ - - `Prepare inference pipeline <#prepare-inference-pipeline>`__ - `Configure Inference Pipeline <#configure-inference-pipeline>`__ @@ -69,9 +63,10 @@ pipeline. - `Run quantization <#run-quantization>`__ - `Compare inference time of the FP16 and INT8 models <#compare-inference-time-of-the-fp16-and-int8-models>`__ + - `Compare UNet file size <#compare-unet-file-size>`__ - - `Compare UNet file size <#compare-unet-file-size>`__ - +- `Run Text to image generation using OpenVINO + GenAI <#run-text-to-image-generation-using-openvino-genai>`__ - `Interactive demo <#interactive-demo>`__ Installation Instructions @@ -92,10 +87,27 @@ Prerequisites .. code:: ipython3 %pip install -q "torch>=2.1" --index-url https://download.pytorch.org/whl/cpu - %pip install -q "openvino>=2024.3.0" transformers "diffusers>=0.30.1" pillow "gradio>=4.19" "nncf>=2.12.0" "datasets>=2.14.6" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "transformers>=4.45" tqdm accelerate "diffusers>=0.30.1" pillow "gradio>=4.19" "nncf>=2.12.0" "datasets>=2.14.6" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "git+https://github.com/huggingface/optimum-intel.git" + %pip install -qU --pre "openvino>=2024.4.0" "openvino-tokenizers" "openvino-genai" --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly -Prepare models for OpenVINO format conversion ---------------------------------------------- +.. code:: ipython3 + + from pathlib import Path + import requests + + utility_files = [Path("notebook_utils.py"), Path("skip_kernel_extension.py"), Path("cmd_helper.py")] + + base_utils_url = "https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/" + + for utility_file in utility_files: + if not utility_file.exists(): + r = requests.get(base_utils_url + utility_file.name) + with utility_file.open("w") as f: + f.write(r.text) + +Convert models to OpenVINO format --------------------------------- @@ -117,316 +129,48 @@ and distilled using LCD. The distillation approach efficiently converts a pre-trained guided diffusion model into a latent consistency model by solving an augmented PF-ODE. -For starting work with LCM, we should instantiate generation pipeline -first. ``DiffusionPipeline.from_pretrained`` method download all -pipeline components for LCM and configure them. This model uses custom -inference pipeline stored as part of model repository, we also should -provide which module should be loaded for initialization using -``custom_pipeline`` argument and revision for it. +To simplify model export, we will use the Optimum Intel library. +`Optimum Intel `__ is +the interface between the +`Transformers `__ and +`Diffusers `__ libraries +and OpenVINO to accelerate end-to-end pipelines on Intel architectures. +It provides an easy-to-use +`interface `__ +for exporting models to `OpenVINO Intermediate Representation +(IR) `__ +format. + +The command below demonstrates the basic syntax for model export with +``optimum-cli`` + +..
code:: bash + + optimum-cli export openvino --model <model_id> --task <task> + +where the ``--model`` argument is the model id from the HuggingFace Hub or a local directory with the model (saved using the ``.save_pretrained`` method), and ``--task`` is one of the `supported tasks `__ that the exported model should solve. For image generation it will be ``text-to-image``. If model initialization requires remote code, the ``--trust-remote-code`` flag should additionally be passed. You can also apply fp16, 8-bit or 4-bit weight compression on the Linear, Convolutional and Embedding layers when exporting your model with the CLI by setting ``--weight-format`` to fp16, int8 or int4, respectively. This type of optimization reduces the memory footprint and inference latency. We will quantize our model later using NNCF, so in this step we will use fp16 as the base model export precision. .. code:: ipython3 - import gc - import warnings - from pathlib import Path - from diffusers import DiffusionPipeline - import numpy as np - - - warnings.filterwarnings("ignore") - - TEXT_ENCODER_OV_PATH = Path("model/text_encoder.xml") - UNET_OV_PATH = Path("model/unet.xml") - VAE_DECODER_OV_PATH = Path("model/vae_decoder.xml") + from cmd_helper import optimum_cli + model_id = "SimianLuo/LCM_Dreamshaper_v7" + model_path = Path(model_id.split("/")[-1] + "_ov") - def load_orginal_pytorch_pipeline_componets(skip_models=False, skip_safety_checker=False): - pipe = DiffusionPipeline.from_pretrained("SimianLuo/LCM_Dreamshaper_v7") - scheduler = pipe.scheduler - tokenizer = pipe.tokenizer - feature_extractor = pipe.feature_extractor if not skip_safety_checker else None - safety_checker = pipe.safety_checker if not skip_safety_checker else None - text_encoder, unet, vae = None, None, None - if not skip_models: - text_encoder = pipe.text_encoder - text_encoder.eval() - unet = pipe.unet - unet.eval() - vae = pipe.vae - vae.eval() - del pipe - gc.collect() - return ( - scheduler, - tokenizer, - feature_extractor, - safety_checker, - text_encoder, - unet, - vae, - ) - -.. code:: ipython3 - - skip_conversion = TEXT_ENCODER_OV_PATH.exists() and UNET_OV_PATH.exists() and VAE_DECODER_OV_PATH.exists() - - ( - scheduler, - tokenizer, - feature_extractor, - safety_checker, - text_encoder, - unet, - vae, - ) = load_orginal_pytorch_pipeline_componets(skip_conversion) - - - -.. parsed-literal:: - - Fetching 15 files: 0%| | 0/15 [00:00`__ is crucial for - synthesizing high-quality text-aligned images in Stable Diffusion, - because it controls how similar the generated image will be to the - prompt. In Latent Consistency Models, CFG serves as augmentation - parameter for PF-ODE. - -Model predicts the ``sample`` state for the next step. - -.. code:: ipython3 - - def convert_unet(unet: torch.nn.Module, ir_path: Path): - """ - Convert U-net model to IR format.
- Function accepts unet model, prepares example inputs for conversion, - Parameters: - unet (StableDiffusionPipeline): unet from Stable Diffusion pipeline - ir_path (Path): File for storing model - Returns: - None - """ - # prepare inputs - dummy_inputs = { - "sample": torch.randn((1, 4, 64, 64)), - "timestep": torch.ones([1]).to(torch.float32), - "encoder_hidden_states": torch.randn((1, 77, 768)), - "timestep_cond": torch.randn((1, 256)), - } - unet.eval() - with torch.no_grad(): - ov_model = ov.convert_model(unet, example_input=dummy_inputs) - ov.save_model(ov_model, ir_path) - del ov_model - cleanup_torchscript_cache() - gc.collect() - print(f"Unet successfully converted to IR and saved to {ir_path}") - - - if not UNET_OV_PATH.exists(): - convert_unet(unet, UNET_OV_PATH) - else: - print(f"Unet will be loaded from {UNET_OV_PATH}") - del unet - gc.collect(); - -VAE -~~~ - - - -The VAE model has two parts, an encoder and a decoder. The encoder is -used to convert the image into a low dimensional latent representation, -which will serve as the input to the U-Net model. The decoder, -conversely, transforms the latent representation back into an image. - -During latent diffusion training, the encoder is used to get the latent -representations (latents) of the images for the forward diffusion -process, which applies more and more noise at each step. During -inference, the denoised latents generated by the reverse diffusion -process are converted back into images using the VAE decoder. When you -run inference for text-to-image, there is no initial image as a starting -point. You can skip this step and directly generate initial random -noise. - -In our inference pipeline, we will not use VAE encoder part and skip its -conversion for reducing memory consumption. The process of conversion -VAE encoder, can be found in Stable Diffusion notebook. - -.. code:: ipython3 - - def convert_vae_decoder(vae: torch.nn.Module, ir_path: Path): - """ - Convert VAE model for decoding to IR format. - Function accepts vae model, creates wrapper class for export only necessary for inference part, - prepares example inputs for conversion, - Parameters: - vae (torch.nn.Module): VAE model frm StableDiffusion pipeline - ir_path (Path): File for storing model - Returns: - None - """ - - class VAEDecoderWrapper(torch.nn.Module): - def __init__(self, vae): - super().__init__() - self.vae = vae - - def forward(self, latents): - return self.vae.decode(latents) - - vae_decoder = VAEDecoderWrapper(vae) - latents = torch.zeros((1, 4, 64, 64)) - - vae_decoder.eval() - with torch.no_grad(): - ov_model = ov.convert_model(vae_decoder, example_input=latents) - ov.save_model(ov_model, ir_path) - del ov_model - cleanup_torchscript_cache() - print(f"VAE decoder successfully converted to IR and saved to {ir_path}") - - - if not VAE_DECODER_OV_PATH.exists(): - convert_vae_decoder(vae, VAE_DECODER_OV_PATH) - else: - print(f"VAE decoder will be loaded from {VAE_DECODER_OV_PATH}") - - del vae - gc.collect(); + if not model_path.exists(): + optimum_cli(model_id, model_path, additional_args={"weight-format": "fp16"}) Prepare inference pipeline -------------------------- @@ -461,237 +205,27 @@ number of steps required ~2-8) to step-by-step retrieve better latent image representations. When complete, the latent image representation is decoded by the decoder part of the variational auto encoder. -.. 
code:: ipython3 - - from typing import Union, Optional, Any, List, Dict - from transformers import CLIPTokenizer, CLIPImageProcessor - from diffusers.pipelines.stable_diffusion.safety_checker import ( - StableDiffusionSafetyChecker, - ) - from diffusers.pipelines.stable_diffusion import StableDiffusionPipelineOutput - from diffusers.image_processor import VaeImageProcessor - - - class OVLatentConsistencyModelPipeline(DiffusionPipeline): - def __init__( - self, - vae_decoder: ov.Model, - text_encoder: ov.Model, - tokenizer: CLIPTokenizer, - unet: ov.Model, - scheduler: None, - safety_checker: StableDiffusionSafetyChecker, - feature_extractor: CLIPImageProcessor, - requires_safety_checker: bool = True, - ): - super().__init__() - self.vae_decoder = vae_decoder - self.text_encoder = text_encoder - self.tokenizer = tokenizer - self.register_to_config(unet=unet) - self.scheduler = scheduler - self.safety_checker = safety_checker - self.feature_extractor = feature_extractor - self.vae_scale_factor = 2**3 - self.image_processor = VaeImageProcessor(vae_scale_factor=self.vae_scale_factor) - - def _encode_prompt( - self, - prompt, - num_images_per_prompt, - prompt_embeds: None, - ): - r""" - Encodes the prompt into text encoder hidden states. - Args: - prompt (`str` or `List[str]`, *optional*): - prompt to be encoded - num_images_per_prompt (`int`): - number of images that should be generated per prompt - prompt_embeds (`torch.FloatTensor`, *optional*): - Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not - provided, text embeddings will be generated from `prompt` input argument. - """ - - if prompt_embeds is None: - text_inputs = self.tokenizer( - prompt, - padding="max_length", - max_length=self.tokenizer.model_max_length, - truncation=True, - return_tensors="pt", - ) - text_input_ids = text_inputs.input_ids - - prompt_embeds = self.text_encoder(text_input_ids, share_inputs=True, share_outputs=True) - prompt_embeds = torch.from_numpy(prompt_embeds[0]) - - bs_embed, seq_len, _ = prompt_embeds.shape - # duplicate text embeddings for each generation per prompt - prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1) - prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1) - - # Don't need to get uncond prompt embedding because of LCM Guided Distillation - return prompt_embeds - - def run_safety_checker(self, image, dtype): - if self.safety_checker is None: - has_nsfw_concept = None - else: - if torch.is_tensor(image): - feature_extractor_input = self.image_processor.postprocess(image, output_type="pil") - else: - feature_extractor_input = self.image_processor.numpy_to_pil(image) - safety_checker_input = self.feature_extractor(feature_extractor_input, return_tensors="pt") - image, has_nsfw_concept = self.safety_checker(images=image, clip_input=safety_checker_input.pixel_values.to(dtype)) - return image, has_nsfw_concept - - def prepare_latents(self, batch_size, num_channels_latents, height, width, dtype, latents=None): - shape = ( - batch_size, - num_channels_latents, - height // self.vae_scale_factor, - width // self.vae_scale_factor, - ) - if latents is None: - latents = torch.randn(shape, dtype=dtype) - # scale the initial noise by the standard deviation required by the scheduler - latents = latents * self.scheduler.init_noise_sigma - return latents - - def get_w_embedding(self, w, embedding_dim=512, dtype=torch.float32): - """ - see 
https://github.com/google-research/vdm/blob/dc27b98a554f65cdc654b800da5aa1846545d41b/model_vdm.py#L298 - Args: - timesteps: torch.Tensor: generate embedding vectors at these timesteps - embedding_dim: int: dimension of the embeddings to generate - dtype: data type of the generated embeddings - Returns: - embedding vectors with shape `(len(timesteps), embedding_dim)` - """ - assert len(w.shape) == 1 - w = w * 1000.0 - - half_dim = embedding_dim // 2 - emb = torch.log(torch.tensor(10000.0)) / (half_dim - 1) - emb = torch.exp(torch.arange(half_dim, dtype=dtype) * -emb) - emb = w.to(dtype)[:, None] * emb[None, :] - emb = torch.cat([torch.sin(emb), torch.cos(emb)], dim=1) - if embedding_dim % 2 == 1: # zero pad - emb = torch.nn.functional.pad(emb, (0, 1)) - assert emb.shape == (w.shape[0], embedding_dim) - return emb - - @torch.no_grad() - def __call__( - self, - prompt: Union[str, List[str]] = None, - height: Optional[int] = 512, - width: Optional[int] = 512, - guidance_scale: float = 7.5, - num_images_per_prompt: Optional[int] = 1, - latents: Optional[torch.FloatTensor] = None, - num_inference_steps: int = 4, - lcm_origin_steps: int = 50, - prompt_embeds: Optional[torch.FloatTensor] = None, - output_type: Optional[str] = "pil", - return_dict: bool = True, - cross_attention_kwargs: Optional[Dict[str, Any]] = None, - ): - # 1. Define call parameters - if prompt is not None and isinstance(prompt, str): - batch_size = 1 - elif prompt is not None and isinstance(prompt, list): - batch_size = len(prompt) - else: - batch_size = prompt_embeds.shape[0] - - # do_classifier_free_guidance = guidance_scale > 0.0 - # In LCM Implementation: cfg_noise = noise_cond + cfg_scale * (noise_cond - noise_uncond) , (cfg_scale > 0.0 using CFG) - - # 2. Encode input prompt - prompt_embeds = self._encode_prompt( - prompt, - num_images_per_prompt, - prompt_embeds=prompt_embeds, - ) - - # 3. Prepare timesteps - self.scheduler.set_timesteps(num_inference_steps, original_inference_steps=lcm_origin_steps) - timesteps = self.scheduler.timesteps - - # 4. Prepare latent variable - num_channels_latents = 4 - latents = self.prepare_latents( - batch_size * num_images_per_prompt, - num_channels_latents, - height, - width, - prompt_embeds.dtype, - latents, - ) - - bs = batch_size * num_images_per_prompt - - # 5. Get Guidance Scale Embedding - w = torch.tensor(guidance_scale).repeat(bs) - w_embedding = self.get_w_embedding(w, embedding_dim=256) - - # 6. 
LCM MultiStep Sampling Loop: - with self.progress_bar(total=num_inference_steps) as progress_bar: - for i, t in enumerate(timesteps): - ts = torch.full((bs,), t, dtype=torch.long) - - # model prediction (v-prediction, eps, x) - model_pred = self.unet( - [latents, ts, prompt_embeds, w_embedding], - share_inputs=True, - share_outputs=True, - )[0] - - # compute the previous noisy sample x_t -> x_t-1 - latents, denoised = self.scheduler.step(torch.from_numpy(model_pred), t, latents, return_dict=False) - progress_bar.update() - - if not output_type == "latent": - image = torch.from_numpy(self.vae_decoder(denoised / 0.18215, share_inputs=True, share_outputs=True)[0]) - image, has_nsfw_concept = self.run_safety_checker(image, prompt_embeds.dtype) - else: - image = denoised - has_nsfw_concept = None - - if has_nsfw_concept is None: - do_denormalize = [True] * image.shape[0] - else: - do_denormalize = [not has_nsfw for has_nsfw in has_nsfw_concept] - - image = self.image_processor.postprocess(image, output_type=output_type, do_denormalize=do_denormalize) - - if not return_dict: - return (image, has_nsfw_concept) - - return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept) +For starting work with LCM, we should instantiate the generation +pipeline first. The ``DiffusionPipeline.from_pretrained`` method downloads +all pipeline components (if required) for LCM and configures them. +Loading LCM for OpenVINO inference using Optimum Intel looks similar; we +only need to replace ``DiffusionPipeline`` with ``OVDiffusionPipeline``. +This model class accepts a model id from the HuggingFace Hub or a local +directory with either the original PyTorch pipeline or an already converted one. If a +path to the original pipeline is provided, it will be automatically +converted to OpenVINO format, but as we already converted the model +using the Optimum CLI, we will use the models from the previous step. Configure Inference Pipeline ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -First, you should create instances of OpenVINO Model and compile it -using selected device. Select device from dropdown list for running -inference using OpenVINO. +Optionally, we can set up which device will be used for running +inference. Select the desired inference device from the dropdown list below. .. code:: ipython3 - core = ov.Core() - - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) - from notebook_utils import device_widget device = device_widget() @@ -703,18 +237,27 @@ inference using OpenVINO. .. parsed-literal:: - Dropdown(description='Device:', options=('CPU', 'AUTO'), value='CPU') + Dropdown(description='Device:', index=1, options=('CPU', 'AUTO'), value='AUTO') .. code:: ipython3 - text_enc = core.compile_model(TEXT_ENCODER_OV_PATH, device.value) - unet_model = core.compile_model(UNET_OV_PATH, device.value) - - ov_config = {"INFERENCE_PRECISION_HINT": "f32"} if device.value != "CPU" else {} + from optimum.intel.openvino import OVDiffusionPipeline - vae_decoder = core.compile_model(VAE_DECODER_OV_PATH, device.value, ov_config) + ov_pipe = OVDiffusionPipeline.from_pretrained(model_path, device=device.value) + + +.. parsed-literal:: + + 2024-11-14 12:52:11.556586: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders.
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-14 12:52:11.570192: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:477] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered + WARNING: All log messages before absl::InitializeLog() is called are written to STDERR + E0000 00:00:1731574331.585339 2056327 cuda_dnn.cc:8310] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered + E0000 00:00:1731574331.589784 2056327 cuda_blas.cc:1418] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered + 2024-11-14 12:52:11.606540: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. + Model tokenizer and scheduler are also important parts of the pipeline. This pipeline is also can use Safety Checker, the filter for detecting @@ -724,18 +267,6 @@ embeddings using CLIP model, so additionally feature extractor component should be added in the pipeline. We reuse tokenizer, feature extractor, scheduler and safety checker from original LCM pipeline. -.. code:: ipython3 - - ov_pipe = OVLatentConsistencyModelPipeline( - tokenizer=tokenizer, - text_encoder=text_enc, - unet=unet_model, - vae_decoder=vae_decoder, - scheduler=scheduler, - feature_extractor=feature_extractor, - safety_checker=safety_checker, - ) - Text-to-image generation ------------------------ @@ -745,18 +276,13 @@ Now, let’s see model in action .. code:: ipython3 + import torch + prompt = "a beautiful pink unicorn, 8k" num_inference_steps = 4 - torch.manual_seed(1234567) images = ov_pipe( - prompt=prompt, - num_inference_steps=num_inference_steps, - guidance_scale=8.0, - lcm_origin_steps=50, - output_type="pil", - height=512, - width=512, + prompt=prompt, num_inference_steps=num_inference_steps, guidance_scale=8.0, height=512, width=512, generator=torch.Generator().manual_seed(1234567) ).images @@ -773,12 +299,19 @@ Now, let’s see model in action -.. image:: latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.png +.. image:: latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.png Nice. As you can see, the picture has quite a high definition 🔥. +.. code:: ipython3 + + import gc + + del ov_pipe + gc.collect(); + Quantization ------------ @@ -814,6 +347,7 @@ improve model inference speed. skip_for_device = "GPU" in device.value to_quantize = quantization_widget(not skip_for_device) + int8_model_path = model_path.parent / (model_path.name + "_int8") to_quantize @@ -826,22 +360,13 @@ improve model inference speed. -Let’s load ``skip magic`` extension to skip quantization if -``to_quantize`` is not selected - .. 
code:: ipython3 - int8_pipe = None - - # Fetch `skip_kernel_extension` module - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", - ) - open("skip_kernel_extension.py", "w").write(r.text) %load_ext skip_kernel_extension +Let’s load ``skip magic`` extension to skip quantization if +``to_quantize`` is not selected + Prepare calibration dataset ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -860,6 +385,8 @@ model inputs for calibration we should customize ``CompiledModel``. from tqdm.notebook import tqdm from transformers import set_seed from typing import Any, Dict, List + import openvino as ov + import numpy as np set_seed(1) @@ -874,9 +401,9 @@ model inputs for calibration we should customize ``CompiledModel``. self.data_cache.append(*args) return super().__call__(*args, **kwargs) - def collect_calibration_data(lcm_pipeline: OVLatentConsistencyModelPipeline, subset_size: int) -> List[Dict]: - original_unet = lcm_pipeline.unet - lcm_pipeline.unet = CompiledModelDecorator(original_unet, prob=0.3) + def collect_calibration_data(lcm_pipeline, subset_size: int) -> List[Dict]: + original_unet = lcm_pipeline.unet.request + lcm_pipeline.unet.request = CompiledModelDecorator(original_unet, prob=0.3) dataset = datasets.load_dataset("google-research-datasets/conceptual_captions", split="train", trust_remote_code=True).shuffle(seed=42) lcm_pipeline.set_progress_bar_config(disable=True) @@ -888,27 +415,25 @@ model inputs for calibration we should customize ``CompiledModel``. diff = 0 for batch in dataset: prompt = batch["caption"] - if len(prompt) > tokenizer.model_max_length: + if len(prompt) > lcm_pipeline.tokenizer.model_max_length: continue _ = lcm_pipeline( prompt, num_inference_steps=num_inference_steps, guidance_scale=8.0, - lcm_origin_steps=50, - output_type="pil", height=512, width=512, ) - collected_subset_size = len(lcm_pipeline.unet.data_cache) + collected_subset_size = len(lcm_pipeline.unet.request.data_cache) if collected_subset_size >= subset_size: pbar.update(subset_size - pbar.n) break pbar.update(collected_subset_size - diff) diff = collected_subset_size - calibration_dataset = lcm_pipeline.unet.data_cache + calibration_dataset = lcm_pipeline.unet.request.data_cache lcm_pipeline.set_progress_bar_config(disable=False) - lcm_pipeline.unet = original_unet + lcm_pipeline.unet.request = original_unet lcm_pipeline.safety_checker = safety_checker return calibration_dataset @@ -920,10 +445,12 @@ model inputs for calibration we should customize ``CompiledModel``. logging.basicConfig(level=logging.WARNING) logger = logging.getLogger(__name__) - UNET_INT8_OV_PATH = Path("model/unet_int8.xml") - if not UNET_INT8_OV_PATH.exists(): + if not int8_model_path.exists(): subset_size = 200 + ov_pipe = OVDiffusionPipeline.from_pretrained(model_path, device=device.value) unet_calibration_data = collect_calibration_data(ov_pipe, subset_size=subset_size) + del ov_pipe + gc.collect(); @@ -948,12 +475,11 @@ Create a quantized model from the pre-trained converted OpenVINO model. 
import nncf from nncf.scopes import IgnoredScope + import shutil + core = ov.Core() - if UNET_INT8_OV_PATH.exists(): - print("Loading quantized model") - quantized_unet = core.read_model(UNET_INT8_OV_PATH) - else: - unet = core.read_model(UNET_OV_PATH) + if not int8_model_path.exists(): + unet = core.read_model(model_path / "unet/openvino_model.xml") quantized_unet = nncf.quantize( model=unet, subset_size=subset_size, @@ -963,12 +489,19 @@ Create a quantized model from the pre-trained converted OpenVINO model. disable_bias_correction=True ) ) - ov.save_model(quantized_unet, UNET_INT8_OV_PATH) - - -.. parsed-literal:: - - INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, onnx, openvino + ov.save_model(quantized_unet, int8_model_path / "unet/openvino_model.xml") + del quantized_unet + del unet + gc.collect() + for filename in model_path.rglob("*"): + if filename.is_dir(): + continue + relative_file_name = filename.relative_to(model_path) + if (int8_model_path / relative_file_name).exists(): + continue + dst_path = int8_model_path / relative_file_name + dst_path.parent.mkdir(exist_ok=True, parents=True) + shutil.copy(filename, dst_path) @@ -985,14 +518,6 @@ Create a quantized model from the pre-trained converted OpenVINO model. - - - - - - - - .. parsed-literal:: Output() @@ -1006,19 +531,6 @@ Create a quantized model from the pre-trained converted OpenVINO model. - - - - - - - -.. parsed-literal:: - - INFO:nncf:122 ignored nodes were found by name in the NNCFGraph - - - .. parsed-literal:: Output() @@ -1031,29 +543,11 @@ Create a quantized model from the pre-trained converted OpenVINO model. - - - - - - - - .. code:: ipython3 %%skip not $to_quantize.value - unet_optimized = core.compile_model(UNET_INT8_OV_PATH, device.value) - - int8_pipe = OVLatentConsistencyModelPipeline( - tokenizer=tokenizer, - text_encoder=text_enc, - unet=unet_optimized, - vae_decoder=vae_decoder, - scheduler=scheduler, - feature_extractor=feature_extractor, - safety_checker=safety_checker, - ) + int8_pipe = OVDiffusionPipeline.from_pretrained(int8_model_path, device=device.value) Let us check predictions with the quantized UNet using the same input data. @@ -1066,16 +560,14 @@ data. prompt = "a beautiful pink unicorn, 8k" num_inference_steps = 4 - torch.manual_seed(1234567) images = int8_pipe( prompt=prompt, num_inference_steps=num_inference_steps, guidance_scale=8.0, - lcm_origin_steps=50, - output_type="pil", height=512, width=512, + generator=torch.Generator().manual_seed(1234567) ).images display(images[0]) @@ -1088,7 +580,7 @@ data. -.. image:: latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.png +.. image:: latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.png Compare inference time of the FP16 and INT8 models @@ -1127,8 +619,6 @@ pipelines, we use median inference time on calibration subset. prompt, num_inference_steps=num_inference_steps, guidance_scale=8.0, - lcm_origin_steps=50, - output_type="pil", height=512, width=512, ) @@ -1143,38 +633,154 @@ pipelines, we use median inference time on calibration subset. 
%%skip not $to_quantize.value - fp_latency = calculate_inference_time(ov_pipe, validation_data) int8_latency = calculate_inference_time(int8_pipe, validation_data) + del int8_pipe + gc.collect() + ov_pipe = OVDiffusionPipeline.from_pretrained(model_path, device=device.value) + fp_latency = calculate_inference_time(ov_pipe, validation_data) print(f"Performance speed up: {fp_latency / int8_latency:.3f}") + + del ov_pipe + gc.collect(); .. parsed-literal:: - Performance speed up: 1.319 + Performance speed up: 1.357 Compare UNet file size -^^^^^^^^^^^^^^^^^^^^^^ +~~~~~~~~~~~~~~~~~~~~~~ .. code:: ipython3 - %%skip not $to_quantize.value + UNET_OV_PATH = model_path / "unet/openvino_model.xml" + UNET_INT8_OV_PATH = int8_model_path / "unet/openvino_model.xml" - fp16_ir_model_size = UNET_OV_PATH.with_suffix(".bin").stat().st_size / 1024 - quantized_model_size = UNET_INT8_OV_PATH.with_suffix(".bin").stat().st_size / 1024 + if UNET_INT8_OV_PATH.exists(): + fp16_ir_model_size = UNET_OV_PATH.with_suffix(".bin").stat().st_size / 1024 + quantized_model_size = UNET_INT8_OV_PATH.with_suffix(".bin").stat().st_size / 1024 - print(f"FP16 model size: {fp16_ir_model_size:.2f} KB") - print(f"INT8 model size: {quantized_model_size:.2f} KB") - print(f"Model compression rate: {fp16_ir_model_size / quantized_model_size:.3f}") + print(f"FP16 model size: {fp16_ir_model_size:.2f} KB") + print(f"INT8 model size: {quantized_model_size:.2f} KB") + print(f"Model compression rate: {fp16_ir_model_size / quantized_model_size:.3f}") + + +.. parsed-literal:: + + FP16 model size: 1678912.69 KB + INT8 model size: 841591.46 KB + Model compression rate: 1.995 + + +Run Text to image generation using OpenVINO GenAI +------------------------------------------------- + + + +`OpenVINO™ GenAI `__ +is a library of the most popular Generative AI model pipelines, +optimized execution methods, and samples that run on top of highly +performant `OpenVINO +Runtime `__. + +|image0| + +This library is friendly to PC and laptop execution, and optimized for +resource consumption. It requires no external dependencies to run +generative models as it already includes all the core functionality. + +``openvino_genai.Text2ImagePipeline`` class supports inference of +`Diffusers +models `__. +For pipeline initialization, we should provide directory with converted +by Optimum Intel pipeline and specify inference device. Optionally, we +can provide configuration for LoRA Adapters using ``adapter_config``. +For starting generation process ``generate`` method should be used. +Basically, it required to provide input text prompt for image +generation. You can provide additional arguments like negative prompt, +number of steps, guidance scale, image width and height to control +generation process. + +.. |image0| image:: https://media.githubusercontent.com/media/openvinotoolkit/openvino.genai/refs/heads/master/src/docs/openvino_genai.svg + +.. code:: ipython3 + + device + + + + +.. parsed-literal:: + + Dropdown(description='Device:', index=1, options=('CPU', 'AUTO'), value='AUTO') + + + +.. code:: ipython3 + + import ipywidgets as widgets + + int8_can_be_used = int8_model_path.exists() and "GPU" not in device.value + use_quantized_model = widgets.Checkbox(value=int8_can_be_used, description="Use INT8 model", disabled=not int8_can_be_used) + + use_quantized_model + + .. parsed-literal:: - FP16 model size: 1678912.37 KB - INT8 model size: 840792.93 KB - Model compression rate: 1.997 + Checkbox(value=True, description='Use INT8 model') + + + +.. 
code:: ipython3 + + import openvino_genai as ov_genai + + used_model_path = model_path if not use_quantized_model.value else int8_model_path + + pipe = ov_genai.Text2ImagePipeline(used_model_path, device.value) + +.. code:: ipython3 + + from PIL import Image + import torch + import openvino as ov + + + class Generator(ov_genai.Generator): + def __init__(self, seed): + ov_genai.Generator.__init__(self) + self.generator = torch.Generator(device="cpu").manual_seed(seed) + + def next(self): + return torch.randn(1, generator=self.generator, dtype=torch.float32).item() + + def randn_tensor(self, shape: ov.Shape): + torch_tensor = torch.randn(list(shape), generator=self.generator, dtype=torch.float32) + return ov.Tensor(torch_tensor.numpy()) + + + prompt = "a beautiful pink unicorn, 8k" + num_inference_steps = 4 + + random_generator = Generator(1234567) + + image_tensor = pipe.generate(prompt, width=512, height=512, num_inference_steps=4, num_images_per_prompt=1, generator=random_generator) + + image = Image.fromarray(image_tensor.data[0]) + + image + + + + +.. image:: latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.png + Interactive demo @@ -1186,7 +792,7 @@ Interactive demo import random import gradio as gr - from functools import partial + import numpy as np MAX_SEED = np.iinfo(np.int32).max @@ -1198,7 +804,6 @@ Interactive demo def generate( - pipeline: OVLatentConsistencyModelPipeline, prompt: str, seed: int = 0, width: int = 512, @@ -1206,28 +811,15 @@ Interactive demo guidance_scale: float = 8.0, num_inference_steps: int = 4, randomize_seed: bool = False, - num_images: int = 1, progress=gr.Progress(track_tqdm=True), ): seed = randomize_seed_fn(seed, randomize_seed) - torch.manual_seed(seed) - result = pipeline( - prompt=prompt, - width=width, - height=height, - guidance_scale=guidance_scale, - num_inference_steps=num_inference_steps, - num_images_per_prompt=num_images, - lcm_origin_steps=50, - output_type="pil", - ).images[0] + random_generator = Generator(seed) + result = pipe.generate( + prompt, width=width, height=height, guidance_scale=guidance_scale, num_inference_steps=num_inference_steps, generator=random_generator + ) + result = Image.fromarray(result.data[0]) return result, seed - - - generate_original = partial(generate, ov_pipe) - generate_optimized = partial(generate, int8_pipe) - quantized_model_present = int8_pipe is not None - generate = generate_optimized if quantized_model_present else generate_original .. code:: ipython3 @@ -1239,7 +831,7 @@ Interactive demo from gradio_helper import make_demo_lcm - demo = make_demo_lcm(fn=generate, quantized=quantized_model_present) + demo = make_demo_lcm(fn=generate) try: demo.queue().launch(debug=False) @@ -1248,8 +840,3 @@ Interactive demo # if you are launching remotely, specify server_name and server_port # demo.launch(server_name='your server name', server_port='server port in int') # Read more in the docs: https://gradio.app/docs/ - -.. 
code:: ipython3 - - # please uncomment and run this cell for stopping gradio interface - # demo.close() diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.jpg b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.jpg new file mode 100644 index 00000000000000..1ea60cbbf8d222 --- /dev/null +++ b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:affe930458b7c4c643d79b905269590fc084ca969ee5f0545b8bba525006fa8a +size 19295 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.png b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.png new file mode 100644 index 00000000000000..5955c1e4362d9f --- /dev/null +++ b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_13_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ef1cbdb29f5fea43c3624c52f20799e4677fc0f52f6451bbe24bf0cf11a8463 +size 389641 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.jpg b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.jpg deleted file mode 100644 index c6b4e28670b6d5..00000000000000 --- a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:124ac28d484e3f73c150deb379374cec294b47803cd2d8914461dc8ea215afd0 -size 25960 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.png b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.png deleted file mode 100644 index 08ecde8427d295..00000000000000 --- a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_21_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:bbd41bc286b8dfb86e049235d232d30fd7a61ea4febfb1e4ccc340367a84ebb0 -size 412225 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.jpg b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.jpg new file mode 100644 index 00000000000000..6408a5658cf117 --- /dev/null +++ b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8925bd54982f37545c019dbe0594bd794045ee40e5627f0121b221b44471c62 +size 19352 diff --git 
a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.png b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.png new file mode 100644 index 00000000000000..7b0ec07f79f970 --- /dev/null +++ b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_27_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c7034ea0158e17cbd009e742938fe42fd1e0fb0011d0d2512524d6fab00889e +size 392614 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.jpg b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.jpg deleted file mode 100644 index 08bc3ddf0e0710..00000000000000 --- a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:4bce5a9ae0251f165e2becde51d5343c55a99c3234f327c9951f8a0279514a2e -size 22266 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.png b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.png deleted file mode 100644 index 75211e26b3b388..00000000000000 --- a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_34_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:89231776665c02abb82840d447f7804d7aca7118ec11d1296e7e1f738fd11e63 -size 392583 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.jpg b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.jpg new file mode 100644 index 00000000000000..4710b7e9307c1b --- /dev/null +++ b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b3bf64cb2d0dc5daa9387092f9c09eea26af451b5a6e0e7c5750d22a5fb66b1 +size 21932 diff --git a/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.png b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.png new file mode 100644 index 00000000000000..7667008b2d5aa5 --- /dev/null +++ b/docs/notebooks/latent-consistency-models-image-generation-with-output_files/latent-consistency-models-image-generation-with-output_37_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:307294292b8bf501d51fae0bc667d06907d8d5b2adf9ed139467b766eccac901 +size 401843 diff --git a/docs/notebooks/latent-consistency-models-optimum-demo-with-output.rst b/docs/notebooks/latent-consistency-models-optimum-demo-with-output.rst deleted file mode 100644 index a0bce9d85c7196..00000000000000 --- 
a/docs/notebooks/latent-consistency-models-optimum-demo-with-output.rst +++ /dev/null @@ -1,252 +0,0 @@ -Latent Consistency Model using Optimum-Intel OpenVINO -===================================================== - -This notebook provides instructions how to run Latent Consistency Model -(LCM). It allows to setup standard Hugging Face diffusers pipeline and -Optimum Intel pipeline optimized for Intel hardware including CPU and -GPU. Running inference on CPU and GPU it is easy to compare performance -and time required to generate an image for provided prompt. The notebook -can be also used on other Intel hardware with minimal or no -modifications. - -.. image:: https://github.com/openvinotoolkit/openvino_notebooks/assets/10940214/1858dae4-72fd-401e-b055-66d503d82446 - -Optimum Intel is an interface from Hugging Face between both diffusers -and transformers libraries and various tools provided by Intel to -accelerate pipelines on Intel hardware. It allows to perform -quantization of the models hosted on Hugging Face. In this notebook -OpenVINO is used for AI-inference acceleration as a backend for Optimum -Intel! - -For more details please refer to Optimum Intel repository -https://github.com/huggingface/optimum-intel - -LCMs are the next generation of generative models after Latent Diffusion -Models (LDMs). They are proposed to overcome the slow iterative sampling -process of Latent Diffusion Models (LDMs), enabling fast inference with -minimal steps (from 2 to 4) on any pre-trained LDMs (e.g. Stable -Diffusion). To read more about LCM please refer to -https://latent-consistency-models.github.io/ - - -**Table of contents:** - - -- `Prerequisites <#prerequisites>`__ -- `Full precision model on the - CPU <#using-full-precision-model-in-cpu-with-latentconsistencymodelpipeline>`__ -- `Running inference using Optimum Intel - OVLatentConsistencyModelPipeline <#running-inference-using-optimum-intel-ovlatentconsistencymodelpipeline>`__ - -Installation Instructions -~~~~~~~~~~~~~~~~~~~~~~~~~ - -This is a self-contained example that relies solely on its own code. - -We recommend running the notebook in a virtual environment. You only -need a Jupyter server to start. For details, please refer to -`Installation -Guide `__. - -Prerequisites -~~~~~~~~~~~~~ - - - -Install required packages - -.. code:: ipython3 - - %pip install -q "openvino>=2023.3.0" - %pip install -q "onnx>=1.11.0,<1.16.2" - %pip install -q "optimum-intel[diffusers]@git+https://github.com/huggingface/optimum-intel.git" "ipywidgets" "torch>=2.1" "transformers>=4.33.0" --extra-index-url https://download.pytorch.org/whl/cpu - - -.. parsed-literal:: - - Note: you may need to restart the kernel to use updated packages. - - -.. code:: ipython3 - - import warnings - - warnings.filterwarnings("ignore") - -Showing Info Available Devices -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -The ``available_devices`` property shows the available devices in your -system. The “FULL_DEVICE_NAME” option to ``ie.get_property()`` shows the -name of the device. Check what is the ID name for the discrete GPU, if -you have integrated GPU (iGPU) and discrete GPU (dGPU), it will show -``device_name="GPU.0"`` for iGPU and ``device_name="GPU.1"`` for dGPU. -If you just have either an iGPU or dGPU that will be assigned to -``"GPU"`` - -Note: For more details about GPU with OpenVINO visit this -`link `__. -If you have been facing any issue in Ubuntu 20.04 or Windows 11 read -this -`blog `__. - -.. 
code:: ipython3 - - import openvino as ov - import openvino.properties as props - - - core = ov.Core() - devices = core.available_devices - - for device in devices: - device_name = core.get_property(device, props.device.full_name) - print(f"{device}: {device_name}") - - -.. parsed-literal:: - - CPU: Intel(R) Core(TM) Ultra 7 155H - GNA.GNA_SW: GNA_SW - GNA.GNA_HW: GNA_HW - GPU: Intel(R) Arc(TM) Graphics (iGPU) - NPU: Intel(R) AI Boost - - -Using full precision model in CPU with ``LatentConsistencyModelPipeline`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -Standard pipeline for the Latent Consistency Model(LCM) from Diffusers -library is used here. For more information please refer to -https://huggingface.co/docs/diffusers/en/api/pipelines/latent_consistency_models - -.. code:: ipython3 - - from diffusers import LatentConsistencyModelPipeline - import gc - - pipeline = LatentConsistencyModelPipeline.from_pretrained("SimianLuo/LCM_Dreamshaper_v7") - - - -.. parsed-literal:: - - Loading pipeline components...: 0%| | 0/7 [00:00=2.1.0" "torchvision" "torchaudio" --index-url https://download.pytorch.org/whl/cpu - %pip install -q "git+https://github.com/eaidova/optimum-intel.git@ea/minicpmv" - %pip install -q "nncf>=2.13.0" "sentencepiece" "tokenizers>=0.12.1" "transformers>=4.45.0" "gradio>=4.36" - %pip install -q -U --pre --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly openvino-tokenizers openvino openvino-genai + %pip install -q "git+https://github.com/huggingface/optimum-intel.git" --index-url https://download.pytorch.org/whl/cpu + %pip install -q "nncf>=2.14.0" "sentencepiece" "tokenizers>=0.12.1" "transformers>=4.45.0" "gradio>=4.36" + %pip install -q -U "openvino-tokenizers>=2024.5.0" "openvino>=2024.5.0" "openvino-genai>=2024.5.0"| utility_files = ["notebook_utils.py", "cmd_helper.py"] @@ -134,8 +134,8 @@ Install required dependencies r = requests.get( url=f"https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/{local_path.name}", ) - with local_path.open("w") as f: - f.write(r.text) + with local_path.open("w") as f: + f.write(r.text) Convert and Optimize Model -------------------------- @@ -329,7 +329,7 @@ conversation about provided images content. .. code:: ipython3 - from openvino_genai import VLMPipeline, GenerationConfig + import openvino_genai as ov_genai Select inference device ~~~~~~~~~~~~~~~~~~~~~~~ @@ -397,7 +397,7 @@ and inference device. .. code:: ipython3 - ov_model = VLMPipeline(str(model_base_path / model_variant.value), device=device.value) + ov_model = ov_genai.VLMPipeline(str(model_base_path / model_variant.value), device=device.value) Run model inference ------------------- @@ -435,7 +435,7 @@ one of the most critical aspects of a smooth experience. from io import BytesIO import numpy as np - config = GenerationConfig() + config = ov_genai.GenerationConfig() config.max_new_tokens = 100 @@ -445,7 +445,7 @@ one of the most critical aspects of a smooth experience. 
image = Image.open(BytesIO(response.content)).convert("RGB") else: image = Image.open(image_file).convert("RGB")
-        image_data = np.array(image.getdata()).reshape(1, 3, image.size[1], image.size[0]).astype(np.byte)
+        image_data = np.array(image.getdata()).reshape(1, image.size[1], image.size[0], 3).astype(np.byte)
         return image, ov.Tensor(image_data)
diff --git a/docs/notebooks/llava-multimodal-chatbot-optimum-with-output.rst b/docs/notebooks/llava-multimodal-chatbot-optimum-with-output.rst
index ae14876b33b633..b278013b8a258b 100644
--- a/docs/notebooks/llava-multimodal-chatbot-optimum-with-output.rst
+++ b/docs/notebooks/llava-multimodal-chatbot-optimum-with-output.rst
@@ -121,9 +121,9 @@ Install required dependencies
     import requests
     %pip install -q "torch>=2.1.0" "torchvision" "torchaudio" --index-url https://download.pytorch.org/whl/cpu
-    %pip install -q "git+https://github.com/eaidova/optimum-intel.git@ea/minicpmv"
-    %pip install -q "nncf>=2.13.0" "sentencepiece" "tokenizers>=0.12.1" "transformers>=4.45.0" "gradio>=4.36"
-    %pip install -q -U --pre --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly openvino-tokenizers openvino openvino-genai
+    %pip install -q "git+https://github.com/huggingface/optimum-intel.git" --index-url https://download.pytorch.org/whl/cpu
+    %pip install -q "nncf>=2.14.0" "sentencepiece" "tokenizers>=0.12.1" "transformers>=4.45.0" "gradio>=4.36" --index-url https://download.pytorch.org/whl/cpu
+    %pip install -q -U "openvino-tokenizers>=2024.5.0" "openvino>=2024.5.0" "openvino-genai>=2024.5.0"
     utility_files = ["notebook_utils.py", "cmd_helper.py"]
@@ -302,7 +302,7 @@ Prepare OpenVINO based inference pipeline
 OpenVINO integration with Optimum Intel provides ready-to-use API for
 model inference that can be used for smooth integration with
-transformers-based solutions. For loading pixtral model, we will use
+transformers-based solutions. For loading the model, we will use
 ``OVModelForVisualCausalLM`` class that have compatible interface with
 Transformers LLaVA implementation. For loading a model,
 ``from_pretrained`` method should be used. It accepts path to the model
diff --git a/docs/notebooks/llava-next-multimodal-chatbot-with-output.rst b/docs/notebooks/llava-next-multimodal-chatbot-with-output.rst
index 3e26205ee0272b..dc2a129c207ec5 100644
--- a/docs/notebooks/llava-next-multimodal-chatbot-with-output.rst
+++ b/docs/notebooks/llava-next-multimodal-chatbot-with-output.rst
@@ -20,7 +20,7 @@ model for creating multimodal chatbot, but the similar actions are also
 applicable to other models of LLaVA family compatible with HuggingFace
 transformers implementation.
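Referring back to the ``OVModelForVisualCausalLM`` loading described above, a minimal sketch is shown below. It is an editor's illustration rather than a cell from the notebook: the directory name is hypothetical, and the import path and ``from_pretrained`` arguments assume a recent ``optimum-intel`` build with OpenVINO support.

.. code:: ipython3

    # A loading sketch, assuming the model was already exported with optimum-cli
    # into the hypothetical folder "llava-ov".
    from optimum.intel import OVModelForVisualCausalLM
    from transformers import AutoProcessor

    model_dir = "llava-ov"

    processor = AutoProcessor.from_pretrained(model_dir)
    ov_model = OVModelForVisualCausalLM.from_pretrained(model_dir, device="CPU")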
Additionally, we demonstrate how to apply stateful transformation on LLM part and model optimization techniques -like weights compression and quantization using +like weights compression using `NNCF `__ @@ -28,28 +28,18 @@ like weights compression and quantization using - `Prerequisites <#prerequisites>`__ -- `Download PyTorch model <#download-pytorch-model>`__ -- `Convert model to OpenVINO Intermediate - Representation <#convert-model-to-openvino-intermediate-representation>`__ - - - `Image Encoder <#image-encoder>`__ - - `Text Embedding <#text-embedding>`__ - - `Language Model <#language-model>`__ - +- `Convert model to OpenVINO IR format using Optimum + CLI <#convert-model-to-openvino-ir-format-using-optimum-cli>`__ - `Compress Language Model Weights to 4 bits <#compress-language-model-weights-to-4-bits>`__ -- `Quantize Image Encoder to 8 - bits <#quantize-image-encoder-to-8-bits>`__ - - - `Prepare datasets <#prepare-datasets>`__ - - `Perform quantization <#perform-quantization>`__ - - `Prepare model inference pipeline <#prepare-model-inference-pipeline>`__ -- `Run OpenVINO model inference <#run-openvino-model-inference>`__ - `Select device <#select-device>`__ + - `Select model variant <#select-model-variant>`__ + - `Load OpenVINO Model <#load-openvino-model>`__ +- `Run OpenVINO model inference <#run-openvino-model-inference>`__ - `Interactive demo <#interactive-demo>`__ Installation Instructions @@ -69,7 +59,9 @@ Prerequisites .. code:: ipython3 - %pip install -q "openvino>=2024.0.0" "nncf>=2.9.0" "torch>=2.1" "transformers>=4.39.1" "accelerate" "pillow" "gradio>=4.26" "datasets>=2.14.6" "tqdm" --extra-index-url https://download.pytorch.org/whl/cpu + # %pip install -q "nncf>=2.14.0" "torch>=2.1" "transformers>=4.39.1" "accelerate" "pillow" "gradio>=4.26" "datasets>=2.14.6" "tqdm" --extra-index-url https://download.pytorch.org/whl/cpu + # %pip install -q -U "openvino>=2024.5.0" "openvino-tokenizers>=2024.5.0" "openvino-genai>=2024.5" + # %pip install -q "git+https://github.com/hugggingface/optimum-intel.git" --extra-index-url https://download.pytorch.org/whl/cpu .. code:: ipython3 @@ -77,435 +69,63 @@ Prerequisites import requests - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) - - MODEL_DIR = Path("model") - IMAGE_ENCODER_PATH = MODEL_DIR / "image_encoder.xml" - INPUT_EMBEDDING_PATH = MODEL_DIR / "input_embeddings.xml" - LANGUAGE_MODEL_PATH = MODEL_DIR / "language_model.xml" - - requires_pt_model_loading = not all([p.exists() for p in [IMAGE_ENCODER_PATH, INPUT_EMBEDDING_PATH, LANGUAGE_MODEL_PATH]]) - -Download PyTorch model ----------------------- - - - -.. 
code:: ipython3 - - from transformers import LlavaNextProcessor, LlavaNextForConditionalGeneration - import torch - import gc - - processor = LlavaNextProcessor.from_pretrained("llava-hf/llava-v1.6-mistral-7b-hf") - image_encoder_model, input_embedding_model, language_model = None, None, None - - - class ImageEncoder(torch.nn.Module): - def __init__(self, config, vision_tower, multi_modal_projector): - super().__init__() - self.config = config - self.vision_tower = vision_tower - self.multi_modal_projector = multi_modal_projector - - def forward(self, pixel_values): - batch_size, num_patches, num_channels, height, width = pixel_values.shape - reshaped_pixel_values = pixel_values.view(batch_size * num_patches, num_channels, height, width) - image_features = self.vision_tower(reshaped_pixel_values, output_hidden_states=True) - selected_image_feature = image_features.hidden_states[self.config.vision_feature_layer] - if self.config.vision_feature_select_strategy == "default": - selected_image_feature = selected_image_feature[:, 1:] - elif self.config.vision_feature_select_strategy == "full": - selected_image_feature = selected_image_feature - image_features = self.multi_modal_projector(selected_image_feature) - return image_features + utility_files = ["notebook_utils.py", "cmd_helper.py"] + for utility in utility_files: + local_path = Path(utility) + if not local_path.exists(): + r = requests.get( + url=f"https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/{local_path.name}", + ) + with local_path.open("w") as f: + f.write(r.text) - if requires_pt_model_loading: - model = LlavaNextForConditionalGeneration.from_pretrained("llava-hf/llava-v1.6-mistral-7b-hf", low_cpu_mem_usage=True) - model.config.save_pretrained(MODEL_DIR) - image_encoder_model = ImageEncoder(model.config, model.vision_tower, model.multi_modal_projector) - input_embedding_model = input_embedding_model = model.get_input_embeddings() - language_model = model.language_model - del model - gc.collect() + model_id = "llava-hf/llava-v1.6-mistral-7b-hf" + MODEL_DIR = Path(model_id.split("/")[-1].replace("-hf", "-ov")) -Convert model to OpenVINO Intermediate Representation +Convert model to OpenVINO IR format using Optimum CLI ----------------------------------------------------- OpenVINO supports PyTorch models via conversion to OpenVINO Intermediate -Representation (IR). `OpenVINO model conversion -API `__ -should be used for these purposes. ``ov.convert_model`` function accepts -original PyTorch model instance and example input for tracing and -returns ``ov.Model`` representing this model in OpenVINO framework. -Converted model can be used for saving on disk using ``ov.save_model`` -function or directly loading on device using ``core.complie_model``. - -LLaVA-NeXT is autoregressive transformer generative model, it means that -each next model step depends from model output from previous step. The -generation approach is based on the assumption that the probability -distribution of a word sequence can be decomposed into the product of -conditional next word distributions. In other words, model predicts the -next token in the loop guided by previously generated tokens until the -stop-condition will be not reached (generated sequence of maximum length -or end of string token obtained). The way the next token will be -selected over predicted probabilities is driven by the selected decoding -methodology. You can find more information about the most popular -decoding methods in this -`blog `__. 
The entry point -for the generation process for models from the Hugging Face Transformers -library is the ``generate`` method. You can find more information about -its parameters and configuration in the -`documentation `__. -To preserve flexibility in the selection decoding methodology, we will -convert only model inference for one step. - -The inference flow has difference on first step and for the next. On the -first step, model accept preprocessed input instruction and image, that -transformed to the unified embedding space using ``input_embedding`` and -``image_encoder`` models, after that ``language model``, LLM-based part -of model, runs on input embeddings to predict probability of next -generated tokens. On the next step, ``language_model`` accepts only next -token id selected based on sampling strategy and processed by -``input_embedding`` model and cached attention key and values. Since the -output side is auto-regressive, an output token hidden state remains the -same once computed for every further generation step. Therefore, -recomputing it every time you want to generate a new token seems -wasteful. With the cache, the model saves the hidden state once it has -been computed. The model only computes the one for the most recently -generated output token at each time step, re-using the saved ones for -hidden tokens. This reduces the generation complexity from -:math:`O(n^3)` to :math:`O(n^2)` for a transformer model. More details -about how it works can be found in this -`article `__. - -To sum up above, model consists of 3 parts: - -- **Image Encoder** for encoding input images into embedding space -- **Input Embedding** for conversion input text tokens into embedding - space -- **Language Model** for generation answer based on input embeddings - provided by Image Encoder and Input Embedding models. - -Let’s convert each model part. - -Image Encoder -~~~~~~~~~~~~~ - - - -Image Encoder is represented in LLaVA by pretrained CLIP model. - -.. code:: ipython3 - - import torch - import openvino as ov - import gc - - - def cleanup_torchscript_cache(): - """ - Helper for removing cached model representation - """ - torch._C._jit_clear_class_registry() - torch.jit._recursive.concrete_type_store = torch.jit._recursive.ConcreteTypeStore() - torch.jit._state._clear_class_state() - - - if not IMAGE_ENCODER_PATH.exists(): - ov_image_encoder = ov.convert_model(image_encoder_model, example_input=torch.zeros((1, 5, 3, 336, 336))) - ov.save_model(ov_image_encoder, IMAGE_ENCODER_PATH) - del ov_image_encoder - cleanup_torchscript_cache() - - del image_encoder_model - gc.collect(); - -Text Embedding -~~~~~~~~~~~~~~ - - - -In LLMs, input embedding is a part of language model, but for LLaVA the -first step hidden state produced by this model part should be integrated -with image embeddings into common embedding space. For ability to reuse -this model part and avoid introduction of llm model instance, we will -use it separately. - -.. 
code:: ipython3 - - llm_input = None - - if not LANGUAGE_MODEL_PATH.exists(): - llm_input = input_embedding_model(torch.ones((2, 2), dtype=torch.int64)) - - if not INPUT_EMBEDDING_PATH.exists(): - ov_input_embeddings_model = ov.convert_model(input_embedding_model, example_input=torch.ones((2, 2), dtype=torch.int64)) - ov.save_model(ov_input_embeddings_model, INPUT_EMBEDDING_PATH) - del ov_input_embeddings_model - cleanup_torchscript_cache() - - del input_embedding_model - gc.collect(); - -Language Model -~~~~~~~~~~~~~~ - - - -Language Model is responsible for generation answer in LLaVA. This part -is very similar to standard LLM for text generation. Our model uses -`mistralai/Mistral-7B-Instruct-v0.2 `__ -as base LLM. To optimize the generation process and use memory more -efficiently, HuggingFace transformers API provides a mechanism for -caching model state externally using ``use_cache=True`` parameter and -``past_key_values`` argument in inputs and outputs. With the cache, the -model saves the hidden state once it has been computed. The model only -computes the one for the most recently generated output token at each -time step, re-using the saved ones for hidden tokens. This reduces the -generation complexity from :math:`O(n^3)` to :math:`O(n^2)` for a -transformer model. With this option, the model gets the previous step’s -hidden states (cached attention keys and values) as input and -additionally provides hidden states for the current step as output. It -means for all next iterations, it is enough to provide only a new token -obtained from the previous step and cached key values to get the next -token prediction. - -With increasing model size like in modern LLMs, we also can note an -increase in the number of attention blocks and size past key values -tensors respectively. The strategy for handling cache state as model -inputs and outputs in the inference cycle may become a bottleneck for -memory-bounded systems, especially with processing long input sequences, -for example in a chatbot scenario. OpenVINO suggests a transformation -that removes inputs and corresponding outputs with cache tensors from -the model keeping cache handling logic inside the model. Such models are -also called stateful. A stateful model is a model that implicitly -preserves data between two consecutive inference calls. The tensors -saved from one run are kept in an internal memory buffer called a -``state`` or a ``variable`` and may be passed to the next run, while -never being exposed as model output. Hiding the cache enables storing -and updating the cache values in a more device-friendly representation. -It helps to reduce memory consumption and additionally optimize model -performance. More details about stateful models and working with state -can be found in `OpenVINO -documentation `__. - -.. 
code:: ipython3 - - from typing import Optional, Tuple, List - from openvino.runtime import opset13 - import numpy as np - - - def model_has_state(ov_model: ov.Model): - return len(ov_model.get_sinks()) > 0 - - - def model_has_input_output_name(ov_model: ov.Model, name: str): - """ - Helper function for checking that model has specified input or output name - - Parameters: - ov_model (ov.Model): - name (str): - name of input or output - - Returns: - True if input or output with requested name exists else False - """ - return name in sum([list(t.get_names()) for t in ov_model.inputs + ov_model.outputs], []) - - - def fuse_cache_reorder( - ov_model: ov.Model, - not_kv_inputs: List[str], - key_value_input_names: List[str], - gather_dim: int, - ): - """ - Fuses reored_cache during generate cycle into ov.Model. Used with stateful models, because we can not modify model state directly. - - Adds a new beam_idx parameter and Gather op per each kv-cache input in a given model. - Should be run before make_stateful. Implements optimumum's _reorder_cache - inside the model in the beginning of each iteration. - Gather works along given gather_dim dimension that may vary from model to model. - KV-cache inputs are identified based on names in key_value_input_names. - Append the new beam_idx parameter to not_kv_inputs. - - Parameters: - ov_model (`ov.Model`): - openvino model for processing - not_kv_inputs (`List[str]`): - list of input nodes in model that not related to past key values - key_value_input_names (`List[str]`): - list of names for key value input layers - gather_dim (int): - dimension for gathering cache during reorder pass - """ - - if model_has_input_output_name(ov_model, "beam_idx"): - raise ValueError("Model already has fused cache") - input_batch = ov_model.input("inputs_embeds").get_partial_shape()[0] - beam_idx = opset13.parameter(name="beam_idx", dtype=ov.Type.i32, shape=ov.PartialShape([input_batch])) - beam_idx.output(0).get_tensor().add_names({"beam_idx"}) # why list is not accepted? 
- ov_model.add_parameters([beam_idx]) - not_kv_inputs.append(ov_model.inputs[-1]) - # Go over all cache parameters and fuse _reorder_cache with indices provided by the new parameter beam_idx - for input_name in key_value_input_names: - parameter_output_port = ov_model.input(input_name) - consumers = parameter_output_port.get_target_inputs() - gather = opset13.gather(parameter_output_port, beam_idx, opset13.constant(gather_dim)) - for consumer in consumers: - consumer.replace_source_output(gather.output(0)) - ov_model.validate_nodes_and_infer_types() - - - def build_state_initializer(ov_model: ov.Model, batch_dim: int): - """ - Build initialization ShapeOf Expression for all ReadValue ops - - Parameters: - ov_model (ov.Model): - openvino model - batch_dim (int): - index of dimension corresponding to batch size - """ - input_ids = ov_model.input("inputs_embeds") - batch = opset13.gather( - opset13.shape_of(input_ids, output_type="i64"), - opset13.constant([0]), - opset13.constant(0), - ) - for op in ov_model.get_ops(): - if op.get_type_name() == "ReadValue": - dims = [dim.min_length for dim in list(op.get_output_partial_shape(0))] - dims[batch_dim] = batch - dims = [(opset13.constant(np.array([dim], dtype=np.int64)) if isinstance(dim, int) else dim) for dim in dims] - shape = opset13.concat(dims, axis=0) - broadcast = opset13.broadcast(opset13.constant(0.0, dtype=op.get_output_element_type(0)), shape) - op.set_arguments([broadcast]) - ov_model.validate_nodes_and_infer_types() - - - def make_stateful( - ov_model: ov.Model, - not_kv_inputs: List[str], - key_value_input_names: List[str], - key_value_output_names: List[str], - batch_dim: int, - num_attention_heads: int, - num_beams_and_batch: int = None, - ): - """ - Hides kv-cache inputs and outputs inside the model as variables. 
- - Parameters: - ov_model (ov.Model): - openvino model - not_kv_inputs (`List[str]`): - list of input nodes in model that not related to past key values - key_value_input_names (`List[str]`): - list of names for key value input layers - key_value_output_names (`List[str]`): - list of names for key value input layers - batch_dim (int): - index of batch dimension in key value layers - num_attention_heads (int): - number of attention heads for batch dimension initialization - num_beams_an_batch (int): - precalculated number of beams and batch for shapes initialization - """ - from openvino._offline_transformations import apply_make_stateful_transformation - - input_output_map = {} - - if num_beams_and_batch is not None: - # Set batch size for input_ids and attention mask to avoid dynamic dimension got propagated from the end of the model back to ReadValue - for input in not_kv_inputs: - shape = input.get_partial_shape() - if shape.rank.get_length() <= 2: # == 1 for beam_index - shape[0] = num_beams_and_batch - input.get_node().set_partial_shape(shape) - for kv_name_pair in zip(key_value_input_names, key_value_output_names): - input_output_map[kv_name_pair[0]] = kv_name_pair[1] - if num_beams_and_batch is not None: - input = ov_model.input(kv_name_pair[0]) - shape = input.get_partial_shape() - shape[batch_dim] = num_beams_and_batch * num_attention_heads - input.get_node().set_partial_shape(shape) - - if num_beams_and_batch is not None: - # Re-validation model if shapes are altered above - ov_model.validate_nodes_and_infer_types() - - apply_make_stateful_transformation(ov_model, input_output_map) - if num_beams_and_batch is None: - build_state_initializer(ov_model, batch_dim) - - - def patch_stateful(ov_model): - key_value_input_names = [key.get_any_name() for key in ov_model.inputs[2:-1]] - key_value_output_names = [key.get_any_name() for key in ov_model.outputs[1:]] - not_kv_inputs = [input for input in ov_model.inputs if not any(name in key_value_input_names for name in input.get_names())] - if not key_value_input_names or not key_value_output_names: - return - batch_dim = 0 - num_attention_heads = 1 - - fuse_cache_reorder(ov_model, not_kv_inputs, key_value_input_names, batch_dim) - make_stateful( - ov_model, - not_kv_inputs, - key_value_input_names, - key_value_output_names, - batch_dim, - num_attention_heads, - None, - ) +Representation (IR) format. For convenience, we will use OpenVINO +integration with HuggingFace Optimum. `Optimum +Intel `__ is the +interface between the Transformers and Diffusers libraries and the +different tools and libraries provided by Intel to accelerate end-to-end +pipelines on Intel architectures. + +Among other use cases, Optimum Intel provides a simple interface to +optimize your Transformers and Diffusers models, convert them to the +OpenVINO Intermediate Representation (IR) format and run inference using +OpenVINO Runtime. ``optimum-cli`` provides command line interface for +model conversion and optimization. + +General command format: + +.. code:: bash + + optimum-cli export openvino --model --task + +where task is task to export the model for, if not specified, the task +will be auto-inferred based on the model. You can find a mapping between +tasks and model classes in Optimum TaskManager +`documentation `__. +Additionally, you can specify weights compression using +``--weight-format`` argument with one of following options: ``fp32``, +``fp16``, ``int8`` and ``int4``. Fro int8 and int4 +`nncf `__ will be used for +weight compression. 
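As a concrete illustration of this command format, the sketch below runs an INT4 export through the ``optimum_cli`` helper from ``cmd_helper.py`` (fetched in the Prerequisites), the same wrapper used in the next cell; the model id, output folder, and extra argument are examples rather than required values.

.. code:: ipython3

    # An export sketch via the cmd_helper wrapper; roughly equivalent to
    # running `optimum-cli export openvino --model <model_id> --weight-format int4 <output_dir>`.
    from pathlib import Path

    from cmd_helper import optimum_cli

    example_model_id = "llava-hf/llava-v1.6-mistral-7b-hf"
    example_output = Path("llava-v1.6-mistral-7b-ov-int4")  # hypothetical folder

    if not example_output.exists():
        optimum_cli(example_model_id, example_output, additional_args={"weight-format": "int4"})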
More details about model export provided in `Optimum +Intel +documentation `__. .. code:: ipython3 - make_stateful_model = True - core = ov.Core() + from cmd_helper import optimum_cli - if not LANGUAGE_MODEL_PATH.exists(): - pkv = language_model(inputs_embeds=llm_input, attention_mask=torch.ones((2, 2), dtype=torch.int64))[1] - model_inputs = ["attention_mask", "position_ids"] - model_outputs = ["logits"] - for idx in range(len(pkv)): - model_inputs.extend([f"past_key_values.{idx}.key", f"past_key_values.{idx}.value"]) - model_outputs.extend([f"present.{idx}.key", f"present.{idx}.value"]) - model_inputs.append("inputs_embeds") - language_model.config.torchscript = True - position_ids = torch.tensor([[2, 3], [2, 3]]) - ov_model = ov.convert_model( - language_model, - example_input={ - "inputs_embeds": llm_input, - "attention_mask": torch.ones((2, 4)), - "past_key_values": pkv, - "position_ids": position_ids, - }, - ) - - for input, input_name in zip(ov_model.inputs, model_inputs): - input.get_tensor().set_names({input_name}) - - for output, output_name in zip(ov_model.outputs, model_outputs): - output.get_tensor().set_names({output_name}) - if make_stateful_model: - patch_stateful(ov_model) - ov.save_model(ov_model, LANGUAGE_MODEL_PATH) - del ov_model - cleanup_torchscript_cache() - del language_model - gc.collect() + if not (MODEL_DIR / "FP16").exists(): + optimum_cli(model_id, MODEL_DIR / "FP16", additional_args={"weight-format": "fp16"}) Compress Language Model Weights to 4 bits ----------------------------------------- @@ -516,9 +136,11 @@ For reducing memory consumption, weights compression optimization can be applied using `NNCF `__. Weight compression aims to reduce the memory footprint of a model. It can also lead to significant performance improvement for large memory-bound -models, such as Large Language Models (LLMs). LLMs and other models, -which require extensive memory to store the weights during inference, -can benefit from weight compression in the following ways: +models, such as Large Language Models (LLMs). + +LLMs and other models, which require extensive memory to store the +weights during inference, can benefit from weight compression in the +following ways: - enabling the inference of exceptionally large models that cannot be accommodated in the memory of the device; @@ -574,7 +196,10 @@ documentation `__ (Neural Network -Compression Framework) and infer quantized model via OpenVINO™ Toolkit. -`NNCF `__ enables -post-training quantization by adding quantization layers into model -graph and then using a subset of the training dataset to initialize the -parameters of these additional quantization layers. Quantized operations -are executed in ``INT8`` instead of ``FP32``/``FP16`` making model -inference faster. The optimization process contains the following steps: - -1. Prepare quantization dataset -2. Quantize the converted OpenVINO model with NNCF. -3. Save quantized model on disk for next usage. - -.. - - **Note:** quantization process may require additional time and memory - for performing. You can disable it using widget below: - -.. code:: ipython3 - - from notebook_utils import quantization_widget - - to_quantize = quantization_widget() - to_quantize - - + copy_model_folder(MODEL_DIR / "FP16", MODEL_DIR / "INT4", ["openvino_language_model.xml", "openvino_language_model.bin"]) .. parsed-literal:: - Checkbox(value=True, description='Quantization') - - - -.. 
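Returning to the weight-compression description above: the notebook applies this compression to the exported language model. A minimal sketch of such a step with NNCF is given below; the mode, group size, and ratio values are illustrative assumptions rather than the notebook's tuned settings.

.. code:: ipython3

    # A weight-compression sketch with NNCF, assuming the FP16 export from the
    # previous step exists; parameter values are illustrative only.
    import gc

    import nncf
    import openvino as ov

    core = ov.Core()

    lm_fp16_path = MODEL_DIR / "FP16" / "openvino_language_model.xml"
    lm_int4_path = MODEL_DIR / "INT4" / "openvino_language_model.xml"

    if not lm_int4_path.exists():
        lm_int4_path.parent.mkdir(parents=True, exist_ok=True)
        lm_model = core.read_model(lm_fp16_path)
        compressed_lm = nncf.compress_weights(
            lm_model,
            mode=nncf.CompressWeightsMode.INT4_ASYM,
            group_size=64,  # illustrative group size
            ratio=0.6,      # illustrative share of weights kept in int4
        )
        ov.save_model(compressed_lm, lm_int4_path)
        del compressed_lm, lm_model
        gc.collect()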
code:: ipython3 - - IMAGE_ENCODER_PATH_INT8 = IMAGE_ENCODER_PATH.parent / IMAGE_ENCODER_PATH.name.replace(".xml", "-int8.xml") - - - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", - ) - open("skip_kernel_extension.py", "w").write(r.text) - - %load_ext skip_kernel_extension - -Prepare datasets -~~~~~~~~~~~~~~~~ - - - -The `Conceptual -Captions `__ dataset -consisting of ~3.3M images annotated with captions is used to quantize -model. - -.. code:: ipython3 - - %%skip not $to_quantize.value - - import requests - from io import BytesIO - import numpy as np - from PIL import Image - from requests.packages.urllib3.exceptions import InsecureRequestWarning - requests.packages.urllib3.disable_warnings(InsecureRequestWarning) - - - def get_pil_from_url(url): - """ - Downloads and converts an image from a URL to a PIL Image object. - """ - response = requests.get(url, verify=False, timeout=20) - image = Image.open(BytesIO(response.content)) - return image.convert("RGB") - - def collate_fn(example, image_column="image_url"): - """ - Preprocesses an example by loading and transforming image and text data. - Checks if the text data in the example is valid by calling the `check_text_data` function. - Downloads the image specified by the URL in the image_column by calling the `get_pil_from_url` function. - If there is any error during the download process, returns None. - Returns the preprocessed inputs with transformed image and text data. - """ - assert len(example) == 1 - example = example[0] - url = example[image_column] - try: - image = get_pil_from_url(url) - h, w = image.size - if h == 1 or w == 1: - return None - except Exception: - return None - - inputs = processor.image_processor(images=[image], return_tensors="pt") - return inputs - -.. code:: ipython3 - - %%skip not $to_quantize.value - - import torch - from datasets import load_dataset - from tqdm.notebook import tqdm - - def prepare_calibration_data(dataloader, init_steps): - """ - This function prepares calibration data from a dataloader for a specified number of initialization steps. - It iterates over the dataloader, fetching batches and storing the relevant data. - """ - data = [] - print(f"Fetching {init_steps} samples for the initialization...") - with tqdm(total=init_steps) as pbar: - for batch in dataloader: - if len(data) == init_steps: - break - if batch: - pbar.update(1) - with torch.no_grad(): - data.append( - { - "pixel_values": batch["pixel_values"].to("cpu") - } - ) - return data - - - def prepare_dataset(opt_init_steps=50, max_train_samples=1000): - """ - Prepares a vision-text dataset for quantization. - """ - dataset = load_dataset("google-research-datasets/conceptual_captions", trust_remote_code=True) - train_dataset = dataset["train"].shuffle(seed=42) - dataloader = torch.utils.data.DataLoader(train_dataset, collate_fn=collate_fn, batch_size=1) - calibration_data = prepare_calibration_data(dataloader, opt_init_steps) - return calibration_data - -.. code:: ipython3 - - %%skip not $to_quantize.value - - vcalibration_data = [] - if not IMAGE_ENCODER_PATH_INT8.exists(): - calibration_data = prepare_dataset() - -Perform quantization -~~~~~~~~~~~~~~~~~~~~ - - - -Create a quantized model from the pre-trained model. - - **NOTE**: Quantization is time and memory consuming operation. - Running quantization code below may take some time. - -.. code:: ipython3 + INFO:nncf:NNCF initialized successfully. 
Supported frameworks detected: torch, tensorflow, onnx, openvino - %%skip not $to_quantize.value - - - if not IMAGE_ENCODER_PATH_INT8.exists(): - if len(calibration_data) == 0: - raise RuntimeError( - 'Calibration dataset is empty. Please check internet connection and try to download images manually.' - ) - - ov_model = core.read_model(IMAGE_ENCODER_PATH) - calibration_dataset = nncf.Dataset(calibration_data) - quantized_model = nncf.quantize( - model=ov_model, - calibration_dataset=calibration_dataset, - model_type=nncf.ModelType.TRANSFORMER, - subset_size=len(calibration_data), - # Smooth Quant algorithm reduces activation quantization error; optimal alpha value was obtained through grid search - advanced_parameters=nncf.AdvancedQuantizationParameters(smooth_quant_alpha=0.6) - ) - ov.save_model(quantized_model, IMAGE_ENCODER_PATH_INT8) - del ov_model - del quantized_model - gc.collect() Prepare model inference pipeline -------------------------------- @@ -796,392 +244,42 @@ Prepare model inference pipeline |image0| -``OVLlavaForCausalLM`` class provides ease-to-use interface for using -model in generation scenario. It is based on -``transformers.generation.GenerationMixin`` that gives us opportunity to -reuse all reach capabilities for generation implemented in HuggingFace -Transformers library. More details about this interface can be found in -`HuggingFace -documentation `__. +`OpenVINO™ GenAI `__ +is a library of the most popular Generative AI model pipelines, +optimized execution methods, and samples that run on top of highly +performant `OpenVINO +Runtime `__. + +This library is friendly to PC and laptop execution, and optimized for +resource consumption. It requires no external dependencies to run +generative models as it already includes all the core functionality +(e.g. tokenization via openvino-tokenizers). OpenVINO™ GenAI is a flavor +of OpenVINO™, aiming to simplify running inference of generative AI +models. It hides the complexity of the generation process and minimizes +the amount of code required. + +Inference Visual language models can be implemented using OpenVINO GenAI +``VLMPipeline`` class. Similarly to LLMPipeline, that we discussed in +this +`notebook `__. +It supports chat mode with preserving conversational history inside +pipeline, that allows us effectively implements chatbot that supports +conversation about provided images content. .. |image0| image:: https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/a562e9de-5b94-4e24-ac52-532019fc92d3 -.. 
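To make the chat mode described above concrete, a minimal sketch is shown below. It is an editor's illustration rather than a cell from this notebook: the model directory and image file names are hypothetical, the image tensor is prepared the same way as in the loading helper later in this document, and the ``start_chat``/``finish_chat`` calls assume they are available in the installed ``openvino-genai`` release.

.. code:: ipython3

    # A chat-mode sketch, assuming an exported model in "llava-v1.6-mistral-7b-ov/INT4"
    # and a local image "cat.png"; both names are hypothetical.
    import numpy as np
    import openvino as ov
    import openvino_genai as ov_genai
    from PIL import Image

    image = Image.open("cat.png").convert("RGB")
    image_tensor = ov.Tensor(np.array(image.getdata()).reshape(1, image.size[1], image.size[0], 3).astype(np.byte))

    pipe = ov_genai.VLMPipeline("llava-v1.6-mistral-7b-ov/INT4", device="CPU")

    config = ov_genai.GenerationConfig()
    config.max_new_tokens = 100

    pipe.start_chat()
    print(pipe.generate("What is shown on this image?", image=image_tensor, generation_config=config))
    # The follow-up turn relies on the conversation history kept inside the pipeline (assumption).
    print(pipe.generate("Describe it in one sentence.", generation_config=config))
    pipe.finish_chat()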
code:: ipython3 - - import torch - from transformers.generation import GenerationConfig, GenerationMixin - from transformers.modeling_outputs import CausalLMOutputWithPast - from transformers import AutoConfig - from transformers.models.llava_next.modeling_llava_next import ( - get_anyres_image_grid_shape, - unpad_image, - ) - import openvino as ov - - - class OVLlavaForCausalLM(GenerationMixin): - def __init__( - self, - core, - image_encoder_path, - input_embedding_path, - language_model_path, - lm_device, - img_encoder_device, - ): - self.image_encoder = core.compile_model(core.read_model(image_encoder_path), img_encoder_device) - self.input_embeddings = core.compile_model(core.read_model(input_embedding_path), lm_device) - self.model = core.read_model(language_model_path) - self.input_names = {key.get_any_name(): idx for idx, key in enumerate(self.model.inputs)} - self.output_names = {idx: key for idx, key in enumerate(self.model.outputs)} - self.key_value_input_names = [key for key in list(self.input_names) if key not in ["beam_idx", "inputs_embeds", "attention_mask", "position_ids"]] - self.key_value_output_names = [key for key in list(self.output_names)[1:]] - self.stateful = len(self.key_value_input_names) == 0 - compiled_model = core.compile_model(self.model, lm_device) - self.request = compiled_model.create_infer_request() - self.config = AutoConfig.from_pretrained(Path(language_model_path).parent) - self.generation_config = GenerationConfig.from_model_config(self.config) - self.main_input_name = "input_ids" - self.device = torch.device("cpu") - self.num_pkv = 2 - self.next_beam_idx = None - self.image_newline = torch.zeros(self.config.text_config.hidden_size, dtype=torch.float32) - self.pad_token_id = self.config.pad_token_id if self.config.pad_token_id is not None else -1 - self.past_len = 0 - self._supports_cache_class = False - - def can_generate(self): - """Returns True to validate the check that the model using `GenerationMixin.generate()` can indeed generate.""" - return True - - def __call__( - self, - input_ids: torch.LongTensor, - pixel_values: torch.Tensor, - attention_mask: Optional[torch.LongTensor] = None, - past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, - position_ids: Optional[torch.LongTensor] = None, - image_sizes=None, - **kwargs, - ) -> CausalLMOutputWithPast: - return self.forward( - input_ids, - pixel_values, - attention_mask, - past_key_values, - position_ids, - image_sizes, - **kwargs, - ) - - def forward( - self, - input_ids: torch.LongTensor, - pixel_values: torch.Tensor, - attention_mask: Optional[torch.LongTensor] = None, - past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, - position_ids: Optional[torch.LongTensor] = None, - image_sizes=None, - **kwargs, - ) -> CausalLMOutputWithPast: - """General inference method""" - inputs = {} - if past_key_values is not None: - inputs = {} - if not self.stateful: - past_key_values = tuple(past_key_value for pkv_per_layer in past_key_values for past_key_value in pkv_per_layer) - # Add the past_key_values to the decoder inputs - inputs = dict(zip(self.key_value_input_names, past_key_values)) - # input_ids = np.array(input_ids)[:, -1:] - inputs_embeds = self.input_embeddings(input_ids)[0] - inputs["inputs_embeds"] = inputs_embeds - # inputs["attention_mask"] = attention_mask - if "beam_idx" in self.input_names: - inputs["beam_idx"] = self.next_beam_idx if self.next_beam_idx is not None else np.arange(batch_size, dtype=int) - - if not self.stateful: - first_layer_past_key_value = 
torch.from_numpy(past_key_values[0][0][:, :, :, 0]) - else: - first_layer_past_key_value = torch.from_numpy(self.request.query_state()[0].state.data[:, :, :, 0]) - - # Sum all dimensions of head_dim (-2) to avoid random errors such as: https://github.com/huggingface/transformers/pull/28032#issuecomment-1863691941 - batch_index, non_attended_tokens = torch.where(first_layer_past_key_value.float().sum(-2) == 0) - - # Get the target length - target_length = input_ids.shape[1] - past_length = first_layer_past_key_value.shape[-1] - - extended_attention_mask = torch.ones( - (attention_mask.shape[0], past_length), - dtype=attention_mask.dtype, - device=attention_mask.device, - ) - - # Filter out only the tokens that can be un-attended, this can happen - # if one uses Llava + Fused modules where the cache on the - # first iteration is already big enough, or if one passes custom cache - valid_indices = non_attended_tokens < extended_attention_mask.size(-1) - new_batch_index = batch_index[valid_indices] - new_non_attended_tokens = non_attended_tokens[valid_indices] - - # Zero-out the places where we don't need to attend - extended_attention_mask[new_batch_index, new_non_attended_tokens] = 0 - - attention_mask = torch.cat((extended_attention_mask, attention_mask[:, -target_length:]), dim=1) - position_ids = torch.sum(attention_mask, dim=1).unsqueeze(-1) - 1 - inputs["attention_mask"] = attention_mask - inputs["position_ids"] = position_ids - - else: - inputs = self.prepare_multimodal_input(input_ids, pixel_values, attention_mask, position_ids, image_sizes) - - # Run inference - self.request.start_async(inputs, share_inputs=True) - self.request.wait() - - logits = torch.from_numpy(self.request.get_tensor(self.output_names[0]).data) - - if not self.stateful: - # Tuple of length equal to : number of layer * number of past_key_value per decoder layer (2 corresponds to the self-attention layer) - past_key_values = tuple(self.request.get_tensor(key).data for key in self.key_value_output_names) - # Tuple of tuple of length `n_layers`, with each tuple of length equal to 2 (k/v of self-attention) - past_key_values = tuple(past_key_values[i : i + self.num_pkv] for i in range(0, len(past_key_values), self.num_pkv)) - else: - past_key_values = ((),) - self.past_len += inputs["inputs_embeds"].shape[1] - return CausalLMOutputWithPast(logits=logits, past_key_values=past_key_values) - - def prepare_multimodal_input(self, input_ids, pixel_values, attention_mask, position_ids, image_sizes=None): - """Preprocessing function for embedding multimodal data""" - inputs = {} - inputs_embeds = torch.from_numpy(self.input_embeddings(input_ids)[0]) - batch_size = input_ids.shape[0] - if not self.stateful: - for input_name in self.key_value_input_names: - model_inputs = self.model.input(input_name) - shape = model_inputs.get_partial_shape() - shape[0] = batch_size - if shape[2].is_dynamic: - shape[2] = 0 - else: - shape[1] = 0 - inputs[input_name] = ov.Tensor(model_inputs.get_element_type(), shape.get_shape()) - else: - self.past_len = 0 - self.request.reset_state() - # Set initial value for the next beam_idx input that will be used at the current iteration - # and will be optionally updated by _reorder_cache at the next iterations if beam_search is used - self.next_beam_idx = np.arange(batch_size, dtype=int) - - if "beam_idx" in self.input_names: - inputs["beam_idx"] = self.next_beam_idx if self.next_beam_idx is not None else np.arange(batch_size, dtype=int) - if pixel_values is None: - inputs["inputs_embeds"] = inputs_embeds 
- inputs["attention_mask"] = attention_mask - if position_ids is None: - position_ids = torch.cumsum(attention_mask, axis=1) - 1 - position_ids[attention_mask == 0] = 1 - inputs["position_ids"] = position_ids - res = self.image_encoder(pixel_values) - image_features = torch.from_numpy(res[0]) - split_sizes = [image.shape[0] for image in pixel_values] - image_features = torch.split(image_features, split_sizes, dim=0) - - # NOTE we only support multimodal_patch_merge_type == "spatial_unpad" - height = width = self.config.vision_config.image_size // self.config.vision_config.patch_size - - new_image_features = [] - for image_idx, image_feature in enumerate(image_features): - if image_feature.shape[0] > 1: - base_image_feature = image_feature[0] - image_feature = image_feature[1:] - - if height * width != base_image_feature.shape[0]: - raise ValueError("The number of patches is not consistent with the image size.") - num_patch_height, num_patch_width = get_anyres_image_grid_shape( - image_sizes[image_idx], - self.config.image_grid_pinpoints, - self.config.vision_config.image_size, - ) - image_feature = image_feature.view(num_patch_height, num_patch_width, height, width, -1) - image_feature = image_feature.permute(4, 0, 2, 1, 3).contiguous() - image_feature = image_feature.flatten(1, 2).flatten(2, 3) - image_feature = unpad_image(image_feature, image_sizes[image_idx]) - image_feature = torch.cat( - ( - image_feature, - self.image_newline[:, None, None].expand(*image_feature.shape[:-1], 1), - ), - dim=-1, - ) - image_feature = image_feature.flatten(1, 2).transpose(0, 1) - image_feature = torch.cat((base_image_feature, image_feature), dim=0) - else: - image_feature = image_feature[0] - image_feature = torch.cat((image_feature, self.image_newline[None]), dim=0) - new_image_features.append(image_feature) - image_features = torch.stack(new_image_features, dim=0) - - ( - inputs_embeds, - attention_mask, - position_ids, - ) = self._merge_input_ids_with_image_features(image_features, inputs_embeds, input_ids, attention_mask, None) - inputs["inputs_embeds"] = inputs_embeds - inputs["attention_mask"] = attention_mask - inputs["position_ids"] = position_ids - - return inputs - - def _merge_input_ids_with_image_features(self, image_features, inputs_embeds, input_ids, attention_mask, labels): - num_images, num_image_patches, embed_dim = image_features.shape - batch_size, sequence_length = input_ids.shape - left_padding = not torch.sum(input_ids[:, -1] == torch.tensor(self.pad_token_id)) - # 1. Create a mask to know where special image tokens are - special_image_token_mask = input_ids == self.config.image_token_index - num_special_image_tokens = torch.sum(special_image_token_mask, dim=-1) - # Compute the maximum embed dimension - max_embed_dim = (num_special_image_tokens.max() * (num_image_patches - 1)) + sequence_length - batch_indices, non_image_indices = torch.where(input_ids != self.config.image_token_index) - - # 2. Compute the positions where text should be written - # Calculate new positions for text tokens in merged image-text sequence. - # `special_image_token_mask` identifies image tokens. Each image token will be replaced by `nb_text_tokens_per_images - 1` text tokens. - # `torch.cumsum` computes how each image token shifts subsequent text token positions. - # - 1 to adjust for zero-based indexing, as `cumsum` inherently increases indices by one. 
- new_token_positions = torch.cumsum((special_image_token_mask * (num_image_patches - 1) + 1), -1) - 1 - nb_image_pad = max_embed_dim - 1 - new_token_positions[:, -1] - if left_padding: - new_token_positions += nb_image_pad[:, None] # offset for left padding - text_to_overwrite = new_token_positions[batch_indices, non_image_indices] - - # 3. Create the full embedding, already padded to the maximum position - final_embedding = torch.zeros( - batch_size, - max_embed_dim, - embed_dim, - dtype=inputs_embeds.dtype, - device=inputs_embeds.device, - ) - final_attention_mask = torch.zeros( - batch_size, - max_embed_dim, - dtype=attention_mask.dtype, - device=inputs_embeds.device, - ) - # In case the Vision model or the Language model has been offloaded to CPU, we need to manually - # set the corresponding tensors into their correct target device. - target_device = inputs_embeds.device - batch_indices, non_image_indices, text_to_overwrite = ( - batch_indices.to(target_device), - non_image_indices.to(target_device), - text_to_overwrite.to(target_device), - ) - attention_mask = attention_mask.to(target_device) - - # 4. Fill the embeddings based on the mask. If we have ["hey" "", "how", "are"] - # we need to index copy on [0, 577, 578, 579] for the text and [1:576] for the image features - final_embedding[batch_indices, text_to_overwrite] = inputs_embeds[batch_indices, non_image_indices] - final_attention_mask[batch_indices, text_to_overwrite] = attention_mask[batch_indices, non_image_indices] - if labels is not None: - final_labels[batch_indices, text_to_overwrite] = labels[batch_indices, non_image_indices] - - # 5. Fill the embeddings corresponding to the images. Anything that is still zeros needs filling - image_to_overwrite = torch.all(final_embedding == 0, dim=-1) - image_to_overwrite &= image_to_overwrite.cumsum(-1) - 1 >= nb_image_pad[:, None].to(target_device) - if image_to_overwrite.sum() != image_features.shape[:-1].numel(): - raise ValueError( - f"The input provided to the model are wrong. The number of image tokens is {torch.sum(special_image_token_mask)} while" - f" the number of image given to the model is {num_images}. This prevents correct indexing and breaks batch generation." - ) - - final_embedding[image_to_overwrite] = image_features.contiguous().reshape(-1, embed_dim).to(target_device) - final_attention_mask |= image_to_overwrite - position_ids = (final_attention_mask.cumsum(-1) - 1).masked_fill_((final_attention_mask == 0), 1) - - # 6. Mask out the embedding at padding positions, as we later use the past_key_value value to determine the non-attended tokens. - batch_indices, pad_indices = torch.where(input_ids == self.pad_token_id) - indices_to_mask = new_token_positions[batch_indices, pad_indices] - - final_embedding[batch_indices, indices_to_mask] = 0 - - return final_embedding, final_attention_mask, position_ids - - def prepare_inputs_for_generation( - self, - input_ids, - past_key_values=None, - inputs_embeds=None, - pixel_values=None, - image_sizes=None, - attention_mask=None, - **kwargs, - ): - if past_key_values is not None: - if not self.stateful: - cache_length = past_length = past_key_values[0][0].shape[2] - else: - cache_length = past_length = self.past_len - - # Keep only the unprocessed tokens: - # 1 - If the length of the attention_mask exceeds the length of input_ids, then we are in a setting where - # some of the inputs are exclusively passed as part of the cache (e.g. 
when passing input_embeds as - # input) - if attention_mask is not None and attention_mask.shape[1] > input_ids.shape[1]: - input_ids = input_ids[:, -(attention_mask.shape[1] - past_length) :] - # 2 - If the past_length is smaller than input_ids', then input_ids holds all input tokens. We can discard - # input_ids based on the past_length.llava - elif past_length < input_ids.shape[1]: - input_ids = input_ids[:, past_length:] - # 3 - Otherwise (past_length >= input_ids.shape[1]), let's assume input_ids only has unprocessed tokens. - elif self.config.image_token_index in input_ids: - input_ids = input_ids[:, input_ids.shape[1] - 1 :] - # If the cache has seen more tokens than it can hold, then the cache has a size limit. Let's discard the - # older attention values, as their corresponding values are not part of the input. - if cache_length < past_length and attention_mask is not None: - attention_mask = attention_mask[:, -(cache_length + input_ids.shape[1]) :] - - position_ids = kwargs.get("position_ids", None) - if attention_mask is not None and position_ids is None: - # create position_ids on the fly for batch gllavaenerationsubset_siz - position_ids = attention_mask.long().cumsum(-1) - 1 - position_ids.masked_fill_(attention_mask == 0, 1) - if past_key_values: - position_ids = position_ids[:, -input_ids.shape[1] :] - - # if `inputs_embeds` are passed, we only want to use them in the 1st generation step - if inputs_embeds is not None and past_key_values is None: - model_inputs = {"inputs_embeds": inputs_embeds} - else: - model_inputs = {"input_ids": input_ids} - - model_inputs.update( - { - "position_ids": position_ids, - "past_key_values": past_key_values, - "use_cache": kwargs.get("use_cache"), - "attention_mask": attention_mask, - "pixel_values": pixel_values, - "image_sizes": image_sizes, - } - ) - return model_inputs - -Run OpenVINO model inference ----------------------------- +Select inference device +~~~~~~~~~~~~~~~~~~~~~~~ -Select device for language model -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - +Select device from dropdown list for running inference using OpenVINO. .. code:: ipython3 from notebook_utils import device_widget - device = device_widget(exclude=["NPU"]) + device = device_widget("CPU", exclude=["NPU"]) device @@ -1190,29 +288,19 @@ Select device for language model .. parsed-literal:: - Dropdown(description='Device:', options=('CPU', 'GPU.0', 'GPU.1'), value='CPU') - - - -.. code:: ipython3 - - lm_device = device.value - -Select device for image encoder -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - + Dropdown(description='Device:', options=('CPU', 'AUTO'), value='CPU') -.. code:: ipython3 - device +Select model variant +~~~~~~~~~~~~~~~~~~~~ -.. code:: ipython3 - img_encoder_device = device.value .. code:: ipython3 + import ipywidgets as widgets + use_int4_lang_model = widgets.Checkbox( value=LANGUAGE_MODEL_PATH_INT4.exists(), description="INT4 language model", @@ -1230,126 +318,110 @@ Select device for image encoder -.. code:: ipython3 +Load OpenVINO model +~~~~~~~~~~~~~~~~~~~ - use_int8_image_encoder = widgets.Checkbox( - value=IMAGE_ENCODER_PATH_INT8.exists(), - description="INT8 image encoder", - disabled=not IMAGE_ENCODER_PATH_INT8.exists(), - ) - - use_int8_image_encoder +For pipeline initialization we should provide path to model directory +and inference device. +.. code:: ipython3 -.. 
parsed-literal:: + import openvino_genai as ov_genai + + model_dir = MODEL_DIR / "FP16" if not use_int4_lang_model.value else MODEL_DIR / "INT4" + + ov_model = ov_genai.VLMPipeline(model_dir, device=device.value) - Checkbox(value=True, description='INT4 language model') +Run OpenVINO model inference +---------------------------- -.. code:: ipython3 +Now, when we have model and defined generation pipeline, we can run +model inference. - lang_model_path = LANGUAGE_MODEL_PATH_INT4 if use_int4_lang_model.value else LANGUAGE_MODEL_PATH - image_encoder_path = IMAGE_ENCODER_PATH_INT8 if use_int8_image_encoder.value else IMAGE_ENCODER_PATH - - ov_llava_model = OVLlavaForCausalLM(core, image_encoder_path, INPUT_EMBEDDING_PATH, lang_model_path, lm_device, img_encoder_device) +For preparing input data, ``VLMPipeline`` use tokenizer and image +processor inside, we just need to convert image to input OpenVINO tensor +and provide question as string. Additionally, we can provides options +for controlling generation process (e.g. number of maximum generated +tokens or using multinomial sampling for decoding instead of greedy +search approach) using ``GenerationConfig``. + +Generation process for long response may be time consuming, for +accessing partial result as soon as it is generated without waiting when +whole process finished, Streaming API can be used. Token streaming is +the mode in which the generative system returns the tokens one by one as +the model generates them. This enables showing progressive generations +to the user rather than waiting for the whole generation. Streaming is +an essential aspect of the end-user experience as it reduces latency, +one of the most critical aspects of a smooth experience. .. code:: ipython3 - from PIL import Image import requests + from PIL import Image + from io import BytesIO + import numpy as np + config = ov_genai.GenerationConfig() + config.max_new_tokens = 100 - from transformers import TextStreamer - url = "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11" - image = Image.open(requests.get(url, stream=True).raw) - question = "What is unusual on this image?" - prompt = f"[INST] \n{question}[/INST]" - streamer = TextStreamer(processor, skip_special_tokens=True, skip_prompt=True) + def load_image(image_file): + if image_file.startswith("http") or image_file.startswith("https"): + response = requests.get(image_file) + image = Image.open(BytesIO(response.content)).convert("RGB") + else: + image = Image.open(image_file).convert("RGB") + image_data = np.array(image.getdata()).reshape(1, image.size[1], image.size[0], 3).astype(np.byte) + return image, ov.Tensor(image_data) - inputs = processor(prompt, image, return_tensors="pt") - print(f"Question:\n{question}") - image - - -.. parsed-literal:: - - Question: - What is unusual on this image? - - - - -.. image:: llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_36_1.png - - - -.. code:: ipython3 - + + def streamer(subword: str) -> bool: + """ + + Args: + subword: sub-word of the generated text. + + Returns: Return flag corresponds whether generation should be stopped. + + """ + print(subword, end="", flush=True) + + + image_file = "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11" + + image, image_tensor = load_image(image_file) + text_message = "What is unusual on this image?" 
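    # Editor's sketch (assumption): GenerationConfig also exposes sampling
    # controls, so multinomial sampling can be tried instead of greedy search.
    # Field names assume openvino-genai >= 2024.5; the values are illustrative.
    sampling_config = ov_genai.GenerationConfig()
    sampling_config.max_new_tokens = 100
    sampling_config.do_sample = True   # sample from the token distribution
    sampling_config.temperature = 0.7  # soften the distribution before sampling
    sampling_config.top_p = 0.9        # nucleus sampling cutoff
    # Pass sampling_config instead of config to ov_model.generate() below to try it.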
+ + prompt = text_message + + display(image) + print(f"Question:\n{text_message}") print("Answer:") - streamer = TextStreamer(processor, skip_special_tokens=True, skip_prompt=True) - output = ov_llava_model.generate(**inputs, max_new_tokens=49, streamer=streamer) + output = ov_model.generate(prompt, image=image_tensor, generation_config=config, streamer=streamer) -.. parsed-literal:: - Setting `pad_token_id` to `eos_token_id`:2 for open-end generation. +.. image:: llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_17_0.png .. parsed-literal:: + Question: + What is unusual on this image? Answer: - The image shows a cat lying on its back inside a cardboard box. What's unusual is that the cat appears to be in a relaxed and somewhat human-like pose, with its paws up in the air and its belly exposed. - + + + The unusual aspect of this image is that a cat is lying inside a cardboard box. Cats are known for their curiosity and love for small, enclosed spaces. They often find comfort and security in boxes, bags, or other confined spaces. In this case, the cat has chosen to lie down in a cardboard box, which is an unconventional and amusing sight. It is not common to see a cat lounging in a box, as they usually Interactive demo ---------------- -.. code:: ipython3 - - import gradio as gr - from transformers import TextIteratorStreamer - from threading import Thread - from PIL import Image - import torch - - - def bot_streaming(message, history): - print(message) - if message["files"]: - image = message["files"][-1]["path"] if isinstance(message["files"][-1], dict) else message["files"][-1] - else: - # if there's no image uploaded for this turn, look for images in the past turns - # kept inside tuples, take the last one - for hist in history: - if isinstance(hist[0], tuple): - image = hist[0][0] - - if image is None: - gr.Error("You need to upload an image for LLaVA to work.") - prompt = f"[INST] \n{message['text']} [/INST]" - image = Image.open(image).convert("RGB") - inputs = processor(prompt, image, return_tensors="pt") - - streamer = TextIteratorStreamer(processor, **{"skip_special_tokens": True}) - generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=100) - - thread = Thread(target=ov_llava_model.generate, kwargs=generation_kwargs) - thread.start() - - text_prompt = f"[INST] \n{message['text']} [/INST]" - - buffer = "" - for new_text in streamer: - buffer += new_text - generated_text_without_prompt = buffer[len(text_prompt) :] - yield generated_text_without_prompt - .. code:: ipython3 if not Path("gradio_helper.py").exists(): @@ -1358,7 +430,7 @@ Interactive demo from gradio_helper import make_demo - demo = make_demo(fn=bot_streaming) + demo = make_demo(ov_model) try: demo.launch(debug=False) @@ -1367,8 +439,3 @@ Interactive demo # if you are launching remotely, specify server_name and server_port # demo.launch(server_name='your server name', server_port='server port in int') # Read more in the docs: https://gradio.app/docs/ - -.. 
code:: ipython3 - - # please uncomment and run this cell for stopping gradio interface - # demo.close() diff --git a/docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_36_1.jpg b/docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_17_0.jpg similarity index 100% rename from docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_36_1.jpg rename to docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_17_0.jpg diff --git a/docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_36_1.png b/docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_17_0.png similarity index 100% rename from docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_36_1.png rename to docs/notebooks/llava-next-multimodal-chatbot-with-output_files/llava-next-multimodal-chatbot-with-output_17_0.png diff --git a/docs/notebooks/llm-agent-react-with-output.rst b/docs/notebooks/llm-agent-react-with-output.rst index 653b57a491dbf2..aced34d99d90bd 100644 --- a/docs/notebooks/llm-agent-react-with-output.rst +++ b/docs/notebooks/llm-agent-react-with-output.rst @@ -108,17 +108,18 @@ does not serve its own LLMs, but rather provides a standard interface for interacting with many different LLMs. In this example, we can select ``Qwen2.5`` as LLM in agent pipeline. -* **qwen2.5-3b-instruct/qwen2.5-7b-instruct/qwen2.5-14b-instruct** - - Qwen2.5 is the latest series of Qwen large language models. Comparing - with Qwen2, Qwen2.5 series brings significant improvements in coding, - mathematics and general knowledge skills. Additionally, it brings - long-context and multiple languages support including Chinese, English, - French, Spanish, Portuguese, German, Italian, Russian, Japanese, Korean, - Vietnamese, Thai, Arabic, and more. For more details, please refer to - `model_card `__, - `blog `__, - `GitHub `__, and - `Documentation `__. + +**qwen2.5-3b-instruct/qwen2.5-7b-instruct/qwen2.5-14b-instruct** - +Qwen2.5 is the latest series of Qwen large language models. Comparing +with Qwen2, Qwen2.5 series brings significant improvements in coding, +mathematics and general knowledge skills. Additionally, it brings +long-context and multiple languages support including Chinese, English, +French, Spanish, Portuguese, German, Italian, Russian, Japanese, Korean, +Vietnamese, Thai, Arabic, and more. For more details, please refer to +`model_card `__, +`blog `__, +`GitHub `__, and +`Documentation `__. .. code:: ipython3 diff --git a/docs/notebooks/llm-chatbot-generate-api-with-output.rst b/docs/notebooks/llm-chatbot-generate-api-with-output.rst index dab94c37d77a4c..817a34011fde2d 100644 --- a/docs/notebooks/llm-chatbot-generate-api-with-output.rst +++ b/docs/notebooks/llm-chatbot-generate-api-with-output.rst @@ -749,7 +749,7 @@ to make it `symmetric `__ you can add ``--sym``. -For INT4 quantization you can also specify the following arguments : +For INT4 quantization you can also specify the following arguments: - The ``--group-size`` parameter will define the group size to use for quantization, -1 it will results in per-column quantization. @@ -852,12 +852,12 @@ of the available generation parameters more deeply later. .. 
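To make these arguments concrete, an example export invocation is shown below; it is an editor's illustration, and the model id, group size, and ratio are examples rather than the notebook's defaults.

.. code:: ipython3

    # Illustrative INT4 export using the options described above.
    !optimum-cli export openvino --model Qwen/Qwen2.5-3B-Instruct --weight-format int4 --group-size 128 --ratio 0.9 --sym qwen2.5-3b-instruct-int4-ov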
code:: ipython3 - from openvino_genai import LLMPipeline + import openvino_genai as ov_genai print(f"Loading model from {model_dir}\n") - pipe = LLMPipeline(str(model_dir), device.value) + pipe = ov_genai.LLMPipeline(str(model_dir), device.value) generation_config = pipe.get_generation_config() diff --git a/docs/notebooks/llm-chatbot-with-output.rst b/docs/notebooks/llm-chatbot-with-output.rst index 0d214f5cccc0fc..88dda48053d8ec 100644 --- a/docs/notebooks/llm-chatbot-with-output.rst +++ b/docs/notebooks/llm-chatbot-with-output.rst @@ -655,13 +655,14 @@ to make it `symmetric `__ you can add ``--sym``. -For INT4 quantization you can also specify the following arguments : - -- The ``--group-size`` parameter will define the group size to use for - quantization, -1 it will results in per-column quantization. -- The ``--ratio`` parameter controls the ratio between 4-bit and 8-bit - quantization. If set to 0.9, it means that 90% of the layers will be - quantized to int4 while 10% will be quantized to int8. +For INT4 quantization you can also specify the following arguments: + +- +The ``--group-size`` parameter will define the group size to use for +quantization, -1 it will results in per-column quantization. - The +``--ratio`` parameter controls the ratio between 4-bit and 8-bit +quantization. If set to 0.9, it means that 90% of the layers will be +quantized to int4 while 10% will be quantized to int8. Smaller group_size and ratio values usually improve accuracy at the sacrifice of the model size and inference latency. diff --git a/docs/notebooks/llm-question-answering-with-output.rst b/docs/notebooks/llm-question-answering-with-output.rst index 2feb5ce81a08f5..f9c792ba1657d6 100644 --- a/docs/notebooks/llm-question-answering-with-output.rst +++ b/docs/notebooks/llm-question-answering-with-output.rst @@ -581,9 +581,9 @@ generation is finished, we will write class-iterator based on .. code:: ipython3 - from openvino_genai import LLMPipeline + import openvino_genai as ov_genai - pipe = LLMPipeline(model_dir.as_posix(), device.value) + pipe = ov_genai.LLMPipeline(model_dir.as_posix(), device.value) print(pipe.generate("The Sun is yellow bacause", temperature=1.2, top_k=4, do_sample=True, max_new_tokens=150)) @@ -675,7 +675,6 @@ Setup imports from time import perf_counter from typing import List import numpy as np - from openvino_genai import StreamerBase from queue import Queue import re @@ -695,7 +694,7 @@ when it is needed. It will help estimate performance. detokinizer_dir = Path(model_dir, "openvino_detokenizer.xml") - class TextIteratorStreamer(StreamerBase): + class TextIteratorStreamer(ov_genai.StreamerBase): def __init__(self, tokenizer): super().__init__() self.tokenizer = tokenizer diff --git a/docs/notebooks/magika-content-type-recognition-with-output.rst b/docs/notebooks/magika-content-type-recognition-with-output.rst index 3ef21583fa5807..383fdc6eebf499 100644 --- a/docs/notebooks/magika-content-type-recognition-with-output.rst +++ b/docs/notebooks/magika-content-type-recognition-with-output.rst @@ -43,6 +43,7 @@ In this tutorial we consider how to bring OpenVINO power into Magika. **Table of contents:** + - `Prerequisites <#prerequisites>`__ - `Define model loading class <#define-model-loading-class>`__ - `Run OpenVINO model inference <#run-openvino-model-inference>`__ @@ -77,8 +78,13 @@ Prerequisites .. parsed-literal:: ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. 
This behaviour is the source of the following dependency conflicts. - supervision 0.24.0 requires numpy<1.23.3,>=1.21.2; python_full_version <= "3.10.0", but you have numpy 1.24.4 which is incompatible. + supervision 0.25.0 requires numpy<1.23.3,>=1.21.2; python_full_version <= "3.10.0", but you have numpy 1.24.4 which is incompatible. + tensorflow 2.12.0 requires keras<2.13,>=2.12.0, but you have keras 2.13.1 which is incompatible. tensorflow 2.12.0 requires numpy<1.24,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow 2.12.0 requires tensorboard<2.13,>=2.12, but you have tensorboard 2.13.0 which is incompatible. + tensorflow 2.12.0 requires tensorflow-estimator<2.13,>=2.12.0, but you have tensorflow-estimator 2.13.0 which is incompatible. + tensorflow-cpu 2.13.1 requires numpy<=1.24.3,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow-cpu 2.13.1 requires typing-extensions<4.6.0,>=3.6.6, but you have typing-extensions 4.12.2 which is incompatible. Note: you may need to restart the kernel to use updated packages. diff --git a/docs/notebooks/meter-reader-with-output.rst b/docs/notebooks/meter-reader-with-output.rst index c1317625880917..713c4d68edae6a 100644 --- a/docs/notebooks/meter-reader-with-output.rst +++ b/docs/notebooks/meter-reader-with-output.rst @@ -637,7 +637,7 @@ bounds of input batch size. .. parsed-literal:: - + diff --git a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_16_1.png b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_16_1.png index f5f465e5e0dad2..52a1b757cb6589 100644 --- a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_16_1.png +++ b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_16_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:5277177823d4b99e277b1ecd207f67b850c5fd312974c2e691e260e016811526 +oid sha256:08c5ae3bb47e095d707bdaa7f8008bed7eeb1f672c82ae4d63334e665ec3e4d8 size 170121 diff --git a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_18_1.png b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_18_1.png index 373f323c93bd56..7151cac5e2d0e8 100644 --- a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_18_1.png +++ b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_18_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:889b083b05c7dd518506e68c76a9c7e78d2cbc1273606e1edbd3c2f308a49d9e +oid sha256:6433ef738eeb00f8d0dc4343ab289073c76321d2e12fe46318fbe374b0f745e2 size 190271 diff --git a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_20_1.png b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_20_1.png index 6c3df0677c7f11..05c23937df9fe5 100644 --- a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_20_1.png +++ b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_20_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:8594e7ed5ce58de7b10de8aa066fa4f9adc43308be46e2ef4dd208da4913301e +oid sha256:3d67df91f05c9aeb0442a1c4aaef7527cf27e9be0938642eed807f8b5342aa7b size 26914 diff --git a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_22_1.png b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_22_1.png index 20a9bb7513c0bc..61e57d642da114 100644 --- a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_22_1.png +++ 
b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_22_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:eafe2bfb1d91093d1208523063def5d5b4d13285153568d173c302b3d600adfa +oid sha256:50b9f932b844d99b59b51f2c6947dd048f96bf1553fe36de3975d3a3ad1715e4 size 8966 diff --git a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_24_1.png b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_24_1.png index 4647a76e34c861..b113bcf292fe00 100644 --- a/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_24_1.png +++ b/docs/notebooks/meter-reader-with-output_files/meter-reader-with-output_24_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:91a9b23ec86373699c0dbbb252a2cb1b9351ebb08b771a79a4fec4bffbb1787d +oid sha256:ad7114f80f8925643c865222d0fe0e05d4f65ab54e0b0d354edebe3e5c1ade7c size 170338 diff --git a/docs/notebooks/minicpm-v-multimodal-chatbot-with-output.rst b/docs/notebooks/minicpm-v-multimodal-chatbot-with-output.rst index 98f1217902a587..7f64dd936292c5 100644 --- a/docs/notebooks/minicpm-v-multimodal-chatbot-with-output.rst +++ b/docs/notebooks/minicpm-v-multimodal-chatbot-with-output.rst @@ -31,11 +31,10 @@ techniques like weights compression using - `Prepare model inference pipeline <#prepare-model-inference-pipeline>`__ -- `Run OpenVINO model inference <#run-openvino-model-inference>`__ - `Select device <#select-device>`__ - - `Select language model variant <#select-language-model-variant>`__ +- `Run OpenVINO model inference <#run-openvino-model-inference>`__ - `Interactive demo <#interactive-demo>`__ Installation Instructions @@ -55,30 +54,19 @@ Prerequisites .. code:: ipython3 - %pip install -q "torch>=2.1" "torchvision" "timm>=0.9.2" "transformers>=4.40" "Pillow" "gradio>=4.19" "tqdm" "sentencepiece" "peft" "huggingface-hub>=0.24.0" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "openvino>=2024.3.0" "nncf>=2.12.0" - - -.. parsed-literal:: - - WARNING: Error parsing dependencies of torchsde: .* suffix can only be used with `==` or `!=` operators - numpy (>=1.19.*) ; python_version >= "3.7" - ~~~~~~~^ - Note: you may need to restart the kernel to use updated packages. - WARNING: Error parsing dependencies of torchsde: .* suffix can only be used with `==` or `!=` operators - numpy (>=1.19.*) ; python_version >= "3.7" - ~~~~~~~^ - Note: you may need to restart the kernel to use updated packages. - + %pip install -q "torch>=2.1" "torchvision" "timm>=0.9.2" "transformers>=4.45" "Pillow" "gradio>=4.19" "tqdm" "sentencepiece" "peft" "huggingface-hub>=0.24.0" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "nncf>=2.14.0" + %pip install -q "git+https://github.com/huggingface/optimum-intel.git" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q -U "openvino>=2024.5" "openvino-tokenizers>=2024.5" "openvino-genai>=2024.5" .. 
code:: ipython3 import requests from pathlib import Path - if not Path("minicpm_helper.py").exists(): - r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/minicpm-v-multimodal-chatbot/minicpm_helper.py") - open("minicpm_helper.py", "w").write(r.text) + if not Path("cmd_helper.py").exists(): + r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py") + open("cmd_helper.py", "w").write(r.text) if not Path("gradio_helper.py").exists(): @@ -97,184 +85,36 @@ Convert model to OpenVINO Intermediate Representation OpenVINO supports PyTorch models via conversion to OpenVINO Intermediate -Representation (IR). `OpenVINO model conversion -API `__ -should be used for these purposes. ``ov.convert_model`` function accepts -original PyTorch model instance and example input for tracing and -returns ``ov.Model`` representing this model in OpenVINO framework. -Converted model can be used for saving on disk using ``ov.save_model`` -function or directly loading on device using ``core.complie_model``. - -``minicpm_helper.py`` script contains helper function for model -conversion, please check its content if you interested in conversion -details. - -.. raw:: html - -
- -.. raw:: html - - - -Click here for more detailed explanation of conversion steps - -.. raw:: html - - - -MiniCPM-V2.6 is autoregressive transformer generative model, it means -that each next model step depends from model output from previous step. -The generation approach is based on the assumption that the probability -distribution of a word sequence can be decomposed into the product of -conditional next word distributions. In other words, model predicts the -next token in the loop guided by previously generated tokens until the -stop-condition will be not reached (generated sequence of maximum length -or end of string token obtained). The way the next token will be -selected over predicted probabilities is driven by the selected decoding -methodology. You can find more information about the most popular -decoding methods in this -`blog `__. The entry point -for the generation process for models from the Hugging Face Transformers -library is the ``generate`` method. You can find more information about -its parameters and configuration in the -`documentation `__. -To preserve flexibility in the selection decoding methodology, we will -convert only model inference for one step. - -The inference flow has difference on first step and for the next. On the -first step, model accept preprocessed input instruction and image, that -transformed to the unified embedding space using ``input_embedding`` and -``image encoder`` models, after that ``language model``, LLM-based part -of model, runs on input embeddings to predict probability of next -generated tokens. On the next step, ``language_model`` accepts only next -token id selected based on sampling strategy and processed by -``input_embedding`` model and cached attention key and values. Since the -output side is auto-regressive, an output token hidden state remains the -same once computed for every further generation step. Therefore, -recomputing it every time you want to generate a new token seems -wasteful. With the cache, the model saves the hidden state once it has -been computed. The model only computes the one for the most recently -generated output token at each time step, re-using the saved ones for -hidden tokens. This reduces the generation complexity from -:math:`O(n^3)` to :math:`O(n^2)` for a transformer model. More details -about how it works can be found in this -`article `__. - -With increasing model size like in modern LLMs, we also can note an -increase in the number of attention blocks and size past key values -tensors respectively. The strategy for handling cache state as model -inputs and outputs in the inference cycle may become a bottleneck for -memory-bounded systems, especially with processing long input sequences, -for example in a chatbot scenario. OpenVINO suggests a transformation -that removes inputs and corresponding outputs with cache tensors from -the model keeping cache handling logic inside the model. Such models are -also called stateful. A stateful model is a model that implicitly -preserves data between two consecutive inference calls. The tensors -saved from one run are kept in an internal memory buffer called a -``state`` or a ``variable`` and may be passed to the next run, while -never being exposed as model output. Hiding the cache enables storing -and updating the cache values in a more device-friendly representation. -It helps to reduce memory consumption and additionally optimize model -performance. 
More details about stateful models and working with state -can be found in `OpenVINO -documentation `__. - -In LLMs, ``input_embedding`` is a part of language model, but for -multimodal case, the first step hidden state produced by this model part -should be integrated with image embeddings into common embedding space. -For ability to reuse this model part and avoid introduction of llm model -instance, we will use it separately. - -``image_encoder`` is represented in MiniCPM-V by pretrained -`SigLIP `__ -model. Additionally, MiniCPM uses perceiver ``resampler`` that -compresses the image representations. To preserve model ability to -process images of different size with respect aspect ratio combined in -batch, we will use ``image_encoder`` and ``resampler`` as separated -models. - -To sum up above, model consists of 4 parts: - -- **Image Encoder** for encoding input images into embedding space. It - includes SigLIP model. -- **Resampler** for compression image representation. -- **Input Embedding** for conversion input text tokens into embedding - space. -- **Language Model** for generation answer based on input embeddings - provided by Image Encoder and Input Embedding models. - -Let’s convert each model part. - -.. raw:: html - -
- -.. code:: ipython3 - - from minicpm_helper import convert_minicpmv26 - - # uncomment the line to see model conversion code - # ??convert_minicpmv26 - - -.. parsed-literal:: - - 2024-10-07 09:57:53.402018: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-10-07 09:57:53.403877: I tensorflow/tsl/cuda/cudart_stub.cc:28] Could not find cuda drivers on your machine, GPU will not be used. - 2024-10-07 09:57:53.440490: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. - To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-10-07 09:57:54.270302: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT - - -.. code:: ipython3 - - model_id = "openbmb/MiniCPM-V-2_6" - - model_dir = convert_minicpmv26(model_id) - - -.. parsed-literal:: - - ⌛ openbmb/MiniCPM-V-2_6 conversion started. Be patient, it may takes some time. - ⌛ Load Original model - - - -.. parsed-literal:: - - Loading checkpoint shards: 0%| | 0/4 [00:00`__ is the +interface between the Transformers and Diffusers libraries and the +different tools and libraries provided by Intel to accelerate end-to-end +pipelines on Intel architectures. + +Among other use cases, Optimum Intel provides a simple interface to +optimize your Transformers and Diffusers models, convert them to the +OpenVINO Intermediate Representation (IR) format and run inference using +OpenVINO Runtime. ``optimum-cli`` provides command line interface for +model conversion and optimization. + +General command format: + +.. code:: bash + + optimum-cli export openvino --model --task + +where task is task to export the model for, if not specified, the task +will be auto-inferred based on the model. You can find a mapping between +tasks and model classes in Optimum TaskManager +`documentation `__. +Additionally, you can specify weights compression using +``--weight-format`` argument with one of following options: ``fp32``, +``fp16``, ``int8`` and ``int4``. Fro int8 and int4 +`nncf `__ will be used for +weight compression. More details about model export provided in `Optimum +Intel +documentation `__. Compress Language Model Weights to 4 bits ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -336,50 +176,37 @@ documentation -.. - - **Note:** weights compression process may require additional time and - memory for performing. You can disable it using widget below: - -.. code:: ipython3 - - from minicpm_helper import compression_widget - - to_compress_weights = compression_widget() - - to_compress_weights - - - - -.. parsed-literal:: - - Checkbox(value=True, description='Weights Compression') - - - .. 
code:: ipython3 + from cmd_helper import optimum_cli import nncf - import gc import openvino as ov - - from minicpm_helper import llm_path, copy_llm_files - - - compression_configuration = {"mode": nncf.CompressWeightsMode.INT4_SYM, "group_size": 64, "ratio": 1.0, "all_layers": True} + import shutil + import gc - core = ov.Core() - llm_int4_path = Path("language_model_int4") / llm_path.name - if to_compress_weights.value and not (model_dir / llm_int4_path).exists(): - ov_model = core.read_model(model_dir / llm_path) + def compress_lm_weights(model_dir): + compression_configuration = {"mode": nncf.CompressWeightsMode.INT4_SYM, "group_size": 64, "ratio": 1.0, "all_layers": True} + ov_model_path = model_dir / "openvino_language_model.xml" + ov_int4_model_path = model_dir / "openvino_language_model_int4.xml" + ov_model = ov.Core().read_model(ov_model_path) ov_compressed_model = nncf.compress_weights(ov_model, **compression_configuration) - ov.save_model(ov_compressed_model, model_dir / llm_int4_path) + ov.save_model(ov_compressed_model, ov_int4_model_path) del ov_compressed_model del ov_model gc.collect() - copy_llm_files(model_dir, llm_int4_path.parent) + ov_model_path.unlink() + ov_model_path.with_suffix(".bin").unlink() + shutil.move(ov_int4_model_path, ov_model_path) + shutil.move(ov_int4_model_path.with_suffix(".bin"), ov_model_path.with_suffix(".bin")) + + + model_id = "openbmb/MiniCPM-V-2_6" + model_dir = Path(model_id.split("/")[-1] + "-ov") + + if not model_dir.exists(): + optimum_cli(model_id, model_dir, additional_args={"trust-remote-code": "", "weight-format": "fp16"}) + compress_lm_weights(model_dir) .. parsed-literal:: @@ -394,32 +221,27 @@ Prepare model inference pipeline .. image:: https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/2727402e-3697-442e-beca-26b149967c84 -As discussed, the model comprises Image Encoder and LLM (with separated -text embedding part) that generates answer. In ``minicpm_helper.py`` we -defined LLM inference class ``OvModelForCausalLMWithEmb`` that will -represent generation cycle, It is based on `HuggingFace Transformers -GenerationMixin `__ -and looks similar to `Optimum -Intel `__ -``OVModelForCausalLM``\ that is used for LLM inference with only -difference that it can accept input embedding. In own turn, general -multimodal model class ``OvMiniCPMVModel`` handles chatbot functionality -including image processing and answer generation using LLM. - -.. code:: ipython3 - - from minicpm_helper import OvModelForCausalLMWithEmb, OvMiniCPMV, init_model # noqa: F401 - - # uncomment the line to see model inference class - # ??OVMiniCPMV - - # uncomment the line to see language model inference class - # ??OvModelForCausalLMWithEmb - -Run OpenVINO model inference ----------------------------- - - +`OpenVINO™ GenAI `__ +is a library of the most popular Generative AI model pipelines, +optimized execution methods, and samples that run on top of highly +performant `OpenVINO +Runtime `__. + +This library is friendly to PC and laptop execution, and optimized for +resource consumption. It requires no external dependencies to run +generative models as it already includes all the core functionality +(e.g. tokenization via openvino-tokenizers). OpenVINO™ GenAI is a flavor +of OpenVINO™, aiming to simplify running inference of generative AI +models. It hides the complexity of the generation process and minimizes +the amount of code required. + +Inference Visual language models can be implemented using OpenVINO GenAI +``VLMPipeline`` class. 
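A minimal, hypothetical sketch of multi-turn chat with ``VLMPipeline`` is shown below. It assumes the ``ov_model`` pipeline, ``image_tensor`` and ``config`` objects created later in this section; the ``finish_chat`` call and the follow-up question are illustrative assumptions rather than code from the original notebook.

.. code:: ipython3

    # Hypothetical sketch: the pipeline keeps the conversation history between
    # start_chat() and finish_chat(), so the follow-up question can be asked
    # without passing the image again.
    ov_model.start_chat()
    first_answer = ov_model.generate("What is unusual on this image?", image=image_tensor, generation_config=config)
    follow_up = ov_model.generate("What color is the cat?", generation_config=config)
    ov_model.finish_chat()
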
Similarly to LLMPipeline, that we discussed in +this +`notebook `__. +It supports chat mode with preserving conversational history inside +pipeline, that allows us effectively implements chatbot that supports +conversation about provided images content. Select device ~~~~~~~~~~~~~ @@ -443,46 +265,78 @@ Select device -Select language model variant -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - .. code:: ipython3 - from minicpm_helper import lm_variant_selector - + import openvino_genai as ov_genai - use_int4_lang_model = lm_variant_selector(model_dir / llm_int4_path) - - use_int4_lang_model - - + ov_model = ov_genai.VLMPipeline(model_dir, device=device.value) +Run OpenVINO model inference +---------------------------- -.. parsed-literal:: - Checkbox(value=True, description='INT4 language model') +For preparing input data, ``VLMPipeline`` use tokenizer and image +processor inside, we just need to convert image to input OpenVINO tensor +and provide question as string. Additionally, we can provides options +for controlling generation process (e.g. number of maximum generated +tokens or using multinomial sampling for decoding instead of greedy +search approach) using ``GenerationConfig``. +Generation process for long response may be time consuming, for +accessing partial result as soon as it is generated without waiting when +whole process finished, Streaming API can be used. Token streaming is +the mode in which the generative system returns the tokens one by one as +the model generates them. This enables showing progressive generations +to the user rather than waiting for the whole generation. Streaming is +an essential aspect of the end-user experience as it reduces latency, +one of the most critical aspects of a smooth experience. .. code:: ipython3 - ov_model = init_model(model_dir, llm_path.parent if not use_int4_lang_model.value else llm_int4_path.parent, device.value) - - -.. parsed-literal:: - - applied slice for lm head + import requests + from PIL import Image + from io import BytesIO + import numpy as np + image_path = "cat.png" + + + config = ov_genai.GenerationConfig() + config.max_new_tokens = 100 + + + def load_image(image_file): + if isinstance(image_file, str) and (image_file.startswith("http") or image_file.startswith("https")): + response = requests.get(image_file) + image = Image.open(BytesIO(response.content)).convert("RGB") + else: + image = Image.open(image_file).convert("RGB") + image_data = np.array(image.getdata()).reshape(1, image.size[1], image.size[0], 3).astype(np.byte) + return image, ov.Tensor(image_data) + + + def streamer(subword: str) -> bool: + """ + + Args: + subword: sub-word of the generated text. + + Returns: Return flag corresponds whether generation should be stopped. + + """ + print(subword, end="", flush=True) + + + if not Path(image_path).exists(): + url = "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11" + image = Image.open(requests.get(url, stream=True).raw) + image.save(image_path) .. code:: ipython3 - import requests - from PIL import Image + image, image_tensor = load_image(image_path) - url = "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/d5fbbd1a-d484-415c-88cb-9986625b7b11" - image = Image.open(requests.get(url, stream=True).raw) question = "What is unusual on this image?" print(f"Question:\n{question}") @@ -497,30 +351,19 @@ Select language model variant -.. image:: minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_17_1.png +.. 
image:: minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_12_1.png .. code:: ipython3 - tokenizer = ov_model.processor.tokenizer - - msgs = [{"role": "user", "content": question}] - - - print("Answer:") - res = ov_model.chat(image=image, msgs=msgs, context=None, tokenizer=tokenizer, sampling=False, stream=True, max_new_tokens=50) - - generated_text = "" - for new_text in res: - generated_text += new_text - print(new_text, flush=True, end="") + ov_model.start_chat() + output = ov_model.generate(question, image=image_tensor, generation_config=config, streamer=streamer) .. parsed-literal:: - Answer: - The unusual aspect of this image is the cat's relaxed and vulnerable position. Typically, cats avoid exposing their bellies to potential threats or dangers because it leaves them open for attack by predators in nature; however here we see a domesticated pet comfortably lying + The unusual aspect of this image is the cat's relaxed and vulnerable position. Typically, cats avoid exposing their bellies, which are sensitive and vulnerable areas, to potential threats. In this image, the cat is lying on its back in a cardboard box, exposing its belly and hindquarters, which is not a common sight. This behavior could indicate that the cat feels safe and comfortable in its environment, suggesting a strong bond with its owner and a sense of security in its home. Interactive demo ---------------- diff --git a/docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_17_1.jpg b/docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_12_1.jpg similarity index 100% rename from docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_17_1.jpg rename to docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_12_1.jpg diff --git a/docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_17_1.png b/docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_12_1.png similarity index 100% rename from docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_17_1.png rename to docs/notebooks/minicpm-v-multimodal-chatbot-with-output_files/minicpm-v-multimodal-chatbot-with-output_12_1.png diff --git a/docs/notebooks/mllama-3.2-with-output.rst b/docs/notebooks/mllama-3.2-with-output.rst index ba338d67dc677e..19ebd2d658174e 100644 --- a/docs/notebooks/mllama-3.2-with-output.rst +++ b/docs/notebooks/mllama-3.2-with-output.rst @@ -53,9 +53,9 @@ Prerequisites .. code:: ipython3 - %pip install -q "torch>=2.1" "torchvision" "Pillow" "tqdm" "datasets>=2.14.6" "gradio>=4.36" "nncf>=2.13.0" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "torch>=2.1" "torchvision" "Pillow" "tqdm" "datasets>=2.14.6" "gradio>=4.36" "nncf>=2.14.0" --extra-index-url https://download.pytorch.org/whl/cpu %pip install -q "transformers>=4.45" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -Uq --pre "openvino>2024.4.0" --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly + %pip install -Uq "openvino>=2024.5.0" .. 
code:: ipython3 diff --git a/docs/notebooks/mobileclip-video-search-with-output.rst b/docs/notebooks/mobileclip-video-search-with-output.rst index a606830470aa94..6c195540cda7d7 100644 --- a/docs/notebooks/mobileclip-video-search-with-output.rst +++ b/docs/notebooks/mobileclip-video-search-with-output.rst @@ -62,39 +62,152 @@ Prerequisites .. code:: ipython3 - from pathlib import Path + import requests + + + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + open("notebook_utils.py", "w").write(r.text) + + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) - repo_dir = Path("./ml-mobileclip") - if not repo_dir.exists(): - !git clone https://github.com/apple/ml-mobileclip.git .. parsed-literal:: - Cloning into 'ml-mobileclip'... - remote: Enumerating objects: 95, done. - remote: Counting objects: 100% (95/95), done. - remote: Compressing objects: 100% (66/66), done. - remote: Total 95 (delta 38), reused 85 (delta 28), pack-reused 0 (from 0) - Unpacking objects: 100% (95/95), 469.11 KiB | 3.13 MiB/s, done. + 1491 + + + +.. code:: ipython3 + + from cmd_helper import clone_repo + + + clone_repo("https://github.com/apple/ml-mobileclip.git") + + + + +.. parsed-literal:: + + PosixPath('ml-mobileclip') + .. code:: ipython3 %pip install -q "./ml-mobileclip" --no-deps - %pip install -q "clip-benchmark>=1.4.0" "datasets>=2.8.0" "open-clip-torch>=2.20.0" "timm>=0.9.5" "torch>=1.13.1" "torchvision>=0.14.1" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "clip-benchmark>=1.4.0" "datasets>=2.8.0" "open-clip-torch>=2.20.0" "timm>=0.9.5" "torch>=2.5.0" "torchvision>=0.20.0" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "openvino>=2024.0.0" "gradio>=4.19" "matplotlib" "Pillow" "altair" "pandas" "opencv-python" "tqdm" "matplotlib>=3.4" + %pip install -q "matplotlib>=3.4" "Pillow" "altair" "pandas" "tqdm" "salesforce-lavis==1.0.2" .. parsed-literal:: Note: you may need to restart the kernel to use updated packages. - ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. - mobileclip 0.1.0 requires torchvision==0.14.1, but you have torchvision 0.17.2+cpu which is incompatible. + ERROR: Could not find a version that satisfies the requirement torch>=2.5.0 (from versions: 1.4.0, 1.4.0+cpu, 1.5.0, 1.5.0+cpu, 1.5.1, 1.5.1+cpu, 1.6.0, 1.6.0+cpu, 1.7.0, 1.7.0+cpu, 1.7.1, 1.7.1+cpu, 1.8.0, 1.8.0+cpu, 1.8.1, 1.8.1+cpu, 1.9.0, 1.9.0+cpu, 1.9.1, 1.9.1+cpu, 1.10.0, 1.10.0+cpu, 1.10.1, 1.10.1+cpu, 1.10.2, 1.10.2+cpu, 1.11.0, 1.11.0+cpu, 1.12.0, 1.12.0+cpu, 1.12.1, 1.12.1+cpu, 1.13.0, 1.13.0+cpu, 1.13.1, 1.13.1+cpu, 2.0.0, 2.0.0+cpu, 2.0.1, 2.0.1+cpu, 2.1.0, 2.1.0+cpu, 2.1.1, 2.1.1+cpu, 2.1.2, 2.1.2+cpu, 2.2.0, 2.2.0+cpu, 2.2.1, 2.2.1+cpu, 2.2.2, 2.2.2+cpu, 2.3.0, 2.3.0+cpu, 2.3.1, 2.3.1+cpu, 2.4.0, 2.4.0+cpu, 2.4.1, 2.4.1+cpu) + ERROR: No matching distribution found for torch>=2.5.0 Note: you may need to restart the kernel to use updated packages. + error: subprocess-exited-with-error + + × pip subprocess to install build dependencies did not run successfully. 
+ │ exit code: 1 + ╰─> [68 lines of output] + Ignoring numpy: markers 'python_version >= "3.9"' don't match your environment + Collecting setuptools + Using cached setuptools-75.3.0-py3-none-any.whl.metadata (6.9 kB) + Collecting cython<3.0,>=0.25 + Using cached Cython-0.29.37-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl.metadata (3.1 kB) + Collecting cymem<2.1.0,>=2.0.2 + Using cached cymem-2.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (8.4 kB) + Collecting preshed<3.1.0,>=3.0.2 + Using cached preshed-3.0.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.2 kB) + Collecting murmurhash<1.1.0,>=0.28.0 + Using cached murmurhash-1.0.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.0 kB) + Collecting thinc<8.4.0,>=8.3.0 + Using cached thinc-8.3.2.tar.gz (193 kB) + Installing build dependencies: started + Installing build dependencies: finished with status 'error' + error: subprocess-exited-with-error + + × pip subprocess to install build dependencies did not run successfully. + │ exit code: 1 + ╰─> [38 lines of output] + Ignoring numpy: markers 'python_version >= "3.9"' don't match your environment + Collecting setuptools + Using cached setuptools-75.3.0-py3-none-any.whl.metadata (6.9 kB) + Collecting cython<3.0,>=0.25 + Using cached Cython-0.29.37-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl.metadata (3.1 kB) + Collecting murmurhash<1.1.0,>=1.0.2 + Using cached murmurhash-1.0.10-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.0 kB) + Collecting cymem<2.1.0,>=2.0.2 + Using cached cymem-2.0.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (8.4 kB) + Collecting preshed<3.1.0,>=3.0.2 + Using cached preshed-3.0.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.2 kB) + Collecting blis<1.1.0,>=1.0.0 + Using cached blis-1.0.1.tar.gz (3.6 MB) + Installing build dependencies: started + Installing build dependencies: finished with status 'error' + error: subprocess-exited-with-error + + × pip subprocess to install build dependencies did not run successfully. 
+ │ exit code: 1 + ╰─> [8 lines of output] + Collecting setuptools + Using cached setuptools-75.3.0-py3-none-any.whl.metadata (6.9 kB) + Collecting cython>=0.25 + Using cached Cython-3.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (3.2 kB) + ERROR: Ignored the following versions that require a different python version: 1.25.0 Requires-Python >=3.9; 1.25.1 Requires-Python >=3.9; 1.25.2 Requires-Python >=3.9; 1.26.0 Requires-Python <3.13,>=3.9; 1.26.1 Requires-Python <3.13,>=3.9; 1.26.2 Requires-Python >=3.9; 1.26.3 Requires-Python >=3.9; 1.26.4 Requires-Python >=3.9; 2.0.0 Requires-Python >=3.9; 2.0.1 Requires-Python >=3.9; 2.0.2 Requires-Python >=3.9; 2.1.0 Requires-Python >=3.10; 2.1.0rc1 Requires-Python >=3.10; 2.1.1 Requires-Python >=3.10; 2.1.2 Requires-Python >=3.10; 2.1.3 Requires-Python >=3.10; 75.4.0 Requires-Python >=3.9; 75.5.0 Requires-Python >=3.9; 75.6.0 Requires-Python >=3.9 + ERROR: Could not find a version that satisfies the requirement numpy<3.0.0,>=2.0.0 (from versions: 1.3.0, 1.4.1, 1.5.0, 1.5.1, 1.6.0, 1.6.1, 1.6.2, 1.7.0, 1.7.1, 1.7.2, 1.8.0, 1.8.1, 1.8.2, 1.9.0, 1.9.1, 1.9.2, 1.9.3, 1.10.0.post2, 1.10.1, 1.10.2, 1.10.4, 1.11.0, 1.11.1, 1.11.2, 1.11.3, 1.12.0, 1.12.1, 1.13.0, 1.13.1, 1.13.3, 1.14.0, 1.14.1, 1.14.2, 1.14.3, 1.14.4, 1.14.5, 1.14.6, 1.15.0, 1.15.1, 1.15.2, 1.15.3, 1.15.4, 1.16.0, 1.16.1, 1.16.2, 1.16.3, 1.16.4, 1.16.5, 1.16.6, 1.17.0, 1.17.1, 1.17.2, 1.17.3, 1.17.4, 1.17.5, 1.18.0, 1.18.1, 1.18.2, 1.18.3, 1.18.4, 1.18.5, 1.19.0, 1.19.1, 1.19.2, 1.19.3, 1.19.4, 1.19.5, 1.20.0, 1.20.1, 1.20.2, 1.20.3, 1.21.0, 1.21.1, 1.21.2, 1.21.3, 1.21.4, 1.21.5, 1.21.6, 1.22.0, 1.22.1, 1.22.2, 1.22.3, 1.22.4, 1.23.0, 1.23.1, 1.23.2, 1.23.3, 1.23.4, 1.23.5, 1.24.0, 1.24.1, 1.24.2, 1.24.3, 1.24.4) + ERROR: No matching distribution found for numpy<3.0.0,>=2.0.0 + + [end of output] + + note: This error originates from a subprocess, and is likely not a problem with pip. + error: subprocess-exited-with-error + + × pip subprocess to install build dependencies did not run successfully. + │ exit code: 1 + ╰─> See above for output. + + note: This error originates from a subprocess, and is likely not a problem with pip. + [end of output] + + note: This error originates from a subprocess, and is likely not a problem with pip. + error: subprocess-exited-with-error + + × pip subprocess to install build dependencies did not run successfully. + │ exit code: 1 + ╰─> See above for output. + + note: This error originates from a subprocess, and is likely not a problem with pip. + [end of output] + + note: This error originates from a subprocess, and is likely not a problem with pip. + error: subprocess-exited-with-error + + × pip subprocess to install build dependencies did not run successfully. + │ exit code: 1 + ╰─> See above for output. + + note: This error originates from a subprocess, and is likely not a problem with pip. + Note: you may need to restart the kernel to use updated packages. + + +.. code:: ipython3 + + %pip install -q "git+https://github.com/huggingface/optimum-intel.git" "openvino>=2024.0.0" "altair" "opencv-python" "opencv-contrib-python" "gradio>=4.19" + + +.. parsed-literal:: + Note: you may need to restart the kernel to use updated packages. @@ -138,13 +251,37 @@ comparison purposes, you can select different models among: faster and 2.8x smaller. More details about model can be found in `research paper `__ and `GitHub repository `__. 
+- **BLIP-2** - BLIP2 was introduced in the paper `BLIP-2: Bootstrapping + Language-Image Pre-training with Frozen Image Encoders and Large + Language Models `__ by Li et + al. and first released in this + `repository `__. + It is a generic and efficient pre-training strategy that easily + harvests development of pretrained vision models and large language + models (LLMs) for vision-language pretraining. BLIP-2 consists of 3 + models: a CLIP-like image encoder, a Querying Transformer (Q-Former) + and a large language model. .. code:: ipython3 + from pathlib import Path + import ipywidgets as widgets + model_dir = Path("checkpoints") + + def default_image_probs(image_features, text_features): + image_probs = (100.0 * text_features @ image_features.T).softmax(dim=-1) + return image_probs + + + def blip2_image_probs(image_features, text_features): + image_probs = image_features[:, 0, :] @ text_features[:, 0, :].t() + return image_probs + + supported_models = { "MobileCLIP": { "mobileclip_s0": { @@ -152,30 +289,35 @@ comparison purposes, you can select different models among: "pretrained": model_dir / "mobileclip_s0.pt", "url": "https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_s0.pt", "image_size": 256, + "image_probs": default_image_probs, }, "mobileclip_s1": { "model_name": "mobileclip_s1", "pretrained": model_dir / "mobileclip_s1.pt", "url": "https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_s1.pt", "image_size": 256, + "image_probs": default_image_probs, }, "mobileclip_s2": { "model_name": "mobileclip_s0", "pretrained": model_dir / "mobileclip_s2.pt", "url": "https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_s2.pt", "image_size": 256, + "image_probs": default_image_probs, }, "mobileclip_b": { "model_name": "mobileclip_b", "pretrained": model_dir / "mobileclip_b.pt", "url": "https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_b.pt", "image_size": 224, + "image_probs": default_image_probs, }, "mobileclip_blt": { "model_name": "mobileclip_b", "pretrained": model_dir / "mobileclip_blt.pt", "url": "https://docs-assets.developer.apple.com/ml-research/datasets/mobileclip/mobileclip_blt.pt", "image_size": 224, + "image_probs": default_image_probs, }, }, "CLIP": { @@ -183,21 +325,25 @@ comparison purposes, you can select different models among: "model_name": "ViT-B-32", "pretrained": "laion2b_s34b_b79k", "image_size": 224, + "image_probs": default_image_probs, }, "clip-vit-b-16": { "model_name": "ViT-B-16", "pretrained": "openai", "image_size": 224, + "image_probs": default_image_probs, }, "clip-vit-l-14": { "model_name": "ViT-L-14", "pretrained": "datacomp_xl_s13b_b90k", "image_size": 224, + "image_probs": default_image_probs, }, "clip-vit-h-14": { "model_name": "ViT-H-14", "pretrained": "laion2b_s32b_b79k", "image_size": 224, + "image_probs": default_image_probs, }, }, "SigLIP": { @@ -205,11 +351,21 @@ comparison purposes, you can select different models among: "model_name": "ViT-B-16-SigLIP", "pretrained": "webli", "image_size": 224, + "image_probs": default_image_probs, }, "siglip-vit-l-16": { "model_name": "ViT-L-16-SigLIP-256", "pretrained": "webli", "image_size": 256, + "image_probs": default_image_probs, + }, + }, + "Blip2": { + "blip2_feature_extractor": { + "model_name": "blip2_feature_extractor", + "pretrained": "pretrain_vitL", + "image_size": 224, + "image_probs": blip2_image_probs, }, }, } @@ -223,7 +379,7 @@ comparison purposes, you can select different 
models among: .. parsed-literal:: - Dropdown(description='Model type:', options=('MobileCLIP', 'CLIP', 'SigLIP'), value='MobileCLIP') + Dropdown(description='Model type:', options=('MobileCLIP', 'CLIP', 'SigLIP', 'Blip2'), value='MobileCLIP') @@ -250,14 +406,6 @@ comparison purposes, you can select different models among: .. code:: ipython3 - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - - open("notebook_utils.py", "w").write(r.text) - from notebook_utils import download_file, device_widget model_config = available_models[model_checkpoint.value] @@ -373,7 +521,7 @@ Prepare image gallery -.. image:: mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_10_4.png +.. image:: mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_12_4.png Prepare model @@ -384,23 +532,79 @@ Prepare model The code bellow download model weights, create model class instance and preprocessing utilities +.. code:: ipython3 + + import torch + + + class Blip2Model(torch.nn.Module): + def __init__(self, ln_vision, visual_encoder, query_tokens, q_former, vision_proj, text_proj, tokenizer): + super().__init__() + self.ln_vision = ln_vision + self.visual_encoder = visual_encoder + self.query_tokens = query_tokens + self.q_former = q_former + self.vision_proj = vision_proj + self.text_proj = text_proj + self.tok = tokenizer + + def encode_image(self, image): + image_embeds_frozen = self.ln_vision(self.visual_encoder(image)) + image_embeds_frozen = image_embeds_frozen.float() + image_atts = torch.ones(image_embeds_frozen.size()[:-1], dtype=torch.long) + query_tokens = self.query_tokens.expand(image_embeds_frozen.shape[0], -1, -1) + + query_output = self.q_former.bert( + query_embeds=query_tokens, + encoder_hidden_states=image_embeds_frozen, + encoder_attention_mask=image_atts, + return_dict=True, + ) + image_embeds = query_output.last_hidden_state + image_features = self.vision_proj(image_embeds) + + return image_features + + def encode_text(self, input_ids, attention_mask): + text_output = self.q_former.bert( + input_ids, + attention_mask=attention_mask, + return_dict=True, + ) + text_embeds = text_output.last_hidden_state + text_features = self.text_proj(text_embeds) + return text_features + + def tokenizer(self, text_descriptions): + input_ids = self.tok(text_descriptions, return_tensors="pt", padding=True).input_ids + attention_mask = self.tok(text_descriptions, return_tensors="pt", padding=True).attention_mask + text = {"input_ids": input_ids, "attention_mask": attention_mask} + return text + .. 
code:: ipython3 import torch import time - from PIL import Image import mobileclip import open_clip # instantiate model model_name = model_config["model_name"] pretrained = model_config["pretrained"] + if model_type.value == "MobileCLIP": model_dir.mkdir(exist_ok=True) model_url = model_config["url"] download_file(model_url, directory=model_dir) model, _, preprocess = mobileclip.create_model_and_transforms(model_name, pretrained=pretrained) tokenizer = mobileclip.get_tokenizer(model_name) + elif model_type.value == "Blip2": + from lavis.models import load_model_and_preprocess + + model, vis_processors, txt_processors = load_model_and_preprocess(name=model_name, model_type=pretrained, is_eval=True) + model = Blip2Model(model.ln_vision, model.visual_encoder, model.query_tokens, model.Qformer, model.vision_proj, model.text_proj, model.tokenizer) + preprocess = vis_processors["eval"] + tokenizer = model.tokenizer else: model, _, preprocess = open_clip.create_model_and_transforms(model_name, pretrained=pretrained) tokenizer = open_clip.get_tokenizer(model_name) @@ -408,7 +612,7 @@ preprocessing utilities .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/timm/models/layers/__init__.py:48: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/timm/models/layers/__init__.py:48: FutureWarning: Importing from timm.models.layers is deprecated, please import via timm.layers warnings.warn(f"Importing from {__name__} is deprecated, please import via timm.layers", FutureWarning) @@ -427,7 +631,7 @@ Perform search image_tensor = torch.stack([preprocess(image) for image in images]) text = tokenizer(text_descriptions) - + image_probs_function = model_config["image_probs"] with torch.no_grad(): # calculate image embeddings @@ -437,16 +641,13 @@ Perform search print(f"Image encoding took {image_encoding_end - image_encoding_start:.3} ms") # calculate text embeddings text_encoding_start = time.perf_counter() - text_features = model.encode_text(text) + text_features = model.encode_text(**text) if model_type.value == "Blip2" else model.encode_text(text) text_encoding_end = time.perf_counter() print(f"Text encoding took {text_encoding_end - text_encoding_start:.3} ms") - # normalize embeddings image_features /= image_features.norm(dim=-1, keepdim=True) text_features /= text_features.norm(dim=-1, keepdim=True) - - # calcualte similarity score - image_probs = (100.0 * text_features @ image_features.T).softmax(dim=-1) + image_probs = image_probs_function(image_features, text_features) selected_image = [torch.argmax(image_probs).item()] visualize_result(images, input_labels[0], selected_image); @@ -454,12 +655,12 @@ Perform search .. parsed-literal:: - Image encoding took 0.114 ms - Text encoding took 0.0113 ms + Image encoding took 0.0979 ms + Text encoding took 0.0114 ms -.. image:: mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_14_1.png +.. image:: mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_17_1.png Convert Model to OpenVINO Intermediate Representation format @@ -522,7 +723,10 @@ be used separately. Let’s convert each part to OpenVINO. 
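        # For BLIP-2, the tokenizer wrapper defined above returns a dict with "input_ids" and
        # "attention_mask", so the branch below converts the text encoder from example_input alone,
        # without the static [-1, text.shape[1]] input specification used for the single-tensor tokenizers.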
if not text_encoder_path.exists(): model.forward = model.encode_text - ov_text_encoder = ov.convert_model(model, example_input=text, input=[-1, text.shape[1]]) + if model_type.value == "Blip2": + ov_text_encoder = ov.convert_model(model, example_input=text) + else: + ov_text_encoder = ov.convert_model(model, example_input=text, input=[-1, text.shape[1]]) ov.save_model(ov_text_encoder, text_encoder_path) del ov_text_encoder gc.collect() @@ -533,7 +737,7 @@ be used separately. Let’s convert each part to OpenVINO. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/mobileclip/modules/common/transformer.py:125: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/mobileclip-video-search/ml-mobileclip/mobileclip/modules/common/transformer.py:125: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if seq_len != self.num_embeddings: @@ -610,7 +814,7 @@ Perform search image_features /= image_features.norm(dim=-1, keepdim=True) text_features /= text_features.norm(dim=-1, keepdim=True) - image_probs = (100.0 * text_features @ image_features.T).softmax(dim=-1) + image_probs = image_probs_function(image_features, text_features) selected_image = [torch.argmax(image_probs).item()] visualize_result(images, input_labels[0], selected_image); @@ -618,12 +822,77 @@ Perform search .. parsed-literal:: - Image encoding took 0.0294 ms - Text encoding took 0.00498 ms + Image encoding took 0.0282 ms + Text encoding took 0.0049 ms + + + +.. image:: mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_28_1.png + + +(optional) Translation model +~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Since all text embedding models in this notebook natively supports input +in English only, we can insert a translation model in this pipeline to +support searching in Chinese. + +- **opus-mt-zh-en t** - This is a translation model developed by + Language Technology Research Group at the University of Helsinki. It + supports Chinese as source Language and English as target Language + `model card `__. + +.. code:: ipython3 + + from pathlib import Path + + cn2en_trans_model_path = "ov_models/cn2en_trans_model" + cn2en_trans_model_id = "Helsinki-NLP/opus-mt-zh-en" + + if not Path(cn2en_trans_model_path).exists(): + !optimum-cli export openvino --model {cn2en_trans_model_id} --task text2text-generation-with-past --trust-remote-code {cn2en_trans_model_path} + + +.. parsed-literal:: + + 2024-11-22 01:36:23.757087: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:36:23.781523: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. 
+ To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/marian/tokenization_marian.py:175: UserWarning: Recommended: pip install sacremoses. + warnings.warn("Recommended: pip install sacremoses.") + Moving the following attributes in the config to the generation config: {'max_length': 512, 'num_beams': 6, 'bad_words_ids': [[65000]]}. You are seeing this warning because you've set generation parameters in the model config, as opposed to in the generation config. + `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/marian/modeling_marian.py:207: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if attn_weights.size() != (bsz * self.num_heads, tgt_len, src_len): + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/marian/modeling_marian.py:214: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if attention_mask.size() != (bsz, 1, tgt_len, src_len): + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/marian/modeling_marian.py:246: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if attn_output.size() != (bsz * self.num_heads, tgt_len, self.head_dim): + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:88: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if input_shape[-1] > 1 or self.sliding_window is not None: + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
+ if past_key_values_length > 0: + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/marian/modeling_marian.py:166: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if ( + Exporting tokenizers to OpenVINO is not supported for tokenizers version > 0.19 and openvino version <= 2024.4. Please downgrade to tokenizers version <= 0.19 to export tokenizers to OpenVINO. + + +.. code:: ipython3 + + from transformers import AutoTokenizer + from optimum.intel import OVModelForSeq2SeqLM + + tr_tokenizer = AutoTokenizer.from_pretrained(cn2en_trans_model_path) + tr_model = OVModelForSeq2SeqLM.from_pretrained(cn2en_trans_model_path) +.. parsed-literal:: -.. image:: mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_25_1.png + 2024-11-22 01:36:43.187797: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:36:43.213112: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/marian/tokenization_marian.py:175: UserWarning: Recommended: pip install sacremoses. + warnings.warn("Recommended: pip install sacremoses.") Interactive Demo @@ -634,7 +903,9 @@ Interactive Demo In this part, you can try different supported by tutorial models in searching frames in the video by text query or image. Upload video and provide text query or reference image for search and model will find the -most relevant frames according to provided query. Please note, different +most relevant frames according to provided query. You can also try +querying in Chinese, and translation model will be triggered +automatically for Chinese-to-English translation. Please note, different models can require different optimal threshold for search. .. code:: ipython3 @@ -674,7 +945,22 @@ models can require different optimal threshold for search. ) - def get_preprocess_and_tokenizer(model_name): + def is_english(text): + for char in text: + if not char.isascii(): + return False + return True + + + def translate(text): + if tr_tokenizer: + t = tr_tokenizer(text, return_tensors="pt") + r = tr_model.generate(**t) + text = tr_tokenizer.decode(r[0][1:-1]) + return text + + + def get_preprocess_probs_tokenizer(model_name): if "mobileclip" in model_name: resolution = supported_models["MobileCLIP"][model_name]["image_size"] resize_size = resolution @@ -689,13 +975,23 @@ models can require different optimal threshold for search. 
] preprocess = Compose(aug_list) tokenizer = mobileclip.get_tokenizer(supported_models["MobileCLIP"][model_name]["model_name"]) + image_probs = default_image_probs + elif "blip2" in model_name: + from lavis.models import load_model_and_preprocess + + model, vis_processors, txt_processors = load_model_and_preprocess(name=model_name, model_type=pretrained, is_eval=True) + model = Blip2Model(model.ln_vision, model.visual_encoder, model.query_tokens, model.Qformer, model.vision_proj, model.text_proj, model.tokenizer) + preprocess = vis_processors["eval"] + tokenizer = model.tokenizer + image_probs = blip2_image_probs else: model_configs = supported_models["SigLIP"] if "siglip" in model_name else supported_models["CLIP"] resize_size = model_configs[model_name]["image_size"] preprocess = image_transform((resize_size, resize_size), is_train=False, resize_mode="longest") tokenizer = open_clip.get_tokenizer(model_configs[model_name]["model_name"]) + image_probs = default_image_probs - return preprocess, tokenizer + return preprocess, image_probs, tokenizer def run( @@ -716,11 +1012,12 @@ models can require different optimal threshold for search. global tokenizer global ov_compiled_image_encoder global ov_compiled_text_encoder + global image_probs_function if current_model != model_name or device != current_device: ov_compiled_image_encoder = core.compile_model(ov_models_dir / f"{model_name}_im_encoder.xml", device) ov_compiled_text_encoder = core.compile_model(ov_models_dir / f"{model_name}_text_encoder.xml", device) - preprocess, tokenizer = get_preprocess_and_tokenizer(model_name) + preprocess, image_probs_function, tokenizer = get_preprocess_probs_tokenizer(model_name) current_model = model_name current_device = device # Load video @@ -734,6 +1031,9 @@ models can require different optimal threshold for search. query_features /= query_features.norm(dim=-1, keepdim=True) # Get text query features else: + if not is_english(text_search): + text_search = translate(text_search) + print(f"Translated input text: {text_search}") # Tokenize search phrase text = tokenizer([text_search]) # Encode text query @@ -748,9 +1048,8 @@ models can require different optimal threshold for search. 
image_features = torch.from_numpy(ov_compiled_image_encoder(image)[0]) image_features /= image_features.norm(dim=-1, keepdim=True) - probs = query_features.cpu().numpy() @ image_features.cpu().numpy().T - probs = probs[0] - + probs = image_probs_function(image_features, query_features) + probs = probs.cpu().numpy().squeeze(1) if "blip2" in model_name else probs[0] # Save frame similarity values df = pd.DataFrame( { diff --git a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_10_4.png b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_10_4.png deleted file mode 100644 index 3097711be91501..00000000000000 --- a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_10_4.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:012ab44a6e4292be32171ccb588f72c75c17a662e04cf27f271e5ddd33c89b99 -size 627462 diff --git a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_12_4.png b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_12_4.png new file mode 100644 index 00000000000000..1ae3f7b2579a93 --- /dev/null +++ b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_12_4.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3a4911e0d1407c8830ec6c68e3b24190f1a49da24b7532db29d77b298e36af4 +size 627462 diff --git a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_14_1.png b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_14_1.png deleted file mode 100644 index 7d60b0ba72dd72..00000000000000 --- a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_14_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fc816c6ac9360e9432eb81eca13dd8f4afa0c0ad90312c876fad89dbbb80a65e -size 449871 diff --git a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_17_1.png b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_17_1.png new file mode 100644 index 00000000000000..4a223a2ea61f46 --- /dev/null +++ b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_17_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59965056d04847ce7a28c35bc515102682954ca33d8b0dc43f7d54dc6d677f18 +size 449871 diff --git a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_25_1.png b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_25_1.png deleted file mode 100644 index 7d60b0ba72dd72..00000000000000 --- a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_25_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:fc816c6ac9360e9432eb81eca13dd8f4afa0c0ad90312c876fad89dbbb80a65e -size 449871 diff --git a/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_28_1.png b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_28_1.png new file mode 100644 index 00000000000000..4a223a2ea61f46 --- /dev/null +++ 
b/docs/notebooks/mobileclip-video-search-with-output_files/mobileclip-video-search-with-output_28_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59965056d04847ce7a28c35bc515102682954ca33d8b0dc43f7d54dc6d677f18 +size 449871 diff --git a/docs/notebooks/mobilevlm-language-assistant-with-output.rst b/docs/notebooks/mobilevlm-language-assistant-with-output.rst deleted file mode 100644 index 5902537e3026a5..00000000000000 --- a/docs/notebooks/mobilevlm-language-assistant-with-output.rst +++ /dev/null @@ -1,765 +0,0 @@ -Mobile language assistant with MobileVLM and OpenVINO -===================================================== - -`MobileVLM `__ is a competent -multimodal vision language model (MMVLM) targeted to run on mobile -devices. It is an amalgamation of a myriad of architectural designs and -techniques that are mobile-oriented, which comprises a set of language -models at the scale of 1.4B and 2.7B parameters, trained from scratch, a -multimodal vision model that is pre-trained in the CLIP fashion, -cross-modality interaction via an efficient projector. - -|image0| - -The MobileVLM architecture (right) utilizes -`MobileLLaMA `__ as -its language model, intakes :math:`\mathbf{X}_v` and -:math:`\mathbf{X}_q` which are image and language instructions as -respective inputs and gives :math:`\mathbf{Y}_a` as the output language -response. LDP refers to a lightweight downsample projector (left). - -See more information on official -`GitHub `__ project page -and `paper `__. - - -**Table of contents:** - - -- `Install requirements <#install-requirements>`__ -- `Clone MobileVLM repository <#clone-mobilevlm-repository>`__ -- `Import required packages <#import-required-packages>`__ -- `Load the model <#load-the-model>`__ -- `Convert model to OpenVINO Intermediate Representation - (IR) <#convert-model-to-openvino-intermediate-representation-ir>`__ -- `Inference <#inference>`__ - - - `Load OpenVINO model <#load-openvino-model>`__ - - `Prepare input data <#prepare-input-data>`__ - - `Run generation process <#run-generation-process>`__ - -- `Interactive inference <#interactive-inference>`__ - -Installation Instructions -~~~~~~~~~~~~~~~~~~~~~~~~~ - -This is a self-contained example that relies solely on its own code. - -We recommend running the notebook in a virtual environment. You only -need a Jupyter server to start. For details, please refer to -`Installation -Guide `__. - -.. |image0| image:: https://github.com/Meituan-AutoML/MobileVLM/raw/main/assets/mobilevlm_arch.png - -Install requirements --------------------- - - - -.. code:: ipython3 - - %pip install -q "torch>=2.1.0" "timm>=0.9.12" --extra-index-url "https://download.pytorch.org/whl/cpu" - %pip install -q "transformers>=4.33.1,<4.35.0" accelerate "sentencepiece>=0.1.99" "openvino>=2023.2.0" "nncf>=2.7.0" ipywidgets numpy "gradio>=4.19" - - -.. parsed-literal:: - - Note: you may need to restart the kernel to use updated packages. - ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. - mobileclip 0.1.0 requires torchvision==0.14.1, but you have torchvision 0.17.2+cpu which is incompatible. - Note: you may need to restart the kernel to use updated packages. - - -Clone MobileVLM repository --------------------------- - - - -.. 
code:: ipython3 - - from pathlib import Path - import sys - - MOBILEVLM_REPO_DIR = Path("./MobileVLM") - if not MOBILEVLM_REPO_DIR.exists(): - !git clone -q "https://github.com/Meituan-AutoML/MobileVLM.git" - sys.path.insert(0, str(MOBILEVLM_REPO_DIR)) - -Import required packages ------------------------- - - - -.. code:: ipython3 - - import warnings - import itertools - import gc - from typing import Optional, List, Tuple - - from mobilevlm.model.mobilevlm import load_pretrained_model - from mobilevlm.conversation import conv_templates, SeparatorStyle - from mobilevlm.utils import ( - disable_torch_init, - process_images, - tokenizer_image_token, - KeywordsStoppingCriteria, - ) - from mobilevlm.constants import IMAGE_TOKEN_INDEX, DEFAULT_IMAGE_TOKEN - import PIL - import torch - import transformers - import numpy as np - import gradio as gr - import openvino as ov - import nncf - import ipywidgets as widgets - - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/utils/generic.py:311: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - 2024-11-05 02:02:06.143728: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 02:02:06.177889: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. - To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 02:02:06.679118: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/utils/generic.py:311: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - - -.. parsed-literal:: - - INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, tensorflow, onnx, openvino - - -.. code:: ipython3 - - MODELS_DIR = Path("./models") - MODEL_PATH = "mtgv/MobileVLM-1.7B" - - TEMPERATURE = 0.2 - TOP_P = None - NUM_BEAMS = 1 - MAX_NEW_TOKENS = 512 - - IMAGE_PATH = MOBILEVLM_REPO_DIR / "assets" / "samples" / "demo.jpg" - PROMPT_STR = "Who is the author of this book?\nAnswer the question using a single word or phrase." - -Load the model --------------- - - - -To load the model, we use pre-defined ``load_pretrained_model`` function -in ``mobilevlm`` module. It returns the model itself, tokenizer, and -image processor to convert images to appropriate tensors. - -.. code:: ipython3 - - model_name = MODEL_PATH.split("/")[-1] - disable_torch_init() - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - tokenizer, model, image_processor, _ = load_pretrained_model(MODEL_PATH, device="cpu") - model = model.to(dtype=torch.float32) - - -.. 
parsed-literal:: - - You are resizing the embedding layer without providing a `pad_to_multiple_of` parameter. This means that the new embedding dimension will be 32000. This might induce some performance reduction as *Tensor Cores* will not be available. For more details about this, or help on choosing the correct value for resizing, refer to this guide: https://docs.nvidia.com/deeplearning/performance/dl-performance-matrix-multiplication/index.html#requirements-tc - - -Convert model to OpenVINO Intermediate Representation (IR) ----------------------------------------------------------- - - - -.. code:: ipython3 - - def cleanup_torchscript_cache(): - """ - Helper for removing cached model representation - """ - torch._C._jit_clear_class_registry() - torch.jit._recursive.concrete_type_store = torch.jit._recursive.ConcreteTypeStore() - torch.jit._state._clear_class_state() - -For reducing memory consumption, weights compression optimization can be -applied using `NNCF `__. Weight -compression aims to reduce the memory footprint of a model. It can also -lead to significant performance improvement for large memory-bound -models, such as Large Language Models (LLMs). LLMs and other models, -which require extensive memory to store the weights during inference, -can benefit from weight compression in the following ways: - -- enabling the inference of exceptionally large models that cannot be - accommodated in the memory of the device; - -- improving the inference performance of the models by reducing the - latency of the memory access when computing the operations with - weights, for example, Linear layers. - -`Neural Network Compression Framework -(NNCF) `__ provides 4-bit / -8-bit mixed weight quantization as a compression method primarily -designed to optimize LLMs. The main difference between weights -compression and full model quantization (post-training quantization) is -that activations remain floating-point in the case of weights -compression which leads to a better accuracy. Weight compression for -LLMs provides a solid inference performance improvement which is on par -with the performance of the full model quantization. In addition, weight -compression is data-free and does not require a calibration dataset, -making it easy to use. - -``nncf.compress_weights`` function can be used for performing weights -compression. The function accepts an OpenVINO model and other -compression parameters. Compared to INT8 compression, INT4 compression -improves performance even more, but introduces a minor drop in -prediction quality. - -More details about weights compression, can be found in `OpenVINO -documentation `__. - -Please select below whether you would like to run INT4 weight -compression instead of INT8 weight compression. - -.. code:: ipython3 - - compression_mode = widgets.Dropdown( - options=["INT4", "INT8"], - value="INT4", - description="Compression mode:", - disabled=False, - ) - - compression_mode - - - - -.. parsed-literal:: - - Dropdown(description='Compression mode:', options=('INT4', 'INT8'), value='INT4') - - - -.. code:: ipython3 - - stage1_xml_path = MODELS_DIR / f"stage1_{compression_mode.value}.xml" - stage2_xml_path = MODELS_DIR / f"stage2_{compression_mode.value}.xml" - -.. code:: ipython3 - - if compression_mode.value == "INT4": - wc_parameters = dict(mode=nncf.CompressWeightsMode.INT4_ASYM, group_size=128, ratio=0.8) - else: - wc_parameters = dict(mode=nncf.CompressWeightsMode.INT8) - -.. 
code:: ipython3 - - class ModelWrapper(torch.nn.Module): - def __init__(self, model): - super().__init__() - self.model = model - - def forward( - self, - input_ids: torch.LongTensor = None, - attention_mask: Optional[torch.Tensor] = None, - past_key_values: Optional[List[torch.FloatTensor]] = None, - inputs_embeds: Optional[torch.FloatTensor] = None, - ): - outputs = self.model.model( - input_ids=input_ids, - attention_mask=attention_mask, - past_key_values=past_key_values, - inputs_embeds=inputs_embeds, - ) - hidden_states = outputs[0] - logits = self.model.lm_head(hidden_states) - - return (logits,) + outputs[1:] - -.. code:: ipython3 - - def set_input_names(model, past_key_values): - input_names = [ - "input_ids", - "attention_mask", - *itertools.chain.from_iterable([f"past_key_values.{idx}.key", f"past_key_values.{idx}.value"] for idx, _ in enumerate(past_key_values)), - ] - assert len(input_names) == len(model.inputs) - for _input, input_name in zip(model.inputs, input_names): - _input.get_tensor().set_names({input_name}) - -.. code:: ipython3 - - def set_output_names(model, past_key_values): - output_names = [ - "logits", - *itertools.chain.from_iterable([f"present.{idx}.key", f"present.{idx}.value"] for idx, _ in enumerate(past_key_values)), - ] - assert len(output_names) == len(model.outputs) - for out, out_name in zip(ov_model.outputs, output_names): - out.get_tensor().set_names({out_name}) - -.. code:: ipython3 - - example_input = { - "inputs_embeds": torch.zeros((1, 205, 2048)), - "attention_mask": torch.ones((1, 205), dtype=torch.long), - } - - wrapped = ModelWrapper(model) - past_key_values = wrapped(**example_input)[1] - - if not stage1_xml_path.exists(): - ov_model = ov.convert_model(wrapped, example_input=example_input) - set_output_names(ov_model, past_key_values) - ov_model = nncf.compress_weights(ov_model, **wc_parameters) - ov.save_model(ov_model, stage1_xml_path) - cleanup_torchscript_cache() - del ov_model - gc.collect() - - -.. parsed-literal:: - - WARNING:tensorflow:Please fix your imports. Module tensorflow.python.training.tracking.base has been moved to tensorflow.python.trackable.base. The old module will be deleted in version 2.11. - - -.. parsed-literal:: - - [ WARNING ] Please fix your imports. Module %s has been moved to %s. The old module will be deleted in version %s. - - -.. parsed-literal:: - - WARNING:nncf:NNCF provides best results with torch==2.4.*, while current torch version is 2.2.2+cpu. If you encounter issues, consider switching to torch==2.4.* - - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/llama/modeling_llama.py:595: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if input_shape[-1] > 1: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/llama/modeling_llama.py:119: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
- if seq_len > self.max_seq_len_cached: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/llama/modeling_llama.py:348: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/llama/modeling_llama.py:355: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/llama/modeling_llama.py:365: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! - if attn_output.size() != (bsz, self.num_heads, q_len, self.head_dim): - - - -.. parsed-literal:: - - Output() - - - - - - - - - -.. parsed-literal:: - - INFO:nncf:Statistics of the bitwidth distribution: - ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ - │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ - ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ - │ 8 │ 24% (43 / 169) │ 20% (42 / 168) │ - ├────────────────┼─────────────────────────────┼────────────────────────────────────────┤ - │ 4 │ 76% (126 / 169) │ 80% (126 / 168) │ - ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ - - - -.. parsed-literal:: - - Output() - - - - - - - - - -.. code:: ipython3 - - example_input = { - "input_ids": torch.ones((1, 1), dtype=torch.long), - "past_key_values": past_key_values, - "attention_mask": torch.ones((1, past_key_values[-1][-1].shape[-2] + 1), dtype=torch.long), - } - - if not stage2_xml_path.exists(): - ov_model = ov.convert_model( - wrapped, - example_input=example_input, - ) - set_input_names(ov_model, past_key_values) - set_output_names(ov_model, past_key_values) - ov_model = nncf.compress_weights(ov_model, **wc_parameters) - ov.save_model(ov_model, stage2_xml_path) - cleanup_torchscript_cache() - del ov_model - gc.collect() - - -.. parsed-literal:: - - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:165: UserWarning: The .grad attribute of a Tensor that is not a leaf Tensor is being accessed. Its .grad attribute won't be populated during autograd.backward(). If you indeed want the .grad field to be populated for a non-leaf Tensor, use .retain_grad() on the non-leaf Tensor. If you access the non-leaf Tensor by mistake, make sure you access the leaf Tensor instead. 
See github.com/pytorch/pytorch/pull/30531 for more informations. (Triggered internally at aten/src/ATen/core/TensorBody.h:489.) - if a.grad is not None: - - - -.. parsed-literal:: - - Output() - - - - - - - - - -.. parsed-literal:: - - INFO:nncf:Statistics of the bitwidth distribution: - ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ - │ Num bits (N) │ % all parameters (layers) │ % ratio-defining parameters (layers) │ - ┝━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┿━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┥ - │ 8 │ 28% (44 / 170) │ 20% (42 / 168) │ - ├────────────────┼─────────────────────────────┼────────────────────────────────────────┤ - │ 4 │ 72% (126 / 170) │ 80% (126 / 168) │ - ┕━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┙ - - - -.. parsed-literal:: - - Output() - - - - - - - - - -.. code:: ipython3 - - prepare_inputs_labels_for_multimodal = model.prepare_inputs_labels_for_multimodal - prepare_inputs_for_generation = model.prepare_inputs_for_generation - config = model.config - config.save_pretrained(MODELS_DIR) - -.. code:: ipython3 - - del wrapped - del model - gc.collect(); - -Inference ---------- - - - -``OVMobileLlamaForCausalLM`` class provides ease-to-use interface for -using model in generation scenario. It is based on -``transformers.generation.GenerationMixin`` that gives us opportunity to -reuse all reach capabilities for generation implemented in HuggingFace -Transformers library. More details about this interface can be found in -`HuggingFace -documentation `__. - -.. code:: ipython3 - - core = ov.Core() - - - class OVMobileLlamaForCausalLM(transformers.GenerationMixin): - def __init__(self, stage1_path, stage2_path, device): - self.stage1 = core.compile_model(stage1_path, device) - self.stage2 = core.read_model(stage2_path) - - self.generation_config = transformers.GenerationConfig.from_model_config(config) - self.config = transformers.AutoConfig.from_pretrained(MODELS_DIR) - self.main_input_name = "input_ids" - self.device = torch.device("cpu") - self.prepare_inputs_for_generation = prepare_inputs_for_generation - self.num_pkv = 2 - self.input_names = {key.get_any_name(): idx for idx, key in enumerate(self.stage2.inputs)} - self.output_names = {key.get_any_name(): idx for idx, key in enumerate(self.stage2.outputs)} - self.key_value_input_names = [key for key in self.input_names if "key_values" in key] - self.key_value_output_names = [key for key in self.output_names if "present" in key] - stage2 = core.compile_model(self.stage2, device) - self.request = stage2.create_infer_request() - self._supports_cache_class = False - - def can_generate(self): - """Returns True to validate the check that the model using `GenerationMixin.generate()` can indeed generate.""" - return True - - def __call__( - self, - input_ids: torch.LongTensor, - images: torch.Tensor, - attention_mask: Optional[torch.LongTensor] = None, - prefix_mask: Optional[torch.LongTensor] = None, - past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, - **kwargs, - ) -> transformers.modeling_outputs.CausalLMOutputWithPast: - return self.forward(input_ids, images, attention_mask, prefix_mask, past_key_values) - - def forward( - self, - input_ids: torch.LongTensor, - images: torch.Tensor, - attention_mask: Optional[torch.LongTensor] = None, - prefix_mask: Optional[torch.LongTensor] = None, - past_key_values: Optional[Tuple[Tuple[torch.FloatTensor]]] = None, - **kwargs, - ) -> 
transformers.modeling_outputs.CausalLMOutputWithPast: - """General inference method""" - inputs = {} - if past_key_values is not None: - # Flatten the past_key_values - attention_mask = torch.ones( - (input_ids.shape[0], past_key_values[-1][-1].shape[-2] + 1), - dtype=input_ids.dtype, - ) - past_key_values = tuple(past_key_value for pkv_per_layer in past_key_values for past_key_value in pkv_per_layer) - # Add the past_key_values to the decoder inputs - inputs = dict(zip(self.key_value_input_names, past_key_values)) - - else: - return self.forward_with_image(input_ids, images, attention_mask) - inputs["input_ids"] = np.array(input_ids) - - if "attention_mask" in self.input_names: - inputs["attention_mask"] = np.array(attention_mask) - - # Run inference - self.request.start_async(inputs, share_inputs=True) - self.request.wait() - - logits = torch.from_numpy(self.request.get_tensor("logits").data) - - # Tuple of length equal to : number of layer * number of past_key_value per decoder layer (2 corresponds to the self-attention layer) - past_key_values = tuple(self.request.get_tensor(key).data for key in self.key_value_output_names) - # Tuple of tuple of length `n_layers`, with each tuple of length equal to 2 (k/v of self-attention) - - past_key_values = tuple(past_key_values[i : i + self.num_pkv] for i in range(0, len(past_key_values), self.num_pkv)) - - return transformers.modeling_outputs.CausalLMOutputWithPast(logits=logits, past_key_values=past_key_values) - - def forward_with_image(self, input_ids, images, attention_mask): - """First step inference method, that resolves multimodal data""" - _, attention_mask, _, input_embed, _ = prepare_inputs_labels_for_multimodal(input_ids, attention_mask, images=images, past_key_values=None, labels=None) - outs = self.stage1({"inputs_embeds": input_embed, "attention_mask": attention_mask}) - logits = outs[0] - pkv = list(outs.values())[1:] - pkv = tuple(pkv[i : i + self.num_pkv] for i in range(0, len(pkv), self.num_pkv)) - return transformers.modeling_outputs.CausalLMOutputWithPast(logits=torch.from_numpy(logits), past_key_values=pkv) - -Now, when we have model and defined generation pipeline, we can run -model inference. - -Select device from dropdown list for running inference using OpenVINO. - -.. code:: ipython3 - - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) - - from notebook_utils import device_widget - - device = device_widget("CPU", exclude=["NPU"]) - - device - - - - -.. parsed-literal:: - - Dropdown(description='Device:', options=('CPU', 'AUTO'), value='CPU') - - - -Load OpenVINO model -~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - ov_model = OVMobileLlamaForCausalLM(stage1_xml_path, stage2_xml_path, device.value) - -Prepare input data -~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - images = [PIL.Image.open(IMAGE_PATH).convert("RGB")] - images_tensor = process_images(images, image_processor, transformers.AutoConfig.from_pretrained(MODELS_DIR)) - -.. 
code:: ipython3 - - conv = conv_templates["v1"].copy() - conv.append_message(conv.roles[0], DEFAULT_IMAGE_TOKEN + "\n" + PROMPT_STR) - conv.append_message(conv.roles[1], None) - prompt = conv.get_prompt() - stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2 - input_ids = tokenizer_image_token(prompt, tokenizer, IMAGE_TOKEN_INDEX, return_tensors="pt").unsqueeze(0) - stopping_criteria = KeywordsStoppingCriteria([stop_str], tokenizer, input_ids) - -.. code:: ipython3 - - print(PROMPT_STR) - images[0] - - -.. parsed-literal:: - - Who is the author of this book? - Answer the question using a single word or phrase. - - - - -.. image:: mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.png - - - -Run generation process -~~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - output_ids = ov_model.generate( - input_ids, - images=images_tensor, - do_sample=True if TEMPERATURE > 0 else False, - temperature=TEMPERATURE, - top_p=TOP_P, - num_beams=NUM_BEAMS, - max_new_tokens=MAX_NEW_TOKENS, - use_cache=True, - stopping_criteria=[stopping_criteria], - ) - input_token_len = input_ids.shape[1] - n_diff_input_output = (input_ids != output_ids[:, :input_token_len]).sum().item() - if n_diff_input_output > 0: - print(f"[Warning] {n_diff_input_output} output_ids are not the same as the input_ids") - outputs = tokenizer.batch_decode(output_ids[:, input_token_len:], skip_special_tokens=True)[0] - outputs = outputs.strip() - if outputs.endswith(stop_str): - outputs = outputs[: -len(stop_str)] - print(f"🚀 {model_name} with OpenVINO: {outputs.strip()}\n") - - -.. parsed-literal:: - - 🚀 MobileVLM-1.7B with OpenVINO: Susan Wise Bauer - - - -Interactive inference ---------------------- - - - -.. code:: ipython3 - - def generate(img, prompt): - images_tensor = process_images([img], image_processor, transformers.AutoConfig.from_pretrained(MODELS_DIR)) - prompt = DEFAULT_IMAGE_TOKEN + "\n" + prompt - conv = conv_templates["v1"].copy() - conv.append_message(conv.roles[0], prompt) - conv.append_message(conv.roles[1], None) - prompt = conv.get_prompt() - stop_str = conv.sep if conv.sep_style != SeparatorStyle.TWO else conv.sep2 - input_ids = tokenizer_image_token(prompt, tokenizer, IMAGE_TOKEN_INDEX, return_tensors="pt").unsqueeze(0) - stopping_criteria = KeywordsStoppingCriteria([stop_str], tokenizer, input_ids) - - output_ids = ov_model.generate( - input_ids, - images=images_tensor, - do_sample=True if TEMPERATURE > 0 else False, - temperature=TEMPERATURE, - top_p=TOP_P, - num_beams=NUM_BEAMS, - max_new_tokens=MAX_NEW_TOKENS, - use_cache=True, - stopping_criteria=[stopping_criteria], - ) - input_token_len = input_ids.shape[1] - outputs = tokenizer.batch_decode(output_ids[:, input_token_len:], skip_special_tokens=True)[0] - outputs = outputs.strip() - if outputs.endswith(stop_str): - outputs = outputs[: -len(stop_str)] - - return outputs.strip() - -.. code:: ipython3 - - demo = gr.Interface( - fn=generate, - inputs=[gr.Image(label="Image", type="pil"), gr.Textbox(label="Prompt")], - outputs=gr.Textbox(), - examples=[ - [ - str(IMAGE_PATH), - PROMPT_STR, - ] - ], - allow_flagging="never", - ) - - try: - demo.launch(debug=False) - except Exception: - demo.launch(debug=False, share=True) - # if you are launching remotely, specify server_name and server_port - # demo.launch(server_name='your server name', server_port='server port in int') - # Read more in the docs: https://gradio.app/docs/ - - -.. 
parsed-literal:: - - Running on local URL: http://127.0.0.1:7860 - - To create a public link, set `share=True` in `launch()`. - - - - - - - - -.. code:: ipython3 - - # please uncomment and run this cell for stopping gradio interface - # demo.close() diff --git a/docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.jpg b/docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.jpg deleted file mode 100644 index e42650c7277fc7..00000000000000 --- a/docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.jpg +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:e56dcd6fe79cd88720a73dcbf31e50faf6d057787713d62b0a35fa49d4789a52 -size 24608 diff --git a/docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.png b/docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.png deleted file mode 100644 index 55c71c94f52e35..00000000000000 --- a/docs/notebooks/mobilevlm-language-assistant-with-output_files/mobilevlm-language-assistant-with-output_32_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:74e9cd0ac19f22348008108002eaf4c5c3a666e15c7b205041138107020b3883 -size 162588 diff --git a/docs/notebooks/multilora-image-generation-with-output.rst b/docs/notebooks/multilora-image-generation-with-output.rst new file mode 100644 index 00000000000000..7b6f4bc381ff27 --- /dev/null +++ b/docs/notebooks/multilora-image-generation-with-output.rst @@ -0,0 +1,468 @@ +Multi LoRA Image Generation +=========================== + +LoRA, or `Low-Rank Adaptation `__, is +a popular and lightweight training technique used for fine-tuning Large +Language and Stable Diffusion Models without needing full model +training. Full fine-tuning of larger models (consisting of billions of +parameters) is inherently expensive and time-consuming. LoRA works by +adding a smaller number of new weights to the model for training, rather +than retraining the entire parameter space of the model. This makes +training with LoRA much faster, memory-efficient, and produces smaller +model weights (a few hundred MBs), which are easier to store and share. + +At its core, LoRA leverages the concept of low-rank matrix +factorization. Instead of updating all the parameters in a neural +network, LoRA decomposes the parameter space into two low-rank matrices. +This decomposition allows the model to capture essential information +with fewer parameters, significantly reducing the amount of data and +computation required for fine-tuning. + +|image0| + +By incorporating LoRA into Stable Diffusion models, we can enhance their +ability to understand complex relationships and patterns in data. This +approach opens up numerous possibilities: \* **Art and Design**: Artists +can fine-tune models to generate images that align with their unique +styles, creating personalized artwork effortlessly. \* **Content +Creation**: Businesses can customize image generation models to produce +branded visuals, enhancing marketing and media production. \* +**Entertainment**: Game developers and filmmakers can use fine-tuned +models to create realistic and imaginative worlds, streamlining the +creative process. + +In this tutorial we explore possibilities to use LoRA with OpenVINO +Generative API. 
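+
+To make the parameter savings concrete, the sketch below (an
+illustration only, with hypothetical layer sizes and rank) compares the
+number of trainable values in a full weight matrix with the number in
+its low-rank LoRA factors:
+
+.. code:: ipython3
+
+    import torch
+
+    d_out, d_in, rank = 2048, 2048, 16  # hypothetical layer shape and LoRA rank
+
+    base_weight = torch.randn(d_out, d_in)   # frozen pretrained weight W
+    lora_b = torch.zeros(d_out, rank)        # trainable low-rank factor B
+    lora_a = torch.randn(rank, d_in) * 0.01  # trainable low-rank factor A
+
+    # at inference the adapter acts as W + B @ A; fusing bakes this sum into W
+    effective_weight = base_weight + lora_b @ lora_a
+
+    full_params = base_weight.numel()
+    lora_params = lora_a.numel() + lora_b.numel()
+    print(f"full fine-tuning: {full_params} values")
+    print(f"LoRA factors:     {lora_params} values ({lora_params / full_params:.2%} of full)")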
+ + +**Table of contents:** + + +- `Prerequisites <#prerequisites>`__ +- `Convert Diffusion Model using Optimum + Intel <#convert-diffusion-model-using-optimum-intel>`__ + + - `Applying LoRA to Original Diffusers pipeline before + conversion <#applying-lora-to-original-diffusers-pipeline-before-conversion>`__ + +- `Image Generation using OpenVINO + GenAI <#image-generation-using-openvino-genai>`__ + + - `Integration LoRA into + pipeline <#integration-lora-into-pipeline>`__ + - `Prepare LoRA Adapters <#prepare-lora-adapters>`__ + - `Create Inference Pipeline <#create-inference-pipeline>`__ + - `Selection specific adapter during + generation <#selection-specific-adapter-during-generation>`__ + - `Use multiple adapters + simultaneously <#use-multiple-adapters-simultaneously>`__ + - `Disable adapters <#disable-adapters>`__ + +- `Interactive demo <#interactive-demo>`__ + +Installation Instructions +~~~~~~~~~~~~~~~~~~~~~~~~~ + +This is a self-contained example that relies solely on its own code. + +We recommend running the notebook in a virtual environment. You only +need a Jupyter server to start. For details, please refer to +`Installation +Guide `__. + +.. |image0| image:: https://github.com/user-attachments/assets/bf823c71-13b4-402c-a7b4-d6fc30a60d88 + +.. code:: ipython3 + + import platform + + %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu torch torchvision transformers accelerate "diffusers>0.25.0" pillow "gradio>=4.19" "peft>=0.7.0" + %pip install -q "git+https://github.com/huggingface/optimum-intel.git" + %pip install -q -U "openvino>=2024.5.0" "openvino-tokenizers>=2024.5.0" "openvino-genai>=2024.5.0" + + if platform.system() == "Darwin": + %pip install -q "numpy<2.0.0" + +.. code:: ipython3 + + import requests + from pathlib import Path + + notebook_utils_path = Path("notebook_utils.py") + lora_config_path = Path("lora_config.py") + + if not notebook_utils_path.exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + notebook_utils_path.open("w").write(r.text) + + if not lora_config_path.exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/multilora-image-generation/lora_config.py", + ) + lora_config_path.open("w").write(r.text) + +Convert Diffusion Model using Optimum Intel +------------------------------------------- + + + +`Optimum Intel `__ is +the interface between the +`Transformers `__ and +`Diffusers `__ libraries +and OpenVINO to accelerate end-to-end pipelines on Intel architectures. +It provides ease-to-use +`interface `__ +for exporting models to `OpenVINO Intermediate Representation +(IR) `__ +format. + +Applying LoRA to Original Diffusers pipeline before conversion +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +LoRA can be easily added to `Diffusers +pipeline `__ +before export. At the export stage, LoRA weights will be fused to +original model weights and converted model will preserve LoRA provided +behavior. This approach is suitable when you need model with adapter +capabilities by default and it does not required configuration at +inference time (e.g. changing weight coefficient for adapter). For +example, we can use this method for speedup generation process with +integration `LCM LoRA `__. +Previously, we already considered with approach in this +`tutorial `__. 
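+
+As a side note, the blending coefficient that gets frozen into the
+weights is chosen at fusing time. The sketch below is an illustration
+only: it mirrors the export cell later in this section but relies on
+the ``lora_scale`` argument of the Diffusers ``fuse_lora`` method to
+bake in a reduced adapter strength, while the notebook itself fuses the
+adapter at full strength:
+
+.. code:: ipython3
+
+    from diffusers import DiffusionPipeline
+
+    ILLUSTRATE_PARTIAL_FUSE = False  # set to True to try a partially fused export
+
+    if ILLUSTRATE_PARTIAL_FUSE:
+        pipe = DiffusionPipeline.from_pretrained("stabilityai/stable-diffusion-xl-base-1.0", variant="fp16", use_safetensors=True)
+        pipe.load_lora_weights("latent-consistency/lcm-lora-sdxl")
+        # the 0.7 coefficient is baked into the weights here and cannot be changed later
+        pipe.fuse_lora(lora_scale=0.7)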
+ +Using ``optimum-cli`` for exporting models requires to provide model id +on HuggingFace Hub or local directory with saved model. In case, if +model stored in multiple separated repositories or directories (e.g. you +want to replace VAE component or add LoRA), it should be merged and +saved on disk before export. For avoiding this, we will use +``export_from_model`` function that accepts initialized model. +Additionally, for using model with OpenVINO GenAI, we need to export +tokenizers to OpenVINO format using `OpenVINO +Tokenizers `__ +library. + +In this tutorial we will use `Stable Diffusion +XL `__ +model, but the same steps are also applicable to other models of Stable +Diffusion family. + +.. code:: ipython3 + + from pathlib import Path + from diffusers import DiffusionPipeline, AutoencoderKL, LCMScheduler + from optimum.exporters.openvino import export_from_model + from optimum.intel.openvino import OVConfig + from optimum.exporters.openvino.convert import export_tokenizer + import gc + + model_dir = Path("sdxl-lcm") + + if not model_dir.exists(): + model_id = "stabilityai/stable-diffusion-xl-base-1.0" + adapter_id = "latent-consistency/lcm-lora-sdxl" + vae_id = "madebyollin/sdxl-vae-fp16-fix" + vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix") + pipe = DiffusionPipeline.from_pretrained(model_id, vae=vae, variant="fp16", use_safetensors=True) + pipe.scheduler = LCMScheduler.from_config(pipe.scheduler.config) + pipe.load_lora_weights(adapter_id) + pipe.fuse_lora() + export_from_model(pipe, model_dir, task="text-to-image", stateful=False, ov_config=OVConfig(dtype="fp16")) + for tokenizer in ["tokenizer", "tokenizer_2"]: + tokenizer_model = getattr(pipe, tokenizer, None) + if tokenizer_model is not None: + export_tokenizer(tokenizer_model, model_dir / tokenizer, task="text-to-image") + del vae + del pipe + gc.collect() + + +.. parsed-literal:: + + 2024-11-08 16:49:48.963221: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-08 16:49:48.977712: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:477] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered + WARNING: All log messages before absl::InitializeLog() is called are written to STDERR + E0000 00:00:1731070188.992824 718925 cuda_dnn.cc:8310] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered + E0000 00:00:1731070188.997386 718925 cuda_blas.cc:1418] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered + 2024-11-08 16:49:49.014687: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. + + +Image Generation using OpenVINO GenAI +------------------------------------- + + + +`OpenVINO™ GenAI `__ +is a library of the most popular Generative AI model pipelines, +optimized execution methods, and samples that run on top of highly +performant `OpenVINO +Runtime `__. 
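+
+As a quick preview of what the rest of this section builds up to, a
+minimal text-to-image call with this library needs only the converted
+model directory and a device name (a sketch, reusing the ``model_dir``
+produced above; adapter handling is added step by step below):
+
+.. code:: ipython3
+
+    import openvino_genai as ov_genai
+    from PIL import Image
+
+    # build the pipeline from the directory exported by Optimum Intel above
+    preview_pipe = ov_genai.Text2ImagePipeline(model_dir, "CPU")
+
+    # LCM-fused model: a few steps without classifier-free guidance are enough
+    preview_tensor = preview_pipe.generate("a house in the forest", num_inference_steps=4, guidance_scale=0)
+    Image.fromarray(preview_tensor.data[0])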
+ +This library is friendly to PC and laptop execution, and optimized for +resource consumption. It requires no external dependencies to run +generative models as it already includes all the core functionality. + +``openvino_genai.Text2ImagePipeline`` class supports inference of +`Diffusers +models `__. +For pipeline initialization, we should provide directory with converted +by Optimum Intel pipeline and specify inference device. Optionally, we +can provide configuration for LoRA Adapters using ``adapter_config``. +For starting generation process ``generate`` method should be used. +Basically, it required to provide input text prompt for image +generation. You can provide additional arguments like negative prompt, +number of steps, guidance scale, image width and height to control +generation process. + +Integration LoRA into pipeline +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +Similarly to Diffusers pipeline, you can store separately and load LoRA +into base pipeline before inference using OpenVINO GenAI. +``openvino_genai.AdapterConfig`` serves for adapters management in +``openvino_genai.Text2ImagePipeline``. It can be used for adding and +removing adapters or changing their weight coefficient for blending into +pipeline. You can add one or multiple adapters into config and also +specify alpha blending coefficients for their addition. OpenVINO GenAI +supports LoRA adapters saved in Safetensors format. You can use one of +publicly available pretrained adapters from +`CivitAI `__ or `HuggingFace +Hub `__ or train your own. > **Important +Note**: Before loading pretrained adapters, please make sure that they +are compatible with your base model architecture. E.g. if you use SDXL +model, you need to provide adapters trained for this model type and +loading adapter, for example, trained for FLUX is not allowed. + +Generally, process of adapters configuration consists of 2 steps: 1. +Register adapters in pipeline constructor. At this moment, it is +recommended to provide all adapters that you plan to use on this stage. +2. Choose which adapter (or a combination of adapters) to apply in each +``generate`` call. It is not obligated to use all of provided in +constructor adapters simultaneously, you can select one or combination +of several among them for each generation cycle. + +Prepare LoRA Adapters +~~~~~~~~~~~~~~~~~~~~~ + + + +.. _prepare-lora-adapters-1: + +Prepare LoRA Adapters +~~~~~~~~~~~~~~~~~~~~~ + +.. code:: ipython3 + + from lora_config import LORA + + # uncomment this line to see predefined LoRA adapters configuration used in this notebook + # LORA + +.. code:: ipython3 + + from huggingface_hub import hf_hub_download + + lora_dir = Path("lora") + adapter_paths = [] + + for lora in LORA: + lora_model_dir = lora_dir / lora["name"].lower().replace(" ", "_") + file_name = lora["file_name"] + if not (lora_model_dir / file_name).exists(): + hf_hub_download(repo_id=lora["model_id"], filename=file_name, local_dir=lora_model_dir) + adapter_paths.append(lora_model_dir / file_name) + +.. 
code:: ipython3 + + import openvino_genai as ov_genai + + + def prepare_adapter_config(scales=None): + if scales is None: + scales = [1 / len(adapter_paths)] * len(adapter_paths) + if isinstance(scales, float): + scales = [scales] * len(adapter_paths) + adapter_config = ov_genai.AdapterConfig() + for adapter, scale in zip(adapter_paths, scales): + adapter_config.add(ov_genai.Adapter(adapter), scale) + + return adapter_config + + + adapters_config = prepare_adapter_config(0.0) + adapters = adapters_config.get_adapters() + +Create Inference Pipeline +~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +diffusion process involves random for preparing initial state for +denoising. For reproducibility of generation results, we will use +``Generator`` class. + +.. code:: ipython3 + + from notebook_utils import device_widget + + device = device_widget(default="CPU", exclude=["NPU"]) + device + + + + +.. parsed-literal:: + + Dropdown(description='Device:', options=('CPU', 'AUTO'), value='CPU') + + + +.. code:: ipython3 + + import openvino as ov + import torch + + + class Generator(ov_genai.Generator): + def __init__(self, seed): + ov_genai.Generator.__init__(self) + self.generator = torch.Generator(device="cpu").manual_seed(seed) + + def next(self): + return torch.randn(1, generator=self.generator, dtype=torch.float32).item() + + def randn_tensor(self, shape: ov.Shape): + torch_tensor = torch.randn(list(shape), generator=self.generator, dtype=torch.float32) + return ov.Tensor(torch_tensor.numpy()) + + + pipe = ov_genai.Text2ImagePipeline(model_dir, "CPU", adapters=adapters_config) + +Selection specific adapter during generation +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +As it was already mention before, it is not necessary to use all +adapters specified at initialization stage for generation in the same +time. Providing adapters argument with ``openvino_genai.AdapterConfig`` +into ``generate`` allow to select one or several from them. For example, +let’s select LoRA for generation images in X-Ray style. + +.. code:: ipython3 + + subject = "a cute cat in sunglasses" + prompt_template = LORA[0].get("prompt", "") + adapter_weight = LORA[0].get("weight", 1.0) + prompt = prompt_template.replace("", subject) + adapter_config = ov_genai.AdapterConfig() + adapter_config.add(adapters[0], adapter_weight) + image_tensor = pipe.generate(prompt, num_inference_steps=4, guidance_scale=0, adapters=adapter_config, generator=Generator(421235)) + +.. code:: ipython3 + + from PIL import Image + + image = Image.fromarray(image_tensor.data[0]) + image + + + + +.. image:: multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.png + + + +Use multiple adapters simultaneously +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + + +You also can use combination of adapters that will be applied in the +same time. Let’s see what happens if traditional Japanese art will meet +modern illustration pointillistic style. + +.. code:: ipython3 + + prompt_template1 = LORA[1].get("prompt", "") + prompt_template2 = LORA[2].get("prompt", "") + adapter1_weight = LORA[1].get("weight", 1.0) + adapter2_weight = LORA[2].get("weight", 1.0) + + prompt = prompt_template2.replace("", prompt_template1.replace("", subject)) + adapter_config = ov_genai.AdapterConfig() + adapter_config.add(adapters[1], adapter1_weight) + adapter_config.add(adapters[2], adapter2_weight) + image_tensor = pipe.generate(prompt, num_inference_steps=4, guidance_scale=0, adapters=adapter_config, generator=Generator(421235)) + +.. 
code:: ipython3 + + image = Image.fromarray(image_tensor.data[0]) + image + + + + +.. image:: multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.png + + + +Disable adapters +~~~~~~~~~~~~~~~~ + + + +You can disable adapters providing empty ``AdapterConfig`` into generate + +.. code:: ipython3 + + image_tensor = pipe.generate(subject, num_inference_steps=4, guidance_scale=0, adapters=ov_genai.AdapterConfig(), generator=Generator(421235)) + +.. code:: ipython3 + + image = Image.fromarray(image_tensor.data[0]) + image + + + + +.. image:: multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.png + + + +Interactive demo +---------------- + + + +.. code:: ipython3 + + gradio_helper_path = Path("gradio_helper.py") + + if not gradio_helper_path.exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/multilora-image-generation/gradio_helper.py", + ) + lora_config_path.open("w").write(r.text) + +.. code:: ipython3 + + from gradio_helper import make_demo + + demo = make_demo(pipe, Generator, adapters, LORA) + + try: + demo.launch(debug=False) + except Exception: + demo.launch(share=True, debug=False) + # if you are launching remotely, specify server_name and server_port + # demo.launch(server_name='your server name', server_port='server port in int') + # Read more in the docs: https://gradio.app/docs/ diff --git a/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.jpg b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.jpg new file mode 100644 index 00000000000000..1427e6afb594ac --- /dev/null +++ b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:378fce8c53832fa402e94c50995aa5f188d16a6a6886c08fe4f8323bcf7daabe +size 42135 diff --git a/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.png b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.png new file mode 100644 index 00000000000000..873721f87cc2a3 --- /dev/null +++ b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_15_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:27ac6d45499eb6e67ddf78f8f3493fd3e9dc3885cec2b4fda8067f9b1f7a9ebf +size 1252162 diff --git a/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.jpg b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.jpg new file mode 100644 index 00000000000000..1b6a88d2cde069 --- /dev/null +++ b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:de879c60657ad9c471ccc971d63cc2ac25be5b477c6ebcd8b2e1a2a438b2f3c1 +size 146062 diff --git a/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.png b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.png new file mode 100644 index 00000000000000..9b26d20ef04ab8 --- /dev/null +++ 
b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_18_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ae8cec0bac904c1868d7786121978b2ca819ead5c8b02cf09bb07f75b927a3a1 +size 1940316 diff --git a/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.jpg b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.jpg new file mode 100644 index 00000000000000..199be9b483e18f --- /dev/null +++ b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:564848925f540cf500457a4996631ba616cc6547b63d377ce22ac8c3e9431c04 +size 87425 diff --git a/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.png b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.png new file mode 100644 index 00000000000000..bbf4eaaf030a42 --- /dev/null +++ b/docs/notebooks/multilora-image-generation-with-output_files/multilora-image-generation-with-output_21_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b573ab59972699e762f8a52c0ce17a0db060230effe78e2ae3408290a9173103 +size 1417021 diff --git a/docs/notebooks/music-generation-with-output.rst b/docs/notebooks/music-generation-with-output.rst index 4adc89b9ff79e7..a5bdcbd8049318 100644 --- a/docs/notebooks/music-generation-with-output.rst +++ b/docs/notebooks/music-generation-with-output.rst @@ -124,14 +124,9 @@ Imports .. parsed-literal:: - 2024-11-05 02:04:23.419260: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 02:04:23.453089: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 01:43:50.913766: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:43:50.938403: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 02:04:24.059462: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/utils/generic.py:311: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. 
- torch.utils._pytree._register_pytree_node( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/utils/generic.py:311: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( MusicGen in HF Transformers @@ -170,12 +165,134 @@ generate a text-conditioned music sample. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/huggingface_hub/file_download.py:797: FutureWarning: `resume_download` is deprecated and will be removed in version 1.0.0. Downloads always resume when possible. If you want to force a new download, use `force_download=True`. - warnings.warn( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/utils/generic.py:311: UserWarning: torch.utils._pytree._register_pytree_node is deprecated. Please use torch.utils._pytree.register_pytree_node instead. - torch.utils._pytree._register_pytree_node( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:28: UserWarning: torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm. - warnings.warn("torch.nn.utils.weight_norm is deprecated in favor of torch.nn.utils.parametrizations.weight_norm.") + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/encodec/modeling_encodec.py:124: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor). 
+ self.register_buffer("padding_total", torch.tensor(kernel_size - stride, dtype=torch.int64), persistent=False) + Config of the text_encoder: is overwritten by shared text_encoder config: T5Config { + "_name_or_path": "t5-base", + "architectures": [ + "T5ForConditionalGeneration" + ], + "classifier_dropout": 0.0, + "d_ff": 3072, + "d_kv": 64, + "d_model": 768, + "decoder_start_token_id": 0, + "dense_act_fn": "relu", + "dropout_rate": 0.1, + "eos_token_id": 1, + "feed_forward_proj": "relu", + "initializer_factor": 1.0, + "is_encoder_decoder": true, + "is_gated_act": false, + "layer_norm_epsilon": 1e-06, + "model_type": "t5", + "n_positions": 512, + "num_decoder_layers": 12, + "num_heads": 12, + "num_layers": 12, + "output_past": true, + "pad_token_id": 0, + "relative_attention_max_distance": 128, + "relative_attention_num_buckets": 32, + "task_specific_params": { + "summarization": { + "early_stopping": true, + "length_penalty": 2.0, + "max_length": 200, + "min_length": 30, + "no_repeat_ngram_size": 3, + "num_beams": 4, + "prefix": "summarize: " + }, + "translation_en_to_de": { + "early_stopping": true, + "max_length": 300, + "num_beams": 4, + "prefix": "translate English to German: " + }, + "translation_en_to_fr": { + "early_stopping": true, + "max_length": 300, + "num_beams": 4, + "prefix": "translate English to French: " + }, + "translation_en_to_ro": { + "early_stopping": true, + "max_length": 300, + "num_beams": 4, + "prefix": "translate English to Romanian: " + } + }, + "transformers_version": "4.46.3", + "use_cache": true, + "vocab_size": 32128 + } + + Config of the audio_encoder: is overwritten by shared audio_encoder config: EncodecConfig { + "_name_or_path": "facebook/encodec_32khz", + "architectures": [ + "EncodecModel" + ], + "audio_channels": 1, + "chunk_length_s": null, + "codebook_dim": 128, + "codebook_size": 2048, + "compress": 2, + "dilation_growth_rate": 2, + "hidden_size": 128, + "kernel_size": 7, + "last_kernel_size": 7, + "model_type": "encodec", + "norm_type": "weight_norm", + "normalize": false, + "num_filters": 64, + "num_lstm_layers": 2, + "num_residual_layers": 1, + "overlap": null, + "pad_mode": "reflect", + "residual_kernel_size": 3, + "sampling_rate": 32000, + "target_bandwidths": [ + 2.2 + ], + "torch_dtype": "float32", + "transformers_version": "4.46.3", + "trim_right_ratio": 1.0, + "upsampling_ratios": [ + 8, + 5, + 4, + 4 + ], + "use_causal_conv": false, + "use_conv_shortcut": false + } + + Config of the decoder: is overwritten by shared decoder config: MusicgenDecoderConfig { + "activation_dropout": 0.0, + "activation_function": "gelu", + "attention_dropout": 0.0, + "audio_channels": 1, + "bos_token_id": 2048, + "classifier_dropout": 0.0, + "dropout": 0.1, + "ffn_dim": 4096, + "hidden_size": 1024, + "initializer_factor": 0.02, + "layerdrop": 0.0, + "max_position_embeddings": 2048, + "model_type": "musicgen_decoder", + "num_attention_heads": 16, + "num_codebooks": 4, + "num_hidden_layers": 24, + "pad_token_id": 2048, + "scale_embedding": false, + "tie_word_embeddings": false, + "transformers_version": "4.46.3", + "use_cache": true, + "vocab_size": 2048 + } + In the cell below user is free to change the desired music sample @@ -229,7 +346,7 @@ vocabulary. It helps the model understand the context of a sentence. @@ -314,6 +431,9 @@ runtime .. parsed-literal:: [ WARNING ] Please fix your imports. Module %s has been moved to %s. The old module will be deleted in version %s. 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead + warnings.warn( + `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. 2. Convert MusicGen Language Model @@ -655,7 +775,7 @@ We can now infer the pipeline backed by OpenVINO models. diff --git a/docs/notebooks/nano-llava-multimodal-chatbot-with-output.rst b/docs/notebooks/nano-llava-multimodal-chatbot-with-output.rst index 337458e35bbf0c..0bac7af3f39c32 100644 --- a/docs/notebooks/nano-llava-multimodal-chatbot-with-output.rst +++ b/docs/notebooks/nano-llava-multimodal-chatbot-with-output.rst @@ -16,7 +16,6 @@ OpenVINO. Additionally, we will optimize model using - `Prerequisites <#prerequisites>`__ - `Select Model <#select-model>`__ -- `Download PyTorch model <#download-pytorch-model>`__ - `Convert and Optimize model <#convert-and-optimize-model>`__ - `Convert model to OpenVINO IR @@ -51,23 +50,23 @@ Prerequisites .. code:: ipython3 - %pip install -q "torch>=2.1" "transformers>=4.40" "accelerate" "pillow" "gradio>=4.26" "tqdm" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "nncf>=2.13" - %pip install -q -U --pre --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly "openvino-tokenizers[transformers]" "openvino>=2024.4.0" - %pip install -q "git+https://github.com/eaidova/optimum-intel.git@ea/minicpmv" + %pip install -q "torch>=2.1" "transformers>=4.45" "accelerate" "pillow" "gradio>=4.26" "tqdm" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "nncf>=2.14" + %pip install -q -U "openvino-tokenizers[transformers]>=2024.5.0" "openvino>=2024.5.0" + %pip install -q "git+https://github.com/huggingface/optimum-intel.git" .. parsed-literal:: - ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. - mobileclip 0.1.0 requires torchvision==0.14.1, but you have torchvision 0.17.2+cpu which is incompatible. Note: you may need to restart the kernel to use updated packages. + ERROR: Ignored the following versions that require a different python version: 2.14.0 Requires-Python >=3.9 + ERROR: Could not find a version that satisfies the requirement nncf>=2.14 (from versions: 1.4, 1.4.1, 1.5.0, 1.6.0, 1.7.0, 1.7.1, 2.0.0, 2.0.1, 2.0.2, 2.1.0, 2.2.0, 2.3.0, 2.4.0, 2.5.0, 2.6.0, 2.7.0, 2.8.0, 2.8.1, 2.9.0, 2.10.0, 2.11.0, 2.12.0, 2.13.0) + ERROR: No matching distribution found for nncf>=2.14 Note: you may need to restart the kernel to use updated packages. - ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. - openvino-genai 2024.4.0.0 requires openvino_tokenizers~=2024.4.0.0.dev, but you have openvino-tokenizers 2024.5.0.0.dev20241022 which is incompatible. 
+ ERROR: Ignored the following versions that require a different python version: 2024.5.0.0 Requires-Python >=3.9 + ERROR: Could not find a version that satisfies the requirement openvino-tokenizers>=2024.5.0 (from versions: 2023.3.0.0, 2024.0.0.0, 2024.1.0.0, 2024.1.0.2, 2024.2.0.0, 2024.3.0.0, 2024.4.0.0, 2024.4.1.0.dev20240926) + ERROR: No matching distribution found for openvino-tokenizers>=2024.5.0 Note: you may need to restart the kernel to use updated packages. - ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. - mobileclip 0.1.0 requires torchvision==0.14.1, but you have torchvision 0.17.2+cpu which is incompatible. Note: you may need to restart the kernel to use updated packages. @@ -77,6 +76,7 @@ Prerequisites import requests helper_file = Path("ov_nano_llava_helper.py") + cmd_helper_file = Path("cmd_helper.py") if not helper_file.exists(): r = requests.get( @@ -84,6 +84,10 @@ Prerequisites ) helper_file.open("w").write(r.text) + if not cmd_helper_file.exists(): + r = requests.get(url=f"https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/{cmd_helper_file.name}") + cmd_helper_file.open("w").write(r.text) + Select Model ------------ @@ -127,40 +131,12 @@ Download PyTorch model .. code:: ipython3 - from ov_nano_llava_helper import download_original_model, converted_model_exists, copy_model_files + from ov_nano_llava_helper import converted_model_exists, copy_model_files model_id = model_dropdown.value model_dir = Path(model_id.split("/")[-1]) ov_model_dir = Path("ov_" + model_dir.name) / "FP16" - if not converted_model_exists(ov_model_dir): - download_original_model(model_id, model_dir) - - - -.. parsed-literal:: - - Fetching 14 files: 0%| | 0/14 [00:00 1 or self.sliding_window is not None) and self.is_causal: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/optimum/exporters/onnx/model_patcher.py:306: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/optimum/exporters/onnx/model_patcher.py:306: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if past_key_values_length > 0: /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/qnguyen3/nanoLLaVA/13d60cec183a86755afed64da495fcc2c382ea80/modeling_llava_qwen2.py:939: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
if seq_len > self.max_seq_len_cached: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/cache_utils.py:432: TracerWarning: Using len to get tensor shape might cause the trace to be incorrect. Recommended usage would be tensor.shape[0]. Passing a tensor of different shape might lead to errors or silently give incorrect results. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/cache_utils.py:443: TracerWarning: Using len to get tensor shape might cause the trace to be incorrect. Recommended usage would be tensor.shape[0]. Passing a tensor of different shape might lead to errors or silently give incorrect results. elif len(self.key_cache[layer_idx]) == 0: # fills previously skipped layers; checking for tensor causes errors /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/qnguyen3/nanoLLaVA/13d60cec183a86755afed64da495fcc2c382ea80/modeling_llava_qwen2.py:1499: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if attention_mask.size() != (bsz, 1, q_len, kv_seq_len): - OpenVINO and OpenVINO Tokenizers versions are not binary compatible. - OpenVINO version: 2024.5.0-16993 - OpenVINO Tokenizers version: 2024.5.0.0 - First 3 numbers should be the same. Update OpenVINO Tokenizers to compatible version. It is recommended to use the same day builds for pre-release version. To install both OpenVINO and OpenVINO Tokenizers release version perform: - pip install --force-reinstall openvino openvino-tokenizers - To update both OpenVINO and OpenVINO Tokenizers to the latest pre-release version perform: - pip install --pre -U openvino openvino-tokenizers --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly - Tokenizer won't be converted. - Traceback (most recent call last): - File "/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/bin/optimum-cli", line 10, in - sys.exit(main()) - File "/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/optimum/commands/optimum_cli.py", line 208, in main - service.run() - File "/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/optimum/commands/export/openvino.py", line 349, in run - main_export( - File "/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/optimum/exporters/openvino/__main__.py", line 416, in main_export - core = Core() - File "/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/openvino_tokenizers/__init__.py", line 53, in new_core_init - self.add_extension(str(_ext_path)) # Core.add_extension doesn't support Path object - RuntimeError: Exception from src/inference/src/cpp/core.cpp:158: - Cannot add extension. Cannot find entry point to the extension library. 
This error happened: Cannot load library '/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/openvino_tokenizers/lib/libopenvino_tokenizers.so': /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/openvino_tokenizers/lib/libopenvino_tokenizers.so: undefined symbol: _ZNK2ov4Node17can_constant_foldERKSt6vectorINS_6OutputIS0_EESaIS3_EE + /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/qnguyen3/nanoLLaVA/13d60cec183a86755afed64da495fcc2c382ea80/modeling_llava_qwen2.py:169: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if attn_weights.size() != (batch_size, self.num_heads, q_len, k_v_seq_len): + /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/qnguyen3/nanoLLaVA/13d60cec183a86755afed64da495fcc2c382ea80/modeling_llava_qwen2.py:187: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + if attn_output.size() != (batch_size, self.num_heads, q_len, self.head_dim): + Exporting tokenizers to OpenVINO is not supported for tokenizers version > 0.19 and openvino version <= 2024.4. Please downgrade to tokenizers version <= 0.19 to export tokenizers to OpenVINO. + +.. parsed-literal:: + + [ WARNING ] Unexpectedly found already patched module model.layers.22.mlp.up_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.22.mlp.down_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.23.self_attn.q_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.23.self_attn.k_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.23.self_attn.v_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.23.self_attn.o_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.23.mlp.gate_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. 
Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.23.mlp.up_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.layers.23.mlp.down_proj while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.mm_projector.0 while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module model.mm_projector.2 while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module lm_head while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. + [ WARNING ] Unexpectedly found already patched module while applying ModuleExtension during PyTorch model conversion. Result of the conversion maybe broken. Depending on the exact issue it may lead to broken original model. Compress Model weights to 4 and 8 bits using NNCF @@ -380,12 +530,11 @@ image encoder model. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/quantization/quantize_model.py:432: FutureWarning: `CompressWeightsMode.INT8` is deprecated. Please, use `CompressWeightsMode.INT8_ASYM` as value instead. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/quantization/quantize_model.py:432: FutureWarning: `CompressWeightsMode.INT8` is deprecated. Please, use `CompressWeightsMode.INT8_ASYM` as value instead. warning_deprecated( - 2024-11-05 02:09:38.791476: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 02:09:38.825207: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 01:48:49.764790: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:48:49.789684: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 02:09:39.427301: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT .. 
parsed-literal:: @@ -508,10 +657,11 @@ Select device import requests - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) + if not Path("notebook_utils.py").exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + open("notebook_utils.py", "w").write(r.text) from notebook_utils import device_widget @@ -558,8 +708,14 @@ can use the same tokenizer and image processor that provided with model. messages = [{"role": "user", "content": f"\n{prompt}"}] text = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True) - url = "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/8bf7d9f2-018a-4498-bec4-55f17c273ecc" - image = Image.open(requests.get(url, stream=True).raw) + test_image = Path("nanollava.png") + + if not test_image.exists(): + url = "https://github.com/openvinotoolkit/openvino_notebooks/assets/29454499/8bf7d9f2-018a-4498-bec4-55f17c273ecc" + image = Image.open(requests.get(url, stream=True).raw) + image.save(test_image) + else: + image = Image.open(test_image) image_tensor = process_images(image, None, processor) input_ids, attention_mask = process_text_input(text, tokenizer) @@ -569,7 +725,7 @@ can use the same tokenizer and image processor that provided with model. print(f"Question:\n{prompt}") print("Answer:") - output_ids = ov_model.generate(input_ids, attention_mask=attention_mask, images=image_tensor, max_new_tokens=128, use_cache=True, streamer=streamer) + output_ids = ov_model.generate(input_ids, attention_mask=attention_mask, pixel_values=image_tensor, max_new_tokens=128, use_cache=True, streamer=streamer) @@ -586,8 +742,8 @@ can use the same tokenizer and image processor that provided with model. Question: Describe this image in detail Answer: - The image features a small, adorable white lamb standing amidst a fire. The lamb's fur is fluffy and white, and it is adorned with tiny black eyes that are bright and lively. The lamb's face is cute, with a small black nose and a small mouth. It seems like the lamb is looking straight at the camera, making it appear even more adorable. - The lamb's right ear is visible, and it is white and pink. The lamb's right eye is also black and pink. The lamb's face is quite detailed, with the nose and mouth visible. There are also details like the lamb's right foot, which is white + This image features a cute, white lama, possibly a llama, which is depicted in a playful pose. The llama is surrounded by a fire, indicating it's being set on a burner. The flame appears to be a bright, bright yellow, and there are several tiny flames, possibly from the llama's actions. + The llama itself is quite detailed. It has a small brown nose and dark eyes that are expressive. The face of the llama is quite detailed as well, with a pair of ears that are also light brown. The llama's mouth is open, revealing its pink lips. 
There are also small pink spots on its face, Interactive demo @@ -679,7 +835,7 @@ Interactive demo generation_kwargs = dict( input_ids=input_ids, attention_mask=attention_mask, - images=image_tensor, + pixel_values=image_tensor, streamer=streamer, max_new_tokens=128, stopping_criteria=[stopping_criteria], diff --git a/docs/notebooks/notebooks_with_colab_buttons.txt b/docs/notebooks/notebooks_with_colab_buttons.txt index 0f45238db3a4fb..59b3348a4c90f7 100644 --- a/docs/notebooks/notebooks_with_colab_buttons.txt +++ b/docs/notebooks/notebooks_with_colab_buttons.txt @@ -24,6 +24,7 @@ knowledge-graphs-conve language-quantize-bert magika-content-type-recognition mobileclip-video-search +modelscope-to-openvino music-generation named-entity-recognition nano-llava-multimodal-chatbot diff --git a/docs/notebooks/nuextract-structure-extraction-with-output.rst b/docs/notebooks/nuextract-structure-extraction-with-output.rst index fc2d250626fba4..8dd88ca62bd161 100644 --- a/docs/notebooks/nuextract-structure-extraction-with-output.rst +++ b/docs/notebooks/nuextract-structure-extraction-with-output.rst @@ -391,9 +391,9 @@ LLMPipeline. .. code:: ipython3 - from openvino_genai import LLMPipeline + import openvino_genai as ov_genai - pipe = LLMPipeline(model_dir.as_posix(), device.value) + pipe = ov_genai.LLMPipeline(model_dir.as_posix(), device.value) def run_structure_extraction(text: str, schema: str) -> str: diff --git a/docs/notebooks/object-detection-with-output.rst b/docs/notebooks/object-detection-with-output.rst index a34f72f5d8ff1e..5debc4e7ed88d4 100644 --- a/docs/notebooks/object-detection-with-output.rst +++ b/docs/notebooks/object-detection-with-output.rst @@ -84,7 +84,7 @@ Install requirements .. parsed-literal:: - 24692 + 24717 @@ -136,7 +136,7 @@ Download and convert the Model .. parsed-literal:: - 100%|██████████| 6.25M/6.25M [00:00<00:00, 25.9MB/s] + 100%|██████████| 6.25M/6.25M [00:00<00:00, 26.9MB/s] .. parsed-literal:: @@ -147,10 +147,10 @@ Download and convert the Model PyTorch: starting from 'yolov8n.pt' with input shape (1, 3, 640, 640) BCHW and output shape(s) (1, 84, 8400) (6.2 MB) OpenVINO: starting export with openvino 2024.4.0-16579-c3152d32c9c-releases/2024/4... - OpenVINO: export success ✅ 1.3s, saved as 'yolov8n_openvino_model/' (6.4 MB) + OpenVINO: export success ✅ 1.4s, saved as 'yolov8n_openvino_model/' (6.4 MB) - Export complete (1.5s) - Results saved to /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/object-detection-webcam + Export complete (1.6s) + Results saved to /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/object-detection-webcam Predict: yolo predict task=detect model=yolov8n_openvino_model imgsz=640 half Validate: yolo val task=detect model=yolov8n_openvino_model imgsz=640 data=coco.yaml half Visualize: https://netron.app diff --git a/docs/notebooks/omniparser-with-output.rst b/docs/notebooks/omniparser-with-output.rst new file mode 100644 index 00000000000000..28676a03a84ba7 --- /dev/null +++ b/docs/notebooks/omniparser-with-output.rst @@ -0,0 +1,663 @@ +Screen Parsing with OmniParser and OpenVINO +=========================================== + +Recent breakthrough in Visual Language Processing and Large Language +models made significant strides in understanding and interacting with +the world through text and images. 
However, accurately parsing and +understanding complex graphical user interfaces (GUIs) remains a +significant challenge. OmniParser is a comprehensive method for parsing +user interface screenshots into structured and easy-to-understand +elements. This enables more accurate and efficient interaction with +GUIs, empowering AI agents to perform tasks across various platforms and +applications. + +|image0| + +More details about model can be found in `Microsoft blog +post `__, +`paper `__, `original +repo `__ and `model +card `__. In this tutorial +we consider how to run OmniParser using OpenVINO. + + +**Table of contents:** + +- `Prerequisites <#prerequisites>`__ +- `Prepare models <#prepare-models>`__ + + - `Convert models to OpenVINO Intermediate representation + format <#convert-models-to-openvino-intermediate-representation-format>`__ + + - `Icon Detector <#icon-detector>`__ + - `Screen captioning model <#screen-captioning-model>`__ + +- `Run OmniParser using OpenVINO <#run-omniparser-using-openvino>`__ + + - `Icon Detector <#icon-detector>`__ + + - `Select inference device for icon + detector <#select-inference-device-for-icon-detector>`__ + + - `Screen regions captioning <#screen-regions-captioning>`__ + + - `Select device for screen region + captioning <#select-device-for-screen-region-captioning>`__ + + - `Recognition text on the + screen <#recognition-text-on-the-screen>`__ + + - `Select device for OCR <#select-device-for-ocr>`__ + + - `Run model inference <#run-model-inference>`__ + +- `Interactive demo <#interactive-demo>`__ + +Installation Instructions +~~~~~~~~~~~~~~~~~~~~~~~~~ + +This is a self-contained example that relies solely on its own code. + +We recommend running the notebook in a virtual environment. You only +need a Jupyter server to start. For details, please refer to +`Installation +Guide `__. + +.. |image0| image:: https://microsoft.github.io/OmniParser/static/images/flow_merged0.png + +Prerequisites +------------- + + + +.. code:: ipython3 + + %pip install -q "torch>=2.1" easyocr torchvision accelerate "supervision==0.18.0" accelerate timm "einops==0.8.0" "ultralytics==8.1.24" pillow opencv-python "gradio>=4.19" --extra-index-url https://download.pytorch.org/whl/cpu + %pip install -q "openvino>=2024.4.0" + + +.. parsed-literal:: + + Note: you may need to restart the kernel to use updated packages. + Note: you may need to restart the kernel to use updated packages. + + +.. code:: ipython3 + + from pathlib import Path + import requests + + notebook_utils_path = Path("notebook_utils.py") + florence_helper_path = Path("ov_florence2_helper.py") + + if not notebook_utils_path.exists(): + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + notebook_utils_path.open("w").write(r.text) + + if not florence_helper_path.exists(): + r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/florence2/ov_florence2_helper.py") + florence_helper_path.open("w").write(r.text) + +Prepare models +-------------- + + + +OmniParser leverages a two-step process: 1. Interactable Region +Detection: - Identifies clickable elements like buttons and icons within +a UI. - Employs a specialized model trained on a diverse dataset of web +pages. - Accurately detects interactive elements, even in complex UIs. + +2. Semantic Captioning: + + - Assigns meaningful descriptions to detected elements. + - Combines optical character recognition (OCR) and a captioning + model. 
+ - Provides context for accurate action generation. + +Convert models to OpenVINO Intermediate representation format +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +To start working with OpenVINO, +we should first convert the models to OpenVINO Intermediate Representation +format. + +`OpenVINO model conversion +API `__ +should be used for these purposes. The ``ov.convert_model`` function accepts +an original model instance and example input for tracing and returns an +``ov.Model`` representing this model in the OpenVINO framework. The converted +model can be saved on disk using the ``ov.save_model`` function or +loaded directly on a device using ``core.compile_model`` (see the short sketch at the end of this tutorial). + +Let’s consider each pipeline part. + +Icon Detector +^^^^^^^^^^^^^ + + + +The icon detector in OmniParser is a YOLO-based model trained +on an interactable icon detection dataset curated by the model authors. + +For conversion and model inference we will use the API provided by +Ultralytics. You can find more examples of this API usage in these +`tutorials `__ + +.. code:: ipython3 + + from ov_omniparser_helper import download_omniparser_icon_detector + + icon_detector_dir = download_omniparser_icon_detector() + + +.. parsed-literal:: + + 2024-11-22 01:51:07.385705: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 01:51:07.410345: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. + + + +.. parsed-literal:: + + weights/icon_detect/best.pt: 0%| | 0.00/11.7M [00:00`__. + +.. code:: ipython3 + + from ov_omniparser_helper import download_omniparser_florence_model + + florence_caption_dir = download_omniparser_florence_model() + + + +.. parsed-literal:: + + Fetching 15 files: 0%| | 0/15 [00:00`__ is a Python module for +extracting text from images. It is a general OCR that can read both +natural scene text and dense text in documents and supports 80+ +languages. EasyOCR uses AI to detect text regions and recognize the +text inside the predicted regions. We will also run both the text +detection and recognition models using OpenVINO. + +Select device for OCR +^^^^^^^^^^^^^^^^^^^^^ + + + +.. code:: ipython3 + + import ipywidgets as widgets + + device_detector = device_widget(exclude=["NPU"], description="Detector device:") + device_recognizer = device_widget(exclude=["NPU"], description="Recognizer device:") + + device_box = widgets.VBox([device_detector, device_recognizer]) + device_box + + + + +.. parsed-literal:: + + VBox(children=(Dropdown(description='Detector device:', index=1, options=('CPU', 'AUTO'), value='AUTO'), Dropd… + + + +.. code:: ipython3 + + from ov_omniparser_helper import easyocr_reader + + # Uncomment the line to see easyocr_reader helper code + # ??easyocr_reader + +.. code:: ipython3 + + reader = easyocr_reader("weights/easyocr", device_detector.value, device_recognizer.value) + + +.. parsed-literal:: + + Neither CUDA nor MPS are available - defaulting to CPU. Note: This module is much faster with a GPU. + + + + +.. 
code:: ipython3 + + from PIL import Image + + test_image_path = Path("examples/windows_home.png") + test_image_path.parent.mkdir(exist_ok=True, parents=True) + + if not test_image_path.exists(): + Image.open(requests.get("https://github.com/microsoft/OmniParser/blob/master/imgs/windows_home.png?raw=true", stream=True).raw).save(test_image_path) + +Run model inference +~~~~~~~~~~~~~~~~~~~ + + + +The ``process_image`` function defined in ``ov_omniparser_helper.py`` +provides an easy-to-use interface for the screen parsing process. + +.. code:: ipython3 + + from ov_omniparser_helper import process_image + + # Uncomment this line to see process_image code + # ??process_image + +.. code:: ipython3 + + processed_image, label_coordinates, icon_descriptions = process_image( + test_image_path, ov_icon_detector, {"model": ov_icon_caption_gen, "processor": processor}, reader + ) + + +.. parsed-literal:: + + + image 1/1 /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/omniparser/examples/windows_home.png: 640x640 32 0s, 38.2ms + Speed: 2.4ms preprocess, 38.2ms inference, 1.1ms postprocess per image at shape (1, 3, 640, 640) + finish processing + + +The function returns the image with the detected boxes drawn on it, the box +coordinates, and a description for each region. + +.. code:: ipython3 + + display(processed_image.resize((1200, 1200))) + print(icon_descriptions) + + + +.. image:: omniparser-with-output_files/omniparser-with-output_32_0.png + + +.. parsed-literal:: + + Text Box ID 0: 3.46 PM + Text Box ID 1: Search + Text Box ID 2: Microsoft + Text Box ID 3: 10/25/2024 + Icon Box ID 4: Microsoft Outlook. + Icon Box ID 5: Image + Icon Box ID 6: Microsoft OneNote. + Icon Box ID 7: Microsoft Office. + Icon Box ID 8: a folder for organizing files. + Icon Box ID 9: Microsoft Office. + Icon Box ID 10: Security shield. + Icon Box ID 11: Microsoft 365. + Icon Box ID 12: Microsoft Edge browser. + Icon Box ID 13: Microsoft Edge browser. + Icon Box ID 14: Decrease + Icon Box ID 15: the Windows operating system. + Icon Box ID 16: mountains and a beach. + Icon Box ID 17: a search function. + + +Interactive demo +---------------- + +.. code:: ipython3 + + from gradio_helper import make_demo + + + def process_image_gradio(image, box_threshold, iou_threshold, imgsz): + image_result, _, parsed_text = process_image( + image, + ov_icon_detector, + {"model": ov_icon_caption_gen, "processor": processor}, + reader, + box_threshold=box_threshold, + iou_threshold=iou_threshold, + imgsz=imgsz, + ) + return image_result, parsed_text + + + demo = make_demo(process_image_gradio) + + try: + demo.launch(debug=False, height=600) + except Exception: + demo.launch(debug=False, share=True, height=600) + # if you are launching remotely, specify server_name and server_port + # demo.launch(server_name='your server name', server_port='server port in int') + # Read more in the docs: https://gradio.app/docs/ + + +.. parsed-literal:: + + Running on local URL: http://127.0.0.1:7860 + + To create a public link, set `share=True` in `launch()`. 
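+
+As a reference for the generic conversion flow described in the "Convert
+models to OpenVINO Intermediate representation format" section above, the
+cell below is a minimal, self-contained sketch of the
+``ov.convert_model`` / ``ov.save_model`` / ``core.compile_model`` API. It
+deliberately uses a tiny stand-in PyTorch module rather than the OmniParser
+models, so ``torch_model`` and ``example_input`` here are illustrative
+placeholders only.
+
+.. code:: ipython3
+
+    import torch
+    import openvino as ov
+
+    # Any traceable PyTorch module can be converted; a small linear layer is used as a stand-in.
+    torch_model = torch.nn.Linear(8, 4).eval()
+    example_input = torch.randn(1, 8)
+
+    # Convert the framework model to an ov.Model via tracing.
+    ov_model = ov.convert_model(torch_model, example_input=example_input)
+
+    # Save the converted model to OpenVINO IR (produces model.xml and model.bin)...
+    ov.save_model(ov_model, "model.xml")
+
+    # ...or compile it directly on a device without saving it first.
+    core = ov.Core()
+    compiled_model = core.compile_model(ov_model, "AUTO")
+    result = compiled_model(example_input.numpy())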
+ + + + + + + diff --git a/docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.jpg b/docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.jpg new file mode 100644 index 00000000000000..513db4e6d0da5d --- /dev/null +++ b/docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c83af55e4296ff1dadb270b93c31084e983048437f848323c0e9677d2c3ed22 +size 161384 diff --git a/docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.png b/docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.png new file mode 100644 index 00000000000000..a09fc0a47cd036 --- /dev/null +++ b/docs/notebooks/omniparser-with-output_files/omniparser-with-output_32_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:382e19a8751851ad8a151bea1f4f7bc4be62b47c7a8a4f70da0a3dae257b0c20 +size 1411816 diff --git a/docs/notebooks/openvino-api-with-output.rst b/docs/notebooks/openvino-api-with-output.rst index b2b4c8c0f04fdd..3931d96040da7e 100644 --- a/docs/notebooks/openvino-api-with-output.rst +++ b/docs/notebooks/openvino-api-with-output.rst @@ -201,7 +201,7 @@ notebooks. .. parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') @@ -250,7 +250,7 @@ points to the filename of an ONNX model. .. parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/segmentation.onnx') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/segmentation.onnx') @@ -310,7 +310,7 @@ without any conversion step. Pass the filename with extension to .. parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/inference.pdiparams') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/inference.pdiparams') @@ -354,7 +354,7 @@ TensorFlow models saved in frozen graph format can also be passed to .. parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.pb') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.pb') @@ -407,7 +407,7 @@ It is pre-trained model optimized to work with TensorFlow Lite. .. parsed-literal:: - Warning: Looks like you're using an outdated `kagglehub` version, please consider updating (latest version: 0.3.3) + Warning: Looks like you're using an outdated `kagglehub` version, please consider updating (latest version: 0.3.4) .. code:: ipython3 @@ -497,7 +497,7 @@ Information about the inputs and outputs of the model are in .. 
parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') @@ -703,7 +703,7 @@ produced data as values. .. parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') @@ -892,7 +892,7 @@ input shape. .. parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/segmentation.bin') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/segmentation.bin') @@ -1044,7 +1044,7 @@ the cache. .. parsed-literal:: - PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') + PosixPath('/opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvino-api/model/classification.bin') @@ -1074,7 +1074,7 @@ the cache. .. parsed-literal:: - Loading the network to the AUTO device took 0.15 seconds. + Loading the network to the AUTO device took 0.14 seconds. After running the previous cell, we know the model exists in the cache @@ -1092,5 +1092,5 @@ measure the time it takes now. .. parsed-literal:: - Loading the network to the AUTO device took 0.08 seconds. + Loading the network to the AUTO device took 0.07 seconds. diff --git a/docs/notebooks/openvoice-with-output.rst b/docs/notebooks/openvoice-with-output.rst index 2ee11fcded84dc..0c912bfe36ee96 100644 --- a/docs/notebooks/openvoice-with-output.rst +++ b/docs/notebooks/openvoice-with-output.rst @@ -62,13 +62,33 @@ Clone repository and install requirements .. code:: ipython3 - import sys + # Fetch `notebook_utils` module + import requests + + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", + ) + open("notebook_utils.py", "w").write(r.text) + + r = requests.get( + url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/cmd_helper.py", + ) + open("cmd_helper.py", "w").write(r.text) + + + from notebook_utils import download_file, device_widget + +.. 
code:: ipython3 + from pathlib import Path + from cmd_helper import clone_repo + + repo_dir = Path("OpenVoice") if not repo_dir.exists(): - !git clone https://github.com/myshell-ai/OpenVoice + clone_repo("https://github.com/myshell-ai/OpenVoice") orig_english_path = Path("OpenVoice/openvoice/text/_orig_english.py") english_path = Path("OpenVoice/openvoice/text/english.py") @@ -79,8 +99,6 @@ Clone repository and install requirements data = data.replace("unidecode", "anyascii") with english_path.open("w") as out_f: out_f.write(data) - # append to sys.path so that modules from the repo could be imported - sys.path.append(str(repo_dir)) # fix a problem with silero downloading and installing @@ -97,30 +115,19 @@ Clone repository and install requirements .. parsed-literal:: - Cloning into 'OpenVoice'... - remote: Enumerating objects: 438, done. - remote: Total 438 (delta 0), reused 0 (delta 0), pack-reused 438 (from 1) - Receiving objects: 100% (438/438), 3.84 MiB | 21.51 MiB/s, done. - Resolving deltas: 100% (207/207), done. ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. + mobileclip 0.1.0 requires clip-benchmark>=1.4.0, which is not installed. mobileclip 0.1.0 requires torchvision==0.14.1, but you have torchvision 0.17.2+cpu which is incompatible. + tensorflow 2.12.0 requires keras<2.13,>=2.12.0, but you have keras 2.13.1 which is incompatible. tensorflow 2.12.0 requires numpy<1.24,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow 2.12.0 requires tensorboard<2.13,>=2.12, but you have tensorboard 2.13.0 which is incompatible. + tensorflow 2.12.0 requires tensorflow-estimator<2.13,>=2.12.0, but you have tensorflow-estimator 2.13.0 which is incompatible. + tensorflow-cpu 2.13.1 requires numpy<=1.24.3,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow-cpu 2.13.1 requires typing-extensions<4.6.0,>=3.6.6, but you have typing-extensions 4.12.2 which is incompatible. torchvision 0.17.2+cpu requires torch==2.2.2, but you have torch 2.4.1 which is incompatible. Note: you may need to restart the kernel to use updated packages. -.. code:: ipython3 - - # Fetch `notebook_utils` module - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - open("notebook_utils.py", "w").write(r.text) - - from notebook_utils import download_file, device_widget - Download checkpoints and load PyTorch model ------------------------------------------- @@ -243,9 +250,9 @@ True .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:134: FutureWarning: `torch.nn.utils.weight_norm` is deprecated in favor of `torch.nn.utils.parametrizations.weight_norm`. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/utils/weight_norm.py:134: FutureWarning: `torch.nn.utils.weight_norm` is deprecated in favor of `torch.nn.utils.parametrizations.weight_norm`. 
WeightNorm.apply(module, name, dim) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/api.py:36: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/api.py:36: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. checkpoint_dict = torch.load(ckpt_path, map_location=torch.device(self.device)) @@ -259,9 +266,9 @@ True .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/wavmark/__init__.py:16: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/wavmark/__init__.py:16: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. checkpoint = torch.load(resume_path, map_location=torch.device('cpu')) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/api.py:36: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/api.py:36: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature. 
checkpoint_dict = torch.load(ckpt_path, map_location=torch.device(self.device)) @@ -411,40 +418,39 @@ documentation 0 No CUDA runtime is found, using CUDA_HOME='/usr/local/cuda' - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:283: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:283: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert ( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:346: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:346: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! pad_length = max(length - (self.window_size + 1), 0) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:347: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:347: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! slice_start_position = max((self.window_size + 1) - length, 0) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:349: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/attentions.py:349: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if pad_length > 0: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:114: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:114: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if torch.min(inputs) < left or torch.max(inputs) > right: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:119: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:119: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if min_bin_width * num_bins > 1.0: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:121: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:121: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if min_bin_height * num_bins > 1.0: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:171: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. 
We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/openvoice/OpenVoice/openvoice/transforms.py:171: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert (discriminant >= 0).all() - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Trace had nondeterministic nodes. Did you forget call .eval() on your model? Nodes: - %3293 : Float(1, 2, 43, strides=[86, 43, 1], requires_grad=0, device=cpu) = aten::randn(%3288, %3289, %3290, %3291, %3292) # /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/dynamic_graph/wrappers.py:86:0 - %5559 : Float(1, 192, 153, strides=[29376, 1, 192], requires_grad=0, device=cpu) = aten::randn_like(%m_p, %5554, %5555, %5556, %5557, %5558) # /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/dynamic_graph/wrappers.py:86:0 + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Trace had nondeterministic nodes. Did you forget call .eval() on your model? Nodes: + %3293 : Float(1, 2, 43, strides=[86, 43, 1], requires_grad=0, device=cpu) = aten::randn(%3288, %3289, %3290, %3291, %3292) # /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/dynamic_graph/wrappers.py:86:0 + %5559 : Float(1, 192, 153, strides=[29376, 1, 192], requires_grad=0, device=cpu) = aten::randn_like(%m_p, %5554, %5555, %5556, %5557, %5558) # /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/dynamic_graph/wrappers.py:86:0 This may cause errors in trace checking. To disable trace checking, pass check_trace=False to torch.jit.trace() _check_trace( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 1. of the traced function does not match the corresponding output of the Python function. Detailed error: - The values for attribute 'shape' do not match: torch.Size([1, 1, 39424]) != torch.Size([1, 1, 38656]). + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 1. of the traced function does not match the corresponding output of the Python function. Detailed error: + The values for attribute 'shape' do not match: torch.Size([1, 1, 39680]) != torch.Size([1, 1, 38400]). 
_check_trace( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 2. of the traced function does not match the corresponding output of the Python function. Detailed error: - The values for attribute 'shape' do not match: torch.Size([1, 1, 154, 43]) != torch.Size([1, 1, 151, 43]). + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 2. of the traced function does not match the corresponding output of the Python function. Detailed error: + The values for attribute 'shape' do not match: torch.Size([1, 1, 155, 43]) != torch.Size([1, 1, 150, 43]). _check_trace( - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 3. of the traced function does not match the corresponding output of the Python function. Detailed error: - The values for attribute 'shape' do not match: torch.Size([1, 1, 154]) != torch.Size([1, 1, 151]). + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/jit/_trace.py:1303: TracerWarning: Output nr 3. of the traced function does not match the corresponding output of the Python function. Detailed error: + The values for attribute 'shape' do not match: torch.Size([1, 1, 155]) != torch.Size([1, 1, 150]). _check_trace( - 2024-11-05 02:13:33.268258: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT .. parsed-literal:: @@ -477,16 +483,16 @@ documentation )`. (Triggered internally at ../aten/src/ATen/native/SpectralOps.cpp:836.) + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/nn/modules/module.py:1562: UserWarning: A window was not provided. A rectangular window will be applied,which is known to cause spectral leakage. Other windows such as torch.hann_window or torch.hamming_window can are recommended to reduce spectral leakage.To suppress this warning and use a rectangular window, explicitly set `window=torch.ones(n_fft, device=)`. (Triggered internally at ../aten/src/ATen/native/SpectralOps.cpp:836.) return forward_call(\*args, \*\*kwargs) @@ -714,7 +720,7 @@ Load speaker embeddings .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/functional.py:666: UserWarning: stft with return_complex=False is deprecated. In a future pytorch release, stft will return complex tensors for all inputs, and return_complex=False will raise an error. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/torch/functional.py:666: UserWarning: stft with return_complex=False is deprecated. In a future pytorch release, stft will return complex tensors for all inputs, and return_complex=False will raise an error. Note: you can still call torch.view_as_real on the complex output to recover the old return format. (Triggered internally at ../aten/src/ATen/native/SpectralOps.cpp:873.) 
return _VF.stft(input, n_fft, hop_length, win_length, window, # type: ignore[attr-defined] @@ -869,7 +875,7 @@ And finally, run voice tone conversion with OpenVINO optimized model @@ -887,7 +893,7 @@ And finally, run voice tone conversion with OpenVINO optimized model @@ -1076,7 +1082,7 @@ voice tone conversion online. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/gradio/components/dropdown.py:100: UserWarning: The `max_choices` parameter is ignored when `multiselect` is False. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/gradio/components/dropdown.py:100: UserWarning: The `max_choices` parameter is ignored when `multiselect` is False. warnings.warn( diff --git a/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_13_0.png b/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_13_0.png index 435c1891121eb0..b696d287ded448 100644 --- a/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_13_0.png +++ b/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_13_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:82b893e29e948379dac42c19763842f7ade2ccf03853c2c07f0b28bf2d58fe17 +oid sha256:c7a830fedc5653fd506c656144decc048cad5a7651c8e498024f0eb0ab8c8e96 size 305482 diff --git a/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_23_0.png b/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_23_0.png index 6fd2096da517cd..8ef607e85695bb 100644 --- a/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_23_0.png +++ b/docs/notebooks/optical-character-recognition-with-output_files/optical-character-recognition-with-output_23_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:5f91e976b303813a4c4bea13d923dc370c4aabfbf2256f1f37f86faf7005bf5c +oid sha256:7c26cc00284b92b50ecf8f61935c461f7c243736bf210546323eec6b5f68739f size 923830 diff --git a/docs/notebooks/optimize-preprocessing-with-output.rst b/docs/notebooks/optimize-preprocessing-with-output.rst index 96d307be3d692f..d681f9588b9321 100644 --- a/docs/notebooks/optimize-preprocessing-with-output.rst +++ b/docs/notebooks/optimize-preprocessing-with-output.rst @@ -201,15 +201,6 @@ and save it to the disk. .. parsed-literal:: WARNING:tensorflow:Compiled the loaded model, but the compiled metrics have yet to be built. `model.compile_metrics` will be empty until you train or evaluate the model. - - -.. parsed-literal:: - - WARNING:absl:Found untraced functions such as _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op, _jit_compiled_convolution_op while saving (showing 5 of 94). These functions will not be directly callable after loading. - - -.. parsed-literal:: - INFO:tensorflow:Assets written to: model/InceptionResNetV2/assets @@ -353,7 +344,7 @@ for mean/scale normalization. .. parsed-literal:: - + @@ -384,7 +375,7 @@ may be specified is input data .. parsed-literal:: - + @@ -422,7 +413,7 @@ then such conversion will be added explicitly. .. 
parsed-literal:: - + @@ -636,6 +627,6 @@ Compare performance .. parsed-literal:: - IR model in OpenVINO Runtime/CPU with manual image preprocessing: 0.0153 seconds per image, FPS: 65.39 - IR model in OpenVINO Runtime/CPU with preprocessing API: 0.0166 seconds per image, FPS: 60.23 + IR model in OpenVINO Runtime/CPU with manual image preprocessing: 0.0153 seconds per image, FPS: 65.56 + IR model in OpenVINO Runtime/CPU with preprocessing API: 0.0143 seconds per image, FPS: 70.14 diff --git a/docs/notebooks/optimize-preprocessing-with-output_files/optimize-preprocessing-with-output_14_1.png b/docs/notebooks/optimize-preprocessing-with-output_files/optimize-preprocessing-with-output_14_1.png index cca7858e3bc4af..a142093f6e675c 100644 --- a/docs/notebooks/optimize-preprocessing-with-output_files/optimize-preprocessing-with-output_14_1.png +++ b/docs/notebooks/optimize-preprocessing-with-output_files/optimize-preprocessing-with-output_14_1.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:bbd7b81cc8e7a73ea9bcb8be0c0575134f50b6af8f7de23ee9feed645a4cf66c +oid sha256:5712bd24e962ae0e0267607554ebe1f2869c223b108876ce10e5d20fe6285126 size 387941 diff --git a/docs/notebooks/paddle-ocr-webcam-with-output.rst b/docs/notebooks/paddle-ocr-webcam-with-output.rst index 9f7510cd5efe96..3fae2e47d99b24 100644 --- a/docs/notebooks/paddle-ocr-webcam-with-output.rst +++ b/docs/notebooks/paddle-ocr-webcam-with-output.rst @@ -76,7 +76,12 @@ Guide =2.12.0, but you have keras 2.13.1 which is incompatible. tensorflow 2.12.0 requires numpy<1.24,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow 2.12.0 requires tensorboard<2.13,>=2.12, but you have tensorboard 2.13.0 which is incompatible. + tensorflow 2.12.0 requires tensorflow-estimator<2.13,>=2.12.0, but you have tensorflow-estimator 2.13.0 which is incompatible. + tensorflow-cpu 2.13.1 requires numpy<=1.24.3,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow-cpu 2.13.1 requires typing-extensions<4.6.0,>=3.6.6, but you have typing-extensions 4.12.2 which is incompatible. Note: you may need to restart the kernel to use updated packages. Note: you may need to restart the kernel to use updated packages. @@ -209,7 +214,7 @@ Download the Model for Text **Detection** .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-no… + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-no… .. parsed-literal:: @@ -255,7 +260,7 @@ Download the Model for Text **Recognition** .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-no… + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-no… .. 
parsed-literal:: diff --git a/docs/notebooks/paddle-ocr-webcam-with-output_files/paddle-ocr-webcam-with-output_30_0.png b/docs/notebooks/paddle-ocr-webcam-with-output_files/paddle-ocr-webcam-with-output_30_0.png index 38a0d5d593351b..2593a5f1244bc5 100644 --- a/docs/notebooks/paddle-ocr-webcam-with-output_files/paddle-ocr-webcam-with-output_30_0.png +++ b/docs/notebooks/paddle-ocr-webcam-with-output_files/paddle-ocr-webcam-with-output_30_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:ac7efd85f2c50b0a189dbf00c0cd2252f362e6469cd014d8f255c53172152c3d -size 591373 +oid sha256:9f0c3a97843163a24439a4c8a7a8206c55f2fb928bb87e985d3835493668af62 +size 593381 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output.rst b/docs/notebooks/paddle-to-openvino-classification-with-output.rst index 25feb9293ee93a..7358ce29c8972c 100644 --- a/docs/notebooks/paddle-to-openvino-classification-with-output.rst +++ b/docs/notebooks/paddle-to-openvino-classification-with-output.rst @@ -56,14 +56,9 @@ Imports .. code:: ipython3 - import platform - - if platform.system() == "Windows": - %pip install -q "paddlepaddle>=2.5.1,<2.6.0" - else: - %pip install -q "paddlepaddle>=2.5.1" + %pip install -q "paddlepaddle>=2.5.1,<2.6.0" %pip install -q "paddleclas>=2.5.2" --no-deps - %pip install -q "prettytable" "ujson" "visualdl>=2.5.3" "faiss-cpu>=1.7.1" Pillow tqdm "matplotlib>=3.4" + %pip install -q "prettytable" "ujson" "visualdl>=2.5.3" "faiss-cpu>=1.7.1" Pillow tqdm "matplotlib>=3.4" "opencv-python" "scikit-learn" # Install openvino package %pip install -q "openvino>=2023.1.0" @@ -73,31 +68,13 @@ Imports Note: you may need to restart the kernel to use updated packages. Note: you may need to restart the kernel to use updated packages. ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. - paddleclas 2.5.2 requires easydict, which is not installed. - paddleclas 2.5.2 requires gast==0.3.3, but you have gast 0.4.0 which is incompatible. - paddleclas 2.5.2 requires opencv-python==4.6.0.66, but you have opencv-python 4.10.0.84 which is incompatible. + paddleclas 2.6.0 requires easydict, which is not installed. + paddleclas 2.6.0 requires gast==0.3.3, but you have gast 0.4.0 which is incompatible. + paddleclas 2.6.0 requires opencv-python<=4.6.0.66, but you have opencv-python 4.10.0.84 which is incompatible. Note: you may need to restart the kernel to use updated packages. Note: you may need to restart the kernel to use updated packages. -.. code:: ipython3 - - if platform.system() == "Linux": - !wget http://nz2.archive.ubuntu.com/ubuntu/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2.19_amd64.deb - !sudo dpkg -i libssl1.1_1.1.1f-1ubuntu2.19_amd64.deb - - -.. parsed-literal:: - - --2024-11-05 02:15:59-- http://nz2.archive.ubuntu.com/ubuntu/pool/main/o/openssl/libssl1.1_1.1.1f-1ubuntu2.19_amd64.deb - Resolving proxy-dmz.intel.com (proxy-dmz.intel.com)... 10.241.208.166 - Connecting to proxy-dmz.intel.com (proxy-dmz.intel.com)|10.241.208.166|:911... connected. - Proxy request sent, awaiting response... 404 Not Found - 2024-11-05 02:16:00 ERROR 404: Not Found. - - dpkg: error: cannot access archive 'libssl1.1_1.1.1f-1ubuntu2.19_amd64.deb': No such file or directory - - .. code:: ipython3 import time @@ -124,8 +101,8 @@ Imports .. parsed-literal:: - 2024-11-05 02:16:02 INFO: Loading faiss with AVX512 support. 
- 2024-11-05 02:16:02 INFO: Successfully loaded faiss with AVX512 support. + 2024-11-22 01:57:57 INFO: Loading faiss with AVX512 support. + 2024-11-22 01:57:57 INFO: Successfully loaded faiss with AVX512 support. Settings @@ -209,7 +186,7 @@ inference on that image, and then show the top three prediction results. .. parsed-literal:: - [2024/11/05 02:16:41] ppcls WARNING: The current running environment does not support the use of GPU. CPU has been used instead. + [2024/11/22 01:58:21] ppcls WARNING: The current running environment does not support the use of GPU. CPU has been used instead. Labrador retriever, 0.75138 German short-haired pointer, 0.02373 Great Dane, 0.01848 @@ -218,7 +195,7 @@ inference on that image, and then show the top three prediction results. -.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_8_1.png +.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_7_1.png ``classifier.predict()`` takes an image file name, reads the image, @@ -275,7 +252,7 @@ clipping values. .. parsed-literal:: - 2024-11-05 02:16:42 WARNING: Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers). + 2024-11-22 01:58:22 WARNING: Clipping input data to the valid range for imshow with RGB data ([0..1] for floats or [0..255] for integers). .. parsed-literal:: @@ -287,12 +264,12 @@ clipping values. .. parsed-literal:: - + -.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_15_3.png +.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_14_3.png To decode the labels predicted by the model to names of classes, we need @@ -403,7 +380,7 @@ Notebook `__ for more information. -.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_23_1.png +.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_22_1.png Timing and Comparison @@ -462,7 +439,7 @@ Note that many optimizations are possible to improve the performance. .. parsed-literal:: - PaddlePaddle model on CPU: 0.0074 seconds per image, FPS: 134.37 + PaddlePaddle model on CPU: 0.0069 seconds per image, FPS: 144.32 PaddlePaddle result: Labrador retriever, 0.75138 @@ -473,7 +450,7 @@ Note that many optimizations are possible to improve the performance. -.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_27_1.png +.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_26_1.png Select inference device @@ -523,7 +500,7 @@ select device from dropdown list for running inference using OpenVINO .. parsed-literal:: - OpenVINO IR model in OpenVINO Runtime (AUTO): 0.0027 seconds per image, FPS: 373.31 + OpenVINO IR model in OpenVINO Runtime (AUTO): 0.0026 seconds per image, FPS: 380.57 OpenVINO result: Labrador retriever, 0.74909 @@ -534,7 +511,7 @@ select device from dropdown list for running inference using OpenVINO -.. image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_30_1.png +.. 
image:: paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_29_1.png References diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_14_3.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_14_3.png new file mode 100644 index 00000000000000..35e0c81123f0a1 --- /dev/null +++ b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_14_3.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99b8398ef76f2959d210e2d30bb44420f8d34a885a4480bc26e2af6627ba7119 +size 120883 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_15_3.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_15_3.png deleted file mode 100644 index 97c14460591759..00000000000000 --- a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_15_3.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ba922b89ca992098fd516d86f4d0c97858a8264664f9a49d431978b790a9135f -size 120883 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_22_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_22_1.png new file mode 100644 index 00000000000000..35c91e327be1ce --- /dev/null +++ b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_22_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1381e5922057c6bc70eb4ba9a04f3164382ad01191d320c1acbc819e7261f8c1 +size 224886 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_23_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_23_1.png deleted file mode 100644 index 74feaaeb12e5bc..00000000000000 --- a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_23_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b77eb48f499b17e5306d574b90a5d123ab82440225c034a20256a0ce6378cba -size 224886 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_26_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_26_1.png new file mode 100644 index 00000000000000..35c91e327be1ce --- /dev/null +++ b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_26_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1381e5922057c6bc70eb4ba9a04f3164382ad01191d320c1acbc819e7261f8c1 +size 224886 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_27_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_27_1.png deleted file mode 100644 index 74feaaeb12e5bc..00000000000000 --- 
a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_27_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b77eb48f499b17e5306d574b90a5d123ab82440225c034a20256a0ce6378cba -size 224886 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_29_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_29_1.png new file mode 100644 index 00000000000000..35c91e327be1ce --- /dev/null +++ b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_29_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1381e5922057c6bc70eb4ba9a04f3164382ad01191d320c1acbc819e7261f8c1 +size 224886 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_30_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_30_1.png deleted file mode 100644 index 74feaaeb12e5bc..00000000000000 --- a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_30_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b77eb48f499b17e5306d574b90a5d123ab82440225c034a20256a0ce6378cba -size 224886 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_7_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_7_1.png new file mode 100644 index 00000000000000..35c91e327be1ce --- /dev/null +++ b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_7_1.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1381e5922057c6bc70eb4ba9a04f3164382ad01191d320c1acbc819e7261f8c1 +size 224886 diff --git a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_8_1.png b/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_8_1.png deleted file mode 100644 index 74feaaeb12e5bc..00000000000000 --- a/docs/notebooks/paddle-to-openvino-classification-with-output_files/paddle-to-openvino-classification-with-output_8_1.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:6b77eb48f499b17e5306d574b90a5d123ab82440225c034a20256a0ce6378cba -size 224886 diff --git a/docs/notebooks/paint-by-example-with-output.rst b/docs/notebooks/paint-by-example-with-output.rst deleted file mode 100644 index 2f1371652c5750..00000000000000 --- a/docs/notebooks/paint-by-example-with-output.rst +++ /dev/null @@ -1,1359 +0,0 @@ -Paint By Example: Exemplar-based Image Editing with Diffusion Models -==================================================================== - - -**Table of contents:** - - -- `Stable Diffusion in Diffusers - library <#stable-diffusion-in-diffusers-library>`__ -- `Download default images <#download-default-images>`__ -- `Convert models to OpenVINO Intermediate representation (IR) - format <#convert-models-to-openvino-intermediate-representation-ir-format>`__ -- `Prepare Inference pipeline <#prepare-inference-pipeline>`__ -- `Select inference device 
<#select-inference-device>`__ -- `Configure Inference Pipeline <#configure-inference-pipeline>`__ -- `Quantization <#quantization>`__ - - - `Prepare Inference pipeline <#prepare-inference-pipeline>`__ - - `Run quantization <#run-quantization>`__ - - `Run inference and compare inference - time <#run-inference-and-compare-inference-time>`__ - - `Compare UNet file size <#compare-unet-file-size>`__ - -- `Interactive inference <#interactive-inference>`__ - -Installation Instructions -~~~~~~~~~~~~~~~~~~~~~~~~~ - -This is a self-contained example that relies solely on its own code. - -We recommend running the notebook in a virtual environment. You only -need a Jupyter server to start. For details, please refer to -`Installation -Guide `__. - -Stable Diffusion in Diffusers library -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -To work with Stable Diffusion, -we will use the Hugging Face -`Diffusers `__ library. To -experiment with in-painting we can use Diffusers which exposes the -`StableDiffusionInpaintPipeline `__ -similar to the `other Diffusers -pipelines `__. -The code below demonstrates how to create -``StableDiffusionInpaintPipeline`` using -``stable-diffusion-2-inpainting``. To create the drawing tool we will -install Gradio for handling user interaction. - -This is the overall flow of the application: - -.. figure:: https://user-images.githubusercontent.com/103226580/236954918-f364b227-293c-4f78-a9bf-9dcebcb1034a.png - :alt: Flow Diagram - - Flow Diagram - -.. code:: ipython3 - - %pip install -q "torch>=2.1" torchvision --extra-index-url "https://download.pytorch.org/whl/cpu" - %pip install -q "diffusers>=0.25.0" "peft>=0.6.2" "openvino>=2023.2.0" "transformers>=4.25.1" "matplotlib>=3.4" ipywidgets opencv-python pillow "nncf>=2.7.0" "gradio==3.44.1" tqdm - -Download the model from `HuggingFace -Paint-by-Example `__. -This might take several minutes because it is over 5GB - -.. code:: ipython3 - - from diffusers import DiffusionPipeline - from diffusers.schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler - - - pipeline = DiffusionPipeline.from_pretrained("Fantasy-Studio/Paint-By-Example") - - scheduler_inpaint = DDIMScheduler.from_config(pipeline.scheduler.config) - -.. code:: ipython3 - - import gc - - extractor = pipeline.feature_extractor - image_encoder = pipeline.image_encoder - image_encoder.eval() - unet_inpaint = pipeline.unet - unet_inpaint.eval() - vae_inpaint = pipeline.vae - vae_inpaint.eval() - - del pipeline - gc.collect(); - -Download default images -~~~~~~~~~~~~~~~~~~~~~~~ - - - -Download default images. - -.. 
code:: ipython3 - - # Fetch `notebook_utils` module - import requests - - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", - ) - - open("notebook_utils.py", "w").write(r.text) - - from notebook_utils import download_file, device_widget, quantization_widget - - download_file( - "https://github-production-user-asset-6210df.s3.amazonaws.com/103226580/286377210-edc98e97-0e43-4796-b771-dacd074c39ea.png", - "0.png", - "data/image", - ) - - download_file( - "https://github-production-user-asset-6210df.s3.amazonaws.com/103226580/286377233-b2c2d902-d379-415a-8183-5bdd37c52429.png", - "1.png", - "data/image", - ) - - download_file( - "https://github-production-user-asset-6210df.s3.amazonaws.com/103226580/286377248-da1db61e-3521-4cdb-85c8-1386d360ce22.png", - "2.png", - "data/image", - ) - - download_file( - "https://github-production-user-asset-6210df.s3.amazonaws.com/103226580/286377279-fa496f17-e850-4351-87c5-2552dfbc4633.jpg", - "bird.jpg", - "data/reference", - ) - - download_file( - "https://github-production-user-asset-6210df.s3.amazonaws.com/103226580/286377298-06a25ff2-84d8-4d46-95cd-8c25efa690d8.jpg", - "car.jpg", - "data/reference", - ) - - download_file( - "https://github-production-user-asset-6210df.s3.amazonaws.com/103226580/286377318-8841a801-1933-4523-a433-7d2fb64c47e6.jpg", - "dog.jpg", - "data/reference", - ) - -Convert models to OpenVINO Intermediate representation (IR) format -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -Adapted from `Stable Diffusion v2 Infinite Zoom -notebook `__ - -.. code:: ipython3 - - from pathlib import Path - import torch - import numpy as np - import openvino as ov - - model_dir = Path("model") - model_dir.mkdir(exist_ok=True) - sd2_inpainting_model_dir = Path("model/paint_by_example") - sd2_inpainting_model_dir.mkdir(exist_ok=True) - -Functions to convert to OpenVINO IR format - -.. code:: ipython3 - - def cleanup_torchscript_cache(): - """ - Helper for removing cached model representation - """ - torch._C._jit_clear_class_registry() - torch.jit._recursive.concrete_type_store = torch.jit._recursive.ConcreteTypeStore() - torch.jit._state._clear_class_state() - - - def convert_image_encoder(image_encoder: torch.nn.Module, ir_path: Path): - """ - Convert Image Encoder model to IR. 
- Function accepts pipeline, prepares example inputs for conversion - Parameters: - image_encoder (torch.nn.Module): image encoder PyTorch model - ir_path (Path): File for storing model - Returns: - None - """ - - class ImageEncoderWrapper(torch.nn.Module): - def __init__(self, image_encoder): - super().__init__() - self.image_encoder = image_encoder - - def forward(self, image): - image_embeddings, negative_prompt_embeds = self.image_encoder(image, return_uncond_vector=True) - return image_embeddings, negative_prompt_embeds - - if not ir_path.exists(): - image_encoder = ImageEncoderWrapper(image_encoder) - image_encoder.eval() - input_ids = torch.randn((1, 3, 224, 224)) - # switch model to inference mode - - # disable gradients calculation for reducing memory consumption - with torch.no_grad(): - ov_model = ov.convert_model(image_encoder, example_input=input_ids, input=([1, 3, 224, 224],)) - ov.save_model(ov_model, ir_path) - del ov_model - cleanup_torchscript_cache() - print("Image Encoder successfully converted to IR") - - - def convert_unet( - unet: torch.nn.Module, - ir_path: Path, - num_channels: int = 4, - width: int = 64, - height: int = 64, - ): - """ - Convert Unet model to IR format. - Function accepts pipeline, prepares example inputs for conversion - Parameters: - unet (torch.nn.Module): UNet PyTorch model - ir_path (Path): File for storing model - num_channels (int, optional, 4): number of input channels - width (int, optional, 64): input width - height (int, optional, 64): input height - Returns: - None - """ - dtype_mapping = {torch.float32: ov.Type.f32, torch.float64: ov.Type.f64} - if not ir_path.exists(): - # prepare inputs - encoder_hidden_state = torch.ones((2, 1, 768)) - latents_shape = (2, num_channels, width, height) - latents = torch.randn(latents_shape) - t = torch.from_numpy(np.array(1, dtype=np.float32)) - unet.eval() - dummy_inputs = (latents, t, encoder_hidden_state) - input_info = [] - for input_tensor in dummy_inputs: - shape = ov.PartialShape(tuple(input_tensor.shape)) - element_type = dtype_mapping[input_tensor.dtype] - input_info.append((shape, element_type)) - - with torch.no_grad(): - ov_model = ov.convert_model(unet, example_input=dummy_inputs, input=input_info) - ov.save_model(ov_model, ir_path) - del ov_model - cleanup_torchscript_cache() - print("U-Net successfully converted to IR") - - - def convert_vae_encoder(vae: torch.nn.Module, ir_path: Path, width: int = 512, height: int = 512): - """ - Convert VAE model to IR format. 
- Function accepts VAE model, creates wrapper class for export only necessary for inference part, - prepares example inputs for conversion, - Parameters: - vae (torch.nn.Module): VAE PyTorch model - ir_path (Path): File for storing model - width (int, optional, 512): input width - height (int, optional, 512): input height - Returns: - None - """ - - class VAEEncoderWrapper(torch.nn.Module): - def __init__(self, vae): - super().__init__() - self.vae = vae - - def forward(self, image): - latents = self.vae.encode(image).latent_dist.sample() - return latents - - if not ir_path.exists(): - vae_encoder = VAEEncoderWrapper(vae) - vae_encoder.eval() - image = torch.zeros((1, 3, width, height)) - with torch.no_grad(): - ov_model = ov.convert_model(vae_encoder, example_input=image, input=([1, 3, width, height],)) - ov.save_model(ov_model, ir_path) - del ov_model - cleanup_torchscript_cache() - print("VAE encoder successfully converted to IR") - - - def convert_vae_decoder(vae: torch.nn.Module, ir_path: Path, width: int = 64, height: int = 64): - """ - Convert VAE decoder model to IR format. - Function accepts VAE model, creates wrapper class for export only necessary for inference part, - prepares example inputs for conversion, - Parameters: - vae (torch.nn.Module): VAE model - ir_path (Path): File for storing model - width (int, optional, 64): input width - height (int, optional, 64): input height - Returns: - None - """ - - class VAEDecoderWrapper(torch.nn.Module): - def __init__(self, vae): - super().__init__() - self.vae = vae - - def forward(self, latents): - latents = 1 / 0.18215 * latents - return self.vae.decode(latents) - - if not ir_path.exists(): - vae_decoder = VAEDecoderWrapper(vae) - latents = torch.zeros((1, 4, width, height)) - - vae_decoder.eval() - with torch.no_grad(): - ov_model = ov.convert_model(vae_decoder, example_input=latents, input=([1, 4, width, height],)) - ov.save_model(ov_model, ir_path) - del ov_model - cleanup_torchscript_cache() - print("VAE decoder successfully converted to ") - -Do the conversion of the in-painting model: - -.. code:: ipython3 - - IMAGE_ENCODER_OV_PATH_INPAINT = sd2_inpainting_model_dir / "image_encoder.xml" - - if not IMAGE_ENCODER_OV_PATH_INPAINT.exists(): - convert_image_encoder(image_encoder, IMAGE_ENCODER_OV_PATH_INPAINT) - else: - print(f"Image encoder will be loaded from {IMAGE_ENCODER_OV_PATH_INPAINT}") - - del image_encoder - gc.collect(); - -Do the conversion of the Unet model - -.. code:: ipython3 - - UNET_OV_PATH_INPAINT = sd2_inpainting_model_dir / "unet.xml" - if not UNET_OV_PATH_INPAINT.exists(): - convert_unet(unet_inpaint, UNET_OV_PATH_INPAINT, num_channels=9, width=64, height=64) - del unet_inpaint - gc.collect() - else: - del unet_inpaint - print(f"U-Net will be loaded from {UNET_OV_PATH_INPAINT}") - gc.collect(); - -Do the conversion of the VAE Encoder model - -.. 
code:: ipython3 - - VAE_ENCODER_OV_PATH_INPAINT = sd2_inpainting_model_dir / "vae_encoder.xml" - - if not VAE_ENCODER_OV_PATH_INPAINT.exists(): - convert_vae_encoder(vae_inpaint, VAE_ENCODER_OV_PATH_INPAINT, 512, 512) - else: - print(f"VAE encoder will be loaded from {VAE_ENCODER_OV_PATH_INPAINT}") - - VAE_DECODER_OV_PATH_INPAINT = sd2_inpainting_model_dir / "vae_decoder.xml" - if not VAE_DECODER_OV_PATH_INPAINT.exists(): - convert_vae_decoder(vae_inpaint, VAE_DECODER_OV_PATH_INPAINT, 64, 64) - else: - print(f"VAE decoder will be loaded from {VAE_DECODER_OV_PATH_INPAINT}") - - del vae_inpaint - gc.collect(); - -Prepare Inference pipeline -~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -Function to prepare the mask and masked image. - -Adapted from `Stable Diffusion v2 Infinite Zoom -notebook `__ - -The main difference is that instead of encoding a text prompt it will -now encode an image as the prompt. - -This is the detailed flowchart for the pipeline: - -.. figure:: https://github.com/openvinotoolkit/openvino_notebooks/assets/103226580/cde2d5c4-2540-4a45-ad9c-339f7a69459d - :alt: pipeline-flowchart - - pipeline-flowchart - -.. code:: ipython3 - - import inspect - from typing import Optional, Union, Dict - - import PIL - import cv2 - - from transformers import CLIPImageProcessor - from diffusers.pipelines.pipeline_utils import DiffusionPipeline - from openvino.runtime import Model - - - def prepare_mask_and_masked_image(image: PIL.Image.Image, mask: PIL.Image.Image): - """ - Prepares a pair (image, mask) to be consumed by the Stable Diffusion pipeline. This means that those inputs will be - converted to ``np.array`` with shapes ``batch x channels x height x width`` where ``channels`` is ``3`` for the - ``image`` and ``1`` for the ``mask``. - - The ``image`` will be converted to ``np.float32`` and normalized to be in ``[-1, 1]``. The ``mask`` will be - binarized (``mask > 0.5``) and cast to ``np.float32`` too. - - Args: - image (Union[np.array, PIL.Image]): The image to inpaint. - It can be a ``PIL.Image``, or a ``height x width x 3`` ``np.array`` - mask (_type_): The mask to apply to the image, i.e. regions to inpaint. - It can be a ``PIL.Image``, or a ``height x width`` ``np.array``. - - Returns: - tuple[np.array]: The pair (mask, masked_image) as ``torch.Tensor`` with 4 - dimensions: ``batch x channels x height x width``. - """ - if isinstance(image, (PIL.Image.Image, np.ndarray)): - image = [image] - - if isinstance(image, list) and isinstance(image[0], PIL.Image.Image): - image = [np.array(i.convert("RGB"))[None, :] for i in image] - image = np.concatenate(image, axis=0) - elif isinstance(image, list) and isinstance(image[0], np.ndarray): - image = np.concatenate([i[None, :] for i in image], axis=0) - - image = image.transpose(0, 3, 1, 2) - image = image.astype(np.float32) / 127.5 - 1.0 - - # preprocess mask - if isinstance(mask, (PIL.Image.Image, np.ndarray)): - mask = [mask] - - if isinstance(mask, list) and isinstance(mask[0], PIL.Image.Image): - mask = np.concatenate([np.array(m.convert("L"))[None, None, :] for m in mask], axis=0) - mask = mask.astype(np.float32) / 255.0 - elif isinstance(mask, list) and isinstance(mask[0], np.ndarray): - mask = np.concatenate([m[None, None, :] for m in mask], axis=0) - - mask = 1 - mask - - mask[mask < 0.5] = 0 - mask[mask >= 0.5] = 1 - - masked_image = image * mask - - return mask, masked_image - -Class for the pipeline which will connect all the models together: VAE -decode –> image encode –> tokenizer –> Unet –> VAE model –> scheduler - -.. 
code:: ipython3 - - class OVStableDiffusionInpaintingPipeline(DiffusionPipeline): - def __init__( - self, - vae_decoder: Model, - image_encoder: Model, - image_processor: CLIPImageProcessor, - unet: Model, - scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler], - vae_encoder: Model = None, - ): - """ - Pipeline for text-to-image generation using Stable Diffusion. - Parameters: - vae_decoder (Model): - Variational Auto-Encoder (VAE) Model to decode images to and from latent representations. - image_encoder (Model): - https://huggingface.co/Fantasy-Studio/Paint-by-Example/blob/main/image_encoder/config.json - tokenizer (CLIPTokenizer): - Tokenizer of class CLIPTokenizer(https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer). - unet (Model): Conditional U-Net architecture to denoise the encoded image latents. - vae_encoder (Model): - Variational Auto-Encoder (VAE) Model to encode images to latent representation. - scheduler (SchedulerMixin): - A scheduler to be used in combination with unet to denoise the encoded image latents. Can be one of - DDIMScheduler, LMSDiscreteScheduler, or PNDMScheduler. - """ - super().__init__() - self.scheduler = scheduler - self.vae_decoder = vae_decoder - self.vae_encoder = vae_encoder - self.image_encoder = image_encoder - self.unet = unet - self.register_to_config(unet=unet) - self._unet_output = unet.output(0) - self._vae_d_output = vae_decoder.output(0) - self._vae_e_output = vae_encoder.output(0) if vae_encoder is not None else None - self.height = self.unet.input(0).shape[2] * 8 - self.width = self.unet.input(0).shape[3] * 8 - self.image_processor = image_processor - - def prepare_mask_latents( - self, - mask, - masked_image, - height=512, - width=512, - do_classifier_free_guidance=True, - ): - """ - Prepare mask as Unet nput and encode input masked image to latent space using vae encoder - - Parameters: - mask (np.array): input mask array - masked_image (np.array): masked input image tensor - heigh (int, *optional*, 512): generated image height - width (int, *optional*, 512): generated image width - do_classifier_free_guidance (bool, *optional*, True): whether to use classifier free guidance or not - Returns: - mask (np.array): resized mask tensor - masked_image_latents (np.array): masked image encoded into latent space using VAE - """ - mask = torch.nn.functional.interpolate(torch.from_numpy(mask), size=(height // 8, width // 8)) - mask = mask.numpy() - - # encode the mask image into latents space so we can concatenate it to the latents - masked_image_latents = self.vae_encoder(masked_image)[self._vae_e_output] - masked_image_latents = 0.18215 * masked_image_latents - - mask = np.concatenate([mask] * 2) if do_classifier_free_guidance else mask - masked_image_latents = np.concatenate([masked_image_latents] * 2) if do_classifier_free_guidance else masked_image_latents - return mask, masked_image_latents - - def __call__( - self, - image: PIL.Image.Image, - mask_image: PIL.Image.Image, - reference_image: PIL.Image.Image, - num_inference_steps: Optional[int] = 50, - guidance_scale: Optional[float] = 7.5, - eta: Optional[float] = 0, - output_type: Optional[str] = "pil", - seed: Optional[int] = None, - ): - """ - Function invoked when calling the pipeline for generation. - Parameters: - image (PIL.Image.Image): - Source image for inpainting. 
- mask_image (PIL.Image.Image): - Mask area for inpainting - reference_image (PIL.Image.Image): - Reference image to inpaint in mask area - num_inference_steps (int, *optional*, defaults to 50): - The number of denoising steps. More denoising steps usually lead to a higher quality image at the - expense of slower inference. - guidance_scale (float, *optional*, defaults to 7.5): - Guidance scale as defined in Classifier-Free Diffusion Guidance(https://arxiv.org/abs/2207.12598). - guidance_scale is defined as `w` of equation 2. - Higher guidance scale encourages to generate images that are closely linked to the text prompt, - usually at the expense of lower image quality. - eta (float, *optional*, defaults to 0.0): - Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to - [DDIMScheduler], will be ignored for others. - output_type (`str`, *optional*, defaults to "pil"): - The output format of the generate image. Choose between - [PIL](https://pillow.readthedocs.io/en/stable/): PIL.Image.Image or np.array. - seed (int, *optional*, None): - Seed for random generator state initialization. - Returns: - Dictionary with keys: - sample - the last generated image PIL.Image.Image or np.array - """ - if seed is not None: - np.random.seed(seed) - # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2) - # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1` - # corresponds to doing no classifier free guidance. - do_classifier_free_guidance = guidance_scale > 1.0 - - # get reference image embeddings - image_embeddings = self._encode_image(reference_image, do_classifier_free_guidance=do_classifier_free_guidance) - - # prepare mask - mask, masked_image = prepare_mask_and_masked_image(image, mask_image) - # set timesteps - accepts_offset = "offset" in set(inspect.signature(self.scheduler.set_timesteps).parameters.keys()) - extra_set_kwargs = {} - if accepts_offset: - extra_set_kwargs["offset"] = 1 - - self.scheduler.set_timesteps(num_inference_steps, **extra_set_kwargs) - timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, 1) - latent_timestep = timesteps[:1] - - # get the initial random noise unless the user supplied it - latents, meta = self.prepare_latents(latent_timestep) - mask, masked_image_latents = self.prepare_mask_latents( - mask, - masked_image, - do_classifier_free_guidance=do_classifier_free_guidance, - ) - - # prepare extra kwargs for the scheduler step, since not all schedulers have the same signature - # eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers. 
- # eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502 - # and should be between [0, 1] - accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys()) - extra_step_kwargs = {} - if accepts_eta: - extra_step_kwargs["eta"] = eta - - for t in self.progress_bar(timesteps): - # expand the latents if we are doing classifier free guidance - latent_model_input = np.concatenate([latents] * 2) if do_classifier_free_guidance else latents - latent_model_input = self.scheduler.scale_model_input(latent_model_input, t) - latent_model_input = np.concatenate([latent_model_input, masked_image_latents, mask], axis=1) - # predict the noise residual - noise_pred = self.unet([latent_model_input, np.array(t, dtype=np.float32), image_embeddings])[self._unet_output] - # perform guidance - if do_classifier_free_guidance: - noise_pred_uncond, noise_pred_text = noise_pred[0], noise_pred[1] - noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond) - - # compute the previous noisy sample x_t -> x_t-1 - latents = self.scheduler.step( - torch.from_numpy(noise_pred), - t, - torch.from_numpy(latents), - **extra_step_kwargs, - )["prev_sample"].numpy() - # scale and decode the image latents with vae - image = self.vae_decoder(latents)[self._vae_d_output] - - image = self.postprocess_image(image, meta, output_type) - return {"sample": image} - - def _encode_image(self, image: PIL.Image.Image, do_classifier_free_guidance: bool = True): - """ - Encodes the image into image encoder hidden states. - - Parameters: - image (PIL.Image.Image): base image to encode - do_classifier_free_guidance (bool): whether to use classifier free guidance or not - Returns: - image_embeddings (np.ndarray): image encoder hidden states - """ - processed_image = self.image_processor(image) - processed_image = processed_image["pixel_values"][0] - processed_image = np.expand_dims(processed_image, axis=0) - - output = self.image_encoder(processed_image) - image_embeddings = output[self.image_encoder.output(0)] - negative_embeddings = output[self.image_encoder.output(1)] - - image_embeddings = np.concatenate([negative_embeddings, image_embeddings]) - - return image_embeddings - - def prepare_latents(self, latent_timestep: torch.Tensor = None): - """ - Function for getting initial latents for starting generation - - Parameters: - latent_timestep (torch.Tensor, *optional*, None): - Predicted by scheduler initial step for image generation, required for latent image mixing with nosie - Returns: - latents (np.ndarray): - Image encoded in latent space - """ - latents_shape = (1, 4, self.height // 8, self.width // 8) - noise = np.random.randn(*latents_shape).astype(np.float32) - # if we use LMSDiscreteScheduler, let's make sure latents are mulitplied by sigmas - if isinstance(self.scheduler, LMSDiscreteScheduler): - noise = noise * self.scheduler.sigmas[0].numpy() - return noise, {} - - def postprocess_image(self, image: np.ndarray, meta: Dict, output_type: str = "pil"): - """ - Postprocessing for decoded image. Takes generated image decoded by VAE decoder, unpad it to initila image size (if required), - normalize and convert to [0, 255] pixels range. 
Optionally, convertes it from np.ndarray to PIL.Image format - - Parameters: - image (np.ndarray): - Generated image - meta (Dict): - Metadata obtained on latents preparing step, can be empty - output_type (str, *optional*, pil): - Output format for result, can be pil or numpy - Returns: - image (List of np.ndarray or PIL.Image.Image): - Postprocessed images - """ - if "padding" in meta: - pad = meta["padding"] - (_, end_h), (_, end_w) = pad[1:3] - h, w = image.shape[2:] - unpad_h = h - end_h - unpad_w = w - end_w - image = image[:, :, :unpad_h, :unpad_w] - image = np.clip(image / 2 + 0.5, 0, 1) - image = np.transpose(image, (0, 2, 3, 1)) - # 9. Convert to PIL - if output_type == "pil": - image = self.numpy_to_pil(image) - if "src_height" in meta: - orig_height, orig_width = meta["src_height"], meta["src_width"] - image = [img.resize((orig_width, orig_height), PIL.Image.Resampling.LANCZOS) for img in image] - else: - if "src_height" in meta: - orig_height, orig_width = meta["src_height"], meta["src_width"] - image = [cv2.resize(img, (orig_width, orig_width)) for img in image] - return image - - def get_timesteps(self, num_inference_steps: int, strength: float): - """ - Helper function for getting scheduler timesteps for generation - In case of image-to-image generation, it updates number of steps according to strength - - Parameters: - num_inference_steps (int): - number of inference steps for generation - strength (float): - value between 0.0 and 1.0, that controls the amount of noise that is added to the input image. - Values that approach 1.0 allow for lots of variations but will also produce images that are not semantically consistent with the input. - """ - # get the original timestep using init_timestep - init_timestep = min(int(num_inference_steps * strength), num_inference_steps) - - t_start = max(num_inference_steps - init_timestep, 0) - timesteps = self.scheduler.timesteps[t_start:] - - return timesteps, num_inference_steps - t_start - -Select inference device -~~~~~~~~~~~~~~~~~~~~~~~ - - - -select device from dropdown list for running inference using OpenVINO - -.. code:: ipython3 - - device = device_widget() - - device - - - - -.. parsed-literal:: - - Dropdown(description='Device:', index=4, options=('CPU', 'GPU.0', 'GPU.1', 'GPU.2', 'AUTO'), value='AUTO') - - - -Configure Inference Pipeline -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -Configuration steps: 1. Load models on device 2. Configure tokenizer and -scheduler 3. Create instance of OvStableDiffusionInpaintingPipeline -class - -This can take a while to run. - -.. 
code:: ipython3 - - ov_config = {"INFERENCE_PRECISION_HINT": "f32"} if device.value != "CPU" else {} - - core = ov.Core() - - - def get_ov_pipeline(): - image_encoder_inpaint = core.compile_model(IMAGE_ENCODER_OV_PATH_INPAINT, device.value) - unet_model_inpaint = core.compile_model(UNET_OV_PATH_INPAINT, device.value) - vae_decoder_inpaint = core.compile_model(VAE_DECODER_OV_PATH_INPAINT, device.value, ov_config) - vae_encoder_inpaint = core.compile_model(VAE_ENCODER_OV_PATH_INPAINT, device.value, ov_config) - - ov_pipe_inpaint = OVStableDiffusionInpaintingPipeline( - image_processor=extractor, - image_encoder=image_encoder_inpaint, - unet=unet_model_inpaint, - vae_encoder=vae_encoder_inpaint, - vae_decoder=vae_decoder_inpaint, - scheduler=scheduler_inpaint, - ) - - return ov_pipe_inpaint - - - ov_pipe_inpaint = get_ov_pipeline() - -Quantization ------------- - - - -`NNCF `__ enables -post-training quantization by adding quantization layers into model -graph and then using a subset of the training dataset to initialize the -parameters of these additional quantization layers. Quantized operations -are executed in ``INT8`` instead of ``FP32``/``FP16`` making model -inference faster. - -According to ``StableDiffusionInpaintingPipeline`` structure, UNet used -for iterative denoising of input. It means that model runs in the cycle -repeating inference on each diffusion step, while other parts of -pipeline take part only once. That is why computation cost and speed of -UNet denoising becomes the critical path in the pipeline. Quantizing the -rest of the SD pipeline does not significantly improve inference -performance but can lead to a substantial degradation of accuracy. - -The optimization process contains the following steps: - -1. Create a calibration dataset for quantization. -2. Run ``nncf.quantize()`` to obtain quantized model. -3. Save the ``INT8`` model using ``openvino.save_model()`` function. - -Please select below whether you would like to run quantization to -improve model inference speed. - -.. code:: ipython3 - - UNET_INT8_OV_PATH = Path("model/unet_int8.xml") - int8_ov_pipe_inpaint = None - - - to_quantize = quantization_widget() - - to_quantize - - - - -.. parsed-literal:: - - Checkbox(value=True, description='Quantization') - - - -Let’s load ``skip magic`` extension to skip quantization if -``to_quantize`` is not selected - -.. code:: ipython3 - - # Fetch `skip_kernel_extension` module - r = requests.get( - url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/skip_kernel_extension.py", - ) - open("skip_kernel_extension.py", "w").write(r.text) - - if to_quantize.value and "GPU" in device.value: - to_quantize.value = False - - %load_ext skip_kernel_extension - -Prepare calibration dataset -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -We use 3 examples from -`Paint-by-Example `__ -to create a calibration dataset. - -.. 
code:: ipython3 - - import PIL - import requests - from io import BytesIO - - - def download_image(url): - response = requests.get(url) - return PIL.Image.open(BytesIO(response.content)).convert("RGB") - - - example1 = [ - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/image/example_1.png?raw=true", - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/mask/example_1.png?raw=true", - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/reference/example_1.jpg?raw=true", - ] - example2 = [ - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/image/example_2.png?raw=true", - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/mask/example_2.png?raw=true", - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/reference/example_2.jpg?raw=true", - ] - example3 = [ - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/image/example_3.png?raw=true", - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/mask/example_3.png?raw=true", - "https://github.com/Fantasy-Studio/Paint-by-Example/blob/main/examples/reference/example_3.jpg?raw=true", - ] - examples = [example1, example2, example3] - - - img_examples = [] - for init_image_url, mask_image_url, example_image_url in examples: - init_image = download_image(init_image_url).resize((512, 512)) - mask_image = download_image(mask_image_url).resize((512, 512)) - example_image = download_image(example_image_url).resize((512, 512)) - img_examples.append((init_image, mask_image, example_image)) - -To collect intermediate model inputs for calibration we should customize -``CompiledModel``. - -.. code:: ipython3 - - %%skip not $to_quantize.value - - from tqdm.notebook import tqdm - from transformers import set_seed - from typing import Any, Dict, List - - - class CompiledModelDecorator(ov.CompiledModel): - def __init__(self, compiled_model, data_cache: List[Any] = None): - super().__init__(compiled_model) - self.data_cache = data_cache if data_cache else [] - - def __call__(self, *args, **kwargs): - self.data_cache.append(*args) - return super().__call__(*args, **kwargs) - - - def collect_calibration_data(pipeline) -> List[Dict]: - original_unet = pipeline.unet - pipeline.unet = CompiledModelDecorator(original_unet) - pipeline.set_progress_bar_config(disable=True) - prev_example_image = None - for init_image, mask_image, example_image in img_examples: - - _ = pipeline( - image=init_image, - mask_image=mask_image, - reference_image=example_image, - ) - if prev_example_image: - _ = pipeline( - image=init_image, - mask_image=mask_image, - reference_image=prev_example_image, - ) - prev_example_image = example_image - - - calibration_dataset = pipeline.unet.data_cache - pipeline.set_progress_bar_config(disable=False) - pipeline.unet = original_unet - - return calibration_dataset - -.. code:: ipython3 - - %%skip not $to_quantize.value - - UNET_INT8_OV_PATH = Path("model/unet_int8.xml") - if not UNET_INT8_OV_PATH.exists(): - unet_calibration_data = collect_calibration_data(ov_pipe_inpaint) - -Run quantization -~~~~~~~~~~~~~~~~ - - - -Create a quantized model from the pre-trained converted OpenVINO model. - - **NOTE**: Quantization is time and memory consuming operation. - Running quantization code below may take some time. - -.. 
code:: ipython3 - - %%skip not $to_quantize.value - - import nncf - - - def get_quantized_pipeline(): - if UNET_INT8_OV_PATH.exists(): - print("Loading quantized model") - quantized_unet = core.read_model(UNET_INT8_OV_PATH) - else: - unet = core.read_model(UNET_OV_PATH_INPAINT) - quantized_unet = nncf.quantize( - model=unet, - preset=nncf.QuantizationPreset.MIXED, - calibration_dataset=nncf.Dataset(unet_calibration_data), - model_type=nncf.ModelType.TRANSFORMER, - ) - ov.save_model(quantized_unet, UNET_INT8_OV_PATH) - - unet_optimized = core.compile_model(UNET_INT8_OV_PATH, device.value) - - image_encoder_inpaint = core.compile_model(IMAGE_ENCODER_OV_PATH_INPAINT, device.value) - vae_decoder_inpaint = core.compile_model(VAE_DECODER_OV_PATH_INPAINT, device.value, ov_config) - vae_encoder_inpaint = core.compile_model(VAE_ENCODER_OV_PATH_INPAINT, device.value, ov_config) - - int8_ov_pipe_inpaint = OVStableDiffusionInpaintingPipeline( - image_processor=extractor, - image_encoder=image_encoder_inpaint, - unet=unet_optimized, - vae_encoder=vae_encoder_inpaint, - vae_decoder=vae_decoder_inpaint, - scheduler=scheduler_inpaint, - ) - - return int8_ov_pipe_inpaint - - - int8_ov_pipe_inpaint = get_quantized_pipeline() - - -.. parsed-literal:: - - INFO:nncf:NNCF initialized successfully. Supported frameworks detected: torch, openvino - - - -.. parsed-literal:: - - Output() - - - - - - - - - - - - - - - - - - -.. parsed-literal:: - - Output() - - - - - - - - - - - - - - - - - -.. parsed-literal:: - - INFO:nncf:121 ignored nodes were found by name in the NNCFGraph - - - -.. parsed-literal:: - - Output() - - - - - - - - - - - - - - - - - - -.. parsed-literal:: - - Output() - - - - - - - - - - - - - - - - - -Run inference and compare inference time -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - - -OV pipeline: - -.. code:: ipython3 - - init_image, mask_image, example_image = img_examples[1] - - - ov_image = ov_pipe_inpaint(image=init_image, mask_image=mask_image, reference_image=example_image, seed=2) - -Quantized pipeline: - -.. code:: ipython3 - - %%skip not $to_quantize.value - - int8_image = int8_ov_pipe_inpaint(image=init_image, mask_image=mask_image, reference_image=example_image, seed=2) - -.. code:: ipython3 - - %%skip not $to_quantize.value - - import matplotlib.pyplot as plt - from PIL import Image - - def visualize_results(orig_img:Image.Image, optimized_img:Image.Image): - """ - Helper function for results visualization - - Parameters: - orig_img (Image.Image): generated image using FP16 models - optimized_img (Image.Image): generated image using quantized models - Returns: - fig (matplotlib.pyplot.Figure): matplotlib generated figure contains drawing result - """ - orig_title = "FP16 pipeline" - control_title = "INT8 pipeline" - figsize = (20, 20) - fig, axs = plt.subplots(1, 2, figsize=figsize, sharex='all', sharey='all') - list_axes = list(axs.flat) - for a in list_axes: - a.set_xticklabels([]) - a.set_yticklabels([]) - a.get_xaxis().set_visible(False) - a.get_yaxis().set_visible(False) - a.grid(False) - list_axes[0].imshow(np.array(orig_img)) - list_axes[1].imshow(np.array(optimized_img)) - list_axes[0].set_title(orig_title, fontsize=15) - list_axes[1].set_title(control_title, fontsize=15) - - fig.subplots_adjust(wspace=0.01, hspace=0.01) - fig.tight_layout() - return fig - - - visualize_results(ov_image["sample"][0], int8_image["sample"][0]) - - - -.. image:: paint-by-example-with-output_files/paint-by-example-with-output_41_0.png - - -.. 
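The cells above only compare the generated images. To also compare inference
time, as the section title suggests, a small benchmark along the following
lines can be used. This is a minimal sketch, assuming ``ov_pipe_inpaint``,
``int8_ov_pipe_inpaint`` and the ``init_image``/``mask_image``/``example_image``
triple from the previous cells are still in scope; the helper name and the
number of runs are illustrative only.

.. code:: ipython3

    %%skip not $to_quantize.value

    import time


    def benchmark(pipeline, n_runs=3):
        # Average wall-clock time of several full pipeline runs on the same inputs.
        timings = []
        for _ in range(n_runs):
            start = time.perf_counter()
            pipeline(image=init_image, mask_image=mask_image, reference_image=example_image, seed=2)
            timings.append(time.perf_counter() - start)
        return sum(timings) / len(timings)


    fp16_latency = benchmark(ov_pipe_inpaint)
    int8_latency = benchmark(int8_ov_pipe_inpaint)

    print(f"FP16 pipeline: {fp16_latency:.3f} seconds")
    print(f"INT8 pipeline: {int8_latency:.3f} seconds")
    print(f"Performance speed-up: {fp16_latency / int8_latency:.3f}")

..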
code:: ipython3 - - %%skip $to_quantize.value - - display(ov_image["sample"][0]) - -Compare UNet file size -~~~~~~~~~~~~~~~~~~~~~~ - - - -.. code:: ipython3 - - %%skip not $to_quantize.value - - fp16_ir_model_size = UNET_OV_PATH_INPAINT.with_suffix(".bin").stat().st_size / 1024 - quantized_model_size = UNET_INT8_OV_PATH.with_suffix(".bin").stat().st_size / 1024 - - print(f"FP16 model size: {fp16_ir_model_size:.2f} KB") - print(f"INT8 model size: {quantized_model_size:.2f} KB") - print(f"Model compression rate: {fp16_ir_model_size / quantized_model_size:.3f}") - - -.. parsed-literal:: - - FP16 model size: 1678780.62 KB - INT8 model size: 840725.98 KB - Model compression rate: 1.997 - - -Interactive inference ---------------------- - - - -Choose what model do you want to use in the interactive interface. You -can choose both, FP16 and INT8. - -.. code:: ipython3 - - import ipywidgets as widgets - - available_models = ["FP16"] - - if UNET_INT8_OV_PATH.exists(): - available_models.append("INT8") - - model_to_use = widgets.Select( - options=available_models, - value="FP16", - description="Select model:", - disabled=False, - ) - - model_to_use - - - - -.. parsed-literal:: - - Select(description='Select model:', options=('FP16', 'INT8'), value='FP16') - - - -.. code:: ipython3 - - if "INT8" == model_to_use.value: - chosen_pipeline = int8_ov_pipe_inpaint or get_quantized_pipeline() - ov_pipe_inpaint = None - else: - chosen_pipeline = ov_pipe_inpaint or get_ov_pipeline() - int8_ov_pipe_inpaint = None - - - gc.collect() - -.. code:: ipython3 - - # Code adapated from https://huggingface.co/spaces/Fantasy-Studio/Paint-by-Example/blob/main/app.py - - import os - - - def predict(input_dict, reference, seed, steps): - """ - This function runs when the 'paint' button is pressed. It takes 3 input images. Takes generated image decoded by VAE decoder, unpad it to initila image size (if required), - normalize and convert to [0, 255] pixels range. Optionally, convertes it from np.ndarray to PIL.Image format - - Parameters: - input_dict (Dict): - Contains two images in a dictionary - 'image' is the image that will be painted on - 'mask' is the black/white image specifying where to paint (white) and not to paint (black) - image (PIL.Image.Image): - Reference image that will be used by the model to know what to paint in the specified area - seed (int): - Used to initialize the random number generator state - steps (int): - The number of denoising steps to run during inference. 
Low = fast/low quality, High = slow/higher quality - use_quantize_model (bool): - Use fp16 or int8 model - Returns: - image (PIL.Image.Image): - Postprocessed images - """ - width, height = input_dict["image"].size - - # If the image is not 512x512 then resize - if width < height: - factor = width / 512.0 - width = 512 - height = int((height / factor) / 8.0) * 8 - else: - factor = height / 512.0 - height = 512 - width = int((width / factor) / 8.0) * 8 - - init_image = input_dict["image"].convert("RGB").resize((width, height)) - mask = input_dict["mask"].convert("RGB").resize((width, height)) - - # If the image is not a 512x512 square then crop - if width > height: - buffer = (width - height) / 2 - input_image = init_image.crop((buffer, 0, width - buffer, 512)) - mask = mask.crop((buffer, 0, width - buffer, 512)) - elif width < height: - buffer = (height - width) / 2 - input_image = init_image.crop((0, buffer, 512, height - buffer)) - mask = mask.crop((0, buffer, 512, height - buffer)) - else: - input_image = init_image - - if not os.path.exists("output"): - os.mkdir("output") - input_image.save("output/init.png") - mask.save("output/mask.png") - reference.save("output/ref.png") - - mask = [mask] - - result = chosen_pipeline( - image=input_image, - mask_image=mask, - reference_image=reference, - seed=seed, - num_inference_steps=steps, - )[ - "sample" - ][0] - - out_dir = Path("output") - out_dir.mkdir(exist_ok=True) - result.save("output/result.png") - - return result - -Choose a source image and a reference image, draw a mask in source image -and push “Paint!” - -.. code:: ipython3 - - if not Path("gradio_helper.py").exists(): - r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/paint-by-example/gradio_helper.py") - open("gradio_helper.py", "w").write(r.text) - - from gradio_helper import make_demo - - demo = make_demo(fn=predict) - - # Launching the Gradio app - try: - demo.launch(debug=False, height=680) - except Exception: - demo.queue().launch(share=True, debug=False, height=680) - # if you are launching remotely, specify server_name and server_port - # image_blocks.launch(server_name='your server name', server_port='server port in int') - # Read more in the docs: https://gradio.app/docs/ - -.. 
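The ``predict`` function is not tied to the Gradio UI and can also be called
directly. The cell below is a hypothetical usage example: it reuses the first
triple from ``img_examples`` prepared earlier, builds the ``input_dict``
structure that the Gradio sketch tool would normally provide, and assumes that
``chosen_pipeline`` has already been selected in the cells above; the seed and
step count are arbitrary.

.. code:: ipython3

    # Hypothetical direct call to `predict`, bypassing the Gradio interface.
    init_image, mask_image, example_image = img_examples[0]

    result = predict(
        {"image": init_image, "mask": mask_image},  # same keys the Gradio editor supplies
        example_image,
        seed=1,
        steps=25,
    )
    result.save("output/result_direct.png")

..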
code:: ipython3 - - # please uncomment and run this cell for stopping gradio interface - # demo.close() diff --git a/docs/notebooks/paint-by-example-with-output_files/paint-by-example-with-output_41_0.png b/docs/notebooks/paint-by-example-with-output_files/paint-by-example-with-output_41_0.png deleted file mode 100644 index be911bee3ee1a5..00000000000000 --- a/docs/notebooks/paint-by-example-with-output_files/paint-by-example-with-output_41_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:ff1da225f5d53354c6bcdb34a891dcd1ef77e23b7bd76bee3367414d7efcde6e -size 2092300 diff --git a/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_17_3.png b/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_17_3.png index 20280b15f5dc07..1be4ba9fa45c92 100644 --- a/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_17_3.png +++ b/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_17_3.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:3983e7e27f73b5e03e02cfb02950ce8aef26d9d6a19a7376c51a6f0b00913732 +oid sha256:eacdcf6e619052ffe8bea1810c93678559cf210808d871e0e2b8a81939e1fd26 size 106259 diff --git a/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_25_0.png b/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_25_0.png index b5ff9a7ccdcd2c..f827c9c1094e46 100644 --- a/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_25_0.png +++ b/docs/notebooks/person-tracking-with-output_files/person-tracking-with-output_25_0.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:aa4b6d563d3c164036182f80dfc247e3c19a892fae85b49f7eb51518f0bc0141 -size 219418 +oid sha256:5dffde5665ae619cc99fddef72befb32d1002becce56dfccf50e7577f1fab020 +size 218904 diff --git a/docs/notebooks/phi-3-vision-with-output.rst b/docs/notebooks/phi-3-vision-with-output.rst index 778fc5aa7d6bc7..71981daac13be4 100644 --- a/docs/notebooks/phi-3-vision-with-output.rst +++ b/docs/notebooks/phi-3-vision-with-output.rst @@ -260,10 +260,9 @@ documentation 1 or self.sliding_window is not None) and self.is_causal: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if past_key_values_length > 0: /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3.5-vision-instruct/4a0d683eba9f1d0cbfb6151705d1ee73c25a80ca/modeling_phi3_v.py:444: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. 
We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! seq_len = seq_len or torch.max(position_ids) + 1 /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3.5-vision-instruct/4a0d683eba9f1d0cbfb6151705d1ee73c25a80ca/modeling_phi3_v.py:445: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if seq_len > self.original_max_position_embeddings: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/dynamic_graph/wrappers.py:86: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/nncf/torch/dynamic_graph/wrappers.py:86: TracerWarning: torch.tensor results are registered as constants in the trace. You can safely ignore this warning if you use this function to create tensors out of constant variables that would be the same every time you call this function. In any other case, this might cause the trace to be incorrect. op1 = operator(\*args, \*\*kwargs) - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/cache_utils.py:443: TracerWarning: Using len to get tensor shape might cause the trace to be incorrect. Recommended usage would be tensor.shape[0]. Passing a tensor of different shape might lead to errors or silently give incorrect results. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/cache_utils.py:443: TracerWarning: Using len to get tensor shape might cause the trace to be incorrect. Recommended usage would be tensor.shape[0]. Passing a tensor of different shape might lead to errors or silently give incorrect results. elif len(self.key_cache[layer_idx]) == 0: # fills previously skipped layers; checking for tensor causes errors /opt/home/k8sworker/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3.5-vision-instruct/4a0d683eba9f1d0cbfb6151705d1ee73c25a80ca/modeling_phi3_v.py:683: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if attn_weights.size() != (bsz, self.num_heads, q_len, kv_seq_len): @@ -375,7 +374,7 @@ documentation =2.1" "diffusers>=0.26,<0.30" "gradio>=4.19" "openvino>=2024.0.0" "einops" torchvision "peft>=0.6.2" "nncf>=2.9.0" "protobuf==3.20.3" "insightface" "onnxruntime" .. parsed-literal:: - Cloning into 'PhotoMaker'... - remote: Enumerating objects: 306, done. - remote: Counting objects: 100% (151/151), done. - remote: Compressing objects: 100% (98/98), done. 
- remote: Total 306 (delta 132), reused 53 (delta 53), pack-reused 155 (from 1) - Receiving objects: 100% (306/306), 10.24 MiB | 23.03 MiB/s, done. - Resolving deltas: 100% (164/164), done. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/photo-maker/PhotoMaker Note: switching to '1e78aa6514c11a84ef1be27b56c7c72d6c70f8fc'. You are in 'detached HEAD' state. You can look around, make experimental @@ -119,24 +136,20 @@ Clone PhotoMaker repository Turn off this advice by setting config variable advice.detachedHead to false HEAD is now at 1e78aa6 Update README.md - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/photo-maker - - -Install required packages - -.. code:: ipython3 - - %pip install -q --extra-index-url https://download.pytorch.org/whl/cpu \ - transformers "torch>=2.1" "diffusers>=0.26,<0.30" "gradio>=4.19" "openvino>=2024.0.0" "einops" torchvision "peft>=0.6.2" "nncf>=2.9.0" "protobuf==3.20.3" "insightface" "onnxruntime" .. parsed-literal:: ERROR: pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts. - paddleclas 2.5.2 requires gast==0.3.3, but you have gast 0.4.0 which is incompatible. - paddleclas 2.5.2 requires opencv-python==4.6.0.66, but you have opencv-python 4.10.0.84 which is incompatible. + paddleclas 2.6.0 requires gast==0.3.3, but you have gast 0.4.0 which is incompatible. + paddleclas 2.6.0 requires opencv-python<=4.6.0.66, but you have opencv-python 4.10.0.84 which is incompatible. parler-tts 0.2.1 requires protobuf>=4.0.0, but you have protobuf 3.20.3 which is incompatible. + tensorflow 2.12.0 requires keras<2.13,>=2.12.0, but you have keras 2.13.1 which is incompatible. tensorflow 2.12.0 requires numpy<1.24,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow 2.12.0 requires tensorboard<2.13,>=2.12, but you have tensorboard 2.13.0 which is incompatible. + tensorflow 2.12.0 requires tensorflow-estimator<2.13,>=2.12.0, but you have tensorflow-estimator 2.13.0 which is incompatible. + tensorflow-cpu 2.13.1 requires numpy<=1.24.3,>=1.22, but you have numpy 1.24.4 which is incompatible. + tensorflow-cpu 2.13.1 requires typing-extensions<4.6.0,>=3.6.6, but you have typing-extensions 4.12.2 which is incompatible. Note: you may need to restart the kernel to use updated packages. @@ -197,10 +210,9 @@ PhotoMaker to generate the original PhotoMaker pipeline. .. parsed-literal:: - 2024-11-05 02:22:09.727876: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 02:22:09.761823: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 02:03:50.933677: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. 
+ 2024-11-22 02:03:50.958255: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 02:22:10.482979: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT .. code:: ipython3 @@ -389,12 +401,12 @@ output(text embeddings) which will be the input for U-Net model. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead warnings.warn( `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:243: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/models/clip/modeling_clip.py:243: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if not interpolate_pos_encoding and (height != self.image_size or width != self.image_size): - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/notebooks/photo-maker/PhotoMaker/photomaker/model.py:84: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/notebooks/photo-maker/PhotoMaker/photomaker/model.py:84: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert class_tokens_mask.sum() == stacked_id_embeds.shape[0], f"{class_tokens_mask.sum()} != {stacked_id_embeds.shape[0]}" @@ -469,9 +481,9 @@ sequence of latent text embeddings. .. 
parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:88: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:88: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if input_shape[-1] > 1 or self.sliding_window is not None: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_attn_mask_utils.py:164: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if past_key_values_length > 0: @@ -575,15 +587,15 @@ original Stable Diffusion XL model. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/unets/unet_2d_condition.py:1103: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/unets/unet_2d_condition.py:1103: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if dim % default_overall_up_factor != 0: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/downsampling.py:136: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/downsampling.py:136: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert hidden_states.shape[1] == self.channels - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/downsampling.py:145: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/downsampling.py:145: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert hidden_states.shape[1] == self.channels - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:146: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:146: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert hidden_states.shape[1] == self.channels - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:162: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:162: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if hidden_states.shape[0] >= 64: @@ -686,8 +698,6 @@ Select inference device for Stable Diffusion pipeline .. 
code:: ipython3 - import requests - r = requests.get( url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py", ) @@ -921,7 +931,7 @@ Running Text-to-Image Generation with OpenVINO -.. image:: photo-maker-with-output_files/photo-maker-with-output_33_0.png +.. image:: photo-maker-with-output_files/photo-maker-with-output_34_0.png Interactive Demo diff --git a/docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_33_0.png b/docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_33_0.png deleted file mode 100644 index 28ccdbf331406d..00000000000000 --- a/docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_33_0.png +++ /dev/null @@ -1,3 +0,0 @@ -version https://git-lfs.github.com/spec/v1 -oid sha256:21913b4e38eb996bd7d8faedb660aa56fdbf4a6c1ef71157d5e845c9b8a31e7e -size 357743 diff --git a/docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_34_0.png b/docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_34_0.png new file mode 100644 index 00000000000000..5c425ae841f4c7 --- /dev/null +++ b/docs/notebooks/photo-maker-with-output_files/photo-maker-with-output_34_0.png @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99978c67369aac55e26840e7e4b59aa54bcbf4cda132774760a9e3da86803cb9 +size 357743 diff --git a/docs/notebooks/pixart-with-output.rst b/docs/notebooks/pixart-with-output.rst index c1c9a4b4e8ec57..517191e17501ef 100644 --- a/docs/notebooks/pixart-with-output.rst +++ b/docs/notebooks/pixart-with-output.rst @@ -118,10 +118,9 @@ directly in latent space, achieving super fast inference with few steps. .. parsed-literal:: - 2024-11-05 02:30:04.644117: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. - 2024-11-05 02:30:04.680089: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. + 2024-11-22 02:11:50.540718: I tensorflow/core/util/port.cc:110] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`. + 2024-11-22 02:11:50.565755: I tensorflow/core/platform/cpu_feature_guard.cc:182] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations. To enable the following instructions: AVX2 AVX512F AVX512_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags. - 2024-11-05 02:30:05.360275: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT @@ -130,6 +129,11 @@ directly in latent space, achieving super fast inference with few steps. Loading pipeline components...: 0%| | 0/5 [00:00. This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565 + + .. parsed-literal:: @@ -138,7 +142,6 @@ directly in latent space, achieving super fast inference with few steps. .. 
parsed-literal:: - You are using the default legacy behaviour of the . This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565 Some weights of the model checkpoint were not used when initializing PixArtTransformer2DModel: ['caption_projection.y_embedding'] @@ -229,7 +232,7 @@ Convert text encoder .. parsed-literal:: [ WARNING ] Please fix your imports. Module %s has been moved to %s. The old module will be deleted in version %s. - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/modeling_utils.py:5006: FutureWarning: `_is_quantized_training_enabled` is going to be deprecated in transformers 4.39.0. Please use `model.hf_quantizer.is_trainable` instead warnings.warn( `loss_type=None` was set in the config but it is unrecognised.Using the default loss: `ForCausalLMLoss`. @@ -272,11 +275,11 @@ Convert transformer .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/embeddings.py:219: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/embeddings.py:219: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if self.height != height or self.width != width: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/attention_processor.py:682: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/attention_processor.py:682: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! 
if current_length != target_length: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/attention_processor.py:697: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/attention_processor.py:697: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if attention_mask.shape[0] < batch_size * head_size: @@ -301,9 +304,9 @@ Convert VAE decoder .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:146: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:146: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! assert hidden_states.shape[1] == self.channels - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:162: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/models/upsampling.py:162: TracerWarning: Converting a tensor to a Python boolean might cause the trace to be incorrect. We can't record the data flow of Python values, so this value will be treated as a constant in the future. This means that the trace might not generalize to other inputs! if hidden_states.shape[0] >= 64: @@ -449,7 +452,7 @@ And insert wrappers instances in the pipeline: .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/configuration_utils.py:140: FutureWarning: Accessing config attribute `_execution_device` directly via 'PixArtAlphaPipeline' object attribute is deprecated. Please access '_execution_device' over 'PixArtAlphaPipeline's config object instead, e.g. 'scheduler.config._execution_device'. 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/configuration_utils.py:140: FutureWarning: Accessing config attribute `_execution_device` directly via 'PixArtAlphaPipeline' object attribute is deprecated. Please access '_execution_device' over 'PixArtAlphaPipeline's config object instead, e.g. 'scheduler.config._execution_device'. deprecate("direct config name access", "1.0.0", deprecation_message, standard_warn=False) @@ -564,7 +567,7 @@ To collect intermediate model inputs for calibration we should customize .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/configuration_utils.py:140: FutureWarning: Accessing config attribute `_execution_device` directly via 'PixArtAlphaPipeline' object attribute is deprecated. Please access '_execution_device' over 'PixArtAlphaPipeline's config object instead, e.g. 'scheduler.config._execution_device'. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/configuration_utils.py:140: FutureWarning: Accessing config attribute `_execution_device` directly via 'PixArtAlphaPipeline' object attribute is deprecated. Please access '_execution_device' over 'PixArtAlphaPipeline's config object instead, e.g. 'scheduler.config._execution_device'. deprecate("direct config name access", "1.0.0", deprecation_message, standard_warn=False) @@ -1649,7 +1652,7 @@ pipelines. .. parsed-literal:: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/configuration_utils.py:140: FutureWarning: Accessing config attribute `_execution_device` directly via 'PixArtAlphaPipeline' object attribute is deprecated. Please access '_execution_device' over 'PixArtAlphaPipeline's config object instead, e.g. 'scheduler.config._execution_device'. + /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/diffusers/configuration_utils.py:140: FutureWarning: Accessing config attribute `_execution_device` directly via 'PixArtAlphaPipeline' object attribute is deprecated. Please access '_execution_device' over 'PixArtAlphaPipeline's config object instead, e.g. 'scheduler.config._execution_device'. deprecate("direct config name access", "1.0.0", deprecation_message, standard_warn=False) @@ -1706,9 +1709,9 @@ pipelines, we use mean inference time on 3 samples. .. 
parsed-literal:: - FP16 pipeline: 43.073 seconds - Optimized pipeline: 41.450 seconds - Performance speed-up: 1.039 + FP16 pipeline: 40.248 seconds + Optimized pipeline: 39.688 seconds + Performance speed-up: 1.014 Interactive inference diff --git a/docs/notebooks/pixart-with-output_files/pixart-with-output_40_2.png b/docs/notebooks/pixart-with-output_files/pixart-with-output_40_2.png index 8ac4e49184e284..47dd4083f93179 100644 --- a/docs/notebooks/pixart-with-output_files/pixart-with-output_40_2.png +++ b/docs/notebooks/pixart-with-output_files/pixart-with-output_40_2.png @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:1d0099ef65d0edd9b83af849991c6a48ebcb6b589320f0edd85c317ca3ecfa26 +oid sha256:cf4837ff583d9f6ab905f723de121d443fad3a6955444819659b09fba2a580de size 2311803 diff --git a/docs/notebooks/pixtral-with-output.rst b/docs/notebooks/pixtral-with-output.rst index 25e1004aac09f7..fcbc6b2262118e 100644 --- a/docs/notebooks/pixtral-with-output.rst +++ b/docs/notebooks/pixtral-with-output.rst @@ -153,20 +153,19 @@ documentation 0.19 and openvino version <= 2024.4. Please downgrade to tokenizers version <= 0.19 to export tokenizers to OpenVINO. INFO:nncf:Statistics of the bitwidth distribution: ┍━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┑ @@ -469,7 +465,7 @@ documentation =0.24.0" transformers "torch>=2.1" "gradio>=4.19" qrcode opencv-python "peft>=0.6.2" --extra-index-url https://download.pytorch.org/whl/cpu - %pip install -q "openvino>=2023.1.0" "nncf>=2.7.0" + %pip install -q "openvino>=2023.1.0" "nncf>=2.7.0" "matplotlib>=3.4" Instantiating Generation Pipeline --------------------------------- diff --git a/docs/notebooks/qwen2-audio-with-output.rst b/docs/notebooks/qwen2-audio-with-output.rst index 6b32837a5c2c5e..04fb11ed6bae6d 100644 --- a/docs/notebooks/qwen2-audio-with-output.rst +++ b/docs/notebooks/qwen2-audio-with-output.rst @@ -29,8 +29,9 @@ In this tutorial we consider how to convert and optimize Qwen2Audio model for creating multimodal chatbot. Additionally, we demonstrate how to apply stateful transformation on LLM part and model optimization techniques like weights compression using -`NNCF `__ #### Table of -contents: +`NNCF `__ + +**Table of contents:** - `Prerequisites <#prerequisites>`__ - `Convert and Optimize model <#convert-and-optimize-model>`__ @@ -78,11 +79,11 @@ Prerequisites from pathlib import Path import requests - + if not Path("ov_qwen2_audio_helper.py").exists(): r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/notebooks/qwen2-audio/ov_qwen2_audio_helper.py") open("ov_qwen2_audio_helper.py", "w").write(r.text) - + if not Path("notebook_utils.py").exists(): r = requests.get(url="https://raw.githubusercontent.com/openvinotoolkit/openvino_notebooks/latest/utils/notebook_utils.py") open("notebook_utils.py", "w").write(r.text) @@ -211,13 +212,13 @@ documentation target_length: - /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/810/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/cache_utils.py:443: TracerWarning: Using len to get tensor shape might cause the trace to be incorrect. Recommended usage would be tensor.shape[0]. Passing a tensor of different shape might lead to errors or silently give incorrect results. 
+ /opt/home/k8sworker/ci-ai/cibuilds/jobs/ov-notebook/jobs/OVNotebookOps/builds/823/archive/.workspace/scm/ov-notebook/.venv/lib/python3.8/site-packages/transformers/cache_utils.py:443: TracerWarning: Using len to get tensor shape might cause the trace to be incorrect. Recommended usage would be tensor.shape[0]. Passing a tensor of different shape might lead to errors or silently give incorrect results. elif len(self.key_cache[layer_idx]) == 0: # fills previously skipped layers; checking for tensor causes errors @@ -396,16 +396,16 @@ Intel `__ .. code:: ipython3 from ov_qwen2_audio_helper import OVQwen2AudioForConditionalGeneration - + # Uncomment below lines to see the model inference class code # OVQwen2AudioForConditionalGeneration?? .. code:: ipython3 from notebook_utils import device_widget - + device = device_widget(default="AUTO", exclude=["NPU"]) - + device @@ -431,20 +431,20 @@ Run model inference from transformers import AutoProcessor, TextStreamer import librosa import IPython.display as ipd - - + + processor = AutoProcessor.from_pretrained(model_dir) - + audio_url = "https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen2-Audio/audio/1272-128104-0000.flac" audio_chat_url = "https://qianwen-res.oss-cn-beijing.aliyuncs.com/Qwen2-Audio/audio/guess_age_gender.wav" audio_file = Path(audio_url.split("/")[-1]) audio_chat_file = Path(audio_chat_url.split("/")[-1]) - + if not audio_file.exists(): r = requests.get(audio_url) with audio_file.open("wb") as f: f.write(r.content) - + if not audio_chat_file.exists(): r = requests.get(audio_chat_url) with audio_chat_file.open("wb") as f: @@ -466,14 +466,14 @@ Voice chat ], }, ] - + text = processor.apply_chat_template(conversation, add_generation_prompt=True, tokenize=False) audios = [librosa.load(audio_chat_file, sr=processor.feature_extractor.sampling_rate)[0]] - + inputs = processor(text=text, audios=audios, return_tensors="pt", padding=True) display(ipd.Audio(audio_chat_file)) print("Answer:") - + generate_ids = ov_model.generate(**inputs, max_new_tokens=50, streamer=TextStreamer(processor.tokenizer, skip_prompt=True, skip_special_tokens=True)) @@ -485,7 +485,7 @@ Voice chat .. raw:: html - +
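When the reply is needed as a Python string rather than only streamed to the
console, the generated ids can be decoded explicitly. The snippet below is a
small sketch, assuming ``processor``, ``ov_model`` and ``inputs`` from the
voice-chat cell above; dropping the prompt tokens by the ``input_ids`` length
follows the usual ``transformers`` convention.

.. code:: ipython3

    # Generate without a streamer and decode the new tokens into text.
    generate_ids = ov_model.generate(**inputs, max_new_tokens=50)

    # Keep only the tokens produced after the prompt.
    new_tokens = generate_ids[:, inputs.input_ids.shape[1]:]
    response = processor.batch_decode(new_tokens, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
    print(response)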

z4L!KNI?DgRNlnljAoHY`2B$i=Bm=gvHI(lMbxiRE3A3XHXqS zJG`~Q2tg9qvr-n=1TRCGF?rgyA`F$rZRo?Qmgtr;5CK|R@~d~K=ndE}bumdRWzU)&y~;oNN(G<4sF zsl(@ddMw}U119tZ1F-NtyqE3dfV`~>3bF2A~|3o$~CKecUrnXGfp>c7p#jS7h z%>#6@BfBV5iRe#sxa=)Y%N(5Cf!JhhdXyKn)f!c6(PrG>^$`tnwdeON4aBD4 zkUl{g<#K{CZ3nUy=hR9-^TDkxYPhFxd{I2Fzk$ed{#1>&!6S^* zV+gn%hvQ?lC-NKa>iN+1?mY9!WXHTbM!Oqf09HULlyGz&^}yVYR&c%%g3HM9(AFiF!ojQVa51@Hay(nD4NSA9E!77C|I>z7d+s13}wvYY*Ip2eDXA?dd(9 zVwFEUblVIKs@2LPsIA@8H9U#p9Qg0M+!~yFddZxJBhuNE0N{g=FDF=ziVJ-bY7I2O zYzAS(L%p-|#hojS(d!r;Oaq;Z05OUxvvo$T?Ti)4&el4qt4f~)+{dBBNk99tlrV^> zhK_DwDgm(!sS@vdu{0GwvD#>Qk@M(CO!EC6G-R?B4d-uvC@&Jsn*H|g&C&kwmf$V> z~ZmNvKW!> z&cKG4$hbn(p+@!=L@r?!Mp8ao4bBx^{f3YrUW=(P1xqasnO_&8VW}XBj7R;+tJFNO zo+0zmXkwP}u3!PXxw(Mf6&A})-CpB~D&(Ws`uaG@CU|?2AZ4CuzO2bb$2I}yuKok; zK-{%n{&@WDQJ99=+^AI(a3gO&C^;U95oK>K;V!chTIfktWU&U^s+*Qg$jp9m_nRot z=hO#vQW3=&R^c|yTmu96GqBH}0=Mdz@uiL7)|8*GCFtUPVL+zx;(l6AjRZ1PQfQ~8 zLG@Fq#Q^$*fmGGm2c#;f|4UVZ z_0ouh%D|yIu=!2W;Y-Mu407b_g~6TcR`9Cdi&VZS3u-iEg3bArh1I{l4oF5`Wyoz9%0z$OJd)XpD5W#5{7ZhZ->?kR$~prB&JS)HwcfRD|2O%KGyml&(CT3Z zEg(;s{I{dx{GT1w|1bIduA{LbJ0DL;y{;zoHKjhkLn0%M;*@3~InF;jGsrJS{tjL}9K|X*v(PWERevYy zIYk^|p{3ph+k5&z+xj#n+>P|Q3&t9dS%_jMLNW8YD`MoFcMrUDrN0Alz?ij*@nV@m z89Bt5-9{en800Os)@tLqmK14-CNf8oP{30&+GH@gxXOTZoe?e(tX69|z#>Wjd<^&- z_!uxuGh!8)hRS8!TG6T-bD_8{W!1TcNCv*SwEjJ@z`YjF z*|xh+JvIi*CbTH95u>%}+7InPD%30utRk?Kz|!D`el-gCn_@-6vfrIud!K`fwJ&|@ zXGC3)XMI6G@;Tv$_AtT`xM1@JCH#gzuJOyc<2utLCYD{td(Jj`HI{j1tF}W|KwZut z1S*@vZp#ipVQ=?k-l7kueHMXL_FH264lA{`;YO4bRz&o!-BSL-@kg*tjB5j|ZeCb& z#=#rL!5bKB0>2!#ZvWh_o#gSk$XI9~!!QZrcfxj&RceDS3mW|6=P@wWYC9UlLR_9v z9S?Np_O@X>mHhoTRR)7C^Y4^h#=ofto4i=E-_QjBCs{1{6*(FVebGUA!GAaE*PP}0 zvdjmmwFV(5{V2eVI>hr6#zDp~aHIZ9X=#o&sz^pYvK!SW5_}Nj@i~_=pH#wCl6?8+ zwLD4Lp&rFiQ>LwF<^LX7=n*MbsJwptcVOYJkE%knoh)Kr7h1BCe0qg%P#;bLFoN?8 zys(gDgpfh${_n!V($Nq*++d8_bS%E!bj(x`3s@~?7x9(;xM2?>zD<=u#n-_IY=npg zmcKkz@PR(mayTpfaZ~7d^H5p!{uOl&JHy7MR)H0I=3@%aB1ck;E-a>+QLm=j0G3PN znf3@HG^OAblRi0}LB0Y78qhLSu_L_+vJ`1Q(1eThY%>c95P?3B7S)?L74;3h{(x(- zf)-WvWq_z?%qicwlii469Uw^e{O)dmTj)2;qUv~-hb3k{_<_+N(jRo=Ly(RIy_V{g z9pXFHe@PFED3chnu_x`ZMdC@UET*+V6iEt&3IFQi-bS{PB2gfq0030FgPa}k3!sx* z4iV3Se)Z$|>RTIB2pp*o14rs527zClY_yIegnRgW4s^tR9l+q=10f4|VglTGq}yHF zHXL`w7;kbk{5{)5;sfyc@;HePF@Rqoo>?zBn5z-G9GhcO)0+BC$g)`lCwMHK=^*HS zQz>^3xB*UH)f0dQH2JG_?z@^5Ez(YufCubahU653sOHpCc9W!YgVYPOo6O|dG+A{O zr5Uo}2aPM4+Pu?M*H?jG3N8Wwwc8KeuMV$aPv^P7!e^e)4X;aP$U-w_V}o@A*&{gO z2Ba)QH-{g$paTOEzbq>73T%F0k$hlLRs}PziN15UhVTMVCR*KJF5h zXeEsU6hU1$S$%$uc?|;KAcN^^j6n_qkmC6B_Tm}LHZmc?`A@wioi>^fwp%y*&pQks zkB`$Se?B}2*>kwGQuAIJ`+Vb1TcMtc;$F@v#w*y&v-0i4f^+5vmxoT5!23$7Z@%G>T#Iiu>-f12;Qly(V_~)*n_NFwfuBr#nEhzjAXRp5 zcxaY>a6hkC+1W|Qhs3ajt{X%J1qmr!RduuF_^YS*|M(Ji9e7n6-e2dh32iskq*-*_ zM_#m|F88jc9kt4@R7lh=Dy_pM#akF#jW$?KqC;B5)kPa^=Iso)an$8Vv; zKhFj1xzCU|BRXyHa_Ki=<4BFgIS{-8*Xqx4M-ZYzTOJRsUXJ9oKmK6byfySwQ;NLf z&f4FY$xmMlI}@(3clKW5j{W@q*gEU4toncH)7>DQ(k0!ENOws~Bi$VrNOy>ofFLE^ z-Q6J}-Q7~s@azlk`<(AN=lN-`_4x<(teJVg=I7ZQX{_GGz}-7iP9DNgEZefcvB_pm zQH%L=rU`-v*{qAmb@^A$+9KP)5f-FH+1)AhnBdyTD}0}hRM0l$nm+hVGgyT}f;3wBQ)fEn?u_K6>v^YM%+1LALFdpam*zKCGEMyG zb8W{p#8ZBRahjr2^-G%N0iNEgXcINU=pTQD{w$|{`pIeIocPEbbMrl_!-d)g<(>gG zzEHE9Ay#p(tP8YPhiJsO3WccAzge4Ewp~c-p~3MTCA2eM-WiZzp$Jum&W>8SOd%;T zBfCZ6RE}ODC$d`reF3T)W7PohDXW)kYtyEGjkDb<-o-yZe$t8Jgm`1cPn7vT&t zS+IDgT$s#DajGzl>IB>TpPm<~SI|`dT$z@B`{EvmKMbN;Utq`KnphQe`$_>75{!#c z81mzSE2P4PAV7LTguXeV`;g8;MIEl;xJ5QNMC0lc@lFFTwl@o%i%t*ywd=xG1 z_6%LE`UljzEUOwWKZr0%X7-CvmAuH$S~g3Wre7mQ3z0=aW?x&(!^i8bDEL;e%zX+n!ajQeU_DRY+XlK#ORs+mep& zl1w|8T2#$3tnIxGLE_5w40b~)veAI8wS`k-o!6m^`#hlf)>EV3TvqTRL$j5144#Oh 
z3+P;_$|c3sqdWR#CRFv52eC7e$Uq)gqys3hjf(fr9RDh? z!j!G-YcDYf zp-;)Rcu*9eH`S-gQ91rUqsBZavmDhh8S!9ym}P^h6;|LKXVg57V@^X6jOe?K4KE=aug zV@xhZH-NMSi3N}?=sr-y;*Z^V8P+BpRNJtrLo?0gaL^UZf6bzdIK=Jw*JGrYwM~|7nQ& zQqN$2ff@l41+q6@E-qF{B}9fweJbct!a!06NAb1zR`kT%BkcT!aeJm)l)+7G!XoZj zwt)0Bd(v-X=0%Mf+&lrMGO*ls$FSlcTde5Sd=EIctnz0ZTDbKHVo7Tf{K6P=-9rR+ zdLS0u0vhW;ZeW-+K`;kcS8@ID_Kr!0cD(s1xXC$djt!bEZQTwO9y6piR3&w}#t3BT zY{U{>++K{QqARo^)>pRiBiqonPQR*Z$?$&q1b~kLah#P8+sjpeV?YU25W>Jh+RRW| zo_aTIvbds{IAW7N8=H1#dk~Vg^5R$E7!Y9R4d4oXUH2!m0FD81lw*Knz={(MDXkN{ zAN&Fc2@Bw3zG z2%Jbd)DakEr4G#)8txWD@KO4$0QBO@#G489RkA3w^fC-LKB&Gs1eWF`$pOjMnl1fuQQw0g(7tSGrIWQ-~<`q3zQTMR?r;8UGO;X2@<3>m4W1lx#}$DR>O zKYp6kBf12&NI6(xx@i}|eI%^84Qm%WD>969Q+|#UH1Od&g-<@0$Q4}_N`@qcdIdTNAT{w=yKD(u4_LgbS#{vPkEY~p zj(>j8y+@sb@f)M^7&9rgZr&Lzy1)j zUeRHm)^6i%V!a6P+>VwwDq3v~hb!bO<^=BhjYCB{dlxHV>0G}Ar`O}EZ*uCm{) ze9B3u?L)QINnoi?GcLbFt6;eB*3A$9>~>J{2!{?f$hZc*zmAR2y-(75t6`IL#F~@% znPNPFNI^7!em2`|{Vgz{_0bcj=28T^=E3-*YOuyH^mA{{Myb!m241l=*B_l=k4}D? zLS9^dRK3@npz?4Q{rr^t?z3&dI#fxK@OfCJgq1rwwBoEy2c}afrX1%!CsTE^L`f@1 zsq>j6z^vfsapQ1!$}f_ob5)i6E6zmi44x0NVL5g)J9Lj+NVne+s$8heQ3StON^2Aw zy(*w|QG;?>3>r}HuNq0>hb+HxVV1h;C_WG~;XOq)qcIW4V-k(qPK&%CH^U6FHEq*W z0HQ!_7qyP3g+Pq^V2nSqKJd%xoiyJ;`3-NoCL9v;g^F9UrB#J!>~Bzaku^v~FUD6a zUz0abSOe%PYj1jQKJ9&}yk8yYipFKkISRgyTp>E>5*-y2#Gig*XtniVcrtgPv)$2} z;i))yLcJm$iW(e(NOghEsc-$ZEi`e!~lFH^l%qT5< zMz)C)TYU2;KX(AtU~bp(|)yB*ATRMT)Cn z)Fg~QPKIX!DIjz*==9C+-WhMevZ8uahg>RiHNZo5fV_B@2B4t$?FBiO{#q-e2YP9!Yl0i<&ge`D`jjK(Y;UqK67wUzzRp)QN-NO(@(KAv zAw}YqP*s|;!C$v3C%E+fqVuyYZy}Z3z~ViffC5Nb<((#RSvQ@Wl(M03Dr%(8n=nsY zT2neGqR0s8Gkb`?Ue&Z~^4;jZkKFOjhtrPM;SZ#m48O@7R^+r3e>>A9|D%+I0Hs7n z1t=w)7p0^E1&kjRE$)fRe7)%h?*3|ldiu>p=#g@FufANUe-w~3Gw7iIt`gLMJ+o%P z3vE64rWE+MI_0GZu1=-Y{d}oTsewfjIp}}kskWL`yyFHOG36&=To?3@M0E0PZGXto zGbE$!tb&!1;EOWyqoUS!OP`imOsxU9Cs_2P@u9otXX%^CA}c{74c`S(H$EEjb1iay z-0H6;*FhPn$1^FmCM+cX8CAji9r_|Ee|tShE`^M>qd4W|s0k@nfNtB5ao$F_v$z@kMZ4Kom>jgCnWE%WV(L-5%~7$t7aavT zbK~NKh(gGU1ej~0O(+R9Svvx@Co9?baKf=u9&fDth<|Za8mo}+)EsOVg&0moAsk#D z!<;ZHrbd`>FmJ~YTMH`#rJD-=5Vm=VAkcF`|Mq#1DPKbJxdIXdBz3fQurpt zda|~$Q4=CAAF39)-AvW!y4=K{B(@u)mh97N|0x&4J4NT-Pdg?AhVBQ;O20%GizhBKHGl19l%i^7xv;L=Z`j%ru$X=LvK)%>$xM~(u~MQV{Y z>;XuCCW!aSImzNkw z^SlYd`h%Y^G;o|&DB~!6)|U%`1|sp-VJ}X@4oZ4=`!(W_i9~2;fUvlDsf){ zWEEb$f2`cvLv7FAc4gL)Oy%ziK zr1la8+sKe`VdCzw*hma8scv15Q2`FZwOh$xwOB79a^<%F~_4e_$}@$T{Y2x<+&1f!pwx$?He*N`n4s z`RuYvM0AaMWN5e>5HNZV2jXVx59wW*7ugYjflaoKt@<5bMnVJ&-@ZVnOh?iZBjW)9 z_qI8ISKV?!0^<58ck|a#=5n!*aGh_AzI9`?z9i0l$oq!1KL?vWHr=e>sfBQ7< zMt`v!EH%!^Z#6h$`|bGULy24@(RM1Ga&(Tfgz&l|m|sk^OQn1FG@c9sSzs*hEW{b9 zfToXDDzK$(yDf@Mhi`WZi*;;$;W~7*%jT=|PQyoxa;*5&@Va29cL> zU)hg(E&=C>LWeo`*wkx}&kSqECo@jwA(JDFfzr@p7hqHoE+vQn!5^^`LD}fu5I`Myk23>iRP_*aO(L*;Q0+0iAb%_UL~;??(JuH-A(Y#sNK-U# z)$sD<6bFQ_j#?>&FsIkEux2}o37Yo@AXRlmfhTt1k;_nBi)yo)0_Z40w2O~Y+p(FR zXZ@7tQdM+}LIch{NG^#JP%~mx)zUKqBY)cI#sT>{&N_T>YfT!Viv`yfn2Ax$kf_yaEe;{OJJ?=+@yJ7_+K zf&_K$w<6lWx!*V;-??_+yDwTJ&=u0IB&v=ImT1AsxeWH_DYD_fsb3rT)DNWagTYr_ zKf8&+go6*b^^?=Pe!2By18)6ffm=U!F)!T8mP{rQz41?+d=iCMW?pgcT`6`_zs0Gb zaVONqpFcx}G1-wne&2IVmL1#Q$YcNjRe+fKM*hp{jLdOG&I2PH5os65{dUvxifoAb zfkbE~o|ys}$o*c2DYF^HeqYvs@3lP_;jKk0lDPPA>y9y#9Xi|`67C9gCNeLU`*SDuC@BWP>nc9wA;_VTqCeQQEnGb3UEMW@>&FX97) z<@oGKS42rm={mDUtw00`vdoTl2r8l7ruSfx9LG1BVB@h;S<$sDX$>~;+SQ? 
zwex~u-D|o&sx2lH=bs4LOVd3=?;zO=1ewhWNvox#N9{P7|FBc*Pb2~p@z4P~x>^uA>_rXLjcVY?7^O2H_eLp;1eZ5Sv= z0b!nL1z8`_gD@?z1vH1t(hP0``ZC0#_1&nZV_Ah(|FOkJjBzV@zun*rcky)*y*x%9 z2adsDD;SB;bdU_q!vi1@Y^YdZFmZ|_b#F&y8i?B@9tk2(-hgT8p@ycGycB}i1$5YW zVbGvezWhWeN3;*lLw0a9`8XZ{5Ih5Ja{w(pr4LnVzhh)&7fef+K^Uzt`IO8P;O{L} zhIwU4lkc>DK=XxpA2wM8GqXJaFqqa|7`!>ucbeXq&x{z2K?*ZpEcGC< zJ1{VaYhkW#0NxZ9eHwQW6n|;+P6MI&E7-BNU9onsLU(&;g~?6aGR=mju(&N4WXBAO!P!ZK(a-YCXy5&|1`{f&oilPaNvp1{oH7~mo# zGy}+VbBm5boaNHOw3500AeqdG{`3KoLPCE+4Q3DF{%qdSpd~Vl7pWxgjX_b^*)E2; z5Ey?+AScN%Lpuc9oCxAEk+19suA)17FPn<*7*)gEY|?0&S|RdQFfJvlf%LQZv1l{5 zBXWf!Jw9kOn_`q>&6{|C%(~|kN^(B!7@E6>+}P-wM7_1os0U1!8c&f9oTz16r!$|i z@WVdwT7UEB*-|vKl3FJ^ zwnc)pw;MYs8ZWE49JyJK*v5rFC$*|;T+epmGfbT9qI)hq&HAk$IO-8+%ykdd}|>8Hi(OnC}C zAanhCiSkjsRw#ZOVBb4 z-SNTBX}}*Ck*&(!)59W^MWsW3zh~w>aa&j!s08&b%NRCZ*!3Sod&g|o>e#LQS&9L< zT;3x&xRV|^PItOV9e1?u?=L&@`!=O{=#N+$Lu?(p>X1w!PZ zB7;^>+WqaXr`&R?m+jL1(WRa*>^JRGsDC?Wi@J~!`4wA#Dyu8aJZAZeo2|LZzlJbmts(MKtOeyOz*KBf>DT}K^RRNT2|Bo0@awj&x1wy( zryO#=Aj~`rWK{Ll`Nq#$zJ9-^Stfi8ynl8g6sq?=)zVyqHC8)m>*2l=tYc_)TdWW4 zsO8^0soY*@?cnFPt1mGr=d!zMaVsE!;WC@`t*COgjt{mIQ5v`>{)V%6S?|=F1Ulw7 z8NQ-1vEKS*&wEarahA)0NRxARIyAFXM1b_8F`O59O7(5}yWt!Ri{GgjyLiy4{0CM0 zyJVs}kfvosr_cj}{v%u{J`ky}cOw4sn^KUe0q1aQ-rJ}?esTL;>7;AN2i4ePUxYtd z^EAaoSeJGwGb(+DWAa{ZH8kEwyaAyu&ijWmR&g;EF11EfT`WV$?OJwJoeQ>qF_hAe z2l!L(vBJ$T?ap4wqulfF#vN!5(?3wT=o+6Vp|UZIJ08{j0R)s!K@Px8zxvJQ;Yayp zYEFS38FQ-@cap<2T@*sf%;8GHI3YcI5o&*yG>DEBrh4|Z&|V#qfQnGB5J)vlQ|kWx zIKyYUDUH}cs;2=0i7%L}pCao+P*Cj#$^1r{76XRcG6b)35^Nw|(NwZ(Iu-=nBE7wW z|DL6#g=S<4=q97EPF&1T{KvpEIV^(=F@;dgzq(0u|M&!l^#`f361kW2U`^Lz8s$?J zq>=LdViDTIBNpC0Y!IlvpsW<@T#$D8vG!bQyya#S@|IFN@k?|BF@W}bRpWm^KChi` zWomwZnu6EDa{7y1+C@?-?vPG(Iu0zGIFRz#2;N|KrFl{?42?N44D55kt{1H%u@b!s z3UJuhWK#ROwhN%AgzWQJ4IHrsv9fDNPLdUGA4I5#-2i4fsJ#)X0@^O{^4pv6#rpF@ zCqx>wUqo2#D-pBeAaD)s~}xsbN1;UcXnQ(`+_ z&$tHlqV~b!e0r_nsn(;;Ab&A^r^>o+Jrh(kDK+z|sF;CUZ7r3orsrynOi(*?Dx#XI zy_Is_Q#{fORMpiOBb`JlR(Pes%}~??dr9=p?B}8Im-Oo17>2Ra z@%5EmB@FA@8}#4Ts)|@_!CjsRkc{o-g$#={QDQQcPV=fY9T+s)aWD)=*&plI#T>sr zpeuFb_^gMLYre%Ouzx*0B>K*tL^lL-+ofds{KUUvtytqETI|I1Nj{(s9+i!bFU zrGLs%wtve}kf@O`1$v{Oj6U%?kbmgWHc&w>1qXKW8*x9=$2=HwWppu%ae=_DUccN5 zK|TVbUvu^7V{#aZi<*6;u!AW)v-_UBW(q|DiU3+rc>mFYiU+r#{QtI~n4gsY zwxIX{B)VV{qgfUv?zT1M7-j0gSx z-(+qFNan21MmHSTe1ae7qwU82CUe``yfNBiu_|Mr{~67px`3lOl>dt67(=Q8GC_10 zKr}ar0gmQ0cpzVrxm&7T>`E8U&*8w#O!zlo9pPnW=3{@*t^9V0@vT0}C62nn50Xe0 z(QQ@!`1eh2km3+;MIE=Wwl>ugO|Xz>IE_T&zo}t?q&8D9JSi_R;DhK-F0AE*-$rZ& zNfwnJrim(!)=Mi|aLgR3kJ^%Js|0|N=*nN!H+cv$14!`qL}>rEpn#bfpau2*r3LkW zN>E!D>!wbI>B<-%(~a#~>P36rGUN|X!{Q^&Mx45*umn|rR$^;eR2J1ZLeaKeT?qZp6e4h@TEswxGZRk2!Ud=umk%=u3i9K`3F>Bvw1>_hRN>adnF2oV{dUKRbs z{gnGh3qjRP9Hi2@-`K#{0RTG*qb5Vmg3d9!rEjZ=bP(bZAL)$)I$!}Xx?URT$M<%2 zXnt>?A+o4AsotSE#Zu0AhR-R8BI=)mULv~7%{?!fGWtkS&$Ji~Afg+4iReN^GNE8R zUn085&~WZfa1|;eAj95rcdx(wlGQOj8p7+xnG}E2V{2v2EV>Yi)a+khQx!`$m%xz-Dq}@L717 zn|W+w@^x;k{CSqRHHqEsP%RdtD#{pRf#?oIbmYutSdu0wAMHngC~XI%&Z^V&yU5j4t9mkkJ7V-KT#e zI#?j08?1VX=xSdgI_72|qH9l4M9l2@uZT{xoJI@X*N6FUM27_TnB&&CNDas^t!q3LX7b&4 zPq$e?ZMVO~&Iy3n3Bb|Suq^c#&$!WQ-otfKU?T&x15{+2{M*BP9{#Uze+&&zGXpXd zph>#fJB{s+$%aXwVDYS#BDVmWiQB5ZQAIS{?Ki_4ObAx|G!Qe_zjXAZsl%EU;gZ4! 
z>FkE9NB4SkGzZ0DOxpHPN%ebk_l%@NSJDM)jDVzsLD%x~x}K`mq}?l`ZvtN(hpCj4 zB6gvE{?OO5-XQMh2+T5RfM0%_WAX>u8qrAC=1+th1D0lj zI0BV|I7EnQ)c-Ec=$vG2R}HU-4U^R9iMI)S)HZ%1shMuW`-r$goMrYqH9K#&PO$oU z!n30)L$SE}tdj5@l(MUBK}bgaBYBp~Ju_&w1-9s`)b!e9rLNHZSD55q7aeB4I}bWt z#0n*1u3!*k&+i_BnrHe%=iy*S|&HO%MJqGb3ykz z><|sw^qI?ghLIm~;aP|e86zwYJd~zMV-?WuHA&n@!&5vV8y)50qc@d7D@rIhM#6x; zuQNn@sCY`m|L~F}J@XzT8`AT~O>do&PjrK)6@E)J2y7VQU0<;6vCL`@ zk>`F3gM!RVPLA5Sjg0bhmi-_u3#8&k8j44&{4;wbr35D~oCiEM!-rx8H>(!5mab+7 zjLn3K*wcZ>X40VTn-hE8ve&LaGS)e|-@z@XcJ-aT;AB6+*UnV3RY1#$e z7hH1MuN+Yy)g_FYWr~pqm{3)`cBb`yA_GcJZOt8^^0FuE&_(jI0;G+)(tG|7e_Kv( z31)Jyx*QgV8CaIWXLZi*)cqB5QQHM|yxo{r(f;W-F?E9bO{74-=?`0b7r5V)`A@$o zQ&$`5jZa~W@~@kL2=RaWO}O$yox6!xc^51iCy)WFP4L(GQYV(9C`NFViI z&SE3gf)sBl<^q01;5?)G4{RV!mJjLI7p|v|@l4zMdBdOBK0#Hg2S0~h81!ui%t1mKS(BPkY*NgC*B>}7()9v8deWE|?Rda` z6gatiZKU^SxA{_O=YlosGt~ekA@WOdHy2<-kgqMn?6A<{8f%OLCpehU_z^~C8f-`f zq-^pOYoJyF9E2ZJ)k2#Q6`%miuL|0O(k&COIO|q24rMd9&Z5Tp5Mv#QYp-*%M2tV3 zL zX(f@Nd)hJ>guwLc>@M|hbf@c7J8GrJ6-?PRmk5*W^a4Tvt<(<39Db$nrsqyTe~lAW z6iReE+HJ_^BXc)p>gElmbb2C4Cc#NjjQ1c8_7JSE3G^qblCP{;k0l%e=X%gAL2Gc@ zvHH?CQd12(R3s_$p)Zt9H05X#>=vdd*dbtRX21G{(kaehJ<0I~WbX2WTRkYep)ua) zlMk}e%m!OhC@PLij#smp-I2DYj=R?CYVwFx<$em(B?25No3O>iv21v_oew#4 z#^RrIOiKf>mN87NQN{98zAxc($yb6jtlD|G<%R$hLL%NI9-zU)UDN!tE#-r+Rv;tc z!_&Vj<4n97_gX!*Z8PmU5adSt9$TJRi zKI~ALyM(}DA`2_=z+#lCjy&=tha@G+Ye zi72T`B8r!KA#<4qFr~6YsC}3m#m)o~k!J$EYk#`R&?e0m8^^GMZtB=)FxEGmE|b9m?ZRej59soh|E(j$@2IG>9{b zWA8S^QOxgVDl!mH=skP$K|q@clrOD!ficQgCZypPQ)+5rhFJjlzf@e9#pR1ck6sq=2JvFR#g_ppZ>YocJe*e zSf%WBC80O=C7~exxUO0&?z1N`;S6>!Qok0ZgCMLQ->^l?`L)pRE+rR~=tVsMTBrK; z#ZAR=b4I+0!h)2^@Yj0ZVxE0I&4QWpzJG7|0_>iD*2nO#{h2{q`kJ{y(tldr96sBd z-V0}FKW=}Uk13rf0N>DU0>H~WHeoT|=tB6(R)Fqtva~5=0TlW~$?x5Qfcz#B%ZWdT zxsxcrX5mpIO_T`%6uPlbesw3T& zsrBIt5f^9d*tY8?=iPpibIDOM1?4n3FP90CdMAlklIdhF6Y0W-+70s`)zFk0^j|HX zHRPWx#A>=5jJ8jl>^k016!gOiKN7s>zN*w4uVo(p8daiFZ>x)49(X2o67N2D|4yHm z=O6^irp#olt6`eeLgj*Lgzrf_<)XM#{@7c){rJ>qht#|b0OpM>b&tHl0kwpt!al8b z%WrxIq*m7= zKrp)LL`va}X!?HJ2C>#fWs8|Z{%mLN+4mvcD0!2&8bOv@whq7&ba}CdyRfG;ZrJt? z9VCjZnF|`kpG^6U z(MIxWZ3a8M>n3pt=e_?zqyt9xccZ$t%-VNy%XHb$v2vcpj3t*R2;%+$jpEM zEpi|U_NtQm+0|aWsxt=~jxfznZ65>(FRi6{!CqB+BTS``-CI*-i;?3re7DLVL@lN% zJ$xwu=J{e(J*{W9a#zhJ16Ebp&0u}gS|PkBpv+Y*g-zOwbfg^+|Q4!8Y$^>vY$ zdouNG&|84x861HNH>{Sy)ecOyA8CNT5I&J5uM0zQ)XztP!@bX=jRF#Y;Cc@hTzH^= z;OPHbcTpuYDdm!UP2$L`H32xD$si0wnUYR9(TU`v%Mbr`({~3+ZYi5&Ge3XCAAb4v zHRJ{cFK;sXE^+JA8I5t@t8ML+9?HL%1V5~xGau8ocHWx*V58S_a)3a@&rj~<02`hB zKiTNWf7$5B|FY4UUfAdt02`evOsZtRMrS$ywJ|Qt00)8uRn#ze;cw6e_wJJXKiTNa z|FY4q|FY3j!~ZWET?}+AR&1+>$<3~7uEx!dHSP)>U3$IS6i@3(JkcTN5q11Si0rmo zh=wj!Y)H;HKpXENqHe=O;>IjfC&ez}o#I>E2;Ev6=Lf=?)^|{T_@{gQf?J$Kscp+` zCSv!6C?BuV9`n_X7;&xZL;uPyJ3c^m9kf~@g#!GkGZqlFy%qZV*p;i{fZ~z3oJ9OV zWK1NMYsJaHAGpGXW4&8Ks+2pTMFSE%&5C|;KR5&uS#93hz zZE)vt%mu{X?$XtzOl;9cQwA}M>8yFN0! zbssS>89fo{9gtNr>9Ahv=5v4ukb;-y~t zten*#jDTz%=yp86<6ukg)0U+CZJS^#6RznZ;rq0tx?KF(-=o?%w z{Jj?XZoTvpN6pBPbfM#}yVz0Y8GO<=$p}WHOB8ZSO(wsf(T(%!^J0WeE5lE_ml;7$ z7ELC*(w+|P42>#LhoF*)0`rIZZY7SVvGw~%^+JSvs6qU(-Z5idRKiU$3Z zf?0u%#VQbxuf8iQ6w?`Y8#K8Utk=vSpCq|}X46-)e(u-9&lI$WA8xz3dZQ8}ox-6s z#U888S~fhs8DriCZznI2*Dvp(+|{~Yo&+Qdgot?#k;EZLfldc?KJPXGnDpAxF)?@J zT3zVzg>MZpT>E_RV_CaydgH~ZEhnE944QG@@1b4rKBP?LK;=H^y;)hf_R=pFN+H$? 
z9BeZIc4aIk&zHiRL+cOK#t83~$;UL-w0Tnqe>3T1!V0#2K=SZoD=7?oQ{4YR@@vUx z)QqgHJ58XZ5d;(Lu2szob5uc-b$tDaEAK7*uSAg#Sk`}*C0G59{MVXVLj?AIDKDES zSI@#d_~ID+ISSeQ^#hIj(+ZNoA2PUHLKR$(hr>_Fe|9^E4{+{kVT2EU*#HoQpa3nL z?Buqth$m%Qa#dhLxz{s%DRE%QlEPr7`Z%drsh3g6w;7b<4L}r_2rN2;zO2p3(_bJu zeQ@@al=lcTj2YSh)6#v!?T}hN0EGY}iMr(P!dv-BVr)Xa%#&f7@CBTyj9xPH5?tRb zOtITHnZ7Hch~g1MF-@INO#-S^AU{-am5K$ZQb8@QbLT?2YA@Bze859)@V|XM8tmxd zw&G#bVC&8lH-Bs2TQ9x#AxXLv(uO_!MQpt}UF`gB|GPv5ME;FhOFgnz5F%22R`}Iy zBCayRtK#!~V3HelaF$ZX>qK~a+x(Au>0&y+7F7Yn0dec=IMa;dxgHMtNf)l)Q{ zgDwn)y0y}CB1kC=UCXq8Fw}QbiTPIYSLK;1V$!6BvJD-pbS&;=PbfU07yNwN`|*tN z8S6X$rGw{g8p41y!(IOO;N(c#`G~4!G#lSx!*NTc}jwr`nIvYOZoRi6syiS&B>T zaE*?V^@AR|ryTI#AmUjNAQnQtmJvpg+5s|bn>oAP^@Khu`#b`L2_7X+c^JFf??Rhf zLSg+S#|v)trC}-5kmnFkY~cx3f~#ge{NJP~gk-y4=YuO&iI@Q-1Ia2$@4*$TWnG+2 zx-HAhv3HE}n{_mr%t$@N?LZ_LrI3axmJx7F0E{Qg{~b>b29GE2$n$`J^<;Aw++4v< zN6yUH?sZ7)L|XD!x#1lp!;cZ}3uh_6BVSjuE~7-IOEO{2TFvj=(0_y<#<@3Y$v=$< zoANOYiE*!nUGe4VIH|q2trrA4S|N=M=Kqw@-5!B5IwDX;??P_mi4HxKvcKEzxEhVi z!65>a6{6$}sQ1gmc+LykbIQfdcL+5T32?LkVnW#NM#|PkqQmbD6Ipr;+8Z8={d-_{ z2AV};4Q@V0AH1IIF)qW+e&G%aZlhy;ClH|VaK%Pg5^#z(9dL4_NiMTvpHIkey5-8b zm`zc5f{gXz-r>-wj+lNnHl@MfqIVN)@*e$V@+?WC8*)b{H$@z zG)*aZfqPCzC%z?QEpz{%$9F)W*~2oGYIk_YJ+Oa^dp77(_EkW`nTU#!fw!R+Pr6bD z#WCY2-(|Zc@Kw8;-Szw77O<)gHE8Gf3>AeAw(aV+A zKT3?r{t)vZob@WUKd#?uzC+-ioS{`c4aNWFL@24sh4y|$EQE}}j#KH~F(9r!PsLb2 zcmgobL?RVCpoy;jYzBsTCQwp?N|?3FOZ3^e-UMx&;h+o+Tt$Qo4j;>@4kF#TU^h~_ zLjPc!5#MztjM&s0cno+b1Oeh|XQp*ay7%#w>=;@U0HtDA&Et}xm~{4ZmH@=nG-lrX zkQM_PAg+S+3xZ8k9eAk)ngd=2a1Kn)raanB%Y}$4v;$HEAovdBz^;t+5&)%&7 zRC);}V#(^_S8a;7C|y%Ec{*@ej31^NiP^}W9iXPq{`3}OLxW^v*f*n`q@{;>gRSfd zcDF)Bg!p_m=7a{1tq|bgVfYo%#w8#Gm}IT<3Q`cEFD8(*qf<$M$?YE%EY??er|LM0C$q$)wIR7)ai~H)(o<6&iX9B;WAf61Ru8HKHgvd;Y=f6d@tZ&?d9c83>vVb zS-}D3ll_~ZSFaQ(69?STM1lF_uaMg`$XPmd5L<~5*A#12lJO8ks1DjDW6hm;=V6>y z&Co?Ppzfq{-Y;su*b%cmH!&}uJ8&LFzTtE%1HxRLZQA@{xJ4udOIRB}dlm+ z0Epz0-;Wl7kfGe${>v`c2GX(-Fr~P@PcKCvpq}aYWbLN!PD^TXz zVe;bWLt;*&ib!IZGl@qMTt1}Wa1uP^_}Vp8<~LO9gRJ4EukHP5-X)Dz-Pc;qv0U%7 zev?`!DRz)rLoN8RXsHpp7%KSqDwfP&--9}=$(%8~Jb>{4v?|+ElbcD{5Lfso-{0GY z69-r6B$DdN)rsH0KKDuiX|+;-v=U28vlXjqf5%bD)XnM}Ow^sN zC~4X><0QKpuBSn_T#MO!z!$Uplk{9iY4yw%-C!n)v71a#c`)WU@jQ+ky)N#_7&Jr) ze{R7VAQN?#~4hLxcTzyuc;Oa;nIh^FVe_V2B2lS(%wipcp4c-t+nMdBXMfvaC7a zT|qjLo1GAPV(52rnbCb2wERN%Ya}wu@~+D8HGi8aMj2e0|NIXM6ksy|V11f=Y8OdL zJ@8}jvPOKBoO5fZtE!;i9qIr(tFy;_)U1yx|aUh3aB=dzIT^CsG+MPxi2k=^ye z-JYkgs$e+}tXcpB)#O{r)VcVQ)Wmi?QBz0M{r58!&FUVCc)CRb?Lwj6$z<)i9R{0EmzvNo7q%AN`X@ z$wLAxwD>l+ka{S8xd6OmHB2^Ss4YyG*I0eq9&N)nro!mZ{4$>ezgPY%w33W~E<03n zcZL8?4(l2~`g}(z{XL&X^9@puO*sHd2m%W&(6JxjVPSD|{`Ig}TmTQN-U94l3FdH! 
zth0gJ>^LZW^2(%viQjsls^9!!T(In?>|!~e?s8FMaw4o!UixA7HD@>Lm<9VN^em)p z{H#{2Amb-55RFYOnINE=)jaeF6bet2VS) z%KrX8q!!r3s(siosvrioXe`;A`L^Mdf&^hG;o_S-TYnntB_A0{JZd1@bBFp$hRZs7#A$HkY?P4 zIZeXSa0EyVM7K7!egU{fI8lJi<29-Pq+(jn)4taHdTjiY8tCkc)!B%%+Uw$M9m zYp9#yV2U-M;aL84_SeNK-*sL-N*YxF>g;Q4zYNHGX}Eh7WOpZ^-(J|O*sx^}s#N_F z3qL`qs*NEBsmZI@MR_Sy{%3f|j*$bRVX9o2uS@@;9Qd2F>`HTW-hR-12J`CDidjU!5Zjh2rC8R|VloA94X(<5-0YzL;5J4K@N_VG(w4|gohzeM=hyo&Ez;7<= z*Y|zD{-5W+`Z)K@J)fz)-Z^__=5wy*mgwfbCKs1m0BeTMETzqNNk22XO&nTUjAshU zZEv-b(rfoeib>{QP%b`b)JW=tf2yJy?a!tAj4 z0gaDmVvErekDRl<%93?y@y_on?@wrIUHba*vS;pxeGHGo5pZNuh_PrQ`51j)Lvxaz z2EMSjy7d!-#QWj}>M4~e$6z69(){2bRKzq>@s%Soj#G)Q=<&pkMVF&wCh0+6R&;zX zQ{A3Zi@B)7_A*Ab>yFCFrpxbFgNHdlfWJ${_Nc~9va|upucN*dL2}^C-%VHLJ`mtf z|0t0YLxJ8J*<9+a9oXT>@YN>a2E>h(q1=PGv8+Q+TmN=rnY9z#9nc4Etk$26o5X^tO=e&Axg&Q9 znASdD$yrmN%xS6qh*l(6O5??!3DJ;^AZdvajxV7Gh8Ft@Vra?#$I#jXhSunR3@z+$ zOKTpnwBCm{<(}sUmKJ)Z7}~N@uE;wyw78dmp|uyvI=mL;>M|y~S+?CgXZlO)?UTl+ z@$KnfPp0Hg?(We!`jc-Xgb9Z7;LP7m->dFzZhV*Nz37+&8v}+nbhu-1h8BMsdZGJ| zp(T!eSEULpE$M5f)i3eNqeQnD zbYfvgfD^0e(213Les!UEFD73zg*EI|o;9FP;Ifv?nX7LpgVJh)t_c^_m|DN>;4&*? zuLDjjnaT%2*x6{(tM1IpWWonDoNn{zY1f#qvbji>6cKVHdXD>S$jyx(f9MbG|-Djj$6HuqD% ze#~>=?807si^kMl<_6c#47Zyszvp*}Jd3?L;a5KGg5A44y~&%VRx zWL(;rVz4UZkAsYZ#+$9QN3rXnwLX~5SyU>oYHy5GA+ zbB~ljPBa#5XBU~__q5>hDZsz7{Mu(=atIh&!`s-yh=1ap(kvy-Jw9hmm1C$bX-q&!jF0~M zi?(&$w@eZ@>p6P}pH8eS9M3-ZVgF&wgU=QX*reslmVl_f!sSvAP zP8sqvlW?4F1FRLwzhAPxwn4SPW)@i_fl0`|J2w4B0PJfIiKd(eJfI!ZkIw8ji`y3d zItTVm&w4U9J&QO|P*-*yeI-5a^L~zjj&8HaJX(O)v?3w$fkG9suU+|l%qO90S&1LN zMEgnJz`lZ7yfXH-xoY83G6r0YkFYNNdeZTu5)P$F^yrCs`yq8d&pr->2h;_yD&QF+ zJfL`3e<8`qug1i4_~Z^%HywHQel%38wj%ZxI1czg@F=ji>Gk=W`yYj)Cp%hKJkRhomWEVlQl+J53@w zU}&M(y6w3FRt)2re&IY=FnKjZQ0K#esl%&#`4mHmCK}v4A}k3EuhTfgOX?pgsy{&h z{g5DlA1aP{0!Gl9|34!rAX$I>HzVjTKbQ(If^ucO0^tDwBdAceN8DiO_ok9C@sZj) zfLJQ{ieKT|IaR<2x&;_P!|yl-F|VLqg9cbpif;=sk@|3lA4{Z=9Q}QHlm8g zqu&0j+w&1&3SF%!nZe5!sYV4c%Zzvi5ADABJsI?qOW~nZ%+^o(5K&?(rbBV$D9wXo zqR!5-leXL3;Z9WIW+RMW)TP#r+kDFoe6(;q#V9gROZ>x2?B=bQ0P zhN}Tv#EzjP#6L&gd?2^)xF$ZqJrH0O)FcQ!zwJ9xTM`iR%*;)1-Tya9)tbydBvp-c z%~+>XGYa2FL%ZH)MKiDHf?OBBLU>=@?geCnNig zB?p10!-^`&uI#Nr!WPz#k0@e7@N!jZAbS2Eg)ViZ3u-O9dR+Ieli5>BK|Lo++*Ob6 zB${v&YqpC%8(TOhVzx=bd)r3ApQKtqu_Fgte#lbAv)RP*kr833N;JRQZ@*;#SgNG| zi=`?^i|df3N<~GBN|)-8rHX|LuvC2kEL8&Il1wE}(j73e(r+bBWei@ZHay`QWOoj* zR4w4Ex=KX8T>>OkR)q)BgQOj_vJu8|Vg{`GkH;u!4fvh0-Mv#6O5=XovZDL?v^se<*p)Q#P0$^{QniM# zRB^A(4^Aro_{x<8w!p`!f-UgQzhJNh{y~T)G1}C$+RU^r(HU%k?-kJGSJ~0#%`e=A z^*KqL{Ux#OvHAb(f!`Rv1NOjU4)?&nX!%Gu@Qbz>Svy4bz`M%m_pKz<%XWY*@Qh*o z?hC=2;$RDWrN0?*Y}g$Eo2)j>;tlUY3`6(?+2VaK57hA>Q9@UR|B6}RNJ@Yqc5lwvc5YliAa^GKp#!xG)JqNGF$Z*@f#rY> zbS>Sf39KC^ib$~E?bW4n`sJKiS+=vCvC^zG<+L*;59mNou7QAm?`#n|&~^(z2Wo0h z7ygG1)C16gvb7iep#$aGC&(K(qywD-bfAC@6#EI#fp&T#bf9~?kABmEy49j@(!7&@ zuM~o5PcW7MW>;v>so|Aq;km*SL}b1%zLFmF)b|$*|#o4ip2E zE&i0Mf5^v$|5`EVLgVPi?`<6&GMXgj?xa%EG9s#;1wLC0b9XO`o@8L(Lc1z1oM;An z;PHms=35U&_oJK$l<27x;3Any|HW5T|HWIy2zaYZR$mQS%_;la<9MsW+)b<`2a(mo zS5lSa5cWp~!?GLGGD9K}oxjV0$r=TJy@|hZzI>z4%filW4RlC%dx?oUVVjkm&qS)( zIS}!$^Ze_mRo`1K?@nYCJQ)Uf9>z#9tJ(k;#{T#^%H2BFC9q&@6j4ya` z)P6z3Y`Mz$23;d5mdAVV87eQlv^0HTC(x6eA7=f=ruP}WK=o!8>L*_Lg3jA+L7iP+ zMDb%gMM?NA-mW}~YJoCqeqMI4lGTlN%*Prsrh%L=Tow2q%x zb0@fBmT^QfMG}N9Uvx~VmF+)B?7F3oZ$>~Rr4iIGGhaMeSximUSVAe6>a9@iZknHS zzFqJXG+=mYz1Zg+!_Y0;q`{XuMfWM}8W%t4U^PuKSRZ2YbYVFo zT4?4}e9yIo6$-e;N=YebPK0%hX!o=B?Ahyunc{(4?7LeHYA?sJ*@ zbpsV^d7oDMbd&^J2q+V^`fEe)kLzJ0v)(|IVnQ{+fl_#Ig}Az9$*x-XzSb)vDvJyi zEN!TSZIzrd@0+$|*U$4}g{JY#pZJ7%FU zZaij1Puk27^GpoC+_3$uAE(CI;jA;^!1T`2mH 
zA6_H{Rtg2RRhkk%652;bm-0Wnc(EvlpIp(q?V#b{H$xUH!(-{9-v$A%Y4Gte_cL+--Y_1ab+t@ zvF1B178T3tTwm|yP1s9#{9}F=zNny=5l!o+r^ic|{a;ewmFiUbFc^Y578WQB16$Pq zd#i*Eil?iadI=m|b)@pp0}Mt&rY`9eBuHK-unaQc(~S@XY-9v6!nYbB;<{%B&=Ld_ zcYw@hV~DZt(=|vD4Tm-Tmf^N0h#2-Wf{xZ1nLzc_#5V=jh`7-xR5KAgY!9(x4{6;+ zJngS~H7A~P?i8cY*a>;l{`_fT-m&urTn>iBuM)RVs%!^(mn7oMlBX|H@RJ3vxEXiT zeP7(T5`63A?P0@>5YDJKPBHfQ_;Rb$qu-d`3|HvN^V>+2OUL^bzQ02mH8|J#o{TSk zGyi0SwvcD|1EzZq6CF6SS+B`F#%_@`HjnLzr98A$6Q#3N(ur0zG7|21OX}z*@xZWz zGxfFk!7WqIg@fvVsYp7@=3l9!dGb8LtU8*qBOn~py{hKhAduzE!z8+{0E&hW-1@Yq zI|#Y7VuWqoU)Z_RaQCGjv-Zx`e)<9(twE5}|rbSY3jR*?_e*YD`umb8)eSiOHDZ!esWf;#UJR?>TXMdQ1jDASi*to2&~>H8kR!UT z${nIa*Sex9(eNcO{0RYQx;i~isE!QJwKupMwhDn1v2--C`7QU~8Erv{-qQrr$Lw;J z-!iXfbNDg&ga%X_)Uh?0&F2uU-zs*xO}KTMJ~gV6ORXT=9Ul}V(Erfqrsis@Yj0h z@I(^q)OcR$X~|$O#F2^c#(9t4lVGCH&dVbnHwZeHyC{*6JaEBTM6#&idER1GO!9!L zvq;EPVXfH%pzxPhH{v9S_Q+C-~5$!_pWe4Fjv(-9h4GYmrjx;>Pj4yQ` zN77V^ zGp4Fx=`50-Q#nggB)>`s-Vg@mMn={l z-`9eT3Z@G#G|oRsXw$M|f=l{}JtZso?DJs#O^ zE+V$X|6(_RUR2X=aAPBab!2vR$7QKBi~jp0oxeVH?_T%>TS%#mv&yLHT)ynALQ#%T z%St}?9N%Xa!#g_s%jec-fB9>CFC$AP|K9t8`!Vw#*luX|uy>cUiD8BlKjHJcoQpb* zqk&Ye%my~9+@Nv`pn0aQn@l{gVr}I8Hwnryk$#>bcToRY17J>>|9=lcB?5vjjd}vRU zAX&V9)cER=ur#AH2Z?eIU!SRuR8&?c5#Z*7&$1eS!WRcIwSHkD&q^AcjUh4ecy4Cl$aJAjnHMcVqb$yjZPp(=dH3q*IAvC*?xnvn zl%ewt@?q+k-I2M;!T0SJOn%sbVl~GNeyjgO5U4BG!f|ES?F`GAoPUSq=vC3O^|dMGg@E z`5b{WlOY9kJv^HX3A5mZN(B&ra?}8x;Xg{?N}D47eK{ShkOI-LYz68)X#}N}6aQN) zlp1K!!a*sJD8`rJ@VW=N=4U&+CQAj^Q{0EwhN%!O%SP3CZ!D-wfaKpLISL$VB_Rk_ z(ZlPv$n~-mavi@ei4Px7gD6;Z%n~0NX`oOAXVCv@@pZXFluH_@XOqf=(p(gUGJlOi zG5$}#P(eV84z5D9qz24H+(Ea4o&T*BUkS9B;eDjW&rvg!<lh@2aSm_l>LeUa(3!7(UJfCdGLI6bhn8fv@ntKYJ_iS~|qba@smM zy&2r#1r{Xte=DHKH(=2WNEbu82?u6C$Ki}_hzV2j3m)}{7+}R9V1&+g1G~b#9b`A} zBiZzDHV|NqOpx>Z0722hzL}tfOBlE?6H>;U5`sS>>8=ESx$VQV_|t>iesDgK2M{RXQQ0sV-5G+cqk(7|a%pgv6q-YA8r;h7@vJZB$y zc1#X{ef|R9z=O$(L8XgiupW|TQ{iCw#Q?VdPk{8lVB^&A6i|o6S;5aJ4p8T_Qc&j_ zCk}kA1c3j92mK4o#tmncfVv`u;CUp~634;hN&)Oo6s*5sHzi<~QlO5f0GA_an=%69 zXZ;hf;V;C73I^U#2d&d71Fbu&4f~Y=9DyFLx}h?N7XOgQ7^Zj((PB>M!w29#lwgK| zhb;igv>c%HEpgh9$`NgZst>1q#|rK%2iglx7?{HqXggGZ|85jn$O~Eu1H1=i5HTjp z4F|TV06qIdTk}^t$K2ug3eYmC8}JJxT@S#)gen2-&ynrFV4PUkq7tZQ-iI@h)I1Va zsRm#9HUj%|@aQjCWE9Nz7^vetf}N4HEfoiA1B1YFxPKNrDYk#XHqzh?MBSnQmZ}0N zV<`?6UIk!(wut-%J6i@fA?mG9;C&?ZZ$e=FEPvKE{z8;C!y(l`ld2o;Mbe>uTt#d( z;DyCg4>3Up0~p*ZMu`Q&hcDHGugPf$)~x}`m<2cvNuRFZK+6b7x&eT`eEba(l?Nb~ z1^~LW1~b+I<@-NjbC5#nXb8@%h4_%){7jhZ7}&ZIykTP@BxQyN8zCtSKR!-iYXYxQ z;00j9L=)h0fFpny)<*mSz!|`V**=2H2L1pkj0F)+plk+07covCxdZ@Ln?XASNpU&V z$nBeCh`<8lw*X-PP|hO4MMPkszzIc&;7SREzsrG`hJfqMhC;9mMuP!mjpXw$>5?LghDAO|X-gXM27)hM|5^U8!d}Qa8D2?oO=cYSO_PiAVQ%qBIsZmL~xlbSkOS^@nA)~760!!yAA3O)I#|N@7-5_*|LSF$w*}2>5 zdAeWo61Bc|?a-8g*#1W1VHAo~9hk4E F{{w3#9E$(| delta 108909 zcmYJZV{{-*7cCsym{=3rwlT47+nOXDXOf9Mv29O0v2EM7^W}NJyYBtdt5#K?uG4i+ z?b^FfC+{J?ry*l2{s4!-0D%I50RaIa0V#U4F>eG10YT@i#byBmj7)6v9RG1x%?QLA z3U-s(<8AlF*>!%r<8x+eLe6myF8$3FI=#j3iq`AmzP>*8?M=|3P@`WFPqVmXGqR&D zYt^Hl5lq=Mm$b@bv!fjmAN_U=u5x@lyvjM4f=r+iZna(}-n!8GD*KCOu_BiSoxo&fO61aDSpE-)2D;;bHmhGt=Y=C=~*{M zyP)CJQ%~6rjl9{e5&Td1ejS4r`AGUe@PsNQPUT!0^ZHdqLL~IS% zG=X>0pJWc^#wyoAr|njp7Y6$9uHkpD_G~FaB8vFIZLymtt~kEvuV!)h zUfg7H{E({p9N$8G+4w^M>MMG62x9$Vo7ge*MB=4`DxNEmmmGOHL&I`vMLXpc*FzQ` zjr8RU#?Tc2um4CgCB^U&@3NaLfmiv<_j*C2zPADN@y^R|_4h=`dWn%nTe$t|UlN&r zA*vt)MMeN9Z=g4KM-+jOBo>FdBnWr`sUI}=7^r_VgKb4LNN9a@+i5B%z2q49+XS9~ zAA{}L6DW3$m%j&+BZ1zMCIKiyY_#Vg_~LX()I2KC4N^_Haq#f#9^w*vd-t$sAezV@ zlu*#YX5`m(FR$Ix)8CrfU1&r91?IO*u$KRS3MeT_WDlM#5uSh3(S!T?6G#vgW~!Ii zeSD$LF@zT!Bvm6QAJe5_`c)&TNG!-kBqS(H5HVH`1BvCGxf_F-mG8bB8=(&zTT$O&LUI75t{gHxN>m)=uz% 
z?t;i;_Xrew=>MM9sQIlx`&*f=dY>z=yrb*(H9M_&4DGhKZ1XOJBC zN34XFO?{_UQ6J+uxhGrTQ!9m_R*p}CJTFmbQ10>oKF`l;_mCgWo^$+z@=i)zNWKMB zX$)r^6j-OgYj=+U4SU?ESHl+Gja!KY^BKx<7Ohs;(OKx~C3wi7g8h=*nRUSuW_4p$ zBpmWwKaGMo9h`}O3XdYyC0%{oD_9_dVWO5Mw7ywDIV#D|A{p{%OoDl53( zxUXUmJX71GnA|XEEMOVSooQCKv^V_)Mu>2DM5?JYiCi3kG|CE>R^z(RpQ>xmWHc6+ zqh3y-k)P*?0S(rb5JDT29m_v$i&ps4>moO73(8~rjm4a`N@dkpl*5HbAmn(<;IsEh zec8KHemf4cq_Xq(HmG&9rQD{{ZmBB@9N3D-Qy#&p1^DIQT_$js+c%G-nA;GZr4qqc zbzd5}B28BVLvS@qNQ$ZU{XD^r8|yBS?ZS~F8oxBkuc@&D5olf*}ojaWB_uE8_ zI}mp_=&i-FHXkI=^I6}e*36Tx04yN|5+1v}(P zEP8DOM3AA%<-72Z{$ZhkwB<?bL)T6zsH5 zC0S#t-(Y_I>GPh#;%x>`cTT$VNcsVLAXlT_+wcuw-G^EtstMafkQ7XWp8|?>uhnMZ z=e&@+xxYula7kn#-P+CUeT*DLnJTfSdRTJF7PSjlLn**`Ya;zifF{qKF=i*;YAd7{E&1}V263duk9 zl;Dqm3lLMyXB1K=HG?(2o`YL%epudXQ(4~Y3=LR#lO7eLf~?DdbWzAt&S}G8_&UPMtRBGs$um*`AIphcktO1>p*0>K?h<90+-(XkHWys&et^64QslQ zt6J<4VPhnob;$vh!@FwRWE@+wDU^S6j?eawHb5kK(CrRAP28}5O~&Z^Pea#t3qjny zrcF*J(WR$9IAIU;A}7TaH;|_gDtFVhdl>?6Mc+~r1a**Vf$0X39)X}@S;=;pA>sv` z*AJ9-HrwxJXZyI3o!oeM8ww%k^ioJ*_x#Auu@z^jX|)_r*AZuaF*NG_Cww*WYh+dn z7j`GNlM&#`{I{gkhcy84-;LI&$YxUED^YzdVhI@BVTUTV1sDj3HAI3CAx^yy$`~jR zGY!|ngdh$S?ty2hh{d`w3c$hzK7qQ8H`T2qvej)i~*;&?C`MX-WT}) z5ECw;_$LDj1PDk!?0-H3EDWA@wiyX>G3iVwp_fT7M0A_~NI+Y2V%3ztxiPBEPu81+ z8bX>_^BR=BZ1Pgbxfkl@C4nGEzK!vYoj&g>xtSsl$gE?r!BHj(F}l*f$_WEL)q&75 z6}!IUyz8RE3WmqKJMV4J!s(TEp!mWf4)7~HoO_X4(}6o2bKrMCs#Sy1+IIOe6EF7G zeT%f4Ezh3hRQJw%Bca8YWL zGhK^OWx~;(Z;KW~t9j2}vHNvn^Z(gIJf10=zxk<1OB*%JN5RL9O{6)jPHGOcw8Y!L zcEO|fk9F(6fOh5pMGgrVCjAV_S=iz>7~=eIEVU(Wkf^i0DZS8 z8x83}O}1TQQQN_H2ZvpjkB|ptgFSe+E-Yrs?Tg%9bWKhHbKuKKPduGd<LKIve6Gn)E_yfoUpdm|gEpCb!*_|gX=&<=0 zQr=YypoTG7eRjWG{=-(e;Jbb&N6lEA?m?|N#OGqq6Eyv8w`ov_>5`BlYUP5ckweBK z72z#4Njc6`GA&RX^tQ&3$(R0C_Qr<#Zx8Z~f6T!X*J~gp;uTl_jti%NegNe-2IJb3 zX$W`0pB+wnpO>Ha$A$vo-I7>Ayx$#fjqWx*fFHP9(;@u;m7sxXO=pkm0D}C z^VXSd-*R_6p6~T zQTm&kLs{9N&0y*mNSN}bSCn=B9GoasF0gArWAc9oSWP~&`0-7#+8ynJPkxByVMDfC zlzn?I5)$*5;s6qZxxThIaPWdb7)2KkRTwFijnZV%-vz>e{(HW!u_ zkSGj40WSXDx)U1k41!UJ)UOKtyoDpRjfCnMM(DHld!>ndWI!p)B7c}m5_A=(e6M;J zgnKwfMAMlu*b@!Su!F?k>>ljbqHMyjrL0dGJ7*JG)f*@E)SqG@J|$(eLMyalvVg$L zf^J!b0MDv@K>G~qg7N=>36Q9&=dj9z@J5^QGDWHLY#oXd9oC@b52jOZB>sq9!a@M= zMy8>>cE6btp~p$hk>ka@H#<#sDOFr0b_c&Lo~(Tf;XV+GpiAG7ufk$><-vu@GMtTd zL}o2$6685nrwVn#=V)Qucd?g z#*a$G^X)KBPRE#r<7Ce+qf7UzGIEcVGaR;@b*P4mtLD!k`1)-6Jp_0+Yc;e>aS#wr zNzepgLhOVh9#VjrLKAY#wpd#Iyz6pFXr@$ogoeB}$d|C|@QI&}@lo!akGZtP(j{1r zh`XXNZ|j^wa#r{u)V7K$KLai)^+i2@d-sUQ8~p<6_7c}&TJ1E;fBrIMpUyE* zYgZwy%vEtjjEh;dmk^%?H6FLALjD$!5z1iCSs81pC%W+yANAyn+z0F_beH+zxjX?^ zzECl5jv=-~|Mv8bmGKsaKVJV(5H^TZp^nm?;VKwzgA#U7Qr$5y*@Z=nA2tqEIOh2Y8!nbw`yua<14 zr~L&9{{uz0BHsO|xzVRkjmOybzR#G287~~1N=>}2&AZx}0BdJyyzUm?(#lF7T!8@|N z7t>h(o?bV%FgBF+TufE}+d6dFG+n&0@Kb-7uKV|uvN`!JSuE@xH>FdYmJu+@=4ubd zH5Bvc(`g|1{`B;;e{;3Jon9>nyfmjzcKNCEw|TuhZ||aizLZ&KufG*q6K(k2_Zl$9 zC^!%a`aR#*-uGwbuc`tiQn-tYeG zV7c1<;-u^KWuN!)dDG|Z=4o5-bNApeTd>2|$NS~(`QY`$r_<~99Uz)pe735fMrOU0~U*&2n zeR`~>%j>qMq7$<&gW%Ww+PC7D)lGqqYg-dw#sKkEb*&qu?6kGn%g#-9BYK|w#i$KzWCWTH)h&Zon{7zg0(c>A`C?_AgK z`eNHa;VVSPy__6=_F3p%66iWS~I1^N8EJU_Rup+Bzg9&TT=*Dtf}1v=WEZ+1=YlmS1HV7#>o zg~yNG?2p&_`_pjge*-C9Gnq9R7?~RoSf~{Jb&^@3Rd|C%M4_#vclK0fZ7QqLDy!}$ z>vD37^3JV*Ub`!M%-UmdYw2od+FQ1^+v9Inw((^6X|~-Sp5}SN>-X=veB++~yj&br zrhNUIvjsYTeijP?ewv-C3%0Mjvsitboohu4M4Q*s+4>*7f*p>t*K_Sp&%M1^oBIe2WbiBB?@blpP)x{SW9kHH1!gLiE`}Ett{Evs;GoxGbQj03U zNt&PSq{^oId&y+y?-czacNg5X41sFE8oAw|fHaB$>wDD8b%sQUyg$V=k5kVH#F8q1 ze%>k|-gjCckX=?pSeB)weOqNMw+_4Q&WG5$++i_W%dDE9-G#;2QBqJPzpJScg?rfO;yqLW7b#s@LT)YzXFfzs*NiA 
z4JZ7J`aVh$YW1*pZtIUHBo|n8zAs@Auy_aa8<$MUvtbji78MJIeEZhpjNMT#Ml?cm+1qw&mlaLVyn_sHlu7i(6ciU)fkhAJ7i;;xEtLB z<$P0#y`isnYRwzi?sj&> z6v)&m>a}i^!|n0B_PoGz#S3k45gcg7KV9Va#(8Q&OKrqeF!@bB`aIkCW`#{!xO$Oev8(R- z8yERmHlF(x?}v3-3G=Vx?D5JjDUK{4-wY7i#tFTPgIE_dPyf@%BE&77}mgK(-&xGfb0wh!&7t>&@! zPN9FP)ra|>V!slWntu}r?9`_^W0rk1!Y+nw%dzs**{9qrs218JcEx=0&b3^BqK(7t z5i(z&W4_~86@|LVgx0l?Qp*gj{WFkT{#sPdV+SH-6=`@TeRg^~<$hSIygErP^_uC< z#qK$qD*d5AYQXsQ*2J3tQ-PXnqz^J?0y^Y7||y{*&lh&w~3&Yos0LJ z>sLtz>*o(imj!{W*xv8QxCTP61_w9frzQIdp{v_+$H_vRCpIivW7_1&D|0^HG2_QB;%Sw$BR z@xA)`4JI6*K?d)){jJNr)o!qUnIXvSP5J|;Q0>zuzdZB+HFkgYkC-eDLVku?fvtRv35^e0RZyx=M@O-2kmx(}IxFs9gL%``L+UU*uK0QYt8KWd1WO7Iy?_zt9> ze6-kZl>M{-7WT3C0`diEv<|s1?94{vq<^cp zu3^3ytBE;FT2E9-X5%VOR`7jG!X9pNOh_@ZKRI#%SwXUZ0PayxR(oReRUQ{2qL7C> zsbeQDL|ejh>y~sY0!5D@)Sm=UOXe>gJMm*T)S#nsP?`@ctD{Zms4`%#CL!D_rRZ1k z;W5Rf{h$l2LrDJ=T?}Qet4NI?GdCn7*Yu1<(|qif2%^9@qT)!7^uOU9zOLVLn+A3x!Spp<>9&Ag;Xq7+~FRMdgy zOI@h{`FlsPHc(apG02I-wJ{ouvoKc-nQiwxN971cx-6U5Eoyg<9 z*0c!*i!m_FfYnJ#q4n;Fe&3jIg`$`dpT9%f>$d!H6M65ckEaRt-Zz{P_oiPy^a48k zNroR3y%y(Y?OACbVl_K!Zv3^aaiTJ-qRRNlAFw~1OoMtEPP|=iynG0q*(cUj(P>wL z-cE1rS8l-jR#;Ltg2Px=tbdI8YdsAiT#4u~&1Js4gQVG|H_h|DQuy}6!H8&7f-qtj z=>g<*U_CDvPjEPgZovw)tIE%D#VO6hT~@_uK^{?m)#pitOZGz{+gu2VjR z?91) z@I*^6`sU))p2IJtly+8@;U-quX^HkQ7g1uMs2NwWSG4@cA)v}Jlua9c4`vfW{id9F zcqD6#S$BS8&+)P^47I2$2zdWSJ=f>h8JxRe;>NyRf^2ApY!!G)GT8kbNG_Q~f0#A= zu4C@4ATK_epOmEO4G^_{0I;`|}UTV8-wPjrbDjGUci^uUVld>jPyEU#di^7gm5{mxPkDA z@DYvjqX??$9IVn4Vp(c$|V)Cy*hajJ!0!MlH6% zH80G1L*RL7TS z2<$V||CCSKZ&JZR<0?;g#=Uu{*=-xvUuU( z-o6O$G;>JPY)d@Y(g?TfLG9eLd{k13kRKF^8^f8S*P@zng$XAQpOppPr*Iroeo$Vg zZ2S=Xa`=qSC)RJrEb#NHKxKRnxO8mwGYGexoR!x2v;NAwQ{DbX<+s~W5MKq6G0_7p zO|DM?kR>(j`C2wGh`+=gS8r-_#pCpx4O8g!A>(%4>NiOl1PWMelinSNcpiheU7QvE zXu%*~dj2upe5;Z72qVZ!+rTIsQ-1~OeaEuhK*G>jnBF#{*U0+ADd(Ydw#W3n+^9P>mMs9!QJ~z+k^3Cv{{7267axbo__rmVYIoR4C4*p&Nk!Trbv!}Giie~*tr>df7#UlrfO3TbRh=UK21>w*8Pn2twf*V z3Gts)epOELch%_F(o!T%YUO&u1qj$jMiP=J_LswgO1R}rGody;d*Q`Q5 z2hIZYa%aQ6TPUTp&oLU;vc(fm#~5yi%RkeOzwk9nidZ=&;#k~powtoUo>>p(b1lyZ{x>p*MZGR^4^juie)jT80lf3a!}8v5F9+!*gk zHcw7VSG$hb$3)6k*H09aUT>HJ2Ww_T{7B?SPTvHCV)QA_m`JI@k4OlRhptH|HQVwF zJP&8oj@(7>5pe!Nq`T%A01@ca#digyBuk%G4YdMS9KE|GA8Vn;n1cu%Pm%KMgQDsi4m(GuKw-=ZIo_S8J84n~qzqY^@mo$sveUBqaK_oub8U1q~W za*RrXCn;6zuqn>n1`fnW5-@gpmQ8ga zeTv*nrmV1uH&9KwX*~MSTzO1$R2b3ym%Tv#D6!8fqb$ptH%Wbxzl(S0+D-BqMbu_s zc*mJ!OS4H{7c5l(+ML^7&0Z$LHrQ;;4l+Wgv7p)=k&AN3j>QmB&b)ce7)tE83sTpA zVX?3MoU=-^EkLdJfnWZ>&pe9XGrMZ+xZ4$zZN~#pZ@uVbt}20cVF2X z^`RNGi%cRU0@R2RcFV>?j>I8>LrqrAl>Igjwmd0pdoWLe{0_1$f(R(prP!FG>}p^E zVF)m1W`XwF?R^$00lYQTk(tMZ#Qj=s1yAv5{hx{rEGH8o02cMd&H>x=@{ZYoKTE(T zlTbwl7nTr}1s&*A{S~VJg9xG5j797Y+k=`l4Oik%6^!<91K2m{-0}c`5d+i_AP3tr zA_ngsL<13kCjxO~L`SmzPDwvnd`=Oq-&%BBsS(je=u-uAh`?~XR#ecePd`CNQf(Yn zZhiUo(tUaXph5kj2yY>yl!lcjVs-Qu8Q5w~j2t+|VRN!yPOe=WANmO zs&`y9OwtkFh&K1QW4d}%^-&1G;s;;i0+m1MO~T7Cn@Ze{>UMyj zqzo2FC4OY4`!DJ1h;HK;f||dub2$IYG!~KA;wi)*vG*VE?4oeq^%8faMD-OT=o}nxbXI z^E{7HbroF&PI)_E8{WfsCY!=qA8|+F&NEfM5FT69@+VF4Mzc3~x|NxUpHHtMT`yH2 z8R)*#dLjn>kGRCn%MTRL`hw}VM)=wFNjZg~=w>@;j0xkCK&oqVf-Sb~l)1=}AH5Wd zKAfUE{2m6ozd{J_R|T6JM98@?1KaBPb?mQmD~l~Hv|uCs{c^=C{$H{zfvYHeiF!0j zv4PcO!!oDw!r{8BNr1QKRZWia7$$1EN7v=8=A7y3)>6|8;br~ zb9!POfjWP*F1EqR5Bl&TT7}$BJ;+o3MH}`LiSL=gOMl{2XLx5&(G@Qfd?U<^qV}3% zmE@{AE|e6ttI?hfT0BZDri_j)g38Q~c^Tk8_3+)0<7B2W;FI1CsI)Ia%MpMFh(O&K zF=l{?Dk2z{(4aDxQL$CeFCCRb_@$%1MnK)ZGVJ6l!{&;LyIg|;u8rub5LqMs?~Rl; zzFT%2I%I?cvS%*jQa&S6qgO232*i?EEg<8urEObBtfM+yV^)BHe6|ap)q!%`8ZaSKjfSHgpW4+e|fka*h!n=N{(v%+bMFZDHBvg$s zFSyJz*P}G{6H@$S)vNqKrsT-g<0V8 
zr8f)scPsvW9;3n|Sw`xK2WJt^2F@+P+D|$1RJ3aO#12>2>SbyH10qkAXz^X6vHz&) zQ)0lt7d6dUQqkL<2#&alHXNhwquz&=sI!=TZ7a)E0mRaMv8baA-d&4|q6b)yN8auO zIQKQ#zSfQE3pN#SEZ0UWB&uvC5lE4v6dgfP%4b{StjM0kX!dmTgOZDoSL*%_P3vtE zEhd_~W!Q8*|0g%;@R3cl_j7K4Wd}6WVoyeCN6@Wc)%rql_ zc^UekNaeMaVpG)I_%;`68H>fb7WnA4G}ojFwizB~HkIv}3&M4zC|Ss#DPOC`MI{I9 z#xRwnT2nAz6jmkVIm zzOT!mw$|}VW z1L!2|DuaqpP$YtwL(Ih4kTJ}cvcM+Tj83FC1RJHb}^F(f?bH_6+v;{3xHEv`-&3eSq%1r#y{eH8*-7Xl7_6>JqoH^3nr{O`Hj zja@)S`FVrt7Y!h*-&%%JJ9AJQFZpP?R2Rsuis)=?r$yF7x{EhenZOkN<=RTBfUpmv6cYs%Iw}MQ{8Z;)cn7SbS zHv=a9xen|kdUXX;uwp!L);E#{dkz-$UK{UB>>FV?g|9|U0)EUI3|uT<96Iy>RRvuy zralD0RU$8`9#d%GRk2wT*mBL_$Kd=qKq>?lU#!3Z(OAnQrj%g)(>~CSFV%GoJ5gC0 zHp|z+%|rlA5!E+_u$UCf&!&Uj%;xMQXY8g;4OONo+>qj^SMBl24(-LFCPc4$hLJPs^~H=leBQk|p? z3jMm`ID*kNCh8E4(1&M`^#5S|2zX9;-?HU zAL~h+p9lVeEPOQjgwaiGLkc0DE)_J)DQ;@%ka#RwTJ6QNTTP;QM0ZCqwkc$Fx|>^JF# z_KI{utmnLIATd5-ik})WEO@&;<*k!pG1(KMi0PltI_6geIOJTXmE2f#Mtt$_S(JlP zL;@4I4Q+0|qJ=5($IDP$1R9DULb|dNYTu)7@HO8F27X5frcTLFBZEgULa+#BgDI{h zjaNjYyeU&Yr86o1YQc5;(DkqvJ`>CXt7XrVajjM{@cx1gOOAO>GgCD&K{`s!@CQ@c z(}E}@8$*!g{d1^z`c?5;vvJE~z~V+7Z79)z{;Qs!cm$6Oac41UBxRKS2Lx$@{}aO= z{4f?`!FsE6MUw=92;n979X=ff0{EEdX+uhH0+uXF9ARY}4B&fUXB0wW9L!iXobPK6 zH3k(9WJxkf%x3oNA5m>vbLLQzn|cknWIU9{%@rZdKCvh#f)g?5LB8Wr?hxE)ygz|# zbt$++6Ai%QA3r7gCb;F()&NBUW1o54H2|q*2E8vA+%g61D-YMcV)~zZuz`9@>Hz1x zu+%c}jqm4{iM{A>+7DK*uoYK9at>>kMKK<0X^SnsQYSU0_x#SWh&~< zqD0l;7k;&T#rTWPFp(YB6f0phMxT@ky{MRt2W}0)tzT8d4grZ!a<$ji3i#S+oc@5= ztW{E-VKwEhmQFU$j5643n2}1DJ?g_FS=3H!EnW%4VRSZbZ(Ka=_fBkY^F~;wc1g!3 z|KUT7pEHF~k4nx8#?stBU3BbLF8dBlecy}_n3LXI3Kn$YaAJFf!`CUd@J_?V^qtk5 zmfEO|$3XgNm$%yd;UZ2pk=}}=$x{KT8S5NlNQ2db-R9A8ZBQeaTv9<>8A)+?(omUdY%LV2Cr1s2ct#?B_9@PxjP2(rcRB~93SNbb|4IX1gBCKb z8tE&i`4V_>pSg;1K63Vp;uVRS0o+HxZfwT8Z&fbDHDQss=ymNEsHdV^%d6e>7DFH2K%rz!{Y zrIQ84mx>;_vyj3I!j+Y3yABNSxSCn)Bv`3KKYj@B@z`3c(6B%|OVQ@OqDrMe*yf4) zIK1E*s-J>kZDk;Q-?}ni`RCC6QZO>t;*`j>^=mx3(svTK<}T@!pkHjI8Oq#x;4M5c z;fa)*suo$eoxu!2#@~=jax*F1R0Zfk z5s`j%7e>>ONy_{TSrfTMWKiNA7#&rDg2+6^Oo2nYTxRIvDT()iv}s?Oha$E85#02Y)#1=F zaMam10wOP+o8UG@M1G>nG$YtIrW2hjHWYMbMeLETX8_D+>S(5a#YqZGt7M%WR>az8 z@et)R(@CppEy*IcORsI!imd_asa!(u>^dz1VKanJF@ z(^U}3CpAKgUAget?AwJwohq74ykbyj&$Mv$D7)6MiYY8?(FLg3>a(8(Rrn5UClnKcsj;Y7STS93%chU~8MFu~Y0zn>ap4 z7KJR{STmIJtoWl@3BT&5a2fngOfUhF5&>i)zKY8c-T&_B@fze=+jR(y%6AKFBc4G0 z2_O5rtL#U%9XakyGW)9j5S)Is?hwj!`n$z#SdY=`dC-5GHdADJ)^Z&cNc!Ly?9&bZS{CWUQsHxJ>6ID#ze zkDn&X{#MV8ZT8j78=I5j*?Ome>8B8a(x4Drl?53!IgWe&^#0sV8Jy(on;zJti1Nqp z89P#Me)~BHgVNk;424l^uFl=?2N0&3Q`k1hzlLL=Pw2)EJ80)qXzdV2o7lyU-_Jo)T_W zg#xY>h0=Evs2^5ERGd^{FF*oBttM;m+Av=4no}^&mW~avw3j|cv=mmGIBi99pB0@; zj4mZQ9LrOpGOvETCUnqP`oU(EpW^6B}HYAIpLNZJ*D0`FUnAnWeC0itq4B-S% zk3BB^Rk7ac7f!g%oD13F!@6elbxF`u0br?`X=QaKe~7_j8$XaIU>Nh$P= z0=7S((sSRsFC*`OEr{Om*XR?OWk?uF2*nLlAXz51I>eSg$LNGy7*+C{lXW zGO-yeJCNpfB0oHP2818kLXZ997G_Vcl!#9(qR%x@o^A4sE0xeD^49tKLDh5Sm(2`U z+A(*12TQ!VE96I&@C)eGB#9vTZ}#5DY*8Q$U;1&*6SKg0FIzZbWRhdg;aSEcn@?%? 
z`jzZrGFLpx-gUIAn)@>aPfz$=ystzJiPahfZfZfR(GaQ$3TRcWby(>Tra+0)mmboW#cDv^T&&Q3pyy|3-Bl# z8tNkti!B<@pHON$y5@ETrNB#TJ{6>SP-8R*o^e@H3cvW>4H0e3SHd8S(ZwR}G%%KM zgZcb9F*|bR2%JH;dPaDp^ybr+fDAN(mcCzc_)01kdZ#ujWW$xZPJ_nnmZz*0S7G35 zAY&R%P|ArkSQ(S;PlJ-3dBW6&UrU}^Yr2vt^1w%va$PiASu>kBtFa45_^Qu6XCasNGu-74)@tSP6YY(l&PJ~FenU2YU+|r@{mJ~NJndM#hBK@rN#DEYk*hiW zt45cwRZjV0HL_L6brgH&E*UQ6;MZfaAV|BfCqo_%cM$u#RC^u|%V-@RSDodVY@xpf zv^REBl9|THrM~)04qRc#+?MKfde3ghE*`exW;sh7#GkgLUic9ZQbuP|wrDOE%eRc1 zGt9ibGeV!rOsFm*M&H3FgULnd6;?|F8*w+mGk7=s2Q$uB@wHftt~fDWpqi!J`3s0} zOE_p*D+$Wr}eQ%D?N2?SXtfUis<0?ptti|wu@R`PWaJ`*LwvA;1% zbhMe4+$GQocW}FwjK=dxW`8R?-h^8NcNU%9h&Ys-tk)I~`(5RJrtLR?h5QU($=N-d%STAE4eG5>(8i4Ah%LgPI!26{Y@y z!TMMhKr99pf@&@bekTd?ym>N~h^di)o zLwibQdc4tN(VoaOhUJr%d=9M{5U8k3LA#(j(vmsi2)vsDDk)B^2bxss*TSED_g!D_ z%=ekU%5xI4CBZ37l2k(x*`$p)>3@;1#F;HlW^h2D>yRDQounRvZuB81hsTU^#b$&8 zc6T`I>rIfH(iCv{VhQ|PL~~02PO~^{M|R0v3b7)5K(12Iu}AUF#w3*Tbo-|{A+6O2 zrUL^L_*cr~>?IW@3I=G%c)vZh1qDU;H;2cR!%M&*COY37uMbHD#DDAr5r5)t`@qjl z2he^ZC5itqGejj_&~u#JFzxt^8BEoFPy~8&*hy&!E^I;!5@0?x>X1Lt9?vFHOL^MN z!+*mZZ^#8CnrC>>_4uw0kZY{H+ZZcmC{-_CgnDz>N6GgUFOT&Ph+&PpZ|&M^nhin1 zSezMDvboN|lq@8Q5ajTwqbzJF^ zZHSQ2bi0 z>z76W6w#qX%VWh$N>LeGU&LdqcY&C6RLzB=ekYQ^bH%?df%O3Y=6}$P(uFny|E5Sy zjhj1Tq*tli>1KSdgV2?>h;E<_5nHlQgNX6>B@uB3EopqOTkd;^h~2%eAmSo+QZg4Z z5V2$sW%nllMCAwVsR;%Fs`Wmo8Apj2Ir0$xhtBAhqekTy6uG~A9nm&y)8zq{CerV7o* z_BlkUfcRKfF0aOw2hLr2D1fq!A*4L#I0Wm=b`sKQOw!o`c7G|4ZyS37$F1vzLq0%b zn7X7q;bcdR*sna9b`nR%fSOoE2^~4~shnqGVs(I$6FstN9zztz_Cl57Xm$c8%_ft> ze%dv}ViLnUa!oR4;RM8y=$d4jDqbm1V5KGXhQP{>Z-|+->l2@;MDu4>PCce%JjI&B zWF|~T%!`Z;TYm?l#2sA_ofnCx)4>x<%Gd;oP?hpLdMTh)_yEYh)MaR_LSaLI*QZXj zO|r`{s^3&ItH_+lp;yK49nUQI3P|>3OCdoDWr3p1o+X{=G|nZQ=BgO5DI9sRwxARr zD|r`_DK`dj3hM@Q#r4;W%oRJc z_RJL)NdY`xq>so~gA^V1=h``3aC2P{zAMTbO!EF+B!4AKWtH+ArkHZH%x~8F7-kXNV!6&_Y*~7XyJ&e9pljvK+2%4{sBKacOUq*>kyNuDg}V6z>Wf-JiXDs0 zxFTJp6J8wRyl(m|_cWYf12iWYIu%sXp$QmHdFfuzA<1($;E4--s-=y+7fi0LA$KuJa*>HvuDF;9Tp_QOc~haklXc6V}_FlM1^*T_qa z?;1A(g2`m!yI?oy7B_>}<9lP zI42$EA*jijh)gn#PnZ_MrXmtRIxnT#qzp^17gA+K`0b^dRkEV0J$ndKrASr5 zREU_qG+V?n!|XA^2x;#8)(MtLMAIm?km47-J?=nW~BZsDJu!s zA~7KkUk9g`PW(eddqxmBJCTMQ$ba}t)jqua>W9Bx9-rPmeuVN2*CF_P&Q|z#5tqQ; zSV2L^FFh*1LEx?f3k>T-luX5ylBc^pTB2ftD4heVCih6mJ6nOB$(`5@xju>WS6e~! 
zeT$fpwV8#scC8@6SbMCkEXrEynimV(n?ZO=9Qy^%E0<Se6t} zQ|^H&#Ac-ty_dgk1$m(;ptC-_wSs0=*I-^}=qkqyQslIqTYWCxyKG$hXw;d)v82dV?eZ{<{5)Ptn%3w8ayjS9&OrT@b7+*cjs~~S%TT`VP&Y6g{mDiY`y1Y=0$V1|>$aW>D&O#_kFp&+$Apo5xY!u3FndcdFy^w9 zcg@&X@7jsHJn7aO(0^6HL7ivtfD9;M3u&uwWenD7Tx>ICy2!CmBmr)V!Ait=x8Cw@ zd4}yR378Uv(!mEUMQJG2{&s~4d!&W{C)ytkV-;K5u01l7$rh}z9D_vi-VUyfYW#S{ zXP{VkUH+%F*rX5z5GT9E3(JFjM>nk|qV4T;J44E>jvVS-ynoyAHsbi8=u2P#q0A41 z)?@CAzT=DIgpv7SIEY-$n^`P!RQwEPvFw@@f*<$z+|MG&#djY}iV&~3OI*9G*Q&MK z>3z7i?R&56r8ny@E$$Ztw;3;#gZm`~txLVuA680`%+1#~_+AQ8szA|#@d_#-8YAeB z857(}Kb9b_&3}0JL^xwM>VACq{P@GOo6nE$e*NrQg2sH!_gLc3zQF=N2Kfo{S;L9E z3@SmsDRWN=RSLf|VQd&fLmB4FB6I~(yMZ=Dy-#c_n5A^C{%;Cz#InetuzK_TAJnPv z_k}lNt!3Uw?H#|3Ze;dRL93fF-x5ZH*5EC%=VkMl=6|p(J!7F^(Tf1R38~4Y-g79g z(9<|frTknyOFG1QQt6!OFg|y|2lX^Riq1=6R>e&+XBQO{z+B^7nQN#uzNVIO9Qw#m zt2GJ?^*U@}%C|JnTmxh+lxi!DIBG1_%kLTVG#L7dI$*m$M1S#O@4a#}ebacnIAm zwg!uWafA`VbwtPX1s=!6!u;DLFV7WmM|m0pfyHv^r7ymRN^^0YgRkOBp}skWu` z**(WT>i%RXGY1k3x|8Os=4gpeF?t;ks5d3wN{XyT8lb=!ITXBD!_2a}1H7Gtyq`6lRjBR`4U@Mdn+eA7QoY6})T zbnz#sXf66r2wC)>LuyM}**x*$z>&@ncz+XwuA={q!IqDgTV-tVe+F4D!*BUgnYT(= z0A;sgI_13;w=y=sb}M582oBwh)stld1Y+7{_29oJC>mA$8uMGq*QA;fZQP)O@!?8& z&YXo(cIOPfr{}(RB_*u^wbIJadxAPQf!+f#?V=Eu2}5G!R>ETZ`%>OD5q0U}+JC_> zr8ry2p@;A4<2E13A*(eQF?dq&c0lb|}!gt$c9KBQW4r-C0vCW%J@g3(9*Am}Y2kt|6J>*YSQ3Q@NbM-H`pIS(N-3gtj#D92 zotuaVtHVC){bF9(7*}Ww<-RWNUGEpqgE+56ohn9K#F{IGPzi!haO8vs#eo93b09My!sGF&7GO zx1g%cSQ54b!s6~g1nadxA{o`;5%$v7hm@ymi9~%)oA761qEfyF*Ns>_8h?*_sjQ7b zaM2!XAxTDS@Oa>Q-6jJ~Hliq`R)dtUN$>9J>s7v{696Er|>SgKWl&C94g9afcW@LHuelctXfx@Ei)+ zIw7PB3El*us~CLai@ilYuYZu>)$c6`&)}wI{0)TX-YbQ|%kFi_eyL&v>lj}BUv>+g~BP|13U2*ctQL)@AdL$axZsGy=-@VM&y;8PDQ$S~}Fa-o~#$SbV z#&oY#QVw3bR<`?IEj22xUE(L(DQaLv;#v3APzJO;+iJYXXw;y+T7i>wnFBSMJERKl zz91Wd@Lf|sLpPXxw|^j;qmXx<+@XNk6k?)uiKpIINEzn#46Ct1Eanb0tQ)ET4n{Qe zW_3vPO~8z*+iR;sqHkCaNKB9NYc%dmJoLUk%Dmp5U^UiVG;d=wxDd3n@j%Q%7HZ z#c3twIbGe(x*afPw@5tez8cDW;0gNIgbzFrw1!BK%z~UW%?CDnm=~K5yljm7FopmN zdw5*Xxn_L(G#)?ZbA@WsI;oAzE+J&`cn+Uy>UKIi?oAN7ipMwd)?4Hc@%Ykb$indq zo>+$8;CSY(Qh#W?>{gfDw@QKW?N$bi3-Z{EiIoM$152ylRNV5X#il-Cq?B`c0x6CE zTa5+qAhy0+K+mcw@P7+>*o>FY%3cXftLw|VMv^8BAfU ziomUNE5T*EzoV8x@B{;_O*{!)s$L5eepB+dML|s^jenGHC`TsaVv{${J}K|ILs`Z# zCWFn9+1ElD7*BA#CcNH(ICZr|=y{)*3<9#du{DH>M`!2==c^74FBW_YzZER8`X<;h#80oy)!VogZM?wXj7)Qj%;(ou*_pWmMi zWe_|;*?(#wcpyRblYtWbHm={WYeGP3z$C_elu>UI2j%#nu9{|2`wRoLM#&Eq&eWv^ z6C}q09_Ua#1jmHJK-=8|?=Uw77R!r7Q(o{4l2*pwV0Z4lQnROmtbwSO1@~GuTZahJolOp$hfDr6>3h4oKX~|BL4|-o%+y z*enNxq7^2a?5sVW`NGHoJ@_+?ktnIGbr}ANBWdpIjRjS}+s!>Se-qg_xNi6+ZpqqX zHWof=cxGEm6_G-?Y#I?slevRlJ`SOCj(_t{K^GF-&9Af&bcWXL9;lwRVWMG2ASePS z#@GZBb7YH9J4Ss4uB(xiXvG(FJrx+yemD(qoj4s)F5;bVNn)+U+(C)=?`YGlDOm2Rrg_tv}Z1+g*A>+u&mIRdL#iPG?P?Fc5Q4pdrq^j{Q z{HCUbEYHehW*0lgPcavua5E?^#GHY`?!np@G#v-1Ra#dCvb5~sI?n+LBYy%GZfdB= z5TOC2*Y+`fidlcyzi>BLuYO+8TYv9WfxGT(+9%qcSGZ51e2Re2MXQ!^I4b=RV9-DH z_mZW&!tz*iv^W1Qg=mAlY9i(oqICn*j%YDX8%Z2KNvd^>9}ssvhh~2^Uavy8KK(u) zkJt77y8pZZ0096000030|J+n{wtQhPFb*lBt#(+uwLT0N$&#-~BjQyYE(@F8%Fjiix;j$W3=G-RwyOv z7pY)r#-x^PjCKNM%u&#d1NUy2>KX*&fpL}U>Eq)@NF~!!y@}zFy+!sqm+>$G z6$&Xv^nx8P008mXlTpbUe=sh1ZET#BU2EGg6o&5y`wv3*u`Pc)#4SrhSjQM+pVx{l z$D)?yNlKG4_TP6TJ4qo>(n6AB$2!k@bj~X;ZV#eEk4$MU`w|l`z=-vgta-mJ@y{RY zS&or5w6AF=dsgBnrt$5~`*(}IR1dmk%pf52y2Pz9!`%0E)iOb~eK>fPqgFezkbq!8X^{^Xe zl@tRYHoW8J$w4s^)%2m*9VMy#NfA+}j0MDrKZ6%T9#0t-_=#8-S1hef@^Bu2C{CAnJgY<4iW?2$0 zXK|6QXGK=zGZHR?D2$iII#2%Ln}zeqSB1cg>Gypi(3uDyrCe*&w?I#yXLAqwGIEbo`Uh^XsGN+SwkeuUSKPo%tsB9~s}a2F_UQ0nPblUO%mvuE5A3??tx! 
zN_N12UnICakl;glaBY>gTPQ}=P8%tHkE_VU8YwVKM}as| z2=THki{dD|W}X5=bQFkZN|MBB{t5_e-9aEukivT{14?!bh!Z2syfl4XPtpkjae_pl z7e!$Z5!>08*EJgl@Iqt3My?WTr?K%N^!U15c(J-%h{tvYUY@5Jas7NjwU{y?o>ZTi z;xf~BuKEv?X@jDFd69qp?w4=Ae)rL1Zat=5fEo!odr0@@o&TeYZMcC@M827Lw0>8z%p<54l=Mqxy0; zLaK2NUZ44hpT0T&%XjDZzrB9PfBxoI*W&Nq+`Rd_bMdz{U%&h0KJkYyuMeO9|Npyr z|N8XP&0pH8sINC4zWc43{%?;qe7o83-S@d#EgzQ4t&r39lf8tn0r!(`g;jqS$AMb< zBP7qKU3oT7F6PNKG#t){yYazdZ|esimc1y|DjC7atAvF(Yyh`{-28of`ts@VkAHr? zA1C_IV)^~cEAWr+=9TgLPkghNcmMKlZ{2D-m2ZFkeE;gc|L;FPzPz4({^3*n%dlmS zpGgiZzf7&q=@G)0aeKU^BY7NO^?l#bw$Sj_`iM8sR_T6V1_dT z9aOiIm4-HdT>xi+uJY>CG-Xz`SG)F&DfD%kqw&FFj`V{M{sm@^x|BJ8a$q$_dS{FfKS%kgEQ&U7vZBt~ zzRDESI}31e%A2yT>b7<(kHw_;%tfH9wm8+yd|s^_HKDK59E}eabEF@9@GmfP)Thjm z1FO?R4f}S4ggGkP6NhHjRCQL?eb%ZCaTe$z&rey~bxqaOSyh$2o+N>8PC1JxcZdD> z(^NZubatAg@xfw}^n(xn1!j_*M3Ep~IJ7#GjCe6({5)0cHx@?OvX%0_t9w09LkEy; z#%r$0PJL0cLG!9EnzCrs&@U36cc(gIHLY$r4=*QnbLXEdrI4}1^OFzZC3d#bgfS}; zX~BbGYuUs|9ubTYJq}>55lo}L#+}D(NgsKCK8hcC9+y9wLH>CB_WR$(gplJexIOR! zA&iKdHcyfe2lNh6_LzJYiqH1ZB|Mt>^mS@{+OGb|at#LIOf!Z&iSG~S9a`xzjKlHD zqCRDH&KcG=UDK6{C!Q6!IAukbLm9F@s|rOEF9AGl<(zvqpD2TB`#N=Yo);rUc+4ZXnp+Z?WxU+CgXZ!x?Y#CzhG0WLnqt^M7TSE!9b%_yf zZ8nW3sD7=$1&fNY7j@A?uZ>#KpqtE1X_kZ6GkY02J23+mAWWhzIIG5=nJPoCeFMv5 z))4IQ61xqT6nl57EXA%hePltotKP4xf4)97MO(IIS7m)(*Ns_M5?-FlrmfqmEc2{y z`AfNJFQN5uUFGGeuBx`r*F+HyU-BYDv^{3IuGXk_zU11wlV!QK)lqxCIVRiEe^hAolO=?-oN|FMozz}bsZX<&NkL}hwNOn+%)X+M$6D=39m|Y z7;RfrTpjSSuFyh~gcr@p?VDm%e@OE1VqV|=$>QW(K9QD~CDGoUnvv!%r^cu~ixJ}m z!Ws#WGqU7bLCuiot=4-=c+0`ZB-Ulqb6&Q(@ry)5eyci%_pTk29zO5<51|2bUldEC zzB>})h0nDPb7*i{47t;Lzyg>IJ}*aso`*?^J&1T2 z*Jq2Sw?YY1L;BTiDBDKif0>neHH&SUXLiZMKU-<6nUR zK&x!8Q#a?~uhHWog&q&!9mY3O%WcMIF_!V|W3G|zXkGW6$80jb-ZQ@5Fg`5h|MOq} zee>A9|IXmteF}~)uIQp7(&R3?;^Z~qQX%~v|x;(LeTYd#uuvCmc`F5}bs0 z(2jy_T%qM*@MWjzs)Rc#TtMbJv$z*-8*;cyQo1`_OG=GmuM{-1=vmx7I=T)uDF4{6 zEZs1w7AlWOlNZCswqIHKVU$HvX?}C{U5I?!B?H|ZtX23L;#CTc$k~DfU7+>t9aYZk zB_`p2O@2a(WfK%gWd&!FKDh+k>C$sd=Bu^@-yKi!LCxd4Dt9>lt1;z`MlpKZO zr-;bZVKeL{(cB%j)w>#{S|ykxO0^2#$O_G32b*H#nuHrX!X*MGGPhGQwv84YB_0=l zD%2@CK6YA(;&Jxms+MGPcf?kmYN*%H;G{C)$USlLqSZj(>`ZmWjZ|n~Q3&EBSoLff zbZC8Es$2FVTSu(M1FqA|+Jhp4P2ri9<(YEK&r^rZuv5rnZONqJR~|FnxXp?Cwn3Qh@wP#JQ2SZJxt5`0TqxMaZDh3azX{Bec@Q4S9pNcWga5r6rbPgh}KEGiU*JRz%!7VA`#7z|cqD z#lJr6hb+T|j?J&+itE%%Gu9L?Sz9h?NR`L<4iS@2k~M$d0dk1L0QPbWGlbD;Ys*=} zaY{tViENXWLRR0LwER0LzF)B(&jicxx( zI}h68Qgt(Osk&L^QuW&$z47l>snqee+#Y?)I+LnX=S&&k0XD9c5DjgV*G*(om%6NO$vt`!0Plh zaja4s*%vq=Wti}|a8P69CXiLYA=xJ!&K==w5f!j9sqy+AOq>&mtEu#0r*7(x+oW*E z0lX7n;4z^)7_cD0*t$@$Va(O6=q%yrT~9cQI?=q7PNFBA#aa65 zikL8G}X{fj$et zNb@P#x=S!Y)VCIpuTwwG#!O+4O@uv~J4et#XcUiGdj$3d zYYJ{C@J{23BJo6~H7C5XV9s8k)#rb3=&~7j3V&=Yf3$f)1jEB7KxPU^15zi4Y~(Ww zI96|b4V`wQB+wwYK@`3R6Q9EFKz?KEW;=|WI|DQt!zCOFiohYj+)`7(8o3s6{HE3$AsaMeS?i$ z!xeCABxpsenI#;@BsM0p1j^%vfDkQETvY2=$hZi!4sPnE8EguBY$NWHTI%%}KiJNN zZ#X3sGAw}(_=&2^Ny0hb2r7SWi0sC`hmzHJ3QIUw8HcnE)xA#LG=ojyjO|F-w%Ahy zV%SpzW7yLH%r({>wb1T7W{W(v)kvP&YRj-k=+QcPI{ud1qish@vlfu>N!HsarD zcB3G3F2WE66k|oljpvX#Xr%$Y!?B9oNQ9xmg>VJZx)|SqaCwy{Jff|q_S8)?*c5`; zTK`VtAmuR=1;XzbJFN>;aO!{Jf%LZ(3^6;k_|}mqfiQJ04Ps6;MU?I59A>z`W-f|t z{3#YOb=eF&g)6p^h$4Tjg7TOky5i2qaf?looCD5RLI+e!IAQ=^{7vaJ6p$h9RdtbQ z!VJ~>QQ$ojBx<(a3_xm!XI7<9lC^?VGw`s28~j~mT)div>NDO zxs0*bn_#nxgyR#+rob~=4|nRQ8Ey(y>>^H&nmc(+;7&dCp4+!Hy{% z>y8@uoyTnPsjeLPR9CL>so#BmeZD6=u&&oVrH;Sn_V|0&IaQZJg9q>qr}CI-wQ|gu zjVX=i9M(pVnOk9<4e2kh)631uOQD5b#2?daIKlJqvnPLWRv)DjLKYn3BH-V|M9f22 zi{}Vb{u}Lx@=-4_1!Q9F9 zr^hgnx_JzNJDOFLLc}^t#0(|z7(WIafN;0bt4QeeunS=~&_kFY^AOh5Z(Rzj8o+z} 
zLK;||HA;VvKr$&87NrL;A?5+Bsnq%uf;52DHqea0V}b&LC@=F3Bm~(8l^3KYd6^IAPbQ@733tA@gw7OsRbeu^GVI(f<*M zx#1%ibHg9NTw~qQ?mHf{b;I|*WI=CA3w;0lL=As{7m~Mshr3BEW9aebZ+Dwtzt8&= z8a9A;?sJb>Tw`bZ=wcsTr^d~;NTD3PwbmMkWsjM*1#$6&$@G}xab`o4qs}Y^cS)$6 zDag4%4#BH>iwL$-I)^&EYo@SlEe=f`Hp5P#DSd>dXwA9Dh%YnIlCYgG7ImE(H(gAj zA$@-v41JSIqNsLb#QAi@^OoMX#KY!Z$i36U+>(JS|P(6ec82?a+iBI($Abl%iq zGwc-l(p%+|x%xdOT>Thj31~;ELQ905)g&sFaJ-9IUM0Eju?*n5R%zE52`4FnlWDaJ z!*%MX8Ep#D=_5qPtV6+KNDz+p!9_ZdXVrfN^61^C(9X`tl;BN5>IhuD^zM_Ma74o4 zeu4{)(yvYZG^0)7M7`BC8NXtWSxV#BVw)Nd4%7 zxLlDl9np5Hsl$5M+(C9lVoL>JC*#M+V)gw&#e-e?Qu+-E!X6yAjr%%mv$$7m;~@+$G7+j7@y=kGg;2hW4uiHDX(&qwXOb`U$v3;JWC;)s4;D6 z2(@77*9cmu4;P^*s7}oSmUs;8ru=`5Y7_a*BnO|mW|wISRYNebl^wL=B7nl-gz{fI z!=5z;yOdTAuordghmbRj_vPF+sjr&=@`H3?zvjzH1rwnw>Q+&0a2OLY zt*(^0Gj>oZRE`MV{Q@`v6eLx&w#MN@VK@MZY0=`g2A%A=xu%4|sF!qSSpI)!h|t*N z6t&NdM=+uJTEfx@^C2d{d@t#UcG50eoSAFNDRMg`4J*R$>=G2#!jjrCc2oiq*<#{< z$G8$%fNQ!cJVuj-6mc=rnv$`}`AgN&eI>{k1`?wX;^aD~;>Rh4X%81&eBe~C)4fueG&!u64f`wsBWNA{mM4qvY*1{K% zm6JMZNs=v<5~{$2Itql&nee0Wy>m8Zav2z79iKpuDLiA;RmZ?WhwQRSB1(}OFqTnF zpkd8)@FZ00{Pe(>24S0qFGpbddBx<$nZNWUIud~%W9%L zi)Ce=(a%-lBdVnA_tO#|Tl>S88F7W0fBA-jg2rx0zXh=&aRlkdG@X+ejC(qYEwmEb z)k)lEDiAW4j0K@e2m=44R_Lcoo#cz%x^RFc+ds;V)6HsgniVmn+$Gh?vN_!b67 z!M(tP!LSEY%7PJDOJL*~OKg?ph@uz!U&68+%O0kZ6T>}AbiC%0^jj!;3Y0RTkKh9Z?MESH!-14ANvmYxR2C>T_psA7<9OPzHtJS6aK6UWF~d1$U#B?+tI~qBBKt0YHt9p*vj+p#^x^%# zo~h3Do~#T194+wb`*5WB+nwgOh(eOe%NMBFAqrAeRS|^miZGMQ$Yt6$#fI>CkOx1B z=Si;uaUa}A}7M5wq zA`|$(6Awx3CmDb#rb0RzO{^roEGSzh6@}dg#lg#W`H8BvgD&yd5c-w`NQ##Ii6(u+^KkVxI{kwXSAcM zMJa!%W|gA*h?J$Ux(``5f(jf&(vs7T6#Tfqw~@h1Lmz=c1Wvl#_m#^~qUMU~z!9dl zT7rU*gQW3UC6pyJvQC8abs})hmBJ-d3Ew31ZKW~K-8W1`$ES6*lJ#^jK8Z)w=vrP4 zgb4TunHx$q9KMLA!VX(hN7j}gh|R?{6pVk#l~c1tZB^w8w4#tZ0^6N?7kGOjo3ymrmK{-%7*Ba7tI=81x``L^=n3a13d=v14FBP6{ z?qIje+Av1WtW|b6RBAZ>X(PU3Mqw=LPmlo^w|f`aO^PfW#u}sMQ+Vp(;Cet6L^6LI zuqI6|ccw|)b1uz2!BQLRdR4JD)N43C_>rh>5-eWF z=3!fhBP&&~3tiBlOIpvN!zFC)Cc!;9n^4!d6k^mRaOA_NcVzQUfFx;xVVqM>&4`og zPmu7*E2Op*JhDav3MaDLb)+dF-zH?z1Yw3ogBu-T*wlW6NRm~nDQyV~g&%(k8Bp;+ z-$bjWCkVq%xrwHN4qK`sSslWCUzbWBsey4vGRclzm<{X%Y!WU-WoshDxw}jjM};SU z4N2A(DeJYX);0V=gn}g1p&%8I`QVm*sag@Fr9%VZGSNHFMzYK5HLmb%e(Vbn5JWMT z%f<)~5|46;l*D*VPCrAgSL=ULkXSP5+egT!kt}5K2&W{>sfmKJjC50s%CJvW%7PNn zh)<|M$mijbR2dqg@wn6xpl#Rtsxft_3UT<{kCdgbdMjCIf|L*B+98$}WmV&<3YZTr zwTXo2;5mqRNma-m>w;mOvUxr@6Ut_qenz1+b=sb9OWMhmwpsBG7j1vD+^s@OmYZPN zvK`@9WVl1HvO(sdQGdRXx`;0bNyX(KcV1k2HRqP?ldb?kjRX_nX9vH^Nr!?G;mhw2 zex!Gsw9J>N*rsI>kS@t|A^ZO@)ytNw+A9Xqamb;iP3lyQX@+kBuhKF>od%opw<&F( zx7z;j-EY;7{fCsc-E@B)qmxooG*p>5z5uMj+ay6?2EDQtqB3bUTcy=xE+*7ashTwU zbws?BC=#YZZI?L)%%yI7s1uRgg;y^b-&AeGo)1go`TiD(kYa6hWQM%iduN6w)Swv8 zaPKE)-}Ey~Z*Fy%QB)j60&Sg%wy%-78bM;N7LU|wTC(H)xf*|&?2;X8b1GBi>25#x zPe267yt5EN4Lz24G`T}MleM#vZ6n&}u0?==K(5A6M9P6gCe12q{of7qC?`#n<=CCH zNL>z`K;<^LCE3a-xNk4888!N*I2Elce&v5=fk`}uCX(Z96sHKiSbktTSH98Z0$V;( zbG2mV`*UUWzG#0t%v^az?zqBSdO4`ae484s5=wN8c+&r%LUhG#{LeVfOMH#bo=Y~k zep^S1kVIxJM6%500RMNf6}D6u@6VcL^tzxH3(RYD{k9|xH&p>wt2vw%o^46^lS>)4 zy3|DbxQmma6`j_qwmee8Br<7ZqhhHo9;v0a6a@As%?f`4z3~?a6+iq+!x0Z@-)JEv zYDh0tQ=2jTbUb=4@iiwTv1PM;*H}R#WlthgHI7G2)#8zws--jL{!~Rg5VXab1vW_p zTO=Z&#u;Tjn%u6E7piFXaXSUJJeEF=jMk;){TIp%Eg8fL%EbW{%e zY68qIkMn=Cfd^2NSp1CcC(@OL>R1iZk@CZQ@jyuu0$2^rw}$cv$O;Tcs*=QBr<42M z1<*?h<3k8+0Ts$CG;TmUP;cM#Ow_3Tz4Pb?o)j1HwQFix3q&iB3$%hIi+xuZg4L<(3 zaFTx_U~%f}dr;wBe)MWFx8{n7^mTm}k~n&-Y>MURp|*)%>D==LQs5egK4S<8@{gMj zyc1ld=Y>Mp%!kBdCZ+(L(V#~9x;_g@oI6rR#d7XY>SW^F_*;-TVukE534%jcsF6uO zFmq_5N0xZUxiOf}*^-PBJ<@4QBt2+JB$t1WU<16HPu zN%O88BOX<3hBbXdCYJ4!vjL^P^8oTcJ84!>SYJa)ygE{)1=YE}S!CfA?6D0u#_G+< 
[remainder of GIT binary patch omitted: base85-encoded binary delta, not human-readable]
diff --git a/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ovms.json b/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ovms.json
index 18a36073d582f5..f601a8120117d6 100644
--- a/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ovms.json
+++ b/docs/sphinx_setup/_static/benchmarks_files/data/graph-data-ovms.json
@@ -1,1047 +1,1283 @@
 [
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "bert-base-cased",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 159.534,
-            "fp32_ovms": 157.334,
-            "int8_ov": 432.339,
-            "int8_ovms": 420.793
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "bert-large-uncased-whole-word-masking-squad-0001",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 13.125,
-            "fp32_ovms": 13.254,
-            "int8_ov": 38.151,
-            "int8_ovms": 37.623
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "efficientdet-d0",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 247.445,
-            "fp32_ovms": 253.09,
-            "int8_ov": 413.083,
-            "int8_ovms": 377.844
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "mask_rcnn_resnet50_atrous_coco",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 1.638,
-            "fp32_ovms": 1.714,
-            "int8_ov": 6.202,
-            "int8_ovms": 6.126
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "mobilenet-v2",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 3333.399,
-            "fp32_ovms": 2905.171,
-            "int8_ov": 10422.241,
-            "int8_ovms": 7461.99
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "resnet-50",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 575.208,
-            "fp32_ovms": 569.925,
-            "int8_ov": 2199.072,
-            "int8_ovms": 2064.581
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "ssd-resnet34-1200",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 10.598,
-            "fp32_ovms": 10.472,
-            "int8_ov": 40.683,
-            "int8_ovms": 38.737
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "ssd_mobilenet_v1_coco",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 1219.441,
-            "fp32_ovms": 1201.096,
-            "int8_ov": 4400.471,
-            "int8_ovms": 4270.702
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "unet-camvid-onnx-0001",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 15.924,
-            "fp32_ovms": 15.763,
-            "int8_ov": 67.731,
-            "int8_ovms": 64.658
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "yolo_v5m",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 74.189,
-            "fp32_ovms": 68.788,
-            "int8_ov": 247.757,
-            "int8_ovms": 180.302
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Gold 6238M",
-    "Model": "yolo_v8n",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 332.326,
-            "fp32_ovms": 278.054,
-            "int8_ov": 740.985,
-            "int8_ovms": 609.062
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "bert-base-cased",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 182.025,
-            "fp32_ovms": 180.764,
-            "int8_ov": 485.82,
-            "int8_ovms": 472.842
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "bert-large-uncased-whole-word-masking-squad-0001",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 14.625,
-            "fp32_ovms": 15.132,
-            "int8_ov": 42.906,
-            "int8_ovms": 42.406
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "efficientdet-d0",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 288.531,
-            "fp32_ovms": 278.548,
-            "int8_ov": 483.438,
-            "int8_ovms": 443.032
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "mask_rcnn_resnet50_atrous_coco",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 1.872,
-            "fp32_ovms": 1.95,
-            "int8_ov": 6.856,
-            "int8_ovms": 6.763
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "mobilenet-v2",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 3909.405,
-            "fp32_ovms": 3327.621,
-            "int8_ov": 12375.018,
-            "int8_ovms": 7554.235
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "resnet-50",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 634.732,
-            "fp32_ovms": 634.102,
-            "int8_ov": 2481.256,
-            "int8_ovms": 2349.872
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "ssd-resnet34-1200",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 12.166,
-            "fp32_ovms": 12.027,
-            "int8_ov": 47.295,
-            "int8_ovms": 44.525
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "ssd_mobilenet_v1_coco",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 1384.145,
-            "fp32_ovms": 1356.126,
-            "int8_ov": 5037.197,
-            "int8_ovms": 4834.045
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "unet-camvid-onnx-0001",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 18.26,
-            "fp32_ovms": 18.052,
-            "int8_ov": 77.933,
-            "int8_ovms": 73.527
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "yolo_v5m",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 85.149,
-            "fp32_ovms": 78.205,
-            "int8_ov": 281.889,
-            "int8_ovms": 204.353
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Xeon® Platinum 8260M",
-    "Model": "yolo_v8n",
-    "PlatformType": "Server Platforms (Intel® Xeon®)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 376.079,
-            "fp32_ovms": 312.181,
-            "int8_ov": 801.556,
-            "int8_ovms": 678.929
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "bert-base-cased",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 35.915,
-            "fp32_ovms": 34.381,
-            "int8_ov": 101.976,
-            "int8_ovms": 99.024
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "bert-large-uncased-whole-word-masking-squad-0001",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 3.232,
-            "fp32_ovms": 3.266,
-            "int8_ov": 10.132,
-            "int8_ovms": 10.133
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "efficientdet-d0",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 51.747,
-            "fp32_ovms": 48.906,
-            "int8_ov": 142.489,
-            "int8_ovms": 124.167
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "mask_rcnn_resnet50_atrous_coco",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 0.352,
-            "fp32_ovms": 0.364,
-            "int8_ov": 1.322,
-            "int8_ovms": 1.336
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "mobilenet-v2",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 795.18,
-            "fp32_ovms": 664.842,
-            "int8_ov": 2721.454,
-            "int8_ovms": 2063.761
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "resnet-50",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 114.859,
-            "fp32_ovms": 110.835,
-            "int8_ov": 467.591,
-            "int8_ovms": 445.408
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "ssd-resnet34-1200",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 2.053,
-            "fp32_ovms": 2.074,
-            "int8_ov": 8.023,
-            "int8_ovms": 7.987
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "ssd_mobilenet_v1_coco",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 260.104,
-            "fp32_ovms": 250.094,
-            "int8_ov": 991.064,
-            "int8_ovms": 930.128
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "unet-camvid-onnx-0001",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 3.273,
-            "fp32_ovms": 3.3,
-            "int8_ov": 12.884,
-            "int8_ovms": 12.727
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "yolo_v5m",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 14.714,
-            "fp32_ovms": 14.243,
-            "int8_ov": 55.058,
-            "int8_ovms": 47.548
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i7-11700K",
-    "Model": "yolo_v8n",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 71.446,
-            "fp32_ovms": 64.775,
-            "int8_ov": 200.864,
-            "int8_ovms": 144.792
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i9-11900K",
-    "Model": "bert-base-cased",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 36.227,
-            "fp32_ovms": 35.646,
-            "int8_ov": 101.562,
-            "int8_ovms": 100.382
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i9-11900K",
-    "Model": "bert-large-uncased-whole-word-masking-squad-0001",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 3.23,
-            "fp32_ovms": 3.254,
-            "int8_ov": 10.05,
-            "int8_ovms": 10.092
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i9-11900K",
-    "Model": "efficientdet-d0",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 59.759,
-            "fp32_ovms": 55.851,
-            "int8_ov": 149.505,
-            "int8_ovms": 131.453
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i9-11900K",
-    "Model": "mask_rcnn_resnet50_atrous_coco",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 0.368,
-            "fp32_ovms": 0.394,
-            "int8_ov": 1.308,
-            "int8_ovms": 1.338
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i9-11900K",
-    "Model": "mobilenet-v2",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 865.806,
-            "fp32_ovms": 734.822,
-            "int8_ov": 2743.201,
-            "int8_ovms": 2163.412
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i9-11900K",
-    "Model": "resnet-50",
-    "PlatformType": "Client Platforms (Intel® Core™)",
-    "Parameters": {
-      "throughput": {
-        "Precisions": [
-          {
-            "fp32_ov": 116.784,
-            "fp32_ovms": 113.046,
-            "int8_ov": 457.358,
-            "int8_ovms": 440.924
-          }
-        ],
-        "Unit": "FPS",
-        "UnitDesc": "higher is better"
-      }
-    }
-  },
-  {
-    "Platform": "Intel® Core™ i9-11900K",
-    "Model": "ssd-resnet34-1200",
-    "PlatformType": "Client Platforms (Intel®
Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 2.006, - "fp32_ovms": 2.031, - "int8_ov": 7.817, - "int8_ovms": 7.75 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-11900K", - "Model": "ssd_mobilenet_v1_coco", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 274.42, - "fp32_ovms": 264.153, - "int8_ov": 997.987, - "int8_ovms": 915.681 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-11900K", - "Model": "unet-camvid-onnx-0001", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 3.246, - "fp32_ovms": 3.272, - "int8_ov": 12.668, - "int8_ovms": 12.585 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-11900K", - "Model": "yolo_v5m", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 14.985, - "fp32_ovms": 14.514, - "int8_ov": 54.937, - "int8_ovms": 47.767 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i9-11900K", - "Model": "yolo_v8n", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 74.1, - "fp32_ovms": 67.472, - "int8_ov": 203.493, - "int8_ovms": 151.175 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "bert-base-cased", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 17.054, - "fp32_ovms": 17.124, - "int8_ov": 26.043, - "int8_ovms": 25.872 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "bert-large-uncased-whole-word-masking-squad-0001", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 1.434, - "fp32_ovms": 1.456, - "int8_ov": 2.421, - "int8_ovms": 2.450 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "efficientdet-d0", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 31.321, - "fp32_ovms": 30.316, - "int8_ov": 50.629, - "int8_ovms": 47.377 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "mask_rcnn_resnet50_atrous_coco", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 0.151, - "fp32_ovms": 0.182, - "int8_ov": 0.361, - "int8_ovms": 0.389 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "mobilenet-v2", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 442.763, - "fp32_ovms": 380.661, - "int8_ov": 724.232, - "int8_ovms": 617.393 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "resnet-50", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 57.978, - 
"fp32_ovms": 57.038, - "int8_ov": 118.213, - "int8_ovms": 113.691 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "ssd-resnet34-1200", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 1.0, - "fp32_ovms": 1.031, - "int8_ov": 1.937, - "int8_ovms": 1.954 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "ssd_mobilenet_v1_coco", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 133.421, - "fp32_ovms": 129.949, - "int8_ov": 267.141, - "int8_ovms": 256.821 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "unet-camvid-onnx-0001", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 1.515, - "fp32_ovms": 1.534, - "int8_ov": 2.96, - "int8_ovms": 2.973 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "yolo_v5m", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 7.691, - "fp32_ovms": 7.511, - "int8_ov": 14.919, - "int8_ovms": 13.832 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - }, - { - "Platform": "Intel® Core™ i3-10100", - "Model": "yolo_v8n", - "PlatformType": "Client Platforms (Intel® Core™)", - "Parameters": { - "throughput": { - "Precisions": [ - { - "fp32_ov": 38.482, - "fp32_ovms": 34.513, - "int8_ov": 68.126, - "int8_ovms": 55.698 - } - ], - "Unit": "FPS", - "UnitDesc": "higher is better" - } - } - } + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 161.041, + "fp32_ovms": 157.547, + "int8_ov": 435.257, + "int8_ovms": 422.689 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 259.576, + "fp32_ovms": 256.524, + "int8_ov": 412.419, + "int8_ovms": 376.69 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "manual_yolo11", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 385.381, + "fp32_ovms": 312.784, + "int8_ov": "", + "int8_ovms": "" + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 1.64, + "fp32_ovms": 1.718, + "int8_ov": 6.426, + "int8_ovms": 6.258 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "mobilenet-v2", + "featured_SKU": false, + 
"whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 3349.207, + "fp32_ovms": 2904.878, + "int8_ov": 10365.087, + "int8_ovms": 7521.115 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 580.494, + "fp32_ovms": 572.921, + "int8_ov": 2196.814, + "int8_ovms": 2072.444 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 10.627, + "fp32_ovms": 10.524, + "int8_ov": 40.619, + "int8_ovms": 38.733 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 1234.49, + "fp32_ovms": 1203.314, + "int8_ov": 4445.793, + "int8_ovms": 4261.084 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Gold 6238M", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 337.397, + "fp32_ovms": 279.585, + "int8_ov": 758.758, + "int8_ovms": 641.433 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 182.454, + "fp32_ovms": 181.015, + "int8_ov": 487.412, + "int8_ovms": 475.32 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 291.999, + "fp32_ovms": 289.402, + "int8_ov": 485.657, + "int8_ovms": 442.145 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "manual_yolo11", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 437.295, + "fp32_ovms": 354.521, + "int8_ov": "", + "int8_ovms": "" + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 1.889, + "fp32_ovms": 1.961, + "int8_ov": 7.085, + "int8_ovms": 6.985 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® 
Xeon® Platinum 8260M", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 3923.365, + "fp32_ovms": 3332.521, + "int8_ov": 12328.807, + "int8_ovms": 7562.762 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 645.638, + "fp32_ovms": 639.958, + "int8_ov": 2493.033, + "int8_ovms": 2349.919 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 12.196, + "fp32_ovms": 12.091, + "int8_ov": 47.197, + "int8_ovms": 44.379 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 1385.809, + "fp32_ovms": 1374.891, + "int8_ov": 5079.624, + "int8_ovms": 4836.539 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Xeon® Platinum 8260M", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Server Platforms (Intel® Xeon®)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 383.81, + "fp32_ovms": 315.245, + "int8_ov": 858.66, + "int8_ovms": 704.713 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 34.685, + "fp32_ovms": 32.405, + "int8_ov": 100.893, + "int8_ovms": 94.564 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 51.125, + "fp32_ovms": 46.351, + "int8_ov": 141.548, + "int8_ovms": 115.788 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 0.331, + "fp32_ovms": 0.336, + "int8_ov": 1.331, + "int8_ovms": 1.354 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 774.104, + "fp32_ovms": 628.503, + "int8_ov": 2723.303, + "int8_ovms": 1832.886 + } + ], + "Unit": 
"FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 113.363, + "fp32_ovms": 106.029, + "int8_ov": 466.473, + "int8_ovms": 433.532 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 2.047, + "fp32_ovms": 2.047, + "int8_ov": 8.016, + "int8_ovms": 7.886 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 259.504, + "fp32_ovms": 236.341, + "int8_ov": 995.124, + "int8_ovms": 869.518 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 71.24, + "fp32_ovms": 62.319, + "int8_ov": 199.772, + "int8_ovms": 133.145 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 36.251, + "fp32_ovms": 35.465, + "int8_ov": 101.305, + "int8_ovms": 99.151 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 59.247, + "fp32_ovms": 55.459, + "int8_ov": 148.119, + "int8_ovms": 130.171 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 0.37, + "fp32_ovms": 0.388, + "int8_ov": 1.321, + "int8_ovms": 1.332 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 855.526, + "fp32_ovms": 713.553, + "int8_ov": 2745.282, + "int8_ovms": 2129.129 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 115.109, + "fp32_ovms": 112.189, + "int8_ov": 455.027, + 
"int8_ovms": 437.03 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 2.004, + "fp32_ovms": 2.022, + "int8_ov": 7.796, + "int8_ovms": 7.729 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 274.523, + "fp32_ovms": 260.272, + "int8_ov": 966.639, + "int8_ovms": 893.165 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 74.006, + "fp32_ovms": 67.143, + "int8_ov": 204.296, + "int8_ovms": 151.136 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 17.146, + "fp32_ovms": 17.085, + "int8_ov": 26.112, + "int8_ovms": 25.962 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 30.601, + "fp32_ovms": 29.76, + "int8_ov": 49.646, + "int8_ovms": 47.222 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "manual_yolo11", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 43.751, + "fp32_ovms": 38.752, + "int8_ov": "", + "int8_ovms": "" + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 0.148, + "fp32_ovms": 0.18, + "int8_ov": 0.36, + "int8_ovms": 0.39 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 440.453, + "fp32_ovms": 380.439, + "int8_ov": 714.915, + "int8_ovms": 611.391 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 57.896, + "fp32_ovms": 56.88, + "int8_ov": 
117.702, + "int8_ovms": 113.447 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 0.996, + "fp32_ovms": 1.033, + "int8_ov": 1.935, + "int8_ovms": 1.946 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 132.73, + "fp32_ovms": 128.89, + "int8_ov": 266.502, + "int8_ovms": 256.113 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i3-10100", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 38.386, + "fp32_ovms": 34.599, + "int8_ov": 68.072, + "int8_ovms": 55.668 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 34.685, + "fp32_ovms": 33.575, + "int8_ov": 100.893, + "int8_ovms": 96.251 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 51.125, + "fp32_ovms": 47.06, + "int8_ov": 141.548, + "int8_ovms": 117.642 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "manual_yolo11", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 80.399, + "fp32_ovms": 68.631, + "int8_ov": "", + "int8_ovms": "" + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 0.331, + "fp32_ovms": 0.344, + "int8_ov": 1.331, + "int8_ovms": 1.417 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 774.104, + "fp32_ovms": 628.386, + "int8_ov": 2723.303, + "int8_ovms": 1905.703 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 113.363, + "fp32_ovms": 
106.07, + "int8_ov": 466.473, + "int8_ovms": 433.345 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 2.047, + "fp32_ovms": 2.055, + "int8_ov": 8.016, + "int8_ovms": 7.884 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 259.504, + "fp32_ovms": 238.91, + "int8_ov": 995.124, + "int8_ovms": 880.377 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i7-11700K", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 71.24, + "fp32_ovms": 62.386, + "int8_ov": 199.772, + "int8_ovms": 139.345 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "bert-base-cased", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 36.251, + "fp32_ovms": 35.522, + "int8_ov": 101.305, + "int8_ovms": 99.886 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "efficientdet-d0", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 59.247, + "fp32_ovms": 55.715, + "int8_ov": 148.119, + "int8_ovms": 131.749 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "manual_yolo11", + "featured_SKU": false, + "whats_new_model": true, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 85.883, + "fp32_ovms": 76.288, + "int8_ov": "", + "int8_ovms": "" + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "mask_rcnn_resnet50_atrous_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 0.37, + "fp32_ovms": 0.396, + "int8_ov": 1.321, + "int8_ovms": 1.337 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "mobilenet-v2", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 855.526, + "fp32_ovms": 731.031, + "int8_ov": 2745.282, + "int8_ovms": 2154.044 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "resnet-50", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 
115.109, + "fp32_ovms": 112.697, + "int8_ov": 455.027, + "int8_ovms": 439.19 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "ssd-resnet34-1200", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 2.004, + "fp32_ovms": 2.027, + "int8_ov": 7.796, + "int8_ovms": 7.748 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "ssd_mobilenet_v1_coco", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 274.523, + "fp32_ovms": 263.584, + "int8_ov": 966.639, + "int8_ovms": 916.111 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + }, + { + "Platform": "Intel® Core™ i9-11900K", + "Model": "yolo_v8n", + "featured_SKU": false, + "whats_new_model": false, + "PlatformType": "Client Platforms (Intel® Core™)", + "Parameters": { + "throughput": { + "Precisions": [ + { + "fp32_ov": 74.006, + "fp32_ovms": 67.401, + "int8_ov": 204.296, + "int8_ovms": 151.665 + } + ], + "Unit": "FPS", + "UnitDesc": "higher is better" + } + } + } ] \ No newline at end of file From c85e88bb705ba3546de394ba4261bd996c869738 Mon Sep 17 00:00:00 2001 From: Piotr Kowalczyk Date: Tue, 26 Nov 2024 14:43:06 +0100 Subject: [PATCH 62/62] [GPU]: SearchSorted basic implementation. (#27356) Added GPU reference SearchSorted op implementation with unit and func tests. Kernel supports dynamic shapes. ### Details: - Fixed a bug in reference implementation, when sorted had exactly one element. Added tests for that case. 
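- *For context, SearchSorted returns, for each element of `values`, the position at which it would be inserted into the innermost dimension of `sorted` to keep that dimension sorted: `std::lower_bound` semantics by default, or `std::upper_bound` when `right_mode` is set. Below is a minimal host-side 1D sketch of that contract (illustrative only, not the plugin code), including the single-element `sorted` edge case covered by the fix above:*

```cpp
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

// Illustrative 1D equivalent of the SearchSorted contract (names are hypothetical).
std::vector<int64_t> search_sorted_1d(const std::vector<float>& sorted,
                                      const std::vector<float>& values,
                                      bool right_mode) {
    std::vector<int64_t> out;
    out.reserve(values.size());
    for (float v : values) {
        // right_mode == false -> first index with sorted[i] >= v (lower_bound)
        // right_mode == true  -> first index with sorted[i] >  v (upper_bound)
        auto it = right_mode ? std::upper_bound(sorted.begin(), sorted.end(), v)
                             : std::lower_bound(sorted.begin(), sorted.end(), v);
        out.push_back(static_cast<int64_t>(it - sorted.begin()));
    }
    return out;
}

int main() {
    // Regular case: expected indices 1 3 4.
    for (int64_t i : search_sorted_1d({1.f, 3.f, 5.f, 7.f, 9.f}, {2.f, 6.f, 9.f}, false))
        std::cout << i << ' ';
    std::cout << '\n';
    // Edge case from the fix: 'sorted' has exactly one element; expected indices 0 1 1.
    for (int64_t i : search_sorted_1d({5.f}, {3.f, 5.f, 7.f}, true))
        std::cout << i << ' ';
    std::cout << '\n';
}
```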
### Tickets: - CVS-156238 --- .../openvino/reference/search_sorted.hpp | 12 +- .../intel_gpu/plugin/primitives_list.hpp | 1 + .../intel_gpu/primitives/search_sorted.hpp | 54 +++++++ .../src/graph/impls/ocl/register.cpp | 1 + .../src/graph/impls/ocl/register.hpp | 1 + .../src/graph/impls/ocl/search_sorted.cpp | 107 +++++++++++++ .../src/graph/impls/registry/registry.hpp | 1 + .../src/graph/include/search_sorted_inst.h | 46 ++++++ .../intel_gpu/src/graph/search_sorted.cpp | 59 +++++++ .../cl_kernels/search_sorted_ref.cl | 56 +++++++ .../src/kernel_selector/common_types.h | 3 +- .../search_sorted_kernel_base.cpp | 72 +++++++++ .../search_sorted/search_sorted_kernel_base.h | 34 ++++ .../search_sorted_kernel_ref.cpp | 45 ++++++ .../search_sorted/search_sorted_kernel_ref.h | 18 +++ .../search_sorted_kernel_selector.cpp | 14 ++ .../search_sorted_kernel_selector.h | 21 +++ .../src/plugin/ops/search_sorted.cpp | 25 +++ .../intel_gpu/src/plugin/program_builder.cpp | 7 + .../single_layer_tests/search_sorted.cpp | 18 +++ .../test_cases/search_sorted_gpu_test.cpp | 148 ++++++++++++++++++ .../src/single_op/search_sorted.cpp | 19 +++ .../tests_data/search_sorted_data.h | 40 ++++- 23 files changed, 790 insertions(+), 12 deletions(-) create mode 100644 src/plugins/intel_gpu/include/intel_gpu/primitives/search_sorted.hpp create mode 100644 src/plugins/intel_gpu/src/graph/impls/ocl/search_sorted.cpp create mode 100644 src/plugins/intel_gpu/src/graph/include/search_sorted_inst.h create mode 100644 src/plugins/intel_gpu/src/graph/search_sorted.cpp create mode 100644 src/plugins/intel_gpu/src/kernel_selector/cl_kernels/search_sorted_ref.cl create mode 100644 src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.cpp create mode 100644 src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.h create mode 100644 src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.cpp create mode 100644 src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.h create mode 100644 src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.cpp create mode 100644 src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.h create mode 100644 src/plugins/intel_gpu/src/plugin/ops/search_sorted.cpp create mode 100644 src/plugins/intel_gpu/tests/functional/shared_tests_instances/single_layer_tests/search_sorted.cpp create mode 100644 src/plugins/intel_gpu/tests/unit/test_cases/search_sorted_gpu_test.cpp diff --git a/src/core/reference/include/openvino/reference/search_sorted.hpp b/src/core/reference/include/openvino/reference/search_sorted.hpp index 7ea8ec1078a2a1..629509b28ef78d 100644 --- a/src/core/reference/include/openvino/reference/search_sorted.hpp +++ b/src/core/reference/include/openvino/reference/search_sorted.hpp @@ -32,6 +32,7 @@ void search_sorted(const T* sorted, } const size_t size = shape_size(values_shape); + const size_t sorted_inner_dim = sorted_shape.back(); auto func = [&](size_t i) { auto it = values_transform.begin(); @@ -44,15 +45,12 @@ void search_sorted(const T* sorted, Coordinate sorted_coord_begin = values_coord; sorted_coord_begin.back() = 0; - Coordinate sorted_coord_last = values_coord; - sorted_coord_last.back() = sorted_shape.back(); - const auto sorted_index_begin = coordinate_index(sorted_coord_begin, sorted_shape); - const auto sorted_index_last = coordinate_index(sorted_coord_last, sorted_shape); 
- - const T* idx_ptr = compare_func(sorted + sorted_index_begin, sorted + sorted_index_last, value); + const T* sorted_begin_ptr = sorted + sorted_index_begin; + const T* sorted_end_ptr = sorted_begin_ptr + sorted_inner_dim; + const T* idx_ptr = compare_func(sorted_begin_ptr, sorted_end_ptr, value); - const ptrdiff_t sorted_index = (idx_ptr - sorted) - sorted_index_begin; + const ptrdiff_t sorted_index = idx_ptr - sorted_begin_ptr; out[values_index] = static_cast(sorted_index); }; diff --git a/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp b/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp index ced915d25610e8..e234bc68de0750 100644 --- a/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp +++ b/src/plugins/intel_gpu/include/intel_gpu/plugin/primitives_list.hpp @@ -272,6 +272,7 @@ REGISTER_FACTORY(v13, BitwiseXor); REGISTER_FACTORY(v15, ROIAlignRotated); REGISTER_FACTORY(v15, BitwiseRightShift); REGISTER_FACTORY(v15, BitwiseLeftShift); +REGISTER_FACTORY(v15, SearchSorted); // --------------------------- Supported internal ops --------------------------- // REGISTER_FACTORY(internal, NonMaxSuppressionIEInternal); diff --git a/src/plugins/intel_gpu/include/intel_gpu/primitives/search_sorted.hpp b/src/plugins/intel_gpu/include/intel_gpu/primitives/search_sorted.hpp new file mode 100644 index 00000000000000..4dfb5c87f8c58c --- /dev/null +++ b/src/plugins/intel_gpu/include/intel_gpu/primitives/search_sorted.hpp @@ -0,0 +1,54 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once +#include "primitive.hpp" + +namespace cldnn { + +/// @brief +/// @details +struct search_sorted : public primitive_base { + CLDNN_DECLARE_PRIMITIVE(search_sorted) + + search_sorted() : primitive_base("", {}) {} + + /// @brief Constructs search_sorted primitive. + /// @param id This primitive id. + /// @param sorted Sorted input. + /// @param values Values input. + /// @param right_mode Enable/Disable right mode(check specification for details).. + search_sorted(const primitive_id& id, const input_info& sorted, const input_info& values, bool right_mode) + : primitive_base(id, {sorted, values}), + right_mode(right_mode) {} + + /// @brief Enable/Disable right mode(check specification for details). 
+ bool right_mode = false; + + size_t hash() const override { + size_t seed = primitive::hash(); + seed = hash_combine(seed, right_mode); + return seed; + } + + bool operator==(const primitive& rhs) const override { + if (!compare_common_params(rhs)) + return false; + + auto rhs_casted = downcast(rhs); + + return right_mode == rhs_casted.right_mode; + } + + void save(BinaryOutputBuffer& ob) const override { + primitive_base::save(ob); + ob << right_mode; + } + + void load(BinaryInputBuffer& ib) override { + primitive_base::load(ib); + ib >> right_mode; + } +}; +} // namespace cldnn diff --git a/src/plugins/intel_gpu/src/graph/impls/ocl/register.cpp b/src/plugins/intel_gpu/src/graph/impls/ocl/register.cpp index 2597e419e66a41..7f2fab7a6d1581 100644 --- a/src/plugins/intel_gpu/src/graph/impls/ocl/register.cpp +++ b/src/plugins/intel_gpu/src/graph/impls/ocl/register.cpp @@ -88,6 +88,7 @@ void register_implementations() { REGISTER_OCL(unique_gather); REGISTER_OCL(scaled_dot_product_attention); REGISTER_OCL(rope); + REGISTER_OCL(search_sorted); } } // namespace ocl diff --git a/src/plugins/intel_gpu/src/graph/impls/ocl/register.hpp b/src/plugins/intel_gpu/src/graph/impls/ocl/register.hpp index d4b08b5154ef4b..0a605945fcf6cc 100644 --- a/src/plugins/intel_gpu/src/graph/impls/ocl/register.hpp +++ b/src/plugins/intel_gpu/src/graph/impls/ocl/register.hpp @@ -162,6 +162,7 @@ REGISTER_OCL(unique_count); REGISTER_OCL(unique_gather); REGISTER_OCL(scaled_dot_product_attention); REGISTER_OCL(rope); +REGISTER_OCL(search_sorted); #undef REGISTER_OCL diff --git a/src/plugins/intel_gpu/src/graph/impls/ocl/search_sorted.cpp b/src/plugins/intel_gpu/src/graph/impls/ocl/search_sorted.cpp new file mode 100644 index 00000000000000..4243d75b5c7367 --- /dev/null +++ b/src/plugins/intel_gpu/src/graph/impls/ocl/search_sorted.cpp @@ -0,0 +1,107 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "primitive_base.hpp" +#include "search_sorted/search_sorted_kernel_base.h" +#include "search_sorted/search_sorted_kernel_selector.h" +#include "search_sorted_inst.h" + +namespace cldnn { +namespace ocl { + +struct search_sorted_impl : typed_primitive_impl_ocl { + using parent = typed_primitive_impl_ocl; + using parent::parent; + using kernel_selector_t = kernel_selector::search_sorted_kernel_selector; + using kernel_params_t = kernel_selector::search_sorted_params; + + DECLARE_OBJECT_TYPE_SERIALIZATION(cldnn::ocl::search_sorted_impl) + + std::unique_ptr clone() const override { + return make_unique(*this); + } + + void load(BinaryInputBuffer& ib) override { + parent::load(ib); + if (is_dynamic()) { + auto& kernel_selector = kernel_selector_t::Instance(); + auto kernel_impl = kernel_selector.GetImplementation(_kernel_data.kernelName); + kernel_impl->GetUpdateDispatchDataFunc(_kernel_data); + } + } + + void update_dispatch_data(const kernel_impl_params& impl_param) override { + // If model loaded from cache, params are not initialized, so we create a new object and reuse it in the future + if (_kernel_data.params == nullptr) { + _kernel_data.params = std::make_shared(get_kernel_params(impl_param, true)); + } + + update_shapes(*_kernel_data.params, impl_param); + (_kernel_data.update_dispatch_data_func)(*_kernel_data.params, _kernel_data); + } + + static kernel_params_t get_kernel_params(const kernel_impl_params& impl_param, bool shape_agnostic = false) { + const auto& primitive = impl_param.typed_desc(); + auto params = get_default_params(impl_param, shape_agnostic); + + // 
Manually add all inputs except first one, since get_default_params does not handle it. + for (size_t i = 1; i < impl_param.input_layouts.size(); ++i) { + params.inputs.push_back(convert_data_tensor(impl_param.get_input_layout(i))); + } + + params.right_mode = primitive->right_mode; + return params; + } + + // [NOTE]: Has to be added as a separete static function, since it is called via static dispatching in + // typed_primitive_impl_ocl::create().. + static kernel_impl_params static_canonicalize_shapes(const kernel_impl_params& impl_params) { + auto updated_impl_params = canonicalize_fused_shapes(impl_params); + + for (auto& input_layout : updated_impl_params.input_layouts) { + input_layout.set_partial_shape(extend_shape_to_rank_from_begin(input_layout.get_partial_shape())); + } + + for (auto& output_layout : updated_impl_params.output_layouts) { + output_layout.set_partial_shape(extend_shape_to_rank_from_begin(output_layout.get_partial_shape())); + } + + return updated_impl_params; + } + + kernel_impl_params canonicalize_shapes(const kernel_impl_params& impl_params) const override { + return static_canonicalize_shapes(impl_params); + } +}; + +namespace detail { + +attach_search_sorted_impl::attach_search_sorted_impl() { + auto types = { + data_types::i8, + data_types::u8, + data_types::i16, + data_types::u16, + data_types::i32, + data_types::u32, + data_types::i64, + data_types::f16, + data_types::f32, + }; + + auto formats = {format::bfyx, format::bfzyx}; + + implementation_map::add(impl_types::ocl, + shape_types::any, + typed_primitive_impl_ocl::create, + types, + formats); +} + +} // namespace detail +} // namespace ocl +} // namespace cldnn + +BIND_BINARY_BUFFER_WITH_TYPE(cldnn::ocl::search_sorted_impl) +BIND_BINARY_BUFFER_WITH_TYPE(cldnn::search_sorted) diff --git a/src/plugins/intel_gpu/src/graph/impls/registry/registry.hpp b/src/plugins/intel_gpu/src/graph/impls/registry/registry.hpp index a6bb8ad6eebcc2..77c4262a7513cc 100644 --- a/src/plugins/intel_gpu/src/graph/impls/registry/registry.hpp +++ b/src/plugins/intel_gpu/src/graph/impls/registry/registry.hpp @@ -214,3 +214,4 @@ REGISTER_DEFAULT_IMPLS(unique_count, OCL_S, OCL_D); REGISTER_DEFAULT_IMPLS(unique_gather, OCL_S, OCL_D); REGISTER_DEFAULT_IMPLS(scaled_dot_product_attention, OCL_S, OCL_D); REGISTER_DEFAULT_IMPLS(rope, OCL_S, OCL_D); +REGISTER_DEFAULT_IMPLS(search_sorted, OCL_S, OCL_D); diff --git a/src/plugins/intel_gpu/src/graph/include/search_sorted_inst.h b/src/plugins/intel_gpu/src/graph/include/search_sorted_inst.h new file mode 100644 index 00000000000000..50ffdf8112e2ae --- /dev/null +++ b/src/plugins/intel_gpu/src/graph/include/search_sorted_inst.h @@ -0,0 +1,46 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// +#pragma once + +#include + +#include "primitive_inst.h" + +namespace cldnn { + +template <> +struct typed_program_node : public typed_program_node_base { + using parent = typed_program_node_base; + typed_program_node(const std::shared_ptr prim, program& prog) : parent(prim, prog) {} + +public: + using parent::parent; + + program_node& input(size_t idx = 0) const { + return get_dependency(idx); + } + std::vector get_shape_infer_dependencies() const override { + return {}; + } +}; + +using search_sorted_node = typed_program_node; + +template <> +class typed_primitive_inst : public typed_primitive_inst_base { + using parent = typed_primitive_inst_base; + using parent::parent; + +public: + typed_primitive_inst(network& network, search_sorted_node const& desc); + template 
+ static std::vector calc_output_layouts(search_sorted_node const& node, + kernel_impl_params const& impl_param); + static layout calc_output_layout(search_sorted_node const& node, kernel_impl_params const& impl_param); + static std::string to_string(search_sorted_node const& node); +}; + +using search_sorted_inst = typed_primitive_inst; + +} // namespace cldnn diff --git a/src/plugins/intel_gpu/src/graph/search_sorted.cpp b/src/plugins/intel_gpu/src/graph/search_sorted.cpp new file mode 100644 index 00000000000000..761b6751ace3b7 --- /dev/null +++ b/src/plugins/intel_gpu/src/graph/search_sorted.cpp @@ -0,0 +1,59 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include + +#include + +#include "openvino/core/enum_names.hpp" +#include "primitive_type_base.h" +#include "search_sorted_shape_inference.hpp" + +namespace cldnn { +GPU_DEFINE_PRIMITIVE_TYPE_ID(search_sorted) + +search_sorted_inst::typed_primitive_inst(network& network, search_sorted_node const& node) : parent(network, node) {} + +layout search_sorted_inst::calc_output_layout(search_sorted_node const& node, kernel_impl_params const& impl_param) { + return calc_output_layouts(node, impl_param)[0]; +} + +template +std::vector search_sorted_inst::calc_output_layouts(search_sorted_node const& node, + kernel_impl_params const& impl_param) { + auto primitive = impl_param.typed_desc(); + + auto input0_layout = impl_param.get_input_layout(0); + auto input1_layout = impl_param.get_input_layout(1); + + const data_types output_type = impl_param.desc->output_data_types[0].value_or(data_types::i64); + + std::vector input_shapes = { + input0_layout.get(), // sorted shape + input1_layout.get(), // values shape + }; + + std::vector output_shapes; + + ov::op::v15::SearchSorted op; + op.set_right_mode(primitive->right_mode); + output_shapes = shape_infer(&op, input_shapes); + + return {layout{output_shapes[0], output_type, input1_layout.format}}; +} + +std::string search_sorted_inst::to_string(search_sorted_node const& node) { + auto node_info = node.desc_to_json(); + json_composite search_sorted_info; + search_sorted_info.add("sorted id", node.input(0).id()); + search_sorted_info.add("values id", node.input(1).id()); + search_sorted_info.add("right_mode", node.get_primitive()->right_mode); + node_info->add("search_sorted info", search_sorted_info); + std::stringstream primitive_description; + node_info->dump(primitive_description); + return primitive_description.str(); +} + +} // namespace cldnn \ No newline at end of file diff --git a/src/plugins/intel_gpu/src/kernel_selector/cl_kernels/search_sorted_ref.cl b/src/plugins/intel_gpu/src/kernel_selector/cl_kernels/search_sorted_ref.cl new file mode 100644 index 00000000000000..b9e26405688f12 --- /dev/null +++ b/src/plugins/intel_gpu/src/kernel_selector/cl_kernels/search_sorted_ref.cl @@ -0,0 +1,56 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "include/batch_headers/fetch_data.cl" + +#if RIGHT_MODE == 0 +#define CMP <= +#else +#define CMP < +#endif + +OUTPUT_TYPE FUNC(binary_search_thread)(const INPUT0_TYPE search_val, + const __global INPUT0_TYPE* restrict sorted, + OUTPUT_TYPE sorted_begin_idx, + OUTPUT_TYPE sorted_end_idx) { + while(sorted_begin_idx != sorted_end_idx) { + const OUTPUT_TYPE half_offset = (sorted_end_idx-sorted_begin_idx)/2; + const OUTPUT_TYPE half_idx = sorted_begin_idx+half_offset; + const INPUT0_TYPE half_val = sorted[half_idx]; + if ( search_val CMP half_val ) + 
sorted_end_idx = half_idx; + else + sorted_begin_idx = half_idx + 1; + } + + return sorted_begin_idx; +} + +#undef CMP + +KERNEL(search_sorted_ref)( + OPTIONAL_SHAPE_INFO_ARG + const __global INPUT0_TYPE* restrict sorted, + const __global INPUT1_TYPE* restrict values, + __global OUTPUT_TYPE* restrict output) +{ + // INPUT0_TYPE has to be egual to INPUT1_TYPE + const int this_thread_idx = get_global_id(0); + const INPUT0_TYPE search_val = values[this_thread_idx]; + + const int SORTED_STRIDE = INPUT0_BATCH_NUM*INPUT0_FEATURE_NUM*INPUT0_SIZE_Y*INPUT0_SIZE_Z; + + // NOTE: SORTED_STRIDE-1 handles here a special case when sorted is actually 1D + // tensor and values is ND tensor. In such case we effectively want sorted_offset + // to be 0. + const int sorted_offset = min(this_thread_idx/INPUT1_SIZE_X, SORTED_STRIDE-1); + + OUTPUT_TYPE sorted_begin_idx = sorted_offset * INPUT0_SIZE_X; + const OUTPUT_TYPE idx = FUNC_CALL(binary_search_thread)(search_val, + sorted + sorted_begin_idx, + 0, + INPUT0_SIZE_X); + + output[this_thread_idx] = idx; +} \ No newline at end of file diff --git a/src/plugins/intel_gpu/src/kernel_selector/common_types.h b/src/plugins/intel_gpu/src/kernel_selector/common_types.h index bc9cc9f5b8da07..37139dbaeeffd2 100644 --- a/src/plugins/intel_gpu/src/kernel_selector/common_types.h +++ b/src/plugins/intel_gpu/src/kernel_selector/common_types.h @@ -101,7 +101,8 @@ enum class KernelType { RMS, SWIGLU, ROPE, - DYNAMIC_QUANTIZE + DYNAMIC_QUANTIZE, + SEARCH_SORTED }; //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.cpp b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.cpp new file mode 100644 index 00000000000000..ce4527a1f93aa7 --- /dev/null +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.cpp @@ -0,0 +1,72 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "search_sorted_kernel_base.h" + +#include + +#include "kernel_selector_utils.h" + +namespace kernel_selector { +JitConstants SearchSortedKernelBase::GetJitConstants(const search_sorted_params& params) const { + JitConstants jit = MakeBaseParamsJitConstants(params); + + jit.AddConstants({MakeJitConstant("RIGHT_MODE", params.right_mode)}); + + return jit; +} + +void SearchSortedKernelBase::GetUpdateDispatchDataFunc(KernelData& kd) const { + kd.update_dispatch_data_func = [](const Params& params, KernelData& kd) { + const auto& prim_params = static_cast(params); + auto dispatchData = SetDefault(prim_params); + OPENVINO_ASSERT(kd.kernels.size() == 1, "[GPU] Invalid kernels size for update dispatch data func"); + kd.kernels[0].params.workGroups.global = dispatchData.gws; + kd.kernels[0].params.workGroups.local = dispatchData.lws; + kd.kernels[0].skip_execution = KernelData::SkipKernelExecution(prim_params); + }; +} + +SearchSortedKernelBase::DispatchData SearchSortedKernelBase::SetDefault(const search_sorted_params& params) { + DispatchData dispatchData; + dispatchData.gws[0] = params.outputs[0].LogicalSize(); + dispatchData.gws[1] = 1; + dispatchData.gws[2] = 1; + dispatchData.lws = GetOptimalLocalWorkGroupSizes(dispatchData.gws, params.engineInfo); + + return dispatchData; +} + +KernelsData SearchSortedKernelBase::GetCommonKernelsData(const Params& params) const { + assert(params.GetType() == KernelType::SEARCH_SORTED); 
+ + const auto& prim_params = static_cast<const search_sorted_params&>(params); + + auto dispatchData = SetDefault(prim_params); + KernelData k_data = KernelData::Default<search_sorted_params>(params); + + auto cldnn_jit = GetJitConstants(prim_params); + auto entry_point = GetEntryPoint(kernelName, prim_params.layerID, params); + auto jit = CreateJit(kernelName, cldnn_jit, entry_point); + + GetUpdateDispatchDataFunc(k_data); + + auto& kernel = k_data.kernels[0]; + FillCLKernelData(kernel, + dispatchData, + params.engineInfo, + kernelName, + jit, + entry_point, + "", + false, + false, + 2, + GetFusedPrimitiveInputsCount(params), + 1, + prim_params.is_shape_agnostic); + + return {k_data}; +} +} // namespace kernel_selector diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.h b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.h new file mode 100644 index 00000000000000..734229b6645fd6 --- /dev/null +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_base.h @@ -0,0 +1,34 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "kernel_base_opencl.h" +#include "kernel_selector_params.h" + +namespace kernel_selector { +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// search_sorted +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +struct search_sorted_params : public base_params { + search_sorted_params() : base_params(KernelType::SEARCH_SORTED), right_mode(false) {} + bool right_mode; +}; + +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// SearchSortedKernelBase +//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +class SearchSortedKernelBase : public KernelBaseOpenCL { +public: + using KernelBaseOpenCL::KernelBaseOpenCL; + + using DispatchData = CommonDispatchData; + +protected: + JitConstants GetJitConstants(const search_sorted_params& params) const; + static DispatchData SetDefault(const search_sorted_params& params); + KernelsData GetCommonKernelsData(const Params& params) const; + void GetUpdateDispatchDataFunc(KernelData& kd) const override; +}; +} // namespace kernel_selector diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.cpp b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.cpp new file mode 100644 index 00000000000000..5bbd22f24ebfec --- /dev/null +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.cpp @@ -0,0 +1,45 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "search_sorted_kernel_ref.h" + +namespace kernel_selector { +ParamsKey SearchSortedKernelRef::GetSupportedKey() const { + ParamsKey k; + + k.EnableInputDataType(Datatype::INT8); + k.EnableInputDataType(Datatype::UINT8); + k.EnableInputDataType(Datatype::INT16); + k.EnableInputDataType(Datatype::UINT16); + k.EnableInputDataType(Datatype::INT32); + k.EnableInputDataType(Datatype::UINT32); + k.EnableInputDataType(Datatype::INT64); + k.EnableInputDataType(Datatype::F32); + k.EnableInputDataType(Datatype::F16); + + k.EnableOutputDataType(Datatype::INT32); + k.EnableOutputDataType(Datatype::INT64); + + 
k.EnableInputLayout(DataLayout::bfyx); + k.EnableInputLayout(DataLayout::bfzyx); + + k.EnableOutputLayout(DataLayout::bfyx); + k.EnableOutputLayout(DataLayout::bfzyx); + + k.EnableTensorOffset(); + k.EnableTensorPitches(); + k.EnableBatching(); + k.EnableDifferentTypes(); + k.EnableDynamicShapesSupport(); + return k; +} + +KernelsData SearchSortedKernelRef::GetKernelsData(const Params& params) const { + return GetCommonKernelsData(params); +} + +KernelsPriority SearchSortedKernelRef::GetKernelsPriority(const Params& /*params*/) const { + return FORCE_PRIORITY_9; +} +} // namespace kernel_selector diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.h b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.h new file mode 100644 index 00000000000000..bc7738013c4867 --- /dev/null +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_ref.h @@ -0,0 +1,18 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "search_sorted_kernel_base.h" + +namespace kernel_selector { +class SearchSortedKernelRef : public SearchSortedKernelBase { +public: + SearchSortedKernelRef() : SearchSortedKernelBase("search_sorted_ref") {} + + KernelsData GetKernelsData(const Params& params) const override; + KernelsPriority GetKernelsPriority(const Params& params) const override; + ParamsKey GetSupportedKey() const override; +}; +} // namespace kernel_selector diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.cpp b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.cpp new file mode 100644 index 00000000000000..b83c4d09fd56dd --- /dev/null +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.cpp @@ -0,0 +1,14 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "search_sorted_kernel_selector.h" +#include "search_sorted_kernel_ref.h" + +namespace kernel_selector { +search_sorted_kernel_selector::search_sorted_kernel_selector() { Attach<SearchSortedKernelRef>(); } + +KernelsData search_sorted_kernel_selector::GetBestKernels(const Params& params) const { + return GetNaiveBestKernel(params, KernelType::SEARCH_SORTED); +} +} // namespace kernel_selector diff --git a/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.h b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.h new file mode 100644 index 00000000000000..25f9a30fb0d895 --- /dev/null +++ b/src/plugins/intel_gpu/src/kernel_selector/kernels/search_sorted/search_sorted_kernel_selector.h @@ -0,0 +1,21 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#pragma once + +#include "kernel_selector.h" + +namespace kernel_selector { +class search_sorted_kernel_selector : public kernel_selector_base { +public: + static search_sorted_kernel_selector& Instance() { + static search_sorted_kernel_selector instance; + return instance; + } + + search_sorted_kernel_selector(); + + KernelsData GetBestKernels(const Params& params) const override; +}; +} // namespace kernel_selector diff --git a/src/plugins/intel_gpu/src/plugin/ops/search_sorted.cpp b/src/plugins/intel_gpu/src/plugin/ops/search_sorted.cpp new file mode 100644 index 00000000000000..dbb4fecbd66ab5 --- /dev/null +++ 
b/src/plugins/intel_gpu/src/plugin/ops/search_sorted.cpp @@ -0,0 +1,25 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "openvino/op/search_sorted.hpp" + +#include "intel_gpu/plugin/common_utils.hpp" +#include "intel_gpu/plugin/program_builder.hpp" +#include "intel_gpu/primitives/search_sorted.hpp" + +namespace ov { +namespace intel_gpu { + +static void CreateSearchSortedOp(ProgramBuilder& p, const std::shared_ptr<ov::op::v15::SearchSorted>& op) { + validate_inputs_count(op, {2}); + auto inputs = p.GetInputInfo(op); + auto prim = cldnn::search_sorted(layer_type_name_ID(op), inputs[0], inputs[1], op->get_right_mode()); + prim.output_data_types = get_output_data_types(op, {{ov::element::i64, ov::element::i32}}); + p.add_primitive(*op, prim); +} + +REGISTER_FACTORY_IMPL(v15, SearchSorted); + +} // namespace intel_gpu +} // namespace ov diff --git a/src/plugins/intel_gpu/src/plugin/program_builder.cpp b/src/plugins/intel_gpu/src/plugin/program_builder.cpp index 899110872ba633..a2316270e1ef3a 100644 --- a/src/plugins/intel_gpu/src/plugin/program_builder.cpp +++ b/src/plugins/intel_gpu/src/plugin/program_builder.cpp @@ -8,6 +8,7 @@ #include "openvino/op/variadic_split.hpp" #include "openvino/op/lstm_cell.hpp" #include "openvino/op/loop.hpp" +#include "openvino/op/search_sorted.hpp" #include "intel_gpu/plugin/common_utils.hpp" #include "intel_gpu/plugin/program_builder.hpp" @@ -349,6 +350,12 @@ bool ProgramBuilder::requires_new_shape_infer(const std::shared_ptr<ov::Node>& o return true; } + // HACK: SearchSorted has specific shape requirements. + // E.g. static input shapes: sorted:[8], values:[2,3,4] are perfectly fine, + // but sorted:[8,1,1,1], values:[2,3,4,1] is not valid. + if (ov::is_type<ov::op::v15::SearchSorted>(op)) + return true; + + if (ov::is_type<ov::op::v5::Loop>(op)) { const auto body_function = std::static_pointer_cast<ov::op::v5::Loop>(op)->get_function(); if (body_function->is_dynamic()) diff --git a/src/plugins/intel_gpu/tests/functional/shared_tests_instances/single_layer_tests/search_sorted.cpp b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/single_layer_tests/search_sorted.cpp new file mode 100644 index 00000000000000..0117463880a607 --- /dev/null +++ b/src/plugins/intel_gpu/tests/functional/shared_tests_instances/single_layer_tests/search_sorted.cpp @@ -0,0 +1,18 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include "single_op_tests/search_sorted.hpp" + +namespace ov { +namespace test { + +INSTANTIATE_TEST_SUITE_P(smoke_SearchSortedTest, + SearchSortedLayerTest, + ::testing::Combine(::testing::ValuesIn(SearchSortedLayerTest::GenerateParams()), + testing::Values(ElementType::f32, ElementType::f16, ElementType::i64, ElementType::u32), + testing::Values(ov::test::utils::DEVICE_GPU)), + SearchSortedLayerTest::getTestCaseName); + +} // namespace test +} // namespace ov diff --git a/src/plugins/intel_gpu/tests/unit/test_cases/search_sorted_gpu_test.cpp b/src/plugins/intel_gpu/tests/unit/test_cases/search_sorted_gpu_test.cpp new file mode 100644 index 00000000000000..f9dfa0aeb0fc2b --- /dev/null +++ b/src/plugins/intel_gpu/tests/unit/test_cases/search_sorted_gpu_test.cpp @@ -0,0 +1,148 @@ +// Copyright (C) 2018-2024 Intel Corporation +// SPDX-License-Identifier: Apache-2.0 +// + +#include +#include +#include + +#include "test_utils.h" + +using namespace cldnn; +using namespace ::tests; + +namespace { + +constexpr float EPS = 2e-3f; + +namespace helpers { +// TODO: Move to common place. + +// Converts a float vector to a vector of another type. 
+template <typename T> +std::vector<T> ConverFloatVector(const std::vector<float>& vec) { + std::vector<T> ret; + ret.reserve(vec.size()); + for (const auto& val : vec) { + ret.push_back(T(val)); + } + return ret; +} + +// Allocates a tensor with the given shape and data. +template <typename T> +memory::ptr AllocateTensor(ov::PartialShape shape, const std::vector<T>& data) { + const layout lo = {shape, ov::element::from<T>(), cldnn::format::bfyx}; + EXPECT_EQ(lo.get_linear_size(), data.size()); + memory::ptr tensor = get_test_engine().allocate_memory(lo); + set_values(tensor, data); + return tensor; +} +} // namespace helpers + +struct SearchSortedTestParams { + ov::PartialShape sortedShape; + ov::PartialShape valuesShape; + bool rightMode; + std::vector<float> sortedData; + std::vector<float> valuesData; + std::vector<int64_t> expectedOutput; + std::string testcaseName; +}; + +class search_sorted_test : public ::testing::TestWithParam<SearchSortedTestParams> { public: + static std::string getTestCaseName(const testing::TestParamInfo<SearchSortedTestParams>& obj) { + auto param = obj.param; + std::ostringstream result; + result << "sortedShape=" << param.sortedShape; + result << "_valuesShape=" << param.valuesShape; + result << "_rightMode=" << param.rightMode; + result << "_" << param.testcaseName; + return result.str(); + } + + struct SearchSortedInferenceParams { + bool rightMode; + memory::ptr sorted; + memory::ptr values; + memory::ptr expectedOutput; + }; + + template <ov::element::Type_t ET> + SearchSortedInferenceParams PrepareInferenceParams(const SearchSortedTestParams& testParam) { + using T = typename ov::element_type_traits<ET>::value_type; + SearchSortedInferenceParams ret; + + ret.rightMode = testParam.rightMode; + + ret.sorted = + helpers::AllocateTensor(testParam.sortedShape, helpers::ConverFloatVector<T>(testParam.sortedData)); + ret.values = + helpers::AllocateTensor(testParam.valuesShape, helpers::ConverFloatVector<T>(testParam.valuesData)); + ret.expectedOutput = helpers::AllocateTensor(testParam.valuesShape, testParam.expectedOutput); + + return ret; + } + + void Execute(const SearchSortedInferenceParams& params) { + // Prepare the network. + auto stream = get_test_stream_ptr(get_test_default_config(engine_)); + + topology topology; + topology.add(input_layout("sorted", params.sorted->get_layout())); + topology.add(input_layout("values", params.values->get_layout())); + topology.add(search_sorted("search_sorted", input_info("sorted"), input_info("values"), params.rightMode)); + + cldnn::network::ptr network = get_network(engine_, topology, get_test_default_config(engine_), stream, false); + + network->set_input_data("sorted", params.sorted); + network->set_input_data("values", params.values); + + // Run and check results. 
+ auto outputs = network->execute(); + + auto output = outputs.at("search_sorted").get_memory(); + cldnn::mem_lock<int64_t> output_ptr(output, get_test_stream()); + cldnn::mem_lock<int64_t> wanted_output_ptr(params.expectedOutput, get_test_stream()); + + ASSERT_EQ(output->get_layout(), params.expectedOutput->get_layout()); + ASSERT_EQ(output_ptr.size(), wanted_output_ptr.size()); + for (size_t i = 0; i < output_ptr.size(); ++i) + ASSERT_TRUE(are_equal(wanted_output_ptr[i], output_ptr[i], EPS)); + } + +private: + engine& engine_ = get_test_engine(); +}; + +std::vector<SearchSortedTestParams> generateTestParams() { + std::vector<SearchSortedTestParams> params; +#define TEST_DATA(sorted_shape, values_shape, right_mode, sorted_data, values_data, expected_output_data, description) \ + params.push_back(SearchSortedTestParams{sorted_shape, \ + values_shape, \ + right_mode, \ + sorted_data, \ + values_data, \ + expected_output_data, \ + description}); + +#include "unit_test_utils/tests_data/search_sorted_data.h" +#undef TEST_DATA + return params; +} + +} // namespace + +#define SEARCH_SORTED_TEST_P(precision) \ + TEST_P(search_sorted_test, ref_comp_##precision) { \ + Execute(PrepareInferenceParams<ov::element::Type_t::precision>(GetParam())); \ + } + +SEARCH_SORTED_TEST_P(f16); +SEARCH_SORTED_TEST_P(u8); + +INSTANTIATE_TEST_SUITE_P(search_sorted_test_suit, + search_sorted_test, + testing::ValuesIn(generateTestParams()), + search_sorted_test::getTestCaseName); diff --git a/src/tests/functional/shared_test_classes/src/single_op/search_sorted.cpp b/src/tests/functional/shared_test_classes/src/single_op/search_sorted.cpp index a92d87d51f9a10..c7c10ad8767ff6 100644 --- a/src/tests/functional/shared_test_classes/src/single_op/search_sorted.cpp +++ b/src/tests/functional/shared_test_classes/src/single_op/search_sorted.cpp @@ -88,11 +88,30 @@ void SearchSortedLayerTest::SetUp() { const std::vector<SearchSortedSpecificParams> SearchSortedLayerTest::GenerateParams() { const std::vector<SearchSortedSpecificParams> params = { + SearchSortedSpecificParams{InputShape{PartialShape::dynamic(3), {{1, 18, 104}}}, + InputShape{PartialShape::dynamic(3), {{1, 18, 104}}}, + true}, + SearchSortedSpecificParams{InputShape{PartialShape::dynamic(4), {{1, 2, 3, 100}}}, + InputShape{PartialShape::dynamic(4), {{1, 2, 3, 10}}}, + true}, + SearchSortedSpecificParams{InputShape{PartialShape::dynamic(5), {{2, 1, 2, 3, 10}}}, + InputShape{PartialShape::dynamic(5), {{2, 1, 2, 3, 20}}}, + false}, + SearchSortedSpecificParams{InputShape{PartialShape::dynamic(1), {{1}}}, + InputShape{PartialShape::dynamic(5), {{2, 1, 2, 3, 20}}}, + false}, + SearchSortedSpecificParams{InputShape{PartialShape::dynamic(1), {{50}}}, + InputShape{{1, -1, 10}, {{1, 18, 10}}}, + false}, SearchSortedSpecificParams{InputShape{{}, {{1, 18, 104}}}, InputShape{{}, {{1, 18, 104}}}, true}, SearchSortedSpecificParams{InputShape{{}, {{1, 2, 3, 100}}}, InputShape{{}, {{1, 2, 3, 10}}}, true}, SearchSortedSpecificParams{InputShape{{}, {{2, 1, 2, 3, 10}}}, InputShape{{}, {{2, 1, 2, 3, 20}}}, false}, SearchSortedSpecificParams{InputShape{{}, {{1}}}, InputShape{{}, {{2, 1, 2, 3, 20}}}, false}, SearchSortedSpecificParams{InputShape{{}, {{50}}}, InputShape{{1, -1, 10}, {{1, 18, 10}}}, false}, + SearchSortedSpecificParams{InputShape{{2, -1, 50}, {{2, 3, 50}}}, + InputShape{{-1, -1, 10}, {{2, 3, 10}}}, + false}, + SearchSortedSpecificParams{InputShape{{2, -1, 50}, {{2, 3, 50}}}, InputShape{{-1, 3, 10}, {{2, 3, 10}}}, false}, }; return params; diff --git a/src/tests/test_utils/unit_test_utils/tests_data/search_sorted_data.h b/src/tests/test_utils/unit_test_utils/tests_data/search_sorted_data.h index 
ee355c2daee15e..43e680aa080686 100644 --- a/src/tests/test_utils/unit_test_utils/tests_data/search_sorted_data.h +++ b/src/tests/test_utils/unit_test_utils/tests_data/search_sorted_data.h @@ -13,6 +13,22 @@ // NOTE: expected output were generated using pyTorch.searchsorted implementation. +TEST_DATA(LIST(5), + LIST(2, 3), + false, + LIST(3, 3, 3, 3, 3), + LIST(3, 6, 9, 3, 6, 9), + LIST(0, 5, 5, 0, 5, 5), + "1d_tensor_0"); + +TEST_DATA(LIST(5), + LIST(2, 3), + true, + LIST(3, 3, 3, 3, 3), + LIST(3, 6, 9, 3, 6, 9), + LIST(5, 5, 5, 5, 5, 5), + "1d_tensor_0_right_mode"); + TEST_DATA(LIST(5), LIST(2, 3), false, @@ -53,6 +69,22 @@ TEST_DATA(LIST(5), LIST(0, 3, 5, 1, 3, 5, 1, 0, 0, 5, 5, 5), "1d_tensor_3_right_mode"); +TEST_DATA(LIST(1), + LIST(2, 2, 3), + false, + LIST(2), + LIST(0, 6, 20, 2, 6, 9, 1, 0, 0, 9, 10, 20), + LIST(0, 1, 1, 0, 1, 1, 0, 0, 0, 1, 1, 1), + "1d_tensor_4"); + +TEST_DATA(LIST(1), + LIST(2, 2, 3), + true, + LIST(2), + LIST(0, 6, 20, 2, 6, 9, 1, 0, 0, 9, 10, 20), + LIST(0, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1), + "1d_tensor_4_right_mode"); + TEST_DATA(LIST(2, 5), LIST(2, 3), false, @@ -72,15 +104,15 @@ TEST_DATA(LIST(2, 5), TEST_DATA(LIST(2, 2, 5), LIST(2, 2, 3), false, - LIST(1, 3, 5, 7, 9, 0, 2, 4, 6, 8, -20, 5, 10, 23, 41, 100, 125, 130, 132, 139), + LIST(1, 3, 5, 7, 9, 0, 2, 4, 6, 8, 0, 5, 10, 23, 41, 100, 125, 130, 132, 139), LIST(0, 6, 20, 1, 6, 9, 1, 0, 0, 9, 10, 20), - LIST(0, 3, 5, 1, 3, 5, 1, 1, 1, 0, 0, 0), + LIST(0, 3, 5, 1, 3, 5, 1, 0, 0, 0, 0, 0), "nd_tensor_2"); TEST_DATA(LIST(2, 2, 5), LIST(2, 2, 3), true, - LIST(1, 3, 5, 7, 9, 0, 2, 4, 6, 8, -20, 5, 10, 23, 41, 100, 125, 130, 132, 139), + LIST(1, 3, 5, 7, 9, 0, 2, 4, 6, 8, 0, 5, 10, 23, 41, 100, 125, 130, 132, 139), LIST(0, 6, 20, 1, 6, 9, 1, 0, 0, 9, 10, 20), LIST(0, 3, 5, 1, 4, 5, 1, 1, 1, 0, 0, 0), - "nd_tensor_2"); \ No newline at end of file + "nd_tensor_2_right_mode"); \ No newline at end of file
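Reviewer note (illustrative, not part of the patch): the test data above follows torch.searchsorted semantics, which the new search_sorted_ref kernel reproduces per innermost row: right_mode=false returns the index of the first sorted element that is >= the value (a lower-bound search), and right_mode=true the index of the first element that is > the value (an upper-bound search). A minimal host-side C++ sketch of that contract, using std::lower_bound/std::upper_bound and the "1d_tensor_0" vectors from search_sorted_data.h; the helper name search_sorted_ref_host is a hypothetical name for this note only and does not exist in the change:

// Illustrative reference only; not part of the patch.
#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

// right_mode=false: index of the first element >= v (lower bound).
// right_mode=true:  index of the first element >  v (upper bound).
static std::vector<int64_t> search_sorted_ref_host(const std::vector<float>& sorted,
                                                   const std::vector<float>& values,
                                                   bool right_mode) {
    std::vector<int64_t> out;
    out.reserve(values.size());
    for (float v : values) {
        auto it = right_mode ? std::upper_bound(sorted.begin(), sorted.end(), v)
                             : std::lower_bound(sorted.begin(), sorted.end(), v);
        out.push_back(static_cast<int64_t>(it - sorted.begin()));
    }
    return out;
}

int main() {
    // Same vectors as the "1d_tensor_0" / "1d_tensor_0_right_mode" entries added above.
    const std::vector<float> sorted{3, 3, 3, 3, 3};
    const std::vector<float> values{3, 6, 9, 3, 6, 9};
    assert((search_sorted_ref_host(sorted, values, false) == std::vector<int64_t>{0, 5, 5, 0, 5, 5}));
    assert((search_sorted_ref_host(sorted, values, true) == std::vector<int64_t>{5, 5, 5, 5, 5, 5}));
    return 0;
}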