Commit
Added missed space in error message
ilya-lavrenov committed Jan 16, 2025
1 parent ca38e56 commit 4b4abfc
Showing 3 changed files with 13 additions and 6 deletions.
src/core/src/pass/sdpa_to_paged_attention.cpp (2 changes: 1 addition & 1 deletion)
@@ -40,7 +40,7 @@ bool ov::pass::SDPAToPagedAttention::run_on_model(const std::shared_ptr<ov::Mode
     RUN_ON_MODEL_SCOPE(SDPAToPagedAttention);
 
     OPENVINO_ASSERT(ov::op::util::has_op_with_type<ov::op::v13::ScaledDotProductAttention>(model),
-                    "No ScaledDotProductAttention operation observed in the graph, cannot perform"
+                    "No ScaledDotProductAttention operation observed in the graph, cannot perform "
                     "the SDPAToPagedAttention transformation.");
 
     auto max_context_len = setName(std::make_shared<v0::Parameter>(element::i32, PartialShape{}), "max_context_len");
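For context on the one-character fix above: adjacent C++ string literals are concatenated by the compiler with no whitespace inserted between them, so without the trailing space the assertion message would read "...cannot performthe SDPAToPagedAttention transformation." A minimal standalone sketch (plain C++, not OpenVINO code) showing the behavior:

#include <iostream>

int main() {
    // Adjacent string literals are merged character-for-character at compile time;
    // nothing is inserted at the seam between them.
    const char* without_space =
        "cannot perform"
        "the transformation.";
    const char* with_space =
        "cannot perform "
        "the transformation.";

    std::cout << without_space << '\n';  // prints: cannot performthe transformation.
    std::cout << with_space << '\n';     // prints: cannot perform the transformation.
    return 0;
}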
src/plugins/intel_cpu/src/plugin.cpp (15 changes: 11 additions & 4 deletions)
@@ -195,13 +195,20 @@ void Plugin::calculate_streams(Config& conf, const std::shared_ptr<ov::Model>& m
 }
 
 static Config::ModelType getModelType(const std::shared_ptr<const Model>& model) {
-    if (op::util::has_op_with_type<op::v1::Convolution>(model) ||
-        op::util::has_op_with_type<op::v1::ConvolutionBackpropData>(model))
-        return Config::ModelType::CNN;
+    std::cout << model->get_friendly_name() << std::endl;
+
+    // if (op::util::has_op_with_type<op::v1::Convolution>(model) ||
+    //     op::util::has_op_with_type<op::v1::ConvolutionBackpropData>(model)) {
+    //     std::cout << "Model type CNN" << std::endl;
+    //     return Config::ModelType::CNN;
+    // }
 
     if ((op::util::has_op_with_type<op::v13::ScaledDotProductAttention>(model) && model->get_variables().size() > 0) ||
-        op::util::has_op_with_type<ov::op::PagedAttentionExtension>(model))
+        op::util::has_op_with_type<ov::op::PagedAttentionExtension>(model)) {
+        std::cout << "Model type LLM" << std::endl;
         return Config::ModelType::LLM;
+    }
+    std::cout << "Model type Unknown" << std::endl;
 
     return Config::ModelType::Unknown;
 }
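A note on the plugin.cpp hunk above: with the Convolution/ConvolutionBackpropData branch commented out, getModelType can only return LLM or Unknown; a model containing stateful ScaledDotProductAttention or a PagedAttentionExtension op is classified as LLM, and everything else (including convolutional models) falls through to Unknown. A self-contained sketch of that decision order, using plain C++ booleans as hypothetical stand-ins for the op::util::has_op_with_type queries and the model->get_variables() check:

#include <iostream>

enum class ModelType { CNN, LLM, Unknown };

// Mirrors the decision order in getModelType() as it stands after this diff:
// the CNN branch is disabled, so only the LLM check runs before the fallback.
// The booleans are hypothetical inputs standing in for
// op::util::has_op_with_type<...>(model) and model->get_variables().size() > 0.
ModelType classify(bool has_sdpa, bool has_state, bool has_paged_attention) {
    if ((has_sdpa && has_state) || has_paged_attention) {
        return ModelType::LLM;
    }
    return ModelType::Unknown;
}

int main() {
    std::cout << (classify(true, true, false) == ModelType::LLM) << '\n';      // 1: stateful SDPA -> LLM
    std::cout << (classify(false, false, true) == ModelType::LLM) << '\n';     // 1: PagedAttention -> LLM
    std::cout << (classify(true, false, false) == ModelType::Unknown) << '\n'; // 1: stateless SDPA -> Unknown
    return 0;
}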
src/plugins/intel_gpu/thirdparty/onednn_gpu (2 changes: 1 addition & 1 deletion)
Submodule onednn_gpu updated 1215 files
