/*
 * Copyright 2022 The Modelbox Project Authors. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "video_out_flowunit.h"

#include <securec.h>

#include <nlohmann/json.hpp>
#include <opencv2/opencv.hpp>
#include <regex>

#include "modelbox/flowunit.h"
#include "modelbox/flowunit_api_helper.h"

modelbox::Status VideoOutFlowUnit::Open(
    const std::shared_ptr<modelbox::Configuration> &opts) {
  default_dest_url_ = opts->GetString("default_dest_url", "");

  return modelbox::STATUS_OK;
}

modelbox::Status VideoOutFlowUnit::Close() { return modelbox::STATUS_OK; }

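// Process dispatches one batch of frames: if a local display queue has been
// created in DataPre, frames are handed to the show thread; otherwise they are
// encoded and muxed to the configured destination.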
modelbox::Status VideoOutFlowUnit::Process(
    std::shared_ptr<modelbox::DataContext> ctx) {
  auto image_queue = std::static_pointer_cast<
      modelbox::BlockingQueue<std::shared_ptr<modelbox::Buffer>>>(
      ctx->GetPrivate(SHOW_QUEUE_CTX));
  if (image_queue != nullptr) {
    auto input_buffer_list = ctx->Input(FRAME_INFO_INPUT);
    for (size_t i = 0; i < input_buffer_list->Size(); ++i) {
      // push with a timeout; if the display queue stays full the frame is
      // simply dropped (the return value is not checked)
      image_queue->Push(input_buffer_list->At(i), 50);
    }
    return modelbox::STATUS_SUCCESS;
  }

  // otherwise, encode the frames and mux them to the destination
  auto muxer =
      std::static_pointer_cast<FfmpegVideoMuxer>(ctx->GetPrivate(MUXER_CTX));
  auto encoder = std::static_pointer_cast<FfmpegVideoEncoder>(
      ctx->GetPrivate(ENCODER_CTX));
  if (muxer == nullptr || encoder == nullptr) {
    MBLOG_ERROR << "Stream not initialized";
    return {modelbox::STATUS_FAULT, "Stream not initialized"};
  }

  std::vector<std::shared_ptr<AVFrame>> av_frame_list;
  auto ret = ReadFrames(ctx, av_frame_list);
  if (ret != modelbox::STATUS_SUCCESS) {
    MBLOG_ERROR << "Read input frame failed";
    return {modelbox::STATUS_FAULT, "Read input frame failed"};
  }

  std::vector<std::shared_ptr<AVPacket>> av_packet_list;
  ret = EncodeFrame(encoder, av_frame_list, av_packet_list);
  if (ret != modelbox::STATUS_SUCCESS) {
    MBLOG_ERROR << "Encode frame failed";
    return {modelbox::STATUS_FAULT, "Encode frame failed"};
  }

  ret = MuxPacket(muxer, encoder->GetCtx()->time_base, av_packet_list);
  if (ret != modelbox::STATUS_SUCCESS) {
    MBLOG_ERROR << "Mux packet failed";
    return {modelbox::STATUS_FAULT, "Mux packet failed"};
  }

  return modelbox::STATUS_SUCCESS;
}

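// ReadFrames converts every buffer of the input batch into an AVFrame and
// assigns a monotonically increasing pts from the per-stream frame index.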
modelbox::Status VideoOutFlowUnit::ReadFrames(
    const std::shared_ptr<modelbox::DataContext> &ctx,
    std::vector<std::shared_ptr<AVFrame>> &av_frame_list) {
  auto frame_buffer_list = ctx->Input(FRAME_INFO_INPUT);
  if (frame_buffer_list == nullptr || frame_buffer_list->Size() == 0) {
    MBLOG_ERROR << "Input frame list is empty";
    return {modelbox::STATUS_FAULT, "Input frame list is empty"};
  }

  auto frame_index_ptr =
      std::static_pointer_cast<int64_t>(ctx->GetPrivate(FRAME_INDEX_CTX));
  for (auto frame_buffer : *frame_buffer_list) {
    std::shared_ptr<AVFrame> av_frame;
    auto ret = ReadFrameFromBuffer(frame_buffer, av_frame);
    if (ret != modelbox::STATUS_SUCCESS) {
      MBLOG_ERROR << "Read frame from buffer failed";
      return ret;
    }

    // assign pts only after the frame has been built successfully
    av_frame->pts = *frame_index_ptr;
    ++(*frame_index_ptr);
    av_frame_list.push_back(av_frame);
  }

  return modelbox::STATUS_SUCCESS;
}

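// ReadFrameFromBuffer wraps the buffer data in a freshly allocated AVFrame
// without copying; width, height and pix_fmt are read from the buffer meta.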
modelbox::Status VideoOutFlowUnit::ReadFrameFromBuffer(
    std::shared_ptr<modelbox::Buffer> &frame_buffer,
    std::shared_ptr<AVFrame> &av_frame) {
  auto *frame_ptr = av_frame_alloc();
  if (frame_ptr == nullptr) {
    MBLOG_ERROR << "Alloc frame failed";
    return {modelbox::STATUS_FAULT, "Alloc frame failed"};
  }

  av_frame.reset(frame_ptr, [](AVFrame *ptr) { av_frame_free(&ptr); });
  frame_buffer->Get("width", av_frame->width);
  frame_buffer->Get("height", av_frame->height);
  std::string pix_fmt;
  frame_buffer->Get("pix_fmt", pix_fmt);
  auto iter = videodecode::g_av_pix_fmt_map.find(pix_fmt);
  if (iter == videodecode::g_av_pix_fmt_map.end()) {
    MBLOG_ERROR << "Encoder does not support pix fmt " << pix_fmt;
    return {modelbox::STATUS_NOTSUPPORT,
            "Encoder does not support pix fmt " + pix_fmt};
  }
  av_frame->format = iter->second;
  auto ret =
      av_image_fill_arrays(av_frame->data, av_frame->linesize,
                           (const uint8_t *)frame_buffer->ConstData(),
                           iter->second, av_frame->width, av_frame->height, 1);
  if (ret < 0) {
    GET_FFMPEG_ERR(ret, ffmpeg_err);
    MBLOG_ERROR << "av_image_fill_arrays failed, err " << ffmpeg_err;
    return {modelbox::STATUS_FAULT, "av_image_fill_arrays failed"};
  }

  return modelbox::STATUS_SUCCESS;
}

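// EncodeFrame feeds the frames to the encoder one by one and collects the
// resulting packets.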
modelbox::Status VideoOutFlowUnit::EncodeFrame(
    const std::shared_ptr<FfmpegVideoEncoder> &encoder,
    const std::vector<std::shared_ptr<AVFrame>> &av_frame_list,
    std::vector<std::shared_ptr<AVPacket>> &av_packet_list) {
  for (const auto &frame : av_frame_list) {
    auto ret = encoder->Encode(GetBindDevice(), frame, av_packet_list);
    if (ret != modelbox::STATUS_SUCCESS) {
      MBLOG_ERROR << "Encoder encode frame failed, reason: " << ret.Errormsg();
      return ret;
    }
  }

  return modelbox::STATUS_SUCCESS;
}

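// MuxPacket hands each encoded packet to the muxer together with the encoder
// time base.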
modelbox::Status VideoOutFlowUnit::MuxPacket(
    const std::shared_ptr<FfmpegVideoMuxer> &muxer, const AVRational &time_base,
    std::vector<std::shared_ptr<AVPacket>> &av_packet_list) {
  for (const auto &packet : av_packet_list) {
    auto ret = muxer->Mux(time_base, packet);
    if (ret != modelbox::STATUS_SUCCESS) {
      MBLOG_ERROR << "Muxer mux packet failed";
      return ret;
    }
  }

  return modelbox::STATUS_SUCCESS;
}

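// ProcessShow runs on a dedicated thread: it pops frames from the display
// queue and shows them in an OpenCV window until the queue is shut down. The
// window name is taken from a "0:window_name" style dest url.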
void VideoOutFlowUnit::ProcessShow(
    const std::string &dest_url,
    const std::shared_ptr<
        modelbox::BlockingQueue<std::shared_ptr<modelbox::Buffer>>>
        &image_queue) {
  std::string win_name = "modelbox_show";
  if (dest_url.length() > 2) {
    win_name = dest_url.substr(2);
  }

  cv::namedWindow(win_name, cv::WINDOW_AUTOSIZE);
  std::shared_ptr<modelbox::Buffer> buf;
  std::shared_ptr<modelbox::Buffer> back_buf;
  while (image_queue->Pop(&buf)) {
    if (buf == nullptr) {
      break;
    }

    // default to 1 so the Mat stays valid even if width/height meta is not set
    int32_t width = 1;
    int32_t height = 1;
    std::string pix_fmt = "bgr";
    buf->Get("width", width);
    buf->Get("height", height);
    buf->Get("pix_fmt", pix_fmt);
    void *input_data = const_cast<void *>(buf->ConstData());
    bool isnv12 = (pix_fmt == "nv12");
    cv::Mat img_data(cv::Size(width, isnv12 ? height * 3 / 2 : height),
                     isnv12 ? CV_8UC1 : CV_8UC3, input_data);
    cv::Mat show_img = img_data;
    // TODO: support more pixel formats; currently only rgb and nv12 are
    // converted to BGR for display
    if (pix_fmt == "rgb") {
      cv::cvtColor(img_data, show_img, cv::COLOR_RGB2BGR);
    } else if (pix_fmt == "nv12") {
      cv::cvtColor(img_data, show_img, cv::COLOR_YUV2BGR_NV12);
    }

    cv::imshow(win_name, show_img);
    cv::waitKey(10);
    // keep the previous buffer alive for one extra iteration
    back_buf = buf;
  }

  cv::destroyWindow(win_name);
}

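// PrepareVideoOut builds the encoder, writer and muxer from the first frame's
// meta (width, height, frame rate) and stores them in the data context for
// later Process calls.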
modelbox::Status VideoOutFlowUnit::PrepareVideoOut(
    const std::shared_ptr<modelbox::DataContext> &data_ctx,
    const std::string &dest_url, const std::string &format_name) {
  auto frame_buffer_list = data_ctx->Input(FRAME_INFO_INPUT);
  if (frame_buffer_list == nullptr || frame_buffer_list->Size() == 0) {
    MBLOG_ERROR << "Input [frame_info] is empty";
    return {modelbox::STATUS_FAULT, "Input [frame_info] is empty"};
  }

  auto frame_buffer = frame_buffer_list->At(0);
  int32_t width = 0;
  int32_t height = 0;
  int32_t rate_num = 25;
  int32_t rate_den = 1;
  frame_buffer->Get("width", width);
  frame_buffer->Get("height", height);
  frame_buffer->Get("rate_num", rate_num);
  frame_buffer->Get("rate_den", rate_den);

  if (width == 0 || height == 0) {
    MBLOG_ERROR << "buffer meta is invalid";
    return {modelbox::STATUS_INVALID, "buffer meta is invalid"};
  }

  auto encoder = std::make_shared<FfmpegVideoEncoder>();
  auto ret =
      encoder->Init(GetBindDevice(), width, height, {rate_num, rate_den});
  if (ret != modelbox::STATUS_SUCCESS) {
    MBLOG_ERROR << "Init encoder failed";
    return {modelbox::STATUS_FAULT, "Init encoder failed"};
  }

  auto writer = std::make_shared<FfmpegWriter>();
  ret = writer->Open(format_name, dest_url);
  if (ret != modelbox::STATUS_SUCCESS) {
    MBLOG_ERROR << "Open ffmpeg writer failed, format " << format_name
                << ", url " << dest_url;
    return {modelbox::STATUS_FAULT, "Open ffmpeg writer failed, format " +
                                        format_name + ", url " + dest_url};
  }

  auto muxer = std::make_shared<FfmpegVideoMuxer>();
  ret = muxer->Init(encoder->GetCtx(), writer);
  if (ret != modelbox::STATUS_SUCCESS) {
    MBLOG_ERROR << "Init muxer failed";
    return {modelbox::STATUS_FAULT, "Init muxer failed"};
  }

  auto color_cvt = std::make_shared<FfmpegColorConverter>();

  data_ctx->SetPrivate(MUXER_CTX, muxer);
  data_ctx->SetPrivate(ENCODER_CTX, encoder);
  data_ctx->SetPrivate(COLOR_CVT_CTX, color_cvt);
  return modelbox::STATUS_OK;
}

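// DataPre resolves the destination url for the stream. A url starting with a
// digit (e.g. "0:preview") selects local display through a show thread; an
// rtsp url or a file path (e.g. "rtsp://host/live" or "/tmp/out.mp4") selects
// video encoding.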
modelbox::Status VideoOutFlowUnit::DataPre(
    std::shared_ptr<modelbox::DataContext> data_ctx) {
  std::string dest_url;
  auto ret = GetDestUrl(data_ctx, dest_url);
  if (ret != modelbox::STATUS_SUCCESS || dest_url.empty()) {
    MBLOG_ERROR << "dest_url in config is empty, no dest url available";
    return {modelbox::STATUS_FAULT,
            "dest_url in config is empty, no dest url available"};
  }

  MBLOG_INFO << "videoout url=" << dest_url;

  auto frame_index_ptr = std::make_shared<int64_t>(0);
  data_ctx->SetPrivate(FRAME_INDEX_CTX, frame_index_ptr);

  if (dest_url[0] >= '0' && dest_url[0] <= '9') {
    // local display output, configured like "0:window_name"
    std::shared_ptr<std::thread> show_thread;
    auto image_queue = std::make_shared<
        modelbox::BlockingQueue<std::shared_ptr<modelbox::Buffer>>>(2);
    show_thread.reset(new std::thread(&VideoOutFlowUnit::ProcessShow, this,
                                      dest_url, image_queue),
                      [image_queue](std::thread *p) {
                        image_queue->Shutdown();
                        if (p && p->joinable()) {
                          p->join();
                        }
                        delete p;
                      });
    data_ctx->SetPrivate(SHOW_CTX, show_thread);
    data_ctx->SetPrivate(SHOW_QUEUE_CTX, image_queue);
    return modelbox::STATUS_OK;
  }

  std::string format_name = "mp4";
  if (dest_url.substr(0, 4) == "rtsp") {
    format_name = "rtsp";
  }

  return PrepareVideoOut(data_ctx, dest_url, format_name);
}

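// GetDestUrl resolves the destination url, preferring the DEST_URL stream
// meta, then the "iva_task_output" session config (expected shape:
// {"data": {"url": "..."}}), and finally the flowunit's default_dest_url.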
modelbox::Status VideoOutFlowUnit::GetDestUrl(
    const std::shared_ptr<modelbox::DataContext> &data_ctx,
    std::string &dest_url) {
  dest_url = default_dest_url_;

  Defer {
    // log the resolved url with any embedded credentials masked
    std::regex url_auth_pattern("://[^ /]*?:[^ /]*?@");
    auto result = std::regex_replace(dest_url, url_auth_pattern, "://*:*@");
    MBLOG_INFO << "video_out url is " << result;
  };

  // the dest url is resolved from three sources, in priority order:
  // stream meta, session config, then the flowunit default
  auto stream_meta = data_ctx->GetInputMeta(FRAME_INFO_INPUT);
  if (stream_meta != nullptr) {
    auto dest_url_ptr =
        std::static_pointer_cast<std::string>(stream_meta->GetMeta(DEST_URL));
    if (dest_url_ptr != nullptr && !(*dest_url_ptr).empty()) {
      dest_url = *dest_url_ptr;
      return modelbox::STATUS_SUCCESS;
    }
  }

  auto config = data_ctx->GetSessionConfig();
  auto cfg_str = config->GetString("iva_task_output");
  if (cfg_str.empty()) {
    return modelbox::STATUS_SUCCESS;
  }

  // parse without exceptions; fall back to the default url on malformed json
  auto url_json = nlohmann::json::parse(cfg_str, nullptr, false);
  if (url_json.is_discarded()) {
    MBLOG_WARN << "Parse iva_task_output config failed, use default dest url";
    return modelbox::STATUS_SUCCESS;
  }

  if (url_json.contains("data") && url_json["data"].contains("url")) {
    dest_url = url_json["data"]["url"].get<std::string>();
  }

  return modelbox::STATUS_SUCCESS;
}

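// DataPost drops the per-stream resources; resetting SHOW_CTX releases the
// show thread, whose deleter (set in DataPre) shuts down the display queue and
// joins the thread.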
modelbox::Status VideoOutFlowUnit::DataPost(
    std::shared_ptr<modelbox::DataContext> data_ctx) {
  data_ctx->SetPrivate(MUXER_CTX, nullptr);
  data_ctx->SetPrivate(ENCODER_CTX, nullptr);
  data_ctx->SetPrivate(SHOW_CTX, nullptr);
  data_ctx->SetPrivate(SHOW_QUEUE_CTX, nullptr);
  return modelbox::STATUS_OK;
}

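// Flowunit registration: a stream flowunit with a single frame_info input and
// a default_dest_url option.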
MODELBOX_FLOWUNIT(VideoOutFlowUnit, desc) {
  desc.SetFlowUnitName(FLOWUNIT_NAME);
  desc.SetFlowUnitGroupType("Video");
  desc.AddFlowUnitInput({FRAME_INFO_INPUT, "cpu"});
  desc.SetFlowType(modelbox::STREAM);
  desc.SetInputContiguous(false);
  // disable asynchronous execution: frames must be encoded one by one, in order
  desc.SetResourceNice(false);
  desc.SetDescription(FLOWUNIT_DESC);
  desc.AddFlowUnitOption(modelbox::FlowUnitOption(
      "default_dest_url", "string", true, "", "the encoder dest url"));
}

MODELBOX_DRIVER_FLOWUNIT(desc) {
  desc.Desc.SetName(FLOWUNIT_NAME);
  desc.Desc.SetClass(modelbox::DRIVER_CLASS_FLOWUNIT);
  desc.Desc.SetType(FLOWUNIT_TYPE);
  desc.Desc.SetDescription(FLOWUNIT_DESC);
  desc.Desc.SetVersion("1.0.0");
}