#include "Channel.h"
#include "Constant.h"
#include "DatabaseManager.h"
#include "Json.h"
#include "Log.h"
#include "Tool.h"
// NOTE(review): the original angle-bracket include names were stripped during
// extraction; the headers below are reconstructed from actual usage — verify.
#include <QDateTime>
#include <QDebug>
#include <QFile>
#include <QFileInfo>
#include <QTimer>
#include <cstdlib>

// Process-wide pipeline nodes shared by all channels, lazily created in init().
LinkObject* Channel::lineIn = nullptr;
LinkObject* Channel::lineOut = nullptr;
LinkObject* Channel::rtspServer = nullptr;
LinkObject* Channel::resample = nullptr;
LinkObject* Channel::gain = nullptr;
LinkObject* Channel::volume = nullptr;

// Gain range: max +12 dB (above that the output clips/pops), min -30 dB.
// NOTE(review): "maxGian" is a typo for "maxGain"; the name is declared in
// Channel.h, so it is kept here for header compatibility.
int Channel::maxGian = 12;
int Channel::minGain = -30;
int Channel::curGain = 0;

extern DatabaseManager* db;

void configureOutput2(QString resolutionStr);

/**
 * @brief Construct a channel and start its video output immediately.
 * @param name   channel name (also used as the capture interface name)
 * @param params startup parameters for the "OutputVo" link object
 * @param parent Qt parent object
 */
Channel::Channel(const QString& name, const QVariantMap& params, QObject* parent)
    : QObject(parent) // base class listed first (fixes -Wreorder warning)
    , channelName(name)
    , videoOutputParams(params)
{
    videoOutput = Link::create("OutputVo");
    videoOutput->start(videoOutputParams);

    // Map the Resolution enum onto the strings the "OutputVo" object expects.
    resolutionMap[Channel::VO_OUTPUT_1080I60] = "1080I60";
    resolutionMap[Channel::VO_OUTPUT_1600x1200_60] = "1600x1200_60";
    resolutionMap[Channel::VO_OUTPUT_1280x1024_60] = "1280x1024_60";
    resolutionMap[Channel::VO_OUTPUT_1280x720_60] = "1280x720_60";
    resolutionMap[Channel::VO_OUTPUT_1024x768_60] = "1024x768_60";
    resolutionMap[Channel::VO_OUTPUT_800x600_60] = "800x600_60";
}

Channel::~Channel()
{
}

/**
 * @brief Build the whole media pipeline for this channel: shared audio I/O,
 *        live preview with overlay, MP4 segment recording, RTSP streaming,
 *        duration probing, and file playback.
 */
void Channel::init()
{
    // Extra low-level configuration for output channel 2.
    if (channelName == Constant::SecondaryChannel) {
        QString resolution = videoOutputParams["output"].toString();
        configureOutput2(resolution);
    }

    // Shared line-in (ALSA capture), created once for all channels.
    if (lineIn == nullptr) {
        lineIn = Link::create("InputAlsa");
        QVariantMap dataIn;
        dataIn["path"] = "hw:0";
        dataIn["channels"] = 2;
        lineIn->start(dataIn);
    }
    if (resample == nullptr) {
        resample = Link::create("Resample");
        resample->start();
        lineIn->linkA(resample);
    }
    // Shared line-out (ALSA playback).
    if (lineOut == nullptr) {
        lineOut = Link::create("OutputAlsa");
        QVariantMap dataOut;
        dataOut["path"] = "hw:0";
        lineOut->start(dataOut);
    }
    if (gain == nullptr) {
        gain = Link::create("Gain");
        gain->start();
    }
    if (volume == nullptr) {
        volume = Link::create("Volume");
        volume->start();
        // Gain feeds both the level meter and the physical line-out.
        gain->linkA(volume);
        gain->linkA(lineOut);
    }
    if (rtspServer == nullptr) {
        rtspServer = Link::create("Rtsp");
        rtspServer->start();
    }

    image = Link::create("InputImage");
    loadOverlayConfig();

    // Watermark overlay used to show the recording state.
    overlay = Link::create("Overlay");
    overlay->start(norecordOverlay);
    overlay->linkV(videoOutput);

    // Video input (capture).
    videoInput = Link::create("InputVi");
    QVariantMap dataVi;
    dataVi["interface"] = channelName;
    dataVi["width"] = 1920;
    dataVi["height"] = 1080;
    videoInput->start(dataVi);
    videoInput->linkV(overlay)->linkV(videoOutput);

    // Per-channel audio output.
    audioOutput = Link::create("OutputAo");
    // Per-channel audio input.
    audioInput = Link::create("InputAi");
    QVariantMap dataAi;
    dataAi["interface"] = channelName;
    dataAi["channels"] = 2;
    audioInput->start(dataAi);
    audioInput->linkA(audioOutput);

    // Audio encoder.
    audioEncoder = Link::create("EncodeA");
    audioEncoder->start(audioEncoderParams);
    // Video encoder.
    videoEncoder = Link::create("EncodeV");
    videoEncoder->start(videoEncoderParams);

    // Segmented MP4 recording muxer (started later by startRecord()).
    record = Link::create("Mux");
    QVariantMap dataMp4;
    dataMp4["format"] = "mp4";
    dataMp4["filecache"] = 20480000;
    dataMp4["lowLatency"] = true;
    dataMp4["thread"] = true;
    dataMp4["segmentDuration"] = duration;
    record->setData(dataMp4);
    videoInput->linkV(videoEncoder)->linkV(record);
    // NOTE(review): the audio chains below mix linkV and linkA into the muxer
    // while the resample chain uses linkA throughout — confirm against the
    // Link framework whether linkV on an audio pad is intentional here.
    audioInput->linkA(audioEncoder)->linkV(record);
    resample->linkA(audioEncoder)->linkA(record);
    // Drives segment rotation / file renaming; measured cost ~18-20 ms,
    // hence the deferred saveVideoInfo() calls in the slot.
    connect(record, SIGNAL(newEvent(QString, QVariant)), this, SLOT(onNewEvent(QString, QVariant)));

    // Decoder chain used only to probe file durations (saveVideoInfo()).
    calDuration = Link::create("InputFile");
    calDuration->start();
    calAudioDecoder = Link::create("DecodeA");
    calAudioDecoder->start();
    calDuration->linkA(calAudioDecoder);
    calVideoDecoder = Link::create("DecodeV");
    calVideoDecoder->start();
    calDuration->linkV(calVideoDecoder);

    // RTSP stream muxer.
    rtsp = Link::create("Mux");
    QVariantMap dataRtsp;
    dataRtsp["path"] = QString("mem://%1").arg(pushCode);
    dataRtsp["format"] = "rtsp";
    rtsp->start(dataRtsp);
    videoInput->linkV(videoEncoder)->linkV(rtsp)->linkV(rtspServer);
    audioInput->linkA(audioEncoder)->linkV(rtsp)->linkV(rtspServer);
    resample->linkA(audioEncoder)->linkA(rtsp)->linkA(rtspServer);

    // File playback source. `this` is passed as context so the connection is
    // dropped automatically when the channel is destroyed.
    inputFile = Link::create("InputFile");
    connect(inputFile, &LinkObject::newEvent, this, [=](QString type, QVariant msg) {
        if (type == "EOF") {
            Log::info("{} one video playback end", channelName.toStdString());
            emit playEnd();
        }
    });
    // Audio decoding; playback audio is routed out only on HDMI-OUT0.
    audioDecoder = Link::create("DecodeA");
    audioDecoder->start();
    if (channelName == Constant::MainChannel) {
        inputFile->linkA(audioDecoder)->linkA(gain);
    } else {
        inputFile->linkA(audioDecoder);
    }
    // Video decoding.
    videoDecoder = Link::create("DecodeV");
    videoDecoder->start();
    inputFile->linkV(videoDecoder)->linkV(videoOutput);
}

/**
 * @brief Switch the display output resolution.
 * @param resolution one of the Resolution enum values
 */
void Channel::changeOutputResolution(Channel::Resolution resolution)
{
    QVariantMap dataVo;
    dataVo["output"] = resolutionMap[resolution];
    videoOutput->setData(dataVo);
    if (channelName == Constant::SecondaryChannel) {
        configureOutput2(resolutionMap[resolution]);
    }
}

/**
 * @brief Change the capture (input) resolution.
 * @param width  frame width in pixels
 * @param height frame height in pixels
 */
void Channel::changeInputResolution(int width, int height)
{
    QVariantMap dataVi;
    dataVi["width"] = width;
    dataVi["height"] = height;
    videoInput->setData(dataVi);
    // Fixed: the original statement was truncated ("... << \"set \" }") and
    // did not compile; completed into a full debug trace.
    qDebug() << videoInput << "set input resolution" << width << "x" << height;
}

/**
 * @brief Start recording.
 */
void Channel::startRecord()
{
    // Remember both the overall recording start time and the start time of
    // the current segment.
    QString time = QDateTime::currentDateTime().toString("yyyyMMddhhmmss");
    startTime = time;
    curTime = time;
    QVariantMap dataRecord;
    // "%d" is NOT a Qt placeholder, so it survives .arg() untouched — the Mux
    // object substitutes it with the segment index; saveVideoInfo() later
    // renames "<startTime>_<segmentId>.mp4" to "<curTime>.mp4".
    QString path = QString("%1/%2/%3_%d.mp4").arg(Constant::VideoPath).arg(channelName).arg(curTime);
    dataRecord["path"] = path;
    record->start(dataRecord);
    isRecord = true;
    Log::info("{} start recording...", channelName.toStdString());
    // Show the "recording" watermark.
    overlay->setData(recordOverlay);
}

/**
 * @brief Slot for muxer events; drives segmented recording.
 * @param msg  event type
 * @param data event payload (segment id for "newSegment")
 */
void Channel::onNewEvent(QString msg, QVariant data)
{
    if (msg == "newSegment") {
        QString datetime = curTime;
        // Reset the recording start time of the new segment.
        curTime = QDateTime::currentDateTime().toString("yyyyMMddhhmmss");
        segmentId = data.toInt();
        // Defer the (slow) file-info persistence so the event handler returns
        // quickly; the previous segment is the one just closed.
        QTimer::singleShot(3000, this, [=]() {
            saveVideoInfo(datetime, segmentId - 1);
        });
    }
}

/**
 * @brief Stop recording.
 */
void Channel::stopRecord()
{
    Log::info("{} stop recording...", channelName.toStdString());
    record->stop(true);
    // Persist the last segment's file info asynchronously.
    QTimer::singleShot(1000, this, [=]() {
        saveVideoInfo(curTime, segmentId);
    });
    // Reset the watermark and the recorded timestamps.
    overlay->setData(norecordOverlay);
    startTime = "";
    curTime = "";
}

/**
 * @brief Start playback of a recorded file.
 * @param path file path
 * @return true on success, false if the file is missing or corrupted
 */
bool Channel::startPlayback(QString path)
{
    QFileInfo info(path);
    if (!info.exists()) {
        Log::error("cannot open video {} , video file does not exist", path.toStdString());
        videoInput->unLinkV(videoOutput);
        QVariantMap dataImage;
        dataImage["path"] = Constant::EmptyImagePath;
        image->start(dataImage);
        image->linkV(videoOutput);
        state = Error;
        return false;
    }
    // Start playback.
    QVariantMap dataFile;
    dataFile["path"] = path;
    dataFile["sync"] = true;
    inputFile->start(dataFile);
    // A zero duration means the file is corrupted — show the error image.
    int duration = inputFile->invoke("getDuration", path).toInt();
    if (duration == 0) {
        Log::error("cannot open video {}, video file was corrupted", path.toStdString());
        inputFile->stop();
        videoInput->unLinkV(videoOutput);
        QVariantMap dataImage;
        dataImage["path"] = Constant::ErrorImagePath;
        image->start(dataImage);
        image->linkV(videoOutput);
        state = Error;
        return false;
    }
    // Detach the live video path, attach the playback path.
    overlay->unLinkV(videoOutput);
    videoDecoder->linkV(videoOutput);
    // Detach the live audio path, attach the playback path.
    audioInput->unLinkA(audioOutput);
    audioDecoder->linkA(gain);
    playbackDuration = duration;
    state = Playback;
    return true;
}

/**
 * @brief Switch back to the live feed (from playback or error state).
 */
void Channel::startPlayLive()
{
    if (state == Playback) {
        videoDecoder->unLinkV(videoOutput);
        // NOTE(review): audioDecoder is linked to `gain` during playback, not
        // to audioOutput — this unlink looks like a no-op; the effective
        // unlink from `gain` happens below. Confirm against the Link API.
        audioDecoder->unLinkA(audioOutput);
        inputFile->stop(true);
    } else if (state == Error) {
        image->unLinkV(videoOutput);
        image->stop(true);
    }
    // Re-attach the live video and audio outputs.
    overlay->linkV(videoOutput);
    audioInput->linkA(audioOutput);
    // Detach the external (playback) audio output.
    audioDecoder->unLinkA(gain);
    state = Stop;
}

/**
 * @brief Seek within the file being played back.
 * @param pos target position
 */
void Channel::seek(int pos)
{
    inputFile->invoke("seek", pos);
}

/**
 * @brief Toggle pause/resume; no effect while stopped or in error state.
 */
void Channel::togglePause()
{
    if (state == Stop || state == Error)
        return;
    if (state == Playback)
        state = Pause;
    else
        state = Playback;
    inputFile->invoke("pause", state == Pause);
}

/**
 * @brief Show the "playback finished" prompt image.
 */
void Channel::showFinishPromot()
{
    // NOTE(review): videoInput is never linked to videoDecoder in init();
    // verify whether this unlink is intentional or should target videoOutput.
    videoInput->unLinkV(videoDecoder);
    QVariantMap dataImage;
    dataImage["path"] = Constant::FinishImagePath;
    image->start(dataImage);
    image->linkV(videoOutput);
    state = Finish;
}

/**
 * @brief Read the current audio levels.
 * @return map with "L" (left) and "R" (right) channel levels; a channel whose
 *         average is below 15 is reported as 0 (noise gate).
 */
QVariantMap Channel::getVolume()
{
    QVariantMap result;
    QVariantMap data = volume->invoke("getVolume").toMap();
    result["L"] = data["max"].toInt();
    if (data["avg"].toInt() < 15)
        result["L"] = 0;
    result["R"] = data["max2"].toInt();
    if (data["avg2"].toInt() < 15)
        result["R"] = 0;
    return result;
}

/**
 * @brief Increase the output gain by 6 dB, clamped at maxGian.
 */
void Channel::volumeUp()
{
    if (curGain < maxGian)
        curGain += 6;
    QVariantMap data;
    data["gain"] = curGain;
    gain->setData(data);
    Log::info("current volumn gain: {}dB", curGain);
}

/**
 * @brief Decrease the output gain by 6 dB, clamped at minGain.
 */
void Channel::volumeDown()
{
    if (curGain > minGain)
        curGain -= 6;
    QVariantMap data;
    data["gain"] = curGain;
    gain->setData(data);
    Log::info("current volumn gain: {}dB", curGain);
}

/**
 * @brief Load the overlay (watermark) configurations into memory.
 */
void Channel::loadOverlayConfig()
{
    // Read a JSON array of overlay layer maps and wrap it in a "lays" entry.
    auto loadFromJson = [](const QString& path) {
        QVariantMap dataOver;
        QVariantList list = Json::loadFile(path).toList();
        QVariantList list2;
        for (int i = 0; i < list.count(); i++) {
            QVariantMap map = list[i].toMap();
            list2 << map;
        }
        dataOver["lays"] = list2;
        return dataOver;
    };
    recordOverlay = loadFromJson(Constant::RecordOverlay);
    norecordOverlay = loadFromJson(Constant::NoRecordOverlay);
}

/**
 * @brief Rename a finished segment, probe its duration, and persist its
 *        metadata to the database.
 * @param curTime   segment start time ("yyyyMMddhhmmss"), becomes the file name
 * @param segmentId index the muxer assigned to the segment file
 */
void Channel::saveVideoInfo(QString curTime, int segmentId)
{
    // Rename: "<recording start time>_<segmentId>.mp4" -> "<segment start time>.mp4"
    QString filename = QString("%1_%2.mp4").arg(startTime).arg(segmentId);
    QString path = QString("%1/%2/%3").arg(Constant::VideoPath).arg(channelName).arg(filename);
    QString newPath = QString("%1/%2/%3.mp4").arg(Constant::VideoPath).arg(channelName).arg(curTime);
    QFile file(path);
    if (!file.rename(newPath)) {
        Log::error("rename file name failed in function onNewEvent, old filename: {}, target filename: {} , channel name: {},reason: {}",
            path.toStdString(), newPath.toStdString(), channelName.toStdString(), file.errorString().toStdString());
        return;
    }
    // Probe the file duration (ms -> s).
    int curDuration = calDuration->invoke("getDuration", newPath).toInt() / 1000;
    // Store the recording's metadata in the database.
    DatabaseManager::File fileInfo;
    fileInfo.channel = channelName == Constant::MainChannel ? DatabaseManager::MainChannel : DatabaseManager::SecondaryChannel;
    fileInfo.datetime = QDateTime::fromString(curTime, "yyyyMMddhhmmss").toString("yyyy-MM-dd hh:mm:ss");
    fileInfo.filename = QString("%1.mp4").arg(curTime);
    fileInfo.duration = curDuration;
    if (db->insert(fileInfo)) {
        Log::info("insert one record into database success, name: {}, channel: {}, duration: {}s",
            fileInfo.filename.toStdString(), channelName.toStdString(), curDuration);
    } else {
        Log::error("insert one record into database failed, name: {}, channel: {}, duration: {}s",
            fileInfo.filename.toStdString(), channelName.toStdString(), curDuration);
    }
    // Notify listeners that a new video is available.
    emit appendOneVideo();
}

/**
 * @brief Resolution-specific driver configuration for output port 2
 *        (reloads the LT8618SX HDMI transmitter kernel module).
 * @param resolutionStr resolution name string
 */
void configureOutput2(QString resolutionStr)
{
    static int lastNorm = 0;
    int norm = 0;
    int ddr = 0;
    // NOTE(review): these strings ("1080P60", "720P50", ...) do not match the
    // entries in Channel::resolutionMap ("1080I60", "1280x720_60", ...);
    // confirm which caller supplies the progressive-format names.
    if (resolutionStr == "1080P60")
        norm = 9;
    else if (resolutionStr == "1080P50")
        norm = 10;
    else if (resolutionStr == "1080P30")
        norm = 12;
    else if (resolutionStr == "720P60")
        norm = 5;
    else if (resolutionStr == "720P50")
        norm = 6;
    else if (resolutionStr == "3840x2160_30") {
        norm = 14;
        ddr = 1;
    }
    // Only reload the module when the norm actually changes.
    if (norm != lastNorm) {
        lastNorm = norm;
        QString cmd = "rmmod hi_lt8618sx_lp.ko";
        system(cmd.toLatin1().data());
        // Deprecated QString::sprintf replaced with .arg(); same command text.
        cmd = QString("insmod /ko/extdrv/hi_lt8618sx_lp.ko norm=%1 USE_DDRCLK=%2").arg(norm).arg(ddr);
        system(cmd.toLatin1().data());
    }
}