Client/ÍõÓêÑô/code/UI.html
New file @@ -0,0 +1,202 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Face Recognition Attendance - Face Enrollment Module</title>
    <style>
        body { font-family: Arial, sans-serif; margin: 20px; }
        .container { max-width: 600px; margin: auto; padding: 20px; border: 1px solid #ccc; border-radius: 8px; box-shadow: 0 4px 8px rgba(0, 0, 0, 0.1); }
        h1 { text-align: center; color: #333; }
        .form-group { margin-bottom: 15px; }
        label { display: block; margin-bottom: 5px; font-weight: bold; }
        input[type="text"], button { width: 100%; padding: 10px; font-size: 16px; border: 1px solid #ccc; border-radius: 4px; }
        button { background-color: #007bff; color: white; cursor: pointer; }
        button:hover { background-color: #0056b3; }
        .camera-preview { margin-top: 20px; text-align: center; }
        .camera-preview video { max-width: 100%; height: auto; border: 1px solid #ccc; border-radius: 4px; }
        .image-preview { margin-top: 20px; text-align: center; }
        .image-preview img { max-width: 100%; height: auto; border: 1px solid #ccc; border-radius: 4px; }
        .status-message { margin-top: 15px; text-align: center; font-size: 14px; color: #555; }
    </style>
</head>
<body>
    <div class="container">
        <h1>Face Recognition Attendance - Face Enrollment Module</h1>

        <!-- Employee ID input -->
        <div class="form-group">
            <label for="employee-id">Employee ID:</label>
            <input type="text" id="employee-id" placeholder="Please enter an employee ID">
        </div>

        <!-- Look-up button -->
        <button onclick="searchUserInfo()">Look Up Info</button>

        <!-- User info display -->
        <div class="form-group" id="user-info" style="display: none;">
            <label>User info:</label>
            <p id="user-details"></p>
        </div>

        <!-- Simulated camera capture -->
        <div class="camera-preview" id="camera-preview" style="display: none;">
            <video id="camera-video" autoplay playsinline></video>
            <div>
                <button onclick="captureImage('front')">Capture Front Face</button>
                <button onclick="captureImage('left')">Capture Left Face</button>
                <button onclick="captureImage('right')">Capture Right Face</button>
            </div>
        </div>

        <!-- Image preview -->
        <div class="image-preview" id="image-preview" style="display: none;">
            <h3>Preview of Captured Images</h3>
            <div id="captured-images">
                <img id="front-image" src="" alt="Front face image" style="display: none;">
                <img id="left-image" src="" alt="Left face image" style="display: none;">
                <img id="right-image" src="" alt="Right face image" style="display: none;">
            </div>
        </div>

        <!-- Submit button -->
        <button onclick="submitData()" style="display: none;" id="submit-btn">Submit</button>

        <!-- Status message -->
        <div class="status-message" id="status-message"></div>
    </div>

    <script>
        let cameraStream = null;

        // Simulate looking up user info by employee ID
        function searchUserInfo() {
            const employeeId = document.getElementById('employee-id').value;
            if (!employeeId) {
                alert('Please enter an employee ID');
                return;
            }

            // Mocked look-up result
            const userInfo = {
                id: employeeId,
                name: 'Zhang San',
                department: 'Technology Dept.'
            };

            // Show the user info
            document.getElementById('user-details').innerText =
                `Name: ${userInfo.name}, Department: ${userInfo.department}`;
            document.getElementById('user-info').style.display = 'block';

            // Show the camera preview
            startCamera();
        }

        // Open the camera
        async function startCamera() {
            try {
                cameraStream = await navigator.mediaDevices.getUserMedia({ video: true });
                const videoElement = document.getElementById('camera-video');
                videoElement.srcObject = cameraStream;
                document.getElementById('camera-preview').style.display = 'block';
            } catch (error) {
                alert('Unable to access the camera. Please check permissions or whether the device is available.');
                console.error('Camera access failed:', error);
            }
        }

        // Capture a still image from the video stream
        function captureImage(faceType) {
            const videoElement = document.getElementById('camera-video');
            const canvas = document.createElement('canvas');
            canvas.width = videoElement.videoWidth;
            canvas.height = videoElement.videoHeight;
            const context = canvas.getContext('2d');
            context.drawImage(videoElement, 0, 0, canvas.width, canvas.height);

            // Show the captured image on the page
            const imageUrl = canvas.toDataURL('image/png');
            const imageElement = document.getElementById(`${faceType}-image`);
            imageElement.src = imageUrl;
            imageElement.style.display = 'block';
            document.getElementById('image-preview').style.display = 'block'; // reveal the preview section

            // Once all three images have been captured, show the submit button
            const frontImage = document.getElementById('front-image').src;
            const leftImage = document.getElementById('left-image').src;
            const rightImage = document.getElementById('right-image').src;
            if (frontImage && leftImage && rightImage) {
                document.getElementById('submit-btn').style.display = 'block';
            }
        }

        // Simulate submitting the data
        function submitData() {
            const employeeId = document.getElementById('employee-id').value;
            if (!employeeId) {
                alert('Please enter an employee ID and look up the info first');
                return;
            }

            // Simulate uploading the images and submitting the data
            const statusMessage = document.getElementById('status-message');
            statusMessage.innerText = 'Uploading images and submitting data...';
            setTimeout(() => {
                statusMessage.innerText = 'Submitted successfully!';
            }, 2000); // simulated upload delay
        }
    </script>
</body>
</html>
Client/ÍõÓêÑô/document/ÏîÄ¿ÐèÇóÊé2.0°æ.docxBinary files differ
Client/ÍõÓêÑô/log/ÈÕÖ¾_ÍõÓêÑô_250218.docBinary files differ
Client/ÍõÓêÑô/log/ÍõÓêÑô_250218.docBinary files differ
Client/¶¼Ñçù/log/ÈÕÖ¾_¶¼Ñçù_0218.docBinary files differ
Client/¶Åìèº/code/facexmainwindow.cpp
New file @@ -0,0 +1,204 @@
#include "facexmainwindow.h"
#include "ui_facexmainwindow.h"
#include <QMessageBox>
#include <QStringList>
#include <QFileDialog>
#include <QGuiApplication>
#include <QScreen>
#include <QException>
#include <QThread>
#include <QDebug>

FaceXMainWindow::FaceXMainWindow(QWidget *parent)
    : QMainWindow(parent),
      ui(new Ui::FaceXMainWindow),
      m_player(new QMediaPlayer),
      m_playerlist(new QMediaPlaylist),
      m_videowidget(new QVideoWidget(this)),
      m_ffmpeg(new QFFmpeg(this)),
      m_rtspThread(nullptr)
{
    ui->setupUi(this);

    // Define the draggable area (the custom title-bar strip)
    dragArea = QRect(0, 0, width(), 40);
    setWindowFlags(Qt::FramelessWindowHint);    // hide the native window frame
    QCoreApplication::setAttribute(Qt::AA_EnableHighDpiScaling);

    // Show the "play" icon on the play button
    ui->btn_play->setStyleSheet("border-image: url(:/image/start.png)");

    // Attach the playlist to the player
    m_player->setPlaylist(m_playerlist);
    // Attach the video output widget to the player
    m_player->setVideoOutput(m_videowidget);

    // Position the video widget over the preview label
    m_videowidget->resize(ui->label_video->size());
    m_videowidget->move(ui->label_video->pos());
    m_videowidget->show();
    ui->label_video->show();

    // Connect the QFFmpeg decoded-frame signal
    connect(m_ffmpeg, &QFFmpeg::GetImage, this, &FaceXMainWindow::SetImage, Qt::QueuedConnection);

    // Connect the player's error signal
    connect(m_player, QOverload<QMediaPlayer::Error>::of(&QMediaPlayer::error),
            this, [this](QMediaPlayer::Error error) {
        Q_UNUSED(error);
        QMessageBox::critical(this, "Playback error", m_player->errorString());
    });
}

FaceXMainWindow::~FaceXMainWindow()
{
    if (m_rtspThread && m_rtspThread->isRunning()) {
        m_rtspThread->quit();
        m_rtspThread->wait();
    }
    delete ui;
}

void FaceXMainWindow::mousePressEvent(QMouseEvent *event)
{
    if (dragArea.contains(event->pos())) {
        offset = event->globalPos() - pos();
        isDragging = true;
    }
}

void FaceXMainWindow::mouseMoveEvent(QMouseEvent *event)
{
    if (isDragging && dragArea.contains(event->pos())) {
        move(event->globalPos() - offset);
    }
}

void FaceXMainWindow::mouseReleaseEvent(QMouseEvent *event)
{
    Q_UNUSED(event);
    isDragging = false;
}

// Query button
void FaceXMainWindow::on_btnSelect_clicked()
{
    QDate dateStart = ui->dateStart->date();
    QDate dateEnd = ui->dateEnd->date();
    if (dateStart > dateEnd) {
        QMessageBox::critical(nullptr, "Error", "The start date cannot be later than the end date!");
        return;
    }

    // Convert the dates to date-times and compute the span in days;
    // refuse queries that cover more than 60 days
    QDateTime dateTimeStart = ui->dateStart->dateTime();
    QDateTime dateTimeEnd = ui->dateEnd->dateTime();
    if (dateTimeStart.daysTo(dateTimeEnd) >= 60) {
        QMessageBox::critical(nullptr, "Error", "At most 60 days can be queried at a time!");
        return;
    }

    QStringList fileNames = QFileDialog::getOpenFileNames(this, "Select files", "D:/",
                                                          "Video files (*.mp4 *.avi *.mov);;All files (*.*)");
    if (!fileNames.isEmpty()) {
        m_playerlist->clear();
        m_ffmpeg->SetUrl(fileNames.first());
        if (m_ffmpeg->Init()) {
            if (m_rtspThread && m_rtspThread->isRunning()) {
                m_rtspThread->quit();
                m_rtspThread->wait();
            }
            m_rtspThread = new RtspThread(m_ffmpeg, this);
            m_rtspThread->start();
        }
        QMessageBox::information(this, "Success", "The file has been added to the playlist");
    } else {
        QMessageBox::information(this, "Notice", "No file selected");
    }
}

// Hide-window button
void FaceXMainWindow::on_toolButton_clicked()
{
    hide();
}

// Minimize button
void FaceXMainWindow::on_toolButton_3_clicked()
{
    showMinimized();
}

// Maximize button
void FaceXMainWindow::on_toolButton_2_clicked()
{
    if (windowState() != Qt::WindowMaximized) {
        this->showMaximized();
    } else {
        this->showNormal();
    }
}

// Double-click toggles maximize
void FaceXMainWindow::mouseDoubleClickEvent(QMouseEvent *event)
{
    Q_UNUSED(event);
    if (windowState() != Qt::WindowMaximized) {
        this->showMaximized();
    } else {
        this->showNormal();
    }
}

void FaceXMainWindow::setPlayButtonIcon(bool isPlaying)
{
    if (isPlaying) {
        ui->btn_play->setStyleSheet("border-image: url(:/image/pause.png)");
    } else {
        ui->btn_play->setStyleSheet("border-image: url(:/image/start.png)");
    }
}

// Play button
void FaceXMainWindow::on_btn_play_clicked()
{
    if (m_ffmpeg) {
        if (!m_rtspThread || !m_rtspThread->isRunning()) {
            if (m_ffmpeg->Init()) {
                m_rtspThread = new RtspThread(m_ffmpeg, this);
                m_rtspThread->start();
            }
        }
    } else {
        QMessageBox::critical(this, "Error", "The player has not been initialized");
    }
}

// Screenshot button
void FaceXMainWindow::on_btn_cut_clicked()
{
    QScreen *screen = QGuiApplication::primaryScreen();
    if (screen) {
        QPixmap screenshot = screen->grabWindow(this->winId());
        screenshot.save("screenshot.png");
        QMessageBox::information(this, "Notice", "The screenshot has been saved as screenshot.png");
    }
}

void FaceXMainWindow::SetImage(const QImage &image)
{
    qDebug() << "Image size:" << image.width() << "x" << image.height();
    qDebug() << "Image format:" << image.format();
    qDebug() << "Image signal received";
    if (!image.isNull()) {
        ui->label_video->setScaledContents(true);   // scale the image to fit the label
        ui->label_video->setPixmap(QPixmap::fromImage(image));
        ui->label_video->adjustSize();
        ui->label_video->update();
        qDebug() << "Image updated";
    } else {
        qDebug() << "The received image is null";
    }
}
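Review note on the teardown above: QThread::quit() only exits a thread's event loop, and RtspThread::run() (later in this diff) never starts one; it calls QFFmpeg::Play(), which loops until the stream ends, so the destructor's quit()/wait() pair can block until playback finishes. A minimal sketch of a stoppable worker with an atomic flag follows; it is an illustration of the pattern, not part of the submitted code, and StoppableWorker/requestStop are hypothetical names.

// Minimal, self-contained sketch of a stoppable decode thread.
// The placeholder loop stands in for the packet-reading loop in QFFmpeg::Play().
#include <QThread>
#include <atomic>

class StoppableWorker : public QThread
{
public:
    void requestStop() { m_stop = true; }   // safe to call from the GUI thread

protected:
    void run() override
    {
        // In the real code this would be: while (!m_stop && av_read_frame(...) >= 0) { ... }
        while (!m_stop) {
            QThread::msleep(10);            // placeholder for decoding one packet
        }
    }

private:
    std::atomic<bool> m_stop { false };
};

// Teardown then becomes deterministic:
//     worker->requestStop();   // break the loop
//     worker->wait();          // returns promptly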
Client/¶Åìèº/code/facexmainwindow.h
New file @@ -0,0 +1,82 @@
#ifndef FACEXMAINWINDOW_H
#define FACEXMAINWINDOW_H

#include <QMainWindow>
#include <QMouseEvent>
#include <QRect>
#include <QMediaPlayer>     // media player
#include <QMediaPlaylist>   // playlist
#include <QVideoWidget>     // video display widget
#include "qffmpeg.h"
#include "rtspthread.h"

// Required for C/C99 compatibility; without these macros the build fails
#ifndef INT64_C
#define INT64_C
#define UINT64_C
#endif

// The libav* headers are plain C, so they must be wrapped in extern "C"
extern "C" {
#include <libavcodec/avcodec.h>     // audio/video encoding and decoding
#include <libavformat/avformat.h>   // reading and writing container formats
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include <libavutil/frame.h>
}

QT_BEGIN_NAMESPACE
namespace Ui { class FaceXMainWindow; }
QT_END_NAMESPACE

class FaceXMainWindow : public QMainWindow
{
    Q_OBJECT

public:
    explicit FaceXMainWindow(QWidget *parent = nullptr);
    ~FaceXMainWindow();

    void mousePressEvent(QMouseEvent *event) override;
    void mouseMoveEvent(QMouseEvent *event) override;
    void mouseReleaseEvent(QMouseEvent *event) override;
    void mouseDoubleClickEvent(QMouseEvent *event) override;

private slots:
    void on_btnSelect_clicked();
    void on_toolButton_clicked();
    void on_toolButton_3_clicked();
    void on_toolButton_2_clicked();
    void on_btn_play_clicked();
    void on_btn_cut_clicked();
    void SetImage(const QImage &image);

private:
    Ui::FaceXMainWindow *ui;
    QPoint offset;
    QRect dragArea;
    bool isDragging = false;    // initialized so the first mouseMoveEvent reads a defined value
    QMediaPlayer *m_player;
    QMediaPlaylist *m_playerlist;
    QVideoWidget *m_videowidget;
    QFFmpeg *m_ffmpeg;
    RtspThread *m_rtspThread;

    void setPlayButtonIcon(bool isPlaying);
};

#endif // FACEXMAINWINDOW_H
Client/¶Åìèº/code/main.cpp
New file @@ -0,0 +1,11 @@
#include "facexmainwindow.h"
#include <QApplication>

int main(int argc, char *argv[])
{
    QApplication a(argc, argv);
    FaceXMainWindow w;
    w.show();

    return a.exec();
}
Client/¶Åìèº/code/qffmpeg.cpp
New file @@ -0,0 +1,113 @@
#include "qffmpeg.h"
#include <QDateTime>
#include <QDebug>

QFFmpeg::QFFmpeg(QObject *parent) : QObject(parent)
{
    videoStreamIndex = -1;
    pSwsContext = nullptr;          // not allocated until Init() succeeds
    index = 0;
    av_register_all();              // register all available container formats and codecs
    avformat_network_init();        // initialize network support; required before using RTSP streams
    pAVFormatContext = avformat_alloc_context();   // allocate and lightly initialize an AVFormatContext
    pAVFrame = av_frame_alloc();
}

QFFmpeg::~QFFmpeg()
{
    if (pAVFormatContext) {
        avformat_close_input(&pAVFormatContext);
        avformat_free_context(pAVFormatContext);
    }
    if (pAVFrame) {
        av_frame_free(&pAVFrame);
    }
    if (pSwsContext) {
        sws_freeContext(pSwsContext);
    }
    avpicture_free(&pAVPicture);
}

bool QFFmpeg::Init()
{
    // Open the video stream
    int result = avformat_open_input(&pAVFormatContext, url.toStdString().c_str(), NULL, NULL);
    if (result < 0) {
        qDebug() << "Failed to open the video stream";
        return false;
    }

    // Read the stream information
    result = avformat_find_stream_info(pAVFormatContext, NULL);
    if (result < 0) {
        qDebug() << "Failed to read the stream information";
        avformat_close_input(&pAVFormatContext);
        return false;
    }

    // Find the index of the video stream
    videoStreamIndex = -1;
    // qDebug() << "nb:" << pAVFormatContext->nb_streams;
    // qDebug() << "type:" << pAVFormatContext->streams[0]->codec->codec_type;
    // qDebug() << "AVMEDIA_TYPE_VIDEO:" << AVMEDIA_TYPE_VIDEO;
    for (uint i = 0; i < pAVFormatContext->nb_streams; i++) {
        if (pAVFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoStreamIndex = i;
            break;
        }
    }
    if (videoStreamIndex == -1) {
        qDebug() << "No video stream found";
        avformat_close_input(&pAVFormatContext);
        return false;
    }

    // Read the resolution of the video stream
    pAVCodecContext = pAVFormatContext->streams[videoStreamIndex]->codec;
    videoWidth  = pAVCodecContext->width;
    videoHeight = pAVCodecContext->height;
    avpicture_alloc(&pAVPicture, AV_PIX_FMT_RGB24, videoWidth, videoHeight);

    // Find the decoder for the video stream
    AVCodec *pAVCodec = avcodec_find_decoder(pAVCodecContext->codec_id);
    // Use the stream's actual pixel format as the conversion source
    pSwsContext = sws_getContext(videoWidth, videoHeight, pAVCodecContext->pix_fmt,
                                 videoWidth, videoHeight, AV_PIX_FMT_RGB24,
                                 SWS_BICUBIC, 0, 0, 0);

    // Open the decoder
    result = avcodec_open2(pAVCodecContext, pAVCodec, NULL);
    if (result < 0) {
        qDebug() << "Failed to open the decoder";
        avpicture_free(&pAVPicture);
        sws_freeContext(pSwsContext);
        avformat_close_input(&pAVFormatContext);
        return false;
    }

    qDebug() << "Video stream initialized successfully";
    return true;
}

void QFFmpeg::Play()
{
    // Read and decode the video frame by frame
    int frameFinished = 0;
    while (av_read_frame(pAVFormatContext, &pAVPacket) >= 0) {
        if (pAVPacket.stream_index == videoStreamIndex) {
            qDebug() << "Decoding started" << QDateTime::currentDateTime().toString("yyyy-MM-dd HH:mm:ss");
            avcodec_decode_video2(pAVCodecContext, pAVFrame, &frameFinished, &pAVPacket);
            if (frameFinished) {
                mutex.lock();
                sws_scale(pSwsContext, (const uint8_t* const *)pAVFrame->data, pAVFrame->linesize,
                          0, videoHeight, pAVPicture.data, pAVPicture.linesize);
                QImage image(pAVPicture.data[0], videoWidth, videoHeight, QImage::Format_RGB888);
                QImage copyImage = image.copy();    // deep copy so the decode buffer can be reused
                emit GetImage(copyImage, this->index);
                qDebug() << "Frame decoded, image signal emitted";
                mutex.unlock();
            }
        }
        av_packet_unref(&pAVPacket);
    }
    avformat_close_input(&pAVFormatContext);
}
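Review note: the decode path above relies on APIs that newer FFmpeg releases deprecate or remove (av_register_all, AVStream::codec, avcodec_decode_video2, AVPicture). The sketch below shows the same open-and-decode flow against the send/receive-packet API; it is an assumption about how this module might eventually be ported, not part of the submitted code, and error handling is trimmed for brevity. Conversion to RGB with sws_scale would follow exactly as in QFFmpeg::Play().

// Sketch: opening a stream and decoding frames with the post-3.x FFmpeg API.
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
}

bool decodeAllFrames(const char *url)
{
    AVFormatContext *fmt = nullptr;
    if (avformat_open_input(&fmt, url, nullptr, nullptr) < 0) return false;
    if (avformat_find_stream_info(fmt, nullptr) < 0) return false;

    // Pick the best video stream and its decoder in one call.
    const AVCodec *dec = nullptr;
    int streamIndex = av_find_best_stream(fmt, AVMEDIA_TYPE_VIDEO, -1, -1, &dec, 0);
    if (streamIndex < 0) return false;

    // Codec parameters now live in AVStream::codecpar, not AVStream::codec.
    AVCodecContext *ctx = avcodec_alloc_context3(dec);
    avcodec_parameters_to_context(ctx, fmt->streams[streamIndex]->codecpar);
    if (avcodec_open2(ctx, dec, nullptr) < 0) return false;

    AVPacket *pkt  = av_packet_alloc();
    AVFrame *frame = av_frame_alloc();
    while (av_read_frame(fmt, pkt) >= 0) {
        if (pkt->stream_index == streamIndex && avcodec_send_packet(ctx, pkt) == 0) {
            // One packet may yield zero or more frames.
            while (avcodec_receive_frame(ctx, frame) == 0) {
                // frame->data / frame->linesize are ready for sws_scale here.
            }
        }
        av_packet_unref(pkt);
    }

    av_frame_free(&frame);
    av_packet_free(&pkt);
    avcodec_free_context(&ctx);
    avformat_close_input(&fmt);
    return true;
}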
Client/¶Åìèº/code/qffmpeg.h
New file @@ -0,0 +1,63 @@
#ifndef QFFMPEG_H
#define QFFMPEG_H

// Required for C/C99 compatibility; without these macros the build fails
#ifndef INT64_C
#define INT64_C
#define UINT64_C
#endif

// The FFmpeg headers are plain C, so they must be wrapped in extern "C"
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libswscale/swscale.h>
#include <libavutil/frame.h>
}

#include <QObject>
#include <QMutex>
#include <QImage>

class QFFmpeg : public QObject
{
    Q_OBJECT

public:
    explicit QFFmpeg(QObject *parent = nullptr);
    ~QFFmpeg();

    bool Init();                                    // initialize the stream and decoder
    void Play();                                    // decode frame by frame
    void SetUrl(QString url) { this->url = url; }   // set the video source
    QString Url() const { return url; }
    int VideoWidth() const { return videoWidth; }
    int VideoHeight() const { return videoHeight; }
    void SetIndex(int x) { this->index = x; }

private:
    QMutex mutex;
    AVPicture pAVPicture;
    AVFormatContext *pAVFormatContext;
    AVCodecContext *pAVCodecContext;
    AVFrame *pAVFrame;
    SwsContext *pSwsContext;
    AVPacket pAVPacket;
    QString url;
    int videoWidth;
    int videoHeight;
    int videoStreamIndex;
    int index;

signals:
    void GetImage(const QImage &image, int x);      // emitted with each decoded frame
};

#endif // QFFMPEG_H
Client/¶Åìèº/code/rtspthread.cpp
New file @@ -0,0 +1,13 @@
#include "rtspthread.h"

RtspThread::RtspThread(QFFmpeg *ffmpeg, QObject *parent)
    : QThread(parent), ffmpeg(ffmpeg)
{
}

void RtspThread::run()
{
    if (ffmpeg) {
        ffmpeg->Play();
    }
}
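Review note: subclassing QThread and calling ffmpeg->Play() from run() works, but QFFmpeg itself is created with the window as parent and so keeps GUI-thread affinity; only the blocking Play() call actually executes on the worker thread. The sketch below shows Qt's worker-object (moveToThread) pattern as an alternative. It is a generic illustration under the assumption of a decoder with a Play()-like slot; Decoder, play, and frameReady are hypothetical names, not a drop-in replacement for RtspThread.

// Sketch of the worker-object pattern instead of subclassing QThread.
#include <QThread>
#include <QObject>
#include <QImage>

class Decoder : public QObject
{
    Q_OBJECT
public slots:
    void play() { /* blocking decode loop, e.g. the body of QFFmpeg::Play() */ }
signals:
    void frameReady(const QImage &image);
};

void startDecoder()
{
    QThread *thread = new QThread;
    Decoder *decoder = new Decoder;      // no parent: a parented object cannot be moved
    decoder->moveToThread(thread);

    // started() is emitted in the worker thread, so play() runs there too.
    QObject::connect(thread, &QThread::started, decoder, &Decoder::play);
    // Clean up once the thread finishes.
    QObject::connect(thread, &QThread::finished, decoder, &QObject::deleteLater);
    QObject::connect(thread, &QThread::finished, thread, &QObject::deleteLater);

    thread->start();
}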
Client/¶Åìèº/code/rtspthread.h
New file @@ -0,0 +1,22 @@
#ifndef RTSPTHREAD_H
#define RTSPTHREAD_H

#include <QThread>
#include "qffmpeg.h"

class RtspThread : public QThread
{
    Q_OBJECT

public:
    explicit RtspThread(QFFmpeg *ffmpeg, QObject *parent = nullptr);

protected:
    void run() override;

private:
    QFFmpeg *ffmpeg;
};

#endif // RTSPTHREAD_H
Client/¶Åìèº/log/ÈÕÖ¾_¶Åìèº_0218.docBinary files differ
Client/¶Åìèº/log/ÈÕÖ¾_¶Åìèº_0219.docBinary files differ
Client/½âÀ´öÎ/log/ÈÕÖ¾_ÐÕÃû_ÈÕÆÚ.docBinary files differ
Client/¹ùÎÄÇ¿/document/ÐÂÐèÇó¹¦Äܹæ¸ñ˵Ã÷Êé - ¹ùÎÄÇ¿.docxBinary files differ
Client/¹ùÎÄÇ¿/log/¹ùÎÄÇ¿_20250218.docBinary files differ
Client/¹ùÎÄÇ¿/log/¹ùÎÄÇ¿_20250219.docBinary files differ
ProjectInformation/1.2ÏîÄ¿½ø¶È±í.etBinary files differ
Server/Ƚ¿/document/ÈÕÖ¾_Ƚ¿_20250218.docBinary files differ
Server/Ƚ¿/log/ÈÕÖ¾_Ƚ¿_20250219.docBinary files differ
Server/¬Ãô/log/ÈÕÖ¾_¬Ãô_2.18.docBinary files differ
Server/¬Ãô/log/ÈÕÖ¾_¬Ãô_2.19.docBinary files differ
Server/ÕÅÃôÀö/document/ÈÕÖ¾_ÐÕÃû_ÈÕÆÚ.docBinary files differ