Add some more frame stats granularity, skip frame stats on no-data calls

Lauri Kasanen 2021-08-02 13:47:48 +03:00
parent c1ed769780
commit f57e6e644b
7 changed files with 66 additions and 9 deletions

View File

@@ -42,6 +42,7 @@ namespace network {
uint32_t jpeg, uint32_t webp, uint32_t analysis,
uint32_t jpegarea, uint32_t webparea,
uint16_t njpeg, uint16_t nwebp,
uint16_t enc, uint16_t scale, uint16_t shot,
uint16_t w, uint16_t h);
void mainUpdateClientFrameStats(const char userid[], uint32_t render, uint32_t all,
uint32_t ping);
@@ -110,6 +111,9 @@ namespace network {
uint32_t webparea;
uint16_t njpeg;
uint16_t nwebp;
uint16_t enc;
uint16_t scale;
uint16_t shot;
uint16_t w;
uint16_t h;
uint8_t changedPerc;

View File

@@ -123,6 +123,7 @@ void GetAPIMessager::mainUpdateServerFrameStats(uint8_t changedPerc,
uint32_t all, uint32_t jpeg, uint32_t webp, uint32_t analysis,
uint32_t jpegarea, uint32_t webparea,
uint16_t njpeg, uint16_t nwebp,
uint16_t enc, uint16_t scale, uint16_t shot,
uint16_t w, uint16_t h) {
if (pthread_mutex_lock(&frameStatMutex))
@@ -137,6 +138,9 @@ void GetAPIMessager::mainUpdateServerFrameStats(uint8_t changedPerc,
serverFrameStats.webparea = webparea;
serverFrameStats.njpeg = njpeg;
serverFrameStats.nwebp = nwebp;
serverFrameStats.enc = enc;
serverFrameStats.scale = scale;
serverFrameStats.shot = shot;
serverFrameStats.w = w;
serverFrameStats.h = h;
@@ -487,10 +491,15 @@ void GetAPIMessager::netGetFrameStats(char *buf, uint32_t len) {
fprintf(f, "\t\"server_side\" : [\n"
"\t\t{ \"process_name\": \"Analysis\", \"time\": %u },\n"
"\t\t{ \"process_name\": \"Screenshot\", \"time\": %u },\n"
"\t\t{ \"process_name\": \"Encoding_total\", \"time\": %u, \"videoscaling\": %u },\n"
"\t\t{ \"process_name\": \"TightJPEGEncoder\", \"time\": %u, \"count\": %u, \"area\": %u },\n"
"\t\t{ \"process_name\": \"TightWEBPEncoder\", \"time\": %u, \"count\": %u, \"area\": %u }\n"
"\t],\n",
serverFrameStats.analysis,
serverFrameStats.shot,
serverFrameStats.enc,
serverFrameStats.scale,
serverFrameStats.jpeg,
serverFrameStats.njpeg,
serverFrameStats.jpegarea,
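
For reference, the updated format string above emits a server_side block along these lines; the numbers below are made-up placeholder values (times are in milliseconds, as they come from the msSince()-based counters):

    "server_side" : [
        { "process_name": "Analysis", "time": 2 },
        { "process_name": "Screenshot", "time": 1 },
        { "process_name": "Encoding_total", "time": 14, "videoscaling": 3 },
        { "process_name": "TightJPEGEncoder", "time": 9, "count": 24, "area": 480000 },
        { "process_name": "TightWEBPEncoder", "time": 5, "count": 6, "area": 120000 }
    ],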

View File

@@ -1085,6 +1085,9 @@ void EncodeManager::writeRects(const Region& changed, const PixelBuffer* pb,
// In case the current resolution is above the max video res, and video was detected,
// scale to that res, keeping aspect ratio
struct timeval scalestart;
gettimeofday(&scalestart, NULL);
const PixelBuffer *scaledpb = NULL;
if (videoDetected &&
(maxVideoX < pb->getRect().width() || maxVideoY < pb->getRect().height())) {
@@ -1133,6 +1136,7 @@ void EncodeManager::writeRects(const Region& changed, const PixelBuffer* pb,
}
}
}
scalingTime = msSince(&scalestart);
#pragma omp parallel for schedule(dynamic, 1)
for (i = 0; i < subrects.size(); ++i) {
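
The scaling pass is timed with the same gettimeofday()/msSince() pair used for the other counters. msSince() already exists in the codebase (it is called above and in VNCServerST below); as a minimal sketch, it is assumed to return the elapsed wall-clock time in milliseconds, roughly like this hypothetical stand-in:

    #include <sys/time.h>

    // Hypothetical helper illustrating the assumed behaviour of msSince():
    // elapsed wall-clock milliseconds since *then.
    static unsigned elapsedMs(const struct timeval *then) {
        struct timeval now;
        gettimeofday(&now, NULL);
        return (now.tv_sec - then->tv_sec) * 1000 +
               (now.tv_usec - then->tv_usec) / 1000;
    }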

View File

@@ -68,9 +68,16 @@ namespace rfb {
const RenderedCursor* renderedCursor,
size_t maxUpdateSize);
void clearEncodingTime() {
encodingTime = 0;
};
unsigned getEncodingTime() const {
return encodingTime;
};
unsigned getScalingTime() const {
return scalingTime;
};
struct codecstats_t {
uint32_t ms;
@@ -192,6 +199,7 @@ namespace rfb {
bool webpTookTooLong;
unsigned encodingTime;
unsigned maxEncodingTime, framesSinceEncPrint;
unsigned scalingTime;
EncCache *encCache;
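
Read together with the VNCSConnectionST and VNCServerST hunks further down, the per-frame time accounting goes roughly like this (a sketch of the flow, not verbatim code):

    // Per connection, per frame (sketch):
    // 1. writeFramebufferUpdate() calls encodeManager.clearEncodingTime(),
    //    zeroing encodingTime before the new frame is encoded.
    // 2. writeRects() times the optional video downscale into scalingTime
    //    and accumulates encoder time into encodingTime.
    // 3. VNCServerST::writeUpdate() sums the per-connection values:
    //        enctime   += (*ci)->getEncodingTime();
    //        scaletime += (*ci)->getScalingTime();
    //    and forwards them via mainUpdateServerFrameStats().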

View File

@@ -1194,6 +1194,7 @@ bool VNCSConnectionST::isCongested()
void VNCSConnectionST::writeFramebufferUpdate()
{
congestion.updatePosition(sock->outStream().length());
encodeManager.clearEncodingTime();
// We're in the middle of processing a command that's supposed to be
// synchronised. Allowing an update to slip out right now might violate

View File

@@ -186,6 +186,13 @@ namespace rfb {
return encodeManager.webpstats;
}
unsigned getEncodingTime() const {
return encodeManager.getEncodingTime();
}
unsigned getScalingTime() const {
return encodeManager.getScalingTime();
}
private:
// SConnection callbacks

View File

@@ -1006,14 +1006,20 @@ void VNCServerST::writeUpdate()
}
}
unsigned shottime = 0;
if (apimessager) {
struct timeval shotstart;
gettimeofday(&shotstart, NULL);
apimessager->mainUpdateScreen(pb);
shottime = msSince(&shotstart);
trackingFrameStats = 0;
checkAPIMessages(apimessager, trackingFrameStats, trackingClient);
}
const rdr::U8 origtrackingFrameStats = trackingFrameStats;
EncodeManager::codecstats_t jpegstats, webpstats;
unsigned enctime = 0, scaletime = 0;
memset(&jpegstats, 0, sizeof(EncodeManager::codecstats_t));
memset(&webpstats, 0, sizeof(EncodeManager::codecstats_t));
@@ -1053,10 +1059,14 @@ void VNCServerST::writeUpdate()
webpstats.ms += subwebp.ms;
webpstats.area += subwebp.area;
webpstats.rects += subwebp.rects;
enctime += (*ci)->getEncodingTime();
scaletime += (*ci)->getScalingTime();
}
}
if (trackingFrameStats) {
if (enctime) {
const unsigned totalMs = msSince(&start);
if (apimessager)
@@ -1065,8 +1075,22 @@ void VNCServerST::writeUpdate()
analysisMs,
jpegstats.area, webpstats.area,
jpegstats.rects, webpstats.rects,
enctime, scaletime, shottime,
pb->getRect().width(),
pb->getRect().height());
} else {
// Zero encoding time means this was a no-data frame; restore the stats request
if (apimessager && pthread_mutex_lock(&apimessager->userMutex) == 0) {
network::GetAPIMessager::action_data act;
act.action = (network::GetAPIMessager::USER_ACTION) origtrackingFrameStats;
memcpy(act.data.password, trackingClient, 128);
apimessager->actionQueue.push_back(act);
pthread_mutex_unlock(&apimessager->userMutex);
}
}
}
}
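
Finally, a rough sketch of the request round trip this hunk completes; names are taken from the hunks above, and the queueing details are simplified:

    // 1. A frame stats request is queued as an action on the GetAPIMessager;
    //    checkAPIMessages() delivers it to writeUpdate() as
    //    trackingFrameStats / trackingClient.
    // 2. If the frame encoded data (enctime != 0), the collected timings are
    //    reported back through mainUpdateServerFrameStats().
    // 3. If the frame carried no data (enctime == 0), the original action is
    //    pushed back onto actionQueue under userMutex, so that a later,
    //    data-carrying frame can answer the request instead.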