I have a program that simulates a physical system changing over time. I want to output a visualization of the simulation state to a file at predetermined intervals (say, every 10 seconds), and I want to do it in such a way that it is easy to "turn the visualization off" and not output it at all.
I am looking at OpenGL and GLUT as graphics tools for the visualization. The problem, however, seems to be that, first, it looks like it only outputs to a window and cannot output to a file. Second, in order to generate the visualization you have to call glutMainLoop, which stops the execution of the main function; from that point on, the only functions that get called are callbacks from the GUI. But I do not want this to be a GUI-based application; I want it to just be an application that you run from the command line and that generates a series of images. Is there a way to do this with GLUT/OpenGL? Or is OpenGL entirely the wrong tool for this and I should use something else?
glReadPixels
Runnable PBO example
The example below generates:
one ppm per frame at 200 FPS, with no extra dependencies,
one png per frame at 600 FPS, with libpng,
one mpg containing all frames at 1200 FPS, with FFmpeg,
all of this on a ramfs. The better the compression, the larger the FPS, so we must be memory-IO bound.
FPS is greater than 200 on my 60 FPS screen, and all the images are different, so I am sure it is not limited by the screen's FPS.
The GIF in this answer was generated from the video as explained at: https://askubuntu.com/questions/648603/how-to-create-an-animated-gif-from-mp4-video-via-command-line/837574#837574
glReadPixels is the key OpenGL function that reads pixels back from the screen. Also have a look at the setup under init().
Unlike most image formats, glReadPixels reads the bottom line of pixels first, so a conversion is usually needed.
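Before the full program, here is a minimal sketch of that read-and-flip step on its own (not part of the benchmarked code below), assuming a current GL context whose viewport matches width x height:
/* Minimal sketch: read the current framebuffer and flip it vertically,
 * since glReadPixels returns the bottom row first. Assumes a current
 * GL context and that width/height match the viewport. */
#include <GL/gl.h>
#include <stdlib.h>
#include <string.h>

static unsigned char *read_frame_flipped(unsigned int width, unsigned int height) {
    size_t row = 3 * (size_t)width;              /* 3 bytes per RGB pixel */
    unsigned char *raw = malloc(row * height);
    unsigned char *flipped = malloc(row * height);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);         /* rows tightly packed */
    glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, raw);
    for (unsigned int i = 0; i < height; i++)    /* bottom GL row becomes last image row */
        memcpy(flipped + i * row, raw + (height - 1 - i) * row, row);
    free(raw);
    return flipped;                              /* caller frees */
}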
#ifndef PPM
#define PPM 1
#endif
#ifndef LIBPNG
#define LIBPNG 1
#endif
#ifndef FFMPEG
#define FFMPEG 1
#endif

#include <assert.h>
#include <inttypes.h>
#include <stdio.h>
#include <stdlib.h>

#define GL_GLEXT_PROTOTYPES 1
#include <GL/gl.h>
#include <GL/glext.h>
#include <GL/glu.h>
#include <GL/glut.h>

#if LIBPNG
#include <png.h>
#endif

#if FFMPEG
#include <libavcodec/avcodec.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
#endif

enum Constants { SCREENSHOT_MAX_FILENAME = 256 };
static GLubyte *pixels = NULL;
static GLuint fbo;
static GLuint rbo_color;
static GLuint rbo_depth;
static int offscreen = 1;
static unsigned int max_nframes = 128;
static unsigned int nframes = 0;
static unsigned int time0;
static unsigned int height = 128;
static unsigned int width = 128;

#define PPM_BIT (1 << 0)
#define LIBPNG_BIT (1 << 1)
#define FFMPEG_BIT (1 << 2)
static unsigned int output_formats = PPM_BIT | LIBPNG_BIT | FFMPEG_BIT;

/* Model. */
static double angle;
static double delta_angle;

#if PPM
/* Take screenshot with glReadPixels and save to a file in PPM format.
 *
 * - filename: file path to save to, without extension
 * - width: screen width in pixels
 * - height: screen height in pixels
 * - pixels: intermediate buffer to avoid repeated mallocs across multiple calls.
 *   Contents of this buffer do not matter. May be NULL, in which case it is initialized.
 *   You must `free` it when you won't be calling this function anymore.
 */
static void screenshot_ppm(const char *filename, unsigned int width,
        unsigned int height, GLubyte **pixels) {
    size_t i, j, cur;
    const size_t format_nchannels = 3;
    FILE *f = fopen(filename, "w");
    fprintf(f, "P3\n%d %d\n%d\n", width, height, 255);
    *pixels = realloc(*pixels, format_nchannels * sizeof(GLubyte) * width * height);
    glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, *pixels);
    for (i = 0; i < height; i++) {
        for (j = 0; j < width; j++) {
            cur = format_nchannels * ((height - i - 1) * width + j);
            fprintf(f, "%3d %3d %3d ", (*pixels)[cur], (*pixels)[cur + 1], (*pixels)[cur + 2]);
        }
        fprintf(f, "\n");
    }
    fclose(f);
}
#endif

#if LIBPNG
/* Adapted from https://github.com/cirosantilli/cpp-cheat/blob/19044698f91fefa9cb75328c44f7a487d336b541/png/open_manipulate_write.c */
static png_byte *png_bytes = NULL;
static png_byte **png_rows = NULL;
static void screenshot_png(const char *filename, unsigned int width,
        unsigned int height, GLubyte **pixels, png_byte **png_bytes, png_byte ***png_rows) {
    size_t i, nvals;
    const size_t format_nchannels = 4;
    FILE *f = fopen(filename, "wb");
    nvals = format_nchannels * width * height;
    *pixels = realloc(*pixels, nvals * sizeof(GLubyte));
    *png_bytes = realloc(*png_bytes, nvals * sizeof(png_byte));
    *png_rows = realloc(*png_rows, height * sizeof(png_byte*));
    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, *pixels);
    for (i = 0; i < nvals; i++)
        (*png_bytes)[i] = (*pixels)[i];
    for (i = 0; i < height; i++)
        (*png_rows)[height - i - 1] = &(*png_bytes)[i * width * format_nchannels];
    png_structp png = png_create_write_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL);
    if (!png) abort();
    png_infop info = png_create_info_struct(png);
    if (!info) abort();
    if (setjmp(png_jmpbuf(png))) abort();
    png_init_io(png, f);
    png_set_IHDR(
        png,
        info,
        width,
        height,
        8,
        PNG_COLOR_TYPE_RGBA,
        PNG_INTERLACE_NONE,
        PNG_COMPRESSION_TYPE_DEFAULT,
        PNG_FILTER_TYPE_DEFAULT
    );
    png_write_info(png, info);
    png_write_image(png, *png_rows);
    png_write_end(png, NULL);
    png_destroy_write_struct(&png, &info);
    fclose(f);
}
#endif

#if FFMPEG
/* Adapted from: https://github.com/cirosantilli/cpp-cheat/blob/19044698f91fefa9cb75328c44f7a487d336b541/ffmpeg/encode.c */
static AVCodecContext *c = NULL;
static AVFrame *frame;
static AVPacket pkt;
static FILE *file;
static struct SwsContext *sws_context = NULL;
static uint8_t *rgb = NULL;

static void ffmpeg_encoder_set_frame_yuv_from_rgb(uint8_t *rgb) {
    const int in_linesize[1] = { 4 * c->width };
    sws_context = sws_getCachedContext(sws_context,
            c->width, c->height, AV_PIX_FMT_RGB32,
            c->width, c->height, AV_PIX_FMT_YUV420P,
            0, NULL, NULL, NULL);
    sws_scale(sws_context, (const uint8_t * const *)&rgb, in_linesize, 0,
            c->height, frame->data, frame->linesize);
}

void ffmpeg_encoder_start(const char *filename, int codec_id, int fps, int width, int height) {
    AVCodec *codec;
    int ret;
    avcodec_register_all();
    codec = avcodec_find_encoder(codec_id);
    if (!codec) {
        fprintf(stderr, "Codec not found\n");
        exit(1);
    }
    c = avcodec_alloc_context3(codec);
    if (!c) {
        fprintf(stderr, "Could not allocate video codec context\n");
        exit(1);
    }
    c->bit_rate = 400000;
    c->width = width;
    c->height = height;
    c->time_base.num = 1;
    c->time_base.den = fps;
    c->gop_size = 10;
    c->max_b_frames = 1;
    c->pix_fmt = AV_PIX_FMT_YUV420P;
    if (codec_id == AV_CODEC_ID_H264)
        av_opt_set(c->priv_data, "preset", "slow", 0);
    if (avcodec_open2(c, codec, NULL) < 0) {
        fprintf(stderr, "Could not open codec\n");
        exit(1);
    }
    file = fopen(filename, "wb");
    if (!file) {
        fprintf(stderr, "Could not open %s\n", filename);
        exit(1);
    }
    frame = av_frame_alloc();
    if (!frame) {
        fprintf(stderr, "Could not allocate video frame\n");
        exit(1);
    }
    frame->format = c->pix_fmt;
    frame->width = c->width;
    frame->height = c->height;
    ret = av_image_alloc(frame->data, frame->linesize, c->width, c->height, c->pix_fmt, 32);
    if (ret < 0) {
        fprintf(stderr, "Could not allocate raw picture buffer\n");
        exit(1);
    }
}

void ffmpeg_encoder_finish(void) {
    uint8_t endcode[] = { 0, 0, 1, 0xb7 };
    int got_output, ret;
    do {
        fflush(stdout);
        ret = avcodec_encode_video2(c, &pkt, NULL, &got_output);
        if (ret < 0) {
            fprintf(stderr, "Error encoding frame\n");
            exit(1);
        }
        if (got_output) {
            fwrite(pkt.data, 1, pkt.size, file);
            av_packet_unref(&pkt);
        }
    } while (got_output);
    fwrite(endcode, 1, sizeof(endcode), file);
    fclose(file);
    avcodec_close(c);
    av_free(c);
    av_freep(&frame->data[0]);
    av_frame_free(&frame);
}

void ffmpeg_encoder_encode_frame(uint8_t *rgb) {
    int ret, got_output;
    ffmpeg_encoder_set_frame_yuv_from_rgb(rgb);
    av_init_packet(&pkt);
    pkt.data = NULL;
    pkt.size = 0;
    ret = avcodec_encode_video2(c, &pkt, frame, &got_output);
    if (ret < 0) {
        fprintf(stderr, "Error encoding frame\n");
        exit(1);
    }
    if (got_output) {
        fwrite(pkt.data, 1, pkt.size, file);
        av_packet_unref(&pkt);
    }
}

void ffmpeg_encoder_glread_rgb(uint8_t **rgb, GLubyte **pixels, unsigned int width, unsigned int height) {
    size_t i, j, k, cur_gl, cur_rgb, nvals;
    const size_t format_nchannels = 4;
    nvals = format_nchannels * width * height;
    *pixels = realloc(*pixels, nvals * sizeof(GLubyte));
    *rgb = realloc(*rgb, nvals * sizeof(uint8_t));
    /* Get RGBA to align to 32 bits instead of just 24 for RGB. May be faster for FFmpeg. */
    glReadPixels(0, 0, width, height, GL_RGBA, GL_UNSIGNED_BYTE, *pixels);
    for (i = 0; i < height; i++) {
        for (j = 0; j < width; j++) {
            cur_gl = format_nchannels * (width * (height - i - 1) + j);
            cur_rgb = format_nchannels * (width * i + j);
            for (k = 0; k < format_nchannels; k++)
                (*rgb)[cur_rgb + k] = (*pixels)[cur_gl + k];
        }
    }
}
#endif

static void model_init(void) {
    angle = 0;
    delta_angle = 1;
}

static int model_update(void) {
    angle += delta_angle;
    return 0;
}

static int model_finished(void) {
    return nframes >= max_nframes;
}

static void init(void) {
    int glget;

    if (offscreen) {
        /* Framebuffer */
        glGenFramebuffers(1, &fbo);
        glBindFramebuffer(GL_FRAMEBUFFER, fbo);

        /* Color renderbuffer. */
        glGenRenderbuffers(1, &rbo_color);
        glBindRenderbuffer(GL_RENDERBUFFER, rbo_color);
        /* Storage must be one of: */
        /* GL_RGBA4, GL_RGB565, GL_RGB5_A1, GL_DEPTH_COMPONENT16, GL_STENCIL_INDEX8. */
        glRenderbufferStorage(GL_RENDERBUFFER, GL_RGB565, width, height);
        glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, rbo_color);

        /* Depth renderbuffer. */
        glGenRenderbuffers(1, &rbo_depth);
        glBindRenderbuffer(GL_RENDERBUFFER, rbo_depth);
        glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
        glFramebufferRenderbuffer(GL_DRAW_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rbo_depth);

        glReadBuffer(GL_COLOR_ATTACHMENT0);

        /* Sanity check. */
        assert(glCheckFramebufferStatus(GL_FRAMEBUFFER));
        glGetIntegerv(GL_MAX_RENDERBUFFER_SIZE, &glget);
        assert(width < (unsigned int)glget);
        assert(height < (unsigned int)glget);
    } else {
        glReadBuffer(GL_BACK);
    }

    glClearColor(0.0, 0.0, 0.0, 0.0);
    glEnable(GL_DEPTH_TEST);
    glPixelStorei(GL_PACK_ALIGNMENT, 1);
    glViewport(0, 0, width, height);
    glMatrixMode(GL_PROJECTION);
    glLoadIdentity();
    glMatrixMode(GL_MODELVIEW);

    time0 = glutGet(GLUT_ELAPSED_TIME);
    model_init();
#if FFMPEG
    ffmpeg_encoder_start("tmp.mpg", AV_CODEC_ID_MPEG1VIDEO, 25, width, height);
#endif
}

static void deinit(void) {
    printf("FPS = %f\n", 1000.0 * nframes / (double)(glutGet(GLUT_ELAPSED_TIME) - time0));
    free(pixels);
#if LIBPNG
    if (output_formats & LIBPNG_BIT) {
        free(png_bytes);
        free(png_rows);
    }
#endif
#if FFMPEG
    if (output_formats & FFMPEG_BIT) {
        ffmpeg_encoder_finish();
        free(rgb);
    }
#endif
    if (offscreen) {
        glDeleteFramebuffers(1, &fbo);
        glDeleteRenderbuffers(1, &rbo_color);
        glDeleteRenderbuffers(1, &rbo_depth);
    }
}

static void draw_scene(void) {
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    glLoadIdentity();
    glRotatef(angle, 0.0f, 0.0f, -1.0f);
    glBegin(GL_TRIANGLES);
    glColor3f(1.0f, 0.0f, 0.0f);
    glVertex3f( 0.0f,  0.5f, 0.0f);
    glColor3f(0.0f, 1.0f, 0.0f);
    glVertex3f(-0.5f, -0.5f, 0.0f);
    glColor3f(0.0f, 0.0f, 1.0f);
    glVertex3f( 0.5f, -0.5f, 0.0f);
    glEnd();
}

static void display(void) {
    char filename[SCREENSHOT_MAX_FILENAME];
    draw_scene();
    if (offscreen) {
        glFlush();
    } else {
        glutSwapBuffers();
    }
#if PPM
    if (output_formats & PPM_BIT) {
        snprintf(filename, SCREENSHOT_MAX_FILENAME, "tmp.%d.ppm", nframes);
        screenshot_ppm(filename, width, height, &pixels);
    }
#endif
#if LIBPNG
    if (output_formats & LIBPNG_BIT) {
        snprintf(filename, SCREENSHOT_MAX_FILENAME, "tmp.%d.png", nframes);
        screenshot_png(filename, width, height, &pixels, &png_bytes, &png_rows);
    }
#endif
#if FFMPEG
    if (output_formats & FFMPEG_BIT) {
        frame->pts = nframes;
        ffmpeg_encoder_glread_rgb(&rgb, &pixels, width, height);
        ffmpeg_encoder_encode_frame(rgb);
    }
#endif
    nframes++;
    if (model_finished())
        exit(EXIT_SUCCESS);
}

static void idle(void) {
    while (model_update());
    glutPostRedisplay();
}

int main(int argc, char **argv) {
    int arg;
    GLint glut_display;

    /* CLI args. */
    glutInit(&argc, argv);
    arg = 1;
    if (argc > arg) {
        offscreen = (argv[arg][0] == '1');
    } else {
        offscreen = 1;
    }
    arg++;
    if (argc > arg)
        max_nframes = strtoumax(argv[arg], NULL, 10);
    arg++;
    if (argc > arg)
        width = strtoumax(argv[arg], NULL, 10);
    arg++;
    if (argc > arg)
        height = strtoumax(argv[arg], NULL, 10);
    arg++;
    if (argc > arg)
        output_formats = strtoumax(argv[arg], NULL, 10);

    /* Work. */
    if (offscreen) {
        /* TODO: if we use anything smaller than the window, it only renders a smaller version of things. */
        /*glutInitWindowSize(50, 50);*/
        glutInitWindowSize(width, height);
        glut_display = GLUT_SINGLE;
    } else {
        glutInitWindowSize(width, height);
        glutInitWindowPosition(100, 100);
        glut_display = GLUT_DOUBLE;
    }
    glutInitDisplayMode(glut_display | GLUT_RGBA | GLUT_DEPTH);
    glutCreateWindow(argv[0]);
    if (offscreen) {
        /* TODO: if we hide the window the program blocks. */
        /*glutHideWindow();*/
    }
    init();
    glutDisplayFunc(display);
    glutIdleFunc(idle);
    atexit(deinit);
    glutMainLoop();
    return EXIT_SUCCESS;
}
On GitHub.
Compile with:
sudo apt-get install libpng-dev libavcodec-dev libavutil-dev
gcc -DPPM=1 -DLIBPNG=1 -DFFMPEG=1 -ggdb3 -std=c99 -O0 -Wall -Wextra \
  -o offscreen offscreen.c -lGL -lGLU -lglut -lpng -lavcodec -lswscale -lavutil
Run 10 frames "offscreen" (mostly a TODO: it works, but has no advantage), with size 200 x 100 and all output formats:
./offscreen 1 10 200 100 7
The CLI format is:
./offscreen [offscreen [nframes [width [height [output_formats]]]]]
and output_formats is a bitmask:
ppm = 1 << 0, png = 1 << 1, mpeg = 1 << 2
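For example, passing 7 (1 | 2 | 4), as in the command above, enables all three outputs, while passing 5 (1 | 4) would produce only the ppm frames and the mpg.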
Run on screen (this does not limit my FPS either):
./offscreen 0
Benchmarked on Ubuntu 15.10, OpenGL 4.4.0 NVIDIA 352.63, Lenovo Thinkpad T430.
Also tested on Ubuntu 18.04, OpenGL 4.6.0 NVIDIA 390.77, Lenovo Thinkpad P51.
TODO: find a way to do it on a machine without a GUI (e.g. without X11). It seems that OpenGL is just not made for offscreen rendering, and that reading pixels back from the GPU is implemented at the interface to the windowing system (e.g. GLX). See: OpenGL without X.org in Linux
TODO: use a 1x1 window, make it non-resizable, and hide it to make things more robust. If I do either of those, the rendering fails; see the code comments. Preventing resize seems impossible in GLUT, but GLFW supports it. In any case, those don't matter much, since my FPS is not limited by the screen refresh frequency even with offscreen turned off.
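As an illustration only (not part of the benchmarked program), a hidden, non-resizable context could be set up with GLFW 3 roughly like this:
/* Sketch: create a hidden, non-resizable context with GLFW 3 instead of GLUT.
 * Link with -lglfw. The window never becomes visible, but a context is still
 * required, so this does not remove the X11 dependency. */
#include <GLFW/glfw3.h>
#include <stdio.h>
#include <stdlib.h>

int main(void) {
    if (!glfwInit())
        return EXIT_FAILURE;
    glfwWindowHint(GLFW_VISIBLE, GLFW_FALSE);     /* never map the window */
    glfwWindowHint(GLFW_RESIZABLE, GLFW_FALSE);   /* GLUT cannot do this */
    GLFWwindow *window = glfwCreateWindow(128, 128, "offscreen", NULL, NULL);
    if (!window) {
        glfwTerminate();
        return EXIT_FAILURE;
    }
    glfwMakeContextCurrent(window);
    /* ... render and glReadPixels here, as in the GLUT version ... */
    glfwDestroyWindow(window);
    glfwTerminate();
    return EXIT_SUCCESS;
}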
Other options besides PBO
render to the backbuffer (the default render location)
render to a texture
render to a Pixelbuffer object (PBO)
Framebuffer and Pixelbuffer are better than the backbuffer and textures since they are made for data to be read back to the CPU, while the backbuffer and textures are made to stay on the GPU and be shown on screen.
PBO is for asynchronous transfers, so I think we don't need it, see: What are the differences between a Frame Buffer Object and a Pixel Buffer Object in OpenGL?
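For comparison, here is a rough sketch of the "render to a texture" option; the names mirror the program above but are illustrative, not taken from it:
/* Sketch: attach a texture instead of a renderbuffer as the FBO's color buffer,
 * so the rendered image can also be sampled by later draw calls before read-back. */
#define GL_GLEXT_PROTOTYPES 1
#include <GL/gl.h>
#include <GL/glext.h>
#include <assert.h>

static GLuint make_texture_fbo(unsigned int width, unsigned int height, GLuint *tex_out) {
    GLuint fbo, tex, rbo_depth;
    glGenFramebuffers(1, &fbo);
    glBindFramebuffer(GL_FRAMEBUFFER, fbo);
    /* Color attachment: a texture rather than a renderbuffer. */
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, width, height, 0,
                 GL_RGBA, GL_UNSIGNED_BYTE, NULL);   /* allocate storage only */
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex, 0);
    /* Depth still goes to a renderbuffer, as in the main example. */
    glGenRenderbuffers(1, &rbo_depth);
    glBindRenderbuffer(GL_RENDERBUFFER, rbo_depth);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH_COMPONENT16, width, height);
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_RENDERBUFFER, rbo_depth);
    assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
    *tex_out = tex;
    return fbo;   /* draw into it, then glReadPixels or glGetTexImage */
}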
Maybe offscreen Mesa (OSMesa) is worth looking into: http://www.mesa3d.org/osmesa.html
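I haven't benchmarked it here, but a rough sketch of what the OSMesa API looks like (software rendering into a plain memory buffer, no window system; link with -lOSMesa) would be:
/* Sketch: render into a malloc'd buffer with OSMesa, no window system at all.
 * OSMesa is software rendering, so it is much slower than a GPU context. */
#include <GL/osmesa.h>
#include <GL/gl.h>
#include <stdio.h>
#include <stdlib.h>

int main(void) {
    const int width = 128, height = 128;
    unsigned char *buffer = malloc(4 * width * height);  /* RGBA output */
    OSMesaContext ctx = OSMesaCreateContextExt(OSMESA_RGBA, 16, 0, 0, NULL);
    if (!ctx || !OSMesaMakeCurrent(ctx, buffer, GL_UNSIGNED_BYTE, width, height)) {
        fprintf(stderr, "OSMesa init failed\n");
        return EXIT_FAILURE;
    }
    glClearColor(1.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glFinish();                       /* buffer now holds the rendered pixels */
    /* ... write `buffer` out as ppm/png, as in the screenshot functions above ... */
    OSMesaDestroyContext(ctx);
    free(buffer);
    return EXIT_SUCCESS;
}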
apitrace
https://github.com/apitrace/apitrace
Just works, and does not require you to modify your code at all:
git clone https://github.com/apitrace/apitrace
cd apitrace
git checkout 7.0
mkdir build
cd build
cmake ..
make
# Creates opengl_executable.out.trace
./apitrace trace /path/to/opengl_executable.out
./apitrace dump-images opengl_executable.out.trace
Also available on Ubuntu 18.10 with:
sudo apt-get install apitrace
You now have a bunch of screenshots named:
animation.out.<n>.png
TODO: how it works.
The documentation also suggests this for video:
apitrace dump-images -o - application.trace \
  | ffmpeg -r 30 -f image2pipe -vcodec ppm -i pipe: -vcodec mpeg4 -y output.mp4
See also:
images to GIF: https://unix.stackexchange.com/questions/24014/creating-a-gif-animation-from-png-files/489210#489210
images to video: How to create a video from images with FFmpeg?
Vulkan
It seems that Vulkan is designed to support offscreen rendering better than OpenGL.
This is mentioned on this NVIDIA overview: https://developer.nvidia.com/transitioning-opengl-vulkan
There is a runnable example at: https://github.com/SaschaWillems/Vulkan/blob/0616eeff4e697e4cd23cb9c97f5dd83afb79d908/offscreen/offscreen.cpp but I haven't managed to get Vulkan running yet. 1 kloc :-)
Related: Is it possible to do offscreen rendering without Surface in Vulkan?
Bibliography
How to use GLUT/OpenGL to render to a file?
How to take screenshot in OpenGL
How to render offscreen on OpenGL?
glReadPixels() "data" argument usage?
Render OpenGL ES 2.0 to image
http://www.songho.ca/opengl/gl_fbo.html
http://www.mesa3d.org/brianp/sig97/offscrn.htm
Render off screen (with FBO and RenderBuffer) and pixel transfer of color, depth, stencil
https://gamedev.stackexchange.com/questions/59204/opengl-fbo-render-off-screen-and-texture
What are the differences between a Frame Buffer Object and a Pixel Buffer Object in OpenGL?
glReadPixels() "data" argument usage?
FBO larger than window:
OpenGL how to create, and render to, a framebuffer that's larger than the window?
FBO lwjgl bigger than Screen Size - What I'm doing wrong?
Renderbuffers larger than window size - OpenGL
problem saving openGL FBO larger than window
No Window/X11:
OpenGL without X.org in linux
Can you create OpenGL context without opening a window?
Using OpenGL Without X-Window System
You almost certainly don't want GLUT, regardless. Your requirements don't fit what it is intended to do (and even when your requirements do fit its intended purpose, you usually don't want it anyway).
You can use OpenGL. To generate output in a file, you basically set OpenGL up to render to a texture, then read the resulting texture back into main memory and save it to a file. At least on some systems (e.g. Windows), I'm pretty sure you will still have to create a window and associate the rendering context with the window, though it will probably be fine if the window is always hidden.
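A minimal sketch of that read-back step, assuming a texture `tex` that the scene has already been rendered into (for example via an FBO, as in the other answer):
/* Sketch: copy a rendered texture back to main memory and dump it as a
 * binary PPM. Assumes `tex` is an RGBA8 texture of size width x height. */
#include <GL/gl.h>
#include <stdio.h>
#include <stdlib.h>

static void save_texture_ppm(GLuint tex, int width, int height, const char *path) {
    unsigned char *pixels = malloc(4 * (size_t)width * height);
    glBindTexture(GL_TEXTURE_2D, tex);
    glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
    FILE *f = fopen(path, "wb");
    fprintf(f, "P6\n%d %d\n255\n", width, height);
    for (int i = height - 1; i >= 0; i--)        /* flip: GL rows start at the bottom */
        for (int j = 0; j < width; j++)
            fwrite(&pixels[4 * (i * width + j)], 1, 3, f);  /* drop alpha */
    fclose(f);
    free(pixels);
}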