GRASS Programmer's Manual 6.4.2 (2012)
#include <stdlib.h>
#include <string.h>

#include <grass/gis.h>
#include <grass/glocale.h>
#include <grass/ogsf_proto.h>
#include <grass/gstypes.h>

/* FFMPEG stuff */
#ifdef HAVE_FFMPEG
#include <avformat.h>

/* 5 seconds stream duration */
#define STREAM_DURATION   5.0
#define STREAM_FRAME_RATE 25	/* 25 images/s */
#define STREAM_NB_FRAMES  ((int)(STREAM_DURATION * STREAM_FRAME_RATE))
#define STREAM_PIX_FMT    PIX_FMT_YUV420P	/* default pix_fmt */

AVFrame *picture, *tmp_picture;
uint8_t *video_outbuf;
int frame_count, video_outbuf_size;

AVOutputFormat *fmt;
AVFormatContext *oc;
AVStream *video_st;

static AVStream *add_video_stream(AVFormatContext * oc, int codec_id, int w,
                                  int h)
{
    AVCodecContext *c;
    AVStream *st;

    st = av_new_stream(oc, 0);
    if (!st) {
        G_warning(_("Unable to allocate stream"));
        return NULL;
    }

    c = st->codec;
    c->codec_id = codec_id;
    c->codec_type = CODEC_TYPE_VIDEO;

    /* put sample parameters */
    c->bit_rate = 400000;
    /* resolution must be a multiple of two */
    c->width = w;
    c->height = h;
    /* time base: this is the fundamental unit of time (in seconds) in terms
       of which frame timestamps are represented. for fixed-fps content,
       timebase should be 1/framerate and timestamp increments should be
       identically 1. */
    c->time_base.den = STREAM_FRAME_RATE;
    c->time_base.num = 1;
    c->gop_size = 12;		/* emit one intra frame every twelve frames at most */
    c->pix_fmt = STREAM_PIX_FMT;
    if (c->codec_id == CODEC_ID_MPEG2VIDEO) {
        /* just for testing, we also add B frames */
        c->max_b_frames = 2;
    }
    if (c->codec_id == CODEC_ID_MPEG1VIDEO) {
        /* Needed to avoid using macroblocks in which some coeffs overflow.
           This does not happen with normal video, it just happens here as
           the motion of the chroma plane does not match the luma plane.
         */
        c->mb_decision = 2;
    }
    /* some formats want stream headers to be separate */
    if (!strcmp(oc->oformat->name, "mp4") || !strcmp(oc->oformat->name, "mov")
        || !strcmp(oc->oformat->name, "3gp"))
        c->flags |= CODEC_FLAG_GLOBAL_HEADER;

    c->flags |= CODEC_FLAG_QSCALE;
    c->global_quality = st->quality = FF_QP2LAMBDA * 10;

    return st;
}

static AVFrame *alloc_picture(int pix_fmt, int width, int height)
{
    AVFrame *picture;
    uint8_t *picture_buf;
    int size;

    picture = avcodec_alloc_frame();

    if (!picture)
        return NULL;

    size = avpicture_get_size(pix_fmt, width, height);
    picture_buf = av_malloc(size);

    if (!picture_buf) {
        av_free(picture);
        return NULL;
    }

    avpicture_fill((AVPicture *) picture, picture_buf,
                   pix_fmt, width, height);

    return picture;
}

static void open_video(AVFormatContext * oc, AVStream * st)
{
    AVCodec *codec;
    AVCodecContext *c;

    c = st->codec;

    /* find the video encoder */
    codec = avcodec_find_encoder(c->codec_id);
    if (!codec) {
        G_warning(_("Video codec not found"));
        return;
    }

    /* open the codec */
    if (avcodec_open(c, codec) < 0) {
        G_warning(_("Unable to open codec"));
        return;
    }

    video_outbuf = NULL;
    if (!(oc->oformat->flags & AVFMT_RAWPICTURE)) {
        /* allocate output buffer */
        /* XXX: API change will be done */
        /* buffers passed into lav* can be allocated any way you prefer,
           as long as they're aligned enough for the architecture, and
           they're freed appropriately (such as using av_free for buffers
           allocated with av_malloc) */
        video_outbuf_size = 200000;
        video_outbuf = av_malloc(video_outbuf_size);
    }

    /* allocate the encoded raw picture */
    picture = alloc_picture(c->pix_fmt, c->width, c->height);
    if (!picture) {
        G_warning(_("Unable to allocate picture"));
        return;
    }

    /* if the output format is not YUV420P, then a temporary YUV420P
       picture is needed too. It is then converted to the required
       output format */
    tmp_picture = NULL;
    if (c->pix_fmt != PIX_FMT_YUV420P) {
        tmp_picture = alloc_picture(PIX_FMT_YUV420P, c->width, c->height);
        if (!tmp_picture) {
            G_warning(_("Unable to allocate temporary picture"));
            return;
        }
    }
}

static void write_video_frame(AVFormatContext * oc, AVStream * st)
{
    int out_size, ret;
    AVCodecContext *c;

    c = st->codec;

    if (oc->oformat->flags & AVFMT_RAWPICTURE) {
        /* raw video case.
           The API will change slightly in the near
           future for that */
        AVPacket pkt;

        av_init_packet(&pkt);

        pkt.flags |= PKT_FLAG_KEY;
        pkt.stream_index = st->index;
        pkt.data = (uint8_t *) picture;
        pkt.size = sizeof(AVPicture);

        ret = av_write_frame(oc, &pkt);
    }
    else {
        /* encode the image */
        out_size =
            avcodec_encode_video(c, video_outbuf, video_outbuf_size, picture);
        /* if zero size, it means the image was buffered */
        if (out_size > 0) {
            AVPacket pkt;

            av_init_packet(&pkt);

            pkt.pts =
                av_rescale_q(c->coded_frame->pts, c->time_base,
                             st->time_base);
            if (c->coded_frame->key_frame)
                pkt.flags |= PKT_FLAG_KEY;
            pkt.stream_index = st->index;
            pkt.data = video_outbuf;
            pkt.size = out_size;

            /* write the compressed frame in the media file */
            ret = av_write_frame(oc, &pkt);
        }
        else {
            ret = 0;
        }
    }
    if (ret != 0) {
        G_warning(_("Error while writing video frame"));
        return;
    }
    frame_count++;
}

static void close_video(AVFormatContext * oc, AVStream * st)
{
    avcodec_close(st->codec);
    av_free(picture->data[0]);
    av_free(picture);
    if (tmp_picture) {
        av_free(tmp_picture->data[0]);
        av_free(tmp_picture);
    }
    av_free(video_outbuf);
}

#endif

int gsd_init_mpeg(const char *filename)
{
#ifdef HAVE_FFMPEG
    GLuint l, r, b, t;
    GLint tmp[4];

    glGetIntegerv(GL_VIEWPORT, tmp);
    l = tmp[0];
    r = tmp[0] + tmp[2] - 1;
    b = tmp[1];
    t = tmp[1] + tmp[3] - 1;

    G_verbose_message(_("Opening MPEG stream <%s>..."), filename);

    /* initialize libavcodec, and register all codecs and formats */
    av_register_all();

    /* auto detect the output format from the name. default is mpeg. */
    fmt = guess_format(NULL, filename, NULL);
    if (!fmt) {
        G_warning(_("Unable to deduce output format from file extension: using MPEG"));
        fmt = guess_format("mpeg", NULL, NULL);
    }
    if (!fmt) {
        G_warning(_("Unable to find suitable output format"));
        return (-1);
    }

    /* allocate the output media context */
    oc = av_alloc_format_context();
    if (!oc) {
        G_warning(_("Out of memory"));
        return (-1);
    }
    oc->oformat = fmt;
    snprintf(oc->filename, sizeof(oc->filename), "%s", filename);

    /* if you want to hardcode the codec (eg #ifdef USE_XVID)
       this may be the place to do it (?????) */
#ifdef USE_XVID
    fmt->video_codec = CODEC_ID_XVID;
#endif

    video_st = NULL;
    if (fmt->video_codec != CODEC_ID_NONE) {
        video_st =
            add_video_stream(oc, fmt->video_codec, (r - l + 1), (t - b + 1));
    }

    /* set the output parameters (must be done even if no parameters).
     */
    if (av_set_parameters(oc, NULL) < 0) {
        G_warning(_("Invalid output format parameters"));
        return (-1);
    }

    dump_format(oc, 0, filename, 1);

    /* now that all the parameters are set, we can open the audio and
       video codecs and allocate the necessary encode buffers */
    if (video_st)
        open_video(oc, video_st);

    /* open the output file, if needed */
    if (!(fmt->flags & AVFMT_NOFILE)) {
        if (url_fopen(&oc->pb, filename, URL_WRONLY) < 0) {
            G_warning(_("Unable to open <%s>"), filename);
            return (-1);
        }
    }

    /* write the stream header, if any */
    av_write_header(oc);

#else
    G_warning(_("NVIZ has not been built with MPEG output support"));
    return (-1);
#endif
    return (0);
}

int gsd_write_mpegframe(void)
{
#ifdef HAVE_FFMPEG
    unsigned int xsize, ysize;
    int x, y, xy, xy_uv;
    int yy, uu, vv;
    unsigned char *pixbuf;

    gsd_getimage(&pixbuf, &xsize, &ysize);
    xy = xy_uv = 0;
    for (y = ysize - 1; y >= 0; y--) {
        for (x = 0; x < xsize; x++) {
            unsigned char r = pixbuf[(y * xsize + x) * 4 + 0];
            unsigned char g = pixbuf[(y * xsize + x) * 4 + 1];
            unsigned char b = pixbuf[(y * xsize + x) * 4 + 2];

            /* RGB -> YUV conversion (ITU-R BT.601 studio-swing coefficients) */
            yy = (0.257 * r) + (0.504 * g) + (0.098 * b) + 16;
            vv = (0.439 * r) - (0.368 * g) - (0.071 * b) + 128;
            uu = -(0.148 * r) - (0.291 * g) + (0.439 * b) + 128;
            fflush(stdout);
            picture->data[0][xy] = yy;

            /* chroma planes are subsampled 2x2 for YUV420P */
            if ((x % 2) && (y % 2)) {
                picture->data[1][xy_uv] = uu;
                picture->data[2][xy_uv] = vv;
                xy_uv++;
            }

            xy++;
        }
    }
    G_free(pixbuf);

    write_video_frame(oc, video_st);

#endif

    return (0);
}

int gsd_close_mpeg(void)
{
#ifdef HAVE_FFMPEG
    int i;

    close_video(oc, video_st);

    /* write the trailer, if any */
    av_write_trailer(oc);

    /* free the streams */
    for (i = 0; i < oc->nb_streams; i++) {
        av_freep(&oc->streams[i]->codec);
        av_freep(&oc->streams[i]);
    }

    if (!(fmt->flags & AVFMT_NOFILE)) {
        /* close the output file */
#if (LIBAVFORMAT_VERSION_INT>>16) < 52
        url_fclose(&oc->pb);
#else
        url_fclose(oc->pb);
#endif
    }

    /* free the stream */
    av_free(oc);

    G_debug(3, "Closed MPEG stream");
#endif

    return (0);
}
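
The three public functions above form a simple open/write/close API: gsd_init_mpeg() sizes the stream from the current GL viewport and writes the container header, gsd_write_mpegframe() grabs the viewport via gsd_getimage() and encodes it as one video frame, and gsd_close_mpeg() writes the trailer and releases the FFMPEG objects. The following is a minimal usage sketch, not part of the library: it assumes a current OpenGL context and an initialized OGSF scene, with the prototypes coming from <grass/ogsf_proto.h> as included above; export_animation(), render_next_frame(), and nframes are hypothetical names used only for illustration (in NVIZ the equivalent loop is driven by the animation panel).

#include <grass/gis.h>
#include <grass/gstypes.h>
#include <grass/ogsf_proto.h>

/* hypothetical: draw one frame of the scene into the current GL viewport */
extern void render_next_frame(int frame);

int export_animation(const char *filename, int nframes)
{
    int i;

    /* open the output stream; returns -1 on failure or when
       GRASS was built without FFMPEG support */
    if (gsd_init_mpeg(filename) != 0)
        return -1;

    for (i = 0; i < nframes; i++) {
        render_next_frame(i);     /* render into the current viewport */
        gsd_write_mpegframe();    /* grab the viewport and encode it */
    }

    return gsd_close_mpeg();      /* write trailer, free FFMPEG objects */
}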