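ffmpeg3.patch: port blender 2.76 to the newer FFmpeg API. avcodec_alloc_frame()/
avcodec_get_frame_defaults()/avcodec_free_frame() are replaced with av_frame_alloc()/
av_frame_unref()/av_frame_free(), PIX_FMT_* constants become AV_PIX_FMT_*, avfilter is
added to FFMPEG_LIBRARIES, and the removed avpicture_deinterlace() is reimplemented with
a libavfilter yadif graph (buffersrc -> yadif -> buffersink).
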
diff -ur blender-2.76/CMakeLists.txt blender-2.76.ffmpeg/CMakeLists.txt
--- blender-2.76/CMakeLists.txt 2015-10-12 00:58:22.000000000 +0200
+++ blender-2.76.ffmpeg/CMakeLists.txt  2016-04-16 15:31:11.524037254 +0200
@@ -982,7 +982,7 @@
 
        if(WITH_CODEC_FFMPEG)
                set(FFMPEG /usr CACHE PATH "FFMPEG Directory")
-               set(FFMPEG_LIBRARIES avformat avcodec avutil avdevice swscale CACHE STRING "FFMPEG Libraries")
+               set(FFMPEG_LIBRARIES avformat avcodec avutil avdevice swscale avfilter CACHE STRING "FFMPEG Libraries")
 
                mark_as_advanced(FFMPEG)
 
diff -ur blender-2.76/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp blender-2.76.ffmpeg/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp
--- blender-2.76/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp   2015-10-07 02:09:33.000000000 +0200
+++ blender-2.76.ffmpeg/intern/audaspace/ffmpeg/AUD_FFMPEGReader.cpp    2016-04-16 15:31:11.524037254 +0200
@@ -58,9 +58,9 @@
                got_frame = 0;
 
                if(!frame)
-                       frame = avcodec_alloc_frame();
+                       frame = av_frame_alloc();
                else
-                       avcodec_get_frame_defaults(frame);
+                       av_frame_unref(frame);
 
                read_length = avcodec_decode_audio4(m_codecCtx, frame, &got_frame, &packet);
                if(read_length < 0)
diff -ur blender-2.76/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp blender-2.76.ffmpeg/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp
--- blender-2.76/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp   2015-10-07 02:09:33.000000000 +0200
+++ blender-2.76.ffmpeg/intern/audaspace/ffmpeg/AUD_FFMPEGWriter.cpp    2016-04-16 15:31:11.524037254 +0200
@@ -202,7 +202,7 @@
                        m_frame = av_frame_alloc();
                        if (!m_frame)
                                AUD_THROW(AUD_ERROR_FFMPEG, codec_error);
-                       avcodec_get_frame_defaults(m_frame);
+                       av_frame_unref(m_frame);
                        m_frame->linesize[0]    = m_input_size * samplesize;
                        m_frame->format         = m_codecCtx->sample_fmt;
                        m_frame->nb_samples     = m_input_size;
diff -ur blender-2.76/source/blender/blenkernel/intern/writeffmpeg.c blender-2.76.ffmpeg/source/blender/blenkernel/intern/writeffmpeg.c
--- blender-2.76/source/blender/blenkernel/intern/writeffmpeg.c 2015-10-12 00:58:22.000000000 +0200
+++ blender-2.76.ffmpeg/source/blender/blenkernel/intern/writeffmpeg.c  2016-04-16 15:31:11.527370628 +0200
@@ -138,8 +138,8 @@
        context->audio_time += (double) context->audio_input_samples / (double) c->sample_rate;
 
 #ifdef FFMPEG_HAVE_ENCODE_AUDIO2
-       frame = avcodec_alloc_frame();
-       avcodec_get_frame_defaults(frame);
+       frame = av_frame_alloc();
+       av_frame_unref(frame);
        frame->pts = context->audio_time / av_q2d(c->time_base);
        frame->nb_samples = context->audio_input_samples;
        frame->format = c->sample_fmt;
@@ -172,7 +172,7 @@
        }
 
        if (!got_output) {
-               avcodec_free_frame(&frame);
+               av_frame_free(&frame);
                return 0;
        }
 #else
@@ -202,7 +202,7 @@
                if (av_interleaved_write_frame(context->outfile, &pkt) != 0) {
                        fprintf(stderr, "Error writing audio packet!\n");
                        if (frame)
-                               avcodec_free_frame(&frame);
+                               av_frame_free(&frame);
                        return -1;
                }
 
@@ -210,7 +210,7 @@
        }
 
        if (frame)
-               avcodec_free_frame(&frame);
+               av_frame_free(&frame);
 
        return 0;
 }
@@ -224,7 +224,7 @@
        int size;
        
        /* allocate space for the struct */
-       f = avcodec_alloc_frame();
+       f = av_frame_alloc();
        if (!f) return NULL;
        size = avpicture_get_size(pix_fmt, width, height);
        /* allocate the actual picture buffer */
@@ -363,8 +363,8 @@
        int height = c->height;
        AVFrame *rgb_frame;
 
-       if (c->pix_fmt != PIX_FMT_BGR32) {
-               rgb_frame = alloc_picture(PIX_FMT_BGR32, width, height);
+       if (c->pix_fmt != AV_PIX_FMT_BGR32) {
+               rgb_frame = alloc_picture(AV_PIX_FMT_BGR32, width, height);
                if (!rgb_frame) {
                        BKE_report(reports, RPT_ERROR, "Could not allocate temporary frame");
                        return NULL;
@@ -414,14 +414,14 @@
                }
        }
 
-       if (c->pix_fmt != PIX_FMT_BGR32) {
+       if (c->pix_fmt != AV_PIX_FMT_BGR32) {
                sws_scale(context->img_convert_ctx, (const uint8_t *const *) rgb_frame->data,
                          rgb_frame->linesize, 0, c->height,
                          context->current_frame->data, context->current_frame->linesize);
                delete_picture(rgb_frame);
        }
 
-       context->current_frame->format = PIX_FMT_BGR32;
+       context->current_frame->format = AV_PIX_FMT_BGR32;
        context->current_frame->width = width;
        context->current_frame->height = height;
 
@@ -586,12 +586,12 @@
        }
        else {
                /* makes HuffYUV happy ... */
-               c->pix_fmt = PIX_FMT_YUV422P;
+               c->pix_fmt = AV_PIX_FMT_YUV422P;
        }
 
        if (context->ffmpeg_type == FFMPEG_XVID) {
                /* arghhhh ... */
-               c->pix_fmt = PIX_FMT_YUV420P;
+               c->pix_fmt = AV_PIX_FMT_YUV420P;
                c->codec_tag = (('D' << 24) + ('I' << 16) + ('V' << 8) + 'X');
        }
 
@@ -604,26 +604,26 @@
        /* Keep lossless encodes in the RGB domain. */
        if (codec_id == AV_CODEC_ID_HUFFYUV) {
                if (rd->im_format.planes == R_IMF_PLANES_RGBA) {
-                       c->pix_fmt = PIX_FMT_BGRA;
+                       c->pix_fmt = AV_PIX_FMT_BGRA;
                }
                else {
-                       c->pix_fmt = PIX_FMT_RGB32;
+                       c->pix_fmt = AV_PIX_FMT_RGB32;
                }
        }
 
        if (codec_id == AV_CODEC_ID_FFV1) {
-               c->pix_fmt = PIX_FMT_RGB32;
+               c->pix_fmt = AV_PIX_FMT_RGB32;
        }
 
        if (codec_id == AV_CODEC_ID_QTRLE) {
                if (rd->im_format.planes == R_IMF_PLANES_RGBA) {
-                       c->pix_fmt = PIX_FMT_ARGB;
+                       c->pix_fmt = AV_PIX_FMT_ARGB;
                }
        }
 
        if (codec_id == AV_CODEC_ID_PNG) {
                if (rd->im_format.planes == R_IMF_PLANES_RGBA) {
-                       c->pix_fmt = PIX_FMT_RGBA;
+                       c->pix_fmt = AV_PIX_FMT_RGBA;
                }
        }
 
@@ -661,7 +661,7 @@
 
        context->current_frame = alloc_picture(c->pix_fmt, c->width, c->height);
 
-       context->img_convert_ctx = sws_getContext(c->width, c->height, PIX_FMT_BGR32, c->width, c->height, c->pix_fmt, SWS_BICUBIC,
+       context->img_convert_ctx = sws_getContext(c->width, c->height, AV_PIX_FMT_BGR32, c->width, c->height, c->pix_fmt, SWS_BICUBIC,
                                         NULL, NULL, NULL);
        return st;
 }
diff -ur blender-2.76/source/blender/imbuf/intern/anim_movie.c blender-2.76.ffmpeg/source/blender/imbuf/intern/anim_movie.c
--- blender-2.76/source/blender/imbuf/intern/anim_movie.c       2015-10-07 02:09:33.000000000 +0200
+++ blender-2.76.ffmpeg/source/blender/imbuf/intern/anim_movie.c        2016-04-16 15:31:11.527370628 +0200
@@ -474,6 +474,10 @@
        const int *inv_table;
 #endif
 
        if (anim == NULL) return(-1);
 
+       anim->last_width = -1;
+       anim->last_height = -1;
+       anim->last_pixfmt = AV_PIX_FMT_NONE;
+
        streamcount = anim->streamindex;
@@ -562,21 +566,21 @@
        anim->next_pts = -1;
        anim->next_packet.stream_index = -1;
 
-       anim->pFrame = avcodec_alloc_frame();
+       anim->pFrame = av_frame_alloc();
        anim->pFrameComplete = false;
-       anim->pFrameDeinterlaced = avcodec_alloc_frame();
-       anim->pFrameRGB = avcodec_alloc_frame();
+       anim->pFrameDeinterlaced = av_frame_alloc();
+       anim->pFrameRGB = av_frame_alloc();
 
-       if (avpicture_get_size(PIX_FMT_RGBA, anim->x, anim->y) !=
+       if (avpicture_get_size(AV_PIX_FMT_RGBA, anim->x, anim->y) !=
            anim->x * anim->y * 4)
        {
                fprintf(stderr,
                        "ffmpeg has changed alloc scheme ... ARGHHH!\n");
                avcodec_close(anim->pCodecCtx);
                avformat_close_input(&anim->pFormatCtx);
-               av_free(anim->pFrameRGB);
-               av_free(anim->pFrameDeinterlaced);
-               av_free(anim->pFrame);
+               av_frame_free(&anim->pFrameRGB);
+               av_frame_free(&anim->pFrameDeinterlaced);
+               av_frame_free(&anim->pFrame);
                anim->pCodecCtx = NULL;
                return -1;
        }
@@ -606,7 +610,7 @@
                anim->pCodecCtx->pix_fmt,
                anim->x,
                anim->y,
-               PIX_FMT_RGBA,
+               AV_PIX_FMT_RGBA,
                SWS_FAST_BILINEAR | SWS_PRINT_INFO | SWS_FULL_CHR_H_INT,
                NULL, NULL, NULL);
                
@@ -615,9 +619,9 @@
                        "Can't transform color space??? Bailing out...\n");
                avcodec_close(anim->pCodecCtx);
                avformat_close_input(&anim->pFormatCtx);
-               av_free(anim->pFrameRGB);
-               av_free(anim->pFrameDeinterlaced);
-               av_free(anim->pFrame);
+               av_frame_free(&anim->pFrameRGB);
+               av_frame_free(&anim->pFrameDeinterlaced);
+               av_frame_free(&anim->pFrame);
                anim->pCodecCtx = NULL;
                return -1;
        }
@@ -644,6 +648,74 @@
        return (0);
 }
 
+static void delete_filter_graph(struct anim *anim) {
+    if (anim->filter_graph) {
+        av_frame_free(&anim->filter_frame);
+        avfilter_graph_free(&anim->filter_graph);
+    }
+}
+
+static int init_filter_graph(struct anim *anim, enum AVPixelFormat pixfmt, int width, int height) {
+    AVFilterInOut *inputs = NULL, *outputs = NULL;
+    char args[512];
+    int res;
+
+    delete_filter_graph(anim);
+    anim->filter_graph = avfilter_graph_alloc();
+    snprintf(args, sizeof(args),
+             "buffer=video_size=%dx%d:pix_fmt=%d:time_base=1/1:pixel_aspect=0/1[in];"
+             "[in]yadif[out];"
+             "[out]buffersink",
+             width, height, pixfmt);
+    res = avfilter_graph_parse2(anim->filter_graph, args, &inputs, &outputs);
+    if (res < 0)
+        return res;
+    if(inputs || outputs)
+        return -1;
+    res = avfilter_graph_config(anim->filter_graph, NULL);
+    if (res < 0)
+        return res;
+
+    anim->buffersrc_ctx = avfilter_graph_get_filter(anim->filter_graph, "Parsed_buffer_0");
+    anim->buffersink_ctx = avfilter_graph_get_filter(anim->filter_graph, "Parsed_buffersink_2");
+    if (!anim->buffersrc_ctx || !anim->buffersink_ctx)
+        return -1;
+    anim->filter_frame = av_frame_alloc();
+    anim->last_width = width;
+    anim->last_height = height;
+    anim->last_pixfmt = pixfmt;
+
+    return 0;
+}
+
+static int process_filter_graph(struct anim *anim, AVPicture *dst, const AVPicture *src,
+                                enum AVPixelFormat pixfmt, int width, int height) {
+    int res;
+
+    if (!anim->filter_graph || width != anim->last_width ||
+        height != anim->last_height || pixfmt != anim->last_pixfmt) {
+        res = init_filter_graph(anim, pixfmt, width, height);
+        if (res < 0)
+            return res;
+    }
+
+    memcpy(anim->filter_frame->data, src->data, sizeof(src->data));
+    memcpy(anim->filter_frame->linesize, src->linesize, sizeof(src->linesize));
+    anim->filter_frame->width = width;
+    anim->filter_frame->height = height;
+    anim->filter_frame->format = pixfmt;
+    res = av_buffersrc_add_frame(anim->buffersrc_ctx, anim->filter_frame);
+    if (res < 0)
+        return res;
+    res = av_buffersink_get_frame(anim->buffersink_ctx, anim->filter_frame);
+    if (res < 0)
+        return res;
+    av_picture_copy(dst, (const AVPicture *) anim->filter_frame, pixfmt, width, height);
+    av_frame_unref(anim->filter_frame);
+
+    return 0;
+}
+
 /* postprocess the image in anim->pFrame and do color conversion
  * and deinterlacing stuff.
  *
@@ -677,7 +749,8 @@
 
 
        if (anim->ib_flags & IB_animdeinterlace) {
-               if (avpicture_deinterlace(
+               if (process_filter_graph(
+                       anim,
                        (AVPicture *)
                        anim->pFrameDeinterlaced,
                        (const AVPicture *)
@@ -695,7 +768,7 @@
        
        avpicture_fill((AVPicture *) anim->pFrameRGB,
                       (unsigned char *) ibuf->rect,
-                      PIX_FMT_RGBA, anim->x, anim->y);
+                      AV_PIX_FMT_RGBA, anim->x, anim->y);
 
        if (ENDIAN_ORDER == B_ENDIAN) {
                int *dstStride   = anim->pFrameRGB->linesize;
@@ -1138,16 +1211,18 @@
 {
        if (anim == NULL) return;
 
+       delete_filter_graph(anim);
+
        if (anim->pCodecCtx) {
                avcodec_close(anim->pCodecCtx);
                avformat_close_input(&anim->pFormatCtx);
-               av_free(anim->pFrameRGB);
-               av_free(anim->pFrame);
+               av_frame_free(&anim->pFrameRGB);
+               av_frame_free(&anim->pFrame);
 
                if (anim->ib_flags & IB_animdeinterlace) {
                        MEM_freeN(anim->pFrameDeinterlaced->data[0]);
                }
-               av_free(anim->pFrameDeinterlaced);
+               av_frame_free(&anim->pFrameDeinterlaced);
                sws_freeContext(anim->img_convert_ctx);
                IMB_freeImBuf(anim->last_frame);
                if (anim->next_packet.stream_index != -1) {
diff -ur blender-2.76/source/blender/imbuf/intern/IMB_anim.h blender-2.76.ffmpeg/source/blender/imbuf/intern/IMB_anim.h
--- blender-2.76/source/blender/imbuf/intern/IMB_anim.h 2015-10-07 02:09:33.000000000 +0200
+++ blender-2.76.ffmpeg/source/blender/imbuf/intern/IMB_anim.h  2016-04-16 15:31:11.527370628 +0200
@@ -76,6 +76,9 @@
 #  include <libavformat/avformat.h>
 #  include <libavcodec/avcodec.h>
 #  include <libswscale/swscale.h>
+#  include <libavfilter/avfilter.h>
+#  include <libavfilter/buffersrc.h>
+#  include <libavfilter/buffersink.h>
 #endif
 
 #ifdef WITH_REDCODE
@@ -175,6 +178,14 @@
        int64_t last_pts;
        int64_t next_pts;
        AVPacket next_packet;
+
+       AVFilterContext *buffersink_ctx;
+       AVFilterContext *buffersrc_ctx;
+       AVFilterGraph *filter_graph;
+       AVFrame *filter_frame;
+       int last_width;
+       int last_height;
+       enum AVPixelFormat last_pixfmt;
 #endif
 
 #ifdef WITH_REDCODE
diff -ur blender-2.76/source/blender/imbuf/intern/indexer.c blender-2.76.ffmpeg/source/blender/imbuf/intern/indexer.c
--- blender-2.76/source/blender/imbuf/intern/indexer.c  2015-10-07 02:09:33.000000000 +0200
+++ blender-2.76.ffmpeg/source/blender/imbuf/intern/indexer.c   2016-04-16 15:31:11.527370628 +0200
@@ -519,7 +519,7 @@
                rv->c->pix_fmt = rv->codec->pix_fmts[0];
        }
        else {
-               rv->c->pix_fmt = PIX_FMT_YUVJ420P;
+               rv->c->pix_fmt = AV_PIX_FMT_YUVJ420P;
        }
 
        rv->c->sample_aspect_ratio =
@@ -554,7 +554,7 @@
        if (st->codec->width != width || st->codec->height != height ||
            st->codec->pix_fmt != rv->c->pix_fmt)
        {
-               rv->frame = avcodec_alloc_frame();
+               rv->frame = av_frame_alloc();
                avpicture_fill((AVPicture *) rv->frame,
                               MEM_mallocN(avpicture_get_size(
                                               rv->c->pix_fmt,
@@ -675,7 +675,7 @@
                sws_freeContext(ctx->sws_ctx);
 
                MEM_freeN(ctx->frame->data[0]);
-               av_free(ctx->frame);
+               av_frame_free(&ctx->frame);
        }
 
        get_proxy_filename(ctx->anim, ctx->proxy_size, 
@@ -905,7 +905,7 @@
 
        memset(&next_packet, 0, sizeof(AVPacket));
 
-       in_frame = avcodec_alloc_frame();
+       in_frame = av_frame_alloc();
 
        stream_size = avio_size(context->iFormatCtx->pb);
 
@@ -973,7 +973,7 @@
                } while (frame_finished);
        }
 
-       av_free(in_frame);
+       av_frame_free(&in_frame);
 
        return 1;
 }
diff -ur blender-2.76/source/gameengine/VideoTexture/VideoFFmpeg.cpp blender-2.76.ffmpeg/source/gameengine/VideoTexture/VideoFFmpeg.cpp
--- blender-2.76/source/gameengine/VideoTexture/VideoFFmpeg.cpp 2015-10-12 00:58:22.000000000 +0200
+++ blender-2.76.ffmpeg/source/gameengine/VideoTexture/VideoFFmpeg.cpp  2016-04-16 15:31:11.527370628 +0200
@@ -79,11 +79,16 @@
        BLI_listbase_clear(&m_frameCacheBase);
        BLI_listbase_clear(&m_packetCacheFree);
        BLI_listbase_clear(&m_packetCacheBase);
+       last_width = -1;
+       last_height = -1;
+       last_pixfmt = AV_PIX_FMT_NONE;
+
 }
 
 // destructor
 VideoFFmpeg::~VideoFFmpeg () 
 {
+       delete_filter_graph(this);
 }
 
 void VideoFFmpeg::refresh(void)
@@ -140,23 +145,23 @@
 AVFrame        *VideoFFmpeg::allocFrameRGB()
 {
        AVFrame *frame;
-       frame = avcodec_alloc_frame();
+       frame = av_frame_alloc();
        if (m_format == RGBA32)
        {
                avpicture_fill((AVPicture*)frame, 
                        (uint8_t*)MEM_callocN(avpicture_get_size(
-                               PIX_FMT_RGBA,
+                               AV_PIX_FMT_RGBA,
                                m_codecCtx->width, m_codecCtx->height),
                                "ffmpeg rgba"),
-                       PIX_FMT_RGBA, m_codecCtx->width, m_codecCtx->height);
+                       AV_PIX_FMT_RGBA, m_codecCtx->width, m_codecCtx->height);
        } else 
        {
                avpicture_fill((AVPicture*)frame, 
                        (uint8_t*)MEM_callocN(avpicture_get_size(
-                               PIX_FMT_RGB24,
+                               AV_PIX_FMT_RGB24,
                                m_codecCtx->width, m_codecCtx->height),
                                "ffmpeg rgb"),
-                       PIX_FMT_RGB24, m_codecCtx->width, m_codecCtx->height);
+                       AV_PIX_FMT_RGB24, m_codecCtx->width, m_codecCtx->height);
        }
        return frame;
 }
@@ -236,8 +241,8 @@
        m_codecCtx = codecCtx;
        m_formatCtx = formatCtx;
        m_videoStream = videoStream;
-       m_frame = avcodec_alloc_frame();
-       m_frameDeinterlaced = avcodec_alloc_frame();
+       m_frame = av_frame_alloc();
+       m_frameDeinterlaced = av_frame_alloc();
 
        // allocate buffer if deinterlacing is required
        avpicture_fill((AVPicture*)m_frameDeinterlaced, 
@@ -248,10 +253,10 @@
                m_codecCtx->pix_fmt, m_codecCtx->width, m_codecCtx->height);
 
        // check if the pixel format supports Alpha
-       if (m_codecCtx->pix_fmt == PIX_FMT_RGB32 ||
-               m_codecCtx->pix_fmt == PIX_FMT_BGR32 ||
-               m_codecCtx->pix_fmt == PIX_FMT_RGB32_1 ||
-               m_codecCtx->pix_fmt == PIX_FMT_BGR32_1) 
+       if (m_codecCtx->pix_fmt == AV_PIX_FMT_RGB32 ||
+               m_codecCtx->pix_fmt == AV_PIX_FMT_BGR32 ||
+               m_codecCtx->pix_fmt == AV_PIX_FMT_RGB32_1 ||
+               m_codecCtx->pix_fmt == AV_PIX_FMT_BGR32_1) 
        {
                // allocate buffer to store final decoded frame
                m_format = RGBA32;
@@ -262,7 +267,7 @@
                        m_codecCtx->pix_fmt,
                        m_codecCtx->width,
                        m_codecCtx->height,
-                       PIX_FMT_RGBA,
+                       AV_PIX_FMT_RGBA,
                        SWS_FAST_BILINEAR,
                        NULL, NULL, NULL);
        } else
@@ -276,7 +281,7 @@
                        m_codecCtx->pix_fmt,
                        m_codecCtx->width,
                        m_codecCtx->height,
-                       PIX_FMT_RGB24,
+                       AV_PIX_FMT_RGB24,
                        SWS_FAST_BILINEAR,
                        NULL, NULL, NULL);
        }
@@ -293,13 +298,81 @@
                av_free(m_frameDeinterlaced);
                m_frameDeinterlaced = NULL;
                MEM_freeN(m_frameRGB->data[0]);
-               av_free(m_frameRGB);
+               av_frame_free(&m_frameRGB);
                m_frameRGB = NULL;
                return -1;
        }
        return 0;
 }
 
+void VideoFFmpeg::delete_filter_graph(VideoFFmpeg* video) {
+    if (video->filter_graph) {
+        av_frame_free(&video->filter_frame);
+        avfilter_graph_free(&video->filter_graph);
+    }
+}
+
+int VideoFFmpeg::init_filter_graph(VideoFFmpeg* video, enum AVPixelFormat pixfmt, int width, int height) {
+    AVFilterInOut *inputs = NULL, *outputs = NULL;
+    char args[512];
+    int res;
+
+    delete_filter_graph(video);
+    video->filter_graph = avfilter_graph_alloc();
+    snprintf(args, sizeof(args),
+             "buffer=video_size=%dx%d:pix_fmt=%d:time_base=1/1:pixel_aspect=0/1[in];"
+             "[in]yadif[out];"
+             "[out]buffersink",
+             width, height, pixfmt);
+    res = avfilter_graph_parse2(video->filter_graph, args, &inputs, &outputs);
+    if (res < 0)
+        return res;
+    if(inputs || outputs)
+        return -1;
+    res = avfilter_graph_config(video->filter_graph, NULL);
+    if (res < 0)
+        return res;
+
+    video->buffersrc_ctx = avfilter_graph_get_filter(video->filter_graph, "Parsed_buffer_0");
+    video->buffersink_ctx = avfilter_graph_get_filter(video->filter_graph, "Parsed_buffersink_2");
+    if (!video->buffersrc_ctx || !video->buffersink_ctx)
+        return -1;
+    video->filter_frame = av_frame_alloc();
+    video->last_width = width;
+    video->last_height = height;
+    video->last_pixfmt = pixfmt;
+
+    return 0;
+}
+
+int VideoFFmpeg::process_filter_graph(VideoFFmpeg* video, AVPicture *dst, const AVPicture *src,
+                                      enum AVPixelFormat pixfmt, int width, int height) {
+    int res;
+
+    if (!video->filter_graph || width != video->last_width ||
+        height != video->last_height || pixfmt != video->last_pixfmt) {
+        res = init_filter_graph(video, pixfmt, width, height);
+        if (res < 0)
+            return res;
+    }
+
+    memcpy(video->filter_frame->data, src->data, sizeof(src->data));
+    memcpy(video->filter_frame->linesize, src->linesize, sizeof(src->linesize));
+    video->filter_frame->width = width;
+    video->filter_frame->height = height;
+    video->filter_frame->format = pixfmt;
+    res = av_buffersrc_add_frame(video->buffersrc_ctx, video->filter_frame);
+    if (res < 0)
+        return res;
+    res = av_buffersink_get_frame(video->buffersink_ctx, video->filter_frame);
+    if (res < 0)
+        return res;
+    av_picture_copy(dst, (const AVPicture *) video->filter_frame, pixfmt, width, height);
+    av_frame_unref(video->filter_frame);
+
+    return 0;
+}
+
 /*
  * This thread is used to load video frame asynchronously.
  * It provides a frame caching service. 
@@ -392,7 +465,7 @@
                                        {
                                                if (video->m_deinterlace) 
                                                {
-                                                       if (avpicture_deinterlace(
+                                                       if (process_filter_graph(video,
                                                                (AVPicture*) video->m_frameDeinterlaced,
                                                                (const AVPicture*) video->m_frame,
                                                                video->m_codecCtx->pix_fmt,
@@ -486,14 +559,14 @@
                {
                        BLI_remlink(&m_frameCacheBase, frame);
                        MEM_freeN(frame->frame->data[0]);
-                       av_free(frame->frame);
+                       av_frame_free(&frame->frame);
                        delete frame;
                }
                while ((frame = (CacheFrame *)m_frameCacheFree.first) != NULL)
                {
                        BLI_remlink(&m_frameCacheFree, frame);
                        MEM_freeN(frame->frame->data[0]);
-                       av_free(frame->frame);
+                       av_frame_free(&frame->frame);
                        delete frame;
                }
                while ((packet = (CachePacket *)m_packetCacheBase.first) != NULL)
@@ -1057,7 +1130,7 @@
 
                                if (m_deinterlace) 
                                {
-                                       if (avpicture_deinterlace(
+                                       if (process_filter_graph(this,
                                                (AVPicture*) m_frameDeinterlaced,
                                                (const AVPicture*) m_frame,
                                                m_codecCtx->pix_fmt,
diff -ur blender-2.76/source/gameengine/VideoTexture/VideoFFmpeg.h blender-2.76.ffmpeg/source/gameengine/VideoTexture/VideoFFmpeg.h
--- blender-2.76/source/gameengine/VideoTexture/VideoFFmpeg.h   2015-10-10 10:20:56.000000000 +0200
+++ blender-2.76.ffmpeg/source/gameengine/VideoTexture/VideoFFmpeg.h    2016-04-16 15:31:11.527370628 +0200
@@ -39,6 +39,9 @@
 extern "C" {
 #include <pthread.h>
 #include "ffmpeg_compat.h"
+#include <libavfilter/avfilter.h>
+#include <libavfilter/buffersrc.h>
+#include <libavfilter/buffersink.h>
 #include "DNA_listBase.h"
 #include "BLI_threads.h"
 #include "BLI_blenlib.h"
@@ -207,6 +210,18 @@
 
        AVFrame *allocFrameRGB();
        static void *cacheThread(void *);
+
+       AVFilterContext *buffersink_ctx;
+       AVFilterContext *buffersrc_ctx;
+       AVFilterGraph *filter_graph;
+       AVFrame *filter_frame;
+       int last_width;
+       int last_height;
+       enum AVPixelFormat last_pixfmt;
+
+       static void delete_filter_graph(VideoFFmpeg* video);
+       static int init_filter_graph(VideoFFmpeg* video, enum AVPixelFormat pixfmt, int width, int height);
+       static int process_filter_graph(VideoFFmpeg* video, AVPicture *dst, const AVPicture *src, enum AVPixelFormat pixfmt, int width, int height);
 };
 
 inline VideoFFmpeg *getFFmpeg(PyImage *self)