0001-ffmpeg30.patch

From a61b19b524cd2b66a7c43e67edd7cc780bf46cbb Mon Sep 17 00:00:00 2001
From: Alexander Alekhin <alexander.alekhin@itseez.com>
Date: Wed, 2 Mar 2016 17:54:17 +0300
Subject: [PATCH] backport ffmpeg fixes

Signed-off-by: Bernd Kuhls <bernd.kuhls@t-online.de>

Downloaded from upstream commit:
https://github.com/Itseez/opencv/commit/a61b19b524cd2b66a7c43e67edd7cc780bf46cbb
---
 modules/highgui/src/cap_ffmpeg_impl.hpp | 364 +++++++++++++++++++++++++++-----
 1 file changed, 314 insertions(+), 50 deletions(-)
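
For orientation: the core mechanism the hunks below introduce is FFmpeg's AVIOInterruptCB,
registered on the AVFormatContext so that a blocking open/read gives up after a deadline
(the patch uses a 30 s default, LIBAVFORMAT_INTERRUPT_TIMEOUT_MS) instead of hanging forever.
Reduced to its essentials, and assuming a POSIX clock_gettime() is available (the patch itself
carries Windows and macOS fallbacks), the pattern looks roughly like the sketch below; the
helper names (InterruptDeadline, interrupt_cb, open_with_timeout) are illustrative only and do
not appear in the patch:

    extern "C" {
    #include <libavformat/avformat.h>
    }
    #include <time.h>

    struct InterruptDeadline
    {
        timespec start;           // when the blocking call was started
        unsigned int timeout_ms;  // give up after this many milliseconds
    };

    // FFmpeg invokes this periodically from inside blocking I/O; a non-zero
    // return value makes the pending operation fail (AVERROR_EXIT).
    static int interrupt_cb(void* opaque)
    {
        InterruptDeadline* d = (InterruptDeadline*)opaque;
        timespec now;
        clock_gettime(CLOCK_MONOTONIC, &now);
        double elapsed_ms = (now.tv_sec - d->start.tv_sec) * 1000.0
                          + (now.tv_nsec - d->start.tv_nsec) / 1000000.0;
        return elapsed_ms > d->timeout_ms ? 1 : 0;
    }

    // Wire the callback up before avformat_open_input(); the hunks below do the
    // same inside CvCapture_FFMPEG::open() and InputMediaStream_FFMPEG::open(),
    // and refresh the start time after every successfully read packet.
    static AVFormatContext* open_with_timeout(const char* url, unsigned int timeout_ms)
    {
        static InterruptDeadline deadline;   // must outlive the context (sketch only; not reentrant)
        deadline.timeout_ms = timeout_ms;
        clock_gettime(CLOCK_MONOTONIC, &deadline.start);

        AVFormatContext* ctx = avformat_alloc_context();
        ctx->interrupt_callback.callback = interrupt_cb;
        ctx->interrupt_callback.opaque = &deadline;

        if (avformat_open_input(&ctx, url, NULL, NULL) < 0)
            return NULL;   // timed out or failed; FFmpeg frees the context on failure
        return ctx;
    }
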
diff --git a/modules/highgui/src/cap_ffmpeg_impl.hpp b/modules/highgui/src/cap_ffmpeg_impl.hpp
index 1b79870..6df542a 100644
--- a/modules/highgui/src/cap_ffmpeg_impl.hpp
+++ b/modules/highgui/src/cap_ffmpeg_impl.hpp
@@ -118,11 +118,6 @@ extern "C" {
 #define CV_WARN(message) fprintf(stderr, "warning: %s (%s:%d)\n", message, __FILE__, __LINE__)
 #endif
 
-/* PIX_FMT_RGBA32 macro changed in newer ffmpeg versions */
-#ifndef PIX_FMT_RGBA32
-#define PIX_FMT_RGBA32 PIX_FMT_RGB32
-#endif
-
 #define CALC_FFMPEG_VERSION(a,b,c) ( a<<16 | b<<8 | c )
 
 #if defined WIN32 || defined _WIN32
@@ -132,6 +127,11 @@ extern "C" {
     #include <stdio.h>
     #include <sys/types.h>
     #include <sys/sysctl.h>
+    #include <sys/time.h>
+#if defined __APPLE__
+    #include <mach/clock.h>
+    #include <mach/mach.h>
+#endif
 #endif
 
 #ifndef MIN
@@ -156,6 +156,155 @@ extern "C" {
 # define CV_CODEC(name) name
 #endif
 
+#if LIBAVUTIL_BUILD < (LIBAVUTIL_VERSION_MICRO >= 100 \
+    ? CALC_FFMPEG_VERSION(51, 74, 100) : CALC_FFMPEG_VERSION(51, 42, 0))
+#define AVPixelFormat PixelFormat
+#define AV_PIX_FMT_BGR24 PIX_FMT_BGR24
+#define AV_PIX_FMT_RGB24 PIX_FMT_RGB24
+#define AV_PIX_FMT_GRAY8 PIX_FMT_GRAY8
+#define AV_PIX_FMT_YUV422P PIX_FMT_YUV422P
+#define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P
+#define AV_PIX_FMT_YUV444P PIX_FMT_YUV444P
+#define AV_PIX_FMT_YUVJ420P PIX_FMT_YUVJ420P
+#define AV_PIX_FMT_GRAY16LE PIX_FMT_GRAY16LE
+#define AV_PIX_FMT_GRAY16BE PIX_FMT_GRAY16BE
+#endif
+
+#if LIBAVUTIL_BUILD >= (LIBAVUTIL_VERSION_MICRO >= 100 \
+    ? CALC_FFMPEG_VERSION(52, 38, 100) : CALC_FFMPEG_VERSION(52, 13, 0))
+#define USE_AV_FRAME_GET_BUFFER 1
+#else
+#define USE_AV_FRAME_GET_BUFFER 0
+#ifndef AV_NUM_DATA_POINTERS // required for 0.7.x/0.8.x ffmpeg releases
+#define AV_NUM_DATA_POINTERS 4
+#endif
+#endif
+
+
+#ifndef USE_AV_INTERRUPT_CALLBACK
+#if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 21, 0)
+#define USE_AV_INTERRUPT_CALLBACK 1
+#else
+#define USE_AV_INTERRUPT_CALLBACK 0
+#endif
+#endif
+
+#if USE_AV_INTERRUPT_CALLBACK
+#define LIBAVFORMAT_INTERRUPT_TIMEOUT_MS 30000
+
+#ifdef WIN32
+// http://stackoverflow.com/questions/5404277/porting-clock-gettime-to-windows
+
+static
+inline LARGE_INTEGER get_filetime_offset()
+{
+    SYSTEMTIME s;
+    FILETIME f;
+    LARGE_INTEGER t;
+
+    s.wYear = 1970;
+    s.wMonth = 1;
+    s.wDay = 1;
+    s.wHour = 0;
+    s.wMinute = 0;
+    s.wSecond = 0;
+    s.wMilliseconds = 0;
+    SystemTimeToFileTime(&s, &f);
+    t.QuadPart = f.dwHighDateTime;
+    t.QuadPart <<= 32;
+    t.QuadPart |= f.dwLowDateTime;
+    return t;
+}
+
+static
+inline void get_monotonic_time(timespec *tv)
+{
+    LARGE_INTEGER t;
+    FILETIME f;
+    double microseconds;
+    static LARGE_INTEGER offset;
+    static double frequencyToMicroseconds;
+    static int initialized = 0;
+    static BOOL usePerformanceCounter = 0;
+
+    if (!initialized)
+    {
+        LARGE_INTEGER performanceFrequency;
+        initialized = 1;
+        usePerformanceCounter = QueryPerformanceFrequency(&performanceFrequency);
+        if (usePerformanceCounter)
+        {
+            QueryPerformanceCounter(&offset);
+            frequencyToMicroseconds = (double)performanceFrequency.QuadPart / 1000000.;
+        }
+        else
+        {
+            offset = get_filetime_offset();
+            frequencyToMicroseconds = 10.;
+        }
+    }
+
+    if (usePerformanceCounter)
+    {
+        QueryPerformanceCounter(&t);
+    } else {
+        GetSystemTimeAsFileTime(&f);
+        t.QuadPart = f.dwHighDateTime;
+        t.QuadPart <<= 32;
+        t.QuadPart |= f.dwLowDateTime;
+    }
+
+    t.QuadPart -= offset.QuadPart;
+    microseconds = (double)t.QuadPart / frequencyToMicroseconds;
+    t.QuadPart = microseconds;
+    tv->tv_sec = t.QuadPart / 1000000;
+    tv->tv_nsec = (t.QuadPart % 1000000) * 1000;
+}
+#else
+static
+inline void get_monotonic_time(timespec *time)
+{
+#if defined(__APPLE__) && defined(__MACH__)
+    clock_serv_t cclock;
+    mach_timespec_t mts;
+    host_get_clock_service(mach_host_self(), CALENDAR_CLOCK, &cclock);
+    clock_get_time(cclock, &mts);
+    mach_port_deallocate(mach_task_self(), cclock);
+    time->tv_sec = mts.tv_sec;
+    time->tv_nsec = mts.tv_nsec;
+#else
+    clock_gettime(CLOCK_MONOTONIC, time);
+#endif
+}
+#endif
+
+static
+inline timespec get_monotonic_time_diff(timespec start, timespec end)
+{
+    timespec temp;
+    if (end.tv_nsec - start.tv_nsec < 0)
+    {
+        temp.tv_sec = end.tv_sec - start.tv_sec - 1;
+        temp.tv_nsec = 1000000000 + end.tv_nsec - start.tv_nsec;
+    }
+    else
+    {
+        temp.tv_sec = end.tv_sec - start.tv_sec;
+        temp.tv_nsec = end.tv_nsec - start.tv_nsec;
+    }
+    return temp;
+}
+
+static
+inline double get_monotonic_time_diff_ms(timespec time1, timespec time2)
+{
+    timespec delta = get_monotonic_time_diff(time1, time2);
+    double milliseconds = delta.tv_sec * 1000 + (double)delta.tv_nsec / 1000000.0;
+
+    return milliseconds;
+}
+#endif // USE_AV_INTERRUPT_CALLBACK
+
 static int get_number_of_cpus(void)
 {
 #if LIBAVFORMAT_BUILD < CALC_FFMPEG_VERSION(52, 111, 0)
@@ -205,12 +354,36 @@ struct Image_FFMPEG
 };
 
+#if USE_AV_INTERRUPT_CALLBACK
+struct AVInterruptCallbackMetadata
+{
+    timespec value;
+    unsigned int timeout_after_ms;
+    int timeout;
+};
+
+static
 inline void _opencv_ffmpeg_free(void** ptr)
 {
     if(*ptr) free(*ptr);
     *ptr = 0;
 }
 
+static
+inline int _opencv_ffmpeg_interrupt_callback(void *ptr)
+{
+    AVInterruptCallbackMetadata* metadata = (AVInterruptCallbackMetadata*)ptr;
+    assert(metadata);
+
+    timespec now;
+    get_monotonic_time(&now);
+
+    metadata->timeout = get_monotonic_time_diff_ms(metadata->value, now) > metadata->timeout_after_ms;
+
+    return metadata->timeout ? -1 : 0;
+}
+#endif
+
 struct CvCapture_FFMPEG
 {
@@ -264,6 +437,10 @@ struct CvCapture_FFMPEG
 #if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(52, 111, 0)
     AVDictionary *dict;
 #endif
+
+#if USE_AV_INTERRUPT_CALLBACK
+    AVInterruptCallbackMetadata interrupt_metadata;
+#endif
 };
 
 void CvCapture_FFMPEG::init()
@@ -301,8 +478,10 @@ void CvCapture_FFMPEG::close()
     if( picture )
     {
-        // FFmpeg and Libav added avcodec_free_frame in different versions.
 #if LIBAVCODEC_BUILD >= (LIBAVCODEC_VERSION_MICRO >= 100 \
+    ? CALC_FFMPEG_VERSION(55, 45, 101) : CALC_FFMPEG_VERSION(55, 28, 1))
+        av_frame_free(&picture);
+#elif LIBAVCODEC_BUILD >= (LIBAVCODEC_VERSION_MICRO >= 100 \
     ? CALC_FFMPEG_VERSION(54, 59, 100) : CALC_FFMPEG_VERSION(54, 28, 0))
         avcodec_free_frame(&picture);
 #else
@@ -333,11 +512,15 @@ void CvCapture_FFMPEG::close()
         ic = NULL;
     }
 
+#if USE_AV_FRAME_GET_BUFFER
+    av_frame_unref(&rgb_picture);
+#else
     if( rgb_picture.data[0] )
     {
         free( rgb_picture.data[0] );
         rgb_picture.data[0] = 0;
     }
+#endif
 
     // free last packet if exist
     if (packet.data) {
@@ -556,6 +739,16 @@ bool CvCapture_FFMPEG::open( const char* _filename )
     close();
 
+#if USE_AV_INTERRUPT_CALLBACK
+    /* interrupt callback */
+    interrupt_metadata.timeout_after_ms = LIBAVFORMAT_INTERRUPT_TIMEOUT_MS;
+    get_monotonic_time(&interrupt_metadata.value);
+
+    ic = avformat_alloc_context();
+    ic->interrupt_callback.callback = _opencv_ffmpeg_interrupt_callback;
+    ic->interrupt_callback.opaque = &interrupt_metadata;
+#endif
+
 #if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(52, 111, 0)
     av_dict_set(&dict, "rtsp_transport", "tcp", 0);
     int err = avformat_open_input(&ic, _filename, NULL, &dict);
@@ -619,19 +812,18 @@ bool CvCapture_FFMPEG::open( const char* _filename )
             video_stream = i;
             video_st = ic->streams[i];
+#if LIBAVCODEC_BUILD >= (LIBAVCODEC_VERSION_MICRO >= 100 \
+    ? CALC_FFMPEG_VERSION(55, 45, 101) : CALC_FFMPEG_VERSION(55, 28, 1))
+            picture = av_frame_alloc();
+#else
             picture = avcodec_alloc_frame();
-
-            rgb_picture.data[0] = (uint8_t*)malloc(
-                avpicture_get_size( PIX_FMT_BGR24,
-                enc->width, enc->height ));
-            avpicture_fill( (AVPicture*)&rgb_picture, rgb_picture.data[0],
-                PIX_FMT_BGR24, enc->width, enc->height );
+#endif
 
             frame.width = enc->width;
             frame.height = enc->height;
             frame.cn = 3;
-            frame.step = rgb_picture.linesize[0];
-            frame.data = rgb_picture.data[0];
+            frame.step = 0;
+            frame.data = NULL;
             break;
         }
     }
@@ -668,6 +860,16 @@ bool CvCapture_FFMPEG::grabFrame()
     // get the next frame
     while (!valid)
     {
+        av_free_packet (&packet);
+
+#if USE_AV_INTERRUPT_CALLBACK
+        if (interrupt_metadata.timeout)
+        {
+            valid = false;
+            break;
+        }
+#endif
+
         int ret = av_read_frame(ic, &packet);
         if (ret == AVERROR(EAGAIN)) continue;
@@ -703,6 +905,11 @@ bool CvCapture_FFMPEG::grabFrame()
                 picture_pts = packet.pts != AV_NOPTS_VALUE_ && packet.pts != 0 ? packet.pts : packet.dts;
                 frame_number++;
                 valid = true;
+
+#if USE_AV_INTERRUPT_CALLBACK
+                // update interrupt value
+                get_monotonic_time(&interrupt_metadata.value);
+#endif
             }
             else
             {
@@ -727,38 +934,59 @@ bool CvCapture_FFMPEG::retrieveFrame(int, unsigned char** data, int* step, int*
     if( !video_st || !picture->data[0] )
         return false;
 
-    avpicture_fill((AVPicture*)&rgb_picture, rgb_picture.data[0], PIX_FMT_RGB24,
-        video_st->codec->width, video_st->codec->height);
-
     if( img_convert_ctx == NULL ||
         frame.width != video_st->codec->width ||
-        frame.height != video_st->codec->height )
+        frame.height != video_st->codec->height ||
+        frame.data == NULL )
     {
-        if( img_convert_ctx )
-            sws_freeContext(img_convert_ctx);
-
-        frame.width = video_st->codec->width;
-        frame.height = video_st->codec->height;
+        // Some sws_scale optimizations have some assumptions about alignment of data/step/width/height
+        // Also we use coded_width/height to workaround problem with legacy ffmpeg versions (like n0.8)
+        int buffer_width = video_st->codec->coded_width, buffer_height = video_st->codec->coded_height;
         img_convert_ctx = sws_getCachedContext(
-            NULL,
-            video_st->codec->width, video_st->codec->height,
+            img_convert_ctx,
+            buffer_width, buffer_height,
             video_st->codec->pix_fmt,
-            video_st->codec->width, video_st->codec->height,
-            PIX_FMT_BGR24,
+            buffer_width, buffer_height,
+            AV_PIX_FMT_BGR24,
             SWS_BICUBIC,
             NULL, NULL, NULL
             );
         if (img_convert_ctx == NULL)
             return false;//CV_Error(0, "Cannot initialize the conversion context!");
+
+#if USE_AV_FRAME_GET_BUFFER
+        av_frame_unref(&rgb_picture);
+        rgb_picture.format = AV_PIX_FMT_BGR24;
+        rgb_picture.width = buffer_width;
+        rgb_picture.height = buffer_height;
+        if (0 != av_frame_get_buffer(&rgb_picture, 32))
+        {
+            CV_WARN("OutOfMemory");
+            return false;
+        }
+#else
+        int aligns[AV_NUM_DATA_POINTERS];
+        avcodec_align_dimensions2(video_st->codec, &buffer_width, &buffer_height, aligns);
+        rgb_picture.data[0] = (uint8_t*)realloc(rgb_picture.data[0],
+            avpicture_get_size( AV_PIX_FMT_BGR24,
+            buffer_width, buffer_height ));
+        avpicture_fill( (AVPicture*)&rgb_picture, rgb_picture.data[0],
+            AV_PIX_FMT_BGR24, buffer_width, buffer_height );
+#endif
+        frame.width = video_st->codec->width;
+        frame.height = video_st->codec->height;
+        frame.cn = 3;
+        frame.data = rgb_picture.data[0];
+        frame.step = rgb_picture.linesize[0];
     }
 
     sws_scale(
         img_convert_ctx,
         picture->data,
         picture->linesize,
-        0, video_st->codec->height,
+        0, video_st->codec->coded_height,
         rgb_picture.data,
         rgb_picture.linesize
         );
@@ -1099,10 +1327,20 @@ static AVFrame * icv_alloc_picture_FFMPEG(int pix_fmt, int width, int height, bo
     uint8_t * picture_buf;
     int size;
 
+#if LIBAVCODEC_BUILD >= (LIBAVCODEC_VERSION_MICRO >= 100 \
+    ? CALC_FFMPEG_VERSION(55, 45, 101) : CALC_FFMPEG_VERSION(55, 28, 1))
+    picture = av_frame_alloc();
+#else
     picture = avcodec_alloc_frame();
+#endif
     if (!picture)
         return NULL;
 
-    size = avpicture_get_size( (PixelFormat) pix_fmt, width, height);
+
+    picture->format = pix_fmt;
+    picture->width = width;
+    picture->height = height;
+
+    size = avpicture_get_size( (AVPixelFormat) pix_fmt, width, height);
     if(alloc){
         picture_buf = (uint8_t *) malloc(size);
         if (!picture_buf)
@@ -1111,7 +1349,7 @@ static AVFrame * icv_alloc_picture_FFMPEG(int pix_fmt, int width, int height, bo
             return NULL;
         }
         avpicture_fill((AVPicture *)picture, picture_buf,
-            (PixelFormat) pix_fmt, width, height);
+            (AVPixelFormat) pix_fmt, width, height);
     }
     else {
     }
@@ -1211,7 +1449,7 @@ static AVStream *icv_add_video_stream_FFMPEG(AVFormatContext *oc,
 #endif
 
     c->gop_size = 12; /* emit one intra frame every twelve frames at most */
-    c->pix_fmt = (PixelFormat) pixel_format;
+    c->pix_fmt = (AVPixelFormat) pixel_format;
 
     if (c->codec_id == CV_CODEC(CODEC_ID_MPEG2VIDEO)) {
         c->max_b_frames = 2;
@@ -1372,12 +1610,12 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
 #endif
 
     // check parameters
-    if (input_pix_fmt == PIX_FMT_BGR24) {
+    if (input_pix_fmt == AV_PIX_FMT_BGR24) {
         if (cn != 3) {
             return false;
         }
     }
-    else if (input_pix_fmt == PIX_FMT_GRAY8) {
+    else if (input_pix_fmt == AV_PIX_FMT_GRAY8) {
         if (cn != 1) {
             return false;
         }
@@ -1390,13 +1628,13 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
     assert( input_picture );
     // let input_picture point to the raw data buffer of 'image'
     avpicture_fill((AVPicture *)input_picture, (uint8_t *) data,
-        (PixelFormat)input_pix_fmt, width, height);
+        (AVPixelFormat)input_pix_fmt, width, height);
 
     if( !img_convert_ctx )
     {
         img_convert_ctx = sws_getContext(width,
             height,
-            (PixelFormat)input_pix_fmt,
+            (AVPixelFormat)input_pix_fmt,
             c->width,
             c->height,
             c->pix_fmt,
@@ -1414,7 +1652,7 @@ bool CvVideoWriter_FFMPEG::writeFrame( const unsigned char* data, int step, int
     }
     else{
         avpicture_fill((AVPicture *)picture, (uint8_t *) data,
-            (PixelFormat)input_pix_fmt, width, height);
+            (AVPixelFormat)input_pix_fmt, width, height);
     }
 
     picture->pts = frame_idx;
@@ -1547,10 +1785,10 @@ bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
     /* determine optimal pixel format */
     if (is_color) {
-        input_pix_fmt = PIX_FMT_BGR24;
+        input_pix_fmt = AV_PIX_FMT_BGR24;
     }
     else {
-        input_pix_fmt = PIX_FMT_GRAY8;
+        input_pix_fmt = AV_PIX_FMT_GRAY8;
     }
 
     /* Lookup codec_id for given fourcc */
@@ -1587,21 +1825,21 @@ bool CvVideoWriter_FFMPEG::open( const char * filename, int fourcc,
         break;
 #endif
     case CV_CODEC(CODEC_ID_HUFFYUV):
-        codec_pix_fmt = PIX_FMT_YUV422P;
+        codec_pix_fmt = AV_PIX_FMT_YUV422P;
         break;
     case CV_CODEC(CODEC_ID_MJPEG):
    case CV_CODEC(CODEC_ID_LJPEG):
-        codec_pix_fmt = PIX_FMT_YUVJ420P;
+        codec_pix_fmt = AV_PIX_FMT_YUVJ420P;
         bitrate_scale = 3;
         break;
    case CV_CODEC(CODEC_ID_RAWVIDEO):
-        codec_pix_fmt = input_pix_fmt == PIX_FMT_GRAY8 ||
-            input_pix_fmt == PIX_FMT_GRAY16LE ||
-            input_pix_fmt == PIX_FMT_GRAY16BE ? input_pix_fmt : PIX_FMT_YUV420P;
+        codec_pix_fmt = input_pix_fmt == AV_PIX_FMT_GRAY8 ||
+            input_pix_fmt == AV_PIX_FMT_GRAY16LE ||
+            input_pix_fmt == AV_PIX_FMT_GRAY16BE ? input_pix_fmt : AV_PIX_FMT_YUV420P;
         break;
    default:
         // good for lossy formats, MPEG, etc.
-        codec_pix_fmt = PIX_FMT_YUV420P;
+        codec_pix_fmt = AV_PIX_FMT_YUV420P;
         break;
    }
@@ -1826,7 +2064,7 @@ struct OutputMediaStream_FFMPEG
     void write(unsigned char* data, int size, int keyFrame);
 
     // add a video output stream to the container
-    static AVStream* addVideoStream(AVFormatContext *oc, CV_CODEC_ID codec_id, int w, int h, int bitrate, double fps, PixelFormat pixel_format);
+    static AVStream* addVideoStream(AVFormatContext *oc, CV_CODEC_ID codec_id, int w, int h, int bitrate, double fps, AVPixelFormat pixel_format);
 
     AVOutputFormat* fmt_;
     AVFormatContext* oc_;
@@ -1873,7 +2111,7 @@ void OutputMediaStream_FFMPEG::close()
         }
     }
 
-AVStream* OutputMediaStream_FFMPEG::addVideoStream(AVFormatContext *oc, CV_CODEC_ID codec_id, int w, int h, int bitrate, double fps, PixelFormat pixel_format)
+AVStream* OutputMediaStream_FFMPEG::addVideoStream(AVFormatContext *oc, CV_CODEC_ID codec_id, int w, int h, int bitrate, double fps, AVPixelFormat pixel_format)
 {
 #if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 10, 0)
     AVStream* st = avformat_new_stream(oc, 0);
@@ -2011,7 +2249,7 @@ bool OutputMediaStream_FFMPEG::open(const char* fileName, int width, int height,
     oc_->max_delay = (int)(0.7 * AV_TIME_BASE); // This reduces buffer underrun warnings with MPEG
 
     // set a few optimal pixel formats for lossless codecs of interest..
-    PixelFormat codec_pix_fmt = PIX_FMT_YUV420P;
+    AVPixelFormat codec_pix_fmt = AV_PIX_FMT_YUV420P;
     int bitrate_scale = 64;
 
     // TODO -- safe to ignore output audio stream?
@@ -2150,6 +2388,10 @@ struct InputMediaStream_FFMPEG
     AVFormatContext* ctx_;
     int video_stream_id_;
     AVPacket pkt_;
+
+#if USE_AV_INTERRUPT_CALLBACK
+    AVInterruptCallbackMetadata interrupt_metadata;
+#endif
 };
 
 bool InputMediaStream_FFMPEG::open(const char* fileName, int* codec, int* chroma_format, int* width, int* height)
@@ -2160,6 +2402,16 @@ bool InputMediaStream_FFMPEG::open(const char* fileName, int* codec, int* chroma
     video_stream_id_ = -1;
     memset(&pkt_, 0, sizeof(AVPacket));
 
+#if USE_AV_INTERRUPT_CALLBACK
+    /* interrupt callback */
+    interrupt_metadata.timeout_after_ms = LIBAVFORMAT_INTERRUPT_TIMEOUT_MS;
+    get_monotonic_time(&interrupt_metadata.value);
+
+    ctx_ = avformat_alloc_context();
+    ctx_->interrupt_callback.callback = _opencv_ffmpeg_interrupt_callback;
+    ctx_->interrupt_callback.opaque = &interrupt_metadata;
+#endif
+
 #if LIBAVFORMAT_BUILD >= CALC_FFMPEG_VERSION(53, 13, 0)
     avformat_network_init();
 #endif
@@ -2220,15 +2472,15 @@ bool InputMediaStream_FFMPEG::open(const char* fileName, int* codec, int* chroma
         switch (enc->pix_fmt)
         {
-        case PIX_FMT_YUV420P:
+        case AV_PIX_FMT_YUV420P:
             *chroma_format = ::VideoChromaFormat_YUV420;
             break;
 
-        case PIX_FMT_YUV422P:
+        case AV_PIX_FMT_YUV422P:
             *chroma_format = ::VideoChromaFormat_YUV422;
             break;
 
-        case PIX_FMT_YUV444P:
+        case AV_PIX_FMT_YUV444P:
            *chroma_format = ::VideoChromaFormat_YUV444;
            break;
@@ -2276,11 +2528,23 @@ bool InputMediaStream_FFMPEG::read(unsigned char** data, int* size, int* endOfFi
     // get the next frame
     for (;;)
     {
+#if USE_AV_INTERRUPT_CALLBACK
+        if(interrupt_metadata.timeout)
+        {
+            break;
+        }
+#endif
+
         int ret = av_read_frame(ctx_, &pkt_);
 
         if (ret == AVERROR(EAGAIN))
             continue;
 
+#if USE_AV_INTERRUPT_CALLBACK
+        // update interrupt value
+        get_monotonic_time(&interrupt_metadata.value);
+#endif
+
         if (ret < 0)
         {
             if (ret == (int)AVERROR_EOF)