gifvideo.cpp 50 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400
  1. #include <jni.h>
  2. #include <android/bitmap.h>
  3. #include <cstdint>
  4. #include <limits>
  5. #include <string>
  6. #include <unistd.h>
  7. #include <linux/stat.h>
  8. #include <asm/fcntl.h>
  9. #include <fcntl.h>
  10. #include <libyuv.h>
  11. #include <tgnet/FileLog.h>
  12. #include "tgnet/ConnectionsManager.h"
  13. #include "voip/webrtc/common_video/h264/sps_parser.h"
  14. #include "voip/webrtc/common_video/h264/h264_common.h"
  15. #include "c_utils.h"
  16. extern "C" {
  17. #include <libavformat/avformat.h>
  18. #include <libavformat/isom.h>
  19. #include <libavcodec/bytestream.h>
  20. #include <libavcodec/get_bits.h>
  21. #include <libavcodec/golomb.h>
  22. #include <libavutil/eval.h>
  23. #include <libavutil/intmath.h>
  24. #include <libswscale/swscale.h>
  25. }
  26. #define RGB8888_A(p) ((p & (0xff<<24)) >> 24 )
  27. static const std::string av_make_error_str(int errnum) {
  28. char errbuf[AV_ERROR_MAX_STRING_SIZE];
  29. av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE);
  30. return (std::string) errbuf;
  31. }
  32. #undef av_err2str
  33. #define av_err2str(errnum) av_make_error_str(errnum).c_str()
  34. #define FFMPEG_AVSEEK_SIZE 0x10000
// Cached JNI handles for the Java-side AnimatedFileDrawableStream class.
// They are resolved elsewhere (registration code is outside this chunk) and
// used by the custom AVIO callbacks below to pull file data through Java.
jclass jclass_AnimatedFileDrawableStream;
jmethodID jclass_AnimatedFileDrawableStream_read;
jmethodID jclass_AnimatedFileDrawableStream_cancel;
jmethodID jclass_AnimatedFileDrawableStream_isFinishedLoadingFile;
jmethodID jclass_AnimatedFileDrawableStream_getFinishedFilePath;
// One parsed NAL unit inside an H2645Packet (trimmed-down local copy of
// FFmpeg's internal h2645_parse.h structure).
typedef struct H2645NAL {
    uint8_t *rbsp_buffer;           // backing storage that 'data' points into
    int size;                       // length of 'data' in bytes (escapes removed)
    const uint8_t *data;            // RBSP payload with emulation-prevention bytes stripped
    int size_bits;                  // meaningful bit length of 'data' (stop bit stripped)
    int raw_size;                   // length of 'raw_data' in bytes
    const uint8_t *raw_data;        // original, still-escaped bytes in the input buffer
    int type;                       // nal_unit_type (e.g. 7 = SPS, see decode_packet)
    int temporal_id;                // not populated by this file's parser
    int nuh_layer_id;               // not populated by this file's parser
    int skipped_bytes;              // number of emulation-prevention bytes removed
    int skipped_bytes_pos_size;     // capacity of skipped_bytes_pos
    int *skipped_bytes_pos;         // positions (within 'data') where bytes were removed
    int ref_idc;                    // nal_ref_idc from the NAL header
    GetBitContext gb;               // bit reader positioned at the NAL payload
} H2645NAL;
// Shared scratch buffer into which all NAL payloads of one packet are
// un-escaped back-to-back; see alloc_rbsp_buffer / ff_h2645_extract_rbsp.
typedef struct H2645RBSP {
    uint8_t *rbsp_buffer;           // scratch storage (padded, see alloc_rbsp_buffer)
    AVBufferRef *rbsp_buffer_ref;   // optional refcounted owner of rbsp_buffer
    int rbsp_buffer_alloc_size;     // allocated capacity of rbsp_buffer
    int rbsp_buffer_size;           // bytes currently in use
} H2645RBSP;
// All NAL units of one access unit plus the shared un-escape buffer.
typedef struct H2645Packet {
    H2645NAL *nals;                 // array of parsed NAL units
    H2645RBSP rbsp;                 // shared un-escaped payload storage
    int nb_nals;                    // number of valid entries in 'nals'
    int nals_allocated;             // number of allocated entries in 'nals'
    unsigned nal_buffer_size;       // byte size tracked for av_fast_realloc
} H2645Packet;
  69. void ff_h2645_packet_uninit(H2645Packet *pkt) {
  70. int i;
  71. for (i = 0; i < pkt->nals_allocated; i++) {
  72. av_freep(&pkt->nals[i].skipped_bytes_pos);
  73. }
  74. av_freep(&pkt->nals);
  75. pkt->nals_allocated = pkt->nal_buffer_size = 0;
  76. if (pkt->rbsp.rbsp_buffer_ref) {
  77. av_buffer_unref(&pkt->rbsp.rbsp_buffer_ref);
  78. pkt->rbsp.rbsp_buffer = NULL;
  79. } else
  80. av_freep(&pkt->rbsp.rbsp_buffer);
  81. pkt->rbsp.rbsp_buffer_alloc_size = pkt->rbsp.rbsp_buffer_size = 0;
  82. }
// All per-video decoding state: FFmpeg demuxer/decoder handles, the current
// frame, the optional Java-side stream feeding data through custom AVIO
// callbacks, scaler state and the backing file descriptor. The destructor
// tears everything down and is safe on a partially initialized instance
// (every member has a sentinel default).
typedef struct VideoInfo {
    ~VideoInfo() {
        if (video_dec_ctx) {
            avcodec_close(video_dec_ctx);
            video_dec_ctx = nullptr;
        }
        if (fmt_ctx) {
            avformat_close_input(&fmt_ctx);
            fmt_ctx = nullptr;
        }
        if (frame) {
            av_frame_free(&frame);
            frame = nullptr;
        }
        if (src) {
            delete [] src;
            src = nullptr;
        }
        if (stream != nullptr) {
            // Drop the global ref to the Java stream object. The current
            // thread is attached to the VM first if it is not already.
            // NOTE(review): jvmArgs.name/group are left uninitialized here —
            // AttachCurrentThread may read them; confirm and zero them.
            JNIEnv *jniEnv = nullptr;
            JavaVMAttachArgs jvmArgs;
            jvmArgs.version = JNI_VERSION_1_6;
            bool attached;
            if (JNI_EDETACHED == javaVm->GetEnv((void **) &jniEnv, JNI_VERSION_1_6)) {
                javaVm->AttachCurrentThread(&jniEnv, &jvmArgs);
                attached = true;
            } else {
                attached = false;
            }
            jniEnv->DeleteGlobalRef(stream);
            if (attached) {
                javaVm->DetachCurrentThread();
            }
            stream = nullptr;
        }
        if (ioContext != nullptr) {
            // FFmpeg may have replaced the AVIO buffer; free whatever is
            // current, then the context itself.
            if (ioContext->buffer) {
                av_freep(&ioContext->buffer);
            }
            avio_context_free(&ioContext);
            ioContext = nullptr;
        }
        if (sws_ctx != nullptr) {
            sws_freeContext(sws_ctx);
            sws_ctx = nullptr;
        }
        if (fd >= 0) {
            close(fd);
            fd = -1;
        }
        ff_h2645_packet_uninit(&h2645Packet);
        av_packet_unref(&orig_pkt);
        video_stream_idx = -1;
        video_stream = nullptr;
        audio_stream = nullptr;
    }
    AVFormatContext *fmt_ctx = nullptr;     // demuxer context
    char *src = nullptr;                    // heap-owned UTF-8 source path (new[])
    int video_stream_idx = -1;              // index of the selected video stream
    AVStream *video_stream = nullptr;
    AVStream *audio_stream = nullptr;
    AVCodecContext *video_dec_ctx = nullptr;
    AVFrame *frame = nullptr;               // most recently decoded frame
    bool has_decoded_frames = false;
    AVPacket pkt;                           // packet currently being consumed
    AVPacket orig_pkt;                      // original packet, unref'd on teardown
    bool stopped = false;                   // set when streaming is cancelled
    bool seeking = false;
    int firstWidth = 0;                     // first-seen SPS width (macroblock units)
    int firstHeight = 0;                    // first-seen SPS height (macroblock units)
    bool dropFrames = false;                // true while SPS size differs from first-seen
    H2645Packet h2645Packet = {nullptr};    // scratch NAL parser state
    int32_t dst_linesize[1];
    struct SwsContext *sws_ctx = nullptr;   // pixel conversion/scaling context
    AVIOContext *ioContext = nullptr;       // custom AVIO for the Java stream
    unsigned char *ioBuffer = nullptr;
    jobject stream = nullptr;               // global ref to AnimatedFileDrawableStream, or null
    int32_t account = 0;
    int fd = -1;                            // backing file descriptor, -1 = not open
    int64_t file_size = 0;
    int64_t last_seek_p = 0;                // current logical read offset for readCallback
};
  165. void custom_log(void *ptr, int level, const char* fmt, va_list vl){
  166. va_list vl2;
  167. char line[1024];
  168. static int print_prefix = 1;
  169. va_copy(vl2, vl);
  170. av_log_format_line(ptr, level, fmt, vl2, line, sizeof(line), &print_prefix);
  171. va_end(vl2);
  172. LOGE(line);
  173. }
  174. static enum AVPixelFormat get_format(AVCodecContext *ctx,
  175. const enum AVPixelFormat *pix_fmts)
  176. {
  177. const enum AVPixelFormat *p;
  178. for (p = pix_fmts; *p != -1; p++) {
  179. LOGE("available format %d", p);
  180. }
  181. return pix_fmts[0];
  182. }
  183. int open_codec_context(int *stream_idx, AVCodecContext **dec_ctx, AVFormatContext *fmt_ctx, enum AVMediaType type) {
  184. int ret, stream_index;
  185. AVStream *st;
  186. AVCodec *dec = NULL;
  187. AVDictionary *opts = NULL;
  188. ret = av_find_best_stream(fmt_ctx, type, -1, -1, NULL, 0);
  189. if (ret < 0) {
  190. LOGE("can't find %s stream in input file", av_get_media_type_string(type));
  191. return ret;
  192. } else {
  193. stream_index = ret;
  194. st = fmt_ctx->streams[stream_index];
  195. dec = avcodec_find_decoder(st->codecpar->codec_id);
  196. if (!dec) {
  197. LOGE("failed to find %s codec", av_get_media_type_string(type));
  198. return AVERROR(EINVAL);
  199. }
  200. *dec_ctx = avcodec_alloc_context3(dec);
  201. if (!*dec_ctx) {
  202. LOGE("Failed to allocate the %s codec context", av_get_media_type_string(type));
  203. return AVERROR(ENOMEM);
  204. }
  205. if ((ret = avcodec_parameters_to_context(*dec_ctx, st->codecpar)) < 0) {
  206. LOGE("Failed to copy %s codec parameters to decoder context", av_get_media_type_string(type));
  207. return ret;
  208. }
  209. av_dict_set(&opts, "refcounted_frames", "1", 0);
  210. if ((ret = avcodec_open2(*dec_ctx, dec, &opts)) < 0) {
  211. LOGE("Failed to open %s codec", av_get_media_type_string(type));
  212. return ret;
  213. }
  214. *stream_idx = stream_index;
  215. }
  216. return 0;
  217. }
  218. #define MAX_MBPAIR_SIZE (256*1024)
// Un-escape one NAL unit: copy 'length' bytes from 'src' into the packet's
// shared RBSP buffer while stripping emulation-prevention bytes (the 0x03 in
// 00 00 03 xx sequences), recording each removed position in
// nal->skipped_bytes_pos. An embedded start code truncates the NAL early.
// On return nal->data/size describe the clean payload and
// nal->raw_data/raw_size the original bytes. Returns the raw byte count
// consumed or AVERROR(ENOMEM).
int ff_h2645_extract_rbsp(const uint8_t *src, int length, H2645RBSP *rbsp, H2645NAL *nal)
{
    int i, si, di;
    uint8_t *dst;
    nal->skipped_bytes = 0;
// If src[i..i+2] is 00 00 0x: x == 1/2 means a new start code, so the current
// NAL ends at i; x == 0 or x == 3 is handled by the copy loop below.
#define STARTCODE_TEST \
    if (i + 2 < length && src[i + 1] == 0 && src[i + 2] <= 3) { \
        if (src[i + 2] != 3 && src[i + 2] != 0) { \
            /* startcode, so we must be past the end */ \
            length = i; \
        } \
        break; \
    }
    // Fast scan (two bytes per step) for the first 00 00 pair; everything
    // before it can be copied verbatim.
    for (i = 0; i + 1 < length; i += 2) {
        if (src[i])
            continue;
        if (i > 0 && src[i - 1] == 0)
            i--;
        STARTCODE_TEST;
    }
    if (i > length)
        i = length;
    // Destination is appended to the packet-wide RBSP scratch buffer.
    nal->rbsp_buffer = &rbsp->rbsp_buffer[rbsp->rbsp_buffer_size];
    dst = nal->rbsp_buffer;
    memcpy(dst, src, i);
    si = di = i;
    while (si + 2 < length) {
        // Quick path: a byte > 3 at si+2 cannot complete a 00 00 0x pattern.
        if (src[si + 2] > 3) {
            dst[di++] = src[si++];
            dst[di++] = src[si++];
        } else if (src[si] == 0 && src[si + 1] == 0 && src[si + 2] != 0) {
            if (src[si + 2] == 3) { // escape sequence: emit 00 00, drop the 03
                dst[di++] = 0;
                dst[di++] = 0;
                si += 3;
                if (nal->skipped_bytes_pos) {
                    nal->skipped_bytes++;
                    if (nal->skipped_bytes_pos_size < nal->skipped_bytes) {
                        // Grow the removed-positions array geometrically.
                        nal->skipped_bytes_pos_size *= 2;
                        av_reallocp_array(&nal->skipped_bytes_pos,
                                          nal->skipped_bytes_pos_size,
                                          sizeof(*nal->skipped_bytes_pos));
                        if (!nal->skipped_bytes_pos) {
                            nal->skipped_bytes_pos_size = 0;
                            return AVERROR(ENOMEM);
                        }
                    }
                    if (nal->skipped_bytes_pos)
                        nal->skipped_bytes_pos[nal->skipped_bytes-1] = di - 1;
                }
                continue;
            } else // next start code
                goto nsc;
        }
        dst[di++] = src[si++];
    }
    // Copy the (at most two) remaining tail bytes.
    while (si < length)
        dst[di++] = src[si++];
nsc:
    // Zero padding so downstream bit readers can over-read safely.
    memset(dst + di, 0, AV_INPUT_BUFFER_PADDING_SIZE);
    nal->data = dst;
    nal->size = di;
    nal->raw_data = src;
    nal->raw_size = si;
    rbsp->rbsp_buffer_size += si;
    return si;
}
  286. static inline int get_nalsize(int nal_length_size, const uint8_t *buf, int buf_size, int *buf_index) {
  287. int i, nalsize = 0;
  288. if (*buf_index >= buf_size - nal_length_size) {
  289. return AVERROR(EAGAIN);
  290. }
  291. for (i = 0; i < nal_length_size; i++)
  292. nalsize = ((unsigned)nalsize << 8) | buf[(*buf_index)++];
  293. if (nalsize <= 0 || nalsize > buf_size - *buf_index) {
  294. return AVERROR_INVALIDDATA;
  295. }
  296. return nalsize;
  297. }
  298. static int find_next_start_code(const uint8_t *buf, const uint8_t *next_avc) {
  299. int i = 0;
  300. if (buf + 3 >= next_avc)
  301. return next_avc - buf;
  302. while (buf + i + 3 < next_avc) {
  303. if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 1)
  304. break;
  305. i++;
  306. }
  307. return i + 3;
  308. }
  309. static int get_bit_length(H2645NAL *nal, int skip_trailing_zeros) {
  310. int size = nal->size;
  311. int v;
  312. while (skip_trailing_zeros && size > 0 && nal->data[size - 1] == 0)
  313. size--;
  314. if (!size)
  315. return 0;
  316. v = nal->data[size - 1];
  317. if (size > INT_MAX / 8)
  318. return AVERROR(ERANGE);
  319. size *= 8;
  320. /* remove the stop bit and following trailing zeros,
  321. * or nothing for damaged bitstreams */
  322. if (v)
  323. size -= ff_ctz(v) + 1;
  324. return size;
  325. }
// Ensure the shared RBSP scratch buffer can hold 'size' bytes plus FFmpeg's
// input padding. Reuses the existing allocation when large enough (only
// re-zeroing the padding region), otherwise reallocates with ~6% headroom
// to limit future reallocations. On any failure the buffer is released and
// alloc size reset to 0; callers detect that via rbsp->rbsp_buffer == NULL.
static void alloc_rbsp_buffer(H2645RBSP *rbsp, unsigned int size) {
    int min_size = size;
    if (size > INT_MAX - AV_INPUT_BUFFER_PADDING_SIZE)
        goto fail;
    size += AV_INPUT_BUFFER_PADDING_SIZE;
    // Existing buffer is big enough (and writable if refcounted): keep it.
    if (rbsp->rbsp_buffer_alloc_size >= size &&
        (!rbsp->rbsp_buffer_ref || av_buffer_is_writable(rbsp->rbsp_buffer_ref))) {
        memset(rbsp->rbsp_buffer + min_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
        return;
    }
    size = FFMIN(size + size / 16 + 32, INT_MAX); // headroom, clamped to INT_MAX
    if (rbsp->rbsp_buffer_ref)
        av_buffer_unref(&rbsp->rbsp_buffer_ref);
    else
        av_free(rbsp->rbsp_buffer);
    rbsp->rbsp_buffer = (uint8_t *) av_mallocz(size);
    if (!rbsp->rbsp_buffer)
        goto fail;
    rbsp->rbsp_buffer_alloc_size = size;
    return;
fail:
    rbsp->rbsp_buffer_alloc_size = 0;
    if (rbsp->rbsp_buffer_ref) {
        av_buffer_unref(&rbsp->rbsp_buffer_ref);
        rbsp->rbsp_buffer = NULL;
    } else
        av_freep(&rbsp->rbsp_buffer);
    return;
}
  355. static int h264_parse_nal_header(H2645NAL *nal) {
  356. GetBitContext *gb = &nal->gb;
  357. if (get_bits1(gb) != 0)
  358. return AVERROR_INVALIDDATA;
  359. nal->ref_idc = get_bits(gb, 2);
  360. nal->type = get_bits(gb, 5);
  361. return 1;
  362. }
// Split a buffer into NAL units. With is_nalff != 0 the buffer is in
// AVCC/mp4 layout (each NAL prefixed by a nal_length_size-byte big-endian
// length); otherwise Annex-B start codes are searched. Each NAL is
// un-escaped into pkt->rbsp and its one-byte header parsed; NALs that fail
// header parsing or are empty are discarded. Returns 0 or a negative
// AVERROR.
int ff_h2645_packet_split(H2645Packet *pkt, const uint8_t *buf, int length, int is_nalff, int nal_length_size) {
    GetByteContext bc;
    int consumed, ret = 0;
    int next_avc = is_nalff ? 0 : length;
    // Worst-case growth allowance for the un-escaped payloads.
    int64_t padding = MAX_MBPAIR_SIZE;
    bytestream2_init(&bc, buf, length);
    alloc_rbsp_buffer(&pkt->rbsp, length + padding);
    if (!pkt->rbsp.rbsp_buffer)
        return AVERROR(ENOMEM);
    pkt->rbsp.rbsp_buffer_size = 0;
    pkt->nb_nals = 0;
    while (bytestream2_get_bytes_left(&bc) >= 4) {
        H2645NAL *nal;
        int extract_length = 0;
        int skip_trailing_zeros = 1;
        if (bytestream2_tell(&bc) == next_avc) {
            // AVCC: read the explicit NAL length prefix.
            int i = 0;
            extract_length = get_nalsize(nal_length_size, bc.buffer, bytestream2_get_bytes_left(&bc), &i);
            if (extract_length < 0)
                return extract_length;
            bytestream2_skip(&bc, nal_length_size);
            next_avc = bytestream2_tell(&bc) + extract_length;
        } else {
            // Annex-B: scan forward for the next 00 00 01 start code.
            int buf_index;
            buf_index = find_next_start_code(bc.buffer, buf + next_avc);
            bytestream2_skip(&bc, buf_index);
            if (!bytestream2_get_bytes_left(&bc)) {
                if (pkt->nb_nals > 0) {
                    return 0;
                } else {
                    return AVERROR_INVALIDDATA;
                }
            }
            extract_length = FFMIN(bytestream2_get_bytes_left(&bc), next_avc - bytestream2_tell(&bc));
            if (bytestream2_tell(&bc) >= next_avc) {
                bytestream2_skip(&bc, next_avc - bytestream2_tell(&bc));
                continue;
            }
        }
        // Grow the NAL array by one slot if needed; each slot keeps its
        // skipped_bytes_pos allocation across calls for reuse.
        if (pkt->nals_allocated < pkt->nb_nals + 1) {
            int new_size = pkt->nals_allocated + 1;
            void *tmp;
            if (new_size >= INT_MAX / sizeof(*pkt->nals))
                return AVERROR(ENOMEM);
            tmp = av_fast_realloc(pkt->nals, &pkt->nal_buffer_size, new_size * sizeof(*pkt->nals));
            if (!tmp)
                return AVERROR(ENOMEM);
            pkt->nals = (H2645NAL *) tmp;
            // Zero only the newly added slot (growth is always by one).
            memset(pkt->nals + pkt->nals_allocated, 0, sizeof(*pkt->nals));
            nal = &pkt->nals[pkt->nb_nals];
            nal->skipped_bytes_pos_size = 1024;
            nal->skipped_bytes_pos = (int *) av_malloc_array(nal->skipped_bytes_pos_size, sizeof(*nal->skipped_bytes_pos));
            if (!nal->skipped_bytes_pos)
                return AVERROR(ENOMEM);
            pkt->nals_allocated = new_size;
        }
        nal = &pkt->nals[pkt->nb_nals];
        consumed = ff_h2645_extract_rbsp(bc.buffer, extract_length, &pkt->rbsp, nal);
        if (consumed < 0)
            return consumed;
        pkt->nb_nals++;
        bytestream2_skip(&bc, consumed);
        /* see commit 3566042a0 */
        if (bytestream2_get_bytes_left(&bc) >= 4 &&
            bytestream2_peek_be32(&bc) == 0x000001E0)
            skip_trailing_zeros = 0;
        nal->size_bits = get_bit_length(nal, skip_trailing_zeros);
        ret = init_get_bits(&nal->gb, nal->data, nal->size_bits);
        if (ret < 0)
            return ret;
        ret = h264_parse_nal_header(nal);
        if (ret <= 0 || nal->size <= 0 || nal->size_bits <= 0) {
            // Invalid or empty NAL: discard it by rewinding the counter.
            pkt->nb_nals--;
        }
    }
    return 0;
}
  440. #define MAX_SPS_COUNT 32
// Zigzag scan order for 8x8 blocks (row-major indices in scan order),
// used by decode_scaling_list for 64-entry scaling matrices.
const uint8_t ff_zigzag_direct[64] = {
    0,   1,  8, 16,  9,  2,  3, 10,
    17, 24, 32, 25, 18, 11,  4,  5,
    12, 19, 26, 33, 40, 48, 41, 34,
    27, 20, 13,  6,  7, 14, 21, 28,
    35, 42, 49, 56, 57, 50, 43, 36,
    29, 22, 15, 23, 30, 37, 44, 51,
    58, 59, 52, 45, 38, 31, 39, 46,
    53, 60, 61, 54, 47, 55, 62, 63
};
// Zigzag scan order for 4x4 blocks (indices computed as column + row * 4),
// used by decode_scaling_list for 16-entry scaling matrices. Declared with
// one spare element, matching FFmpeg's original table.
const uint8_t ff_zigzag_scan[16+1] = {
    0 + 0 * 4, 1 + 0 * 4, 0 + 1 * 4, 0 + 2 * 4,
    1 + 1 * 4, 2 + 0 * 4, 3 + 0 * 4, 2 + 1 * 4,
    1 + 2 * 4, 0 + 3 * 4, 1 + 3 * 4, 2 + 2 * 4,
    3 + 1 * 4, 3 + 2 * 4, 2 + 3 * 4, 3 + 3 * 4,
};
  457. static int decode_scaling_list(GetBitContext *gb, uint8_t *factors, int size) {
  458. int i, last = 8, next = 8;
  459. const uint8_t *scan = size == 16 ? ff_zigzag_scan : ff_zigzag_direct;
  460. if (!get_bits1(gb)) {
  461. } else {
  462. for (i = 0; i < size; i++) {
  463. if (next) {
  464. int v = get_se_golomb(gb);
  465. if (v < -128 || v > 127) {
  466. return AVERROR_INVALIDDATA;
  467. }
  468. next = (last + v) & 0xff;
  469. }
  470. if (!i && !next) { /* matrix not written, we use the preset one */
  471. break;
  472. }
  473. last = factors[scan[i]] = next ? next : last;
  474. }
  475. }
  476. return 0;
  477. }
  478. static int decode_scaling_matrices(GetBitContext *gb, int chroma_format_idc, uint8_t(*scaling_matrix4)[16], uint8_t(*scaling_matrix8)[64]) {
  479. int ret = 0;
  480. if (get_bits1(gb)) {
  481. ret |= decode_scaling_list(gb, scaling_matrix4[0], 16); // Intra, Y
  482. ret |= decode_scaling_list(gb, scaling_matrix4[1], 16); // Intra, Cr
  483. ret |= decode_scaling_list(gb, scaling_matrix4[2], 16); // Intra, Cb
  484. ret |= decode_scaling_list(gb, scaling_matrix4[3], 16); // Inter, Y
  485. ret |= decode_scaling_list(gb, scaling_matrix4[4], 16); // Inter, Cr
  486. ret |= decode_scaling_list(gb, scaling_matrix4[5], 16); // Inter, Cb
  487. ret |= decode_scaling_list(gb, scaling_matrix8[0], 64); // Intra, Y
  488. ret |= decode_scaling_list(gb, scaling_matrix8[3], 64); // Inter, Y
  489. if (chroma_format_idc == 3) {
  490. ret |= decode_scaling_list(gb, scaling_matrix8[1], 64); // Intra, Cr
  491. ret |= decode_scaling_list(gb, scaling_matrix8[4], 64); // Inter, Cr
  492. ret |= decode_scaling_list(gb, scaling_matrix8[2], 64); // Intra, Cb
  493. ret |= decode_scaling_list(gb, scaling_matrix8[5], 64); // Inter, Cb
  494. }
  495. if (!ret)
  496. ret = 1;
  497. }
  498. return ret;
  499. }
// Minimal H.264 SPS parser (trimmed from FFmpeg). Reads just enough of the
// sequence parameter set to reach the picture dimensions, which are in
// MACROBLOCK units here (pic_width_in_mbs_minus1 + 1 etc.). On the first
// call (width == height == 0) the dimensions are stored into width/height;
// afterwards they are compared against the stored values. Returns nonzero
// ("true") when the SPS dimensions differ from the stored ones — used by
// decode_packet to drop frames across mid-stream resolution changes.
// Note: parse errors also return false, i.e. "no change detected".
int ff_h264_decode_seq_parameter_set(GetBitContext *gb, int &width, int &height) {
    int profile_idc, level_idc, constraint_set_flags = 0;
    unsigned int sps_id;
    int i, log2_max_frame_num_minus4; // log2_max_frame_num_minus4 is unused
    int ret;
    profile_idc = get_bits(gb, 8);
    // constraint_set0..5_flag
    constraint_set_flags |= get_bits1(gb) << 0;
    constraint_set_flags |= get_bits1(gb) << 1;
    constraint_set_flags |= get_bits1(gb) << 2;
    constraint_set_flags |= get_bits1(gb) << 3;
    constraint_set_flags |= get_bits1(gb) << 4;
    constraint_set_flags |= get_bits1(gb) << 5;
    skip_bits(gb, 2); // reserved bits
    level_idc = get_bits(gb, 8);
    sps_id = get_ue_golomb_31(gb);
    if (sps_id >= MAX_SPS_COUNT) {
        return false;
    }
    // High-family profiles carry extra chroma/bit-depth/scaling fields that
    // must be consumed before the dimensions can be read.
    if (profile_idc == 100 || // High profile
        profile_idc == 110 || // High10 profile
        profile_idc == 122 || // High422 profile
        profile_idc == 244 || // High444 Predictive profile
        profile_idc == 44 ||  // Cavlc444 profile
        profile_idc == 83 ||  // Scalable Constrained High profile (SVC)
        profile_idc == 86 ||  // Scalable High Intra profile (SVC)
        profile_idc == 118 || // Stereo High profile (MVC)
        profile_idc == 128 || // Multiview High profile (MVC)
        profile_idc == 138 || // Multiview Depth High profile (MVCD)
        profile_idc == 144) { // old High444 profile
        int chroma_format_idc = get_ue_golomb_31(gb);
        if (chroma_format_idc > 3U) {
            return false;
        } else if (chroma_format_idc == 3) {
            int residual_color_transform_flag = get_bits1(gb);
            if (residual_color_transform_flag) {
                return false;
            }
        }
        int bit_depth_luma = get_ue_golomb(gb) + 8;
        int bit_depth_chroma = get_ue_golomb(gb) + 8;
        if (bit_depth_chroma != bit_depth_luma) {
            return false;
        }
        if (bit_depth_luma < 8 || bit_depth_luma > 14 || bit_depth_chroma < 8 || bit_depth_chroma > 14) {
            return false;
        }
        get_bits1(gb); // qpprime_y_zero_transform_bypass_flag
        // Scaling matrices are parsed only to advance the bit reader; the
        // values themselves are discarded.
        uint8_t scaling_matrix4[6][16];
        uint8_t scaling_matrix8[6][64];
        ret = decode_scaling_matrices(gb, chroma_format_idc, scaling_matrix4, scaling_matrix8);
        if (ret < 0)
            return false;
    }
    get_ue_golomb(gb); // log2_max_frame_num_minus4
    int poc_type = get_ue_golomb_31(gb);
    if (poc_type == 0) {
        unsigned t = get_ue_golomb(gb); // log2_max_pic_order_cnt_lsb_minus4
        if (t > 12) {
            return false;
        }
    } else if (poc_type == 1) {
        get_bits1(gb); // delta_pic_order_always_zero_flag
        int offset_for_non_ref_pic = get_se_golomb_long(gb);
        int offset_for_top_to_bottom_field = get_se_golomb_long(gb);
        if (offset_for_non_ref_pic == INT32_MIN || offset_for_top_to_bottom_field == INT32_MIN) {
            return false;
        }
        int poc_cycle_length = get_ue_golomb(gb);
        if ((unsigned) poc_cycle_length >= 256) {
            return false;
        }
        for (i = 0; i < poc_cycle_length; i++) {
            int offset_for_ref_frame = get_se_golomb_long(gb);
            if (offset_for_ref_frame == INT32_MIN) {
                return false;
            }
        }
    } else if (poc_type != 2) {
        return false;
    }
    get_ue_golomb_31(gb); // max_num_ref_frames
    get_bits1(gb); // gaps_in_frame_num_value_allowed_flag
    int mb_width = get_ue_golomb(gb) + 1;  // pic_width_in_mbs_minus1 + 1
    int mb_height = get_ue_golomb(gb) + 1; // pic_height_in_map_units_minus1 + 1
    if (width == 0 || height == 0) {
        // First SPS seen: remember its dimensions (macroblock units).
        width = mb_width;
        height = mb_height;
    }
    return mb_width != width || mb_height != height;
}
// Decode the packet currently held in info->pkt if it belongs to the video
// stream. For H.264, the packet is first split into NAL units (AVCC layout,
// 4-byte length prefixes) and any SPS (NAL type 7) is parsed: when it
// announces different dimensions than the first SPS seen, info->dropFrames
// is set and actual decoding is skipped until the size matches again.
// Returns the packet size on the normal path (or the decoder's nonzero
// return value when it produces one), negative on decoder error.
int decode_packet(VideoInfo *info, int *got_frame) {
    int ret = 0;
    int decoded = info->pkt.size;
    *got_frame = 0;
    if (info->pkt.stream_index == info->video_stream_idx) {
        if (info->video_stream->codecpar->codec_id == AV_CODEC_ID_H264 && decoded > 0) {
            // NOTE(review): the split's return value is ignored; on failure
            // nb_nals is 0 and the loop below simply does nothing.
            ff_h2645_packet_split(&info->h2645Packet, info->pkt.data, info->pkt.size, 1, 4);
            for (int i = 0; i < info->h2645Packet.nb_nals; i++) {
                H2645NAL *nal = &info->h2645Packet.nals[i];
                switch (nal->type) {
                    case 7: { // SPS: check for a mid-stream resolution change
                        // Parse a copy of the bit reader so the NAL state
                        // itself stays untouched.
                        GetBitContext tmp_gb = nal->gb;
                        info->dropFrames = ff_h264_decode_seq_parameter_set(&tmp_gb, info->firstWidth, info->firstHeight);
                    }
                }
            }
        }
        if (!info->dropFrames) {
            ret = avcodec_decode_video2(info->video_dec_ctx, info->frame, got_frame, &info->pkt);
            if (ret != 0) {
                return ret;
            }
        }
    }
    return decoded;
}
  616. void requestFd(VideoInfo *info) {
  617. JNIEnv *jniEnv = nullptr;
  618. JavaVMAttachArgs jvmArgs;
  619. jvmArgs.version = JNI_VERSION_1_6;
  620. bool attached;
  621. if (JNI_EDETACHED == javaVm->GetEnv((void **) &jniEnv, JNI_VERSION_1_6)) {
  622. javaVm->AttachCurrentThread(&jniEnv, &jvmArgs);
  623. attached = true;
  624. } else {
  625. attached = false;
  626. }
  627. jniEnv->CallIntMethod(info->stream, jclass_AnimatedFileDrawableStream_read, (jint) 0, (jint) 1);
  628. jboolean loaded = jniEnv->CallBooleanMethod(info->stream, jclass_AnimatedFileDrawableStream_isFinishedLoadingFile);
  629. if (loaded) {
  630. delete[] info->src;
  631. jstring src = (jstring) jniEnv->CallObjectMethod(info->stream, jclass_AnimatedFileDrawableStream_getFinishedFilePath);
  632. char const *srcString = jniEnv->GetStringUTFChars(src, 0);
  633. size_t len = strlen(srcString);
  634. info->src = new char[len + 1];
  635. memcpy(info->src, srcString, len);
  636. info->src[len] = '\0';
  637. if (srcString != 0) {
  638. jniEnv->ReleaseStringUTFChars(src, srcString);
  639. }
  640. }
  641. if (attached) {
  642. javaVm->DetachCurrentThread();
  643. }
  644. info->fd = open(info->src, O_RDONLY, S_IRUSR);
  645. }
  646. int readCallback(void *opaque, uint8_t *buf, int buf_size) {
  647. VideoInfo *info = (VideoInfo *) opaque;
  648. if (!info->stopped) {
  649. if (info->fd < 0) {
  650. requestFd(info);
  651. }
  652. if (info->fd >= 0) {
  653. if (info->last_seek_p + buf_size > info->file_size) {
  654. buf_size = (int) (info->file_size - info->last_seek_p);
  655. }
  656. if (buf_size > 0) {
  657. JNIEnv *jniEnv = nullptr;
  658. JavaVMAttachArgs jvmArgs;
  659. jvmArgs.version = JNI_VERSION_1_6;
  660. bool attached;
  661. if (JNI_EDETACHED == javaVm->GetEnv((void **) &jniEnv, JNI_VERSION_1_6)) {
  662. javaVm->AttachCurrentThread(&jniEnv, &jvmArgs);
  663. attached = true;
  664. } else {
  665. attached = false;
  666. }
  667. buf_size = jniEnv->CallIntMethod(info->stream, jclass_AnimatedFileDrawableStream_read, (jint) info->last_seek_p, (jint) buf_size);
  668. info->last_seek_p += buf_size;
  669. if (attached) {
  670. javaVm->DetachCurrentThread();
  671. }
  672. int ret = (int) read(info->fd, buf, (size_t) buf_size);
  673. return ret ? ret : AVERROR_EOF;
  674. }
  675. }
  676. }
  677. return AVERROR_EOF;
  678. }
  679. int64_t seekCallback(void *opaque, int64_t offset, int whence) {
  680. VideoInfo *info = (VideoInfo *) opaque;
  681. if (!info->stopped) {
  682. if (info->fd < 0) {
  683. requestFd(info);
  684. }
  685. if (info->fd >= 0) {
  686. if (whence & FFMPEG_AVSEEK_SIZE) {
  687. return info->file_size;
  688. } else {
  689. info->last_seek_p = offset;
  690. lseek(info->fd, off_t(offset), SEEK_SET);
  691. return offset;
  692. }
  693. }
  694. }
  695. return 0;
  696. }
// Indices into the jintArray that getVideoInfo() fills for the Java caller.
enum PARAM_NUM {
    PARAM_NUM_SUPPORTED_VIDEO_CODEC = 0,  // 1 if the video codec is in the supported list
    PARAM_NUM_WIDTH = 1,                  // video width in pixels
    PARAM_NUM_HEIGHT = 2,                 // video height in pixels
    PARAM_NUM_BITRATE = 3,                // video stream bitrate
    PARAM_NUM_DURATION = 4,               // duration in milliseconds
    PARAM_NUM_AUDIO_FRAME_SIZE = 5,       // audio data size (mov/mp4 only)
    PARAM_NUM_VIDEO_FRAME_SIZE = 6,       // video data size (mov/mp4 only)
    PARAM_NUM_FRAMERATE = 7,              // frames per second (truncated to int)
    PARAM_NUM_ROTATION = 8,               // rotation in degrees from metadata
    PARAM_NUM_SUPPORTED_AUDIO_CODEC = 9,  // 1 if the audio codec is in the supported list
    PARAM_NUM_HAS_AUDIO = 10,             // 1 if an audio stream exists
    PARAM_NUM_COUNT = 11,                 // required array length
};
  711. extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_getVideoInfo(JNIEnv *env, jclass clazz,jint sdkVersion, jstring src, jintArray data) {
  712. VideoInfo *info = new VideoInfo();
  713. char const *srcString = env->GetStringUTFChars(src, 0);
  714. size_t len = strlen(srcString);
  715. info->src = new char[len + 1];
  716. memcpy(info->src, srcString, len);
  717. info->src[len] = '\0';
  718. if (srcString != nullptr) {
  719. env->ReleaseStringUTFChars(src, srcString);
  720. }
  721. int ret;
  722. if ((ret = avformat_open_input(&info->fmt_ctx, info->src, NULL, NULL)) < 0) {
  723. LOGE("can't open source file %s, %s", info->src, av_err2str(ret));
  724. delete info;
  725. return;
  726. }
  727. if ((ret = avformat_find_stream_info(info->fmt_ctx, NULL)) < 0) {
  728. LOGE("can't find stream information %s, %s", info->src, av_err2str(ret));
  729. delete info;
  730. return;
  731. }
  732. if ((ret = av_find_best_stream(info->fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0)) >= 0) {
  733. info->video_stream = info->fmt_ctx->streams[ret];
  734. }
  735. if ((ret = av_find_best_stream(info->fmt_ctx, AVMEDIA_TYPE_AUDIO, -1, -1, NULL, 0)) >= 0) {
  736. info->audio_stream = info->fmt_ctx->streams[ret];
  737. }
  738. if (info->video_stream == nullptr) {
  739. LOGE("can't find video stream in the input, aborting %s", info->src);
  740. delete info;
  741. return;
  742. }
  743. jint *dataArr = env->GetIntArrayElements(data, 0);
  744. if (dataArr != nullptr) {
  745. //https://developer.android.com/guide/topics/media/media-formats
  746. dataArr[PARAM_NUM_SUPPORTED_VIDEO_CODEC] =
  747. info->video_stream->codecpar->codec_id == AV_CODEC_ID_H264 ||
  748. info->video_stream->codecpar->codec_id == AV_CODEC_ID_H263 ||
  749. info->video_stream->codecpar->codec_id == AV_CODEC_ID_MPEG4 ||
  750. info->video_stream->codecpar->codec_id == AV_CODEC_ID_VP8 ||
  751. info->video_stream->codecpar->codec_id == AV_CODEC_ID_VP9 ||
  752. (sdkVersion > 21 && info->video_stream->codecpar->codec_id == AV_CODEC_ID_HEVC);
  753. if (strstr(info->fmt_ctx->iformat->name, "mov") != 0 && dataArr[PARAM_NUM_SUPPORTED_VIDEO_CODEC]) {
  754. MOVStreamContext *mov = (MOVStreamContext *) info->video_stream->priv_data;
  755. dataArr[PARAM_NUM_VIDEO_FRAME_SIZE] = (jint) mov->data_size;
  756. if (info->audio_stream != nullptr) {
  757. mov = (MOVStreamContext *) info->audio_stream->priv_data;
  758. dataArr[PARAM_NUM_AUDIO_FRAME_SIZE] = (jint) mov->data_size;
  759. }
  760. }
  761. if (info->audio_stream != nullptr) {
  762. //https://developer.android.com/guide/topics/media/media-formats
  763. dataArr[PARAM_NUM_SUPPORTED_AUDIO_CODEC] =
  764. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_AAC ||
  765. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_AAC_LATM ||
  766. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_VORBIS ||
  767. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_AMR_NB ||
  768. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_AMR_WB ||
  769. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_FLAC ||
  770. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_MP3 ||
  771. // not supported codec, skip audio in this case
  772. info->audio_stream->codecpar->codec_id == AV_CODEC_ID_ADPCM_IMA_WAV ||
  773. (sdkVersion > 21 && info->audio_stream->codecpar->codec_id == AV_CODEC_ID_OPUS);
  774. dataArr[PARAM_NUM_HAS_AUDIO] = 1;
  775. } else {
  776. dataArr[PARAM_NUM_HAS_AUDIO] = 0;
  777. }
  778. dataArr[PARAM_NUM_BITRATE] = (jint) info->video_stream->codecpar->bit_rate;
  779. dataArr[PARAM_NUM_WIDTH] = info->video_stream->codecpar->width;
  780. dataArr[PARAM_NUM_HEIGHT] = info->video_stream->codecpar->height;
  781. AVDictionaryEntry *rotate_tag = av_dict_get(info->video_stream->metadata, "rotate", NULL, 0);
  782. if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0") != 0) {
  783. char *tail;
  784. dataArr[PARAM_NUM_ROTATION] = (jint) av_strtod(rotate_tag->value, &tail);
  785. if (*tail) {
  786. dataArr[PARAM_NUM_ROTATION] = 0;
  787. }
  788. } else {
  789. dataArr[PARAM_NUM_ROTATION] = 0;
  790. }
  791. if (info->video_stream->codecpar->codec_id == AV_CODEC_ID_H264 || info->video_stream->codecpar->codec_id == AV_CODEC_ID_HEVC) {
  792. dataArr[PARAM_NUM_FRAMERATE] = (jint) av_q2d(info->video_stream->avg_frame_rate);
  793. } else {
  794. dataArr[PARAM_NUM_FRAMERATE] = (jint) av_q2d(info->video_stream->r_frame_rate);
  795. }
  796. dataArr[PARAM_NUM_DURATION] = (int32_t) (info->fmt_ctx->duration * 1000 / AV_TIME_BASE);
  797. env->ReleaseIntArrayElements(data, dataArr, 0);
  798. delete info;
  799. }
  800. }
  801. extern "C" JNIEXPORT jlong JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_createDecoder(JNIEnv *env, jclass clazz, jstring src, jintArray data, jint account, jlong streamFileSize, jobject stream, jboolean preview) {
  802. VideoInfo *info = new VideoInfo();
  803. char const *srcString = env->GetStringUTFChars(src, 0);
  804. size_t len = strlen(srcString);
  805. info->src = new char[len + 1];
  806. memcpy(info->src, srcString, len);
  807. info->src[len] = '\0';
  808. if (srcString != 0) {
  809. env->ReleaseStringUTFChars(src, srcString);
  810. }
  811. int ret;
  812. if (streamFileSize != 0) {
  813. info->file_size = streamFileSize;
  814. info->stream = env->NewGlobalRef(stream);
  815. info->account = account;
  816. info->fd = open(info->src, O_RDONLY, S_IRUSR);
  817. info->ioBuffer = (unsigned char *) av_malloc(64 * 1024);
  818. info->ioContext = avio_alloc_context(info->ioBuffer, 64 * 1024, 0, info, readCallback, nullptr, seekCallback);
  819. if (info->ioContext == nullptr) {
  820. delete info;
  821. return 0;
  822. }
  823. info->fmt_ctx = avformat_alloc_context();
  824. info->fmt_ctx->pb = info->ioContext;
  825. AVDictionary *options = NULL;
  826. av_dict_set(&options, "usetoc", "1", 0);
  827. ret = avformat_open_input(&info->fmt_ctx, "http://localhost/file", NULL, &options);
  828. av_dict_free(&options);
  829. if (ret < 0) {
  830. LOGE("can't open source file %s, %s", info->src, av_err2str(ret));
  831. delete info;
  832. return 0;
  833. }
  834. info->fmt_ctx->flags |= AVFMT_FLAG_FAST_SEEK;
  835. if (preview) {
  836. info->fmt_ctx->flags |= AVFMT_FLAG_NOBUFFER;
  837. }
  838. } else {
  839. if ((ret = avformat_open_input(&info->fmt_ctx, info->src, NULL, NULL)) < 0) {
  840. LOGE("can't open source file %s, %s", info->src, av_err2str(ret));
  841. delete info;
  842. return 0;
  843. }
  844. }
  845. if ((ret = avformat_find_stream_info(info->fmt_ctx, NULL)) < 0) {
  846. LOGE("can't find stream information %s, %s", info->src, av_err2str(ret));
  847. delete info;
  848. return 0;
  849. }
  850. if (open_codec_context(&info->video_stream_idx, &info->video_dec_ctx, info->fmt_ctx, AVMEDIA_TYPE_VIDEO) >= 0) {
  851. info->video_stream = info->fmt_ctx->streams[info->video_stream_idx];
  852. }
  853. if (info->video_stream == nullptr) {
  854. LOGE("can't find video stream in the input, aborting %s", info->src);
  855. delete info;
  856. return 0;
  857. }
  858. info->frame = av_frame_alloc();
  859. if (info->frame == nullptr) {
  860. LOGE("can't allocate frame %s", info->src);
  861. delete info;
  862. return 0;
  863. }
  864. av_init_packet(&info->pkt);
  865. info->pkt.data = NULL;
  866. info->pkt.size = 0;
  867. jint *dataArr = env->GetIntArrayElements(data, 0);
  868. if (dataArr != nullptr) {
  869. dataArr[0] = info->video_dec_ctx->width;
  870. dataArr[1] = info->video_dec_ctx->height;
  871. //float pixelWidthHeightRatio = info->video_dec_ctx->sample_aspect_ratio.num / info->video_dec_ctx->sample_aspect_ratio.den; TODO support
  872. AVDictionaryEntry *rotate_tag = av_dict_get(info->video_stream->metadata, "rotate", NULL, 0);
  873. if (rotate_tag && *rotate_tag->value && strcmp(rotate_tag->value, "0")) {
  874. char *tail;
  875. dataArr[2] = (jint) av_strtod(rotate_tag->value, &tail);
  876. if (*tail) {
  877. dataArr[2] = 0;
  878. }
  879. } else {
  880. dataArr[2] = 0;
  881. }
  882. dataArr[4] = (int32_t) (info->fmt_ctx->duration * 1000 / AV_TIME_BASE);
  883. //(int32_t) (1000 * info->video_stream->duration * av_q2d(info->video_stream->time_base));
  884. env->ReleaseIntArrayElements(data, dataArr, 0);
  885. }
  886. //LOGD("successfully opened file %s", info->src);
  887. return (jlong) (intptr_t) info;
  888. }
  889. extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_destroyDecoder(JNIEnv *env, jclass clazz, jlong ptr) {
  890. if (ptr == NULL) {
  891. return;
  892. }
  893. VideoInfo *info = (VideoInfo *) (intptr_t) ptr;
  894. if (info->stream != nullptr) {
  895. JNIEnv *jniEnv = nullptr;
  896. JavaVMAttachArgs jvmArgs;
  897. jvmArgs.version = JNI_VERSION_1_6;
  898. bool attached;
  899. if (JNI_EDETACHED == javaVm->GetEnv((void **) &jniEnv, JNI_VERSION_1_6)) {
  900. javaVm->AttachCurrentThread(&jniEnv, &jvmArgs);
  901. attached = true;
  902. } else {
  903. attached = false;
  904. }
  905. jniEnv->CallVoidMethod(info->stream, jclass_AnimatedFileDrawableStream_cancel);
  906. if (attached) {
  907. javaVm->DetachCurrentThread();
  908. }
  909. }
  910. delete info;
  911. }
  912. extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_stopDecoder(JNIEnv *env, jclass clazz, jlong ptr) {
  913. if (ptr == NULL) {
  914. return;
  915. }
  916. VideoInfo *info = (VideoInfo *) (intptr_t) ptr;
  917. info->stopped = true;
  918. }
  919. extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_prepareToSeek(JNIEnv *env, jclass clazz, jlong ptr) {
  920. if (ptr == NULL) {
  921. return;
  922. }
  923. VideoInfo *info = (VideoInfo *) (intptr_t) ptr;
  924. info->seeking = true;
  925. }
// Seeks the decoder to the given position in milliseconds.
//
// First seeks the demuxer to the keyframe at or before the target and flushes
// the codec. If `precise` is false that is all; otherwise it keeps decoding
// frames until one with best_effort_timestamp >= target is produced, so the
// next decoded frame is at (or just past) the requested time.
extern "C" JNIEXPORT void JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_seekToMs(JNIEnv *env, jclass clazz, jlong ptr, jlong ms, jboolean precise) {
    if (ptr == NULL) {
        return;
    }
    VideoInfo *info = (VideoInfo *) (intptr_t) ptr;
    info->seeking = false;
    // Convert milliseconds into the video stream's time-base units.
    int64_t pts = (int64_t) (ms / av_q2d(info->video_stream->time_base) / 1000);
    int ret = 0;
    if ((ret = av_seek_frame(info->fmt_ctx, info->video_stream_idx, pts, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME)) < 0) {
        LOGE("can't seek file %s, %s", info->src, av_err2str(ret));
        return;
    } else {
        avcodec_flush_buffers(info->video_dec_ctx);
        if (!precise) {
            // Landing on the preceding keyframe is good enough.
            return;
        }
        int got_frame = 0;
        // Bounded decode loop so a broken file can't spin forever.
        int32_t tries = 1000;
        while (tries > 0) {
            // Read a new packet only when the previous one is fully consumed.
            if (info->pkt.size == 0) {
                ret = av_read_frame(info->fmt_ctx, &info->pkt);
                if (ret >= 0) {
                    // Keep the original packet so it can be unref'd once drained.
                    info->orig_pkt = info->pkt;
                }
            }
            if (info->pkt.size > 0) {
                ret = decode_packet(info, &got_frame);
                if (ret < 0) {
                    // Tolerate decode errors once at least one frame came out.
                    if (info->has_decoded_frames) {
                        ret = 0;
                    }
                    info->pkt.size = 0;
                } else {
                    // Advance past the bytes the decoder consumed.
                    info->pkt.data += ret;
                    info->pkt.size -= ret;
                }
                if (info->pkt.size == 0) {
                    av_packet_unref(&info->orig_pkt);
                }
            } else {
                // End of input: flush the decoder with an empty packet.
                info->pkt.data = NULL;
                info->pkt.size = 0;
                ret = decode_packet(info, &got_frame);
                if (ret < 0) {
                    return;
                }
                if (got_frame == 0) {
                    // Decoder fully drained without reaching the target;
                    // rewind to the start and give up.
                    av_seek_frame(info->fmt_ctx, info->video_stream_idx, 0, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
                    return;
                }
            }
            if (ret < 0) {
                return;
            }
            if (got_frame) {
                info->has_decoded_frames = true;
                bool finished = false;
                // Only these pixel formats are considered valid output here.
                if (info->frame->format == AV_PIX_FMT_YUV444P || info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
                    int64_t pkt_pts = info->frame->best_effort_timestamp;
                    if (pkt_pts >= pts) {
                        // Reached (or passed) the requested timestamp.
                        finished = true;
                    }
                }
                av_frame_unref(info->frame);
                if (finished) {
                    return;
                }
            }
            tries--;
        }
    }
}
  998. uint32_t premultiply_channel_value(const uint32_t pixel, const uint8_t offset, const float normalizedAlpha) {
  999. auto multipliedValue = ((pixel >> offset) & 0xFF) * normalizedAlpha;
  1000. return ((uint32_t)std::min(multipliedValue, 255.0f)) << offset;
  1001. }
  1002. static inline void writeFrameToBitmap(JNIEnv *env, VideoInfo *info, jintArray data, jobject bitmap, jint stride) {
  1003. jint *dataArr = env->GetIntArrayElements(data, 0);
  1004. int32_t wantedWidth;
  1005. int32_t wantedHeight;
  1006. AndroidBitmapInfo bitmapInfo;
  1007. AndroidBitmap_getInfo(env, bitmap, &bitmapInfo);
  1008. int32_t bitmapWidth = bitmapInfo.width;
  1009. int32_t bitmapHeight = bitmapInfo.height;
  1010. if (dataArr != nullptr) {
  1011. wantedWidth = dataArr[0];
  1012. wantedHeight = dataArr[1];
  1013. dataArr[3] = (jint) (1000 * info->frame->best_effort_timestamp * av_q2d(info->video_stream->time_base));
  1014. env->ReleaseIntArrayElements(data, dataArr, 0);
  1015. } else {
  1016. wantedWidth = bitmapWidth;
  1017. wantedHeight = bitmapHeight;
  1018. }
  1019. if (wantedWidth == info->frame->width && wantedHeight == info->frame->height || wantedWidth == info->frame->height && wantedHeight == info->frame->width) {
  1020. void *pixels;
  1021. if (AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0) {
  1022. if (info->sws_ctx == nullptr) {
  1023. if (info->frame->format > AV_PIX_FMT_NONE && info->frame->format < AV_PIX_FMT_NB && info->frame->format != AV_PIX_FMT_YUVA420P) {
  1024. info->sws_ctx = sws_getContext(info->frame->width, info->frame->height, (AVPixelFormat) info->frame->format, bitmapWidth, bitmapHeight, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
  1025. } else if (info->video_dec_ctx->pix_fmt > AV_PIX_FMT_NONE && info->video_dec_ctx->pix_fmt < AV_PIX_FMT_NB && info->frame->format != AV_PIX_FMT_YUVA420P) {
  1026. info->sws_ctx = sws_getContext(info->video_dec_ctx->width, info->video_dec_ctx->height, info->video_dec_ctx->pix_fmt, bitmapWidth, bitmapHeight, AV_PIX_FMT_RGBA, SWS_BILINEAR, NULL, NULL, NULL);
  1027. }
  1028. }
  1029. if (info->sws_ctx == nullptr || ((intptr_t) pixels) % 16 != 0) {
  1030. if (info->frame->format == AV_PIX_FMT_YUVA420P) {
  1031. libyuv::I420AlphaToARGBMatrix(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], info->frame->data[3], info->frame->linesize[3], (uint8_t *) pixels, bitmapWidth * 4, &libyuv::kYvuI601Constants, bitmapWidth, bitmapHeight, 1);
  1032. } else if (info->frame->format == AV_PIX_FMT_YUV444P) {
  1033. libyuv::H444ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight);
  1034. } else if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_YUVJ420P) {
  1035. if (info->frame->colorspace == AVColorSpace::AVCOL_SPC_BT709) {
  1036. libyuv::H420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight);
  1037. } else {
  1038. libyuv::I420ToARGB(info->frame->data[0], info->frame->linesize[0], info->frame->data[2], info->frame->linesize[2], info->frame->data[1], info->frame->linesize[1], (uint8_t *) pixels, bitmapWidth * 4, bitmapWidth, bitmapHeight);
  1039. }
  1040. } else if (info->frame->format == AV_PIX_FMT_BGRA) {
  1041. libyuv::ABGRToARGB(info->frame->data[0], info->frame->linesize[0], (uint8_t *) pixels, info->frame->width * 4, info->frame->width, info->frame->height);
  1042. }
  1043. } else {
  1044. uint8_t __attribute__ ((aligned (16))) *dst_data[1];
  1045. dst_data[0] = (uint8_t *) pixels;
  1046. info->dst_linesize[0] = stride;
  1047. sws_scale(info->sws_ctx, info->frame->data, info->frame->linesize, 0, info->frame->height, dst_data, info->dst_linesize);
  1048. }
  1049. }
  1050. AndroidBitmap_unlockPixels(env, bitmap);
  1051. }
  1052. }
// Seeks to `ms` and renders the first frame at (or after) that time into the
// bitmap. Returns 1 when a frame was written, 0 otherwise.
//
// Unlike seekToMs, this also handles the end-of-stream case: if the decoded
// frame turns out to be the last one in the file, it is written even when its
// timestamp is still before the target.
extern "C" JNIEXPORT int JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_getFrameAtTime(JNIEnv *env, jclass clazz, jlong ptr, jlong ms, jobject bitmap, jintArray data, jint stride) {
    if (ptr == NULL || bitmap == nullptr || data == nullptr) {
        return 0;
    }
    VideoInfo *info = (VideoInfo *) (intptr_t) ptr;
    info->seeking = false;
    // Convert milliseconds into the video stream's time-base units.
    int64_t pts = (int64_t) (ms / av_q2d(info->video_stream->time_base) / 1000);
    int ret = 0;
    if ((ret = av_seek_frame(info->fmt_ctx, info->video_stream_idx, pts, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME)) < 0) {
        LOGE("can't seek file %s, %s", info->src, av_err2str(ret));
        return 0;
    } else {
        avcodec_flush_buffers(info->video_dec_ctx);
        int got_frame = 0;
        // Bounded decode loop so a broken file can't spin forever.
        int32_t tries = 1000;
        // Cleared when the last-packet probe below already read ahead into
        // info->pkt, so the next iteration consumes that packet instead of
        // reading a new one.
        bool readNextPacket = true;
        while (tries > 0) {
            if (info->pkt.size == 0 && readNextPacket) {
                ret = av_read_frame(info->fmt_ctx, &info->pkt);
                if (ret >= 0) {
                    // Keep the original packet so it can be unref'd once drained.
                    info->orig_pkt = info->pkt;
                }
            }
            if (info->pkt.size > 0) {
                ret = decode_packet(info, &got_frame);
                if (ret < 0) {
                    // Tolerate decode errors once at least one frame came out.
                    if (info->has_decoded_frames) {
                        ret = 0;
                    }
                    info->pkt.size = 0;
                } else {
                    // Advance past the bytes the decoder consumed.
                    info->pkt.data += ret;
                    info->pkt.size -= ret;
                }
                if (info->pkt.size == 0) {
                    av_packet_unref(&info->orig_pkt);
                }
            } else {
                // End of input: flush the decoder with an empty packet.
                info->pkt.data = NULL;
                info->pkt.size = 0;
                ret = decode_packet(info, &got_frame);
                if (ret < 0) {
                    return 0;
                }
                if (got_frame == 0) {
                    // Fully drained without producing the frame; rewind and fail.
                    av_seek_frame(info->fmt_ctx, info->video_stream_idx, 0, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME);
                    return 0;
                }
            }
            if (ret < 0) {
                return 0;
            }
            if (got_frame) {
                bool finished = false;
                // Only these pixel formats are considered valid output here.
                if (info->frame->format == AV_PIX_FMT_YUV444P || info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P) {
                    int64_t pkt_pts = info->frame->best_effort_timestamp;
                    bool isLastPacket = false;
                    if (info->pkt.size == 0) {
                        // Probe for a following packet; if there is none, this
                        // frame is the last one in the stream.
                        readNextPacket = false;
                        isLastPacket = av_read_frame(info->fmt_ctx, &info->pkt) < 0;
                    }
                    if (pkt_pts >= pts || isLastPacket) {
                        // Target reached (or no later frame exists): render it.
                        writeFrameToBitmap(env, info, data, bitmap, stride);
                        finished = true;
                    }
                }
                av_frame_unref(info->frame);
                if (finished) {
                    return 1;
                }
            } else {
                readNextPacket = true;
            }
            tries--;
        }
        return 0;
    }
}
// Decodes the next frame of the animation and writes it into the bitmap.
// Returns 1 when a frame was rendered, 0 on stop/error/seek-in-progress.
//
// When playback reaches end_time (or end of stream, in non-preview mode) the
// decoder seeks back to start_time so the animation loops. `preview` raises the
// retry budget and disables the loop-back behavior.
extern "C" JNIEXPORT jint JNICALL Java_org_telegram_ui_Components_AnimatedFileDrawable_getVideoFrame(JNIEnv *env, jclass clazz, jlong ptr, jobject bitmap, jintArray data, jint stride, jboolean preview, jfloat start_time, jfloat end_time) {
    if (ptr == NULL || bitmap == nullptr) {
        return 0;
    }
    //int64_t time = ConnectionsManager::getInstance(0).getCurrentTimeMonotonicMillis();
    VideoInfo *info = (VideoInfo *) (intptr_t) ptr;
    int ret = 0;
    int got_frame = 0;
    // Retry budget only decremented before the first decoded frame; prevents
    // spinning forever on undecodable input.
    int32_t triesCount = preview ? 50 : 6;
    //info->has_decoded_frames = false;
    while (!info->stopped && triesCount != 0) {
        // Read a new packet only when the previous one is fully consumed.
        if (info->pkt.size == 0) {
            ret = av_read_frame(info->fmt_ctx, &info->pkt);
            if (ret >= 0) {
                double pts = info->pkt.pts * av_q2d(info->video_stream->time_base);
                if (end_time > 0 && info->pkt.stream_index == info->video_stream_idx && pts > end_time) {
                    // Past the requested end: drop the packet so the flush
                    // branch below triggers the loop-back seek.
                    av_packet_unref(&info->pkt);
                    info->pkt.data = NULL;
                    info->pkt.size = 0;
                } else {
                    // Keep the original packet so it can be unref'd once drained.
                    info->orig_pkt = info->pkt;
                }
            }
        }
        if (info->pkt.size > 0) {
            ret = decode_packet(info, &got_frame);
            if (ret < 0) {
                // Tolerate decode errors once at least one frame came out.
                if (info->has_decoded_frames) {
                    ret = 0;
                }
                info->pkt.size = 0;
            } else {
                //LOGD("read size %d from packet", ret);
                // Advance past the bytes the decoder consumed.
                info->pkt.data += ret;
                info->pkt.size -= ret;
            }
            if (info->pkt.size == 0) {
                av_packet_unref(&info->orig_pkt);
            }
        } else {
            // No packet available: flush the decoder with an empty packet.
            info->pkt.data = NULL;
            info->pkt.size = 0;
            ret = decode_packet(info, &got_frame);
            if (ret < 0) {
                LOGE("can't decode packet flushed %s", info->src);
                return 0;
            }
            if (!preview && got_frame == 0) {
                if (info->has_decoded_frames) {
                    // Decoder drained: loop the animation back to start_time.
                    int64_t start_from = 0;
                    if (start_time > 0) {
                        start_from = (int64_t)(start_time / av_q2d(info->video_stream->time_base));
                    }
                    if ((ret = av_seek_frame(info->fmt_ctx, info->video_stream_idx, start_from, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_FRAME)) < 0) {
                        LOGE("can't seek to begin of file %s, %s", info->src, av_err2str(ret));
                        return 0;
                    } else {
                        avcodec_flush_buffers(info->video_dec_ctx);
                    }
                }
            }
        }
        if (ret < 0 || info->seeking) {
            // Error, or a seek was requested from another call.
            return 0;
        }
        if (got_frame) {
            //LOGD("decoded frame with w = %d, h = %d, format = %d", info->frame->width, info->frame->height, info->frame->format);
            // Only render the pixel formats writeFrameToBitmap can convert.
            if (info->frame->format == AV_PIX_FMT_YUV420P || info->frame->format == AV_PIX_FMT_BGRA || info->frame->format == AV_PIX_FMT_YUVJ420P || info->frame->format == AV_PIX_FMT_YUV444P || info->frame->format == AV_PIX_FMT_YUVA420P) {
                writeFrameToBitmap(env, info, data, bitmap, stride);
            }
            info->has_decoded_frames = true;
            av_frame_unref(info->frame);
            return 1;
        }
        if (!info->has_decoded_frames) {
            triesCount--;
        }
    }
    return 0;
}
  1211. extern "C" jint videoOnJNILoad(JavaVM *vm, JNIEnv *env) {
  1212. //av_log_set_callback(custom_log);
  1213. jclass_AnimatedFileDrawableStream = (jclass) env->NewGlobalRef(env->FindClass("org/telegram/messenger/AnimatedFileDrawableStream"));
  1214. if (jclass_AnimatedFileDrawableStream == 0) {
  1215. return JNI_FALSE;
  1216. }
  1217. jclass_AnimatedFileDrawableStream_read = env->GetMethodID(jclass_AnimatedFileDrawableStream, "read", "(II)I");
  1218. if (jclass_AnimatedFileDrawableStream_read == 0) {
  1219. return JNI_FALSE;
  1220. }
  1221. jclass_AnimatedFileDrawableStream_cancel = env->GetMethodID(jclass_AnimatedFileDrawableStream, "cancel", "()V");
  1222. if (jclass_AnimatedFileDrawableStream_cancel == 0) {
  1223. return JNI_FALSE;
  1224. }
  1225. jclass_AnimatedFileDrawableStream_isFinishedLoadingFile = env->GetMethodID(jclass_AnimatedFileDrawableStream, "isFinishedLoadingFile", "()Z");
  1226. if (jclass_AnimatedFileDrawableStream_isFinishedLoadingFile == 0) {
  1227. return JNI_FALSE;
  1228. }
  1229. jclass_AnimatedFileDrawableStream_getFinishedFilePath = env->GetMethodID(jclass_AnimatedFileDrawableStream, "getFinishedFilePath", "()Ljava/lang/String;");
  1230. if (jclass_AnimatedFileDrawableStream_getFinishedFilePath == 0) {
  1231. return JNI_FALSE;
  1232. }
  1233. return JNI_TRUE;
  1234. }