avcodec/get_bits: Make sure the input bitstream with padding can be addressed
author    Michael Niedermayer <michael@niedermayer.cc>
          Sat, 24 Mar 2018 00:38:53 +0000 (01:38 +0100)
committer Michael Niedermayer <michael@niedermayer.cc>
          Mon, 9 Jul 2018 23:18:52 +0000 (01:18 +0200)
Signed-off-by: Michael Niedermayer <michael@niedermayer.cc>
(cherry picked from commit e529fe7633762cb26a665fb6dee3be29b15285cc)
Signed-off-by: Michael Niedermayer <michael@niedermayer.cc>
libavcodec/get_bits.h

index 72f8b5f..5a71795 100644
--- a/libavcodec/get_bits.h
+++ b/libavcodec/get_bits.h
@@ -32,6 +32,7 @@
 #include "libavutil/intreadwrite.h"
 #include "libavutil/log.h"
 #include "libavutil/avassert.h"
+#include "avcodec.h"
 #include "mathops.h"
 
 /*
@@ -417,7 +418,7 @@ static inline int init_get_bits(GetBitContext *s, const uint8_t *buffer,
     int buffer_size;
     int ret = 0;
 
-    if (bit_size >= INT_MAX - 7 || bit_size < 0 || !buffer) {
+    if (bit_size >= INT_MAX - FFMAX(7, AV_INPUT_BUFFER_PADDING_SIZE*8) || bit_size < 0 || !buffer) {
         bit_size    = 0;
         buffer      = NULL;
         ret         = AVERROR_INVALIDDATA;
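
For context, below is a minimal usage sketch, not part of this commit, of how a caller inside libavcodec would pair AV_INPUT_BUFFER_PADDING_SIZE with init_get_bits(). The patched check rejects any bit_size for which bit_size + FFMAX(7, AV_INPUT_BUFFER_PADDING_SIZE*8) would reach INT_MAX, so offsets that run into the zero padding still fit in an int. The function name demo_read_bits() and its parameters are hypothetical; GetBitContext, init_get_bits(), get_bits(), av_malloc()/av_free() and AV_INPUT_BUFFER_PADDING_SIZE are the real APIs.

/* Minimal usage sketch (not part of this commit). */

#include <limits.h>
#include <stdint.h>
#include <string.h>

#include "libavutil/mem.h"
#include "avcodec.h"    /* AV_INPUT_BUFFER_PADDING_SIZE */
#include "get_bits.h"   /* internal header, libavcodec only */

static int demo_read_bits(const uint8_t *packet, int packet_size)
{
    GetBitContext gb;
    uint8_t *buf;
    int ret;

    /* Avoid overflowing the bit count before it reaches init_get_bits(). */
    if (packet_size < 0 || packet_size > INT_MAX / 8)
        return AVERROR_INVALIDDATA;

    /* The bit reader may fetch whole words past the payload, so the
     * buffer is expected to carry AV_INPUT_BUFFER_PADDING_SIZE bytes
     * of zeroed padding after the real data. */
    buf = av_malloc(packet_size + AV_INPUT_BUFFER_PADDING_SIZE);
    if (!buf)
        return AVERROR(ENOMEM);
    memcpy(buf, packet, packet_size);
    memset(buf + packet_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);

    /* With the patched check, init_get_bits() fails if
     * bit_size + AV_INPUT_BUFFER_PADDING_SIZE * 8 would reach INT_MAX,
     * i.e. if offsets into the padded region could not be addressed
     * with an int. */
    ret = init_get_bits(&gb, buf, packet_size * 8);
    if (ret < 0) {
        av_free(buf);
        return ret;
    }

    /* ... parse fields with get_bits(&gb, n) ... */

    av_free(buf);
    return 0;
}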