author     Luca Barbato <[email protected]>       2017-04-29 00:34:12 +0000
committer  Diego Biurrun <[email protected]>  2017-05-02 18:39:10 +0200
commit     0ac1fec1c3dacedabbf3dd4122ef4bf8523e688c (patch)
tree       1a9513582759fa7f12de6888d4b4c01b8649631a /libavcodec/bitstream.h
parent     73fc82f3434d9b68f0948700c4fd8f5056ae5afe (diff)
bitstream: Move VLC reading functions into the vlc.h header

This makes the bitstream.h header leaner.

Signed-off-by: Diego Biurrun <[email protected]>
Diffstat (limited to 'libavcodec/bitstream.h')
-rw-r--r--  libavcodec/bitstream.h  73
1 file changed, 0 insertions(+), 73 deletions(-)
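
To illustrate the practical effect (not part of the commit itself): code that reads VLC symbols is now expected to include vlc.h directly rather than picking it up transitively through bitstream.h. The sketch below is hypothetical; decode_symbol() and the bits/max_depth values 9 and 3 are invented for the example, and it assumes the usual VLC struct from vlc.h with its table member, while bitstream_read_vlc() is the function shown in the diff.

#include "bitstream.h"
#include "vlc.h"    /* now required explicitly for the VLC reading helpers */

/* Hypothetical caller: 9-bit table lookups, at most 3 lookups for the
 * longest code, i.e. max_depth = (max_vlc_length + bits - 1) / bits. */
static int decode_symbol(BitstreamContext *bc, VLC *vlc)
{
    return bitstream_read_vlc(bc, vlc->table, 9, 3);
}
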
diff --git a/libavcodec/bitstream.h b/libavcodec/bitstream.h
index 894a13e348..d6dd2b9411 100644
--- a/libavcodec/bitstream.h
+++ b/libavcodec/bitstream.h
@@ -33,7 +33,6 @@
#include "libavutil/log.h"
#include "mathops.h"
-#include "vlc.h"
typedef struct BitstreamContext {
uint64_t bits; // stores bits read from the buffer
@@ -287,78 +286,6 @@ static inline int bitstream_read_xbits(BitstreamContext *bc, unsigned length)
     return ((((uint32_t)(sign ^ cache)) >> (32 - length)) ^ sign) - sign;
 }
 
-/* Return the LUT element for the given bitstream configuration. */
-static inline int set_idx(BitstreamContext *bc, int code, int *n, int *nb_bits,
-                          VLC_TYPE (*table)[2])
-{
-    unsigned idx;
-
-    *nb_bits = -*n;
-    idx      = bitstream_peek(bc, *nb_bits) + code;
-    *n       = table[idx][1];
-
-    return table[idx][0];
-}
-
-/**
- * Parse a VLC code.
- * @param bits is the number of bits which will be read at once, must be
- *             identical to nb_bits in init_vlc()
- * @param max_depth is the number of times bits bits must be read to completely
- *                  read the longest VLC code
- *                  = (max_vlc_length + bits - 1) / bits
- * If the VLC code is invalid and max_depth = 1, then no bits will be removed.
- * If the VLC code is invalid and max_depth > 1, then the number of bits removed
- * is undefined. */
-static inline int bitstream_read_vlc(BitstreamContext *bc, VLC_TYPE (*table)[2],
-                                     int bits, int max_depth)
-{
-    int nb_bits;
-    unsigned idx = bitstream_peek(bc, bits);
-    int code     = table[idx][0];
-    int n        = table[idx][1];
-
-    if (max_depth > 1 && n < 0) {
-        skip_remaining(bc, bits);
-        code = set_idx(bc, code, &n, &nb_bits, table);
-        if (max_depth > 2 && n < 0) {
-            skip_remaining(bc, nb_bits);
-            code = set_idx(bc, code, &n, &nb_bits, table);
-        }
-    }
-    skip_remaining(bc, n);
-
-    return code;
-}
-
-#define BITSTREAM_RL_VLC(level, run, bc, table, bits, max_depth) \
-    do { \
-        int n, nb_bits; \
-        unsigned index = bitstream_peek(bc, bits); \
-        level = table[index].level; \
-        n     = table[index].len; \
- \
-        if (max_depth > 1 && n < 0) { \
-            bitstream_skip(bc, bits); \
- \
-            nb_bits = -n; \
- \
-            index = bitstream_peek(bc, nb_bits) + level; \
-            level = table[index].level; \
-            n     = table[index].len; \
-            if (max_depth > 2 && n < 0) { \
-                bitstream_skip(bc, nb_bits); \
-                nb_bits = -n; \
- \
-                index = bitstream_peek(bc, nb_bits) + level; \
-                level = table[index].level; \
-                n     = table[index].len; \
-            } \
-        } \
-        run   = table[index].run; \
-        bitstream_skip(bc, n); \
-    } while (0)
-
 /* Return decoded truncated unary code for the values 0, 1, 2. */
 static inline int bitstream_decode012(BitstreamContext *bc)
 {