/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.handler.codec.compression;

import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;

import java.util.zip.Adler32;
import java.util.zip.Checksum;

import static io.netty.handler.codec.compression.FastLz.BLOCK_TYPE_COMPRESSED;
import static io.netty.handler.codec.compression.FastLz.BLOCK_TYPE_NON_COMPRESSED;
import static io.netty.handler.codec.compression.FastLz.BLOCK_WITHOUT_CHECKSUM;
import static io.netty.handler.codec.compression.FastLz.BLOCK_WITH_CHECKSUM;
import static io.netty.handler.codec.compression.FastLz.CHECKSUM_OFFSET;
import static io.netty.handler.codec.compression.FastLz.LEVEL_1;
import static io.netty.handler.codec.compression.FastLz.LEVEL_2;
import static io.netty.handler.codec.compression.FastLz.LEVEL_AUTO;
import static io.netty.handler.codec.compression.FastLz.MAGIC_NUMBER;
import static io.netty.handler.codec.compression.FastLz.MAX_CHUNK_LENGTH;
import static io.netty.handler.codec.compression.FastLz.MIN_LENGTH_TO_COMPRESSION;
import static io.netty.handler.codec.compression.FastLz.OPTIONS_OFFSET;
import static io.netty.handler.codec.compression.FastLz.calculateOutputBufferLength;
import static io.netty.handler.codec.compression.FastLz.compress;
40  /**
41   * Compresses a {@link ByteBuf} using the FastLZ algorithm.
42   *
43   * See <a href="https://github.com/netty/netty/issues/2750">FastLZ format</a>.
44   */
45  public class FastLzFrameEncoder extends MessageToByteEncoder<ByteBuf> {
46      /**
47       * Compression level.
48       */
49      private final int level;
50  
51      /**
52       * Underlying checksum calculator in use.
53       */
54      private final ByteBufChecksum checksum;
55  
56      /**
57       * Creates a FastLZ encoder without checksum calculator and with auto detection of compression level.
58       */
59      public FastLzFrameEncoder() {
60          this(LEVEL_AUTO, null);
61      }
62  
63      /**
64       * Creates a FastLZ encoder with specified compression level and without checksum calculator.
65       *
66       * @param level supports only these values:
67       *        0 - Encoder will choose level automatically depending on the length of the input buffer.
68       *        1 - Level 1 is the fastest compression and generally useful for short data.
69       *        2 - Level 2 is slightly slower but it gives better compression ratio.
70       */
71      public FastLzFrameEncoder(int level) {
72          this(level, null);
73      }
74  
75      /**
76       * Creates a FastLZ encoder with auto detection of compression
77       * level and calculation of checksums as specified.
78       *
79       * @param validateChecksums
80       *        If true, the checksum of each block will be calculated and this value
81       *        will be added to the header of block.
82       *        By default {@link FastLzFrameEncoder} uses {@link java.util.zip.Adler32}
83       *        for checksum calculation.
84       */
85      public FastLzFrameEncoder(boolean validateChecksums) {
86          this(LEVEL_AUTO, validateChecksums ? new Adler32() : null);
87      }
88  
89      /**
90       * Creates a FastLZ encoder with specified compression level and checksum calculator.
91       *
92       * @param level supports only these values:
93       *        0 - Encoder will choose level automatically depending on the length of the input buffer.
94       *        1 - Level 1 is the fastest compression and generally useful for short data.
95       *        2 - Level 2 is slightly slower but it gives better compression ratio.
96       * @param checksum
97       *        the {@link Checksum} instance to use to check data for integrity.
98       *        You may set {@code null} if you don't want to validate checksum of each block.
99       */
100     public FastLzFrameEncoder(int level, Checksum checksum) {
101         super(ByteBuf.class);
102         if (level != LEVEL_AUTO && level != LEVEL_1 && level != LEVEL_2) {
103             throw new IllegalArgumentException(String.format(
104                     "level: %d (expected: %d or %d or %d)", level, LEVEL_AUTO, LEVEL_1, LEVEL_2));
105         }
106         this.level = level;
107         this.checksum = checksum == null ? null : ByteBufChecksum.wrapChecksum(checksum);
108     }
109 
110     @Override
111     protected void encode(ChannelHandlerContext ctx, ByteBuf in, ByteBuf out) throws Exception {
112         final ByteBufChecksum checksum = this.checksum;
113 
114         for (;;) {
115             if (!in.isReadable()) {
116                 return;
117             }
118             final int idx = in.readerIndex();
119             final int length = Math.min(in.readableBytes(), MAX_CHUNK_LENGTH);
120 
121             final int outputIdx = out.writerIndex();
122             out.setMedium(outputIdx, MAGIC_NUMBER);
123             int outputOffset = outputIdx + CHECKSUM_OFFSET + (checksum != null ? 4 : 0);
124 
125             final byte blockType;
126             final int chunkLength;
127             if (length < MIN_LENGTH_TO_COMPRESSION) {
128                 blockType = BLOCK_TYPE_NON_COMPRESSED;
129 
130                 out.ensureWritable(outputOffset + 2 + length);
131                 final int outputPtr = outputOffset + 2;
132 
133                 if (checksum != null) {
134                     checksum.reset();
135                     checksum.update(in, idx, length);
136                     out.setInt(outputIdx + CHECKSUM_OFFSET, (int) checksum.getValue());
137                 }
138                 out.setBytes(outputPtr, in, idx, length);
139                 chunkLength = length;
140             } else {
141                 // try to compress
142                 if (checksum != null) {
143                     checksum.reset();
144                     checksum.update(in, idx, length);
145                     out.setInt(outputIdx + CHECKSUM_OFFSET, (int) checksum.getValue());
146                 }
147 
148                 final int maxOutputLength = calculateOutputBufferLength(length);
149                 out.ensureWritable(outputOffset + 4 + maxOutputLength);
150                 final int outputPtr = outputOffset + 4;
151                 final int compressedLength = compress(in, in.readerIndex(), length, out, outputPtr, level);
152 
153                 if (compressedLength < length) {
154                     blockType = BLOCK_TYPE_COMPRESSED;
155                     chunkLength = compressedLength;
156 
157                     out.setShort(outputOffset, chunkLength);
158                     outputOffset += 2;
159                 } else {
160                     blockType = BLOCK_TYPE_NON_COMPRESSED;
161                     out.setBytes(outputOffset + 2, in, idx, length);
162                     chunkLength = length;
163                 }
164             }
165             out.setShort(outputOffset, length);
166 
167             out.setByte(outputIdx + OPTIONS_OFFSET,
168                     blockType | (checksum != null ? BLOCK_WITH_CHECKSUM : BLOCK_WITHOUT_CHECKSUM));
169             out.writerIndex(outputOffset + 2 + chunkLength);
170             in.skipBytes(length);
171         }
172     }
173 }