1 | /* Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved. |
2 | |
3 | This program is free software; you can redistribute it and/or modify |
4 | it under the terms of the GNU General Public License as published by |
5 | the Free Software Foundation; version 2 of the License. |
6 | |
7 | This program is distributed in the hope that it will be useful, |
8 | but WITHOUT ANY WARRANTY; without even the implied warranty of |
9 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
10 | GNU General Public License for more details. |
11 | |
12 | You should have received a copy of the GNU General Public License |
13 | along with this program; if not, write to the Free Software |
14 | Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ |
15 | |
/* Functions to read compressed records */
17 | |
18 | #include "fulltext.h" |
19 | |
20 | #define IS_CHAR ((uint) 32768) /* Bit if char (not offset) in tree */ |
21 | |
22 | /* Some definitions to keep in sync with myisampack.c */ |
23 | #define HEAD_LENGTH 32 /* Length of fixed header */ |
24 | |
25 | #if INT_MAX > 32767 |
26 | #define BITS_SAVED 32 |
27 | #define MAX_QUICK_TABLE_BITS 9 /* Because we may shift in 24 bits */ |
28 | #else |
29 | #define BITS_SAVED 16 |
30 | #define MAX_QUICK_TABLE_BITS 6 |
31 | #endif |
32 | |
33 | #define get_bit(BU) ((BU)->bits ? \ |
34 | (BU)->current_byte & ((mi_bit_type) 1 << --(BU)->bits) :\ |
35 | (fill_buffer(BU), (BU)->bits= BITS_SAVED-1,\ |
36 | (BU)->current_byte & ((mi_bit_type) 1 << (BITS_SAVED-1)))) |
37 | #define skip_to_next_byte(BU) ((BU)->bits&=~7) |
38 | #define get_bits(BU,count) (((BU)->bits >= count) ? (((BU)->current_byte >> ((BU)->bits-=count)) & mask[count]) : fill_and_get_bits(BU,count)) |
39 | |
40 | #define decode_bytes_test_bit(bit) \ |
41 | if (low_byte & (1 << (7-bit))) \ |
42 | pos++; \ |
43 | if (*pos & IS_CHAR) \ |
44 | { bits-=(bit+1); break; } \ |
45 | pos+= *pos |
46 | |
47 | /* Size in uint16 of a Huffman tree for byte compression of 256 byte values. */ |
48 | #define OFFSET_TABLE_SIZE 512 |
49 | |
50 | static uint read_huff_table(MI_BIT_BUFF *bit_buff,MI_DECODE_TREE *decode_tree, |
51 | uint16 **decode_table,uchar **intervall_buff, |
52 | uint16 *tmp_buff); |
53 | static void make_quick_table(uint16 *to_table,uint16 *decode_table, |
54 | uint *next_free,uint value,uint bits, |
55 | uint max_bits); |
56 | static void fill_quick_table(uint16 *table,uint bits, uint max_bits, |
57 | uint value); |
58 | static uint copy_decode_table(uint16 *to_pos,uint offset, |
59 | uint16 *decode_table); |
60 | static uint find_longest_bitstream(uint16 *table, uint16 *end); |
61 | static void (*get_unpack_function(MI_COLUMNDEF *rec))(MI_COLUMNDEF *field, |
62 | MI_BIT_BUFF *buff, |
63 | uchar *to, |
64 | uchar *end); |
65 | static void uf_zerofill_skip_zero(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
66 | uchar *to,uchar *end); |
67 | static void uf_skip_zero(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
68 | uchar *to,uchar *end); |
69 | static void uf_space_normal(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
70 | uchar *to,uchar *end); |
71 | static void uf_space_endspace_selected(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
72 | uchar *to, uchar *end); |
73 | static void uf_endspace_selected(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
74 | uchar *to,uchar *end); |
75 | static void uf_space_endspace(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
76 | uchar *to,uchar *end); |
77 | static void uf_endspace(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
78 | uchar *to,uchar *end); |
79 | static void uf_space_prespace_selected(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
80 | uchar *to, uchar *end); |
81 | static void uf_prespace_selected(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
82 | uchar *to,uchar *end); |
83 | static void uf_space_prespace(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
84 | uchar *to,uchar *end); |
85 | static void uf_prespace(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
86 | uchar *to,uchar *end); |
87 | static void uf_zerofill_normal(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
88 | uchar *to,uchar *end); |
89 | static void uf_constant(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
90 | uchar *to,uchar *end); |
91 | static void uf_intervall(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
92 | uchar *to,uchar *end); |
93 | static void uf_zero(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
94 | uchar *to,uchar *end); |
95 | static void uf_blob(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
96 | uchar *to, uchar *end); |
97 | static void uf_varchar1(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
98 | uchar *to, uchar *end); |
99 | static void uf_varchar2(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
100 | uchar *to, uchar *end); |
101 | static void decode_bytes(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff, |
102 | uchar *to,uchar *end); |
103 | static uint decode_pos(MI_BIT_BUFF *bit_buff,MI_DECODE_TREE *decode_tree); |
104 | static void init_bit_buffer(MI_BIT_BUFF *bit_buff,uchar *buffer,uint length); |
105 | static uint fill_and_get_bits(MI_BIT_BUFF *bit_buff,uint count); |
106 | static void fill_buffer(MI_BIT_BUFF *bit_buff); |
107 | static uint max_bit(uint value); |
108 | static uint read_pack_length(uint version, const uchar *buf, ulong *length); |
109 | #ifdef HAVE_MMAP |
110 | static uchar *_mi_mempack_get_block_info(MI_INFO *myisam, MI_BIT_BUFF *bit_buff, |
111 | MI_BLOCK_INFO *info, uchar **rec_buff_p, |
112 | uchar *); |
113 | #endif |
114 | |
115 | static mi_bit_type mask[]= |
116 | { |
117 | 0x00000000, |
118 | 0x00000001, 0x00000003, 0x00000007, 0x0000000f, |
119 | 0x0000001f, 0x0000003f, 0x0000007f, 0x000000ff, |
120 | 0x000001ff, 0x000003ff, 0x000007ff, 0x00000fff, |
121 | 0x00001fff, 0x00003fff, 0x00007fff, 0x0000ffff, |
122 | #if BITS_SAVED > 16 |
123 | 0x0001ffff, 0x0003ffff, 0x0007ffff, 0x000fffff, |
124 | 0x001fffff, 0x003fffff, 0x007fffff, 0x00ffffff, |
125 | 0x01ffffff, 0x03ffffff, 0x07ffffff, 0x0fffffff, |
126 | 0x1fffffff, 0x3fffffff, 0x7fffffff, 0xffffffff, |
127 | #endif |
128 | }; |
129 | |
130 | |
131 | /* Read all packed info, allocate memory and fix field structs */ |
132 | |
133 | my_bool _mi_read_pack_info(MI_INFO *info, pbool fix_keys) |
134 | { |
135 | File file; |
136 | int diff_length; |
137 | uint i,trees,huff_tree_bits,rec_reflength,length; |
138 | uint16 *decode_table,*tmp_buff; |
139 | ulong elements,intervall_length; |
140 | uchar *disk_cache; |
141 | uchar *intervall_buff; |
uchar header[HEAD_LENGTH];
143 | MYISAM_SHARE *share=info->s; |
144 | MI_BIT_BUFF bit_buff; |
145 | DBUG_ENTER("_mi_read_pack_info" ); |
146 | |
147 | if (myisam_quick_table_bits < 4) |
148 | myisam_quick_table_bits=4; |
149 | else if (myisam_quick_table_bits > MAX_QUICK_TABLE_BITS) |
150 | myisam_quick_table_bits=MAX_QUICK_TABLE_BITS; |
151 | |
152 | file=info->dfile; |
153 | my_errno=0; |
154 | if (mysql_file_read(file, (uchar*) header, sizeof(header), MYF(MY_NABP))) |
155 | { |
156 | if (!my_errno) |
157 | my_errno=HA_ERR_END_OF_FILE; |
158 | goto err0; |
159 | } |
160 | /* Only the first three bytes of magic number are independent of version. */ |
161 | if (memcmp((uchar*) header, (uchar*) myisam_pack_file_magic, 3)) |
162 | { |
163 | my_errno=HA_ERR_WRONG_IN_RECORD; |
164 | goto err0; |
165 | } |
166 | share->pack.version= header[3]; /* fourth byte of magic number */ |
167 | share->pack.header_length= uint4korr(header+4); |
168 | share->min_pack_length=(uint) uint4korr(header+8); |
169 | share->max_pack_length=(uint) uint4korr(header+12); |
170 | elements=uint4korr(header+16); |
171 | intervall_length=uint4korr(header+20); |
172 | trees=uint2korr(header+24); |
173 | share->pack.ref_length=header[26]; |
174 | rec_reflength=header[27]; |
175 | diff_length=(int) rec_reflength - (int) share->base.rec_reflength; |
176 | if (fix_keys) |
177 | share->rec_reflength=rec_reflength; |
178 | share->base.min_block_length=share->min_pack_length+1; |
179 | if (share->min_pack_length > 254) |
180 | share->base.min_block_length+=2; |
181 | DBUG_PRINT("info" , ("fixed header length: %u" , HEAD_LENGTH)); |
182 | DBUG_PRINT("info" , ("total header length: %lu" , share->pack.header_length)); |
183 | DBUG_PRINT("info" , ("pack file version: %u" , share->pack.version)); |
184 | DBUG_PRINT("info" , ("min pack length: %lu" , share->min_pack_length)); |
185 | DBUG_PRINT("info" , ("max pack length: %lu" , share->max_pack_length)); |
186 | DBUG_PRINT("info" , ("elements of all trees: %lu" , elements)); |
187 | DBUG_PRINT("info" , ("distinct values bytes: %lu" , intervall_length)); |
188 | DBUG_PRINT("info" , ("number of code trees: %u" , trees)); |
189 | DBUG_PRINT("info" , ("bytes for record lgt: %u" , share->pack.ref_length)); |
190 | DBUG_PRINT("info" , ("record pointer length: %u" , rec_reflength)); |
191 | |
192 | /* |
193 | Memory segment #1: |
194 | - Decode tree heads |
195 | - Distinct column values |
196 | */ |
197 | if (!(share->decode_trees=(MI_DECODE_TREE*) |
198 | my_malloc((uint) (trees*sizeof(MI_DECODE_TREE)+ |
199 | intervall_length*sizeof(uchar)), |
200 | MYF(MY_WME)))) |
201 | goto err0; |
202 | intervall_buff=(uchar*) (share->decode_trees+trees); |
203 | |
204 | /* |
205 | Memory segment #2: |
206 | - Decode tables |
207 | - Quick decode tables |
208 | - Temporary decode table |
209 | - Compressed data file header cache |
210 | This segment will be reallocated after construction of the tables. |
211 | */ |
212 | length=(uint) (elements*2+trees*(1 << myisam_quick_table_bits)); |
213 | /* |
214 | To keep some algorithms simpler, we accept that they access |
215 | bytes beyond the end of the input data. This can affect up to |
one byte less than the "word size" used in this file,
217 | which is BITS_SAVED / 8. To avoid accessing non-allocated |
218 | data, we add (BITS_SAVED / 8) - 1 bytes to the buffer size. |
219 | */ |
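/*
  Illustrative sizing (added; the numbers are hypothetical): with
  trees= 4, elements= 300 and myisam_quick_table_bits= 9, 'length' is
  300*2 + 4*(1 << 9) = 2648 uint16 elements, so the allocation below is
  (2648 + OFFSET_TABLE_SIZE) * sizeof(uint16) bytes plus the remaining
  pack file header bytes plus the (BITS_SAVED / 8) - 1 padding bytes.
*/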
220 | if (!(share->decode_tables=(uint16*) |
221 | my_malloc((length + OFFSET_TABLE_SIZE) * sizeof(uint16) + |
222 | (uint) (share->pack.header_length - sizeof(header) + |
223 | (BITS_SAVED / 8) - 1), MYF(MY_WME | MY_ZEROFILL)))) |
224 | goto err1; |
225 | tmp_buff=share->decode_tables+length; |
226 | disk_cache= (uchar*) (tmp_buff+OFFSET_TABLE_SIZE); |
227 | |
228 | if (mysql_file_read(file, disk_cache, |
229 | (uint) (share->pack.header_length-sizeof(header)), |
230 | MYF(MY_NABP))) |
231 | goto err2; |
232 | |
233 | huff_tree_bits=max_bit(trees ? trees-1 : 0); |
234 | init_bit_buffer(&bit_buff, disk_cache, |
235 | (uint) (share->pack.header_length-sizeof(header))); |
236 | /* Read new info for each field */ |
237 | for (i=0 ; i < share->base.fields ; i++) |
238 | { |
239 | share->rec[i].base_type=(enum en_fieldtype) get_bits(&bit_buff,5); |
240 | share->rec[i].pack_type=(uint) get_bits(&bit_buff,6); |
241 | share->rec[i].space_length_bits=get_bits(&bit_buff,5); |
242 | share->rec[i].huff_tree=share->decode_trees+(uint) get_bits(&bit_buff, |
243 | huff_tree_bits); |
244 | share->rec[i].unpack=get_unpack_function(share->rec+i); |
245 | DBUG_PRINT("info" , ("col: %2u type: %2u pack: %u slbits: %2u" , |
246 | i, share->rec[i].base_type, share->rec[i].pack_type, |
247 | share->rec[i].space_length_bits)); |
248 | } |
249 | skip_to_next_byte(&bit_buff); |
250 | /* |
251 | Construct the decoding tables from the file header. Keep track of |
252 | the used memory. |
253 | */ |
254 | decode_table=share->decode_tables; |
255 | for (i=0 ; i < trees ; i++) |
256 | if (read_huff_table(&bit_buff,share->decode_trees+i,&decode_table, |
257 | &intervall_buff,tmp_buff)) |
258 | goto err3; |
259 | /* Reallocate the decoding tables to the used size. */ |
260 | decode_table=(uint16*) |
261 | my_realloc((uchar*) share->decode_tables, |
262 | (uint) ((uchar*) decode_table - (uchar*) share->decode_tables), |
263 | MYF(MY_HOLD_ON_ERROR)); |
264 | /* Fix the table addresses in the tree heads. */ |
265 | { |
266 | my_ptrdiff_t diff=PTR_BYTE_DIFF(decode_table,share->decode_tables); |
267 | share->decode_tables=decode_table; |
268 | for (i=0 ; i < trees ; i++) |
269 | share->decode_trees[i].table=ADD_TO_PTR(share->decode_trees[i].table, |
270 | diff, uint16*); |
271 | } |
272 | |
273 | /* Fix record-ref-length for keys */ |
274 | if (fix_keys) |
275 | { |
276 | for (i=0 ; i < share->base.keys ; i++) |
277 | { |
278 | MI_KEYDEF *keyinfo= &share->keyinfo[i]; |
279 | keyinfo->keylength+= (uint16) diff_length; |
280 | keyinfo->minlength+= (uint16) diff_length; |
281 | keyinfo->maxlength+= (uint16) diff_length; |
282 | keyinfo->seg[keyinfo->flag & HA_FULLTEXT ? |
283 | FT_SEGS : keyinfo->keysegs].length= (uint16) rec_reflength; |
284 | } |
285 | if (share->ft2_keyinfo.seg) |
286 | { |
287 | MI_KEYDEF *ft2_keyinfo= &share->ft2_keyinfo; |
288 | ft2_keyinfo->keylength+= (uint16) diff_length; |
289 | ft2_keyinfo->minlength+= (uint16) diff_length; |
290 | ft2_keyinfo->maxlength+= (uint16) diff_length; |
291 | } |
292 | } |
293 | |
294 | if (bit_buff.error || bit_buff.pos < bit_buff.end) |
295 | goto err3; |
296 | |
297 | DBUG_RETURN(0); |
298 | |
299 | err3: |
300 | my_errno=HA_ERR_WRONG_IN_RECORD; |
301 | err2: |
302 | my_free(share->decode_tables); |
303 | err1: |
304 | my_free(share->decode_trees); |
305 | err0: |
306 | DBUG_RETURN(1); |
307 | } |
308 | |
309 | |
310 | /* |
311 | Read a huff-code-table from datafile. |
312 | |
313 | SYNOPSIS |
314 | read_huff_table() |
315 | bit_buff Bit buffer pointing at start of the |
316 | decoding table in the file header cache. |
317 | decode_tree Pointer to the decode tree head. |
318 | decode_table IN/OUT Address of a pointer to the next free space. |
319 | intervall_buff IN/OUT Address of a pointer to the next unused values. |
320 | tmp_buff Buffer for temporary extraction of a full |
321 | decoding table as read from bit_buff. |
322 | |
323 | RETURN |
324 | 0 OK. |
325 | 1 Error. |
326 | */ |
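/*
  Added summary of the on-disk table layout read below (derived from
  this function's code, not from separate format documentation):

    1 bit        kind: 0 = byte value compression,
                       1 = distinct column value compression
    kind == 0:   8 bits min_chr, 9 bits elements,
                 5 bits char_bits, 5 bits offset_bits
    kind == 1:   15 bits elements, 16 bits intervall_length,
                 5 bits char_bits, 5 bits offset_bits
    elements*2-2 tree cells, each prefixed by one bit:
                 1 = relative offset (offset_bits wide),
                 0 = byte value (char_bits wide, plus min_chr,
                     stored with IS_CHAR set)
    padding up to the next byte boundary; for kind == 1 the
    intervall_length bytes of distinct values follow here.
*/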
327 | |
328 | static uint read_huff_table(MI_BIT_BUFF *bit_buff, MI_DECODE_TREE *decode_tree, |
329 | uint16 **decode_table, uchar **intervall_buff, |
330 | uint16 *tmp_buff) |
331 | { |
332 | uint min_chr,elements,char_bits,offset_bits,size,intervall_length,table_bits, |
333 | next_free_offset; |
334 | uint16 *ptr,*end; |
335 | DBUG_ENTER("read_huff_table" ); |
336 | |
337 | if (!get_bits(bit_buff,1)) |
338 | { |
339 | /* Byte value compression. */ |
340 | min_chr=get_bits(bit_buff,8); |
341 | elements=get_bits(bit_buff,9); |
342 | char_bits=get_bits(bit_buff,5); |
343 | offset_bits=get_bits(bit_buff,5); |
344 | intervall_length=0; |
345 | ptr=tmp_buff; |
346 | DBUG_PRINT("info" , ("byte value compression" )); |
347 | DBUG_PRINT("info" , ("minimum byte value: %u" , min_chr)); |
348 | DBUG_PRINT("info" , ("number of tree nodes: %u" , elements)); |
349 | DBUG_PRINT("info" , ("bits for values: %u" , char_bits)); |
350 | DBUG_PRINT("info" , ("bits for tree offsets: %u" , offset_bits)); |
351 | if (elements > 256) |
352 | { |
353 | DBUG_PRINT("error" , ("ERROR: illegal number of tree elements: %u" , |
354 | elements)); |
355 | DBUG_RETURN(1); |
356 | } |
357 | } |
358 | else |
359 | { |
360 | /* Distinct column value compression. */ |
361 | min_chr=0; |
362 | elements=get_bits(bit_buff,15); |
363 | intervall_length=get_bits(bit_buff,16); |
364 | char_bits=get_bits(bit_buff,5); |
365 | offset_bits=get_bits(bit_buff,5); |
366 | decode_tree->quick_table_bits=0; |
367 | ptr= *decode_table; |
368 | DBUG_PRINT("info" , ("distinct column value compression" )); |
369 | DBUG_PRINT("info" , ("number of tree nodes: %u" , elements)); |
370 | DBUG_PRINT("info" , ("value buffer length: %u" , intervall_length)); |
371 | DBUG_PRINT("info" , ("bits for value index: %u" , char_bits)); |
372 | DBUG_PRINT("info" , ("bits for tree offsets: %u" , offset_bits)); |
373 | } |
374 | size=elements*2-2; |
375 | DBUG_PRINT("info" , ("tree size in uint16: %u" , size)); |
376 | DBUG_PRINT("info" , ("tree size in bytes: %u" , |
377 | size * (uint) sizeof(uint16))); |
378 | |
379 | for (end=ptr+size ; ptr < end ; ptr++) |
380 | { |
381 | if (get_bit(bit_buff)) |
382 | { |
383 | *ptr= (uint16) get_bits(bit_buff,offset_bits); |
384 | if ((ptr + *ptr >= end) || !*ptr) |
385 | { |
386 | DBUG_PRINT("error" , ("ERROR: illegal pointer in decode tree" )); |
387 | DBUG_RETURN(1); |
388 | } |
389 | } |
390 | else |
391 | *ptr= (uint16) (IS_CHAR + (get_bits(bit_buff,char_bits) + min_chr)); |
392 | } |
393 | skip_to_next_byte(bit_buff); |
394 | |
395 | decode_tree->table= *decode_table; |
396 | decode_tree->intervalls= *intervall_buff; |
397 | if (! intervall_length) |
398 | { |
399 | /* Byte value compression. ptr started from tmp_buff. */ |
400 | /* Find longest Huffman code from begin to end of tree in bits. */ |
401 | table_bits= find_longest_bitstream(tmp_buff, ptr); |
402 | if (table_bits >= OFFSET_TABLE_SIZE) |
403 | DBUG_RETURN(1); |
404 | if (table_bits > myisam_quick_table_bits) |
405 | table_bits=myisam_quick_table_bits; |
406 | DBUG_PRINT("info" , ("table bits: %u" , table_bits)); |
407 | |
408 | next_free_offset= (1 << table_bits); |
409 | make_quick_table(*decode_table,tmp_buff,&next_free_offset,0,table_bits, |
410 | table_bits); |
411 | (*decode_table)+= next_free_offset; |
412 | decode_tree->quick_table_bits=table_bits; |
413 | } |
414 | else |
415 | { |
416 | /* Distinct column value compression. ptr started from *decode_table */ |
417 | (*decode_table)=end; |
418 | /* |
419 | get_bits() moves some bytes to a cache buffer in advance. May need |
420 | to step back. |
421 | */ |
422 | bit_buff->pos-= bit_buff->bits/8; |
423 | /* Copy the distinct column values from the buffer. */ |
424 | memcpy(*intervall_buff,bit_buff->pos,(size_t) intervall_length); |
425 | (*intervall_buff)+=intervall_length; |
426 | bit_buff->pos+=intervall_length; |
427 | bit_buff->bits=0; |
428 | } |
429 | DBUG_RETURN(0); |
430 | } |
431 | |
432 | |
433 | /* |
434 | Make a quick_table for faster decoding. |
435 | |
436 | SYNOPSIS |
437 | make_quick_table() |
438 | to_table Target quick_table and remaining decode table. |
439 | decode_table Source Huffman (sub-)tree within tmp_buff. |
440 | next_free_offset IN/OUT Next free offset from to_table. |
441 | Starts behind quick_table on the top-level. |
442 | value Huffman bits found so far. |
443 | bits Remaining bits to be collected. |
444 | max_bits Total number of bits to collect (table_bits). |
445 | |
446 | DESCRIPTION |
447 | |
448 | The quick table is an array of 16-bit values. There exists one value |
449 | for each possible code representable by max_bits (table_bits) bits. |
450 | In most cases table_bits is 9. So there are 512 16-bit values. |
451 | |
452 | If the high-order bit (16) is set (IS_CHAR) then the array slot for |
453 | this value is a valid Huffman code for a resulting byte value. |
454 | |
455 | The low-order 8 bits (1..8) are the resulting byte value. |
456 | |
457 | Bits 9..14 are the length of the Huffman code for this byte value. |
458 | This means so many bits from the input stream were needed to |
459 | represent this byte value. The remaining bits belong to later |
460 | Huffman codes. This also means that for every Huffman code shorter |
than table_bits there are multiple entries in the array, which
462 | differ just in the unused bits. |
463 | |
464 | If the high-order bit (16) is clear (0) then the remaining bits are |
465 | the position of the remaining Huffman decode tree segment behind the |
466 | quick table. |
467 | |
468 | RETURN |
469 | void |
470 | */ |
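/*
  Illustrative example (added; the code and byte value are hypothetical):
  assume table_bits is 9 and the 3-bit Huffman code 0b101 decodes to the
  byte 0x41. Then all 2**6 = 64 quick_table slots whose top three index
  bits are 101 hold the same entry:

    entry= IS_CHAR | (3 << 8) | 0x41      (0x8000 | 0x0300 | 0x41 = 0x8341)

  so that (entry & 255) yields the decoded byte and ((entry >> 8) & 31)
  yields the number of input bits (3) to consume, exactly as
  decode_bytes() does.
*/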
471 | |
472 | static void make_quick_table(uint16 *to_table, uint16 *decode_table, |
473 | uint *next_free_offset, uint value, uint bits, |
474 | uint max_bits) |
475 | { |
476 | DBUG_ENTER("make_quick_table" ); |
477 | |
478 | /* |
Once we have descended the requested maximum number of bits (max_bits)
into the table, copy the rest of the Huffman table.
481 | */ |
482 | if (!bits--) |
483 | { |
484 | /* |
485 | Remaining left Huffman tree segment starts behind quick table. |
486 | Remaining right Huffman tree segment starts behind left segment. |
487 | */ |
488 | to_table[value]= (uint16) *next_free_offset; |
489 | /* |
490 | Re-construct the remaining Huffman tree segment at |
491 | next_free_offset in to_table. |
492 | */ |
493 | *next_free_offset= copy_decode_table(to_table, *next_free_offset, |
494 | decode_table); |
495 | DBUG_VOID_RETURN; |
496 | } |
497 | |
/* Descend on the left side. Left side bits are clear (0). */
499 | if (!(*decode_table & IS_CHAR)) |
500 | { |
501 | /* Not a leaf. Follow the pointer. */ |
502 | make_quick_table(to_table, decode_table + *decode_table, |
503 | next_free_offset, value, bits, max_bits); |
504 | } |
505 | else |
506 | { |
507 | /* |
508 | A leaf. A Huffman code is complete. Fill the quick_table |
509 | array for all possible bit strings starting with this Huffman |
510 | code. |
511 | */ |
512 | fill_quick_table(to_table + value, bits, max_bits, (uint) *decode_table); |
513 | } |
514 | |
/* Descend on the right side. Right side bits are set (1). */
516 | decode_table++; |
517 | value|= (1 << bits); |
518 | if (!(*decode_table & IS_CHAR)) |
519 | { |
520 | /* Not a leaf. Follow the pointer. */ |
521 | make_quick_table(to_table, decode_table + *decode_table, |
522 | next_free_offset, value, bits, max_bits); |
523 | } |
524 | else |
525 | { |
526 | /* |
527 | A leaf. A Huffman code is complete. Fill the quick_table |
528 | array for all possible bit strings starting with this Huffman |
529 | code. |
530 | */ |
531 | fill_quick_table(to_table + value, bits, max_bits, (uint) *decode_table); |
532 | } |
533 | |
534 | DBUG_VOID_RETURN; |
535 | } |
536 | |
537 | |
538 | /* |
539 | Fill quick_table for all possible values starting with this Huffman code. |
540 | |
541 | SYNOPSIS |
542 | fill_quick_table() |
543 | table Target quick_table position. |
544 | bits Unused bits from max_bits. |
545 | max_bits Total number of bits to collect (table_bits). |
546 | value The byte encoded by the found Huffman code. |
547 | |
548 | DESCRIPTION |
549 | |
550 | Fill the segment (all slots) of the quick_table array with the |
551 | resulting value for the found Huffman code. There are as many slots |
552 | as there are combinations representable by the unused bits. |
553 | |
554 | In most cases we use 9 table bits. Assume a 3-bit Huffman code. Then |
555 | there are 6 unused bits. Hence we fill 2**6 = 64 slots with the |
556 | value. |
557 | |
558 | RETURN |
559 | void |
560 | */ |
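/*
  Illustrative example (added; values are hypothetical): with max_bits= 9,
  suppose make_quick_table() reaches a leaf after the 3-bit code 0b101.
  At that point value= 0b101000000 = 320 and bits= 6, so this function
  fills the 2**6 = 64 slots 320..383 of the quick table, each with
  byte_value | ((9 - 6) << 8) | IS_CHAR.
*/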
561 | |
562 | static void fill_quick_table(uint16 *table, uint bits, uint max_bits, |
563 | uint value) |
564 | { |
565 | uint16 *end; |
566 | DBUG_ENTER("fill_quick_table" ); |
567 | |
568 | /* |
569 | Bits 1..8 of value represent the decoded byte value. |
570 | Bits 9..14 become the length of the Huffman code for this byte value. |
571 | Bit 16 flags a valid code (IS_CHAR). |
572 | */ |
573 | value|= (max_bits - bits) << 8 | IS_CHAR; |
574 | |
575 | for (end= table + ((my_ptrdiff_t) 1 << bits); table < end; table++) |
576 | { |
577 | *table= (uint16) value; |
578 | } |
579 | DBUG_VOID_RETURN; |
580 | } |
581 | |
582 | |
583 | /* |
584 | Reconstruct a decode subtree at the target position. |
585 | |
586 | SYNOPSIS |
587 | copy_decode_table() |
588 | to_pos Target quick_table and remaining decode table. |
589 | offset Next free offset from to_pos. |
590 | decode_table Source Huffman subtree within tmp_buff. |
591 | |
592 | NOTE |
593 | Pointers in the decode tree are relative to the pointers position. |
594 | |
595 | RETURN |
596 | next free offset from to_pos. |
597 | */ |
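/*
  Illustrative layout (added; a hypothetical three-symbol tree with the
  codes a=0, b=10, c=11). copy_decode_table() emits four uint16 cells
  starting at to_pos[offset]:

    cell 0: IS_CHAR | 'a'    left of root: leaf
    cell 1: 1                right of root: subtree starts 1 cell ahead
    cell 2: IS_CHAR | 'b'    left of that subtree: leaf
    cell 3: IS_CHAR | 'c'    right of that subtree: leaf

  When the first input bit is 1, a decoder at cells 0/1 advances to
  cell 1 and then to cell 1 + to_pos[1] = 2, exactly as decode_pos()
  does with 'pos+= *pos'.
*/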
598 | |
599 | static uint copy_decode_table(uint16 *to_pos, uint offset, |
600 | uint16 *decode_table) |
601 | { |
602 | uint prev_offset= offset; |
603 | DBUG_ENTER("copy_decode_table" ); |
604 | |
/* Descend on the left side. */
606 | if (!(*decode_table & IS_CHAR)) |
607 | { |
608 | /* Set a pointer to the next target node. */ |
609 | to_pos[offset]=2; |
610 | /* Copy the left hand subtree there. */ |
611 | offset=copy_decode_table(to_pos,offset+2,decode_table+ *decode_table); |
612 | } |
613 | else |
614 | { |
615 | /* Copy the byte value. */ |
616 | to_pos[offset]= *decode_table; |
617 | /* Step behind this node. */ |
618 | offset+=2; |
619 | } |
620 | |
/* Descend on the right side. */
622 | decode_table++; |
623 | if (!(*decode_table & IS_CHAR)) |
624 | { |
625 | /* Set a pointer to the next free target node. */ |
626 | to_pos[prev_offset+1]=(uint16) (offset-prev_offset-1); |
627 | /* Copy the right hand subtree to the entry of that node. */ |
628 | offset=copy_decode_table(to_pos,offset,decode_table+ *decode_table); |
629 | } |
630 | else |
631 | { |
632 | /* Copy the byte value. */ |
633 | to_pos[prev_offset+1]= *decode_table; |
634 | } |
635 | DBUG_RETURN(offset); |
636 | } |
637 | |
638 | |
639 | /* |
640 | Find the length of the longest Huffman code in this table in bits. |
641 | |
642 | SYNOPSIS |
643 | find_longest_bitstream() |
644 | table Code (sub-)table start. |
645 | end End of code table. |
646 | |
647 | IMPLEMENTATION |
648 | |
649 | Recursively follow the branch(es) of the code pair on every level of |
650 | the tree until two byte values (and no branch) are found. Add one to |
651 | each level when returning back from each recursion stage. |
652 | |
653 | 'end' is used for error checking only. A clean tree terminates |
654 | before reaching 'end'. Hence the exact value of 'end' is not too |
655 | important. However having it higher than necessary could lead to |
656 | misbehaviour should 'next' jump into the dirty area. |
657 | |
658 | RETURN |
659 | length Length of longest Huffman code in bits. |
660 | >= OFFSET_TABLE_SIZE Error, broken tree. It does not end before 'end'. |
661 | */ |
662 | |
663 | static uint find_longest_bitstream(uint16 *table, uint16 *end) |
664 | { |
665 | uint length= 1; |
666 | uint length2; |
667 | |
668 | if (!(*table & IS_CHAR)) |
669 | { |
670 | uint16 *next= table + *table; |
671 | if (next > end || next == table) |
672 | { |
673 | DBUG_PRINT("error" , ("ERROR: illegal pointer in decode tree" )); |
674 | return OFFSET_TABLE_SIZE; |
675 | } |
676 | length= find_longest_bitstream(next, end) + 1; |
677 | } |
678 | table++; |
679 | if (!(*table & IS_CHAR)) |
680 | { |
681 | uint16 *next= table + *table; |
682 | if (next > end || next == table) |
683 | { |
684 | DBUG_PRINT("error" , ("ERROR: illegal pointer in decode tree" )); |
685 | return OFFSET_TABLE_SIZE; |
686 | } |
687 | length2= find_longest_bitstream(next, end) + 1; |
688 | length=MY_MAX(length,length2); |
689 | } |
690 | return length; |
691 | } |
692 | |
693 | |
694 | /* |
695 | Read record from datafile. |
696 | |
697 | SYNOPSIS |
698 | _mi_read_pack_record() |
699 | info A pointer to MI_INFO. |
700 | filepos File offset of the record. |
701 | buf RETURN The buffer to receive the record. |
702 | |
703 | RETURN |
704 | 0 on success |
705 | HA_ERR_WRONG_IN_RECORD or -1 on error |
706 | */ |
707 | |
708 | int _mi_read_pack_record(MI_INFO *info, my_off_t filepos, uchar *buf) |
709 | { |
710 | MI_BLOCK_INFO block_info; |
711 | File file; |
712 | DBUG_ENTER("mi_read_pack_record" ); |
713 | |
714 | if (filepos == HA_OFFSET_ERROR) |
715 | DBUG_RETURN(-1); /* _search() didn't find record */ |
716 | |
717 | file=info->dfile; |
718 | if (_mi_pack_get_block_info(info, &info->bit_buff, &block_info, |
719 | &info->rec_buff, file, filepos)) |
720 | goto err; |
721 | if (mysql_file_read(file, (uchar*) info->rec_buff + block_info.offset, |
722 | block_info.rec_len - block_info.offset, MYF(MY_NABP))) |
723 | goto panic; |
724 | info->update|= HA_STATE_AKTIV; |
725 | DBUG_RETURN(_mi_pack_rec_unpack(info, &info->bit_buff, buf, |
726 | info->rec_buff, block_info.rec_len)); |
727 | panic: |
728 | my_errno=HA_ERR_WRONG_IN_RECORD; |
729 | err: |
730 | DBUG_RETURN(-1); |
731 | } |
732 | |
733 | |
734 | |
735 | int _mi_pack_rec_unpack(register MI_INFO *info, MI_BIT_BUFF *bit_buff, |
736 | register uchar *to, uchar *from, ulong reclength) |
737 | { |
738 | uchar *end_field; |
739 | reg3 MI_COLUMNDEF *end; |
740 | MI_COLUMNDEF *current_field; |
741 | MYISAM_SHARE *share=info->s; |
742 | DBUG_ENTER("_mi_pack_rec_unpack" ); |
743 | |
744 | init_bit_buffer(bit_buff, (uchar*) from, reclength); |
745 | |
746 | for (current_field=share->rec, end=current_field+share->base.fields ; |
747 | current_field < end ; |
748 | current_field++,to=end_field) |
749 | { |
750 | end_field=to+current_field->length; |
751 | (*current_field->unpack)(current_field, bit_buff, (uchar*) to, |
752 | (uchar*) end_field); |
753 | } |
754 | if (!bit_buff->error && |
755 | bit_buff->pos - bit_buff->bits / 8 == bit_buff->end) |
756 | DBUG_RETURN(0); |
757 | info->update&= ~HA_STATE_AKTIV; |
758 | DBUG_RETURN(my_errno=HA_ERR_WRONG_IN_RECORD); |
759 | } /* _mi_pack_rec_unpack */ |
760 | |
761 | |
762 | /* Return function to unpack field */ |
763 | |
764 | static void (*get_unpack_function(MI_COLUMNDEF *rec)) |
765 | (MI_COLUMNDEF *, MI_BIT_BUFF *, uchar *, uchar *) |
766 | { |
767 | switch (rec->base_type) { |
768 | case FIELD_SKIP_ZERO: |
769 | if (rec->pack_type & PACK_TYPE_ZERO_FILL) |
770 | return &uf_zerofill_skip_zero; |
771 | return &uf_skip_zero; |
772 | case FIELD_NORMAL: |
773 | if (rec->pack_type & PACK_TYPE_SPACE_FIELDS) |
774 | return &uf_space_normal; |
775 | if (rec->pack_type & PACK_TYPE_ZERO_FILL) |
776 | return &uf_zerofill_normal; |
777 | return &decode_bytes; |
778 | case FIELD_SKIP_ENDSPACE: |
779 | if (rec->pack_type & PACK_TYPE_SPACE_FIELDS) |
780 | { |
781 | if (rec->pack_type & PACK_TYPE_SELECTED) |
782 | return &uf_space_endspace_selected; |
783 | return &uf_space_endspace; |
784 | } |
785 | if (rec->pack_type & PACK_TYPE_SELECTED) |
786 | return &uf_endspace_selected; |
787 | return &uf_endspace; |
788 | case FIELD_SKIP_PRESPACE: |
789 | if (rec->pack_type & PACK_TYPE_SPACE_FIELDS) |
790 | { |
791 | if (rec->pack_type & PACK_TYPE_SELECTED) |
792 | return &uf_space_prespace_selected; |
793 | return &uf_space_prespace; |
794 | } |
795 | if (rec->pack_type & PACK_TYPE_SELECTED) |
796 | return &uf_prespace_selected; |
797 | return &uf_prespace; |
798 | case FIELD_CONSTANT: |
799 | return &uf_constant; |
800 | case FIELD_INTERVALL: |
801 | return &uf_intervall; |
802 | case FIELD_ZERO: |
803 | case FIELD_CHECK: |
804 | return &uf_zero; |
805 | case FIELD_BLOB: |
806 | return &uf_blob; |
807 | case FIELD_VARCHAR: |
808 | if (rec->length <= 256) /* 255 + 1 byte length */ |
809 | return &uf_varchar1; |
810 | return &uf_varchar2; |
811 | case FIELD_LAST: |
812 | default: |
return 0; /* This should never happen */
814 | } |
815 | } |
816 | |
817 | /* The different functions to unpack a field */ |
818 | |
819 | static void uf_zerofill_skip_zero(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
820 | uchar *to, uchar *end) |
821 | { |
822 | if (get_bit(bit_buff)) |
823 | bzero((char*) to,(uint) (end-to)); |
824 | else |
825 | { |
826 | end-=rec->space_length_bits; |
827 | decode_bytes(rec,bit_buff,to,end); |
828 | bzero((char*) end,rec->space_length_bits); |
829 | } |
830 | } |
831 | |
832 | static void uf_skip_zero(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
833 | uchar *end) |
834 | { |
835 | if (get_bit(bit_buff)) |
836 | bzero((char*) to,(uint) (end-to)); |
837 | else |
838 | decode_bytes(rec,bit_buff,to,end); |
839 | } |
840 | |
841 | static void uf_space_normal(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
842 | uchar *end) |
843 | { |
844 | if (get_bit(bit_buff)) |
845 | bfill((uchar*) to,(end-to),' '); |
846 | else |
847 | decode_bytes(rec,bit_buff,to,end); |
848 | } |
849 | |
850 | static void uf_space_endspace_selected(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
851 | uchar *to, uchar *end) |
852 | { |
853 | uint spaces; |
854 | if (get_bit(bit_buff)) |
855 | bfill((uchar*) to,(end-to),' '); |
856 | else |
857 | { |
858 | if (get_bit(bit_buff)) |
859 | { |
860 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
861 | { |
862 | bit_buff->error=1; |
863 | return; |
864 | } |
865 | if (to+spaces != end) |
866 | decode_bytes(rec,bit_buff,to,end-spaces); |
867 | bfill((uchar*) end-spaces,spaces,' '); |
868 | } |
869 | else |
870 | decode_bytes(rec,bit_buff,to,end); |
871 | } |
872 | } |
873 | |
874 | static void uf_endspace_selected(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
875 | uchar *to, uchar *end) |
876 | { |
877 | uint spaces; |
878 | if (get_bit(bit_buff)) |
879 | { |
880 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
881 | { |
882 | bit_buff->error=1; |
883 | return; |
884 | } |
885 | if (to+spaces != end) |
886 | decode_bytes(rec,bit_buff,to,end-spaces); |
887 | bfill((uchar*) end-spaces,spaces,' '); |
888 | } |
889 | else |
890 | decode_bytes(rec,bit_buff,to,end); |
891 | } |
892 | |
893 | static void uf_space_endspace(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
894 | uchar *end) |
895 | { |
896 | uint spaces; |
897 | if (get_bit(bit_buff)) |
898 | bfill((uchar*) to,(end-to),' '); |
899 | else |
900 | { |
901 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
902 | { |
903 | bit_buff->error=1; |
904 | return; |
905 | } |
906 | if (to+spaces != end) |
907 | decode_bytes(rec,bit_buff,to,end-spaces); |
908 | bfill((uchar*) end-spaces,spaces,' '); |
909 | } |
910 | } |
911 | |
912 | static void uf_endspace(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
913 | uchar *end) |
914 | { |
915 | uint spaces; |
916 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
917 | { |
918 | bit_buff->error=1; |
919 | return; |
920 | } |
921 | if (to+spaces != end) |
922 | decode_bytes(rec,bit_buff,to,end-spaces); |
923 | bfill((uchar*) end-spaces,spaces,' '); |
924 | } |
925 | |
926 | static void uf_space_prespace_selected(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
927 | uchar *to, uchar *end) |
928 | { |
929 | uint spaces; |
930 | if (get_bit(bit_buff)) |
931 | bfill((uchar*) to,(end-to),' '); |
932 | else |
933 | { |
934 | if (get_bit(bit_buff)) |
935 | { |
936 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
937 | { |
938 | bit_buff->error=1; |
939 | return; |
940 | } |
941 | bfill((uchar*) to,spaces,' '); |
942 | if (to+spaces != end) |
943 | decode_bytes(rec,bit_buff,to+spaces,end); |
944 | } |
945 | else |
946 | decode_bytes(rec,bit_buff,to,end); |
947 | } |
948 | } |
949 | |
950 | |
951 | static void uf_prespace_selected(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
952 | uchar *to, uchar *end) |
953 | { |
954 | uint spaces; |
955 | if (get_bit(bit_buff)) |
956 | { |
957 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
958 | { |
959 | bit_buff->error=1; |
960 | return; |
961 | } |
962 | bfill((uchar*) to,spaces,' '); |
963 | if (to+spaces != end) |
964 | decode_bytes(rec,bit_buff,to+spaces,end); |
965 | } |
966 | else |
967 | decode_bytes(rec,bit_buff,to,end); |
968 | } |
969 | |
970 | |
971 | static void uf_space_prespace(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
972 | uchar *end) |
973 | { |
974 | uint spaces; |
975 | if (get_bit(bit_buff)) |
976 | bfill((uchar*) to,(end-to),' '); |
977 | else |
978 | { |
979 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
980 | { |
981 | bit_buff->error=1; |
982 | return; |
983 | } |
984 | bfill((uchar*) to,spaces,' '); |
985 | if (to+spaces != end) |
986 | decode_bytes(rec,bit_buff,to+spaces,end); |
987 | } |
988 | } |
989 | |
990 | static void uf_prespace(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
991 | uchar *end) |
992 | { |
993 | uint spaces; |
994 | if ((spaces=get_bits(bit_buff,rec->space_length_bits))+to > end) |
995 | { |
996 | bit_buff->error=1; |
997 | return; |
998 | } |
999 | bfill((uchar*) to,spaces,' '); |
1000 | if (to+spaces != end) |
1001 | decode_bytes(rec,bit_buff,to+spaces,end); |
1002 | } |
1003 | |
1004 | static void uf_zerofill_normal(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
1005 | uchar *end) |
1006 | { |
1007 | end-=rec->space_length_bits; |
1008 | decode_bytes(rec,bit_buff,(uchar*) to,end); |
1009 | bzero((char*) end,rec->space_length_bits); |
1010 | } |
1011 | |
1012 | static void uf_constant(MI_COLUMNDEF *rec, |
1013 | MI_BIT_BUFF *bit_buff __attribute__((unused)), |
1014 | uchar *to, |
1015 | uchar *end) |
1016 | { |
1017 | memcpy(to,rec->huff_tree->intervalls,(size_t) (end-to)); |
1018 | } |
1019 | |
1020 | static void uf_intervall(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
1021 | uchar *end) |
1022 | { |
1023 | reg1 uint field_length=(uint) (end-to); |
1024 | memcpy(to,rec->huff_tree->intervalls+field_length*decode_pos(bit_buff, |
1025 | rec->huff_tree), |
1026 | (size_t) field_length); |
1027 | } |
1028 | |
1029 | |
1030 | /*ARGSUSED*/ |
1031 | static void uf_zero(MI_COLUMNDEF *rec __attribute__((unused)), |
1032 | MI_BIT_BUFF *bit_buff __attribute__((unused)), |
1033 | uchar *to, uchar *end) |
1034 | { |
1035 | bzero((char*) to,(uint) (end-to)); |
1036 | } |
1037 | |
1038 | static void uf_blob(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
1039 | uchar *to, uchar *end) |
1040 | { |
1041 | if (get_bit(bit_buff)) |
1042 | bzero((uchar*) to,(end-to)); |
1043 | else |
1044 | { |
1045 | ulong length=get_bits(bit_buff,rec->space_length_bits); |
1046 | uint pack_length=(uint) (end-to)-portable_sizeof_char_ptr; |
1047 | if (bit_buff->blob_pos+length > bit_buff->blob_end) |
1048 | { |
1049 | bit_buff->error=1; |
1050 | bzero((uchar*) to,(end-to)); |
1051 | return; |
1052 | } |
1053 | decode_bytes(rec,bit_buff,bit_buff->blob_pos,bit_buff->blob_pos+length); |
1054 | _mi_store_blob_length((uchar*) to,pack_length,length); |
1055 | memcpy(to+pack_length, &bit_buff->blob_pos, sizeof(char*)); |
1056 | bit_buff->blob_pos+=length; |
1057 | } |
1058 | } |
1059 | |
1060 | |
1061 | static void uf_varchar1(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
1062 | uchar *to, uchar *end __attribute__((unused))) |
1063 | { |
1064 | if (get_bit(bit_buff)) |
1065 | to[0]= 0; /* Zero lengths */ |
1066 | else |
1067 | { |
1068 | ulong length=get_bits(bit_buff,rec->space_length_bits); |
1069 | *to= (uchar) length; |
1070 | decode_bytes(rec,bit_buff,to+1,to+1+length); |
1071 | } |
1072 | } |
1073 | |
1074 | |
1075 | static void uf_varchar2(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, |
1076 | uchar *to, uchar *end __attribute__((unused))) |
1077 | { |
1078 | if (get_bit(bit_buff)) |
1079 | to[0]=to[1]=0; /* Zero lengths */ |
1080 | else |
1081 | { |
1082 | ulong length=get_bits(bit_buff,rec->space_length_bits); |
1083 | int2store(to,length); |
1084 | decode_bytes(rec,bit_buff,to+2,to+2+length); |
1085 | } |
1086 | } |
1087 | |
/* Functions to decode a buffer of bits */
1089 | |
1090 | #if BITS_SAVED == 64 |
1091 | |
1092 | static void decode_bytes(MI_COLUMNDEF *rec,MI_BIT_BUFF *bit_buff,uchar *to, |
1093 | uchar *end) |
1094 | { |
1095 | reg1 uint bits,low_byte; |
1096 | reg3 uint16 *pos; |
1097 | reg4 uint table_bits,table_and; |
1098 | MI_DECODE_TREE *decode_tree; |
1099 | |
decode_tree=rec->huff_tree;
1101 | bits=bit_buff->bits; /* Save in reg for quicker access */ |
1102 | table_bits=decode_tree->quick_table_bits; |
1103 | table_and= (1 << table_bits)-1; |
1104 | |
1105 | do |
1106 | { |
1107 | if (bits <= 32) |
1108 | { |
1109 | if (bit_buff->pos > bit_buff->end+4) |
1110 | { |
1111 | bit_buff->error=1; |
1112 | return; /* Can't be right */ |
1113 | } |
1114 | bit_buff->current_byte= (bit_buff->current_byte << 32) + |
1115 | ((((uint) bit_buff->pos[3])) + |
1116 | (((uint) bit_buff->pos[2]) << 8) + |
1117 | (((uint) bit_buff->pos[1]) << 16) + |
1118 | (((uint) bit_buff->pos[0]) << 24)); |
1119 | bit_buff->pos+=4; |
1120 | bits+=32; |
1121 | } |
1122 | /* |
1123 | First use info in quick_table. |
1124 | |
1125 | The quick table is an array of 16-bit values. There exists one |
1126 | value for each possible code representable by table_bits bits. |
1127 | In most cases table_bits is 9. So there are 512 16-bit values. |
1128 | |
1129 | If the high-order bit (16) is set (IS_CHAR) then the array slot |
1130 | for this value is a valid Huffman code for a resulting byte value. |
1131 | |
1132 | The low-order 8 bits (1..8) are the resulting byte value. |
1133 | |
1134 | Bits 9..14 are the length of the Huffman code for this byte value. |
1135 | This means so many bits from the input stream were needed to |
1136 | represent this byte value. The remaining bits belong to later |
1137 | Huffman codes. This also means that for every Huffman code shorter |
than table_bits there are multiple entries in the array, which
1139 | differ just in the unused bits. |
1140 | |
1141 | If the high-order bit (16) is clear (0) then the remaining bits are |
1142 | the position of the remaining Huffman decode tree segment behind the |
1143 | quick table. |
1144 | */ |
1145 | low_byte=(uint) (bit_buff->current_byte >> (bits - table_bits)) & table_and; |
1146 | low_byte=decode_tree->table[low_byte]; |
1147 | if (low_byte & IS_CHAR) |
1148 | { |
1149 | /* |
1150 | All Huffman codes of less or equal table_bits length are in the |
1151 | quick table. This is one of them. |
1152 | */ |
1153 | *to++ = (low_byte & 255); /* Found char in quick table */ |
1154 | bits-= ((low_byte >> 8) & 31); /* Remove bits used */ |
1155 | } |
1156 | else |
1157 | { /* Map through rest of decode-table */ |
1158 | /* This means that the Huffman code must be longer than table_bits. */ |
1159 | pos=decode_tree->table+low_byte; |
1160 | bits-=table_bits; |
1161 | /* NOTE: decode_bytes_test_bit() is a macro which contains a break !!! */ |
1162 | for (;;) |
1163 | { |
1164 | low_byte=(uint) (bit_buff->current_byte >> (bits-8)); |
1165 | decode_bytes_test_bit(0); |
1166 | decode_bytes_test_bit(1); |
1167 | decode_bytes_test_bit(2); |
1168 | decode_bytes_test_bit(3); |
1169 | decode_bytes_test_bit(4); |
1170 | decode_bytes_test_bit(5); |
1171 | decode_bytes_test_bit(6); |
1172 | decode_bytes_test_bit(7); |
1173 | bits-=8; |
1174 | } |
1175 | *to++ = *pos; |
1176 | } |
1177 | } while (to != end); |
1178 | |
1179 | bit_buff->bits=bits; |
1180 | return; |
1181 | } |
1182 | |
1183 | #else |
1184 | |
1185 | static void decode_bytes(MI_COLUMNDEF *rec, MI_BIT_BUFF *bit_buff, uchar *to, |
1186 | uchar *end) |
1187 | { |
1188 | reg1 uint bits,low_byte; |
1189 | reg3 uint16 *pos; |
1190 | reg4 uint table_bits,table_and; |
1191 | MI_DECODE_TREE *decode_tree; |
1192 | |
1193 | decode_tree=rec->huff_tree; |
1194 | bits=bit_buff->bits; /* Save in reg for quicker access */ |
1195 | table_bits=decode_tree->quick_table_bits; |
1196 | table_and= (1 << table_bits)-1; |
1197 | |
1198 | do |
1199 | { |
1200 | if (bits < table_bits) |
1201 | { |
1202 | if (bit_buff->pos > bit_buff->end+1) |
1203 | { |
1204 | bit_buff->error=1; |
1205 | return; /* Can't be right */ |
1206 | } |
1207 | #if BITS_SAVED == 32 |
1208 | bit_buff->current_byte= (bit_buff->current_byte << 24) + |
1209 | (((uint) ((uchar) bit_buff->pos[2]))) + |
1210 | (((uint) ((uchar) bit_buff->pos[1])) << 8) + |
1211 | (((uint) ((uchar) bit_buff->pos[0])) << 16); |
1212 | bit_buff->pos+=3; |
1213 | bits+=24; |
1214 | #else |
if (bits) /* We must have at least 9 bits */
1216 | { |
1217 | bit_buff->current_byte= (bit_buff->current_byte << 8) + |
1218 | (uint) ((uchar) bit_buff->pos[0]); |
1219 | bit_buff->pos++; |
1220 | bits+=8; |
1221 | } |
1222 | else |
1223 | { |
1224 | bit_buff->current_byte= ((uint) ((uchar) bit_buff->pos[0]) << 8) + |
1225 | ((uint) ((uchar) bit_buff->pos[1])); |
1226 | bit_buff->pos+=2; |
1227 | bits+=16; |
1228 | } |
1229 | #endif |
1230 | } |
1231 | /* First use info in quick_table */ |
1232 | low_byte=(bit_buff->current_byte >> (bits - table_bits)) & table_and; |
1233 | low_byte=decode_tree->table[low_byte]; |
1234 | if (low_byte & IS_CHAR) |
1235 | { |
1236 | *to++ = (low_byte & 255); /* Found char in quick table */ |
1237 | bits-= ((low_byte >> 8) & 31); /* Remove bits used */ |
1238 | } |
1239 | else |
1240 | { /* Map through rest of decode-table */ |
1241 | pos=decode_tree->table+low_byte; |
1242 | bits-=table_bits; |
1243 | for (;;) |
1244 | { |
1245 | if (bits < 8) |
1246 | { /* We don't need to check end */ |
1247 | #if BITS_SAVED == 32 |
1248 | bit_buff->current_byte= (bit_buff->current_byte << 24) + |
1249 | (((uint) ((uchar) bit_buff->pos[2]))) + |
1250 | (((uint) ((uchar) bit_buff->pos[1])) << 8) + |
1251 | (((uint) ((uchar) bit_buff->pos[0])) << 16); |
1252 | bit_buff->pos+=3; |
1253 | bits+=24; |
1254 | #else |
1255 | bit_buff->current_byte= (bit_buff->current_byte << 8) + |
1256 | (uint) ((uchar) bit_buff->pos[0]); |
1257 | bit_buff->pos+=1; |
1258 | bits+=8; |
1259 | #endif |
1260 | } |
1261 | low_byte=(uint) (bit_buff->current_byte >> (bits-8)); |
1262 | decode_bytes_test_bit(0); |
1263 | decode_bytes_test_bit(1); |
1264 | decode_bytes_test_bit(2); |
1265 | decode_bytes_test_bit(3); |
1266 | decode_bytes_test_bit(4); |
1267 | decode_bytes_test_bit(5); |
1268 | decode_bytes_test_bit(6); |
1269 | decode_bytes_test_bit(7); |
1270 | bits-=8; |
1271 | } |
1272 | *to++ = (uchar) *pos; |
1273 | } |
1274 | } while (to != end); |
1275 | |
1276 | bit_buff->bits=bits; |
1277 | return; |
1278 | } |
#endif /* BITS_SAVED == 64 */
1280 | |
1281 | |
1282 | static uint decode_pos(MI_BIT_BUFF *bit_buff, MI_DECODE_TREE *decode_tree) |
1283 | { |
1284 | uint16 *pos=decode_tree->table; |
1285 | for (;;) |
1286 | { |
1287 | if (get_bit(bit_buff)) |
1288 | pos++; |
1289 | if (*pos & IS_CHAR) |
1290 | return (uint) (*pos & ~IS_CHAR); |
1291 | pos+= *pos; |
1292 | } |
1293 | } |
1294 | |
1295 | |
1296 | int _mi_read_rnd_pack_record(MI_INFO *info, uchar *buf, |
1297 | register my_off_t filepos, |
1298 | my_bool skip_deleted_blocks) |
1299 | { |
1300 | uint b_type; |
1301 | MI_BLOCK_INFO block_info; |
1302 | MYISAM_SHARE *share=info->s; |
1303 | DBUG_ENTER("_mi_read_rnd_pack_record" ); |
1304 | |
1305 | if (filepos >= info->state->data_file_length) |
1306 | { |
1307 | my_errno= HA_ERR_END_OF_FILE; |
1308 | goto err; |
1309 | } |
1310 | |
1311 | if (info->opt_flag & READ_CACHE_USED) |
1312 | { |
1313 | if (_mi_read_cache(&info->rec_cache, (uchar*) block_info.header, |
1314 | filepos, share->pack.ref_length, |
1315 | skip_deleted_blocks ? READING_NEXT : 0)) |
1316 | goto err; |
1317 | b_type=_mi_pack_get_block_info(info, &info->bit_buff, &block_info, |
1318 | &info->rec_buff, -1, filepos); |
1319 | } |
1320 | else |
1321 | b_type=_mi_pack_get_block_info(info, &info->bit_buff, &block_info, |
1322 | &info->rec_buff, info->dfile, filepos); |
1323 | if (b_type) |
1324 | goto err; /* Error code is already set */ |
1325 | #ifndef DBUG_OFF |
1326 | if (block_info.rec_len > share->max_pack_length) |
1327 | { |
1328 | my_errno=HA_ERR_WRONG_IN_RECORD; |
1329 | goto err; |
1330 | } |
1331 | #endif |
1332 | |
1333 | if (info->opt_flag & READ_CACHE_USED) |
1334 | { |
1335 | if (_mi_read_cache(&info->rec_cache, (uchar*) info->rec_buff, |
1336 | block_info.filepos, block_info.rec_len, |
1337 | skip_deleted_blocks ? READING_NEXT : 0)) |
1338 | goto err; |
1339 | } |
1340 | else |
1341 | { |
1342 | if (mysql_file_read(info->dfile, |
1343 | (uchar*) info->rec_buff + block_info.offset, |
1344 | block_info.rec_len-block_info.offset, MYF(MY_NABP))) |
1345 | goto err; |
1346 | } |
1347 | info->packed_length=block_info.rec_len; |
1348 | info->lastpos=filepos; |
1349 | info->nextpos=block_info.filepos+block_info.rec_len; |
1350 | info->update|= HA_STATE_AKTIV | HA_STATE_KEY_CHANGED; |
1351 | |
1352 | DBUG_RETURN (_mi_pack_rec_unpack(info, &info->bit_buff, buf, |
1353 | info->rec_buff, block_info.rec_len)); |
1354 | err: |
1355 | DBUG_RETURN(my_errno); |
1356 | } |
1357 | |
1358 | |
1359 | /* Read and process header from a huff-record-file */ |
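/*
  Added note on the block layout read below: a compressed record block
  starts with the packed record length (1, 3, 4 or 5 bytes, see
  read_pack_length()), followed, if the table has blobs, by the packed
  total blob length, and then by the Huffman-compressed record data.
  info->filepos is set to the start of that compressed data.
*/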
1360 | |
1361 | uint _mi_pack_get_block_info(MI_INFO *myisam, MI_BIT_BUFF *bit_buff, |
1362 | MI_BLOCK_INFO *info, uchar **rec_buff_p, |
1363 | File file, my_off_t filepos) |
1364 | { |
uchar *header=info->header;
1366 | uint head_length, UNINIT_VAR(ref_length); |
1367 | |
1368 | if (file >= 0) |
1369 | { |
1370 | ref_length=myisam->s->pack.ref_length; |
1371 | /* |
1372 | We can't use mysql_file_pread() here because mi_read_rnd_pack_record assumes |
1373 | position is ok |
1374 | */ |
1375 | mysql_file_seek(file, filepos, MY_SEEK_SET, MYF(0)); |
1376 | if (mysql_file_read(file, header, ref_length, MYF(MY_NABP))) |
1377 | return BLOCK_FATAL_ERROR; |
1378 | DBUG_DUMP("header" ,(uchar*) header,ref_length); |
1379 | } |
1380 | head_length= read_pack_length((uint) myisam->s->pack.version, header, |
1381 | &info->rec_len); |
1382 | if (myisam->s->base.blobs) |
1383 | { |
1384 | head_length+= read_pack_length((uint) myisam->s->pack.version, |
1385 | header + head_length, &info->blob_len); |
1386 | /* |
1387 | Ensure that the record buffer is big enough for the compressed |
1388 | record plus all expanded blobs. [We do not have an extra buffer |
1389 | for the resulting blobs. Sigh.] |
1390 | */ |
1391 | if (!(mi_alloc_rec_buff(myisam,info->rec_len + info->blob_len, |
1392 | rec_buff_p))) |
1393 | return BLOCK_FATAL_ERROR; /* not enough memory */ |
1394 | bit_buff->blob_pos= (uchar*) *rec_buff_p + info->rec_len; |
1395 | bit_buff->blob_end= bit_buff->blob_pos + info->blob_len; |
1396 | myisam->blob_length=info->blob_len; |
1397 | } |
1398 | info->filepos=filepos+head_length; |
1399 | if (file > 0) |
1400 | { |
1401 | info->offset=MY_MIN(info->rec_len, ref_length - head_length); |
1402 | memcpy(*rec_buff_p, header + head_length, info->offset); |
1403 | } |
1404 | return 0; |
1405 | } |
1406 | |
1407 | |
/* Routines for bit buffer */
/* Note: the buffer must be 6 bytes bigger than the longest row */
1410 | |
1411 | static void init_bit_buffer(MI_BIT_BUFF *bit_buff, uchar *buffer, uint length) |
1412 | { |
1413 | bit_buff->pos=buffer; |
1414 | bit_buff->end=buffer+length; |
1415 | bit_buff->bits=bit_buff->error=0; |
1416 | bit_buff->current_byte=0; /* Avoid purify errors */ |
1417 | } |
1418 | |
1419 | static uint fill_and_get_bits(MI_BIT_BUFF *bit_buff, uint count) |
1420 | { |
1421 | uint tmp; |
1422 | count-=bit_buff->bits; |
1423 | tmp=(bit_buff->current_byte & mask[bit_buff->bits]) << count; |
1424 | fill_buffer(bit_buff); |
1425 | bit_buff->bits=BITS_SAVED - count; |
1426 | return tmp+(bit_buff->current_byte >> (BITS_SAVED - count)); |
1427 | } |
1428 | |
1429 | /* Fill in empty bit_buff->current_byte from buffer */ |
1430 | /* Sets bit_buff->error if buffer is exhausted */ |
1431 | |
1432 | static void fill_buffer(MI_BIT_BUFF *bit_buff) |
1433 | { |
1434 | if (bit_buff->pos >= bit_buff->end) |
1435 | { |
1436 | bit_buff->error= 1; |
1437 | bit_buff->current_byte=0; |
1438 | return; |
1439 | } |
1440 | |
1441 | #if BITS_SAVED == 64 |
1442 | bit_buff->current_byte= ((((uint) ((uchar) bit_buff->pos[7]))) + |
1443 | (((uint) ((uchar) bit_buff->pos[6])) << 8) + |
1444 | (((uint) ((uchar) bit_buff->pos[5])) << 16) + |
1445 | (((uint) ((uchar) bit_buff->pos[4])) << 24) + |
1446 | ((ulonglong) |
1447 | ((((uint) ((uchar) bit_buff->pos[3]))) + |
1448 | (((uint) ((uchar) bit_buff->pos[2])) << 8) + |
1449 | (((uint) ((uchar) bit_buff->pos[1])) << 16) + |
1450 | (((uint) ((uchar) bit_buff->pos[0])) << 24)) << 32)); |
1451 | bit_buff->pos+=8; |
1452 | #else |
1453 | #if BITS_SAVED == 32 |
1454 | bit_buff->current_byte= (((uint) ((uchar) bit_buff->pos[3])) + |
1455 | (((uint) ((uchar) bit_buff->pos[2])) << 8) + |
1456 | (((uint) ((uchar) bit_buff->pos[1])) << 16) + |
1457 | (((uint) ((uchar) bit_buff->pos[0])) << 24)); |
1458 | bit_buff->pos+=4; |
1459 | #else |
1460 | bit_buff->current_byte= (uint) (((uint) ((uchar) bit_buff->pos[1]))+ |
1461 | (((uint) ((uchar) bit_buff->pos[0])) << 8)); |
1462 | bit_buff->pos+=2; |
1463 | #endif |
1464 | #endif |
1465 | } |
1466 | |
/* Get number of bits needed to represent value */
1468 | |
1469 | static uint max_bit(register uint value) |
1470 | { |
1471 | reg2 uint power=1; |
1472 | |
1473 | while ((value>>=1)) |
1474 | power++; |
1475 | return (power); |
1476 | } |
1477 | |
1478 | |
1479 | /***************************************************************************** |
1480 | Some redefined functions to handle files when we are using memmap |
1481 | *****************************************************************************/ |
1482 | #ifdef HAVE_SYS_MMAN_H |
1483 | #include <sys/mman.h> |
1484 | #endif |
1485 | |
1486 | #ifdef HAVE_MMAP |
1487 | |
1488 | static int _mi_read_mempack_record(MI_INFO *info,my_off_t filepos,uchar *buf); |
1489 | static int _mi_read_rnd_mempack_record(MI_INFO*, uchar *,my_off_t, my_bool); |
1490 | |
1491 | my_bool _mi_memmap_file(MI_INFO *info) |
1492 | { |
1493 | MYISAM_SHARE *share=info->s; |
1494 | my_bool eom; |
1495 | |
1496 | DBUG_ENTER("mi_memmap_file" ); |
1497 | |
1498 | if (!info->s->file_map) |
1499 | { |
1500 | my_off_t data_file_length= share->state.state.data_file_length; |
1501 | |
1502 | if (myisam_mmap_size != SIZE_T_MAX) |
1503 | { |
1504 | mysql_mutex_lock(&THR_LOCK_myisam_mmap); |
1505 | eom= data_file_length > myisam_mmap_size - myisam_mmap_used - MEMMAP_EXTRA_MARGIN; |
1506 | if (!eom) |
1507 | myisam_mmap_used+= data_file_length + MEMMAP_EXTRA_MARGIN; |
1508 | mysql_mutex_unlock(&THR_LOCK_myisam_mmap); |
1509 | } |
1510 | else |
1511 | eom= data_file_length > myisam_mmap_size - MEMMAP_EXTRA_MARGIN; |
1512 | |
1513 | if (eom) |
1514 | { |
1515 | DBUG_PRINT("warning" , ("File is too large for mmap" )); |
1516 | DBUG_RETURN(0); |
1517 | } |
1518 | if (mysql_file_seek(info->dfile, 0L, MY_SEEK_END, MYF(0)) < |
1519 | share->state.state.data_file_length+MEMMAP_EXTRA_MARGIN) |
1520 | { |
1521 | DBUG_PRINT("warning" ,("File isn't extended for memmap" )); |
1522 | if (myisam_mmap_size != SIZE_T_MAX) |
1523 | { |
1524 | mysql_mutex_lock(&THR_LOCK_myisam_mmap); |
1525 | myisam_mmap_used-= data_file_length + MEMMAP_EXTRA_MARGIN; |
1526 | mysql_mutex_unlock(&THR_LOCK_myisam_mmap); |
1527 | } |
1528 | DBUG_RETURN(0); |
1529 | } |
1530 | if (mi_dynmap_file(info, |
1531 | share->state.state.data_file_length + |
1532 | MEMMAP_EXTRA_MARGIN)) |
1533 | { |
1534 | if (myisam_mmap_size != SIZE_T_MAX) |
1535 | { |
1536 | mysql_mutex_lock(&THR_LOCK_myisam_mmap); |
1537 | myisam_mmap_used-= data_file_length + MEMMAP_EXTRA_MARGIN; |
1538 | mysql_mutex_unlock(&THR_LOCK_myisam_mmap); |
1539 | } |
1540 | DBUG_RETURN(0); |
1541 | } |
1542 | } |
1543 | info->opt_flag|= MEMMAP_USED; |
1544 | info->read_record= share->read_record= _mi_read_mempack_record; |
1545 | share->read_rnd= _mi_read_rnd_mempack_record; |
1546 | DBUG_RETURN(1); |
1547 | } |
1548 | |
1549 | |
1550 | void _mi_unmap_file(MI_INFO *info) |
1551 | { |
1552 | DBUG_ASSERT(info->s->options & HA_OPTION_COMPRESS_RECORD); |
1553 | |
1554 | (void) my_munmap((char*) info->s->file_map, info->s->mmaped_length); |
1555 | |
1556 | if (myisam_mmap_size != SIZE_T_MAX) |
1557 | { |
1558 | mysql_mutex_lock(&THR_LOCK_myisam_mmap); |
1559 | myisam_mmap_used-= info->s->mmaped_length; |
1560 | mysql_mutex_unlock(&THR_LOCK_myisam_mmap); |
1561 | } |
1562 | } |
1563 | |
1564 | |
1565 | static uchar *_mi_mempack_get_block_info(MI_INFO *myisam, MI_BIT_BUFF *bit_buff, |
1566 | MI_BLOCK_INFO *info, uchar **rec_buff_p, |
uchar *header)
1568 | { |
1569 | header+= read_pack_length((uint) myisam->s->pack.version, header, |
1570 | &info->rec_len); |
1571 | if (myisam->s->base.blobs) |
1572 | { |
1573 | header+= read_pack_length((uint) myisam->s->pack.version, header, |
1574 | &info->blob_len); |
1575 | /* mi_alloc_rec_buff sets my_errno on error */ |
1576 | if (!(mi_alloc_rec_buff(myisam, info->blob_len, |
1577 | rec_buff_p))) |
1578 | return 0; /* not enough memory */ |
1579 | bit_buff->blob_pos= (uchar*) *rec_buff_p; |
1580 | bit_buff->blob_end= (uchar*) *rec_buff_p + info->blob_len; |
1581 | } |
1582 | return header; |
1583 | } |
1584 | |
1585 | |
1586 | static int _mi_read_mempack_record(MI_INFO *info, my_off_t filepos, uchar *buf) |
1587 | { |
1588 | MI_BLOCK_INFO block_info; |
1589 | MYISAM_SHARE *share=info->s; |
1590 | uchar *pos; |
1591 | DBUG_ENTER("mi_read_mempack_record" ); |
1592 | |
1593 | if (filepos == HA_OFFSET_ERROR) |
1594 | DBUG_RETURN(-1); /* _search() didn't find record */ |
1595 | |
1596 | if (!(pos= (uchar*) _mi_mempack_get_block_info(info, &info->bit_buff, |
1597 | &block_info, &info->rec_buff, |
1598 | (uchar*) share->file_map+ |
1599 | filepos))) |
1600 | DBUG_RETURN(-1); |
1601 | DBUG_RETURN(_mi_pack_rec_unpack(info, &info->bit_buff, buf, |
1602 | pos, block_info.rec_len)); |
1603 | } |
1604 | |
1605 | |
1606 | /*ARGSUSED*/ |
1607 | static int _mi_read_rnd_mempack_record(MI_INFO *info, uchar *buf, |
1608 | register my_off_t filepos, |
1609 | my_bool skip_deleted_blocks |
1610 | __attribute__((unused))) |
1611 | { |
1612 | MI_BLOCK_INFO block_info; |
1613 | MYISAM_SHARE *share=info->s; |
1614 | uchar *pos,*start; |
1615 | DBUG_ENTER("_mi_read_rnd_mempack_record" ); |
1616 | |
1617 | if (filepos >= share->state.state.data_file_length) |
1618 | { |
1619 | my_errno=HA_ERR_END_OF_FILE; |
1620 | goto err; |
1621 | } |
1622 | if (!(pos= (uchar*) _mi_mempack_get_block_info(info, &info->bit_buff, |
1623 | &block_info, &info->rec_buff, |
1624 | (uchar*) |
1625 | (start=share->file_map+ |
1626 | filepos)))) |
1627 | goto err; |
1628 | #ifndef DBUG_OFF |
1629 | if (block_info.rec_len > info->s->max_pack_length) |
1630 | { |
1631 | my_errno=HA_ERR_WRONG_IN_RECORD; |
1632 | goto err; |
1633 | } |
1634 | #endif |
1635 | info->packed_length=block_info.rec_len; |
1636 | info->lastpos=filepos; |
1637 | info->nextpos=filepos+(uint) (pos-start)+block_info.rec_len; |
1638 | info->update|= HA_STATE_AKTIV | HA_STATE_KEY_CHANGED; |
1639 | |
1640 | DBUG_RETURN (_mi_pack_rec_unpack(info, &info->bit_buff, buf, |
1641 | pos, block_info.rec_len)); |
1642 | err: |
1643 | DBUG_RETURN(my_errno); |
1644 | } |
1645 | |
1646 | #endif /* HAVE_MMAP */ |
1647 | |
1648 | /* Save length of row */ |
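/*
  Added summary of the length encoding implemented below, with
  illustrative values:

    length < 254        ->  1 byte:  the length itself
    length <= 65535     ->  3 bytes: 254, then the length low byte first
    larger, version 1   ->  4 bytes: 255, then 3 bytes low byte first
    larger, version > 1 ->  5 bytes: 255, then 4 bytes low byte first

  For example, 200 is stored as C8; 1000 as FE E8 03; and 100000
  (with version > 1) as FF A0 86 01 00.
*/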
1649 | |
1650 | uint save_pack_length(uint version, uchar *block_buff, ulong length) |
1651 | { |
1652 | if (length < 254) |
1653 | { |
1654 | *(uchar*) block_buff= (uchar) length; |
1655 | return 1; |
1656 | } |
1657 | if (length <= 65535) |
1658 | { |
1659 | *(uchar*) block_buff=254; |
1660 | int2store(block_buff+1,(uint) length); |
1661 | return 3; |
1662 | } |
1663 | *(uchar*) block_buff=255; |
1664 | if (version == 1) /* old format */ |
1665 | { |
1666 | DBUG_ASSERT(length <= 0xFFFFFF); |
1667 | int3store(block_buff + 1, (ulong) length); |
1668 | return 4; |
1669 | } |
1670 | else |
1671 | { |
1672 | int4store(block_buff + 1, (ulong) length); |
1673 | return 5; |
1674 | } |
1675 | } |
1676 | |
1677 | |
1678 | static uint read_pack_length(uint version, const uchar *buf, ulong *length) |
1679 | { |
1680 | if (buf[0] < 254) |
1681 | { |
1682 | *length= buf[0]; |
1683 | return 1; |
1684 | } |
1685 | else if (buf[0] == 254) |
1686 | { |
1687 | *length= uint2korr(buf + 1); |
1688 | return 3; |
1689 | } |
1690 | if (version == 1) /* old format */ |
1691 | { |
1692 | *length= uint3korr(buf + 1); |
1693 | return 4; |
1694 | } |
1695 | else |
1696 | { |
1697 | *length= uint4korr(buf + 1); |
1698 | return 5; |
1699 | } |
1700 | } |
1701 | |
1702 | |
1703 | uint calc_pack_length(uint version, ulong length) |
1704 | { |
1705 | return (length < 254) ? 1 : (length < 65536) ? 3 : (version == 1) ? 4 : 5; |
1706 | } |
1707 | |