----------------------------------------------------------------------------------
--
-- LibCompress.lua
--
-- Authors: jjsheets and Galmok of European Stormrage (Horde)
-- Email : [email protected] and [email protected]
-- Licence: GPL version 2 (General Public License)
----------------------------------------------------------------------------------
local MAJOR, MINOR = "LibCompress",2
local LibCompress = {}
-- list of codecs in this file:
-- \000 - Never used
-- \001 - Uncompressed
-- \002 - LZW
-- \003 - Huffman
-- local is faster than global
local type = type
local select = select
local next = next
local loadstring = loadstring
local setmetatable = setmetatable
local assert = assert
local table_insert = table.insert
local table_remove = table.remove
local table_concat = table.concat
local string_char = string.char
local string_byte = string.byte
local string_len = string.len
local string_sub = string.sub
local unpack = unpack
local pairs = pairs
local math_modf = math.modf
local bit_band = bit32.band
local bit_bor = bit32.bor
local bit_bxor = bit32.bxor
local bit_bnot = bit32.bnot
local bit_lshift = bit32.lshift
local bit_rshift = bit32.rshift
local sort = table.sort
--------------------------------------------------------------------------------
-- Cleanup
local tables = {} -- tables that may be cleaned have to be kept here
local tables_to_clean = {} -- list of tables by name (string) that may be reset to {} after a timeout
local timeout = 15
local tTimer
-- tables that may be erased
function LibCompress:OnCleanup()
for k,v in pairs(tables_to_clean) do
tables[k]={}
tables_to_clean[k]=nil
end
end
function LibCompress:OnLoad() end
local function setCleanupTables(...)
if tTimer then
tTimer:Start()
else
tTimer = ApolloTimer.Create(timeout, false, "OnCleanup", LibCompress)
end
for i=1,select("#",...) do
tables_to_clean[(select(i, ...))] = true
end
end
----------------------------------------------------------------------
----------------------------------------------------------------------
--
-- compression algorithms
--------------------------------------------------------------------------------
-- LZW codec
-- implemented by [email protected]
-- encode is used to uniquely encode a number into a sequence of bytes that can be decoded using decode()
-- the bytes returned by this do not contain "\000"
local bytes = {}
local function encode(x)
for k = 1, #bytes do bytes[k] = nil end
local xmod
x, xmod = math_modf(x/255)
xmod = xmod * 255
bytes[#bytes + 1] = xmod
while x > 0 do
x, xmod = math_modf(x/255)
xmod = xmod * 255
bytes[#bytes + 1] = xmod
end
if #bytes == 1 and bytes[1] > 0 and bytes[1] < 250 then
return string_char(bytes[1])
else
for i = 1, #bytes do bytes[i] = bytes[i] + 1 end
return string_char(256 - #bytes, unpack(bytes))
end
end
--decode converts a unique character sequence into its equivalent number, from ss, beginning at the ith char.
-- returns the decoded number and the count of characters used in the decode process.
local function decode(ss,i)
i = i or 1
local a = string_byte(ss,i,i)
if a > 249 then
local r = 0
a = 256 - a
for n = i+a, i+1, -1 do
r = r * 255 + string_byte(ss,n,n) - 1
end
return r, a + 1
else
return a, 1
end
end
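-- Example (illustrative): encode(100) is the single byte "\100", since values 1-249 fit in one byte;
-- larger values are written as a length marker above 249 followed by base-255 digits, so encode(300)
-- is a three-byte sequence and decode(encode(300)) returns 300 together with the byte count 3.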
-- Compresses the given uncompressed string.
-- The result always carries a one-byte codec prefix, so in the worst case (incompressible data,
-- stored as "\001"..data) the returned string is exactly one byte longer than the passed string.
-- the returned string will only contain "\000" characters in rare circumstances, and will contain none if the
-- source string has none.
local dict = {}
function LibCompress:CompressLZW(uncompressed)
if type(uncompressed) == "string" then
local dict_size = 256
for k in pairs(dict) do
dict[k] = nil
end
local result = {"\002"}
local w = ''
local ressize = 1
for i = 0, 255 do
dict[string_char(i)] = i
end
for i = 1, #uncompressed do
local c = uncompressed:sub(i,i)
local wc = w..c
if dict[wc] then
w = wc
else
dict[wc] = dict_size
dict_size = dict_size +1
local r = encode(dict[w])
ressize = ressize + #r
result[#result + 1] = r
w = c
end
end
if w then
local r = encode(dict[w])
ressize = ressize + #r
result[#result + 1] = r
end
if (#uncompressed+1) > ressize then
return table_concat(result)
else
return string_char(1)..uncompressed
end
else
return nil, "Can only compress strings"
end
end
-- if the passed string is a compressed string, this will decompress it and return the decompressed string.
-- Otherwise it returns an error message.
-- compressed strings are marked by beginning with "\002"
function LibCompress:DecompressLZW(compressed)
if type(compressed) == "string" then
if compressed:sub(1,1) ~= "\002" then
return nil, "Can only decompress LZW compressed data ("..tostring(compressed:sub(1,1))..")"
end
compressed = compressed:sub(2)
local dict_size = 256
for k in pairs(dict) do
dict[k] = nil
end
for i = 0, 255 do
dict[i] = string_char(i)
end
local result = {}
local t = 1
local delta, k
k, delta = decode(compressed,t)
t = t + delta
result[#result+1] = dict[k]
local w = dict[k]
local entry
while t <= #compressed do
k, delta = decode(compressed,t)
t = t + delta
entry = dict[k] or (w..w:sub(1,1))
result[#result+1] = entry
dict[dict_size] = w..entry:sub(1,1)
dict_size = dict_size + 1
w = entry
end
return table_concat(result)
else
return nil, "Can only uncompress strings"
end
end
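--[[ Usage sketch (illustrative round trip; LibCompress stands for the loaded library table, and the
data is chosen repetitive enough for LZW to pay off):
local data = ("a"):rep(100)
local packed = LibCompress:CompressLZW(data)         -- starts with "\002" when compression helped
local plain, err = LibCompress:DecompressLZW(packed)
assert(plain == data, err)
-- If compression would not pay off, CompressLZW returns "\001"..data instead, which DecompressLZW
-- rejects; the generic Compress/Decompress pair further below handles that case transparently.
]]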
--------------------------------------------------------------------------------
-- Huffman codec
-- implemented by Galmok of European Stormrage (Horde), [email protected]
local function addCode(tree, bcode,len)
if tree then
tree.bcode = bcode;
tree.blength = len;
if tree.c1 then
addCode(tree.c1, bit_bor(bcode, bit_lshift(1,len)), len+1)
end
if tree.c2 then
addCode(tree.c2, bcode, len+1)
end
end
end
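-- escape_code rewrites a code word so that no two adjacent set bits ever appear in the output
-- stream: every 1 bit gets a 0 inserted right after it (in the order addBits emits bits). The
-- two-bit pattern "11", written after each symbol/code pair below, can therefore serve as an
-- unambiguous terminator; getCode and unescape_code on the decompression side find that marker
-- and strip the inserted padding again.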
local function escape_code(code, len)
local escaped_code = 0;
local b;
local l = 0;
for i = len-1, 0,- 1 do
b = bit_band( code, bit_lshift(1,i))==0 and 0 or 1
escaped_code = bit_lshift(escaped_code,1+b) + b
l = l + b;
end
return escaped_code, len+l
end
tables.Huffman_compressed = {}
tables.Huffman_large_compressed = {}
local compressed_size = 0
local remainder;
local remainder_length;
local function addBits(tbl, code, len)
remainder = remainder + bit_lshift(code, remainder_length)
remainder_length = len + remainder_length
if remainder_length > 32 then
return true -- Bits lost due to too long code-words.
end
while remainder_length>=8 do
compressed_size = compressed_size + 1
tbl[compressed_size] = string_char(bit_band(remainder, 255))
remainder = bit_rshift(remainder, 8)
remainder_length = remainder_length -8
end
end
-- word size for this huffman algorithm is 8 bits (1 byte). This means the best compression is representing 1 byte with 1 bit, i.e. compress to 0.125 of original size.
function LibCompress:CompressHuffman(uncompressed)
if type(uncompressed) ~= "string" then
return nil, "Can only compress strings"
end
if #uncompressed == 0 then
return "\001"
end
-- make histogram
local hist = {}
local n = 0
-- don't have to use all data to make the histogram
local uncompressed_size = string_len(uncompressed)
local c;
for i = 1, uncompressed_size do
c = string_byte(uncompressed, i)
hist[c] = (hist[c] or 0) + 1
end
--Start with as many leaves as there are symbols.
local leafs = {}
local leaf;
local symbols = {}
for symbol, weight in pairs(hist) do
leaf = { symbol=string_char(symbol), weight=weight };
symbols[symbol] = leaf;
table_insert(leafs, leaf)
end
--Enqueue all leaf nodes into the first queue (by probability in increasing order so that the least likely item is in the head of the queue).
sort(leafs, function(a,b) if a.weight<b.weight then return true elseif a.weight>b.weight then return false else return nil end end)
local nLeafs = #leafs
-- create tree
local huff = {}
--While there is more than one node in the queues:
local l,h, li, hi, leaf1, leaf2
local newNode;
while (#leafs+#huff > 1) do
-- Dequeue the two nodes with the lowest weight.
-- Dequeue first
if not next(huff) then
li, leaf1 = next(leafs)
table_remove(leafs, li)
elseif not next(leafs) then
hi, leaf1 = next(huff)
table_remove(huff, hi)
else
li, l = next(leafs);
hi, h = next(huff);
if l.weight<=h.weight then
leaf1 = l;
table_remove(leafs, li)
else
leaf1 = h;
table_remove(huff, hi)
end
end
-- Dequeue second
if not next(huff) then
li, leaf2 = next(leafs)
table_remove(leafs, li)
elseif not next(leafs) then
hi, leaf2 = next(huff)
table_remove(huff, hi)
else
li, l = next(leafs);
hi, h = next(huff);
if l.weight<=h.weight then
leaf2 = l;
table_remove(leafs, li)
else
leaf2 = h;
table_remove(huff, hi)
end
end
--Create a new internal node, with the two just-removed nodes as children (either node can be either child) and the sum of their weights as the new weight.
newNode = { c1 = leaf1, c2 = leaf2, weight = leaf1.weight+leaf2.weight }
table_insert(huff,newNode)
end
if #leafs>0 then
li, l = next(leafs)
table_insert(huff, l)
table_remove(leafs, li)
end
huff = huff[1];
-- assign codes to each symbol
-- c1 = "0", c2 = "1"
-- As a common convention, bit '0' represents following the left child and bit '1' represents following the right child.
-- c1 = left, c2 = right
addCode(huff,0,0);
if huff then
huff.bcode = 0
huff.blength = 1
end
-- READING
-- bitfield = 0
-- bitfield_len = 0
-- read byte1
-- bitfield = bitfield + bit_lshift(byte1, bitfield_len)
-- bitfield_len = bitfield_len + 8
-- read byte2
-- bitfield = bitfield + bit_lshift(byte2, bitfield_len)
-- bitfield_len = bitfield_len + 8
-- (use 5 bits)
-- word = bit_band( bitfield, bit_lshift(1,5)-1)
-- bitfield = bit_rshift( bitfield, 5)
-- bitfield_len = bitfield_len - 5
-- read byte3
-- bitfield = bitfield + bit_lshift(byte3, bitfield_len)
-- bitfield_len = bitfield_len + 8
-- WRITING
remainder = 0;
remainder_length = 0;
local compressed = tables.Huffman_compressed
--compressed_size = 0
-- first byte is the codec tag: \003 = 8-bit word Huffman compressed, \001 = stored uncompressed (used below when compression does not pay off)
compressed[1] = "\003"
-- Header: byte 0=#leafs, byte 1-3=size of uncompressed data
-- max 2^24 bytes
local l = string_len(uncompressed)
compressed[2] = string_char(bit_band(nLeafs-1, 255)) -- number of leafs
compressed[3] = string_char(bit_band(l, 255)) -- bit 0-7
compressed[4] = string_char(bit_band(bit_rshift(l, 8), 255)) -- bit 8-15
compressed[5] = string_char(bit_band(bit_rshift(l, 16), 255)) -- bit 16-23
compressed_size = 5
-- create symbol/code map
for symbol, leaf in pairs(symbols) do
addBits(compressed, symbol, 8);
if addBits(compressed, escape_code(leaf.bcode, leaf.blength)) then
-- code word too long. Needs new revision to be able to handle more than 32 bits
return string_char(0)..uncompressed
end
addBits(compressed, 3, 2);
end
-- create huffman code
local large_compressed = tables.Huffman_large_compressed
local large_compressed_size = 0
local ulimit
for i = 1, l, 200 do
ulimit = l<(i+199) and l or (i+199)
for sub_i = i, ulimit do
c = string_byte(uncompressed, sub_i)
addBits(compressed, symbols[c].bcode, symbols[c].blength)
end
large_compressed_size = large_compressed_size + 1
large_compressed[large_compressed_size] = table_concat(compressed, "", 1, compressed_size)
compressed_size = 0
end
-- add remaining bits (if any)
if remainder_length>0 then
large_compressed_size = large_compressed_size + 1
large_compressed[large_compressed_size] = string_char(remainder)
end
local compressed_string = table_concat(large_compressed, "", 1, large_compressed_size)
-- is compression worth it? If not, return uncompressed data.
if (#uncompressed+1) <= #compressed_string then
return "\001"..uncompressed
end
setCleanupTables("Huffman_compressed", "Huffman_large_compressed")
return compressed_string
end
-- lookup table (cached between calls)
local lshiftMask = {}
setmetatable(lshiftMask, {
__index = function (t, k)
local v = bit_lshift(1, k)
rawset(t, k, v)
return v
end
})
-- lookup table (cached between calls)
local lshiftMinusOneMask = {}
setmetatable(lshiftMinusOneMask, {
__index = function (t, k)
local v = bit_lshift(1, k)-1
rawset(t, k, v)
return v
end
})
local function getCode(bitfield, field_len)
if field_len>=2 then
local b;
local p = 0;
for i = 0, field_len-1 do
b = bit_band(bitfield, lshiftMask[i])
if not (p==0) and not (b == 0) then
-- found 2 bits set right after each other (stop bits)
return bit_band( bitfield, lshiftMinusOneMask[i-1]), i-1,
bit_rshift(bitfield, i+1), field_len-i-1
end
p = b
end
end
return nil
end
local function unescape_code(code, code_len)
local unescaped_code=0;
local b;
local l = 0;
local i = 0
while i < code_len do
b = bit_band( code, lshiftMask[i])
if not (b==0) then
unescaped_code = bit_bor(unescaped_code, lshiftMask[l])
i = i + 1
end
i = i + 1
l = l + 1
end
return unescaped_code, l
end
tables.Huffman_uncompressed = {}
tables.Huffman_large_uncompressed = {} -- will always be as big as the largest string ever decompressed. Bad, but clearing it every time takes precious time.
function LibCompress:DecompressHuffman(compressed)
if type(compressed) ~= "string" then
return nil, "Can only uncompress strings"
end
local compressed_size = #compressed
--decode header
local info_byte = string_byte(compressed)
-- is data compressed
if info_byte==1 then
return compressed:sub(2) --return uncompressed data
end
if not (info_byte==3) then
return nil, "Can only decompress Huffman compressed data ("..tostring(info_byte)..")"
end
local num_symbols = string_byte(string_sub(compressed, 2, 2)) + 1
local c0 = string_byte(string_sub(compressed, 3, 3))
local c1 = string_byte(string_sub(compressed, 4, 4))
local c2 = string_byte(string_sub(compressed, 5, 5))
local orig_size = c2*65536 + c1*256 + c0
if orig_size==0 then
return "";
end
-- decode code->symbol map
local bitfield = 0;
local bitfield_len = 0;
local map = {} -- only table not reused in Huffman decode.
setmetatable(map, {
__index = function (t, k)
local v = {}
rawset(t, k, v)
return v
end
})
local i = 6; -- byte 1-5 are header bytes
local c, cl;
local minCodeLen = 1000;
local maxCodeLen = 0;
local symbol, code, code_len, _bitfield, _bitfield_len;
local n = 0;
local state = 0; -- 0 = get symbol (8 bits), 1 = get code (varying bits, ends with 2 bits set)
while n<num_symbols do
if i>compressed_size then
return nil, "Cannot decode map"
end
c = string_byte(compressed, i)
bitfield = bit_bor(bitfield, bit_lshift(c, bitfield_len))
bitfield_len = bitfield_len + 8
if state == 0 then
symbol = bit_band(bitfield, 255)
bitfield = bit_rshift(bitfield, 8)
bitfield_len = bitfield_len -8
state = 1 -- search for code now
else
code, code_len, _bitfield, _bitfield_len = getCode(bitfield, bitfield_len)
if code then
bitfield, bitfield_len = _bitfield, _bitfield_len
c, cl = unescape_code(code, code_len)
map[cl][c]=string_char(symbol)
minCodeLen = cl<minCodeLen and cl or minCodeLen
maxCodeLen = cl>maxCodeLen and cl or maxCodeLen
--print("symbol: "..string_char(symbol).." code: "..tobinary(c, cl))
n = n + 1
state = 0 -- search for next symbol (if any)
end
end
i=i+1
end
-- don't create new subtables for entries not in the map. Waste of space.
-- But do return an empty table to prevent runtime errors. (instead of returning nil)
local mt = {}
setmetatable(map, {
__index = function (t, k)
return mt
end
})
local uncompressed = tables.Huffman_uncompressed
local large_uncompressed = tables.Huffman_large_uncompressed
local uncompressed_size = 0
local large_uncompressed_size = 0
local test_code
local test_code_len = minCodeLen;
local symbol;
local dec_size = 0;
compressed_size = compressed_size + 1
local temp_limit = 200; -- first limit of uncompressed data. large_uncompressed will hold strings of length 200
temp_limit = temp_limit > orig_size and orig_size or temp_limit
while true do
if test_code_len<=bitfield_len then
test_code=bit_band( bitfield, lshiftMinusOneMask[test_code_len])
symbol = map[test_code_len][test_code]
if symbol then
uncompressed_size = uncompressed_size + 1
uncompressed[uncompressed_size]=symbol
dec_size = dec_size + 1
if dec_size >= temp_limit then
if dec_size>=orig_size then -- checked here for speed reasons
break;
end
-- flush decoded output to large_uncompressed in chunks (200 uncompressed bytes at a time)
large_uncompressed_size = large_uncompressed_size + 1
large_uncompressed[large_uncompressed_size] = table_concat(uncompressed, "", 1, uncompressed_size)
uncompressed_size = 0
temp_limit = temp_limit + 200 -- repeated chunk size is 200 uncompressed bytes
temp_limit = temp_limit > orig_size and orig_size or temp_limit
end
bitfield = bit_rshift(bitfield, test_code_len)
bitfield_len = bitfield_len - test_code_len
test_code_len = minCodeLen
else
test_code_len = test_code_len + 1
if test_code_len>maxCodeLen then
return nil, "Decompression error at "..tostring(i).."/"..tostring(#compressed)
end
end
else
c = string_byte(compressed, i)
bitfield = bitfield + bit_lshift(c or 0, bitfield_len)
bitfield_len = bitfield_len + 8
if i > compressed_size then
break;
end
i = i + 1
end
end
setCleanupTables("Huffman_uncompressed", "Huffman_large_uncompressed")
return table_concat(large_uncompressed, "", 1, large_uncompressed_size)..table_concat(uncompressed, "", 1, uncompressed_size)
end
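--[[ Usage sketch (illustrative; LibCompress stands for the loaded library table):
local data = ("the quick brown fox jumps over the lazy dog "):rep(20)
local packed = LibCompress:CompressHuffman(data)
local plain, err = LibCompress:DecompressHuffman(packed)
assert(plain == data, err)
-- If compression would not shrink the data, CompressHuffman returns "\001"..data, which
-- DecompressHuffman passes through unchanged (the info_byte == 1 branch above).
]]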
--------------------------------------------------------------------------------
-- Generic codec interface
function LibCompress:DecompressUncompressed(data)
if type(data)~="string" then
return nil, "Can only handle strings"
end
if string.byte(data) ~= 1 then
return nil, "Can only handle uncompressed data"
end
return data:sub(2)
end
local compression_methods = {
[2] = LibCompress.CompressLZW,
[3] = LibCompress.CompressHuffman
}
local decompression_methods = {
[1] = LibCompress.DecompressUncompressed,
[2] = LibCompress.DecompressLZW,
[3] = LibCompress.DecompressHuffman
}
-- try all compression codecs and return best result
function LibCompress:Compress(data)
local method = next(compression_methods)
local result = compression_methods[method](self, data);
local n;
method = next(compression_methods, method)
while method do
n = compression_methods[method](self, data)
if #n < #result then
result = n
end
method = next(compression_methods, method)
end
return result
end
function LibCompress:Decompress(data)
local header_info = string.byte(data)
if decompression_methods[header_info] then
return decompression_methods[header_info](self, data)
else
return nil, "Unknown compression method ("..tostring(header_info)..")"
end
end
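--[[ Usage sketch (illustrative): Compress tries every registered codec and keeps the smallest
result, while Decompress dispatches on the one-byte codec tag, so the pair can be used without
knowing which codec won. "someString" is a placeholder for your own serialized data.
local packed = LibCompress:Compress(someString)
local original, err = LibCompress:Decompress(packed)
assert(original == someString, err)
]]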
----------------------------------------------------------------------
----------------------------------------------------------------------
--
-- Encoding algorithms
--------------------------------------------------------------------------------
-- Prefix encoding algorithm
-- implemented by Galmok of European Stormrage (Horde), [email protected]
--[[
Howto: Encode and Decode:
3 functions are supplied, 2 of them are variants of the first. They return a table with functions to encode and decode text.
table, msg = LibCompress:GetEncodeTable(reservedChars, escapeChars, mapChars)
reservedChars: The characters in this string will not appear in the encoded data.
escapeChars: A string of characters used as escape-characters (don't supply more than needed). #escapeChars >= 1
mapChars: First characters in reservedChars maps to first characters in mapChars. (#mapChars <= #reservedChars)
return value:
table
if nil then msg holds an error message, otherwise use like this:
encoded_message = table:Encode(message)
message = table:Decode(encoded_message)
GetAddonEncodeTable: Sets up encoding for the addon channel (\000 is encoded)
GetChatEncodeTable: Sets up encoding for the chat channel (many bytes encoded, see the function for details)
Except for the mapped characters, all encoding will be with 1 escape character followed by 1 suffix, i.e. 2 bytes.
]]
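--[[ Usage sketch (illustrative): reserve "\000" and "\124", escape with "\001".
local codec, msg = LibCompress:GetEncodeTable("\000\124", "\001")
assert(codec, msg)
local encoded = codec:Encode("raw\000data\124here")   -- contains neither "\000" nor "\124"
assert(codec:Decode(encoded) == "raw\000data\124here")
]]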
-- to be able to match any requested byte value, the search string must be preprocessed
-- characters to escape with %:
-- ( ) . % + - * ? [ ] ^ $
-- "illegal" byte values:
-- 0 is replaced by %z
local gsub_escape_table = {
['\000'] = "%z",
[('(')] = "%(",
[(')')] = "%)",
[('.')] = "%.",
[('%')] = "%%",
[('+')] = "%+",
[('-')] = "%-",
[('*')] = "%*",
[('?')] = "%?",
[('[')] = "%[",
[(']')] = "%]",
[('^')] = "%^",
[('$')] = "%$"
}
local function escape_for_gsub(str)
return str:gsub("([%z%(%)%.%%%+%-%*%?%[%]%^%$])", gsub_escape_table)
end
function LibCompress:GetEncodeTable(reservedChars, escapeChars, mapChars)
reservedChars = reservedChars or ""
escapeChars = escapeChars or ""
mapChars = mapChars or ""
-- an escape character must be supplied
if escapeChars == "" then
return nil, "No escape characters supplied"
end
if #reservedChars < #mapChars then
return nil, "Number of reserved characters must be at least as many as the number of mapped chars"
end
if reservedChars == "" then
return nil, "No characters to encode"
end
-- list of characters that must be encoded
local encodeBytes = reservedChars..escapeChars..mapChars
-- build list of bytes not available as a suffix to a prefix byte
local taken = {}
for i=1, string_len(encodeBytes) do
taken[string.sub(encodeBytes, i, i)] = true
end
-- allocate a table to hold encode/decode strings/functions
local codecTable = {}
-- the encoding can be a single gsub, but the decoding can require multiple gsubs
local decode_func_string = {}
local encode_search = {}
local encode_translate = {}
local decode_search = {}
local decode_translate = {}
local c,r,i,to,from
local escapeCharIndex = 0
-- map single byte to single byte
if #mapChars > 0 then
for i=1, #mapChars do
from = string.sub(reservedChars, i, i)
to = string.sub(mapChars, i, i)
encode_translate[from] = to
table.insert(encode_search, from)
decode_translate[to] = from
table.insert(decode_search, to)
end
codecTable["decode_search"..tostring(escapeCharIndex)] = "([".. escape_for_gsub(table.concat(decode_search)).."])"
codecTable["decode_translate"..tostring(escapeCharIndex)] = decode_translate
table_insert(decode_func_string, "str = str:gsub(self.decode_search"..tostring(escapeCharIndex)..", self.decode_translate"..tostring(escapeCharIndex)..");")
end
-- map single byte to double-byte
escapeCharIndex = escapeCharIndex +1
local escapeChar = string.sub(escapeChars, escapeCharIndex, escapeCharIndex)
r = 0 -- suffix char value to the escapeChar
decode_search = {}
decode_translate = {}
for i = 1, string_len(encodeBytes) do
c = string.sub(encodeBytes, i, i)
if not encode_translate[c] then
-- this loop will update escapeChar and r
while r<256 and taken[string.char(r)] do
r=r+1
if r>255 then -- switch to next escapeChar
if escapeChar == "" then -- we are out of escape chars and we need more!
return nil, "Out of escape characters"
end
codecTable["decode_search"..tostring(escapeCharIndex)] = escape_for_gsub(escapeChar).."([".. escape_for_gsub(table.concat(decode_search)).."])"
codecTable["decode_translate"..tostring(escapeCharIndex)] = decode_translate
table_insert(decode_func_string, "str = str:gsub(self.decode_search"..tostring(escapeCharIndex)..", self.decode_translate"..tostring(escapeCharIndex)..");")
escapeCharIndex = escapeCharIndex + 1
escapeChar = string.sub(escapeChars, escapeCharIndex, escapeCharIndex)
r = 0
decode_search = {}
decode_translate = {}
end
end
encode_translate[c] = escapeChar..string.char(r)
table.insert(encode_search, c)
decode_translate[string.char(r)] = c
table.insert(decode_search, string.char(r))
r = r + 1
end
end
if r>0 then
codecTable["decode_search"..tostring(escapeCharIndex)] = escape_for_gsub(escapeChar).."([".. escape_for_gsub(table.concat(decode_search)).."])"
codecTable["decode_translate"..tostring(escapeCharIndex)] = decode_translate
table_insert(decode_func_string, "str = str:gsub(self.decode_search"..tostring(escapeCharIndex)..", self.decode_translate"..tostring(escapeCharIndex)..");")
end
-- change last line from "str = ...;" to "return ...;";
decode_func_string[#decode_func_string] = decode_func_string[#decode_func_string]:gsub("str = (.*);", "return %1;");
decode_func_string = "return function(self, str) "..table.concat(decode_func_string).." end"
encode_search = "([".. escape_for_gsub(table.concat(encode_search)).."])"
decode_search = escape_for_gsub(escapeChars).."([".. escape_for_gsub(table.concat(decode_search)).."])"
local encode_func = assert(loadstring("return function(self, str) return str:gsub(self.encode_search, self.encode_translate); end"))()
local decode_func = assert(loadstring(decode_func_string))()
codecTable.encode_search = encode_search
codecTable.encode_translate = encode_translate
codecTable.Encode = encode_func
codecTable.decode_search = decode_search
codecTable.decode_translate = decode_translate
codecTable.Decode = decode_func
codecTable.decode_func_string = decode_func_string -- to be deleted
return codecTable
end
-- Addons: Call this only once and reuse the returned table for all encodings/decodings.
function LibCompress:GetAddonEncodeTable(reservedChars, escapeChars, mapChars )
reservedChars = reservedChars or ""
escapeChars = escapeChars or ""
mapChars = mapChars or ""
-- Following byte values are not allowed:
-- \000
if escapeChars == "" then
escapeChars = "\001"
end
return self:GetEncodeTable( (reservedChars or "").."\000", escapeChars, mapChars)
end
-- Addons: Call this only once and reuse the returned table for all encodings/decodings.
function LibCompress:GetChatEncodeTable(reservedChars, escapeChars, mapChars)
reservedChars = reservedChars or ""
escapeChars = escapeChars or ""
mapChars = mapChars or ""
-- Following byte values are not allowed:
-- \000, s, S, \010, \013, \124, %
-- Because SendChatMessage will error if a UTF8 multibyte character is incomplete,
-- all character values above 127 have to be encoded to avoid this. This costs quite a bit of bandwidth (about 13-14%).
-- Also, because the drunken status of the receiver is unknown, strings used with SendChatMessage should be terminated with
-- an identifying byte value, after which the server MAY add "...hic!" or as much of it as fits(!).
-- Pass the identifying byte as a reserved character to this function to ensure the encoding doesn't contain that value.
-- or use this: local message, match = arg1:gsub("^(.*)\029.-$", "%1")
-- arg1 is message from channel, \029 is the string terminator, but may be used in the encoded datastream as well. :-)
-- This encoding will expand data anywhere from:
-- 0% (average with pure ascii text)
-- 53.5% (average with random data valued zero to 255)
-- 100% (only encoding data that encodes to two bytes)
local i
local r={}
for i=128, 255 do
table.insert(r, string.char(i))
end
reservedChars = "sS\000\010\013\124%"..table.concat(r)..(reservedChars or "")
if escapeChars == "" then
escapeChars = "\029\031"
end
if mapChars == "" then
mapChars = "\015\020";
end
return self:GetEncodeTable(reservedChars, escapeChars, mapChars)
end
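--[[ Typical pipeline sketch (illustrative; "serialized" is a placeholder for your own data):
local chatCodec = LibCompress:GetChatEncodeTable()    -- build once, reuse for every message
local payload = chatCodec:Encode(LibCompress:Compress(serialized))
-- ...send payload over the chat channel; on the receiving side:
local data, err = LibCompress:Decompress(chatCodec:Decode(payload))
]]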
--------------------------------------------------------------------------------
-- 7 bit encoding algorithm
-- implemented by Galmok of European Stormrage (Horde), [email protected]
-- The encoded data holds values from 0 to 127 inclusive. Additional encoding may be necessary.
-- This algorithm isn't exactly fast and should be used with care and consideration.
tables.encode7bit = {}
function LibCompress:Encode7bit(str)
local remainder = 0;
local remainder_length = 0;
local tbl = tables.encode7bit
local encoded_size = 0
local l=#str
for i=1,l do
local code = string.byte(str, i)
remainder = remainder + bit_lshift(code, remainder_length)
remainder_length = 8 + remainder_length
while remainder_length>=7 do
encoded_size = encoded_size + 1
tbl[encoded_size] = string_char(bit_band(remainder, 127))
remainder = bit_rshift(remainder, 7)
remainder_length = remainder_length -7
end
end
if remainder_length>0 then
encoded_size = encoded_size + 1
tbl[encoded_size] = string_char(remainder)
end
setCleanupTables("encode7bit")
return table.concat(tbl, "", 1, encoded_size)
end
tables.decode8bit = {}
function LibCompress:Decode7bit(str)
local bit8 = tables.decode8bit
local decoded_size = 0
local ch
local i=1
local bitfield_len=0
local bitfield=0
local l=#str
while true do
if bitfield_len >=8 then
decoded_size = decoded_size + 1
bit8[decoded_size] = string_char(bit_band(bitfield, 255))
bitfield = bit_rshift(bitfield, 8)
bitfield_len = bitfield_len - 8
end
ch=string_byte(str,i)
bitfield=bitfield+bit_lshift(ch or 0, bitfield_len)
bitfield_len = bitfield_len + 7
if i > l then
break
end
i=i+1
end
setCleanupTables("decode8bit")
return table.concat(bit8, "", 1, decoded_size)
end
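--[[ Usage sketch (illustrative): pack arbitrary bytes into the 0-127 range and back.
local raw = "\200\255\000\031data"
local seven = LibCompress:Encode7bit(raw)    -- every byte of 'seven' is in the range 0-127
assert(LibCompress:Decode7bit(seven) == raw)
]]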
----------------------------------------------------------------------
----------------------------------------------------------------------
--
-- Checksum/hash algorithms
--------------------------------------------------------------------------------
-- FCS16/32 checksum algorithms
-- converted from C by Galmok of European Stormrage (Horde), [email protected]
-- usage:
-- code = LibCompress:fcs16init()
-- code = LibCompress:fcs16update(code, data1)
-- code = LibCompress:fcs16update(code, data2)
-- code = LibCompress:fcs16update(code, data...)
-- code = LibCompress:fcs16final(code)
--
-- data = string
-- fcs16 provides a 16 bit checksum, fcs32 provides a 32 bit checksum.
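-- A minimal sketch following the call sequence documented above (illustrative only):
-- local code = LibCompress:fcs16init()
-- code = LibCompress:fcs16update(code, "some data")
-- code = LibCompress:fcs16final(code)   -- 16-bit checksum as a number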
--[[/* The following copyright notice concerns only the FCS hash algorithm
---------------------------------------------------------------------------
Copyright (c) 2003, Dominik Reichl <[email protected]>, Germany.
All rights reserved.
Distributed under the terms of the GNU General Public License v2.
This software is provided 'as is' with no explicit or implied warranties
in respect of its properties, including, but not limited to, correctness
and/or fitness for purpose.
---------------------------------------------------------------------------
*/]]
--// FCS-16 algorithm implemented as described in RFC 1331
local FCSINIT16 = 65535;
--// Fast 16 bit FCS lookup table
local fcs16tab = { [0]=0, 4489, 8978, 12955, 17956, 22445, 25910, 29887,
35912, 40385, 44890, 48851, 51820, 56293, 59774, 63735,
4225, 264, 13203, 8730, 22181, 18220, 30135, 25662,
40137, 36160, 49115, 44626, 56045, 52068, 63999, 59510,