// Compression.cs — KCS.Common.Shared (forked from ChineduOpara/KCS.Common.Shared), 310 lines, 10.3 KB
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.IO;
using System.IO.Compression;
using System.Data;
using System.Xml;
namespace KCS.Common.Shared
{
/// <summary>
/// Provides static compression-related methods.
/// </summary>
/// <remarks>
/// These routines are mainly used for converting a DataSet into byte[] so that it can be
/// transported through web services. XML comments above each method were added by 9OPARA7.
///
/// There are 2 sets of routines:
/// 1. Compress and de-compress a dataset with the Current Data. RowState will always be equal to DataRowState.Added.
/// 2. Compress and de-compress a dataset with the Current Data AND Original Data, preserving RowState.
///
/// #2 above will result in bigger size.
///
/// #1 usage:
/// Before sending the DataSet to a web service, call CompressDataSet and pass the resulting byte[] to
/// the Web Service. On the Web Service side, call the DeCompressDataSet or DeCompressDataTable to get back the
/// DataSet or DataTable, respectively.
///
/// #2 usage:
/// Before sending the DataSet to a web service, call CompressDataSetDiffGram and CompressDataSetSchema to return
/// the DataSet content and DataSet schema. Note the Schema and content cannot be stored in the same
/// XML file so we have to send the data and schema separately. On the Web Service side, call DeCompressDataSet
/// and pass the DiffGram and Schema byte[] to get back the dataset.
/// </remarks>
public static class Compression
{
/// <summary>
/// Compresses a Dataset's schema.
/// </summary>
/// <param name="ds">DataSet whose schema will be compressed.</param>
/// <returns>Byte array.</returns>
public static byte[] CompressDataSetSchema(DataSet ds)
{
return CompressDataSet(ds, true, false);
}
/// <summary>
/// Compresses a DataSet's DiffGram.
/// </summary>
/// <param name="ds">DataSet whose DiffGram will be compressed.</param>
/// <returns>Byte array.</returns>
public static byte[] CompressDataSetDiffGram(DataSet ds)
{
return CompressDataSet(ds, false, true);
}
/// <summary>
/// Compresses a DataRow. This works by attaching the row into a dummy table, and compressing that.
/// </summary>
/// <param name="dr">DataRow to be compressed.</param>
/// <returns>Byte array.</returns>
public static byte[] CompressDataRow(DataRow dr)
{
// Created by COpara. Not yet tested as of 2/16
DataTable dt = dr.Table.Clone();
dt.Rows.Add(dr.ItemArray);
return CompressDataTable(dt);
}
/// <summary>
/// Compresses a DataTable. This works by attaching a default DataSet to the table, and calling CompressDataSet.
/// </summary>
/// <param name="dt">DataTable to be compressed.</param>
/// <returns>Byte array.</returns>
public static byte[] CompressDataTable(DataTable dt)
{
if (dt.DataSet == null)
{
DataSet ds = new DataSet();
ds.Tables.Add(dt);
}
return CompressDataSet(dt.DataSet, false, false);
}
/// <summary>
/// Compresses a DataSet.
/// </summary>
/// <param name="ds">DataSet to be compressed.</param>
/// <returns>Byte array.</returns>
public static byte[] CompressDataSet(DataSet ds)
{
return CompressDataSet(ds, false, false);
}
/// <summary>
/// Compresses a DataSet.
/// </summary>
/// <param name="ds">DataSet to be compressed.</param>
/// <param name="schemaOnly">If TRUE, compresses only the schema.</param>
/// <param name="diffGram"></param>
/// <returns>Byte array.</returns>
private static byte[] CompressDataSet(DataSet ds, bool schemaOnly, bool diffGram)
{
SetNeutralDataTable(ds);
MemoryStream ms2 = new MemoryStream();
ICSharpCode.SharpZipLib.Zip.Compression.Deflater defl = new ICSharpCode.SharpZipLib.Zip.Compression.Deflater(9, false);
Stream s = new ICSharpCode.SharpZipLib.Zip.Compression.Streams.DeflaterOutputStream(ms2, defl);
MemoryStream ms3 = new MemoryStream();
if (schemaOnly)
{
ds.WriteXmlSchema(ms3);
}
else
{
ds.WriteXml(ms3, (diffGram) ? XmlWriteMode.DiffGram : XmlWriteMode.WriteSchema);
}
s.Write(ms3.ToArray(), 0, (int)ms3.Length);
s.Close();
return (byte[])ms2.ToArray();
}
/// <summary>
/// Converts all the DateTime columns in a dataset to Unspecified mode.
/// </summary>
/// <param name="ds">DataSet to convert.</param>
/// <remarks>
/// I don't think this method is properly named.
/// </remarks>
public static void SetNeutralDataTable(DataSet ds)
{
foreach (DataTable dt in ds.Tables)
{
foreach (DataColumn dc in dt.Columns)
{
if (dc.DataType == typeof(DateTime))
{
dc.DateTimeMode = DataSetDateTime.Unspecified;
}
}
}
}
/// <summary>
/// Converts all the DataTime columns in a dataset to UnspecifiedLocal mode.
/// </summary>
/// <param name="ds">DataSet to convert.</param>
/// <remarks>
/// I don't think this method is properly named.
/// </remarks>
public static void SetDefaultDataTable(DataSet ds)
{
foreach (DataTable dt in ds.Tables)
{
foreach (DataColumn dc in dt.Columns)
{
if (dc.DataType == typeof(DateTime))
{
dc.DateTimeMode = DataSetDateTime.UnspecifiedLocal;
}
}
}
}
/// <summary>
/// Decompresses a byte array representation of a DataTable.
/// It works by decompressing the entire DataSet and returning the first table.
/// </summary>
/// <param name="bytDs">Byte array to be decompressed.</param>
/// <returns>DataTable.</returns>
public static DataTable DecompressDataTable(byte[] bytDs)
{
return DecompressDataSet(bytDs).Tables[0];
}
/// <summary>
/// Decompresses a byte array representation of a DataSet.
/// </summary>
/// <param name="bytDs">Byte array to be decompressed.</param>
/// <returns>DataSet.</returns>
public static DataSet DecompressDataSet(byte[] bytDs)
{
return DecompressDataSet(bytDs, null);
}
/// <summary>
/// Decompresses a byte array representation of a DataSet.
/// </summary>
/// <param name="bytDs">Byte array to be decompressed.</param>
/// <param name="bytSchema"></param>
/// <returns>DataSet.</returns>
public static DataSet DecompressDataSet(byte[] bytDs, byte[] bytSchema)
{
MemoryStream ms = new MemoryStream(bytDs);
Stream s2 = new ICSharpCode.SharpZipLib.Zip.Compression.Streams.InflaterInputStream(ms);// Inflater(); // SharpZipLib.Zip.Compression.Streams.InflaterInputStream(ms);
DataSet ds = new DataSet();
if (bytSchema != null)
{
MemoryStream ms10 = new MemoryStream(bytSchema);
Stream s10 = new ICSharpCode.SharpZipLib.Zip.Compression.Streams.InflaterInputStream(ms10);// Inflater(); // SharpZipLib.Zip.Compression.Streams.InflaterInputStream(ms);
ds.ReadXmlSchema(s10);
ds.ReadXml(s2, XmlReadMode.DiffGram);
}
else
{
ds.ReadXml(s2);
}
SetDefaultDataTable(ds);
return ds;
}
/// <summary>
/// Compresses File Content
/// </summary>
/// <param name="ds">File Content to be compressed.</param>
/// <returns>Byte array.</returns>
public static byte[] CompressFileContent(byte[] blobContent)
{
MemoryStream ms2 = new MemoryStream(blobContent);
StreamReader reader = new StreamReader(ms2);
MemoryStream ms3 = new MemoryStream();
string str = reader.ReadToEnd();
ICSharpCode.SharpZipLib.Zip.Compression.Deflater defl = new ICSharpCode.SharpZipLib.Zip.Compression.Deflater(9 , false);
ms2.Position = 0;
Stream s = new ICSharpCode.SharpZipLib.Zip.Compression.Streams.DeflaterOutputStream(ms3, defl);
s.Write(ms2.ToArray(), 0,(int) ms2.Length);
s.Close();
return (byte[])ms3.ToArray();
}
/// <summary>
/// Decompresses a byte array
/// </summary>
/// <param name="bytDs">Byte array to be decompressed.</param>
/// <returns>byte[].</returns>
public static byte[] DeCompressFileContent(byte[] bytDs)
{
byte[] blobContent = new byte[100];
MemoryStream ms = new MemoryStream(bytDs);
ms.Position = 0;
Stream s2 = new ICSharpCode.SharpZipLib.Zip.Compression.Streams.InflaterInputStream(ms);// Inflater(); // SharpZipLib.Zip.Compression.Streams.InflaterInputStream(ms);
blobContent = ReadFully(s2, 100);
return (byte[]) blobContent;
}
/// <summary>
/// Reads data from a stream until the end is reached. The
/// data is returned as a byte array. An IOException is
/// thrown if any of the underlying IO calls fail.
/// </summary>
/// <param name="stream">The stream to read data from</param>
/// <param name="initialLength">The initial buffer length</param>
public static byte[] ReadFully(Stream stream, int initialLength)
{
// If we've been passed an unhelpful initial length, just
// use 32K.
if (initialLength < 1)
{
initialLength = 32768;
}
byte[] buffer = new byte[initialLength];
int read = 0;
int chunk;
while ((chunk = stream.Read(buffer, read, buffer.Length - read)) > 0)
{
read += chunk;
// If we've reached the end of our buffer, check to see if there's
// any more information
if (read == buffer.Length)
{
int nextByte = stream.ReadByte();
// End of stream? If so, we're done
if (nextByte == -1)
{
return buffer;
}
// Nope. Resize the buffer, put in the byte we've just
// read, and continue
byte[] newBuffer = new byte[buffer.Length * 2];
Array.Copy(buffer, newBuffer, buffer.Length);
newBuffer[read] = (byte)nextByte;
buffer = newBuffer;
read++;
}
}
// Buffer is now too big. Shrink it.
byte[] ret = new byte[read];
Array.Copy(buffer, ret, read);
return ret;
}
}
}