1
1
using System ;
2
- using System . Collections . Concurrent ;
3
- using System . Collections . Generic ;
2
+ using System . Diagnostics . CodeAnalysis ;
4
3
using System . IO ;
5
4
using System . Linq ;
5
+ using System . Runtime . InteropServices ;
6
6
using System . Threading ;
7
7
using System . Threading . Tasks ;
8
8
9
9
namespace LibBundle
10
10
{
11
11
public class BundleContainer
12
12
{
13
// Native Oodle LZ decompression entry point (oo2core_8_win64.dll).
// NOTE(review): parameter names a..i mirror the undocumented native signature;
// their semantics are unverified here — confirm against the Oodle SDK headers.
// Callers in this file pass zeros/IntPtr.Zero for them and 3 for ThreadModule.
[DllImport("oo2core_8_win64.dll")]
[SuppressMessage("Interoperability", "CA1401:不應看得見 P/Invoke")]
public static extern int OodleLZ_Decompress(byte[] buffer, int bufferSize, byte[] result, long outputBufferSize, int a, int b, int c, IntPtr d, long e, IntPtr f, IntPtr g, IntPtr h, long i, int ThreadModule);

// Native Oodle LZ compression entry point (oo2core_8_win64.dll).
// The int return value is used by callers in this file as the number of
// compressed bytes written into outputBuffer — presumably the compressed
// length; verify against the Oodle SDK documentation.
[DllImport("oo2core_8_win64.dll")]
[SuppressMessage("Interoperability", "CA1401:不應看得見 P/Invoke")]
public static extern int OodleLZ_Compress(ENCODE_TYPES format, byte[] buffer, long bufferSize, byte[] outputBuffer, COMPRESSTION_LEVEL level, IntPtr opts, long offs, long unused, IntPtr scratch, long scratch_size);
17
19
public enum ENCODE_TYPES
18
20
{
@@ -63,15 +65,15 @@ public enum COMPRESSTION_LEVEL
63
65
// Header fields read by Initialize and zeroed by the packing constructor.
// NOTE(review): meaning within the bundle header is unknown — observed as 0;
// confirm against the bundle format specification.
public int unknown5; // 0
public int unknown6; // 0
65
67
66
// For UnPacking: open the bundle file at 'path' and read its header fields.
// The path is remembered so Read()/AppendAndSave() can reopen the file later.
// Fix: a 'using' declaration guarantees the reader (and its underlying
// FileStream) is disposed even when Initialize throws — the original only
// called Close() after Initialize, leaking the file handle on that path.
public BundleContainer(string path)
{
    this.path = path;
    using var br = new BinaryReader(File.Open(path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite));
    Initialize(br);
}
74
- //For UnPacking
76
+ // For UnPacking
75
77
public BundleContainer ( BinaryReader br )
76
78
{
77
79
Initialize ( br ) ;
@@ -94,7 +96,7 @@ private void Initialize(BinaryReader br)
94
96
unknown6 = br . ReadInt32 ( ) ;
95
97
}
96
98
97
- //For Packing
99
+ // For Packing
98
100
public BundleContainer ( )
99
101
{
100
102
offset = 0 ;
@@ -104,36 +106,34 @@ public BundleContainer()
104
106
unknown3 = unknown4 = unknown5 = unknown6 = 0 ;
105
107
}
106
108
107
// UnPacking: open the bundle file and return its decompressed contents.
// path: optional override; defaults to the path captured at construction.
// Fix: a 'using' declaration releases the file handle even when Read(br)
// throws — the original only closed the reader on the success path.
public virtual MemoryStream Read(string path = null)
{
    offset = 0;
    using var br = new BinaryReader(File.OpenRead(path ?? this.path));
    return Read(br);
}
118
- //UnPacking
118
+ // UnPacking
119
119
public virtual MemoryStream Read ( BinaryReader br )
120
120
{
121
121
br . BaseStream . Seek ( offset + 60 , SeekOrigin . Begin ) ;
122
122
123
123
var chunks = new int [ entry_count ] ;
124
- for ( int i = 0 ; i < entry_count ; i ++ )
124
+ for ( var i = 0 ; i < entry_count ; i ++ )
125
125
chunks [ i ] = br . ReadInt32 ( ) ;
126
126
127
127
var compressed = new byte [ entry_count ] [ ] ;
128
128
for ( int i = 0 ; i < entry_count ; i ++ )
129
129
compressed [ i ] = br . ReadBytes ( chunks [ i ] ) ;
130
130
131
- Parallel . For ( 0 , entry_count , i => {
132
- var size = ( i + 1 == entry_count ) ? uncompressed_size - ( chunk_size * ( entry_count - 1 ) ) : chunk_size ; // isLast ?
133
- var toSave = new byte [ size + 64 ] ;
134
- OodleLZ_Decompress ( compressed [ i ] , compressed [ i ] . Length , toSave , size , 0 , 0 , 0 , IntPtr . Zero , 0 , IntPtr . Zero , IntPtr . Zero , IntPtr . Zero , 0 , 3 ) ;
135
- compressed [ i ] = toSave . Take ( size ) . ToArray ( ) ;
136
- } ) ;
131
+ Parallel . For ( 0 , entry_count , i => {
132
+ var size = ( i + 1 == entry_count ) ? uncompressed_size - ( chunk_size * ( entry_count - 1 ) ) : chunk_size ; // isLast ?
133
+ var toSave = new byte [ size + 64 ] ;
134
+ _ = OodleLZ_Decompress ( compressed [ i ] , compressed [ i ] . Length , toSave , size , 0 , 0 , 0 , IntPtr . Zero , 0 , IntPtr . Zero , IntPtr . Zero , IntPtr . Zero , 0 , 3 ) ;
135
+ compressed [ i ] = toSave . Take ( size ) . ToArray ( ) ;
136
+ } ) ;
137
137
138
138
var data = new MemoryStream ( uncompressed_size ) ;
139
139
for ( int i = 0 ; i < entry_count ; i ++ )
@@ -144,10 +144,8 @@ public virtual MemoryStream Read(BinaryReader br)
144
144
145
145
// Append newData to the bundle file on disk and return the rebuilt bundle
// bytes. path: optional override; defaults to the stored path.
// Fix: dispose the source FileStream deterministically — the original handed
// it to the Stream overload and never closed it, leaking the file handle.
// NOTE(review): assumes the Stream overload does not retain the stream after
// returning (it produces a finished byte[]); disposing a Stream twice is safe
// if the overload also closes it — confirm against the overload's body.
public virtual byte[] AppendAndSave(Stream newData, string path = null)
{
    offset = 0;
    using var original = File.Open(path ?? this.path, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    return AppendAndSave(newData, original);
}
152
150
153
151
public virtual byte [ ] AppendAndSave ( Stream newData , Stream originalData )
@@ -158,7 +156,7 @@ public virtual byte[] AppendAndSave(Stream newData, Stream originalData)
158
156
159
157
var lastCunkCompressedSize = originalData . ReadByte ( ) | originalData . ReadByte ( ) << 8 | originalData . ReadByte ( ) << 16 | originalData . ReadByte ( ) << 24 ; // ReadInt32
160
158
161
- var lastCunkDecompressedSize = uncompressed_size - ( chunk_size * ( entry_count - 1 ) ) ;
159
+ var lastCunkDecompressedSize = uncompressed_size - chunk_size * ( entry_count - 1 ) ;
162
160
163
161
uncompressed_size = ( int ) ( size_decompressed += newData . Length ) ;
164
162
entry_count = uncompressed_size / chunk_size ;
@@ -168,7 +166,7 @@ public virtual byte[] AppendAndSave(Stream newData, Stream originalData)
168
166
var msToSave = new MemoryStream ( ) ;
169
167
var bw = new BinaryWriter ( msToSave ) ;
170
168
171
- msToSave . Seek ( 60 + ( entry_count * 4 ) , SeekOrigin . Begin ) ;
169
+ msToSave . Seek ( 60 + entry_count * 4 , SeekOrigin . Begin ) ;
172
170
var o = new byte [ compressed_size - lastCunkCompressedSize ] ;
173
171
originalData . Read ( o ) ;
174
172
bw . Write ( o ) ;
@@ -180,19 +178,19 @@ public virtual byte[] AppendAndSave(Stream newData, Stream originalData)
180
178
181
179
newData . Seek ( 0 , SeekOrigin . Begin ) ;
182
180
compressed_size -= lastCunkCompressedSize ;
183
- var NewChunkCompressedSizes = new int [ entry_count - ( OldChunkCompressedSizes . Length / 4 ) ] ;
181
+ var NewChunkCompressedSizes = new int [ entry_count - OldChunkCompressedSizes . Length / 4 ] ;
184
182
185
183
var FirstNewDataChunk = new byte [ Math . Min ( chunk_size - lastCunkDecompressedSize , newData . Length ) ] ;
186
184
newData . Read ( FirstNewDataChunk ) ;
187
- FirstNewDataChunk = lastCunkDecompressedData . Take ( lastCunkDecompressedSize ) . Concat ( FirstNewDataChunk ) . ToArray ( ) ; // Decompressed
185
+ FirstNewDataChunk = lastCunkDecompressedData . Take ( lastCunkDecompressedSize ) . Concat ( FirstNewDataChunk ) . ToArray ( ) ;
188
186
var CompressedChunk = new byte [ FirstNewDataChunk . Length + 548 ] ;
189
187
var CompressedLength = OodleLZ_Compress ( encoder , FirstNewDataChunk , FirstNewDataChunk . Length , CompressedChunk , Compression_Level , IntPtr . Zero , 0 , 0 , IntPtr . Zero , 0 ) ;
190
188
compressed_size += NewChunkCompressedSizes [ 0 ] = CompressedLength ;
191
- bw . Write ( CompressedChunk , 0 , CompressedLength ) ; // Compressed
189
+ bw . Write ( CompressedChunk , 0 , CompressedLength ) ; // The Length is important
192
190
var byteArrays = new byte [ NewChunkCompressedSizes . Length ] [ ] ;
193
- for ( int i = 1 ; i < NewChunkCompressedSizes . Length ; i ++ )
191
+ for ( var i = 1 ; i < NewChunkCompressedSizes . Length ; i ++ )
194
192
{
195
- var size = ( i + 1 == NewChunkCompressedSizes . Length ) ? uncompressed_size - ( chunk_size * ( entry_count - 1 ) ) : chunk_size ;
193
+ var size = ( i + 1 == NewChunkCompressedSizes . Length ) ? uncompressed_size - chunk_size * ( entry_count - 1 ) : chunk_size ;
196
194
newData . Read ( byteArrays [ i ] = new byte [ size ] ) ;
197
195
}
198
196
Parallel . For ( 1 , NewChunkCompressedSizes . Length , i => {
@@ -201,16 +199,14 @@ public virtual byte[] AppendAndSave(Stream newData, Stream originalData)
201
199
byteArrays [ i ] = by ;
202
200
Interlocked . Add ( ref compressed_size , NewChunkCompressedSizes [ i ] = l ) ;
203
201
} ) ;
204
- for ( int i = 0 ; i < NewChunkCompressedSizes . Length - 1 ; i ++ )
202
+ for ( int i = 1 ; i < NewChunkCompressedSizes . Length ; i ++ )
205
203
bw . Write ( byteArrays [ i ] , 0 , NewChunkCompressedSizes [ i ] ) ;
206
204
207
205
size_compressed = compressed_size ;
208
206
209
207
msToSave . Seek ( 60 , SeekOrigin . Begin ) ;
210
208
bw . Write ( OldChunkCompressedSizes ) ;
211
- for ( int i = 0 ; i < NewChunkCompressedSizes . Length ; i ++ )
212
- bw . Write ( NewChunkCompressedSizes [ i ] ) ;
213
-
209
+ foreach ( var t in NewChunkCompressedSizes ) bw . Write ( t ) ;
214
210
msToSave . Seek ( 0 , SeekOrigin . Begin ) ;
215
211
bw . Write ( uncompressed_size ) ;
216
212
bw . Write ( compressed_size ) ;
@@ -232,21 +228,21 @@ public virtual byte[] AppendAndSave(Stream newData, Stream originalData)
232
228
return result ;
233
229
}
234
230
235
- //Packing
231
+ // Packing
236
232
public virtual void Save ( Stream newData , string path )
237
233
{
238
- var bw = new BinaryWriter ( File . Open ( path , FileMode . Open , FileAccess . Write , FileShare . ReadWrite ) ) ;
234
+ var bw = new BinaryWriter ( File . OpenWrite ( path ) ) ;
239
235
240
236
uncompressed_size = ( int ) ( size_decompressed = newData . Length ) ;
241
237
entry_count = uncompressed_size / chunk_size ;
242
238
if ( uncompressed_size % chunk_size != 0 ) entry_count ++ ;
243
239
head_size = entry_count * 4 + 48 ;
244
240
245
- bw . BaseStream . Seek ( 60 + ( entry_count * 4 ) , SeekOrigin . Begin ) ;
241
+ bw . BaseStream . Seek ( 60 + entry_count * 4 , SeekOrigin . Begin ) ;
246
242
newData . Seek ( 0 , SeekOrigin . Begin ) ;
247
243
compressed_size = 0 ;
248
244
var chunks = new int [ entry_count ] ;
249
- for ( int i = 0 ; i < entry_count ; i ++ )
245
+ for ( var i = 0 ; i < entry_count ; i ++ )
250
246
{
251
247
var b = new byte [ i + 1 == entry_count ? uncompressed_size - ( entry_count - 1 ) * chunk_size : chunk_size ] ;
252
248
newData . Read ( b , 0 , b . Length ) ;
@@ -258,8 +254,7 @@ public virtual void Save(Stream newData, string path)
258
254
size_compressed = compressed_size ;
259
255
260
256
bw . BaseStream . Seek ( 60 , SeekOrigin . Begin ) ;
261
- for ( int i = 0 ; i < entry_count ; i ++ )
262
- bw . Write ( chunks [ i ] ) ;
257
+ foreach ( var c in chunks ) bw . Write ( c ) ;
263
258
264
259
bw . BaseStream . Seek ( 0 , SeekOrigin . Begin ) ;
265
260
bw . Write ( uncompressed_size ) ;
@@ -279,7 +274,7 @@ public virtual void Save(Stream newData, string path)
279
274
bw . Flush ( ) ;
280
275
bw . Close ( ) ;
281
276
}
282
- //Packing
277
+ // Packing
283
278
public virtual byte [ ] Save ( Stream newData )
284
279
{
285
280
var msToSave = new MemoryStream ( ) ;
@@ -290,12 +285,12 @@ public virtual byte[] Save(Stream newData)
290
285
if ( uncompressed_size % chunk_size != 0 ) entry_count ++ ;
291
286
head_size = entry_count * 4 + 48 ;
292
287
293
- msToSave . Seek ( 60 + ( entry_count * 4 ) , SeekOrigin . Begin ) ;
288
+ msToSave . Seek ( 60 + entry_count * 4 , SeekOrigin . Begin ) ;
294
289
newData . Seek ( 0 , SeekOrigin . Begin ) ;
295
290
compressed_size = 0 ;
296
291
var chunks = new int [ entry_count ] ;
297
292
var byteArrays = new byte [ entry_count ] [ ] ;
298
- for ( int i = 0 ; i < entry_count ; i ++ )
293
+ for ( var i = 0 ; i < entry_count ; i ++ )
299
294
{
300
295
var b = new byte [ i + 1 == entry_count ? uncompressed_size - ( entry_count - 1 ) * chunk_size : chunk_size ] ;
301
296
newData . Read ( byteArrays [ i ] = b ) ;
@@ -311,8 +306,7 @@ public virtual byte[] Save(Stream newData)
311
306
bw . Write ( byteArrays [ i ] , 0 , chunks [ i ] ) ;
312
307
313
308
msToSave . Seek ( 60 , SeekOrigin . Begin ) ;
314
- for ( int i = 0 ; i < entry_count ; i ++ )
315
- bw . Write ( chunks [ i ] ) ;
309
+ foreach ( var c in chunks ) bw . Write ( c ) ;
316
310
317
311
msToSave . Seek ( 0 , SeekOrigin . Begin ) ;
318
312
bw . Write ( uncompressed_size ) ;
0 commit comments