Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Contribute to GitLab
Sign in / Register
Toggle navigation
D
douban-api-proxy
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
captainwong
douban-api-proxy
Commits
5b9b36f9
Commit
5b9b36f9
authored
Sep 17, 2017
by
OzzieIsaacs
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Code cosmetics
parent
85068d75
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
1243 additions
and
1257 deletions
+1243
-1257
kthoom.js
cps/static/js/kthoom.js
+480
-486
unrar.js
cps/static/js/unrar.js
+645
-653
untar.js
cps/static/js/untar.js
+118
-118
No files found.
cps/static/js/kthoom.js
View file @
5b9b36f9
...
...
@@ -5,7 +5,7 @@
*
* Copyright(c) 2011 Google Inc.
* Copyright(c) 2011 antimatter15
*/
*/
/* Reference Documentation:
...
...
@@ -17,27 +17,27 @@
*/
// Console shims: guarantee window.console.log/dir exist so the rest of the
// file can log unconditionally on old browsers that lack a console.
// (Scrape artifact fixed: every statement was duplicated by the diff view.)
if (!window.console) {
    window.console = {};
    window.console.log = function(str) {};
    window.console.dir = function(str) {};
}
if (window.opera) {
    // Old Opera: route log output through opera.postError.
    window.console.log = function(str) { opera.postError(str); };
    window.console.dir = function(str) {};
}
// gets the element with the given id
// Prefers querySelector when available, otherwise falls back to
// getElementById. (Scrape artifact fixed: the body was duplicated by the
// diff view, leaving an unreachable second copy after the return.)
function getElem(id) {
    if (document.documentElement.querySelector) {
        // querySelector lookup
        return document.body.querySelector('#' + id);
    }
    // getElementById lookup
    return document.getElementById(id);
}
// Create the kthoom namespace object if it does not already exist.
// (Scrape artifact fixed: the assignment was duplicated by the diff view.)
if (window.kthoom == undefined) {
    window.kthoom = {};
}
// key codes
...
...
@@ -65,32 +65,32 @@ var imageFilenames = [];
// Module-level reader state.
// totalImages/lastCompletion track unarchiving progress; library holds the
// loaded books; hflip/vflip/fitMode are the current view transform
// (fitMode stores a kthoom.Key code — B presumably means "best fit",
// see updateScale; confirm against the key handler).
var totalImages = 0;
var lastCompletion = 0;
var library = {
    allBooks: [],
    currentBookNum: 0
};
var hflip = false, vflip = false, fitMode = kthoom.Key.B;
var canKeyNext = true, canKeyPrev = true;
// Persist the current view settings (rotation, flips, fit mode) to
// localStorage as a JSON string under the key "kthoom_settings".
// (Scrape artifact fixed: the assignment was duplicated by the diff view.)
kthoom.saveSettings = function() {
    localStorage.kthoom_settings = JSON.stringify({
        rotateTimes: kthoom.rotateTimes,
        hflip: hflip,
        vflip: vflip,
        fitMode: fitMode
    });
};
// Restore view settings previously stored by kthoom.saveSettings.
// Silently ignores missing/corrupt storage (best-effort by design:
// the empty catch is intentional, do not turn it into a crash).
kthoom.loadSettings = function() {
    try {
        // Anything shorter than 10 chars cannot be a valid settings blob.
        if (localStorage.kthoom_settings.length < 10) return;
        var s = JSON.parse(localStorage.kthoom_settings);
        kthoom.rotateTimes = s.rotateTimes;
        hflip = s.hflip;
        vflip = s.vflip;
        fitMode = s.fitMode;
    } catch (err) {
        // Ignore: no stored settings, or JSON was invalid.
    }
};
// Stores an image filename and its data: URI.
...
...
@@ -98,511 +98,505 @@ kthoom.loadSettings = function() {
// non-safe URL characters are encoded as %xx ?)
// This would save 25% on memory since base64-encoded strings are 4/3 the size of the binary
// Wraps one extracted archive entry as a displayable image.
// Guesses the MIME type from the file extension (png/jpg/jpeg/gif,
// otherwise undefined) and builds an object URL for its bytes.
// @param {Object} file - unarchived entry with .filename and .fileData.
kthoom.ImageFile = function(file) {
    this.filename = file.filename;
    var fileExtension = file.filename.split('.').pop().toLowerCase();
    var mimeType = fileExtension == 'png' ? 'image/png' :
        (fileExtension == 'jpg' || fileExtension == 'jpeg') ? 'image/jpeg' :
        fileExtension == 'gif' ? 'image/gif' : undefined;
    this.dataURI = createURLFromArray(file.fileData, mimeType);
    this.data = file;
};
// Build the SVG progress meter inside the #progress container:
// a striped "loading" bar (#meter), a blue "current page" bar (#meter2),
// a percentage label (#progress_title) and a page counter (#page).
// Clicking the bar jumps to the corresponding page.
// (Scrape artifact fixed: the whole body was duplicated by the diff view.)
kthoom.initProgressMeter = function() {
    var svgns = 'http://www.w3.org/2000/svg';
    var pdiv = $('#progress')[0]; // document.getElementById('progress');
    var svg = document.createElementNS(svgns, 'svg');
    svg.style.width = '100%';
    svg.style.height = '100%';

    var defs = document.createElementNS(svgns, 'defs');

    // Diagonal red/yellow stripe pattern used to fill the loading meter.
    var patt = document.createElementNS(svgns, 'pattern');
    patt.id = 'progress_pattern';
    patt.setAttribute('width', '30');
    patt.setAttribute('height', '20');
    patt.setAttribute('patternUnits', 'userSpaceOnUse');

    var rect = document.createElementNS(svgns, 'rect');
    rect.setAttribute('width', '100%');
    rect.setAttribute('height', '100%');
    rect.setAttribute('fill', '#cc2929');

    var poly = document.createElementNS(svgns, 'polygon');
    poly.setAttribute('fill', 'yellow');
    poly.setAttribute('points', '15,0 30,0 15,20 0,20');

    patt.appendChild(rect);
    patt.appendChild(poly);
    defs.appendChild(patt);
    svg.appendChild(defs);

    var g = document.createElementNS(svgns, 'g');

    // Grey rounded outline behind the bars.
    var outline = document.createElementNS(svgns, 'rect');
    outline.setAttribute('y', '1');
    outline.setAttribute('width', '100%');
    outline.setAttribute('height', '15');
    outline.setAttribute('fill', '#777');
    outline.setAttribute('stroke', 'white');
    outline.setAttribute('rx', '5');
    outline.setAttribute('ry', '5');
    g.appendChild(outline);

    // Right-aligned percentage label.
    var title = document.createElementNS(svgns, 'text');
    title.id = 'progress_title';
    title.appendChild(document.createTextNode('0%'));
    title.setAttribute('y', '13');
    title.setAttribute('x', '99.5%');
    title.setAttribute('fill', 'white');
    title.setAttribute('font-size', '12px');
    title.setAttribute('text-anchor', 'end');
    g.appendChild(title);

    // Striped bar: unarchiving progress (width set by setProgressMeter).
    var meter = document.createElementNS(svgns, 'rect');
    meter.id = 'meter';
    meter.setAttribute('width', '0%');
    meter.setAttribute('height', '17');
    meter.setAttribute('fill', 'url(#progress_pattern)');
    meter.setAttribute('rx', '5');
    meter.setAttribute('ry', '5');

    // Blue overlay bar: current reading position.
    var meter2 = document.createElementNS(svgns, 'rect');
    meter2.id = 'meter2';
    meter2.setAttribute('width', '0%');
    meter2.setAttribute('height', '17');
    meter2.setAttribute('opacity', '0.8');
    meter2.setAttribute('fill', '#007fff');
    meter2.setAttribute('rx', '5');
    meter2.setAttribute('ry', '5');

    g.appendChild(meter);
    g.appendChild(meter2);

    // Left-aligned "current/total" page counter.
    var page = document.createElementNS(svgns, 'text');
    page.id = 'page';
    page.appendChild(document.createTextNode('0/0'));
    page.setAttribute('y', '13');
    page.setAttribute('x', '0.5%');
    page.setAttribute('fill', 'white');
    page.setAttribute('font-size', '12px');
    g.appendChild(page);

    svg.appendChild(g);
    pdiv.appendChild(svg);

    // Click on the bar seeks to the page under the pointer.
    svg.onclick = function(e) {
        // Sum offsetLeft up the chain to get the bar's absolute x position
        // (Firefox-compatible alternative to offsetX).
        for (var x = pdiv, l = 0; x != document.documentElement; x = x.parentNode) {
            l += x.offsetLeft;
        }
        var page = Math.max(1, Math.ceil(((e.clientX - l) / pdiv.offsetWidth) * totalImages)) - 1;
        currentImage = page;
        updatePage();
    };
};
// Update the progress meter.
// @param {number} pct - fraction complete in [0, 1].
// @param {string=} opt_label - optional prefix for the percentage label.
// Smooths the displayed width by averaging with the previous value, and
// also refreshes the page counter and the blue position bar.
kthoom.setProgressMeter = function(pct, opt_label) {
    var pct = (pct * 100);
    var part = 1 / totalImages;
    var remain = ((pct - lastCompletion) / 100) / part;
    var fract = Math.min(1, remain);
    // Estimate: fully-extracted files plus the fraction of the current one.
    var smartpct = ((imageFiles.length / totalImages) + fract * part) * 100;
    if (totalImages == 0) smartpct = pct;
    var oldval = parseFloat(getElem('meter').getAttribute('width'));
    if (isNaN(oldval)) oldval = 0;
    // Exponential smoothing so the bar does not jump around.
    var weight = 0.5;
    smartpct = (weight * smartpct + (1 - weight) * oldval);
    if (pct == 100) smartpct = 100;
    if (!isNaN(smartpct)) {
        getElem('meter').setAttribute('width', smartpct + '%');
    }
    var title = getElem('progress_title');
    while (title.firstChild) title.removeChild(title.firstChild);
    var labelText = pct.toFixed(2) + '% ' + imageFiles.length + '/' + totalImages + '';
    if (opt_label) {
        labelText = opt_label + ' ' + labelText;
    }
    title.appendChild(document.createTextNode(labelText));
    // Blue bar shows the current reading position within the book.
    getElem('meter2').setAttribute('width',
        100 * (totalImages == 0 ? 0 : ((currentImage + 1) / totalImages)) + '%');
    var title = getElem('page');
    while (title.firstChild) title.removeChild(title.firstChild);
    title.appendChild(document.createTextNode((currentImage + 1) + '/' + totalImages));
    if (pct > 0) {
        //getElem('nav').className = '';
        getElem('progress').className = '';
    }
};
// Detect the archive type of the given ArrayBuffer by magic bytes and
// unarchive it asynchronously with the matching bitjs unarchiver
// (RAR "Rar!", ZIP "PK", otherwise tar is attempted). Progress/extract/
// finish events update the meter and append pages as they are extracted.
// NOTE(review): `unarchiver` is assigned without var here — presumably
// declared at module level outside this view; confirm.
function loadFromArrayBuffer(ab) {
    var start = (new Date).getTime();
    var h = new Uint8Array(ab, 0, 10);
    var pathToBitJS = '../../static/js/';
    if (h[0] == 0x52 && h[1] == 0x61 && h[2] == 0x72 && h[3] == 0x21) { //Rar!
        unarchiver = new bitjs.archive.Unrarrer(ab, pathToBitJS);
    } else if (h[0] == 80 && h[1] == 75) { //PK (Zip)
        unarchiver = new bitjs.archive.Unzipper(ab, pathToBitJS);
    } else { // Try with tar
        unarchiver = new bitjs.archive.Untarrer(ab, pathToBitJS);
    }
    // Listen for UnarchiveEvents.
    if (unarchiver) {
        unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.PROGRESS,
            function(e) {
                var percentage = e.currentBytesUnarchived / e.totalUncompressedBytesInArchive;
                totalImages = e.totalFilesInArchive;
                kthoom.setProgressMeter(percentage, 'Unzipping');
                // display nav
                lastCompletion = percentage * 100;
            });
        unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.EXTRACT,
            function(e) {
                // convert DecompressedFile into a bunch of ImageFiles
                if (e.unarchivedFile) {
                    var f = e.unarchivedFile;
                    // add any new pages based on the filename
                    if (imageFilenames.indexOf(f.filename) == -1) {
                        imageFilenames.push(f.filename);
                        imageFiles.push(new kthoom.ImageFile(f));
                    }
                }
                // display first page if we haven't yet
                if (imageFiles.length == currentImage + 1) {
                    updatePage();
                }
            });
        unarchiver.addEventListener(bitjs.archive.UnarchiveEvent.Type.FINISH,
            function(e) {
                var diff = ((new Date).getTime() - start) / 1000;
                console.log('Unarchiving done in ' + diff + 's');
            });
        unarchiver.start();
    } else {
        alert('Some error');
    }
}
// Create an object URL for the bytes of a typed-array view.
// @param {TypedArray} array - view whose byteOffset/byteLength select the data.
// @param {string|undefined} mimeType - MIME type for the resulting Blob.
// @returns {string} blob: object URL.
// @throws {string} when Blob or URL.createObjectURL support is missing.
// (Scrape artifact fixed: the body was duplicated mid-condition, making the
// original span syntactically invalid; unused locals `bb, url` removed.)
var createURLFromArray = function(array, mimeType) {
    var offset = array.byteOffset, len = array.byteLength;
    var blob;

    // TODO: Move all this browser support testing to a common place
    // and do it just once.

    // Blob constructor, see http://dev.w3.org/2006/webapi/FileAPI/#dfn-Blob.
    if (typeof Blob == 'function') {
        blob = new Blob([array], {type: mimeType});
    } else {
        throw 'Browser support for Blobs is missing.';
    }

    // Slice down to the view's window in its underlying buffer.
    if (blob.slice) {
        blob = blob.slice(offset, offset + len, mimeType);
    } else {
        throw 'Browser support for Blobs is missing.';
    }

    if ((typeof URL != 'function' && typeof URL != 'object') ||
        typeof URL.createObjectURL != 'function') {
        throw 'Browser support for Object URLs is missing';
    }

    return URL.createObjectURL(blob);
};
// Refresh the UI for the current page: update the "current/total" counter,
// the blue position bar, and show the page image (or a loading placeholder
// if it has not been extracted yet).
function updatePage() {
    var title = getElem('page');
    while (title.firstChild) title.removeChild(title.firstChild);
    title.appendChild(document.createTextNode((currentImage + 1) + '/' + totalImages));

    getElem('meter2').setAttribute('width',
        100 * (totalImages == 0 ? 0 : ((currentImage + 1) / totalImages)) + '%');

    if (imageFiles[currentImage]) {
        setImage(imageFiles[currentImage].dataURI);
    } else {
        // Page not extracted yet — show the loading placeholder.
        setImage('loading');
    }
}
// Draw the given image URL onto the #mainImage canvas, applying the current
// rotation/flip settings. The special URL 'loading' renders a placeholder.
// On image decode failure, draws an error message and — for small html/text
// entries — fetches and displays the raw content instead.
// (Scrape artifact fixed: two diff copies of the body were interleaved
// inside the nested callbacks; reconstructed to a single coherent copy.)
function setImage(url) {
    var canvas = $("#mainImage")[0];
    var x = $("#mainImage")[0].getContext('2d');
    $('#mainText').hide();
    if (url == 'loading') {
        updateScale(true);
        canvas.width = innerWidth - 100;
        canvas.height = 200;
        x.fillStyle = 'red';
        x.font = '50px sans-serif';
        x.strokeStyle = 'black';
        x.fillText('Loading Page #' + (currentImage + 1), 100, 100);
    } else {
        if ($('body').css('scrollHeight') / innerHeight > 1) {
            $('body').css('overflowY', 'scroll');
        }
        var img = new Image();
        img.onerror = function(e) {
            canvas.width = innerWidth - 100;
            canvas.height = 300;
            updateScale(true);
            x.fillStyle = 'orange';
            x.font = '50px sans-serif';
            x.strokeStyle = 'black';
            x.fillText('Page #' + (currentImage + 1) + ' (' +
                imageFiles[currentImage].filename + ')', 100, 100);
            x.fillStyle = 'red';
            x.fillText('Is corrupt or not an image', 100, 200);
            if (/(html|htm)$/.test(imageFiles[currentImage].filename)) {
                // HTML entry: fetch it and embed in an iframe via a data URL.
                var xhr = new XMLHttpRequest();
                xhr.open('GET', url, true);
                xhr.onload = function() {
                    //document.getElementById('mainText').style.display = '';
                    $("#mainText").css("display", "");
                    $("#mainText").innerHTML(
                        '<iframe style="width:100%;height:700px;border:0" src="data:text/html,' +
                        escape(xhr.responseText) +
                        '"></iframe>');
                };
                xhr.send(null);
            } else if (!/(jpg|jpeg|png|gif)$/.test(imageFiles[currentImage].filename) &&
                       imageFiles[currentImage].data.uncompressedSize < 10 * 1024) {
                // Small non-image entry: show it as plain text.
                var xhr = new XMLHttpRequest();
                xhr.open('GET', url, true);
                xhr.onload = function() {
                    $("#mainText").css("display", "");
                    $("#mainText").innerText(xhr.responseText);
                };
                xhr.send(null);
            }
        };
        img.onload = function() {
            var h = img.height, w = img.width, sw = w, sh = h;
            // Normalize rotateTimes into [0, 3].
            kthoom.rotateTimes = (4 + kthoom.rotateTimes) % 4;
            x.save();
            // 90/270 degree rotation swaps the canvas dimensions.
            if (kthoom.rotateTimes % 2 == 1) { sh = w; sw = h; }
            canvas.height = sh;
            canvas.width = sw;
            // Rotate about the canvas center, then move back to the
            // image's coordinate origin.
            x.translate(sw / 2, sh / 2);
            x.rotate(Math.PI / 2 * kthoom.rotateTimes);
            x.translate(-w / 2, -h / 2);
            if (vflip) {
                x.scale(1, -1);
                x.translate(0, -h);
            }
            if (hflip) {
                x.scale(-1, 1);
                x.translate(-w, 0);
            }
            // Hide while drawing to avoid a visible partial render.
            canvas.style.display = 'none';
            scrollTo(0, 0);
            x.drawImage(img, 0, 0);
            updateScale();
            canvas.style.display = '';
            $("body").css("overflowY", "");
            x.restore();
        };
        img.src = url;
    }
}
// Go back one page. Before the first page: wrap to the last page in a
// single-book library, move to the previous book if one exists, or freeze
// on the current page.
function showPrevPage() {
    currentImage--;
    if (currentImage < 0) {
        if (library.allBooks.length == 1) {
            currentImage = imageFiles.length - 1;
        } else if (library.currentBookNum > 0) {
            loadPrevBook();
        } else {
            // Freeze on the current page.
            currentImage++;
            return;
        }
    }
    updatePage();
}
// Advance one page. Past the last page: wrap to the first page in a
// single-book library, move to the next book if one exists, or freeze
// on the current page.
function showNextPage() {
    currentImage++;
    if (currentImage >= Math.max(totalImages, imageFiles.length)) {
        if (library.allBooks.length == 1) {
            currentImage = 0;
        } else if (library.currentBookNum < library.allBooks.length - 1) {
            loadNextBook();
        } else {
            // Freeze on the current page.
            currentImage--;
            return;
        }
    }
    updatePage();
}
// Apply the current fit mode to the #mainImage element's CSS:
// B = fit both dimensions, H = fit height, W = fit width, N = natural size.
// @param {boolean=} clear - when truthy, reset to natural size regardless
//     of fitMode (used while drawing placeholders).
// Persists settings as a side effect.
function updateScale(clear) {
    var mainImageStyle = getElem('mainImage').style;
    mainImageStyle.width = '';
    mainImageStyle.height = '';
    mainImageStyle.maxWidth = '';
    mainImageStyle.maxHeight = '';
    var maxheight = innerHeight - 15;
    // Leave room for the title bar unless it is in 'main' (fullscreen) mode.
    if (!/main/.test(getElem('titlebar').className)) {
        maxheight -= 25;
    }
    if (clear || fitMode == kthoom.Key.N) {
        // Natural size: leave all constraints cleared.
    } else if (fitMode == kthoom.Key.B) {
        mainImageStyle.maxWidth = '100%';
        mainImageStyle.maxHeight = maxheight + 'px';
    } else if (fitMode == kthoom.Key.H) {
        mainImageStyle.height = maxheight + 'px';
    } else if (fitMode == kthoom.Key.W) {
        mainImageStyle.width = '100%';
    }
    kthoom.saveSettings();
}
// Keyboard handler: LEFT/RIGHT page turning (gated by horizontal scroll
// position), L/R rotation, F cycles flip states (none → hflip → vflip →
// none), W/H/B/N select the fit mode. Ignored while the progress bar is
// hidden or when a modifier key is held.
function keyHandler(evt) {
    var code = evt.keyCode;
    if (getComputedStyle(getElem('progress')).display == 'none') return;
    // Only allow "next" once scrolled all the way right, "prev" at far left.
    canKeyNext = ((document.body.offsetWidth + document.body.scrollLeft) /
        document.body.scrollWidth) >= 1;
    canKeyPrev = (scrollX <= 0);
    if (evt.ctrlKey || evt.shiftKey || evt.metaKey) return;
    switch (code) {
        case kthoom.Key.LEFT:
            if (canKeyPrev) showPrevPage();
            break;
        case kthoom.Key.RIGHT:
            if (canKeyNext) showNextPage();
            break;
        case kthoom.Key.L:
            kthoom.rotateTimes--;
            if (kthoom.rotateTimes < 0) {
                kthoom.rotateTimes = 3;
            }
            updatePage();
            break;
        case kthoom.Key.R:
            kthoom.rotateTimes++;
            if (kthoom.rotateTimes > 3) {
                kthoom.rotateTimes = 0;
            }
            updatePage();
            break;
        case kthoom.Key.F:
            // Cycle: no flip -> horizontal -> vertical -> none.
            if (!hflip && !vflip) {
                hflip = true;
            } else if (hflip == true) {
                vflip = true;
                hflip = false;
            } else if (vflip == true) {
                vflip = false;
            }
            updatePage();
            break;
        case kthoom.Key.W:
            fitMode = kthoom.Key.W;
            updateScale();
            break;
        case kthoom.Key.H:
            fitMode = kthoom.Key.H;
            updateScale();
            break;
        case kthoom.Key.B:
            fitMode = kthoom.Key.B;
            updateScale();
            break;
        case kthoom.Key.N:
            fitMode = kthoom.Key.N;
            updateScale();
            break;
        default:
            //console.log('KeyCode = ' + code);
            break;
    }
}
// Entry point for the comic reader: fetches the archive at `filename` via XHR
// and wires up all UI event handlers (keyboard, resize, page-click navigation).
// NOTE: the diff view duplicated this whole body; this is the single-copy form.
function init(filename) {
    if (!window.FileReader) {
        alert('Sorry, kthoom will not work with your browser because it does not support the File API. Please try kthoom with Chrome 12+ or Firefox 7+');
    } else {
        var request = new XMLHttpRequest();
        request.open("GET", filename);
        request.responseType = "arraybuffer";
        // NOTE(review): two values for the same header are combined by XHR;
        // these look like leftover test/debug headers — confirm before removing.
        request.setRequestHeader("X-Test", "test1");
        request.setRequestHeader("X-Test", "test2");
        request.addEventListener('load', function() {
            if (request.status >= 200 && request.status < 300) {
                loadFromArrayBuffer(request.response);
            } else {
                console.warn(request.statusText, request.responseText);
            }
        });
        request.send();
        kthoom.initProgressMeter();
        document.body.className += /AppleWebKit/.test(navigator.userAgent) ? ' webkit' : '';
        //kthoom.resetFileUploader();
        kthoom.loadSettings();
        $(document).keydown(keyHandler);

        $(window).resize(function() {
            // Heuristic: window within 4px of screen size => treat as fullscreen.
            var f = (screen.width - innerWidth < 4 && screen.height - innerHeight < 4);
            getElem('titlebar').className = f ? 'main' : '';
            updateScale();
        });

        $('#mainImage').click(function(evt) {
            // Firefox does not support offsetX/Y so we have to manually calculate
            // where the user clicked in the image.
            var mainContentWidth = getElem('mainContent').clientWidth;
            var mainContentHeight = getElem('mainContent').clientHeight;
            var comicWidth = evt.target.clientWidth;
            var comicHeight = evt.target.clientHeight;
            var offsetX = (mainContentWidth - comicWidth) / 2;
            var offsetY = (mainContentHeight - comicHeight) / 2;
            // NOTE(review): `!!evt.offsetX` treats a legitimate 0 offset as
            // "unsupported" and falls back to clientX math — confirm intended.
            var clickX = !!evt.offsetX ? evt.offsetX : (evt.clientX - offsetX);
            var clickY = !!evt.offsetY ? evt.offsetY : (evt.clientY - offsetY);
            // Determine if the user clicked/tapped the left side or the
            // right side of the page, accounting for rotation.
            var clickedPrev = false;
            switch (kthoom.rotateTimes) {
                case 0:
                    clickedPrev = clickX < (comicWidth / 2);
                    break;
                case 1:
                    clickedPrev = clickY < (comicHeight / 2);
                    break;
                case 2:
                    clickedPrev = clickX > (comicWidth / 2);
                    break;
                case 3:
                    clickedPrev = clickY > (comicHeight / 2);
                    break;
            }
            if (clickedPrev) {
                showPrevPage();
            } else {
                showNextPage();
            }
        });
    }
}
cps/static/js/unrar.js
View file @
5b9b36f9
...
...
@@ -23,196 +23,196 @@ var totalFilesInArchive = 0;
// Helper functions.
// Posts an informational message back to the page hosting this worker.
// The diff view duplicated the postMessage call; a single post is correct.
var info = function(str) {
    postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
};
// Posts an error message back to the page hosting this worker.
var err = function(str) {
    postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
// Reports current unarchiving progress (per-file and overall byte counters)
// to the host page.  All counters are worker-global state.
var postProgress = function() {
    postMessage(new bitjs.archive.UnarchiveProgressEvent(
        currentFilename,
        currentFileNumber,
        currentBytesUnarchivedInFile,
        currentBytesUnarchived,
        totalUncompressedBytesInArchive,
        totalFilesInArchive));
};
// shows a byte value as its hex representation
var nibble = "0123456789ABCDEF";

// Converts an 8-bit value (0-255) to a two-character uppercase hex string.
var byteValueToHexString = function(num) {
    return nibble[num >> 4] + nibble[num & 0xF];
};

// Converts a 16-bit value to a four-character uppercase hex string.
var twoByteValueToHexString = function(num) {
    return nibble[(num >> 12) & 0xF] + nibble[(num >> 8) & 0xF] +
           nibble[(num >> 4) & 0xF] + nibble[num & 0xF];
};
// Volume Types — RAR block header type codes (byte 3 of each volume header).
// Declared once with `var`; the diff-duplicated bare re-assignments would
// otherwise create implicit globals.
var MARK_HEAD = 0x72,
    MAIN_HEAD = 0x73,
    FILE_HEAD = 0x74,
    COMM_HEAD = 0x75,
    AV_HEAD = 0x76,
    SUB_HEAD = 0x77,
    PROTECT_HEAD = 0x78,
    SIGN_HEAD = 0x79,
    NEWSUB_HEAD = 0x7a,
    ENDARC_HEAD = 0x7b;
// bstream is a bit stream
// Parses one RAR volume header from the bit stream into `this` (crc, headType,
// flags, headSize, and — for MAIN_HEAD/FILE_HEAD — the type-specific fields).
// De-duplicated from the diff view; explicit semicolons added where the
// original relied on ASI.
var RarVolumeHeader = function(bstream) {
    var headPos = bstream.bytePtr;
    // byte 1,2
    info("Rar Volume Header @" + bstream.bytePtr);
    this.crc = bstream.readBits(16);
    info(" crc=" + this.crc);

    // byte 3
    this.headType = bstream.readBits(8);
    info(" headType=" + this.headType);

    // Get flags
    // bytes 4,5
    this.flags = {};
    this.flags.value = bstream.peekBits(16);
    info(" flags=" + twoByteValueToHexString(this.flags.value));
    switch (this.headType) {
        case MAIN_HEAD:
            this.flags.MHD_VOLUME = !!bstream.readBits(1);
            this.flags.MHD_COMMENT = !!bstream.readBits(1);
            this.flags.MHD_LOCK = !!bstream.readBits(1);
            this.flags.MHD_SOLID = !!bstream.readBits(1);
            this.flags.MHD_PACK_COMMENT = !!bstream.readBits(1);
            this.flags.MHD_NEWNUMBERING = this.flags.MHD_PACK_COMMENT;
            this.flags.MHD_AV = !!bstream.readBits(1);
            this.flags.MHD_PROTECT = !!bstream.readBits(1);
            this.flags.MHD_PASSWORD = !!bstream.readBits(1);
            this.flags.MHD_FIRSTVOLUME = !!bstream.readBits(1);
            this.flags.MHD_ENCRYPTVER = !!bstream.readBits(1);
            bstream.readBits(6); // unused
            break;
        case FILE_HEAD:
            this.flags.LHD_SPLIT_BEFORE = !!bstream.readBits(1); // 0x0001
            this.flags.LHD_SPLIT_AFTER = !!bstream.readBits(1);  // 0x0002
            this.flags.LHD_PASSWORD = !!bstream.readBits(1);     // 0x0004
            this.flags.LHD_COMMENT = !!bstream.readBits(1);      // 0x0008
            this.flags.LHD_SOLID = !!bstream.readBits(1);        // 0x0010
            bstream.readBits(3); // unused
            this.flags.LHD_LARGE = !!bstream.readBits(1);        // 0x0100
            this.flags.LHD_UNICODE = !!bstream.readBits(1);      // 0x0200
            this.flags.LHD_SALT = !!bstream.readBits(1);         // 0x0400
            this.flags.LHD_VERSION = !!bstream.readBits(1);      // 0x0800
            this.flags.LHD_EXTTIME = !!bstream.readBits(1);      // 0x1000
            this.flags.LHD_EXTFLAGS = !!bstream.readBits(1);     // 0x2000
            bstream.readBits(2); // unused
            info(" LHD_SPLIT_BEFORE = " + this.flags.LHD_SPLIT_BEFORE);
            break;
        default:
            bstream.readBits(16);
    }

    // byte 6,7
    this.headSize = bstream.readBits(16);
    info(" headSize=" + this.headSize);
    switch (this.headType) {
        case MAIN_HEAD:
            this.highPosAv = bstream.readBits(16);
            this.posAv = bstream.readBits(32);
            if (this.flags.MHD_ENCRYPTVER) {
                this.encryptVer = bstream.readBits(8);
            }
            info("Found MAIN_HEAD with highPosAv=" + this.highPosAv + ", posAv=" + this.posAv);
            break;
        case FILE_HEAD:
            this.packSize = bstream.readBits(32);
            this.unpackedSize = bstream.readBits(32);
            this.hostOS = bstream.readBits(8);
            this.fileCRC = bstream.readBits(32);
            this.fileTime = bstream.readBits(32);
            this.unpVer = bstream.readBits(8);
            this.method = bstream.readBits(8);
            this.nameSize = bstream.readBits(16);
            this.fileAttr = bstream.readBits(32);

            if (this.flags.LHD_LARGE) {
                info("Warning: Reading in LHD_LARGE 64-bit size values");
                this.HighPackSize = bstream.readBits(32);
                this.HighUnpSize = bstream.readBits(32);
            } else {
                this.HighPackSize = 0;
                this.HighUnpSize = 0;
                if (this.unpackedSize == 0xffffffff) {
                    this.HighUnpSize = 0x7fffffff;
                    this.unpackedSize = 0xffffffff;
                }
            }
            this.fullPackSize = 0;
            this.fullUnpackSize = 0;
            this.fullPackSize |= this.HighPackSize;
            // NOTE(review): `<<= 32` is a no-op in JS (shift count is mod 32),
            // so fullPackSize never actually includes HighPackSize — confirm.
            this.fullPackSize <<= 32;
            this.fullPackSize |= this.packSize;

            // read in filename
            this.filename = bstream.readBytes(this.nameSize);
            for (var _i = 0, _s = ''; _i < this.filename.length; _i++) {
                _s += String.fromCharCode(this.filename[_i]);
            }
            this.filename = _s;

            if (this.flags.LHD_SALT) {
                info("Warning: Reading in 64-bit salt value");
                this.salt = bstream.readBits(64); // 8 bytes
            }

            if (this.flags.LHD_EXTTIME) {
                // 16-bit flags
                var extTimeFlags = bstream.readBits(16);
                // this is adapted straight out of arcread.cpp, Archive::ReadHeader()
                for (var I = 0; I < 4; ++I) {
                    var rmode = extTimeFlags >> ((3 - I) * 4);
                    if ((rmode & 8) == 0) continue;
                    if (I != 0) bstream.readBits(16);
                    var count = (rmode & 3);
                    for (var J = 0; J < count; ++J) bstream.readBits(8);
                }
            }

            if (this.flags.LHD_COMMENT) {
                info("Found a LHD_COMMENT");
            }

            // Skip any remaining header bits up to the declared header size.
            while (headPos + this.headSize > bstream.bytePtr) bstream.readBits(1);

            info("Found FILE_HEAD with packSize=" + this.packSize + ", unpackedSize= " + this.unpackedSize + ", hostOS=" + this.hostOS + ", unpVer=" + this.unpVer + ", method=" + this.method + ", filename=" + this.filename);

            break;
        default:
            info("Found a header of type 0x" + byteValueToHexString(this.headType));
            // skip the rest of the header bytes (for now)
            bstream.readBytes(this.headSize - 7);
            break;
    }
};
// Block kinds within a RAR stream: plain LZ or PPM-compressed.
var BLOCK_LZ = 0,
    BLOCK_PPM = 1;

// Static decode tables used by the LZ decoders:
//  rLDecode/rLBits        — length base values and extra-bit counts,
//  rDBitLengthCounts      — how many distance slots use each bit length,
//  rSDDecode/rSDBits      — short-distance base values and extra-bit counts.
var rLDecode = [0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 16, 20, 24, 28, 32, 40, 48, 56, 64, 80, 96, 112, 128, 160, 192, 224],
    rLBits = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5],
    rDBitLengthCounts = [4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 14, 0, 12],
    rSDDecode = [0, 4, 8, 16, 32, 64, 128, 192],
    rSDBits = [2, 2, 3, 4, 5, 6, 6, 6];
var
rDDecode
=
[
0
,
1
,
2
,
3
,
4
,
6
,
8
,
12
,
16
,
24
,
32
,
48
,
64
,
96
,
128
,
192
,
256
,
384
,
512
,
768
,
1024
,
1536
,
2048
,
3072
,
...
...
@@ -227,11 +227,11 @@ var rDBits = [0, 0, 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5,
// Maximum consecutive uses of a "low distance" before re-reading it.
var rLOW_DIST_REP_COUNT = 16;

// Huffman alphabet sizes: literals/lengths (rNC), distances (rDC),
// low distances (rLDC), repeats (rRC), bit-length pre-table (rBC),
// and the total combined table size.
var rNC = 299,
    rDC = 60,
    rLDC = 17,
    rRC = 28,
    rBC = 20,
    rHUFF_TABLE_SIZE = (rNC + rDC + rRC + rLDC);

var UnpBlockType = BLOCK_LZ;
// Previous block's combined Huffman bit lengths, carried across RarReadTables calls.
var UnpOldTable = new Array(rHUFF_TABLE_SIZE);
...
...
@@ -266,223 +266,220 @@ var rBuffer;
// read in Huffman tables for RAR
// Reads the per-block Huffman code lengths from the stream and rebuilds the
// four decode tables (LD, DD, LDD, RD) plus the pre-table BD.
// Returns true on success; returns undefined early if a PPM block is seen
// (PPM is not implemented).
function RarReadTables(bstream) {
    var BitLength = new Array(rBC),
        Table = new Array(rHUFF_TABLE_SIZE);

    // before we start anything we need to get byte-aligned
    bstream.readBits((8 - bstream.bitPtr) & 0x7);

    if (bstream.readBits(1)) {
        info("Error! PPM not implemented yet");
        return;
    }

    if (!bstream.readBits(1)) {
        //discard old table
        for (var i = UnpOldTable.length; i--;) UnpOldTable[i] = 0;
    }

    // read in bit lengths
    for (var I = 0; I < rBC; ++I) {
        var Length = bstream.readBits(4);
        if (Length == 15) {
            var ZeroCount = bstream.readBits(4);
            if (ZeroCount == 0) {
                BitLength[I] = 15;
            } else {
                // A run of (ZeroCount + 2) zero lengths.
                ZeroCount += 2;
                while (ZeroCount-- > 0 && I < rBC) BitLength[I++] = 0;
                --I;
            }
        } else {
            BitLength[I] = Length;
        }
    }

    // now all 20 bit lengths are obtained, we construct the Huffman Table:
    RarMakeDecodeTables(BitLength, 0, BD, rBC);

    var TableSize = rHUFF_TABLE_SIZE;
    //console.log(DecodeLen, DecodePos, DecodeNum);
    for (var i = 0; i < TableSize;) {
        var num = RarDecodeNumber(bstream, BD);
        if (num < 16) {
            // Delta-coded against the previous block's table.
            Table[i] = (num + UnpOldTable[i]) & 0xf;
            i++;
        } else if (num < 18) {
            // Repeat the previous value N times.
            var N = (num == 16) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
            while (N-- > 0 && i < TableSize) {
                Table[i] = Table[i - 1];
                i++;
            }
        } else {
            // A run of N zeros.
            var N = (num == 18) ? (bstream.readBits(3) + 3) : (bstream.readBits(7) + 11);
            while (N-- > 0 && i < TableSize) {
                Table[i++] = 0;
            }
        }
    }

    RarMakeDecodeTables(Table, 0, LD, rNC);
    RarMakeDecodeTables(Table, rNC, DD, rDC);
    RarMakeDecodeTables(Table, rNC + rDC, LDD, rLDC);
    RarMakeDecodeTables(Table, rNC + rDC + rLDC, RD, rRC);

    for (var i = UnpOldTable.length; i--;) {
        UnpOldTable[i] = Table[i];
    }
    return true;
}
// Decodes one Huffman symbol from the bit stream using decode table `dec`
// ({DecodeLen, DecodePos, DecodeNum} as built by RarMakeDecodeTables).
// Returns the decoded symbol number.
function RarDecodeNumber(bstream, dec) {
    var DecodeLen = dec.DecodeLen,
        DecodePos = dec.DecodePos,
        DecodeNum = dec.DecodeNum;
    var bitField = bstream.getBits() & 0xfffe;
    //some sort of rolled out binary search
    var bits = ((bitField < DecodeLen[8]) ?
        ((bitField < DecodeLen[4]) ?
            ((bitField < DecodeLen[2]) ?
                ((bitField < DecodeLen[1]) ? 1 : 2) :
                ((bitField < DecodeLen[3]) ? 3 : 4)) :
            (bitField < DecodeLen[6]) ?
                ((bitField < DecodeLen[5]) ? 5 : 6) :
                ((bitField < DecodeLen[7]) ? 7 : 8)) :
        ((bitField < DecodeLen[12]) ?
            ((bitField < DecodeLen[10]) ?
                ((bitField < DecodeLen[9]) ? 9 : 10) :
                ((bitField < DecodeLen[11]) ? 11 : 12)) :
            (bitField < DecodeLen[14]) ?
                ((bitField < DecodeLen[13]) ? 13 : 14) :
                15));
    bstream.readBits(bits);
    var N = DecodePos[bits] + ((bitField - DecodeLen[bits - 1]) >>> (16 - bits));
    return DecodeNum[N];
}
// Builds canonical-Huffman decode tables into `dec` ({DecodeLen, DecodePos,
// DecodeNum}) from `size` code lengths found at BitLength[offset..offset+size-1].
// Mutates `dec` in place; lengths are masked to 4 bits (1..15).
function RarMakeDecodeTables(BitLength, offset, dec, size) {
    var DecodeLen = dec.DecodeLen,
        DecodePos = dec.DecodePos,
        DecodeNum = dec.DecodeNum;
    var LenCount = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        TmpPos = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
        N = 0,
        M = 0;
    for (var i = DecodeNum.length; i--;) DecodeNum[i] = 0;
    // Count how many symbols use each code length.
    for (var i = 0; i < size; i++) {
        LenCount[BitLength[i + offset] & 0xF]++;
    }
    LenCount[0] = 0;
    TmpPos[0] = 0;
    DecodePos[0] = 0;
    DecodeLen[0] = 0;

    // Compute the upper code bound (left-justified to 16 bits, clamped to
    // 0xFFFF) and the first-symbol index for each length.
    for (var I = 1; I < 16; ++I) {
        N = 2 * (N + LenCount[I]);
        M = (N << (15 - I));
        if (M > 0xFFFF) M = 0xFFFF;
        DecodeLen[I] = M;
        DecodePos[I] = DecodePos[I - 1] + LenCount[I - 1];
        TmpPos[I] = DecodePos[I];
    }

    // Assign symbols to slots in canonical order (zero-length codes skipped).
    for (I = 0; I < size; ++I)
        if (BitLength[I + offset] != 0)
            DecodeNum[TmpPos[BitLength[offset + I] & 0xF]++] = I;
}
// TODO: implement
// RAR 1.5 decompression is not supported; reports an error and returns.
function Unpack15(bstream, Solid) {
    info("ERROR! RAR 1.5 compression not supported");
}
// Decompresses a RAR 2.0 LZ stream into the worker-global rBuffer until the
// expected number of output bytes has been produced.  Explicit semicolons
// added where the original relied on ASI; diff-duplicated body removed.
function Unpack20(bstream, Solid) {
    var destUnpSize = rBuffer.data.length;
    var oldDistPtr = 0;

    RarReadTables20(bstream);
    while (destUnpSize > rBuffer.ptr) {
        var num = RarDecodeNumber(bstream, LD);
        if (num < 256) {
            // Literal byte.
            rBuffer.insertByte(num);
            continue;
        }
        if (num > 269) {
            // Length/distance match.
            var Length = rLDecode[num -= 270] + 3;
            if ((Bits = rLBits[num]) > 0) {
                Length += bstream.readBits(Bits);
            }
            var DistNumber = RarDecodeNumber(bstream, DD);
            var Distance = rDDecode[DistNumber] + 1;
            if ((Bits = rDBits[DistNumber]) > 0) {
                Distance += bstream.readBits(Bits);
            }
            if (Distance >= 0x2000) {
                Length++;
                if (Distance >= 0x40000) Length++;
            }
            lastLength = Length;
            lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
            RarCopyString(Length, Distance);
            continue;
        }
        if (num == 269) {
            // New Huffman tables mid-stream.
            RarReadTables20(bstream);
            RarUpdateProgress();
            continue;
        }
        if (num == 256) {
            // Repeat the last match.
            lastDist = rOldDist[oldDistPtr++ & 3] = lastDist;
            RarCopyString(lastLength, lastDist);
            continue;
        }
        if (num < 261) {
            // Match against one of the four previous distances.
            var Distance = rOldDist[(oldDistPtr - (num - 256)) & 3];
            var LengthNumber = RarDecodeNumber(bstream, RD);
            var Length = rLDecode[LengthNumber] + 2;
            if ((Bits = rLBits[LengthNumber]) > 0) {
                Length += bstream.readBits(Bits);
            }
            if (Distance >= 0x101) {
                Length++;
                if (Distance >= 0x2000) {
                    Length++;
                    if (Distance >= 0x40000) Length++;
                }
            }
            lastLength = Length;
            lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
            RarCopyString(Length, Distance);
            continue;
        }
        if (num < 270) {
            // Short match (length 2) with a short-coded distance.
            var Distance = rSDDecode[num -= 261] + 1;
            if ((Bits = rSDBits[num]) > 0) {
                Distance += bstream.readBits(Bits);
            }
            lastLength = 2;
            lastDist = rOldDist[oldDistPtr++ & 3] = Distance;
            RarCopyString(2, Distance);
            continue;
        }
    }
    RarUpdateProgress();
}
// Advances the worker-global progress counters from rBuffer's write pointer
// and notifies the host page.
function RarUpdateProgress() {
    var change = rBuffer.ptr - currentBytesUnarchivedInFile;
    currentBytesUnarchivedInFile = rBuffer.ptr;
    currentBytesUnarchived += change;
    postProgress();
}
...
...
@@ -495,43 +492,43 @@ var rNC20 = 298,
var
UnpOldTable20
=
new
Array
(
rMC20
*
4
);
// Reads the Huffman code lengths for a RAR 2.0 block and rebuilds the BD, LD,
// DD and RD decode tables.  Carries the table across blocks in UnpOldTable20
// (delta coding).  De-duplicated from the diff view.
function RarReadTables20(bstream) {
    var BitLength = new Array(rBC20);
    var Table = new Array(rMC20 * 4);
    var TableSize, N, I;
    // NOTE(review): AudioBlock is read but never used here — audio blocks
    // are presumably handled elsewhere; confirm.
    var AudioBlock = bstream.readBits(1);
    if (!bstream.readBits(1))
        for (var i = UnpOldTable20.length; i--;) UnpOldTable20[i] = 0;
    TableSize = rNC20 + rDC20 + rRC20;
    for (var I = 0; I < rBC20; I++)
        BitLength[I] = bstream.readBits(4);
    RarMakeDecodeTables(BitLength, 0, BD, rBC20);
    I = 0;
    while (I < TableSize) {
        var num = RarDecodeNumber(bstream, BD);
        if (num < 16) {
            // Delta against the previous block's table.
            Table[I] = num + UnpOldTable20[I] & 0xf;
            I++;
        } else if (num == 16) {
            // Repeat previous value N times.
            N = bstream.readBits(2) + 3;
            while (N-- > 0 && I < TableSize) {
                Table[I] = Table[I - 1];
                I++;
            }
        } else {
            // Run of N zeros.
            if (num == 17) {
                N = bstream.readBits(3) + 3;
            } else {
                N = bstream.readBits(7) + 11;
            }
            while (N-- > 0 && I < TableSize) {
                Table[I++] = 0;
            }
        }
    }
    RarMakeDecodeTables(Table, 0, LD, rNC20);
    RarMakeDecodeTables(Table, rNC20, DD, rDC20);
    RarMakeDecodeTables(Table, rNC20 + rDC20, RD, rRC20);
    for (var i = UnpOldTable20.length; i--;) UnpOldTable20[i] = Table[i];
}
var
lowDistRepCount
=
0
,
prevLowDist
=
0
;
...
...
@@ -542,200 +539,195 @@ var lastLength;
function
Unpack29
(
bstream
,
Solid
)
{
// lazy initialize rDDecode and rDBits
// lazy initialize rDDecode and rDBits
var
DDecode
=
new
Array
(
rDC
);
var
DBits
=
new
Array
(
rDC
);
var
Dist
=
0
,
BitLength
=
0
,
Slot
=
0
;
for
(
var
I
=
0
;
I
<
rDBitLengthCounts
.
length
;
I
++
,
BitLength
++
)
{
for
(
var
J
=
0
;
J
<
rDBitLengthCounts
[
I
];
J
++
,
Slot
++
,
Dist
+=
(
1
<<
BitLength
))
{
DDecode
[
Slot
]
=
Dist
;
DBits
[
Slot
]
=
BitLength
;
var
DDecode
=
new
Array
(
rDC
);
var
DBits
=
new
Array
(
rDC
);
var
Dist
=
0
,
BitLength
=
0
,
Slot
=
0
;
for
(
var
I
=
0
;
I
<
rDBitLengthCounts
.
length
;
I
++
,
BitLength
++
)
{
for
(
var
J
=
0
;
J
<
rDBitLengthCounts
[
I
];
J
++
,
Slot
++
,
Dist
+=
(
1
<<
BitLength
))
{
DDecode
[
Slot
]
=
Dist
;
DBits
[
Slot
]
=
BitLength
;
}
}
}
var
Bits
;
//tablesRead = false;
var
Bits
;
//tablesRead = false;
rOldDist
=
[
0
,
0
,
0
,
0
]
lastDist
=
0
;
lastLength
=
0
;
rOldDist
=
[
0
,
0
,
0
,
0
]
for
(
var
i
=
UnpOldTable
.
length
;
i
--
;)
UnpOldTable
[
i
]
=
0
;
lastDist
=
0
;
lastLength
=
0
;
for
(
var
i
=
UnpOldTable
.
length
;
i
--
;)
UnpOldTable
[
i
]
=
0
;
// read in Huffman tables
RarReadTables
(
bstream
);
// read in Huffman tables
RarReadTables
(
bstream
);
while
(
true
)
{
var
num
=
RarDecodeNumber
(
bstream
,
LD
);
while
(
true
)
{
var
num
=
RarDecodeNumber
(
bstream
,
LD
);
if
(
num
<
256
)
{
rBuffer
.
insertByte
(
num
);
continue
;
}
if
(
num
>=
271
)
{
var
Length
=
rLDecode
[
num
-=
271
]
+
3
;
if
((
Bits
=
rLBits
[
num
])
>
0
)
{
Length
+=
bstream
.
readBits
(
Bits
);
}
var
DistNumber
=
RarDecodeNumber
(
bstream
,
DD
);
var
Distance
=
DDecode
[
DistNumber
]
+
1
;
if
((
Bits
=
DBits
[
DistNumber
])
>
0
)
{
if
(
DistNumber
>
9
)
{
if
(
Bits
>
4
)
{
Distance
+=
((
bstream
.
getBits
()
>>>
(
20
-
Bits
))
<<
4
);
bstream
.
readBits
(
Bits
-
4
);
//todo: check this
}
if
(
lowDistRepCount
>
0
)
{
lowDistRepCount
--
;
Distance
+=
prevLowDist
;
}
else
{
var
LowDist
=
RarDecodeNumber
(
bstream
,
LDD
);
if
(
LowDist
==
16
)
{
lowDistRepCount
=
rLOW_DIST_REP_COUNT
-
1
;
Distance
+=
prevLowDist
;
}
else
{
Distance
+=
LowDist
;
prevLowDist
=
LowDist
;
if
(
num
<
256
)
{
rBuffer
.
insertByte
(
num
);
continue
;
}
if
(
num
>=
271
)
{
var
Length
=
rLDecode
[
num
-=
271
]
+
3
;
if
((
Bits
=
rLBits
[
num
])
>
0
)
{
Length
+=
bstream
.
readBits
(
Bits
);
}
}
}
else
{
Distance
+=
bstream
.
readBits
(
Bits
);
var
DistNumber
=
RarDecodeNumber
(
bstream
,
DD
);
var
Distance
=
DDecode
[
DistNumber
]
+
1
;
if
((
Bits
=
DBits
[
DistNumber
])
>
0
)
{
if
(
DistNumber
>
9
)
{
if
(
Bits
>
4
)
{
Distance
+=
((
bstream
.
getBits
()
>>>
(
20
-
Bits
))
<<
4
);
bstream
.
readBits
(
Bits
-
4
);
//todo: check this
}
if
(
lowDistRepCount
>
0
)
{
lowDistRepCount
--
;
Distance
+=
prevLowDist
;
}
else
{
var
LowDist
=
RarDecodeNumber
(
bstream
,
LDD
);
if
(
LowDist
==
16
)
{
lowDistRepCount
=
rLOW_DIST_REP_COUNT
-
1
;
Distance
+=
prevLowDist
;
}
else
{
Distance
+=
LowDist
;
prevLowDist
=
LowDist
;
}
}
}
else
{
Distance
+=
bstream
.
readBits
(
Bits
);
}
}
if
(
Distance
>=
0x2000
)
{
Length
++
;
if
(
Distance
>=
0x40000
)
{
Length
++
;
}
}
RarInsertOldDist
(
Distance
);
RarInsertLastMatch
(
Length
,
Distance
);
RarCopyString
(
Length
,
Distance
);
continue
;
}
}
if
(
Distance
>=
0x2000
)
{
Length
++
;
if
(
Distance
>=
0x40000
)
{
Length
++
;
if
(
num
==
256
)
{
if
(
!
RarReadEndOfBlock
(
bstream
))
break
;
continue
;
}
if
(
num
==
257
)
{
//console.log("READVMCODE");
if
(
!
RarReadVMCode
(
bstream
))
break
;
continue
;
}
if
(
num
==
258
)
{
if
(
lastLength
!=
0
)
{
RarCopyString
(
lastLength
,
lastDist
);
}
continue
;
}
if
(
num
<
263
)
{
var
DistNum
=
num
-
259
;
var
Distance
=
rOldDist
[
DistNum
];
for
(
var
I
=
DistNum
;
I
>
0
;
I
--
)
{
rOldDist
[
I
]
=
rOldDist
[
I
-
1
];
}
rOldDist
[
0
]
=
Distance
;
var
LengthNumber
=
RarDecodeNumber
(
bstream
,
RD
);
var
Length
=
rLDecode
[
LengthNumber
]
+
2
;
if
((
Bits
=
rLBits
[
LengthNumber
])
>
0
)
{
Length
+=
bstream
.
readBits
(
Bits
);
}
RarInsertLastMatch
(
Length
,
Distance
);
RarCopyString
(
Length
,
Distance
);
continue
;
}
if
(
num
<
272
)
{
var
Distance
=
rSDDecode
[
num
-=
263
]
+
1
;
if
((
Bits
=
rSDBits
[
num
])
>
0
)
{
Distance
+=
bstream
.
readBits
(
Bits
);
}
RarInsertOldDist
(
Distance
);
RarInsertLastMatch
(
2
,
Distance
);
RarCopyString
(
2
,
Distance
);
continue
;
}
}
RarInsertOldDist
(
Distance
);
RarInsertLastMatch
(
Length
,
Distance
);
RarCopyString
(
Length
,
Distance
);
continue
;
}
if
(
num
==
256
)
{
if
(
!
RarReadEndOfBlock
(
bstream
))
break
;
continue
;
}
if
(
num
==
257
)
{
//console.log("READVMCODE");
if
(
!
RarReadVMCode
(
bstream
))
break
;
continue
;
}
if
(
num
==
258
)
{
if
(
lastLength
!=
0
)
{
RarCopyString
(
lastLength
,
lastDist
);
}
continue
;
}
if
(
num
<
263
)
{
var
DistNum
=
num
-
259
;
var
Distance
=
rOldDist
[
DistNum
];
for
(
var
I
=
DistNum
;
I
>
0
;
I
--
)
{
rOldDist
[
I
]
=
rOldDist
[
I
-
1
];
}
rOldDist
[
0
]
=
Distance
;
var
LengthNumber
=
RarDecodeNumber
(
bstream
,
RD
);
var
Length
=
rLDecode
[
LengthNumber
]
+
2
;
if
((
Bits
=
rLBits
[
LengthNumber
])
>
0
)
{
Length
+=
bstream
.
readBits
(
Bits
);
}
RarInsertLastMatch
(
Length
,
Distance
);
RarCopyString
(
Length
,
Distance
);
continue
;
}
if
(
num
<
272
)
{
var
Distance
=
rSDDecode
[
num
-=
263
]
+
1
;
if
((
Bits
=
rSDBits
[
num
])
>
0
)
{
Distance
+=
bstream
.
readBits
(
Bits
);
}
RarInsertOldDist
(
Distance
);
RarInsertLastMatch
(
2
,
Distance
);
RarCopyString
(
2
,
Distance
);
continue
;
}
}
RarUpdateProgress
()
RarUpdateProgress
()
}
// Handles the end-of-block marker (symbol 256) of the RAR 2.9 decoder.
// Returns true when decoding should continue with another block,
// false when the current file's data stream ends here.
function RarReadEndOfBlock(bstream) {
    RarUpdateProgress();

    var newTable = false;
    var newFile = false;

    // First bit set => the next block carries fresh Huffman tables.
    // First bit clear => a new file follows; a second bit then says
    // whether that file starts with new tables.
    if (bstream.readBits(1)) {
        newTable = true;
    } else {
        newFile = true;
        newTable = !!bstream.readBits(1);
    }

    //tablesRead = !newTable;
    return !(newFile || newTable && !RarReadTables(bstream));
}
// Reads a VM (filter) code block from the bit stream and hands it to
// RarAddVMCode. Returns whatever RarAddVMCode returns.
function RarReadVMCode(bstream) {
    var firstByte = bstream.readBits(8);

    // The low three bits encode the length; the values 7 and 8 are
    // escapes meaning "read an extra 8- or 16-bit length field".
    var length = (firstByte & 7) + 1;
    if (length == 7) {
        length = bstream.readBits(8) + 7;
    } else if (length == 8) {
        length = bstream.readBits(16);
    }

    var vmCode = [];
    //do something here with cheking readbuf
    for (var i = 0; i < length; i++) {
        vmCode.push(bstream.readBits(8));
    }
    return RarAddVMCode(firstByte, vmCode, length);
}
// Placeholder for RarVM filter support: real filters are not
// implemented, so any non-empty code block is reported via info().
// Always returns true so decoding continues.
function RarAddVMCode(firstByte, vmCode, length) {
    //console.log(vmCode);
    if (vmCode.length === 0) {
        return true;
    }
    info("Error! RarVM not supported yet!");
    return true;
}
// Remembers the most recent LZ match so that symbol 258
// ("repeat last match") can replay it.
function RarInsertLastMatch(length, distance) {
    lastLength = length;
    lastDist = distance;
}
// Pushes a newly used match distance onto the 4-entry history of
// recent distances (rOldDist), evicting the oldest entry. Symbols
// 259..262 index into this history.
// Fix: the source (a side-by-side diff rendering) doubled the
// splice pair, which as literally parsed would insert the distance
// twice; the history must be updated exactly once per call.
function RarInsertOldDist(distance) {
    rOldDist.splice(3, 1);           // drop the oldest of the four entries
    rOldDist.splice(0, 0, distance); // most recent distance goes first
}
//this is the real function, the other one is for debugging
// LZ77 copy step: appends `length` bytes starting `distance` bytes
// back from the current write position of rBuffer. When the match
// begins before the current buffer, bytes are pulled from the saved
// buffers of previously unpacked files (solid archives).
// Fix: deduplicated the diff-artifact copy that was interleaved
// inside the while loop in the rendered source.
function RarCopyString(length, distance) {
    var destPtr = rBuffer.ptr - distance;
    if (destPtr < 0) {
        // Match reaches into older buffers: walk backwards through
        // rOldBuffers until destPtr lands inside one of them.
        var l = rOldBuffers.length;
        while (destPtr < 0) {
            destPtr = rOldBuffers[--l].data.length + destPtr;
        }
        //TODO: lets hope that it never needs to read beyond file boundaries
        while (length--) rBuffer.insertByte(rOldBuffers[l].data[destPtr++]);
    }
    // NOTE(review): after the old-buffer path above, length is -1 and
    // the branch below inserts an empty slice — harmless but confirm.
    if (length > distance) {
        // Overlapping copy: must go byte-by-byte so freshly written
        // bytes become readable as the copy proceeds.
        while (length--) rBuffer.insertByte(rBuffer.data[destPtr++]);
    } else {
        rBuffer.insertBytes(rBuffer.data.subarray(destPtr, destPtr + length));
    }
}
// History of output buffers from previously unpacked files; solid
// archives may copy match data from these (see RarCopyString).
// Fix: terminate the statement explicitly instead of relying on ASI.
var rOldBuffers = [];
...
...
@@ -743,154 +735,154 @@ var rOldBuffers = []
// Unpacks one RAR local file `v` and returns the uncompressed bytes.
// Dispatches on the archiver version recorded in the header; the
// finished buffer is also kept in rOldBuffers for solid archives.
// Fix: deduplicated the diff-artifact doubled statements (the literal
// rendered text would have run the Unpack switch twice).
function unpack(v) {
    // TODO: implement what happens when unpVer is < 15
    var Ver = v.header.unpVer <= 15 ? 15 : v.header.unpVer,
        Solid = v.header.LHD_SOLID,
        bstream = new bitjs.io.BitStream(v.fileData.buffer, true /* rtl */, v.fileData.byteOffset, v.fileData.byteLength);

    rBuffer = new bitjs.io.ByteBuffer(v.header.unpackedSize);

    info("Unpacking " + v.filename + " RAR v" + Ver);

    switch (Ver) {
        case 15: // rar 1.5 compression
            Unpack15(bstream, Solid);
            break;
        case 20: // rar 2.x compression
        case 26: // files larger than 2GB
            Unpack20(bstream, Solid);
            break;
        case 29: // rar 3.x compression
        case 36: // alternative hash
            Unpack29(bstream, Solid);
            break;
    } // switch(method)

    rOldBuffers.push(rBuffer);
    //TODO: clear these old buffers when there's over 4MB of history
    return rBuffer.data;
}
// bstream is a bit stream
// Parses one local-file entry: reads its volume header and, when the
// header is a FILE_HEAD (or ENDARC_HEAD) with a payload, slurps the
// packed data into this.fileData and marks the entry valid.
// Fix: deduplicated the diff-artifact doubled statements (the literal
// rendered text would have read the header twice from the stream).
var RarLocalFile = function(bstream) {
    this.header = new RarVolumeHeader(bstream);
    this.filename = this.header.filename;

    if (this.header.headType != FILE_HEAD && this.header.headType != ENDARC_HEAD) {
        this.isValid = false;
        info("Error! RAR Volume did not include a FILE_HEAD header ");
    } else {
        // read in the compressed data
        this.fileData = null;
        if (this.header.packSize > 0) {
            this.fileData = bstream.readBytes(this.header.packSize);
            this.isValid = true;
        }
    }
};
// Decompresses (or unstores, for method 0x30) this file's data in
// place, updating the worker-global progress counters. Files whose
// leading part lives in a previous volume (LHD_SPLIT_BEFORE) are
// skipped since they cannot be reconstructed here.
// Fix: deduplicated the diff-artifact interleaved statement copies.
RarLocalFile.prototype.unrar = function() {
    if (!this.header.flags.LHD_SPLIT_BEFORE) {
        // unstore file
        if (this.header.method == 0x30) {
            info("Unstore " + this.filename);
            this.isValid = true;

            currentBytesUnarchivedInFile += this.fileData.length;
            currentBytesUnarchived += this.fileData.length;

            // Create a new buffer and copy it over.
            var len = this.header.packSize;
            var newBuffer = new bitjs.io.ByteBuffer(len);
            newBuffer.insertBytes(this.fileData);
            this.fileData = newBuffer.data;
        } else {
            this.isValid = true;
            this.fileData = unpack(this);
        }
    }
}
// Top-level RAR worker entry point: validates the archive signature,
// walks all local-file entries, unpacks each one and posts progress /
// extract events back to the host page.
// Fix: deduplicated the diff-artifact interleaved statement copies;
// the "Invalid RAR file" error belongs only in the outer else branch
// (signature mismatch).
var unrar = function(arrayBuffer) {
    currentFilename = "";
    currentFileNumber = 0;
    currentBytesUnarchivedInFile = 0;
    currentBytesUnarchived = 0;
    totalUncompressedBytesInArchive = 0;
    totalFilesInArchive = 0;

    postMessage(new bitjs.archive.UnarchiveStartEvent());
    var bstream = new bitjs.io.BitStream(arrayBuffer, false /* rtl */);

    var header = new RarVolumeHeader(bstream);
    if (header.crc == 0x6152 &&
        header.headType == 0x72 &&
        header.flags.value == 0x1A21 &&
        header.headSize == 7) {
        info("Found RAR signature");

        var mhead = new RarVolumeHeader(bstream);
        if (mhead.headType != MAIN_HEAD) {
            info("Error! RAR did not include a MAIN_HEAD header");
        } else {
            var localFiles = [],
                localFile = null;
            do {
                try {
                    localFile = new RarLocalFile(bstream);
                    info("RAR localFile isValid=" + localFile.isValid + ", volume packSize=" + localFile.header.packSize);
                    if (localFile && localFile.isValid && localFile.header.packSize > 0) {
                        totalUncompressedBytesInArchive += localFile.header.unpackedSize;
                        localFiles.push(localFile);
                    } else if (localFile.header.packSize == 0 && localFile.header.unpackedSize == 0) {
                        // Zero-byte entry (e.g. a directory): keep scanning.
                        localFile.isValid = true;
                    }
                } catch (err) {
                    break;
                }
                //info("bstream" + bstream.bytePtr+"/"+bstream.bytes.length);
            } while (localFile.isValid);
            totalFilesInArchive = localFiles.length;

            // now we have all information but things are unpacked
            // TODO: unpack
            localFiles = localFiles.sort(function(a, b) {
                var aname = a.filename.toLowerCase();
                var bname = b.filename.toLowerCase();
                return aname > bname ? 1 : -1;
            });

            info(localFiles.map(function(a){return a.filename}).join(', '));
            for (var i = 0; i < localFiles.length; ++i) {
                var localfile = localFiles[i];

                // update progress
                currentFilename = localfile.header.filename;
                currentBytesUnarchivedInFile = 0;

                // actually do the unzipping
                localfile.unrar();

                if (localfile.isValid) {
                    postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
                    postProgress();
                }
            }

            postProgress();
        }
    } else {
        err("Invalid RAR file");
    }
    postMessage(new bitjs.archive.UnarchiveFinishEvent());
};
// event.data.file has the ArrayBuffer.
// Worker message handler: kicks off extraction of the posted buffer.
// Fix: deduplicated the diff-artifact doubled call (the literal
// rendered text would have unarchived the buffer twice).
onmessage = function(event) {
    var ab = event.data.file;
    // NOTE(review): unrar declares only one parameter; the second
    // argument is ignored — confirm before removing it.
    unrar(ab, true);
};
cps/static/js/untar.js
View file @
5b9b36f9
...
...
@@ -22,147 +22,147 @@ var totalFilesInArchive = 0;
// Helper functions.
// Posts an informational log message back to the host page.
// Fix: deduplicated the diff-artifact doubled postMessage call.
var info = function(str) {
    postMessage(new bitjs.archive.UnarchiveInfoEvent(str));
};
// Posts an error message back to the host page.
// Fix: deduplicated the diff-artifact doubled postMessage call.
var err = function(str) {
    postMessage(new bitjs.archive.UnarchiveErrorEvent(str));
};
// Posts a progress snapshot (current file, byte counts, totals)
// back to the host page.
// Fix: deduplicated the diff-artifact doubled postMessage call.
var postProgress = function() {
    postMessage(new bitjs.archive.UnarchiveProgressEvent(
        currentFilename,
        currentFileNumber,
        currentBytesUnarchivedInFile,
        currentBytesUnarchived,
        totalUncompressedBytesInArchive,
        totalFilesInArchive));
};
// Removes all characters from the first zero-byte in the string onwards.
// Reads a fixed-width string field from the byte stream and truncates
// it at the first NUL byte (tar header fields are NUL-padded).
var readCleanString = function(bstr, numBytes) {
    var raw = bstr.readString(numBytes);
    var nul = raw.indexOf(String.fromCharCode(0));
    if (nul == -1) {
        return raw;
    }
    return raw.substr(0, nul);
};
// takes a ByteStream and parses out the local file information
// Reads one 512-byte tar header block (plus ustar extension fields
// when present), then captures a regular file's contents as a
// Uint8Array view and advances the stream past its padded blocks.
// Fixes: deduplicated the diff-artifact doubled statements (the
// literal rendered text would have read the header twice) and removed
// the unused local `sizeInBytes`.
var TarLocalFile = function(bstream) {
    this.isValid = false;

    // Read in the header block
    this.name = readCleanString(bstream, 100);
    this.mode = readCleanString(bstream, 8);
    this.uid = readCleanString(bstream, 8);
    this.gid = readCleanString(bstream, 8);
    this.size = parseInt(readCleanString(bstream, 12), 8); // octal field
    this.mtime = readCleanString(bstream, 12);
    this.chksum = readCleanString(bstream, 8);
    this.typeflag = readCleanString(bstream, 1);
    this.linkname = readCleanString(bstream, 100);
    this.maybeMagic = readCleanString(bstream, 6);

    if (this.maybeMagic == "ustar") {
        // POSIX ustar extension fields.
        this.version = readCleanString(bstream, 2);
        this.uname = readCleanString(bstream, 32);
        this.gname = readCleanString(bstream, 32);
        this.devmajor = readCleanString(bstream, 8);
        this.devminor = readCleanString(bstream, 8);
        this.prefix = readCleanString(bstream, 155);

        if (this.prefix.length) {
            this.name = this.prefix + this.name;
        }
        bstream.readBytes(12); // 512 - 500
    } else {
        bstream.readBytes(255); // 512 - 257
    }

    // Done header, now rest of blocks are the file contents.
    this.filename = this.name;
    this.fileData = null;

    info("Untarring file '" + this.filename + "'");
    info(" size = " + this.size);
    info(" typeflag = " + this.typeflag);

    // A regular file.
    if (this.typeflag == 0) {
        info(" This is a regular file.");
        this.fileData = new Uint8Array(bstream.bytes.buffer, bstream.ptr, this.size);
        if (this.name.length > 0 && this.size > 0 && this.fileData && this.fileData.buffer) {
            this.isValid = true;
        }

        bstream.readBytes(this.size);

        // Round up to 512-byte blocks.
        var remaining = 512 - this.size % 512;
        if (remaining > 0 && remaining < 512) {
            bstream.readBytes(remaining);
        }
    } else if (this.typeflag == 5) {
        info(" This is a directory.")
    }
};
// Takes an ArrayBuffer of a tar file in
// returns null on error
// returns an array of DecompressedFile objects on success
// Worker entry point: parses every TarLocalFile until an empty block,
// sorts entries by filename, then posts extract/progress events.
// Fix: deduplicated the diff-artifact sequentially doubled body (the
// literal rendered text would have untarred the buffer twice).
var untar = function(arrayBuffer) {
    currentFilename = "";
    currentFileNumber = 0;
    currentBytesUnarchivedInFile = 0;
    currentBytesUnarchived = 0;
    totalUncompressedBytesInArchive = 0;
    totalFilesInArchive = 0;

    postMessage(new bitjs.archive.UnarchiveStartEvent());
    var bstream = new bitjs.io.ByteStream(arrayBuffer);
    var localFiles = [];

    // While we don't encounter an empty block, keep making TarLocalFiles.
    while (bstream.peekNumber(4) != 0) {
        var oneLocalFile = new TarLocalFile(bstream);
        if (oneLocalFile && oneLocalFile.isValid) {
            localFiles.push(oneLocalFile);
            totalUncompressedBytesInArchive += oneLocalFile.size;
        }
    }
    totalFilesInArchive = localFiles.length;

    // got all local files, now sort them
    localFiles.sort(function(a, b) {
        var aname = a.filename.toLowerCase();
        var bname = b.filename.toLowerCase();
        return aname > bname ? 1 : -1;
    });

    // report # files and total length
    if (localFiles.length > 0) {
        postProgress();
    }

    // now do the shipping of each file
    for (var i = 0; i < localFiles.length; ++i) {
        var localfile = localFiles[i];
        info("Sending file '" + localfile.filename + "' up");

        // update progress
        currentFilename = localfile.filename;
        currentFileNumber = i;
        currentBytesUnarchivedInFile = localfile.size;
        currentBytesUnarchived += localfile.size;

        postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
        postProgress();
    }

    postProgress();
    postMessage(new bitjs.archive.UnarchiveFinishEvent());
};
// event.data.file has the ArrayBuffer.
// Worker message handler: kicks off untarring of the posted buffer.
// Fix: deduplicated the diff-artifact doubled call (the literal
// rendered text would have untarred the buffer twice).
onmessage = function(event) {
    var ab = event.data.file;
    untar(ab);
};
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment