captainwong / douban-api-proxy

Commit a360b175
authored Mar 16, 2019 by Ozzieisaacs

Code cosmetics
bugfix import HttpError from pydrive

parent 765b8173

Showing 5 changed files with 33 additions and 106 deletions:

    cps/admin.py             +1   -1
    cps/helper.py            +1   -1
    cps/static/js/unzip.js   +30  -30
    cps/web.py               +0   -72
    cps/worker.py            +1   -2

cps/admin.py

@@ -676,7 +676,7 @@ def edit_user(user_id):
         elif "edit_shelf_role" not in to_save and content.role_edit_shelfs():
             content.role = content.role - ub.ROLE_EDIT_SHELFS
-        val = [int(k[5:]) for k, v in to_save.items() if k.startswith('show')]
+        val = [int(k[5:]) for k, __ in to_save.items() if k.startswith('show')]
         sidebar = ub.get_sidebar_config()
         for element in sidebar:
             if element['visibility'] in val and not content.check_visibility(element['visibility']):
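Note: the only change in this hunk is replacing the unused loop variable v with the throwaway name __. A minimal, standalone sketch of the same pattern, collecting the numeric ids of submitted "show_*" form keys; the sample to_save dict is invented for illustration and is not part of the repository:

    # Sketch only: pick the numeric suffix of every key starting with "show",
    # ignoring the values (hence "__"), as edit_user() does above.
    to_save = {"show_2": "on", "show_16": "on", "email": "user@example.com"}

    val = [int(k[5:]) for k, __ in to_save.items() if k.startswith('show')]
    print(val)  # [2, 16]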

cps/helper.py

@@ -310,7 +310,7 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
             os.renames(path, new_title_path)
         else:
             app.logger.info("Copying title: " + path + " into existing: " + new_title_path)
-            for dir_name, subdir_list, file_list in os.walk(path):
+            for dir_name, __, file_list in os.walk(path):
                 for file in file_list:
                     os.renames(os.path.join(dir_name, file),
                                os.path.join(new_title_path + dir_name[len(path):], file))
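Note: as in admin.py, the change only renames the unused subdir_list from os.walk to __. A hedged, self-contained sketch of what the surrounding loop does (move every file under path into new_title_path while keeping the relative sub-directories); the two paths are hypothetical examples:

    # Illustration only: mirrors the loop above with made-up paths.
    import os

    path = "/tmp/calibre-old/Some Title"            # hypothetical source tree
    new_title_path = "/tmp/calibre-new/Some Title"  # hypothetical target tree

    for dir_name, __, file_list in os.walk(path):   # sub-directory list unused
        for file in file_list:
            # os.renames creates missing intermediate directories on the target side
            os.renames(os.path.join(dir_name, file),
                       os.path.join(new_title_path + dir_name[len(path):], file))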

cps/static/js/unzip.js

@@ -47,7 +47,7 @@ var zDigitalSignatureSignature = 0x05054b50;
 // takes a ByteStream and parses out the local file information
 var ZipLocalFile = function(bstream) {
-    if (typeof bstream != typeof {} || !bstream.readNumber || typeof bstream.readNumber != typeof function() {}) {
+    if (typeof bstream !== typeof {} || !bstream.readNumber || typeof bstream.readNumber !== typeof function() {}) {
         return null;
     }
@@ -98,7 +98,7 @@ var ZipLocalFile = function(bstream) {
     // "This descriptor exists only if bit 3 of the general purpose bit flag is set"
     // But how do you figure out how big the file data is if you don't know the compressedSize
     // from the header?!?
-    if ((this.generalPurpose & bitjs.BIT[3]) != 0) {
+    if ((this.generalPurpose & bitjs.BIT[3]) !== 0) {
         this.crc32 = bstream.readNumber(4);
         this.compressedSize = bstream.readNumber(4);
         this.uncompressedSize = bstream.readNumber(4);
@@ -109,14 +109,14 @@ var ZipLocalFile = function(bstream) {
 ZipLocalFile.prototype.unzip = function() {
     // Zip Version 1.0, no compression (store only)
-    if (this.compressionMethod == 0) {
+    if (this.compressionMethod === 0) {
         info("ZIP v" + this.version + ", store only: " + this.filename + " (" + this.compressedSize + " bytes)");
         currentBytesUnarchivedInFile = this.compressedSize;
         currentBytesUnarchived += this.compressedSize;
         this.fileData = zeroCompression(this.fileData, this.uncompressedSize);
     }
     // version == 20, compression method == 8 (DEFLATE)
-    else if (this.compressionMethod == 8) {
+    else if (this.compressionMethod === 8) {
         info("ZIP v2.0, DEFLATE: " + this.filename + " (" + this.compressedSize + " bytes)");
         this.fileData = inflate(this.fileData, this.uncompressedSize);
     }
@@ -143,10 +143,10 @@ var unzip = function(arrayBuffer) {
     var bstream = new bitjs.io.ByteStream(arrayBuffer);
     // detect local file header signature or return null
-    if (bstream.peekNumber(4) == zLocalFileHeaderSignature) {
+    if (bstream.peekNumber(4) === zLocalFileHeaderSignature) {
         var localFiles = [];
         // loop until we don't see any more local files
-        while (bstream.peekNumber(4) == zLocalFileHeaderSignature) {
+        while (bstream.peekNumber(4) === zLocalFileHeaderSignature) {
             var oneLocalFile = new ZipLocalFile(bstream);
             // this should strip out directories/folders
             if (oneLocalFile && oneLocalFile.uncompressedSize > 0 && oneLocalFile.fileData) {
@@ -164,7 +164,7 @@ var unzip = function(arrayBuffer) {
         });
         // archive extra data record
-        if (bstream.peekNumber(4) == zArchiveExtraDataSignature) {
+        if (bstream.peekNumber(4) === zArchiveExtraDataSignature) {
             info(" Found an Archive Extra Data Signature");
             // skipping this record for now
@@ -175,7 +175,7 @@ var unzip = function(arrayBuffer) {
         // central directory structure
         // TODO: handle the rest of the structures (Zip64 stuff)
-        if (bstream.peekNumber(4) == zCentralFileHeaderSignature) {
+        if (bstream.peekNumber(4) === zCentralFileHeaderSignature) {
             info(" Found a Central File Header");
             // read all file headers
@@ -205,7 +205,7 @@ var unzip = function(arrayBuffer) {
         }
         // digital signature
-        if (bstream.peekNumber(4) == zDigitalSignatureSignature) {
+        if (bstream.peekNumber(4) === zDigitalSignatureSignature) {
             info(" Found a Digital Signature");
             bstream.readNumber(4);
@@ -230,7 +230,7 @@ var unzip = function(arrayBuffer) {
             // actually do the unzipping
             localfile.unzip();
-            if (localfile.fileData != null) {
+            if (localfile.fileData !== null) {
                 postMessage(new bitjs.archive.UnarchiveExtractEvent(localfile));
                 postProgress();
             }
@@ -245,7 +245,7 @@ var unzip = function(arrayBuffer) {
 // containing {length: 6, symbol: X}
 function getHuffmanCodes(bitLengths) {
     // ensure bitLengths is an array containing at least one element
-    if (typeof bitLengths != typeof [] || bitLengths.length < 1) {
+    if (typeof bitLengths !== typeof [] || bitLengths.length < 1) {
         err("Error! getHuffmanCodes() called with an invalid array");
         return null;
     }
@@ -259,7 +259,7 @@ function getHuffmanCodes(bitLengths) {
     for (var i = 0; i < numLengths; ++i) {
         var length = bitLengths[i];
         // test to ensure each bit length is a positive, non-zero number
-        if (typeof length != typeof 1 || length < 0) {
+        if (typeof length !== typeof 1 || length < 0) {
             err("bitLengths contained an invalid number in getHuffmanCodes(): " + length + " of type " + (typeof length));
             return null;
         }
@@ -275,9 +275,9 @@ function getHuffmanCodes(bitLengths) {
     var nextCode = [],
         code = 0;
     for (var bits = 1; bits <= MAX_BITS; ++bits) {
-        var length = bits - 1;
+        var length2 = bits - 1;
         // ensure undefined lengths are zero
-        if (blCount[length] == undefined) blCount[length] = 0;
+        if (blCount[length2] == undefined) blCount[length2] = 0;
         code = (code + blCount[bits - 1]) << 1;
         nextCode[bits] = code;
     }
@@ -286,7 +286,7 @@ function getHuffmanCodes(bitLengths) {
     var table = {}, tableLength = 0;
     for (var n = 0; n < numLengths; ++n) {
         var len = bitLengths[n];
-        if (len != 0) {
+        if (len !== 0) {
             table[nextCode[len]] = { length: len, symbol: n }; //, bitstring: binaryValueToString(nextCode [len],len) };
             tableLength++;
             nextCode[len]++;
@@ -358,7 +358,7 @@ function decodeSymbol(bstream, hcTable) {
         ++len;
         // check against Huffman Code table and break if found
-        if (hcTable.hasOwnProperty(code) && hcTable[code].length == len) {
+        if (hcTable.hasOwnProperty(code) && hcTable[code].length === len) {
             break;
         }
@@ -457,7 +457,7 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
         }
         else {
             // end of block reached
-            if (symbol == 256) {
+            if (symbol === 256) {
                 break;
             }
             else {
@@ -485,7 +485,7 @@ function inflateBlockData(bstream, hcLiteralTable, hcDistanceTable, buffer) {
                     buffer.insertByte(data[ch++]);
                 }
             } else {
                 buffer.insertBytes(buffer.data.subarray(ch, ch + length));
             }
         }
         // length-distance pair
@@ -514,12 +514,13 @@ function inflate(compressedData, numDecompressedBytes) {
                                           compressedData.byteOffset,
                                           compressedData.byteLength);
     var buffer = new bitjs.io.ByteBuffer(numDecompressedBytes);
-    var numBlocks = 0, blockSize = 0;
+    var blockSize = 0;
     // block format: http://tools.ietf.org/html/rfc1951#page-9
+    var bFinal = 0;
     do {
-        var bFinal = bstream.readBits(1),
-            bType = bstream.readBits(2);
+        bFinal = bstream.readBits(1);
+        var bType = bstream.readBits(2);
         blockSize = 0;
         ++numBlocks;
         // no compression
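Note: besides the == to === changes, this hunk hoists bFinal out of the do/while loop so the loop condition can still read it. A rough Python sketch of the same block-header loop (read BFINAL, then BTYPE, per DEFLATE block as in RFC 1951); the BitReader helper and the one-byte sample stream are invented for the example and are not part of the repository:

    # Illustrative only: an LSB-first bit reader and the DEFLATE block loop
    # that inflate() in unzip.js implements.
    class BitReader:
        def __init__(self, data):
            self.data = data
            self.pos = 0  # bit position

        def read_bits(self, n):
            value = 0
            for i in range(n):
                byte = self.data[self.pos // 8]
                value |= ((byte >> (self.pos % 8)) & 1) << i
                self.pos += 1
            return value

    # One stored (uncompressed) final block header: BFINAL=1, BTYPE=00.
    stream = BitReader(bytes([0b00000001]))

    bfinal = 0
    while True:
        bfinal = stream.read_bits(1)   # 1 on the last block
        btype = stream.read_bits(2)    # 0=stored, 1=fixed Huffman, 2=dynamic
        print("block: bfinal=%d btype=%d" % (bfinal, btype))
        if bfinal == 1:                # mirrors "} while (bFinal !== 1)" in unzip.js
            break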
@@ -528,23 +529,23 @@ function inflate(compressedData, numDecompressedBytes) {
         while (bstream.bitPtr != 0) bstream.readBits(1);
         var len = bstream.readBits(16);
         bstream.readBits(16);
         // TODO: check if nlen is the ones-complement of len?
         if (len > 0) buffer.insertBytes(bstream.readBytes(len));
         blockSize = len;
     }
     // fixed Huffman codes
     else if (bType == 1) {
         blockSize = inflateBlockData(bstream, getFixedLiteralTable(), getFixedDistanceTable(), buffer);
     }
     // dynamic Huffman codes
     else if (bType == 2) {
         var numLiteralLengthCodes = bstream.readBits(5) + 257;
         var numDistanceCodes = bstream.readBits(5) + 1,
             numCodeLengthCodes = bstream.readBits(4) + 4;
         // populate the array of code length codes (first de-compaction)
         var codeLengthsCodeLengths = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
         for (var i = 0; i < numCodeLengthCodes; ++i) {
             codeLengthsCodeLengths[CodeLengthCodeOrder[i]] = bstream.readBits(3);
         }
@@ -576,18 +577,17 @@ function inflate(compressedData, numDecompressedBytes) {
             literalCodeLengths.push(symbol);
             prevCodeLength = symbol;
         }
-        else if (symbol == 16) {
+        else if (symbol === 16) {
             var repeat = bstream.readBits(2) + 3;
             while (repeat--) {
                 literalCodeLengths.push(prevCodeLength);
             }
         }
-        else if (symbol == 17) {
+        else if (symbol === 17) {
             var repeat1 = bstream.readBits(3) + 3;
             while (repeat1--) {
                 literalCodeLengths.push(0);
             }
         }
-        else if (symbol == 18) {
+        else if (symbol === 18) {
             var repeat2 = bstream.readBits(7) + 11;
             while (repeat2--) {
                 literalCodeLengths.push(0);
@@ -613,7 +613,7 @@ function inflate(compressedData, numDecompressedBytes) {
         currentBytesUnarchived += blockSize;
         postProgress();
-    } while (bFinal != 1);
+    } while (bFinal !== 1);
     // we are done reading blocks if the bFinal bit was set for this block
     // return the buffer data bytes

cps/web.py

@@ -62,11 +62,6 @@ try:
 except ImportError:
     feature_support['ldap'] = False
-try:
-    from googleapiclient.errors import HttpErrort
-except ImportError:
-    pass
 try:
     from goodreads.client import GoodreadsClient
     feature_support['goodreads'] = True
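Note: the deleted block is the one the commit message refers to ("bugfix import HttpError from pydrive"); it guarded an HttpError import that web.py itself never used. The surrounding code keeps using the same optional-import pattern, sketched below; the 'gdrive' feature key is illustrative and not taken from this diff:

    # Sketch of the optional-dependency guard used throughout cps/web.py:
    # try the third-party import and record availability instead of letting
    # a missing package crash the application at startup.
    feature_support = {}

    try:
        from googleapiclient.errors import HttpError
        feature_support['gdrive'] = True
    except ImportError:
        feature_support['gdrive'] = False

    print(feature_support)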
@@ -540,39 +535,6 @@ def books_list(data,sort, page):
                                      title=_(u"Books"), page="newest")
-'''
-@web.route("/hot", defaults={'page': 1})
-@web.route('/hot/page/<int:page>')
-@login_required_if_no_ano
-def hot_books(page):
-
-@web.route("/rated", defaults={'page': 1})
-@web.route('/rated/page/<int:page>')
-@login_required_if_no_ano
-def best_rated_books(page):
-    if current_user.check_visibility(ub.SIDEBAR_BEST_RATED):
-        entries, random, pagination = fill_indexpage(page, db.Books, db.Books.ratings.any(db.Ratings.rating > 9),
-                                                     [db.Books.timestamp.desc()])
-        return render_title_template('index.html', random=random, entries=entries, pagination=pagination,
-                                     title=_(u"Best rated books"), page="rated")
-    else:
-        abort(404)
-
-@web.route("/discover", defaults={'page': 1})
-@web.route('/discover/page/<int:page>')
-@login_required_if_no_ano
-def discover(page):
-    if current_user.check_visibility(ub.SIDEBAR_RANDOM):
-        entries, __, pagination = fill_indexpage(page, db.Books, True, [func.randomblob(2)])
-        pagination = Pagination(1, config.config_books_per_page, config.config_books_per_page)
-        return render_title_template('discover.html', entries=entries, pagination=pagination,
-                                     title=_(u"Random Books"), page="discover")
-    else:
-        abort(404)'''
 @web.route("/author")
 @login_required_if_no_ano
 def author_list():
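Note: the block removed here was already dead code, commented out inside a triple-quoted string (the old /hot, /rated and /discover views). For reference, the route wiring they used is the standard Flask defaulted-page pattern; a minimal sketch with a placeholder handler body (the real views call fill_indexpage() and render_title_template()):

    # Minimal Flask sketch of the "/rated" plus "/rated/page/<n>" pagination
    # pattern seen in the removed block; the handler body is a placeholder.
    from flask import Flask

    app = Flask(__name__)

    @app.route("/rated", defaults={'page': 1})
    @app.route("/rated/page/<int:page>")
    def best_rated_books(page):
        return "Best rated books, page %d" % page

    if __name__ == "__main__":
        app.run()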
@@ -843,16 +805,6 @@ def search():
 def advanced_search():
     # Build custom columns names
     cc = helper.get_cc_columns()
-    '''tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
-    if config.config_columns_to_ignore:
-        cc = []
-        for col in tmpcc:
-            r = re.compile(config.config_columns_to_ignore)
-            if r.match(col.label):
-                cc.append(col)
-    else:
-        cc = tmpcc'''
     db.session.connection().connection.connection.create_function("lower", 1, db.lcase)
     q = db.session.query(db.Books)
@@ -988,20 +940,6 @@ def advanced_search():
                                  series=series, title=_(u"search"), cc=cc, page="advsearch")
-'''@web.route("/unreadbooks/", defaults={'page': 1})
-@web.route("/unreadbooks/<int:page>'")
-@login_required_if_no_ano
-def unread_books(page):
-    return render_read_books(page, False)
-
-@web.route("/readbooks/", defaults={'page': 1})
-@web.route("/readbooks/<int:page>'")
-@login_required_if_no_ano
-def read_books(page):
-    return render_read_books(page, True)'''
 def render_read_books(page, are_read, as_xml=False, order=[]):
     if not config.config_read_column:
         readBooks = ub.session.query(ub.ReadBook).filter(ub.ReadBook.user_id == int(current_user.id))\
@@ -1426,16 +1364,6 @@ def show_book(book_id):
             entries.languages[index].language_name = _(
                 isoLanguages.get(part3=entries.languages[index].lang_code).name)
         cc = helper.get_cc_columns()
-        '''tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
-        if config.config_columns_to_ignore:
-            cc = []
-            for col in tmpcc:
-                r = re.compile(config.config_columns_to_ignore)
-                if r.match(col.label):
-                    cc.append(col)
-        else:
-            cc = tmpcc'''
         book_in_shelfs = []
         shelfs = ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).all()
         for entry in shelfs:

cps/worker.py

@@ -533,5 +533,4 @@ class StderrLogger(object):
             else:
                 self.buffer += message
         except:
-            pass
+            self.logger.debug("Logging Error")
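Note: the worker change swaps the silent pass in StderrLogger's exception handler for a debug log line. A compact, standalone approximation of such a stderr-style logger, assuming the same newline-flushing behaviour as the class in the hunk (this is a sketch, not the full cps/worker.py implementation):

    # Sketch only: a file-like object that buffers writes and flushes each
    # completed line to a logger, logging instead of swallowing its own errors.
    import logging

    logging.basicConfig(level=logging.DEBUG)


    class StderrLogger(object):
        def __init__(self):
            self.logger = logging.getLogger('cps.web')
            self.buffer = ''

        def write(self, message):
            try:
                if message == '\n':
                    self.logger.debug(self.buffer)
                    self.buffer = ''
                else:
                    self.buffer += message
            except Exception:
                self.logger.debug("Logging Error")


    log_stream = StderrLogger()
    log_stream.write("something went wrong")
    log_stream.write("\n")   # flushes the buffered line via logger.debug()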