captainwong / douban-api-proxy / Commits / 24c743d2

Commit 24c743d2
authored Apr 19, 2020 by Ozzieisaacs
Code cosmetics
parent 7bb5afa5
Showing 15 changed files with 262 additions and 192 deletions
cps/cli.py                  +1   -1
cps/db.py                   +1   -1
cps/helper.py               +81  -55
cps/jinjia.py               +5   -1
cps/kobo.py                 +12  -5
cps/logger.py               +1   -1
cps/oauth.py                +2   -2
cps/opds.py                 +43  -21
cps/server.py               +2   -2
cps/services/SyncToken.py   +7   -5
cps/shelf.py                +32  -27
cps/subproc_wrapper.py      +5   -5
cps/templates/list.html     +6   -6
cps/ub.py                   +59  -55
cps/updater.py              +5   -5
cps/cli.py  View file @ 24c743d2

cps/db.py  View file @ 24c743d2
...
@@ -25,7 +25,7 @@ import ast
from sqlalchemy import create_engine
from sqlalchemy import Table, Column, ForeignKey
- from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float
+ from sqlalchemy import String, Integer, Boolean, TIMESTAMP, Float, DateTime
from sqlalchemy.orm import relationship, sessionmaker, scoped_session
from sqlalchemy.ext.declarative import declarative_base
...
cps/helper.py  View file @ 24c743d2
...
@@ -141,36 +141,52 @@ def check_send_to_kindle(entry):
        returns all available book formats for sending to Kindle
    """
    if len(entry.data):
        bookformats = list()
        if config.config_ebookconverter == 0:
            # no converter - only for mobi and pdf formats
            for ele in iter(entry.data):
                if 'MOBI' in ele.format:
                    bookformats.append({'format': 'Mobi', 'convert': 0, 'text': _('Send %(format)s to Kindle', format='Mobi')})
                if 'PDF' in ele.format:
                    bookformats.append({'format': 'Pdf', 'convert': 0, 'text': _('Send %(format)s to Kindle', format='Pdf')})
                if 'AZW' in ele.format:
                    bookformats.append({'format': 'Azw', 'convert': 0, 'text': _('Send %(format)s to Kindle', format='Azw')})
-                '''if 'AZW3' in ele.format:
-                    bookformats.append({'format':'Azw3','convert':0,'text':_('Send %(format)s to Kindle',format='Azw3')})'''
        else:
            formats = list()
            for ele in iter(entry.data):
                formats.append(ele.format)
            if 'MOBI' in formats:
                bookformats.append({'format': 'Mobi', 'convert': 0, 'text': _('Send %(format)s to Kindle', format='Mobi')})
            if 'AZW' in formats:
                bookformats.append({'format': 'Azw', 'convert': 0, 'text': _('Send %(format)s to Kindle', format='Azw')})
            if 'PDF' in formats:
                bookformats.append({'format': 'Pdf', 'convert': 0, 'text': _('Send %(format)s to Kindle', format='Pdf')})
            if config.config_ebookconverter >= 1:
                if 'EPUB' in formats and not 'MOBI' in formats:
                    bookformats.append({'format': 'Mobi', 'convert': 1, 'text': _('Convert %(orig)s to %(format)s and send to Kindle', orig='Epub', format='Mobi')})
            if config.config_ebookconverter == 2:
                if 'AZW3' in formats and not 'MOBI' in formats:
                    bookformats.append({'format': 'Mobi', 'convert': 2, 'text': _('Convert %(orig)s to %(format)s and send to Kindle', orig='Azw3', format='Mobi')})
        return bookformats
    else:
        log.error(u'Cannot find book entry %d', entry.id)
...
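A note for readers of the check_send_to_kindle hunk above: each list entry pairs a target format with a 'convert' flag (0 = send the existing file as-is, 1 = convert from Epub first, 2 = convert from Azw3 first). The sketch below only illustrates the shape of that return value — the sample entry and the pick_sendable helper are hypothetical and not part of the commit:

# Hypothetical shape of the list for a book that only has an EPUB file while a
# converter is configured (config_ebookconverter >= 1): one Epub->Mobi entry.
expected = [
    {'format': 'Mobi', 'convert': 1,
     'text': 'Convert Epub to Mobi and send to Kindle'},
]

def pick_sendable(bookformats, preferred='Mobi'):
    # Illustrative helper: pick the first entry matching a preferred target format.
    return next((f for f in bookformats if f['format'] == preferred), None)

print(pick_sendable(expected))  # -> the Epub->Mobi entry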
@@ -204,7 +220,6 @@ def send_mail(book_id, book_format, convert, kindle_mail, calibrepath, user_id):
            # returns None if success, otherwise errormessage
            return convert_book_format(book_id, calibrepath, u'azw3', book_format.lower(), user_id, kindle_mail)
    for entry in iter(book.data):
        if entry.format.upper() == book_format.upper():
            converted_file_name = entry.name + '.' + book_format.lower()
...
@@ -395,7 +410,7 @@ def update_dir_structure_gdrive(book_id, first_author):
def delete_book_gdrive(book, book_format):
    error = False
    if book_format:
        name = ''
        for entry in book.data:
...
@@ -403,12 +418,12 @@ def delete_book_gdrive(book, book_format):
                name = entry.name + '.' + book_format
        gFile = gd.getFileFromEbooksFolder(book.path, name)
    else:
        gFile = gd.getFileFromEbooksFolder(os.path.dirname(book.path), book.path.split('/')[1])
    if gFile:
        gd.deleteDatabaseEntry(gFile['id'])
        gFile.Trash()
    else:
        error = _(u'Book path %(path)s not found on Google Drive', path=book.path)  # file not found
    return error
...
@@ -417,24 +432,25 @@ def reset_password(user_id):
    password = generate_random_password()
    existing_user.password = generate_password_hash(password)
    if not config.get_mail_server_configured():
-        return (2, None)
+        return 2, None
    try:
        ub.session.commit()
        send_registration_mail(existing_user.email, existing_user.nickname, password, True)
-        return (1, existing_user.nickname)
+        return 1, existing_user.nickname
    except Exception:
        ub.session.rollback()
-        return (0, None)
+        return 0, None


def generate_random_password():
    s = "abcdefghijklmnopqrstuvwxyz01234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ!@#$%&*()?"
    passlen = 8
    return "".join(random.sample(s, passlen))


################################## External interface

def update_dir_stucture(book_id, calibrepath, first_author=None):
    if config.config_use_google_drive:
        return update_dir_structure_gdrive(book_id, first_author)
    else:
...
@@ -454,15 +470,18 @@ def get_cover_on_failure(use_generic_cover):
    else:
        return None


def get_book_cover(book_id):
    book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
    return get_book_cover_internal(book, use_generic_cover_on_failure=True)


def get_book_cover_with_uuid(book_uuid, use_generic_cover_on_failure=True):
    book = db.session.query(db.Books).filter(db.Books.uuid == book_uuid).first()
    return get_book_cover_internal(book, use_generic_cover_on_failure)


def get_book_cover_internal(book, use_generic_cover_on_failure):
    if book and book.has_cover:
...
@@ -470,7 +489,7 @@ def get_book_cover_internal(book, use_generic_cover_on_failure):
        try:
            if not gd.is_gdrive_ready():
                return get_cover_on_failure(use_generic_cover_on_failure)
            path = gd.get_cover_via_gdrive(book.path)
            if path:
                return redirect(path)
            else:
...
@@ -530,7 +549,7 @@ def save_cover(img, book_path):
        return False, _("Only jpg/jpeg/png/webp files are supported as coverfile")
    # convert to jpg because calibre only supports jpg
    if content_type in ('image/png', 'image/webp'):
        if hasattr(img, 'stream'):
            imgc = PILImage.open(img.stream)
        else:
            imgc = PILImage.open(io.BytesIO(img.content))
...
@@ -539,7 +558,7 @@ def save_cover(img, book_path):
        im.save(tmp_bytesio, format='JPEG')
        img._content = tmp_bytesio.getvalue()
    else:
-        if content_type not in ('image/jpeg'):
+        if content_type not in 'image/jpeg':
            log.error("Only jpg/jpeg files are supported as coverfile")
            return False, _("Only jpg/jpeg files are supported as coverfile")
...
@@ -557,7 +576,6 @@ def save_cover(img, book_path):
        return save_cover_from_filestorage(os.path.join(config.config_calibre_dir, book_path), "cover.jpg", img)


def do_download_file(book, book_format, data, headers):
    if config.config_use_google_drive:
        startTime = time.time()
...
@@ -579,7 +597,6 @@ def do_download_file(book, book_format, data, headers):
##################################


def check_unrar(unrarLocation):
    if not unrarLocation:
        return
...
@@ -601,13 +618,12 @@ def check_unrar(unrarLocation):
    return 'Error excecuting UnRar'


def json_serial(obj):
    """JSON serializer for objects not serializable by default json code"""

-    if isinstance(obj, (datetime)):
+    if isinstance(obj, datetime):
        return obj.isoformat()
-    if isinstance(obj, (timedelta)):
+    if isinstance(obj, timedelta):
        return {
            '__type__': 'timedelta',
            'days': obj.days,
...
@@ -615,7 +631,7 @@ def json_serial(obj):
            'microseconds': obj.microseconds,
        }
    # return obj.isoformat()
    raise TypeError("Type %s not serializable" % type(obj))


# helper function for displaying the runtime of tasks
...
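The json_serial helper in the hunk above is the usual hook passed to the standard-library json module via its default= parameter. A self-contained usage sketch (the payload values are invented, and the 'seconds' field is included here only so the sketch round-trips a full timedelta):

import datetime
import json

def json_serial(obj):
    # Same logic as the helper in the hunk above: datetimes become ISO strings,
    # timedeltas become a tagged dict; anything else is rejected.
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    if isinstance(obj, datetime.timedelta):
        return {'__type__': 'timedelta', 'days': obj.days,
                'seconds': obj.seconds, 'microseconds': obj.microseconds}
    raise TypeError("Type %s not serializable" % type(obj))

payload = {'started': datetime.datetime(2020, 4, 19, 12, 0),
           'runtime': datetime.timedelta(minutes=3)}
print(json.dumps(payload, default=json_serial))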
@@ -637,7 +653,7 @@ def format_runtime(runtime):
# helper function to apply localize status information in tasklist entries
def render_task_status(tasklist):
    renderedtasklist = list()
    for task in tasklist:
        if task['user'] == current_user.nickname or current_user.role_admin():
            if task['formStarttime']:
...
@@ -653,7 +669,7 @@ def render_task_status(tasklist):
                task['runtime'] = format_runtime(task['formRuntime'])

            # localize the task status
            if isinstance(task['stat'], int):
                if task['stat'] == STAT_WAITING:
                    task['status'] = _(u'Waiting')
                elif task['stat'] == STAT_FAIL:
...
@@ -666,7 +682,7 @@ def render_task_status(tasklist):
                task['status'] = _(u'Unknown Status')

            # localize the task type
            if isinstance(task['taskType'], int):
                if task['taskType'] == TASK_EMAIL:
                    task['taskMessage'] = _(u'E-mail: ') + task['taskMess']
                elif task['taskType'] == TASK_CONVERT:
...
@@ -733,7 +749,8 @@ def tags_filters():
# Creates for all stored languages a translated speaking name in the array for the UI
def speaking_language(languages=None):
    if not languages:
-        languages = db.session.query(db.Languages).join(db.books_languages_link).join(db.Books).filter(common_filters())\
+        languages = db.session.query(db.Languages).join(db.books_languages_link).join(db.Books)\
+            .filter(common_filters())\
            .group_by(text('books_languages_link.lang_code')).all()
    for lang in languages:
        try:
...
@@ -743,6 +760,7 @@ def speaking_language(languages=None):
            lang.name = _(isoLanguages.get(part3=lang.lang_code).name)
    return languages


# checks if domain is in database (including wildcards)
# example SELECT * FROM @TABLE WHERE 'abcdefg' LIKE Name;
# from https://code.luasoftware.com/tutorials/flask/execute-raw-sql-in-flask-sqlalchemy/
...
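The comment block above describes the reversed-LIKE trick: the stored column holds the pattern (possibly containing % wildcards) and the concrete value being tested is the left-hand operand of LIKE. A minimal, self-contained SQLite sketch of that idea — the table layout and sample values are illustrative, not calibre-web's actual schema:

import sqlite3

con = sqlite3.connect(":memory:")
con.execute("CREATE TABLE registration (domain TEXT)")
# Stored entries are patterns: a plain address and a wildcard rule.
con.executemany("INSERT INTO registration VALUES (?)",
                [("alice@example.com",), ("%@example.org",)])

def domain_allowed(address):
    # Reversed LIKE: the candidate value is matched against each stored pattern.
    row = con.execute("SELECT 1 FROM registration WHERE ? LIKE domain", (address,)).fetchone()
    return row is not None

print(domain_allowed("bob@example.org"))  # True  (matches %@example.org)
print(domain_allowed("bob@example.net"))  # False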
@@ -787,21 +805,25 @@ def fill_indexpage_with_archived_books(page, database, db_filter, order, allow_s
        randm = false()
    off = int(int(config.config_books_per_page) * (page - 1))
    pagination = Pagination(page, config.config_books_per_page,
                            len(db.session.query(database).filter(db_filter).filter(common_filters(allow_show_archived)).all()))
-    entries = db.session.query(database).join(*join, isouter=True).filter(db_filter).filter(common_filters(allow_show_archived)).\
-        order_by(*order).offset(off).limit(config.config_books_per_page).all()
+    entries = db.session.query(database).join(*join, isouter=True).filter(db_filter)\
+        .filter(common_filters(allow_show_archived))\
+        .order_by(*order).offset(off).limit(config.config_books_per_page).all()
    for book in entries:
        book = order_authors(book)
    return entries, randm, pagination


def get_typeahead(database, query, replace=('', ''), tag_filter=true()):
    query = query or ''
    db.session.connection().connection.connection.create_function("lower", 1, lcase)
-    entries = db.session.query(database).filter(tag_filter).filter(func.lower(database.name).ilike("%" + query + "%")).all()
+    entries = db.session.query(database).filter(tag_filter).\
+        filter(func.lower(database.name).ilike("%" + query + "%")).all()
    json_dumps = json.dumps([dict(name=r.name.replace(*replace)) for r in entries])
    return json_dumps


# read search results from calibre-database and return it (function is used for feed and simple search
def get_search_results(term):
    db.session.connection().connection.connection.create_function("lower", 1, lcase)
...
@@ -820,6 +842,7 @@ def get_search_results(term):
            func.lower(db.Books.title).ilike("%" + term + "%")
        )).all()


def get_cc_columns():
    tmpcc = db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
    if config.config_columns_to_ignore:
...
@@ -832,6 +855,7 @@ def get_cc_columns():
        cc = tmpcc
    return cc


def get_download_link(book_id, book_format):
    book_format = book_format.split(".")[0]
    book = db.session.query(db.Books).filter(db.Books.id == book_id).filter(common_filters()).first()
...
@@ -856,7 +880,8 @@ def get_download_link(book_id, book_format):
    else:
        abort(404)


def check_exists_book(authr, title):
    db.session.connection().connection.connection.create_function("lower", 1, lcase)
    q = list()
    authorterms = re.split(r'\s*&\s*', authr)
...
@@ -870,6 +895,7 @@ def check_exists_book(authr,title):
############### Database Helper functions


def lcase(s):
    try:
        return unidecode.unidecode(s.lower())
...
cps/jinjia.py  View file @ 24c743d2
...
@@ -80,9 +80,13 @@ def formatdate_filter(val):
        formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d%H%M%S")
        return format_date(formatdate, format='medium', locale=get_locale())
    except AttributeError as e:
        log.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname)
        return formatdate


@jinjia.app_template_filter('formatdateinput')
def format_date_input(val):
    conformed_timestamp = re.sub(r"[:]|([-](?!((\d{2}[:]\d{2})|(\d{4}))$))", '', val)
...
cps/kobo.py  View file @ 24c743d2
...
@@ -385,7 +385,7 @@ def get_metadata(book):
        name = get_series(book)
        metadata["Series"] = {
            "Name": get_series(book),
            "Number": book.series_index,  # ToDo Check int() ?
            "NumberFloat": float(book.series_index),
            # Get a deterministic id based on the series name.
            "Id": uuid.uuid3(uuid.NAMESPACE_DNS, name),
...
@@ -407,8 +407,10 @@ def HandleTagCreate():
        log.debug("Received malformed v1/library/tags request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Name' or 'Items' field")

    # ToDO: Names are not unique ! -> filter only private shelfs
-    shelf = ub.session.query(ub.Shelf).filter(and_(ub.Shelf.name) == name, ub.Shelf.user_id == current_user.id).one_or_none()
+    shelf = ub.session.query(ub.Shelf).filter(and_(ub.Shelf.name) == name, ub.Shelf.user_id ==
+                                              current_user.id).one_or_none()
+    # ToDo: shouldn't it ) at the end
    if shelf and not shelf_lib.check_shelf_edit_permissions(shelf):
        abort(401, description="User is unauthaurized to edit shelf.")
...
@@ -517,6 +519,7 @@ def HandleTagRemoveItem(tag_id):
        log.debug("Received malformed v1/library/tags/<tag_id>/items/delete request.")
        abort(400, description="Malformed tags POST request. Data is missing 'Items' field")

    # insconsitent to above requests
    shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.uuid == tag_id, ub.Shelf.user_id == current_user.id).one_or_none()
    if not shelf:
...
@@ -552,7 +555,8 @@ def HandleTagRemoveItem(tag_id):
def sync_shelves(sync_token, sync_results):
    new_tags_last_modified = sync_token.tags_last_modified

    for shelf in ub.session.query(ub.ShelfArchive).filter(func.datetime(ub.ShelfArchive.last_modified) > sync_token.tags_last_modified, ub.ShelfArchive.user_id == current_user.id):
        new_tags_last_modified = max(shelf.last_modified, new_tags_last_modified)

        sync_results.append({
...
@@ -564,7 +568,8 @@ def sync_shelves(sync_token, sync_results):
            }
        })

    for shelf in ub.session.query(ub.Shelf).filter(func.datetime(ub.Shelf.last_modified) > sync_token.tags_last_modified, ub.Shelf.user_id == current_user.id):
        if not shelf_lib.check_shelf_view_permissions(shelf):
            continue
...
@@ -600,6 +605,7 @@ def create_kobo_tag(shelf):
        book = db.session.query(db.Books).filter(db.Books.id == book_shelf.book_id).one_or_none()
        if not book:
            log.info(u"Book (id: %s) in BookShelf (id: %s) not found in book database", book_shelf.book_id, shelf.id)
            # ToDo shouldn't it continue?
            return None
        tag["Items"].append(
            {
...
@@ -769,7 +775,8 @@ def HandleCoverImageRequest(book_uuid, width, height,Quality, isGreyscale):
                                height=height), 307)
        else:
            log.debug("Cover for unknown book: %s requested" % book_uuid)
-            return redirect_or_proxy_request()
+            # additional proxy request make no sense, -> direct return
+            return make_response(jsonify({}))

    log.debug("Cover request received for book %s" % book_uuid)
    return book_cover
...
cps/logger.py  View file @ 24c743d2

cps/oauth.py  View file @ 24c743d2
cps/opds.py  View file @ 24c743d2
...
@@ -56,8 +56,8 @@ def requires_basic_auth_if_no_ano(f):
    return decorated


- class FeedObject():
-     def __init__(self, rating_id, rating_name):
+ class FeedObject:
+     def __init__(self, rating_id, rating_name):
        self.rating_id = rating_id
        self.rating_name = rating_name
...
@@ -119,7 +119,8 @@ def feed_discover():
def feed_best_rated():
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.ratings.any(db.Ratings.rating > 9), [db.Books.timestamp.desc()])
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.ratings.any(db.Ratings.rating > 9), [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
...
@@ -153,7 +154,8 @@ def feed_hot():
def feed_authorindex():
    off = request.args.get("offset") or 0
    entries = db.session.query(db.Authors).join(db.books_authors_link).join(db.Books).filter(common_filters())\
-        .group_by(text('books_authors_link.author')).order_by(db.Authors.sort).limit(config.config_books_per_page).offset(off)
+        .group_by(text('books_authors_link.author')).order_by(db.Authors.sort).limit(config.config_books_per_page)\
+        .offset(off)
    pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page, len(db.session.query(db.Authors).all()))
    return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_author', pagination=pagination)
...
@@ -164,7 +166,9 @@ def feed_authorindex():
def feed_author(book_id):
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.authors.any(db.Authors.id == book_id), [db.Books.timestamp.desc()])
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.authors.any(db.Authors.id == book_id), [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
...
@@ -173,7 +177,8 @@ def feed_author(book_id):
def feed_publisherindex():
    off = request.args.get("offset") or 0
    entries = db.session.query(db.Publishers).join(db.books_publishers_link).join(db.Books).filter(common_filters())\
-        .group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort).limit(config.config_books_per_page).offset(off)
+        .group_by(text('books_publishers_link.publisher')).order_by(db.Publishers.sort)\
+        .limit(config.config_books_per_page).offset(off)
    pagination = Pagination((int(off) / (int(config.config_books_per_page)) + 1), config.config_books_per_page, len(db.session.query(db.Publishers).all()))
    return render_xml_template('feed.xml', listelements=entries, folder='opds.feed_publisher', pagination=pagination)
...
@@ -184,7 +189,8 @@ def feed_publisherindex():
def feed_publisher(book_id):
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.publishers.any(db.Publishers.id == book_id),
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.publishers.any(db.Publishers.id == book_id),
                                             [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
...
@@ -205,7 +211,9 @@ def feed_categoryindex():
def feed_category(book_id):
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.tags.any(db.Tags.id == book_id), [db.Books.timestamp.desc()])
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.tags.any(db.Tags.id == book_id), [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
...
@@ -225,9 +233,12 @@ def feed_seriesindex():
def feed_series(book_id):
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.series.any(db.Series.id == book_id), [db.Books.series_index])
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.series.any(db.Series.id == book_id), [db.Books.series_index])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)


@opds.route("/opds/ratings")
@requires_basic_auth_if_no_ano
def feed_ratingindex():
...
@@ -244,16 +255,18 @@ def feed_ratingindex():
        element.append(FeedObject(entry[0].id, "{} Stars".format(entry.name)))
    return render_xml_template('feed.xml', listelements=element, folder='opds.feed_ratings', pagination=pagination)


@opds.route("/opds/ratings/<book_id>")
@requires_basic_auth_if_no_ano
def feed_ratings(book_id):
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.ratings.any(db.Ratings.id == book_id),[db.Books.timestamp.desc()])
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.ratings.any(db.Ratings.id == book_id),
+                                             [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)


@opds.route("/opds/formats")
@requires_basic_auth_if_no_ano
def feed_formatindex():
...
@@ -274,7 +287,9 @@ def feed_formatindex():
def feed_format(book_id):
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.data.any(db.Data.format == book_id.upper()), [db.Books.timestamp.desc()])
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.data.any(db.Data.format == book_id.upper()),
+                                             [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
...
@@ -306,7 +321,9 @@ def feed_languagesindex():
def feed_languages(book_id):
    off = request.args.get("offset") or 0
-    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1), db.Books, db.Books.languages.any(db.Languages.id == book_id), [db.Books.timestamp.desc()])
+    entries, __, pagination = fill_indexpage((int(off) / (int(config.config_books_per_page)) + 1),
+                                             db.Books, db.Books.languages.any(db.Languages.id == book_id),
+                                             [db.Books.timestamp.desc()])
    return render_xml_template('feed.xml', entries=entries, pagination=pagination)
...
@@ -326,7 +343,8 @@ def feed_shelfindex():
def feed_shelf(book_id):
    off = request.args.get("offset") or 0
    if current_user.is_anonymous:
        shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.is_public == 1, ub.Shelf.id == book_id, not ub.Shelf.deleted).first()
    else:
        shelf = ub.session.query(ub.Shelf).filter(or_(and_(ub.Shelf.user_id == int(current_user.id), ub.Shelf.id == book_id),
...
@@ -349,11 +367,11 @@ def feed_shelf(book_id):
@requires_basic_auth_if_no_ano
@download_required
def opds_download_link(book_id, book_format):
    return get_download_link(book_id, book_format.lower())


@opds.route("/ajax/book/<string:uuid>/<library>")
@opds.route("/ajax/book/<string:uuid>", defaults={'library': ""})
@requires_basic_auth_if_no_ano
def get_metadata_calibre_companion(uuid, library):
    entry = db.session.query(db.Books).filter(db.Books.uuid.like("%" + uuid + "%")).first()
...
@@ -369,16 +387,17 @@ def get_metadata_calibre_companion(uuid, library):
def feed_search(term):
    if term:
        term = term.strip().lower()
        entries = get_search_results(term)
        entriescount = len(entries) if len(entries) > 0 else 1
        pagination = Pagination(1, entriescount, entriescount)
        return render_xml_template('feed.xml', searchterm=term, entries=entries, pagination=pagination)
    else:
        return render_xml_template('feed.xml', searchterm="")


def check_auth(username, password):
    if sys.version_info.major == 3:
        username = username.encode('windows-1252')
    user = ub.session.query(ub.User).filter(func.lower(ub.User.nickname) == username.decode('utf-8').lower()).first()
    return bool(user and check_password_hash(str(user.password), password))
...
@@ -392,13 +411,14 @@ def authenticate():
def render_xml_template(*args, **kwargs):
-    #ToDo: return time in current timezone similar to %z
+    # ToDo: return time in current timezone similar to %z
    currtime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S+00:00")
    xml = render_template(current_time=currtime, instance=config.config_calibre_web_title, *args, **kwargs)
    response = make_response(xml)
    response.headers["Content-Type"] = "application/atom+xml; charset=utf-8"
    return response


@opds.route("/opds/thumb_240_240/<book_id>")
@opds.route("/opds/cover_240_240/<book_id>")
@opds.route("/opds/cover_90_90/<book_id>")
...
@@ -407,6 +427,7 @@ def render_xml_template(*args, **kwargs):
def feed_get_cover(book_id):
    return get_book_cover(book_id)


@opds.route("/opds/readbooks")
@requires_basic_auth_if_no_ano
def feed_read_books():
...
@@ -414,6 +435,7 @@ def feed_read_books():
    result, pagination = render_read_books(int(off) / (int(config.config_books_per_page)) + 1, True, True)
    return render_xml_template('feed.xml', entries=result, pagination=pagination)


@opds.route("/opds/unreadbooks")
@requires_basic_auth_if_no_ano
def feed_unread_books():
...
cps/server.py  View file @ 24c743d2
...
@@ -43,7 +43,6 @@ from . import logger
log = logger.create()


def _readable_listen_address(address, port):
    if ':' in address:
        address = "[" + address + "]"
...
@@ -84,7 +83,8 @@ class WebServer(object):
            if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
                self.ssl_args = dict(certfile=certfile_path, keyfile=keyfile_path)
            else:
-                log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.')
+                log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. '
+                            'Ignoring ssl.')
                log.warning('Cert path: %s', certfile_path)
                log.warning('Key path: %s', keyfile_path)
...
cps/services/SyncToken.py  View file @ 24c743d2
...
@@ -49,10 +49,11 @@ def get_datetime_from_json(json_object, field_name):
        return datetime.min


- class SyncToken():
+ class SyncToken:
    """ The SyncToken is used to persist state accross requests.
-        When serialized over the response headers, the Kobo device will propagate the token onto following requests to the service.
-        As an example use-case, the SyncToken is used to detect books that have been added to the library since the last time the device synced to the server.
+        When serialized over the response headers, the Kobo device will propagate the token onto following
+        requests to the service. As an example use-case, the SyncToken is used to detect books that have been added
+        to the library since the last time the device synced to the server.

    Attributes:
        books_last_created: Datetime representing the newest book that the device knows about.
...
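The docstring above describes the mechanism rather than the implementation. A stripped-down sketch of the same idea — carrying a "last created" timestamp in a header and using it to pick out new books on the next request — is shown below; the header name, encoding and in-memory book list are simplifying assumptions, not the real SyncToken format:

import base64
import datetime
import json

HEADER = "x-example-synctoken"  # illustrative name, not the real Kobo header

def token_to_headers(books_last_created):
    # Serialize the client's known state into a response header.
    raw = json.dumps({"books_last_created": books_last_created.isoformat()})
    return {HEADER: base64.b64encode(raw.encode()).decode()}

def books_added_since(headers, library):
    # On the next request the device echoes the header back; anything newer
    # than the remembered timestamp counts as "new" for this device.
    raw = base64.b64decode(headers[HEADER]).decode()
    last = datetime.datetime.fromisoformat(json.loads(raw)["books_last_created"])
    return [b for b in library if b["created"] > last]

library = [{"title": "Old", "created": datetime.datetime(2020, 1, 1)},
           {"title": "New", "created": datetime.datetime(2020, 4, 19)}]
hdrs = token_to_headers(datetime.datetime(2020, 3, 1))
print([b["title"] for b in books_added_since(hdrs, library)])  # ['New']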
@@ -66,10 +67,11 @@ class SyncToken():
    token_schema = {
        "type": "object",
-        "properties": {"version": {"type": "string"}, "data": {"type": "object"},},
+        "properties": {"version": {"type": "string"}, "data": {"type": "object"}, },
    }
    # This Schema doesn't contain enough information to detect and propagate book deletions from Calibre to the device.
-    # A potential solution might be to keep a list of all known book uuids in the token, and look for any missing from the db.
+    # A potential solution might be to keep a list of all known book uuids in the token, and look for any missing
+    # from the db.
    data_schema_v1 = {
        "type": "object",
        "properties": {
...
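The comment above sketches one way to propagate deletions: remember every uuid the device has been told about and flag the ones that no longer exist in the database. A tiny illustration of that bookkeeping with made-up uuids (not the actual schema or token layout):

# uuids previously recorded in the sync token (books the device knows about)
known_uuids = {"aaa-111", "bbb-222", "ccc-333"}

# uuids currently present in the Calibre database
current_uuids = {"aaa-111", "ccc-333"}

# Books missing from the db were deleted since the last sync and could be
# reported back to the device as removed.
deleted = known_uuids - current_uuids
print(sorted(deleted))  # ['bbb-222']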
cps/shelf.py  View file @ 24c743d2
...
@@ -25,7 +25,7 @@ from __future__ import division, print_function, unicode_literals
from flask import Blueprint, request, flash, redirect, url_for
from flask_babel import gettext as _
from flask_login import login_required, current_user
- from sqlalchemy.sql.expression import func, or_, and_
+ from sqlalchemy.sql.expression import func
from . import logger, ub, searched_ids, db
from .web import render_title_template
...
@@ -35,6 +35,7 @@ from .helper import common_filters
shelf = Blueprint('shelf', __name__)
log = logger.create()


def check_shelf_edit_permissions(cur_shelf):
    if not cur_shelf.is_public and not cur_shelf.user_id == int(current_user.id):
        log.error("User %s not allowed to edit shelf %s", current_user, cur_shelf)
...
@@ -195,7 +196,6 @@ def remove_from_shelf(shelf_id, book_id):
        return "Sorry you are not allowed to remove a book from this shelf: %s" % shelf.name, 403


@shelf.route("/shelf/create", methods=["GET", "POST"])
@login_required
def create_shelf():
...
@@ -214,21 +214,24 @@ def create_shelf():
            .first() is None
        if not is_shelf_name_unique:
            flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
    else:
        is_shelf_name_unique = ub.session.query(ub.Shelf) \
            .filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) & (ub.Shelf.user_id == int(current_user.id))) \
            .first() is None
        if not is_shelf_name_unique:
            flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
    if is_shelf_name_unique:
        try:
            ub.session.add(shelf)
            ub.session.commit()
            flash(_(u"Shelf %(title)s created", title=to_save["title"]), category="success")
            return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))
        except Exception:
            flash(_(u"There was an error"), category="error")
    return render_title_template('shelf_edit.html', shelf=shelf, title=_(u"Create a Shelf"), page="shelfcreate")
...
@@ -251,15 +254,18 @@ def edit_shelf(shelf_id):
            .first() is None
        if not is_shelf_name_unique:
            flash(_(u"A public shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
    else:
        is_shelf_name_unique = ub.session.query(ub.Shelf) \
            .filter((ub.Shelf.name == to_save["title"]) & (ub.Shelf.is_public == 0) & (ub.Shelf.user_id == int(current_user.id))) \
            .filter(ub.Shelf.id != shelf_id) \
            .first() is None
        if not is_shelf_name_unique:
            flash(_(u"A private shelf with the name '%(title)s' already exists.", title=to_save["title"]), category="error")
    if is_shelf_name_unique:
        shelf.name = to_save["title"]
...
@@ -283,7 +289,7 @@ def delete_shelf_helper(cur_shelf):
    shelf_id = cur_shelf.id
    ub.session.delete(cur_shelf)
    ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
    ub.session.add(ub.ShelfArchive(uuid=cur_shelf.uuid, user_id=cur_shelf.uuid))
    ub.session.commit()
    log.info("successfully deleted %s", cur_shelf)
...
@@ -295,7 +301,7 @@ def delete_shelf(shelf_id):
    delete_shelf_helper(cur_shelf)
    return redirect(url_for('web.index'))


# @shelf.route("/shelfdown/<int:shelf_id>")
@shelf.route("/shelf/<int:shelf_id>", defaults={'shelf_type': 1})
@shelf.route("/shelf/<int:shelf_id>/<int:shelf_type>")
def show_shelf(shelf_type, shelf_id):
...
@@ -325,7 +331,6 @@ def show_shelf(shelf_type, shelf_id):
        return redirect(url_for("web.index"))


@shelf.route("/shelf/order/<int:shelf_id>", methods=["GET", "POST"])
@login_required
def order_shelf(shelf_id):
...
@@ -347,17 +352,17 @@ def order_shelf(shelf_id):
        for book in books_in_shelf2:
            cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).filter(common_filters()).first()
            if cur_book:
                result.append({'title': cur_book.title, 'id': cur_book.id, 'author': cur_book.authors, 'series': cur_book.series, 'series_index': cur_book.series_index})
            else:
                cur_book = db.session.query(db.Books).filter(db.Books.id == book.book_id).first()
-                result.append({'title': _('Hidden Book'), 'id': cur_book.id, 'author':[], 'series':[]})
+                result.append({'title': _('Hidden Book'), 'id': cur_book.id, 'author': [], 'series': []})
    return render_title_template('shelf_order.html', entries=result, title=_(u"Change order of Shelf: '%(name)s'", name=shelf.name), shelf=shelf, page="shelforder")
cps/subproc_wrapper.py  View file @ 24c743d2
...
@@ -45,10 +45,10 @@ def process_open(command, quotes=(), env=None, sout=subprocess.PIPE, serr=subprocess.PIPE):
def process_wait(command, serr=subprocess.PIPE):
-    '''Run command, wait for process to terminate, and return an iterator over lines of its output.'''
+    # Run command, wait for process to terminate, and return an iterator over lines of its output.
    p = process_open(command, serr=serr)
    p.wait()
-    for l in p.stdout.readlines():
-        if isinstance(l, bytes):
-            l = l.decode('utf-8')
-        yield l
+    for line in p.stdout.readlines():
+        if isinstance(line, bytes):
+            line = line.decode('utf-8')
+        yield line
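process_wait (above) yields decoded stdout lines after the child process exits. A hedged usage sketch — it assumes the module is importable as cps.subproc_wrapper and that the chosen command exists on the system; error handling is omitted:

from cps.subproc_wrapper import process_wait

for line in process_wait(['unrar', '-?']):
    # Each yielded item is one line of the finished process's output.
    print(line.rstrip())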
cps/templates/list.html  View file @ 24c743d2
...
@@ -4,18 +4,18 @@
<div class="filterheader hidden-xs hidden-sm">
  {% if entries.__len__() %}
-   {% if entries[0][0].sort %}
-     <button id="sort_name" class="btn btn-success"><b>B,A <-> A B</b></button>
+   {% if data == 'author' %}
+     <button id="sort_name" class="btn btn-primary"><b>B,A <-> A B</b></button>
    {% endif %}
  {% endif %}
- <button id="desc" class="btn btn-success"><span class="glyphicon glyphicon-sort-by-alphabet"></span></button>
- <button id="asc" class="btn btn-success"><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></button>
+ <button id="desc" class="btn btn-primary"><span class="glyphicon glyphicon-sort-by-alphabet"></span></button>
+ <button id="asc" class="btn btn-primary"><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></button>
  {% if charlist|length %}
-   <button id="all" class="btn btn-success">{{_('All')}}</button>
+   <button id="all" class="btn btn-primary">{{_('All')}}</button>
  {% endif %}
  <div class="btn-group character" role="group">
    {% for char in charlist%}
-     <button class="btn btn-success char">{{char.char}}</button>
+     <button class="btn btn-primary char">{{char.char}}</button>
    {% endfor %}
  </div>
</div>
...
cps/ub.py  View file @ 24c743d2
...
@@ -42,11 +42,10 @@ from sqlalchemy import create_engine, exc, exists, event
from sqlalchemy import Column, ForeignKey
from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime, Float
from sqlalchemy.ext.declarative import declarative_base
- from sqlalchemy.orm import backref, foreign, relationship, remote, sessionmaker, Session
- from sqlalchemy.sql.expression import and_
+ from sqlalchemy.orm import backref, relationship, sessionmaker, Session
from werkzeug.security import generate_password_hash

- from . import constants  # , config
+ from . import constants

session = None
...
@@ -57,39 +56,39 @@ def get_sidebar_config(kwargs=None):
    kwargs = kwargs or []
    if 'content' in kwargs:
        content = kwargs['content']
        content = isinstance(content, (User, LocalProxy)) and not content.role_anonymous()
    else:
        content = 'conf' in kwargs
    sidebar = list()
    sidebar.append({"glyph": "glyphicon-book", "text": _('Recently Added'), "link": 'web.index', "id": "new",
                    "visibility": constants.SIDEBAR_RECENT, 'public': True, "page": "root",
                    "show_text": _('Show recent books'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-fire", "text": _('Hot Books'), "link": 'web.books_list', "id": "hot",
                    "visibility": constants.SIDEBAR_HOT, 'public': True, "page": "hot",
                    "show_text": _('Show Hot Books'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-star", "text": _('Top Rated Books'), "link": 'web.books_list', "id": "rated",
                    "visibility": constants.SIDEBAR_BEST_RATED, 'public': True, "page": "rated",
                    "show_text": _('Show Top Rated Books'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-eye-open", "text": _('Read Books'), "link": 'web.books_list', "id": "read",
                    "visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous),
                    "page": "read", "show_text": _('Show read and unread'), "config_show": content})
    sidebar.append({"glyph": "glyphicon-eye-close", "text": _('Unread Books'), "link": 'web.books_list', "id": "unread",
                    "visibility": constants.SIDEBAR_READ_AND_UNREAD, 'public': (not g.user.is_anonymous), "page": "unread",
                    "show_text": _('Show unread'), "config_show": False})
    sidebar.append({"glyph": "glyphicon-random", "text": _('Discover'), "link": 'web.books_list', "id": "rand",
                    "visibility": constants.SIDEBAR_RANDOM, 'public': True, "page": "discover",
                    "show_text": _('Show random books'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-inbox", "text": _('Categories'), "link": 'web.category_list', "id": "cat",
                    "visibility": constants.SIDEBAR_CATEGORY, 'public': True, "page": "category",
                    "show_text": _('Show category selection'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-bookmark", "text": _('Series'), "link": 'web.series_list', "id": "serie",
                    "visibility": constants.SIDEBAR_SERIES, 'public': True, "page": "series",
                    "show_text": _('Show series selection'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-user", "text": _('Authors'), "link": 'web.author_list', "id": "author",
                    "visibility": constants.SIDEBAR_AUTHOR, 'public': True, "page": "author",
                    "show_text": _('Show author selection'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-text-size", "text": _('Publishers'), "link": 'web.publisher_list', "id": "publisher",
                    "visibility": constants.SIDEBAR_PUBLISHER, 'public': True, "page": "publisher",
...
@@ -97,13 +96,13 @@ def get_sidebar_config(kwargs=None):
    sidebar.append({"glyph": "glyphicon-flag", "text": _('Languages'), "link": 'web.language_overview', "id": "lang",
                    "visibility": constants.SIDEBAR_LANGUAGE, 'public': (g.user.filter_language() == 'all'),
                    "page": "language", "show_text": _('Show language selection'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-star-empty", "text": _('Ratings'), "link": 'web.ratings_list', "id": "rate",
                    "visibility": constants.SIDEBAR_RATING, 'public': True,
                    "page": "rating", "show_text": _('Show ratings selection'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-file", "text": _('File formats'), "link": 'web.formats_list', "id": "format",
                    "visibility": constants.SIDEBAR_FORMAT, 'public': True,
                    "page": "format", "show_text": _('Show file formats selection'), "config_show": True})
    sidebar.append({"glyph": "glyphicon-trash", "text": _('Archived Books'), "link": 'web.books_list', "id": "archived",
                    "visibility": constants.SIDEBAR_ARCHIVED, 'public': (not g.user.is_anonymous), "page": "archived",
...
@@ -236,7 +235,8 @@ class Anonymous(AnonymousUserMixin, UserBase):
        self.loadSettings()

    def loadSettings(self):
-        data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first()  # type: User
+        data = session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS)\
+            .first()  # type: User
        self.nickname = data.nickname
        self.role = data.role
        self.id = data.id
...
@@ -259,7 +259,7 @@ class Anonymous(AnonymousUserMixin, UserBase):
    @property
    def is_anonymous(self):
-        return True  # self.anon_browse
+        return True

    @property
    def is_authenticated(self):
...
@@ -271,7 +271,7 @@ class Shelf(Base):
    __tablename__ = 'shelf'

    id = Column(Integer, primary_key=True)
    uuid = Column(String, default=lambda: str(uuid.uuid4()))
    name = Column(String)
    is_public = Column(Integer, default=0)
    user_id = Column(Integer, ForeignKey('user.id'))
...
@@ -318,8 +318,12 @@ class ReadBook(Base):
    book_id = Column(Integer, unique=False)
    user_id = Column(Integer, ForeignKey('user.id'), unique=False)
    read_status = Column(Integer, unique=False, default=STATUS_UNREAD, nullable=False)
    kobo_reading_state = relationship("KoboReadingState", uselist=False,
                                      primaryjoin="and_(ReadBook.user_id == foreign(KoboReadingState.user_id), "
                                                  "ReadBook.book_id == foreign(KoboReadingState.book_id))",
                                      cascade="all",
                                      backref=backref("book_read_link", uselist=False))
    last_modified = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
    last_time_started_reading = Column(DateTime, nullable=True)
    times_started_reading = Column(Integer, default=0, nullable=False)
...
@@ -334,6 +338,7 @@ class Bookmark(Base):
    format = Column(String(collation='NOCASE'))
    bookmark_key = Column(String)


# Baseclass representing books that are archived on the user's Kobo device.
class ArchivedBook(Base):
    __tablename__ = 'archived_book'
...
@@ -421,7 +426,6 @@ class Registration(Base):
        return u"<Registration('{0}')>".format(self.domain)


class RemoteAuthToken(Base):
    __tablename__ = 'remote_auth_token'
...
@@ -532,18 +536,12 @@ def migrate_Database(session):
        conn = engine.connect()
        conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
                     "+ series_books * :side_series + category_books * :side_category + hot_books * "
-                     ":side_hot + :side_autor + :detail_random)",{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE, 'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
+                     ":side_hot + :side_autor + :detail_random)",
+                     {'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE, 'side_series': constants.SIDEBAR_SERIES, 'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
                      'detail_random': constants.DETAIL_RANDOM})
        session.commit()
-        '''try:
-            session.query(exists().where(User.mature_content)).scalar()
-        except exc.OperationalError:
-            conn = engine.connect()
-            conn.execute("ALTER TABLE user ADD column `mature_content` INTEGER DEFAULT 1")'''
    try:
        session.query(exists().where(User.denied_tags)).scalar()
    except exc.OperationalError:
        # Database is not compatible, some columns are missing
...
@@ -552,7 +550,8 @@ def migrate_Database(session):
        conn.execute("ALTER TABLE user ADD column `allowed_tags` String DEFAULT ''")
        conn.execute("ALTER TABLE user ADD column `denied_column_value` DEFAULT ''")
        conn.execute("ALTER TABLE user ADD column `allowed_column_value` DEFAULT ''")
-    if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() is None:
+    if session.query(User).filter(User.role.op('&')(constants.ROLE_ANONYMOUS) == constants.ROLE_ANONYMOUS).first() \
+            is None:
        create_anonymous_user(session)
    try:
        # check if one table with autoincrement is existing (should be user table)
...
@@ -562,7 +561,7 @@ def migrate_Database(session):
        # Create new table user_id and copy contents of table user into it
        conn = engine.connect()
        conn.execute("CREATE TABLE user_id (id INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,"
                     "nickname VARCHAR(64),"
                     "email VARCHAR(120),"
                     "role SMALLINT,"
                     "password VARCHAR,"
...
@@ -591,25 +590,26 @@ def clean_database(session):
    # Remove expired remote login tokens
    now = datetime.datetime.now()
    session.query(RemoteAuthToken).filter(now > RemoteAuthToken.expiration). \
        filter(RemoteAuthToken.token_type != 1).delete()
    session.commit()


# Save downloaded books per user in calibre-web's own database
def update_download(book_id, user_id):
    check = session.query(Downloads).filter(Downloads.user_id == user_id).filter(Downloads.book_id == book_id).first()
    if not check:
        new_download = Downloads(user_id=user_id, book_id=book_id)
        session.add(new_download)
        session.commit()


# Delete non exisiting downloaded books in calibre-web's own database
def delete_download(book_id):
    session.query(Downloads).filter(book_id == Downloads.book_id).delete()
    session.commit()


# Generate user Guest (translated text), as anoymous user, no rights
def create_anonymous_user(session):
    user = User()
...
@@ -667,8 +667,12 @@ def dispose():
    old_session = session
    session = None
    if old_session:
-        try: old_session.close()
-        except: pass
+        try:
+            old_session.close()
+        except Exception:
+            pass
        if old_session.bind:
-            try: old_session.bind.dispose()
-            except: pass
+            try:
+                old_session.bind.dispose()
+            except Exception:
+                pass
cps/updater.py  View file @ 24c743d2
...
@@ -69,7 +69,7 @@ class Updater(threading.Thread):
    def get_available_updates(self, request_method, locale):
        if config.config_updatechannel == constants.UPDATE_STABLE:
            return self._stable_available_updates(request_method)
        return self._nightly_available_updates(request_method, locale)

    def do_work(self):
        try:
...
@@ -132,7 +132,7 @@ class Updater(threading.Thread):
    def pause(self):
        self.can_run.clear()

-    #should just resume the thread
+    # should just resume the thread
    def resume(self):
        self.can_run.set()
...
@@ -268,7 +268,7 @@ class Updater(threading.Thread):
    def is_venv(self):
        if (hasattr(sys, 'real_prefix')) or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix):
            return os.sep + os.path.relpath(sys.prefix, constants.BASE_DIR)
        else:
            return False
...
@@ -436,7 +436,7 @@ class Updater(threading.Thread):
                    patch_version_update > current_version[2]) or \
                    minor_version_update > current_version[1]:
                parents.append([commit[i]['tag_name'], commit[i]['body'].replace('\r\n', '<p>')])
                newer = True
            i -= 1
            continue
        if major_version_update < current_version[0]:
...