captainwong / douban-api-proxy · Commits

Commit b75247ea authored Mar 15, 2021 by Ozzie Isaacs

Suppress some errors

parent 9a963bbe

Showing 11 changed files with 18 additions and 18 deletions

cps/about.py               +1  -0
cps/cache_buster.py        +2  -1
cps/constants.py           +1  -1
cps/error_handler.py       +1  -7
cps/gdrive.py              +1  -1
cps/kobo_auth.py           +1  -0
cps/server.py              +2  -2
cps/services/SyncToken.py  +3  -2
cps/subproc_wrapper.py     +1  -1
cps/updater.py             +4  -2
cps/uploader.py            +1  -1
cps/about.py

@@ -37,6 +37,7 @@ try:
 except ImportError:
     from flask_login.__about__ import __version__ as flask_loginVersion
 try:
+    # pylint: disable=unused-import
     import unidecode
     # _() necessary to make babel aware of string for translation
     unidecode_version = _(u'installed')
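A note on the pylint marker added above: the import exists only to probe whether the optional unidecode package is installed, so pylint would otherwise flag it as unused. A minimal sketch of the same pattern, with a hypothetical optional dependency standing in for unidecode:

# Sketch of an availability probe; "somepackage" is a placeholder name.
try:
    # pylint: disable=unused-import
    import somepackage          # imported only to detect whether it is installed
    somepackage_version = 'installed'
except ImportError:
    somepackage_version = 'not installed'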
cps/cache_buster.py

@@ -49,7 +49,7 @@ def init_cache_busting(app):
             # compute version component
             rooted_filename = os.path.join(dirpath, filename)
             with open(rooted_filename, 'rb') as f:
-                file_hash = hashlib.md5(f.read()).hexdigest()[:7]
+                file_hash = hashlib.md5(f.read()).hexdigest()[:7]  # nosec
             # save version to tables
             file_path = rooted_filename.replace(static_folder, "")

@@ -64,6 +64,7 @@ def init_cache_busting(app):
         return filename.split("?", 1)[0]

     @app.url_defaults
+    # pylint: disable=unused-variable
     def reverse_to_cache_busted_url(endpoint, values):
         """
         Make `url_for` produce busted filenames when using the 'static' endpoint.
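The # nosec marker added above silences Bandit's insecure-hash warning for MD5; here the digest is only a short cache-busting fingerprint for static files, not a security control. A sketch of the idea in isolation (the function name and sample input are illustrative, only the seven-character truncation mirrors the code above):

import hashlib

def asset_fingerprint(content: bytes) -> str:
    # MD5 is acceptable here: the value only busts browser caches when a
    # static file changes, so Bandit's insecure-hash finding is waived inline.
    return hashlib.md5(content).hexdigest()[:7]  # nosec

print(asset_fingerprint(b"body { color: black; }"))

On Python 3.9+ an alternative is passing usedforsecurity=False to hashlib.md5, which newer Bandit releases can also take into account.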
cps/constants.py

@@ -104,7 +104,7 @@ LDAP_AUTH_SIMPLE = 0
 DEFAULT_MAIL_SERVER = "mail.example.org"
-DEFAULT_PASSWORD = "admin123"  # nosec # noqa
+DEFAULT_PASSWORD = "admin123"  # noqa nosec
 DEFAULT_PORT = 8083
 env_CALIBRE_PORT = os.environ.get("CALIBRE_PORT", DEFAULT_PORT)
 try:
cps/error_handler.py

@@ -60,14 +60,8 @@ def init_errorhandler():
     if services.ldap:
         # Only way of catching the LDAPException upon logging in with LDAP server down
         @app.errorhandler(services.ldap.LDAPException)
+        # pylint: disable=unused-variable
         def handle_exception(e):
             log.debug('LDAP server not accessible while trying to login to opds feed')
             return error_http(FailedDependency())

-    # @app.errorhandler(InvalidRequestError)
-    #@app.errorhandler(OperationalError)
-    #def handle_db_exception(e):
-    #    db.session.rollback()
-    #    log.error('Database request error: %s',e)
-    #    return internal_error(InternalServerError(e))
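For context on the unused-variable marker added above (and the identical one in cps/kobo_auth.py below): handlers registered purely through a decorator are never referenced by name afterwards, so pylint reports the nested function as an unused local. A small hypothetical Flask-style sketch of the same pattern:

from flask import Flask

app = Flask(__name__)

def init_errorhandler():
    @app.errorhandler(404)
    # pylint: disable=unused-variable
    def handle_not_found(error):
        # registered via the decorator only; the name itself is never used again
        return "not found", 404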
cps/gdrive.py

@@ -142,7 +142,7 @@ def on_received_watch_confirmation():
     else:
         dbpath = os.path.join(config.config_calibre_dir, "metadata.db").encode()
         if not response['deleted'] and response['file']['title'] == 'metadata.db' \
-                and response['file']['md5Checksum'] != hashlib.md5(dbpath):
+                and response['file']['md5Checksum'] != hashlib.md5(dbpath):  # nosec
             tmp_dir = os.path.join(tempfile.gettempdir(), 'calibre_web')
             if not os.path.isdir(tmp_dir):
                 os.mkdir(tmp_dir)
cps/kobo_auth.py

@@ -81,6 +81,7 @@ log = logger.create()
 def register_url_value_preprocessor(kobo):
     @kobo.url_value_preprocessor
+    # pylint: disable=unused-variable
     def pop_auth_token(__, values):
         g.auth_token = values.pop("auth_token")
cps/server.py

@@ -22,7 +22,7 @@ import os
 import errno
 import signal
 import socket
-import subprocess
+import subprocess  # nosec
 try:
     from gevent.pywsgi import WSGIServer

@@ -259,7 +259,7 @@ class WebServer(object):
         log.info("Performing restart of Calibre-Web")
         args = self._get_args_for_reloading()
-        subprocess.call(args, close_fds=True)
+        subprocess.call(args, close_fds=True)  # nosec
         return True

     def _killServer(self, __, ___):
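Both markers added above target Bandit's subprocess checks: importing subprocess and spawning a process each raise a low-severity finding (e.g. B404 and B603). Since the restart command here is assembled by the application itself rather than from user input, the findings are waived. A hedged sketch of the same shape, with a made-up argument list:

import subprocess  # nosec - the import alone triggers Bandit's blacklist-import check

def restart_self(args):
    # args is built internally (not user-controlled) and shell=False is the
    # default, so the subprocess finding is suppressed inline.
    return subprocess.call(args, close_fds=True)  # nosec

# example: restart_self(["/usr/bin/python3", "cps.py"])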
cps/services/SyncToken.py

@@ -22,6 +22,7 @@ from base64 import b64decode, b64encode
 from jsonschema import validate, exceptions, __version__
 from datetime import datetime
 try:
+    # pylint: disable=unused-import
     from urllib import unquote
 except ImportError:
     from urllib.parse import unquote

@@ -91,14 +92,14 @@ class SyncToken:
     def __init__(
         self,
-        raw_kobo_store_token="",  # nosec
+        raw_kobo_store_token="",
         books_last_created=datetime.min,
         books_last_modified=datetime.min,
         archive_last_modified=datetime.min,
         reading_state_last_modified=datetime.min,
         tags_last_modified=datetime.min,
         books_last_id=-1
-    ):
+    ):  # nosec
         self.raw_kobo_store_token = raw_kobo_store_token
         self.books_last_created = books_last_created
         self.books_last_modified = books_last_modified
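The moved # nosec in the constructor above relates to Bandit's hardcoded-password checks, which flag default string values for credential-looking parameter names; the waiver only works on the line Bandit attributes the finding to, which for a signature spread over several lines is not necessarily the parameter's own line. A hypothetical sketch of the pattern (class and parameter names are illustrative, not from this project):

# Silencing a hardcoded-default finding (e.g. B107) on a multi-line signature.
class Session:
    def __init__(
        self,
        user,
        token=""
    ):  # nosec - the empty default is a sentinel, not a real credential
        self.user = user
        self.token = token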
cps/subproc_wrapper.py

@@ -41,7 +41,7 @@ def process_open(command, quotes=(), env=None, sout=subprocess.PIPE, serr=subpro
     else:
         exc_command = [x for x in command]

-    return subprocess.Popen(exc_command, shell=False, stdout=sout, stderr=serr, universal_newlines=newlines, env=env)
+    return subprocess.Popen(exc_command, shell=False, stdout=sout, stderr=serr, universal_newlines=newlines, env=env)  # nosec


 def process_wait(command, serr=subprocess.PIPE):
cps/updater.py

@@ -284,7 +284,8 @@ class Updater(threading.Thread):
     def _stable_version_info(cls):
         return constants.STABLE_VERSION  # Current version

-    def _populate_parent_commits(self, update_data, status, locale, tz, parents):
+    @staticmethod
+    def _populate_parent_commits(update_data, status, locale, tz, parents):
         try:
             parent_commit = update_data['parents'][0]
             # limit the maximum search depth

@@ -322,7 +323,8 @@ class Updater(threading.Thread):
                     break
         return parents

-    def _load_nightly_data(self, repository_url, commit, status):
+    @staticmethod
+    def _load_nightly_data(repository_url, commit, status):
         try:
             headers = {'Accept': 'application/vnd.github.v3+json'}
             r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
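The two changes above convert helpers that never touch `self` into static methods, which drops the unused `self` parameter that pylint-style checkers complain about. A generic sketch of the refactor (class, method, and sample value are placeholders):

class Example:
    # before: def _parse_tag(self, tag): ...  -- 'self' was never used
    @staticmethod
    def _parse_tag(tag):
        # pure function of its argument; needs no instance state
        return tuple(int(part) for part in tag.lstrip("v").split("."))

print(Example._parse_tag("v0.6.12"))    # (0, 6, 12)
print(Example()._parse_tag("v0.6.12"))  # instances can still call it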
cps/uploader.py

@@ -191,7 +191,7 @@ def upload(uploadfile, rarExcecutable):
     filename = uploadfile.filename
     filename_root, file_extension = os.path.splitext(filename)
-    md5 = hashlib.md5(filename.encode('utf-8')).hexdigest()
+    md5 = hashlib.md5(filename.encode('utf-8')).hexdigest()  # nosec
     tmp_file_path = os.path.join(tmp_dir, md5)
     log.debug("Temporary file: %s", tmp_file_path)
     uploadfile.save(tmp_file_path)