Commit 9a963bbe authored by Ozzie Isaacs

Refactored code

Testrun
parent 994bc8b0
@@ -211,31 +211,8 @@ def delete_book_from_details(book_id):
 def delete_book_ajax(book_id, book_format):
     return delete_book(book_id,book_format, False)

-def delete_book(book_id, book_format, jsonResponse):
-    warning = {}
-    if current_user.role_delete_books():
-        book = calibre_db.get_book(book_id)
-        if book:
-            try:
-                result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
-                if not result:
-                    if jsonResponse:
-                        return json.dumps({"location": url_for("editbook.edit_book"),
-                                           "type": "alert",
-                                           "format": "",
-                                           "error": error}),
-                    else:
-                        flash(error, category="error")
-                        return redirect(url_for('editbook.edit_book', book_id=book_id))
-                if error:
-                    if jsonResponse:
-                        warning = {"location": url_for("editbook.edit_book"),
-                                   "type": "warning",
-                                   "format": "",
-                                   "error": error}
-                    else:
-                        flash(error, category="warning")
-                if not book_format:
+def delete_whole_book(book_id, book):
     # delete book from Shelfs, Downloads, Read list
     ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()
     ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()
@@ -250,7 +227,7 @@ def delete_book(book_id, book_format, jsonResponse):
     modify_database_object([u''], book.languages, db.Languages, calibre_db.session, 'languages')
     modify_database_object([u''], book.publishers, db.Publishers, calibre_db.session, 'publishers')

-    cc = calibre_db.session.query(db.Custom_Columns).\
+    cc = calibre_db.session.query(db.Custom_Columns). \
         filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
     for c in cc:
         cc_string = "custom_column_" + str(c.id)
@@ -279,16 +256,9 @@ def delete_book(book_id, book_format, jsonResponse):
             modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],
                                    calibre_db.session, 'custom')
     calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()
-            else:
-                calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
-                    filter(db.Data.format == book_format).delete()
-                calibre_db.session.commit()
-        except Exception as e:
-            log.debug_or_exception(e)
-            calibre_db.session.rollback()
-    else:
-        # book not found
-        log.error('Book with id "%s" could not be deleted: not found', book_id)
+
+
+def render_delete_book_result(book_format, jsonResponse, warning, book_id):
     if book_format:
         if jsonResponse:
             return json.dumps([warning, {"location": url_for("editbook.edit_book", book_id=book_id),
@@ -309,6 +279,45 @@ def delete_book(book_id, book_format, jsonResponse):
     return redirect(url_for('web.index'))

+
+def delete_book(book_id, book_format, jsonResponse):
+    warning = {}
+    if current_user.role_delete_books():
+        book = calibre_db.get_book(book_id)
+        if book:
+            try:
+                result, error = helper.delete_book(book, config.config_calibre_dir, book_format=book_format.upper())
+                if not result:
+                    if jsonResponse:
+                        return json.dumps({"location": url_for("editbook.edit_book"),
+                                           "type": "alert",
+                                           "format": "",
+                                           "error": error}),
+                    else:
+                        flash(error, category="error")
+                        return redirect(url_for('editbook.edit_book', book_id=book_id))
+                if error:
+                    if jsonResponse:
+                        warning = {"location": url_for("editbook.edit_book"),
+                                   "type": "warning",
+                                   "format": "",
+                                   "error": error}
+                    else:
+                        flash(error, category="warning")
+                if not book_format:
+                    delete_whole_book(book_id, book)
+                else:
+                    calibre_db.session.query(db.Data).filter(db.Data.book == book.id).\
+                        filter(db.Data.format == book_format).delete()
+                calibre_db.session.commit()
+            except Exception as e:
+                log.debug_or_exception(e)
+                calibre_db.session.rollback()
+        else:
+            # book not found
+            log.error('Book with id "%s" could not be deleted: not found', book_id)
+    return render_delete_book_result(book_format, jsonResponse, warning, book_id)
+
+
 def render_edit_book(book_id):
     cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
     book = calibre_db.get_filtered_book(book_id, allow_show_archived=True)
@@ -447,18 +456,8 @@ def edit_book_publisher(to_save, book):
     return changed

-def edit_cc_data(book_id, book, to_save):
+def edit_cc_data_number(book_id, book, c, to_save, cc_db_value, cc_string):
     changed = False
-    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
-    for c in cc:
-        cc_string = "custom_column_" + str(c.id)
-        if not c.is_multiple:
-            if len(getattr(book, cc_string)) > 0:
-                cc_db_value = getattr(book, cc_string)[0].value
-            else:
-                cc_db_value = None
-            if to_save[cc_string].strip():
-                if c.datatype == 'int' or c.datatype == 'bool' or c.datatype == 'float':
     if to_save[cc_string] == 'None':
         to_save[cc_string] = None
     elif c.datatype == 'bool':
@@ -479,8 +478,11 @@ def edit_cc_data(book_id, book, to_save):
             new_cc = cc_class(value=to_save[cc_string], book=book_id)
             calibre_db.session.add(new_cc)
             changed = True
-                else:
+    return changed, to_save
+
+
+def edit_cc_data_string(book, c, to_save, cc_db_value, cc_string):
+    changed = False
     if c.datatype == 'rating':
         to_save[cc_string] = str(int(float(to_save[cc_string]) * 2))
     if to_save[cc_string].strip() != cc_db_value:
@@ -504,6 +506,24 @@ def edit_cc_data(book_id, book, to_save):
                 cc_class.value == to_save[cc_string].strip()).first()
             # add cc value to book
             getattr(book, cc_string).append(new_cc)
+    return changed, to_save
+
+
+def edit_cc_data(book_id, book, to_save):
+    changed = False
+    cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.datatype.notin_(db.cc_exceptions)).all()
+    for c in cc:
+        cc_string = "custom_column_" + str(c.id)
+        if not c.is_multiple:
+            if len(getattr(book, cc_string)) > 0:
+                cc_db_value = getattr(book, cc_string)[0].value
+            else:
+                cc_db_value = None
+            if to_save[cc_string].strip():
+                if c.datatype == 'int' or c.datatype == 'bool' or c.datatype == 'float':
+                    changed, to_save = edit_cc_data_number(book_id, book, c, to_save, cc_db_value, cc_string)
+                else:
+                    changed, to_save = edit_cc_data_string(book, c, to_save, cc_db_value, cc_string)
             else:
                 if cc_db_value is not None:
                     # remove old cc_val
@@ -766,6 +786,7 @@ def merge_metadata(to_save, meta):
         to_save["description"] = to_save["description"] or Markup(
             getattr(meta, 'description', '')).unescape()

+
 def identifier_list(to_save, book):
     """Generate a list of Identifiers from form information"""
     id_type_prefix = 'identifier-type-'
@@ -780,43 +801,8 @@ def identifier_list(to_save, book):
             result.append(db.Identifiers(to_save[val_key], type_value, book.id))
     return result

-@editbook.route("/upload", methods=["GET", "POST"])
-@login_required_if_no_ano
-@upload_required
-def upload():
-    if not config.config_uploading:
-        abort(404)
-    if request.method == 'POST' and 'btn-upload' in request.files:
-        for requested_file in request.files.getlist("btn-upload"):
-            try:
-                modif_date = False
-                # create the function for sorting...
-                calibre_db.update_title_sort(config)
-                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
-                # check if file extension is correct
-                if '.' in requested_file.filename:
-                    file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
-                    if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
-                        flash(
-                            _("File extension '%(ext)s' is not allowed to be uploaded to this server",
-                              ext=file_ext), category="error")
-                        return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
-                else:
-                    flash(_('File to be uploaded must have an extension'), category="error")
-                    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
-
-                # extract metadata from file
-                try:
-                    meta = uploader.upload(requested_file, config.config_rarfile_location)
-                except (IOError, OSError):
-                    log.error("File %s could not saved to temp dir", requested_file.filename)
-                    flash(_(u"File %(filename)s could not saved to temp dir",
-                            filename= requested_file.filename), category="error")
-                    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
-                title = meta.title
-                authr = meta.author
+def prepare_authors_on_upload(title, authr):
     if title != _(u'Unknown') and authr != _(u'Unknown'):
         entry = calibre_db.check_exists_book(authr, title)
         if entry:
@@ -835,7 +821,7 @@ def upload():
     if input_authors == ['']:
         input_authors = [_(u'Unknown')] # prevent empty Author

-    sort_authors_list=list()
+    sort_authors_list = list()
     db_author = None
     for inp in input_authors:
         stored_author = calibre_db.session.query(db.Authors).filter(db.Authors.name == inp).first()
@@ -851,12 +837,20 @@ def upload():
             sort_author = stored_author.sort
         sort_authors_list.append(sort_author)
     sort_authors = ' & '.join(sort_authors_list)
+    return sort_authors, input_authors, db_author
+
+
+def create_book_on_upload(modif_date, meta):
+    title = meta.title
+    authr = meta.author
+    sort_authors, input_authors, db_author = prepare_authors_on_upload(title, authr)

     title_dir = helper.get_valid_filename(title)
     author_dir = helper.get_valid_filename(db_author.name)

     # combine path and normalize path from windows systems
     path = os.path.join(author_dir, title_dir).replace('\\', '/')

     # Calibre adds books with utc as timezone
     db_book = db.Books(title, "", sort_authors, datetime.utcnow(), datetime(101, 1, 1),
                        '1', datetime.utcnow(), path, meta.cover, db_author, [], "")
@@ -884,6 +878,44 @@ def upload():
     # flush content, get db_book.id available
     calibre_db.session.flush()
+    return db_book, input_authors, title_dir
+
+
+@editbook.route("/upload", methods=["GET", "POST"])
+@login_required_if_no_ano
+@upload_required
+def upload():
+    if not config.config_uploading:
+        abort(404)
+    if request.method == 'POST' and 'btn-upload' in request.files:
+        for requested_file in request.files.getlist("btn-upload"):
+            try:
+                modif_date = False
+                # create the function for sorting...
+                calibre_db.update_title_sort(config)
+                calibre_db.session.connection().connection.connection.create_function('uuid4', 0, lambda: str(uuid4()))
+                # check if file extension is correct
+                if '.' in requested_file.filename:
+                    file_ext = requested_file.filename.rsplit('.', 1)[-1].lower()
+                    if file_ext not in constants.EXTENSIONS_UPLOAD and '' not in constants.EXTENSIONS_UPLOAD:
+                        flash(
+                            _("File extension '%(ext)s' is not allowed to be uploaded to this server",
+                              ext=file_ext), category="error")
+                        return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+                else:
+                    flash(_('File to be uploaded must have an extension'), category="error")
+                    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+
+                # extract metadata from file
+                try:
+                    meta = uploader.upload(requested_file, config.config_rarfile_location)
+                except (IOError, OSError):
+                    log.error("File %s could not saved to temp dir", requested_file.filename)
+                    flash(_(u"File %(filename)s could not saved to temp dir",
+                            filename= requested_file.filename), category="error")
+                    return Response(json.dumps({"location": url_for("web.index")}), mimetype='application/json')
+
+                db_book, input_authors, title_dir = create_book_on_upload(modif_date, meta)
+
                 # Comments needs book id therfore only possible after flush
                 modif_date |= edit_book_comments(Markup(meta.description).unescape(), db_book)
......
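The editbook.py changes above all apply the same extract-function pattern: a long view function is split into a helper that does the database work and a helper that builds the response, while the original name survives as a thin coordinator (delete_book now delegates to delete_whole_book and render_delete_book_result; upload delegates to prepare_authors_on_upload and create_book_on_upload). Below is a minimal, self-contained sketch of that shape with made-up toy names; it is not the project's API, just an illustration of the pattern under those assumptions.

# Toy illustration of the extract-function pattern used by delete_book()/upload():
# one coordinator calls a "do the work" helper and a "render the result" helper.

def delete_whole_record(store, record_id):
    # worker: mutate state only
    store.pop(record_id, None)

def render_delete_result(record_id, as_json):
    # renderer: build the response only
    return {"deleted": record_id} if as_json else "deleted {}".format(record_id)

def delete_record(store, record_id, as_json=False):
    # coordinator keeps the original entry point and signature
    if record_id in store:
        delete_whole_record(store, record_id)
    return render_delete_result(record_id, as_json)

if __name__ == "__main__":
    books = {1: "A", 2: "B"}
    print(delete_record(books, 1, as_json=True))  # {'deleted': 1}
    print(delete_record(books, 2))                # deleted 2

The design choice is the usual one for this kind of cleanup: the helpers can be unit-tested and reused (the JSON and non-JSON delete routes share them), while callers of the original function are unaffected.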
@@ -730,7 +730,7 @@ def format_runtime(runtime):
 # helper function to apply localize status information in tasklist entries
 def render_task_status(tasklist):
     renderedtasklist = list()
-    for __, user, added, task in tasklist:
+    for __, user, __, task in tasklist:
         if user == current_user.nickname or current_user.role_admin():
             ret = {}
             if task.start_time:
......
@@ -918,7 +918,7 @@ def HandleAuthRequest():
     if config.config_kobo_proxy:
         try:
             return redirect_or_proxy_request()
-        except:
+        except Exception:
             log.error("Failed to receive or parse response from Kobo's auth endpoint. Falling back to un-proxied mode.")
     return make_calibre_web_auth_response()
......
@@ -96,86 +96,7 @@ def logout_oauth_user():
         session.pop(str(oauth_key) + '_oauth_user_id')

-if ub.oauth_support:
-    oauthblueprints = []
-    if not ub.session.query(ub.OAuthProvider).count():
-        for provider in ("github", "google"):
-            oauthProvider = ub.OAuthProvider()
-            oauthProvider.provider_name = provider
-            oauthProvider.active = False
-            ub.session.add(oauthProvider)
-            ub.session_commit("{} Blueprint Created".format(provider))
-
-    oauth_ids = ub.session.query(ub.OAuthProvider).all()
-    ele1 = dict(provider_name='github',
-                id=oauth_ids[0].id,
-                active=oauth_ids[0].active,
-                oauth_client_id=oauth_ids[0].oauth_client_id,
-                scope=None,
-                oauth_client_secret=oauth_ids[0].oauth_client_secret,
-                obtain_link='https://github.com/settings/developers')
-    ele2 = dict(provider_name='google',
-                id=oauth_ids[1].id,
-                active=oauth_ids[1].active,
-                scope=["https://www.googleapis.com/auth/userinfo.email"],
-                oauth_client_id=oauth_ids[1].oauth_client_id,
-                oauth_client_secret=oauth_ids[1].oauth_client_secret,
-                obtain_link='https://console.developers.google.com/apis/credentials')
-    oauthblueprints.append(ele1)
-    oauthblueprints.append(ele2)
-
-    for element in oauthblueprints:
-        if element['provider_name'] == 'github':
-            blueprint_func = make_github_blueprint
-        else:
-            blueprint_func = make_google_blueprint
-        blueprint = blueprint_func(
-            client_id=element['oauth_client_id'],
-            client_secret=element['oauth_client_secret'],
-            redirect_to="oauth."+element['provider_name']+"_login",
-            scope=element['scope']
-        )
-        element['blueprint'] = blueprint
-        element['blueprint'].backend = OAuthBackend(ub.OAuth, ub.session, str(element['id']),
-                                                    user=current_user, user_required=True)
-        app.register_blueprint(blueprint, url_prefix="/login")
-        if element['active']:
-            register_oauth_blueprint(element['id'], element['provider_name'])
-
-    @oauth_authorized.connect_via(oauthblueprints[0]['blueprint'])
-    def github_logged_in(blueprint, token):
-        if not token:
-            flash(_(u"Failed to log in with GitHub."), category="error")
-            return False
-
-        resp = blueprint.session.get("/user")
-        if not resp.ok:
-            flash(_(u"Failed to fetch user info from GitHub."), category="error")
-            return False
-
-        github_info = resp.json()
-        github_user_id = str(github_info["id"])
-        return oauth_update_token(str(oauthblueprints[0]['id']), token, github_user_id)
-
-    @oauth_authorized.connect_via(oauthblueprints[1]['blueprint'])
-    def google_logged_in(blueprint, token):
-        if not token:
-            flash(_(u"Failed to log in with Google."), category="error")
-            return False
-
-        resp = blueprint.session.get("/oauth2/v2/userinfo")
-        if not resp.ok:
-            flash(_(u"Failed to fetch user info from Google."), category="error")
-            return False
-
-        google_info = resp.json()
-        google_user_id = str(google_info["id"])
-        return oauth_update_token(str(oauthblueprints[1]['id']), token, google_user_id)
-
-    def oauth_update_token(provider_id, token, provider_user_id):
+def oauth_update_token(provider_id, token, provider_user_id):
     session[provider_id + "_oauth_user_id"] = provider_user_id
     session[provider_id + "_oauth_token"] = token
@@ -202,7 +123,7 @@ if ub.oauth_support:
     return backend_resultcode

-    def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider_name):
+def bind_oauth_or_register(provider_id, provider_user_id, redirect_url, provider_name):
     query = ub.session.query(ub.OAuth).filter_by(
         provider=provider_id,
         provider_user_id=provider_user_id,
@@ -242,7 +163,7 @@ if ub.oauth_support:
     return redirect(url_for(redirect_url))

-    def get_oauth_status():
+def get_oauth_status():
     status = []
     query = ub.session.query(ub.OAuth).filter_by(
         user_id=current_user.id,
@@ -256,7 +177,7 @@ if ub.oauth_support:
     return None

-    def unlink_oauth(provider):
+def unlink_oauth(provider):
     if request.host_url + 'me' != request.referrer:
         pass
     query = ub.session.query(ub.OAuth).filter_by(
@@ -281,6 +202,88 @@ if ub.oauth_support:
         flash(_(u"Not Linked to %(oauth)s", oauth=provider), category="error")
     return redirect(url_for('web.profile'))

+
+def generate_oauth_blueprints():
+    oauthblueprints = []
+    if not ub.session.query(ub.OAuthProvider).count():
+        for provider in ("github", "google"):
+            oauthProvider = ub.OAuthProvider()
+            oauthProvider.provider_name = provider
+            oauthProvider.active = False
+            ub.session.add(oauthProvider)
+            ub.session_commit("{} Blueprint Created".format(provider))
+
+    oauth_ids = ub.session.query(ub.OAuthProvider).all()
+    ele1 = dict(provider_name='github',
+                id=oauth_ids[0].id,
+                active=oauth_ids[0].active,
+                oauth_client_id=oauth_ids[0].oauth_client_id,
+                scope=None,
+                oauth_client_secret=oauth_ids[0].oauth_client_secret,
+                obtain_link='https://github.com/settings/developers')
+    ele2 = dict(provider_name='google',
+                id=oauth_ids[1].id,
+                active=oauth_ids[1].active,
+                scope=["https://www.googleapis.com/auth/userinfo.email"],
+                oauth_client_id=oauth_ids[1].oauth_client_id,
+                oauth_client_secret=oauth_ids[1].oauth_client_secret,
+                obtain_link='https://console.developers.google.com/apis/credentials')
+    oauthblueprints.append(ele1)
+    oauthblueprints.append(ele2)
+
+    for element in oauthblueprints:
+        if element['provider_name'] == 'github':
+            blueprint_func = make_github_blueprint
+        else:
+            blueprint_func = make_google_blueprint
+        blueprint = blueprint_func(
+            client_id=element['oauth_client_id'],
+            client_secret=element['oauth_client_secret'],
+            redirect_to="oauth."+element['provider_name']+"_login",
+            scope=element['scope']
+        )
+        element['blueprint'] = blueprint
+        element['blueprint'].backend = OAuthBackend(ub.OAuth, ub.session, str(element['id']),
+                                                    user=current_user, user_required=True)
+        app.register_blueprint(blueprint, url_prefix="/login")
+        if element['active']:
+            register_oauth_blueprint(element['id'], element['provider_name'])
+    return oauthblueprints
+
+
+if ub.oauth_support:
+    oauthblueprints = generate_oauth_blueprints()
+
+    @oauth_authorized.connect_via(oauthblueprints[0]['blueprint'])
+    def github_logged_in(blueprint, token):
+        if not token:
+            flash(_(u"Failed to log in with GitHub."), category="error")
+            return False
+
+        resp = blueprint.session.get("/user")
+        if not resp.ok:
+            flash(_(u"Failed to fetch user info from GitHub."), category="error")
+            return False
+
+        github_info = resp.json()
+        github_user_id = str(github_info["id"])
+        return oauth_update_token(str(oauthblueprints[0]['id']), token, github_user_id)
+
+    @oauth_authorized.connect_via(oauthblueprints[1]['blueprint'])
+    def google_logged_in(blueprint, token):
+        if not token:
+            flash(_(u"Failed to log in with Google."), category="error")
+            return False
+
+        resp = blueprint.session.get("/oauth2/v2/userinfo")
+        if not resp.ok:
+            flash(_(u"Failed to fetch user info from Google."), category="error")
+            return False
+
+        google_info = resp.json()
+        google_user_id = str(google_info["id"])
+        return oauth_update_token(str(oauthblueprints[1]['id']), token, google_user_id)
+
     # notify on OAuth provider error
     @oauth_error.connect_via(oauthblueprints[0]['blueprint'])
......
@@ -253,7 +253,7 @@ class WebServer(object):
         if not self.restart:
             log.info("Performing shutdown of Calibre-Web")
-        # prevent irritiating log of pending tasks message from asyncio
+        # prevent irritating log of pending tasks message from asyncio
         logger.get('asyncio').setLevel(logger.logging.CRITICAL)
         return True
......
@@ -284,6 +284,62 @@ class Updater(threading.Thread):
     def _stable_version_info(cls):
         return constants.STABLE_VERSION # Current version

+    def _populate_parent_commits(self, update_data, status, locale, tz, parents):
+        try:
+            parent_commit = update_data['parents'][0]
+            # limit the maximum search depth
+            remaining_parents_cnt = 10
+        except (IndexError, KeyError):
+            remaining_parents_cnt = None
+
+        if remaining_parents_cnt is not None:
+            while True:
+                if remaining_parents_cnt == 0:
+                    break
+
+                # check if we are more than one update behind if so, go up the tree
+                if parent_commit['sha'] != status['current_commit_hash']:
+                    try:
+                        headers = {'Accept': 'application/vnd.github.v3+json'}
+                        r = requests.get(parent_commit['url'], headers=headers, timeout=10)
+                        r.raise_for_status()
+                        parent_data = r.json()
+
+                        parent_commit_date = datetime.datetime.strptime(
+                            parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
+                        parent_commit_date = format_datetime(
+                            parent_commit_date, format='short', locale=locale)
+
+                        parents.append([parent_commit_date,
+                                        parent_data['message'].replace('\r\n', '<p>').replace('\n', '<p>')])
+                        parent_commit = parent_data['parents'][0]
+                        remaining_parents_cnt -= 1
+                    except Exception:
+                        # it isn't crucial if we can't get information about the parent
+                        break
+                else:
+                    # parent is our current version
+                    break
+        return parents
+
+    def _load_nightly_data(self, repository_url, commit, status):
+        try:
+            headers = {'Accept': 'application/vnd.github.v3+json'}
+            r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
+                             headers=headers,
+                             timeout=10)
+            r.raise_for_status()
+            update_data = r.json()
+        except requests.exceptions.HTTPError as e:
+            status['message'] = _(u'HTTP Error') + ' ' + str(e)
+        except requests.exceptions.ConnectionError:
+            status['message'] = _(u'Connection error')
+        except requests.exceptions.Timeout:
+            status['message'] = _(u'Timeout while establishing connection')
+        except (requests.exceptions.RequestException, ValueError):
+            status['message'] = _(u'General error')
+        return status, update_data
+
     def _nightly_available_updates(self, request_method, locale):
         tz = datetime.timedelta(seconds=time.timezone if (time.localtime().tm_isdst == 0) else time.altzone)
         if request_method == "GET":
@@ -309,22 +365,7 @@ class Updater(threading.Thread):
                 # a new update is available
                 status['update'] = True
-                try:
-                    headers = {'Accept': 'application/vnd.github.v3+json'}
-                    r = requests.get(repository_url + '/git/commits/' + commit['object']['sha'],
-                                     headers=headers,
-                                     timeout=10)
-                    r.raise_for_status()
-                    update_data = r.json()
-                except requests.exceptions.HTTPError as e:
-                    status['message'] = _(u'HTTP Error') + ' ' + str(e)
-                except requests.exceptions.ConnectionError:
-                    status['message'] = _(u'Connection error')
-                except requests.exceptions.Timeout:
-                    status['message'] = _(u'Timeout while establishing connection')
-                except (requests.exceptions.RequestException, ValueError):
-                    status['message'] = _(u'General error')
+                status, update_data = self._load_nightly_data(repository_url, commit, status)

                 if status['message'] != '':
                     return json.dumps(status)
@@ -346,41 +387,7 @@ class Updater(threading.Thread):
                 )
                 # it only makes sense to analyze the parents if we know the current commit hash
                 if status['current_commit_hash'] != '':
-                    try:
-                        parent_commit = update_data['parents'][0]
-                        # limit the maximum search depth
-                        remaining_parents_cnt = 10
-                    except (IndexError, KeyError):
-                        remaining_parents_cnt = None
-
-                    if remaining_parents_cnt is not None:
-                        while True:
-                            if remaining_parents_cnt == 0:
-                                break
-                            # check if we are more than one update behind if so, go up the tree
-                            if parent_commit['sha'] != status['current_commit_hash']:
-                                try:
-                                    headers = {'Accept': 'application/vnd.github.v3+json'}
-                                    r = requests.get(parent_commit['url'], headers=headers, timeout=10)
-                                    r.raise_for_status()
-                                    parent_data = r.json()
-
-                                    parent_commit_date = datetime.datetime.strptime(
-                                        parent_data['committer']['date'], '%Y-%m-%dT%H:%M:%SZ') - tz
-                                    parent_commit_date = format_datetime(
-                                        parent_commit_date, format='short', locale=locale)
-
-                                    parents.append([parent_commit_date,
-                                                    parent_data['message'].replace('\r\n', '<p>').replace('\n', '<p>')])
-                                    parent_commit = parent_data['parents'][0]
-                                    remaining_parents_cnt -= 1
-                                except Exception:
-                                    # it isn't crucial if we can't get information about the parent
-                                    break
-                            else:
-                                # parent is our current version
-                                break
+                    parents = self._populate_parent_commits(update_data, status, locale, tz, parents)
                 status['history'] = parents[::-1]
             except (IndexError, KeyError):
                 status['success'] = False
......
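The updater change is the same extraction idea: the GitHub request moves into _load_nightly_data and the parent-commit walk moves into _populate_parent_commits, leaving _nightly_available_updates as the coordinator. Below is a self-contained sketch of the walk's control flow using an in-memory commit map instead of the GitHub API; the names and data are invented for illustration and are not the project's code.

# Walk a commit's ancestors, newest first, until the installed commit or a depth
# limit is reached, mirroring the loop in _populate_parent_commits().

def collect_parents(commits, head_sha, current_sha, max_depth=10):
    history = []
    sha = commits[head_sha]["parent"]       # first parent of the newest commit
    while sha is not None and max_depth > 0:
        if sha == current_sha:              # parent is our current version
            break
        history.append(commits[sha]["message"])
        sha = commits[sha]["parent"]        # go one step further up the tree
        max_depth -= 1
    return history

if __name__ == "__main__":
    commits = {
        "d4": {"message": "newest", "parent": "c3"},
        "c3": {"message": "fix",    "parent": "b2"},
        "b2": {"message": "feat",   "parent": "a1"},
        "a1": {"message": "old",    "parent": None},
    }
    print(collect_parents(commits, "d4", "b2"))  # ['fix'], stops before the installed commit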