Commit 546ed65e authored by Ozzieisaacs's avatar Ozzieisaacs

Update logging

Fix author sort order
Fix sorting view
Move version info
Add feature: limit listening to a single IP address
parent 14b6202e
updater.py ident export-subst
constants.py ident export-subst
/test export-ignore
cps/static/css/libs/* linguist-vendored
cps/static/js/libs/* linguist-vendored
......@@ -76,7 +76,6 @@ try:
with open(os.path.join(_TRANSLATIONS_DIR, 'iso639.pickle'), 'rb') as f:
language_table = cPickle.load(f)
except cPickle.UnpicklingError as error:
# app.logger.error("Can't read file cps/translations/iso639.pickle: %s", error)
print("Can't read file cps/translations/iso639.pickle: %s" % error)
sys.exit(1)
......@@ -89,14 +88,13 @@ from .server import server
Server = server()
babel = Babel()
log = logger.create()
def create_app():
app.wsgi_app = ReverseProxied(app.wsgi_app)
cache_buster.init_cache_busting(app)
log.info('Starting Calibre Web...')
logger.info('Starting Calibre Web...')
Principal(app)
lm.init_app(app)
app.secret_key = os.getenv('SECRET_KEY', 'A0Zr98j/3yX R~XHH!jmN]LWX/,?RT')
......@@ -120,7 +118,7 @@ def get_locale():
try:
preferred.append(str(LC.parse(x.replace('-', '_'))))
except (UnknownLocaleError, ValueError) as e:
log.warning('Could not parse locale "%s": %s', x, e)
logger.warning('Could not parse locale "%s": %s', x, e)
preferred.append('en')
return negotiate_locale(preferred, translations)
......
......@@ -76,7 +76,7 @@ except ImportError:
feature_support['gdrive'] = gdrive_support
admi = Blueprint('admin', __name__)
log = logger.create()
# log = logger.create()
@admi.route("/admin")
......@@ -220,7 +220,7 @@ def view_configuration():
# stop Server
Server.setRestartTyp(True)
Server.stopServer()
log.info('Reboot required, restarting')
logger.info('Reboot required, restarting')
readColumn = db.session.query(db.Custom_Columns)\
.filter(and_(db.Custom_Columns.datatype == 'bool',db.Custom_Columns.mark_for_delete == 0)).all()
return render_title_template("config_view_edit.html", conf=config, readColumns=readColumn,
......@@ -464,6 +464,24 @@ def configuration_helper(origin):
title=_(u"Basic Configuration"), page="config")
content.config_logfile = to_save["config_logfile"]
content.config_access_log = 0
if "config_access_log" in to_save and to_save["config_access_log"] == "on":
content.config_access_log = 1
reboot_required = True
if "config_access_log" not in to_save and config.config_access_log:
reboot_required = True
if content.config_access_logfile != to_save["config_access_logfile"]:
# check valid path, only path or file
if not logger.is_valid_logfile(to_save["config_access_logfile"]):
ub.session.commit()
flash(_(u'Access Logfile location is not valid, please enter correct path'), category="error")
return render_title_template("config_edit.html", config=config, origin=origin,
gdriveError=gdriveError, feature_support=feature_support,
title=_(u"Basic Configuration"), page="config")
content.config_access_logfile = to_save["config_access_logfile"]
reboot_required = True
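For reference, a minimal standalone sketch of the logfile-location check used above; it mirrors logger.is_valid_logfile further down in this diff (an empty directory part means the file lands next to the application, otherwise the directory must already exist). The helper name below is illustrative only.

import os

def check_logfile_location(file_path):
    # Accept a bare filename, or a path whose directory already exists.
    log_dir = os.path.dirname(file_path)
    return (not log_dir) or os.path.isdir(log_dir)

check_logfile_location("access.log")            # True  - bare filename
check_logfile_location("/var/log/access.log")   # True if /var/log exists
check_logfile_location("/no/such/dir/a.log")    # False - directory missing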
# Rarfile Content configuration
if "config_rarfile_location" in to_save and to_save['config_rarfile_location'] is not u"":
check = check_unrar(to_save["config_rarfile_location"].strip())
......@@ -500,7 +518,7 @@ def configuration_helper(origin):
# stop Server
Server.setRestartTyp(True)
Server.stopServer()
log.info('Reboot required, restarting')
logger.info('Reboot required, restarting')
if origin:
success = True
if is_gdrive_ready() and feature_support['gdrive'] is True: # and config.config_use_google_drive == True:
......
......@@ -24,7 +24,7 @@ import hashlib
from . import logger
log = logger.create()
# log = logger.create()
def init_cache_busting(app):
......@@ -40,7 +40,7 @@ def init_cache_busting(app):
hash_table = {} # map of file hashes
log.debug('Computing cache-busting values...')
logger.debug('Computing cache-busting values...')
# compute file hashes
for dirpath, __, filenames in os.walk(static_folder):
for filename in filenames:
......@@ -53,7 +53,7 @@ def init_cache_busting(app):
file_path = rooted_filename.replace(static_folder, "")
file_path = file_path.replace("\\", "/") # Convert Windows path to web path
hash_table[file_path] = file_hash
log.debug('Finished computing cache-busting values')
logger.debug('Finished computing cache-busting values')
def bust_filename(filename):
return hash_table.get(filename, "")
......
......@@ -24,6 +24,48 @@ import os
import argparse
from .constants import CONFIG_DIR as _CONFIG_DIR
from .constants import STABLE_VERSION as _STABLE_VERSION
from .constants import NIGHTLY_VERSION as _NIGHTLY_VERSION
VALID_CHARACTERS = 'ABCDEFabcdef:0123456789'
ipv6 = False
def version_info():
if _NIGHTLY_VERSION[1].startswith('$Format'):
return "Calibre-Web version: %s - unkown git-clone" % _STABLE_VERSION['version']
else:
return "Calibre-Web version: %s -%s" % (_STABLE_VERSION['version'],_NIGHTLY_VERSION[1])
def validate_ip4(address):
address_list = address.split('.')
if len(address_list) != 4:
return False
for val in address_list:
if not val.isdigit():
return False
i = int(val)
if i < 0 or i > 255:
return False
return True
def validate_ip6(address):
address_list = address.split(':')
return (
len(address_list) == 8
and all(len(current) <= 4 for current in address_list)
and all(current in VALID_CHARACTERS for current in address)
)
def validate_ip(address):
if validate_ip4(address) or ipv6:
return address
print("IP address is invalid. Exiting")
sys.exit(1)
parser = argparse.ArgumentParser(description='Calibre Web is a web app'
......@@ -34,12 +76,17 @@ parser.add_argument('-c', metavar='path',
help='path and name to SSL certfile, e.g. /opt/test.cert, works only in combination with keyfile')
parser.add_argument('-k', metavar='path',
help='path and name to SSL keyfile, e.g. /opt/test.key, works only in combination with certfile')
parser.add_argument('-v', action='store_true', help='shows version number and exits Calibre-web')
parser.add_argument('-v', '--version', action='version', help='Shows version number and exits Calibre-web',
version=version_info())
parser.add_argument('-i', metavar='ip-address', help='Server IP-Address to listen on')
parser.add_argument('-s', metavar='user:pass', help='Sets specific username to new password')
args = parser.parse_args()
settingspath = args.p or os.path.join(_CONFIG_DIR, "app.db")
gdpath = args.g or os.path.join(_CONFIG_DIR, "gdrive.db")
# handle and check parameter for ssl encryption
certfilepath = None
keyfilepath = None
if args.c:
......@@ -66,6 +113,12 @@ if (args.k and not args.c) or (not args.k and args.c):
if args.k is "":
keyfilepath = ""
if args.v:
print("Calibre-web version: 0.6.4")
sys.exit(1)
# handle and check ipadress argument
if args.i:
ipv6 = validate_ip6(args.i)
ipadress = validate_ip(args.i)
else:
ipadress = None
# handle and check user password argument
user_password = args.s or None
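A minimal sketch of how the new -i option behaves, re-stating the validators from this file for illustration (the module-level ipv6 flag becomes a plain parameter here). Note that validate_ip6 only accepts fully written-out addresses with eight colon-separated groups; abbreviations such as '::1' are rejected.

import sys

VALID_CHARACTERS = 'ABCDEFabcdef:0123456789'

def validate_ip4(address):
    parts = address.split('.')
    return len(parts) == 4 and all(p.isdigit() and 0 <= int(p) <= 255 for p in parts)

def validate_ip6(address):
    groups = address.split(':')
    return (len(groups) == 8
            and all(len(g) <= 4 for g in groups)
            and all(c in VALID_CHARACTERS for c in address))

def validate_ip(address, ipv6):
    # Mirrors cli.validate_ip: exit the process when neither check passes.
    if validate_ip4(address) or ipv6:
        return address
    print("IP address is invalid. Exiting")
    sys.exit(1)

validate_ip4("192.168.0.10")                             # True
validate_ip6("fe80:0000:0000:0000:0202:b3ff:fe1e:8329")  # True
validate_ip6("::1")                                      # False - abbreviation not handled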
......@@ -24,14 +24,14 @@ from . import logger, isoLanguages
from .constants import BookMeta
log = logger.create()
# log = logger.create()
try:
from comicapi.comicarchive import ComicArchive, MetaDataStyle
use_comic_meta = True
except ImportError as e:
log.warning('cannot import comicapi, extracting comic metadata will not work: %s', e)
logger.warning('cannot import comicapi, extracting comic metadata will not work: %s', e)
import zipfile
import tarfile
use_comic_meta = False
......
......@@ -111,6 +111,13 @@ BookMeta = namedtuple('BookMeta', 'file_path, extension, title, author, cover, d
STABLE_VERSION = {'version': '0.6.4 Beta'}
NIGHTLY_VERSION = {}
NIGHTLY_VERSION[0] = '$Format:%H$'
NIGHTLY_VERSION[1] = '$Format:%cI$'
# NIGHTLY_VERSION[0] = 'bb7d2c6273ae4560e83950d36d64533343623a57'
# NIGHTLY_VERSION[1] = '2018-09-09T10:13:08+02:00'
# clean-up the module namespace
del sys, os, namedtuple
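The $Format placeholders above work together with the 'constants.py ident export-subst' line added to .gitattributes at the top of this commit: git archive substitutes the commit hash and date, while a plain clone leaves the literal '$Format:...' strings in place. version_info() in cli.py keys off that, roughly as in this sketch (values shown are examples):

STABLE_VERSION = {'version': '0.6.4 Beta'}
NIGHTLY_VERSION = {0: '$Format:%H$', 1: '$Format:%cI$'}  # unexpanded in a plain clone

def version_info():
    # If git did not substitute the placeholder, only the stable version is known.
    if NIGHTLY_VERSION[1].startswith('$Format'):
        return "Calibre-Web version: %s - unknown git-clone" % STABLE_VERSION['version']
    return "Calibre-Web version: %s -%s" % (STABLE_VERSION['version'], NIGHTLY_VERSION[1])

print(version_info())  # Calibre-Web version: 0.6.4 Beta - unknown git-clone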
......@@ -39,7 +39,7 @@ from .web import login_required_if_no_ano, render_title_template, edit_required,
editbook = Blueprint('editbook', __name__)
log = logger.create()
# log = logger.create()
# Modifies different Database objects, first check if elements have to be added to database, than check
......@@ -198,7 +198,7 @@ def delete_book(book_id, book_format):
db.session.commit()
else:
# book not found
log.error('Book with id "%s" could not be deleted: not found', book_id)
logger.error('Book with id "%s" could not be deleted: not found', book_id)
if book_format:
return redirect(url_for('editbook.edit_book', book_id=book_id))
else:
......@@ -237,7 +237,7 @@ def render_edit_book(book_id):
try:
allowed_conversion_formats.remove(file.format.lower())
except Exception:
log.warning('%s already removed from list.', file.format.lower())
logger.warning('%s already removed from list.', file.format.lower())
return render_title_template('book_edit.html', book=book, authors=author_names, cc=cc,
title=_(u"edit metadata"), page="editbook",
......@@ -349,7 +349,7 @@ def upload_single_file(request, book, book_id):
# Format entry already exists, no need to update the database
if is_format:
log.warning('Book format %s already existing', file_ext.upper())
logger.warning('Book format %s already existing', file_ext.upper())
else:
db_format = db.Data(book_id, file_ext.upper(), file_size, file_name)
db.session.add(db_format)
......@@ -492,7 +492,7 @@ def edit_book(book_id):
res = list(language_table[get_locale()].keys())[invers_lang_table.index(lang)]
input_l.append(res)
except ValueError:
log.error('%s is not a valid language', lang)
logger.error('%s is not a valid language', lang)
flash(_(u"%(langname)s is not a valid language", langname=lang), category="error")
modify_database_object(input_l, book.languages, db.Languages, db.session, 'languages')
......@@ -531,7 +531,7 @@ def edit_book(book_id):
flash(error, category="error")
return render_edit_book(book_id)
except Exception as e:
log.exception(e)
logger.exception(e)
db.session.rollback()
flash(_("Error editing book, please check logfile for details"), category="error")
return redirect(url_for('web.show_book', book_id=book.id))
......@@ -703,7 +703,7 @@ def convert_bookformat(book_id):
flash(_(u"Source or destination format for conversion missing"), category="error")
return redirect(request.environ["HTTP_REFERER"])
log.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
logger.info('converting: book id: %s from: %s to: %s', book_id, book_format_from, book_format_to)
rtn = helper.convert_book_format(book_id, config.config_calibre_dir, book_format_from.upper(),
book_format_to.upper(), current_user.nickname)
......
......@@ -44,7 +44,7 @@ from .web import admin_required
gdrive = Blueprint('gdrive', __name__)
log = logger.create()
# log = logger.create()
current_milli_time = lambda: int(round(time() * 1000))
......@@ -74,7 +74,7 @@ def google_drive_callback():
with open(gdriveutils.CREDENTIALS, 'w') as f:
f.write(credentials.to_json())
except ValueError as error:
log.error(error)
logger.error(error)
return redirect(url_for('admin.configuration'))
......@@ -131,7 +131,7 @@ def revoke_watch_gdrive():
@gdrive.route("/gdrive/watch/callback", methods=['GET', 'POST'])
def on_received_watch_confirmation():
log.debug('%r', request.headers)
logger.debug('%r', request.headers)
if request.headers.get('X-Goog-Channel-Token') == gdrive_watch_callback_token \
and request.headers.get('X-Goog-Resource-State') == 'change' \
and request.data:
......@@ -139,26 +139,26 @@ def on_received_watch_confirmation():
data = request.data
def updateMetaData():
log.info('Change received from gdrive')
log.debug('%r', data)
logger.info('Change received from gdrive')
logger.debug('%r', data)
try:
j = json.loads(data)
log.info('Getting change details')
logger.info('Getting change details')
response = gdriveutils.getChangeById(gdriveutils.Gdrive.Instance().drive, j['id'])
log.debug('%r', response)
logger.debug('%r', response)
if response:
dbpath = os.path.join(config.config_calibre_dir, "metadata.db")
if not response['deleted'] and response['file']['title'] == 'metadata.db' and response['file']['md5Checksum'] != hashlib.md5(dbpath):
tmpDir = tempfile.gettempdir()
log.info('Database file updated')
logger.info('Database file updated')
copyfile(dbpath, os.path.join(tmpDir, "metadata.db_" + str(current_milli_time())))
log.info('Backing up existing and downloading updated metadata.db')
logger.info('Backing up existing and downloading updated metadata.db')
gdriveutils.downloadFile(None, "metadata.db", os.path.join(tmpDir, "tmp_metadata.db"))
log.info('Setting up new DB')
logger.info('Setting up new DB')
# prevent error on windows, as os.rename does on exisiting files
move(os.path.join(tmpDir, "tmp_metadata.db"), dbpath)
db.setup_db()
except Exception as e:
log.exception(e)
logger.exception(e)
updateMetaData()
return ''
......@@ -45,7 +45,7 @@ SETTINGS_YAML = os.path.join(_BASE_DIR, 'settings.yaml')
CREDENTIALS = os.path.join(_BASE_DIR, 'gdrive_credentials')
CLIENT_SECRETS = os.path.join(_BASE_DIR, 'client_secrets.json')
log = logger.create()
# log = logger.create()
class Singleton:
......@@ -169,9 +169,9 @@ def getDrive(drive=None, gauth=None):
try:
gauth.Refresh()
except RefreshError as e:
log.error("Google Drive error: %s", e)
logger.error("Google Drive error: %s", e)
except Exception as e:
log.exception(e)
logger.exception(e)
else:
# Initialize the saved creds
gauth.Authorize()
......@@ -181,7 +181,7 @@ def getDrive(drive=None, gauth=None):
try:
drive.auth.Refresh()
except RefreshError as e:
log.error("Google Drive error: %s", e)
logger.error("Google Drive error: %s", e)
return drive
def listRootFolders():
......@@ -218,7 +218,7 @@ def getEbooksFolderId(drive=None):
try:
gDriveId.gdrive_id = getEbooksFolder(drive)['id']
except Exception:
log.error('Error gDrive, root ID not found')
logger.error('Error gDrive, root ID not found')
gDriveId.path = '/'
session.merge(gDriveId)
session.commit()
......@@ -458,10 +458,10 @@ def getChangeById (drive, change_id):
change = drive.auth.service.changes().get(changeId=change_id).execute()
return change
except (errors.HttpError) as error:
log.error(error)
logger.error(error)
return None
except Exception as e:
log.error(e)
logger.error(e)
return None
......@@ -531,6 +531,6 @@ def do_gdrive_download(df, headers):
if resp.status == 206:
yield content
else:
log.warning('An error occurred: %s', resp)
logger.warning('An error occurred: %s', resp)
return
return Response(stream_with_context(stream()), headers=headers)
......@@ -75,7 +75,7 @@ from .worker import STAT_WAITING, STAT_FAIL, STAT_STARTED, STAT_FINISH_SUCCESS
from .worker import TASK_EMAIL, TASK_CONVERT, TASK_UPLOAD, TASK_CONVERT_ANY
log = logger.create()
# log = logger.create()
def update_download(book_id, user_id):
......@@ -92,7 +92,7 @@ def convert_book_format(book_id, calibrepath, old_book_format, new_book_format,
data = db.session.query(db.Data).filter(db.Data.book == book.id).filter(db.Data.format == old_book_format).first()
if not data:
error_message = _(u"%(format)s format not found for book id: %(book)d", format=old_book_format, book=book_id)
log.error("convert_book_format: %s", error_message)
logger.error("convert_book_format: %s", error_message)
return error_message
if config.config_use_google_drive:
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + old_book_format.lower())
......@@ -186,7 +186,7 @@ def check_send_to_kindle(entry):
'text':_('Convert %(orig)s to %(format)s and send to Kindle',orig='Epub',format='Azw3')})'''
return bookformats
else:
log.error(u'Cannot find book entry %d', entry.id)
logger.error(u'Cannot find book entry %d', entry.id)
return None
......@@ -272,7 +272,7 @@ def get_sorted_author(value):
else:
value2 = value
except Exception as ex:
log.error("Sorting author %s failed: %s", value, ex)
logger.error("Sorting author %s failed: %s", value, ex)
value2 = value
return value2
......@@ -289,12 +289,12 @@ def delete_book_file(book, calibrepath, book_format=None):
else:
if os.path.isdir(path):
if len(next(os.walk(path))[1]):
log.error("Deleting book %s failed, path has subfolders: %s", book.id, book.path)
logger.error("Deleting book %s failed, path has subfolders: %s", book.id, book.path)
return False
shutil.rmtree(path, ignore_errors=True)
return True
else:
log.error("Deleting book %s failed, book path not valid: %s", book.id, book.path)
logger.error("Deleting book %s failed, book path not valid: %s", book.id, book.path)
return False
......@@ -317,7 +317,7 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
if not os.path.exists(new_title_path):
os.renames(path, new_title_path)
else:
log.info("Copying title: %s into existing: %s", path, new_title_path)
logger.info("Copying title: %s into existing: %s", path, new_title_path)
for dir_name, __, file_list in os.walk(path):
for file in file_list:
os.renames(os.path.join(dir_name, file),
......@@ -325,8 +325,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
path = new_title_path
localbook.path = localbook.path.split('/')[0] + '/' + new_titledir
except OSError as ex:
log.error("Rename title from: %s to %s: %s", path, new_title_path, ex)
log.debug(ex, exc_info=True)
logger.error("Rename title from: %s to %s: %s", path, new_title_path, ex)
logger.debug(ex, exc_info=True)
return _("Rename title from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
src=path, dest=new_title_path, error=str(ex))
if authordir != new_authordir:
......@@ -335,8 +335,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
os.renames(path, new_author_path)
localbook.path = new_authordir + '/' + localbook.path.split('/')[1]
except OSError as ex:
log.error("Rename author from: %s to %s: %s", path, new_author_path, ex)
log.debug(ex, exc_info=True)
logger.error("Rename author from: %s to %s: %s", path, new_author_path, ex)
logger.debug(ex, exc_info=True)
return _("Rename author from: '%(src)s' to '%(dest)s' failed with error: %(error)s",
src=path, dest=new_author_path, error=str(ex))
# Rename all files from old names to new names
......@@ -349,8 +349,8 @@ def update_dir_structure_file(book_id, calibrepath, first_author):
os.path.join(path_name, new_name + '.' + file_format.format.lower()))
file_format.name = new_name
except OSError as ex:
log.error("Rename file in path %s to %s: %s", path, new_name, ex)
log.debug(ex, exc_info=True)
logger.error("Rename file in path %s to %s: %s", path, new_name, ex)
logger.debug(ex, exc_info=True)
return _("Rename file in path '%(src)s' to '%(dest)s' failed with error: %(error)s",
src=path, dest=new_name, error=str(ex))
return False
......@@ -454,10 +454,10 @@ def get_book_cover(book_id):
if path:
return redirect(path)
else:
log.error('%s/cover.jpg not found on Google Drive', book.path)
logger.error('%s/cover.jpg not found on Google Drive', book.path)
return send_from_directory(_STATIC_DIR, "generic_cover.jpg")
except Exception as e:
log.exception(e)
logger.exception(e)
# traceback.print_exc()
return send_from_directory(_STATIC_DIR,"generic_cover.jpg")
else:
......@@ -487,15 +487,15 @@ def save_cover_from_filestorage(filepath, saved_filename, img):
try:
os.makedirs(filepath)
except OSError:
log.error(u"Failed to create path for cover")
logger.error(u"Failed to create path for cover")
return False
try:
img.save(os.path.join(filepath, saved_filename))
except IOError:
log.error(u"Cover-file is not a valid image file")
logger.error(u"Cover-file is not a valid image file")
return False
except OSError:
log.error(u"Failed to store cover-file")
logger.error(u"Failed to store cover-file")
return False
return True
......@@ -506,7 +506,7 @@ def save_cover(img, book_path):
if use_PIL:
if content_type not in ('image/jpeg', 'image/png', 'image/webp'):
log.error("Only jpg/jpeg/png/webp files are supported as coverfile")
logger.error("Only jpg/jpeg/png/webp files are supported as coverfile")
return False
# convert to jpg because calibre only supports jpg
if content_type in ('image/png', 'image/webp'):
......@@ -520,7 +520,7 @@ def save_cover(img, book_path):
img._content = tmp_bytesio.getvalue()
else:
if content_type not in ('image/jpeg'):
log.error("Only jpg/jpeg files are supported as coverfile")
logger.error("Only jpg/jpeg files are supported as coverfile")
return False
if ub.config.config_use_google_drive:
......@@ -528,7 +528,7 @@ def save_cover(img, book_path):
if save_cover_from_filestorage(tmpDir, "uploaded_cover.jpg", img) is True:
gd.uploadFileToEbooksFolder(os.path.join(book_path, 'cover.jpg'),
os.path.join(tmpDir, "uploaded_cover.jpg"))
log.info("Cover is saved on Google Drive")
logger.info("Cover is saved on Google Drive")
return True
else:
return False
......@@ -541,7 +541,7 @@ def do_download_file(book, book_format, data, headers):
if config.config_use_google_drive:
startTime = time.time()
df = gd.getFileFromEbooksFolder(book.path, data.name + "." + book_format)
log.debug('%s', time.time() - startTime)
logger.debug('%s', time.time() - startTime)
if df:
return gd.do_gdrive_download(df, headers)
else:
......@@ -550,7 +550,7 @@ def do_download_file(book, book_format, data, headers):
filename = os.path.join(config.config_calibre_dir, book.path)
if not os.path.isfile(os.path.join(filename, data.name + "." + book_format)):
# ToDo: improve error handling
log.error('File not found: %s', os.path.join(filename, data.name + "." + book_format))
logger.error('File not found: %s', os.path.join(filename, data.name + "." + book_format))
response = make_response(send_from_directory(filename, data.name + "." + book_format))
response.headers = headers
return response
......@@ -575,7 +575,7 @@ def check_unrar(unrarLocation):
version = value.group(1)
except OSError as e:
error = True
log.exception(e)
logger.exception(e)
version =_(u'Error excecuting UnRar')
else:
version = _(u'Unrar binary file not found')
......
......@@ -37,7 +37,7 @@ from . import logger
jinjia = Blueprint('jinjia', __name__)
log = logger.create()
# log = logger.create()
# pagination links in jinja
......@@ -85,7 +85,7 @@ def formatdate_filter(val):
formatdate = datetime.datetime.strptime(conformed_timestamp[:15], "%Y%m%d %H%M%S")
return format_date(formatdate, format='medium', locale=get_locale())
except AttributeError as e:
log.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname)
logger.error('Babel error: %s, Current user locale: %s, Current User: %s', e, current_user.locale, current_user.nickname)
return formatdate
@jinjia.app_template_filter('formatdateinput')
......
......@@ -25,23 +25,50 @@ from logging.handlers import RotatingFileHandler
from .constants import BASE_DIR as _BASE_DIR
ACCESS_FORMATTER = Formatter("%(message)s")
FORMATTER = Formatter("[%(asctime)s] %(levelname)5s {%(name)s:%(lineno)d} %(message)s")
DEFAULT_LOG_LEVEL = logging.INFO
DEFAULT_LOG_FILE = os.path.join(_BASE_DIR, "calibre-web.log")
DEFAULT_ACCESS_LOG = os.path.join(_BASE_DIR, "access.log")
LOG_TO_STDERR = '/dev/stderr'
DEFAULT_ACCESS_LEVEL= logging.INFO
logging.addLevelName(logging.WARNING, "WARN")
logging.addLevelName(logging.CRITICAL, "CRIT")
def info(msg, *args, **kwargs):
create(2).info(msg, *args, **kwargs)
def warning(msg, *args, **kwargs):
create(2).warning(msg, *args, **kwargs)
def error(msg, *args, **kwargs):
create(2).error(msg, *args, **kwargs)
def critical(msg, *args, **kwargs):
create(2).critical(msg, *args, **kwargs)
def exception(msg, *args, **kwargs):
create(2).exception(msg, *args, **kwargs)
def debug(msg, *args, **kwargs):
create(2).debug(msg, *args, **kwargs)
def get(name=None):
return logging.getLogger(name)
val = logging.getLogger("general")
val.name = name
return val
def create():
parent_frame = inspect.stack(0)[1]
def create(ini=1):
parent_frame = inspect.stack(0)[ini]
if hasattr(parent_frame, 'frame'):
parent_frame = parent_frame.frame
else:
......@@ -50,8 +77,11 @@ def create():
return get(parent_module.__name__)
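The new module-level info()/warning()/... helpers pass 2 to create() so that inspect.stack() skips both the wrapper and create() itself and names the logger after the calling module. A standalone sketch of that pattern (not the project's exact code):

import inspect
import logging

def create(ini=1):
    # Walk up the call stack to the module that asked for a logger.
    frame_info = inspect.stack(0)[ini]
    frame = frame_info.frame if hasattr(frame_info, 'frame') else frame_info[0]
    module = inspect.getmodule(frame)
    return logging.getLogger(module.__name__ if module else "general")

def info(msg, *args, **kwargs):
    # ini=2: skip info() and create() so the caller's module is reported.
    create(2).info(msg, *args, **kwargs)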
def is_debug_enabled():
return logging.root.level <= logging.DEBUG
def is_debug_enabled(logger):
return logging.getLogger(logger).level <= logging.DEBUG
def is_info_enabled(logger):
return logging.getLogger(logger).level <= logging.INFO
def get_level_name(level):
......@@ -67,17 +97,23 @@ def is_valid_logfile(file_path):
return (not log_dir) or os.path.isdir(log_dir)
def setup(log_file, log_level=None):
def setup(log_file, logger, log_level=None):
if logger == "general":
formatter = FORMATTER
default_file = DEFAULT_LOG_FILE
else:
formatter = ACCESS_FORMATTER
default_file = DEFAULT_ACCESS_LOG
if log_file:
if not os.path.dirname(log_file):
log_file = os.path.join(_BASE_DIR, log_file)
log_file = os.path.abspath(log_file)
else:
# log_file = LOG_TO_STDERR
log_file = DEFAULT_LOG_FILE
log_file = default_file
# print ('%r -- %r' % (log_level, log_file))
r = logging.root
r = logging.getLogger(logger)
r.setLevel(log_level or DEFAULT_LOG_LEVEL)
previous_handler = r.handlers[0] if r.handlers else None
......@@ -96,10 +132,10 @@ def setup(log_file, log_level=None):
try:
file_handler = RotatingFileHandler(log_file, maxBytes=50000, backupCount=2)
except IOError:
if log_file == DEFAULT_LOG_FILE:
if log_file == default_file:
raise
file_handler = RotatingFileHandler(DEFAULT_LOG_FILE, maxBytes=50000, backupCount=2)
file_handler.setFormatter(FORMATTER)
file_handler = RotatingFileHandler(default_file, maxBytes=50000, backupCount=2)
file_handler.setFormatter(formatter)
for h in r.handlers:
r.removeHandler(h)
......@@ -122,4 +158,4 @@ class StderrLogger(object):
else:
self.buffer += message
except Exception:
self.logger.debug("Logging Error")
self.log.debug("Logging Error")
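With the reworked setup() above, the application configures two independent named loggers instead of the root logger: 'general' keeps the detailed FORMATTER and calibre-web.log, while 'access' gets the bare message formatter and access.log. A simplified, self-contained sketch of that split (the real setup() also handles stderr logging and handler fallback):

import logging
from logging import Formatter
from logging.handlers import RotatingFileHandler

FORMATTER = Formatter("[%(asctime)s] %(levelname)5s {%(name)s:%(lineno)d} %(message)s")
ACCESS_FORMATTER = Formatter("%(message)s")

def simple_setup(log_file, name, formatter, level=logging.INFO):
    log = logging.getLogger(name)
    log.setLevel(level)
    handler = RotatingFileHandler(log_file, maxBytes=50000, backupCount=2)
    handler.setFormatter(formatter)
    for h in list(log.handlers):   # replace any previously attached handler
        log.removeHandler(h)
    log.addHandler(handler)
    return log

simple_setup("calibre-web.log", "general", FORMATTER)
simple_setup("access.log", "access", ACCESS_FORMATTER)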
......@@ -42,7 +42,7 @@ from .web import login_required
oauth_check = {}
oauth = Blueprint('oauth', __name__)
log = logger.create()
# log = logger.create()
def github_oauth_required(f):
......@@ -105,7 +105,7 @@ def register_user_with_oauth(user=None):
try:
ub.session.commit()
except Exception as e:
log.exception(e)
logger.exception(e)
ub.session.rollback()
......@@ -199,7 +199,7 @@ if ub.oauth_support:
ub.session.add(oauth)
ub.session.commit()
except Exception as e:
log.exception(e)
logger.exception(e)
ub.session.rollback()
# Disable Flask-Dance's default behavior for saving the OAuth token
......@@ -225,7 +225,7 @@ if ub.oauth_support:
ub.session.add(oauth)
ub.session.commit()
except Exception as e:
log.exception(e)
logger.exception(e)
ub.session.rollback()
return redirect(url_for('web.login'))
#if config.config_public_reg:
......@@ -268,11 +268,11 @@ if ub.oauth_support:
logout_oauth_user()
flash(_(u"Unlink to %(oauth)s success.", oauth=oauth_check[provider]), category="success")
except Exception as e:
log.exception(e)
logger.exception(e)
ub.session.rollback()
flash(_(u"Unlink to %(oauth)s failed.", oauth=oauth_check[provider]), category="error")
except NoResultFound:
log.warning("oauth %s for user %d not fount", provider, current_user.id)
logger.warning("oauth %s for user %d not fount", provider, current_user.id)
flash(_(u"Not linked to %(oauth)s.", oauth=oauth_check[provider]), category="error")
return redirect(url_for('web.profile'))
......
......@@ -108,8 +108,8 @@ def feed_best_rated():
@requires_basic_auth_if_no_ano
def feed_hot():
off = request.args.get("offset") or 0
all_books = ub.session.query(ub.Downloads, ub.func.count(ub.Downloads.book_id)).order_by(
ub.func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
all_books = ub.session.query(ub.Downloads, func.count(ub.Downloads.book_id)).order_by(
func.count(ub.Downloads.book_id).desc()).group_by(ub.Downloads.book_id)
hot_books = all_books.offset(off).limit(config.config_books_per_page)
entries = list()
for book in hot_books:
......
......@@ -22,6 +22,7 @@ import sys
import os
import signal
import socket
import logging
try:
from gevent.pywsgi import WSGIServer
......@@ -38,14 +39,12 @@ except ImportError:
from . import logger, config, global_WorkerThread
log = logger.create()
class server:
wsgiserver = None
restart = False
app = None
access_logger = None
def __init__(self):
signal.signal(signal.SIGINT, self.killServer)
......@@ -54,6 +53,11 @@ class server:
def init_app(self, application):
self.app = application
self.port = config.config_port
self.listening = config.get_config_ipaddress(readable=True) + ":" + str(self.port)
if config.config_access_log:
self.access_logger = logging.getLogger("access")
else:
self.access_logger = None
self.ssl_args = None
certfile_path = config.get_config_certfile()
......@@ -63,54 +67,64 @@ class server:
self.ssl_args = {"certfile": certfile_path,
"keyfile": keyfile_path}
else:
log.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.')
log.warning('Cert path: %s', certfile_path)
log.warning('Key path: %s', keyfile_path)
logger.warning('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl.')
logger.warning('Cert path: %s', certfile_path)
logger.warning('Key path: %s', keyfile_path)
def _make_gevent_socket(self):
if config.get_config_ipaddress():
return (config.get_config_ipaddress(), self.port)
if os.name == 'nt':
return ('0.0.0.0', self.port)
try:
s = WSGIServer.get_listener(('', self.port), family=socket.AF_INET6)
except socket.error as ex:
log.error('%s', ex)
log.warning('Unable to listen on \'\', trying on IPv4 only...')
logger.error('%s', ex)
logger.warning('Unable to listen on \'\', trying on IPv4 only...')
s = WSGIServer.get_listener(('', self.port), family=socket.AF_INET)
log.debug("%r %r", s._sock, s._sock.getsockname())
logger.debug("%r %r", s._sock, s._sock.getsockname())
return s
def start_gevent(self):
ssl_args = self.ssl_args or {}
log.info('Starting Gevent server')
logger.info('Starting Gevent server on %s', self.listening)
try:
sock = self._make_gevent_socket()
self.wsgiserver = WSGIServer(sock, self.app, spawn=Pool(), **ssl_args)
self.wsgiserver = WSGIServer(sock, self.app, log=self.access_logger, spawn=Pool(), **ssl_args)
self.wsgiserver.serve_forever()
except (OSError, socket.error) as e:
log.info("Error starting server: %s", e.strerror)
print("Error starting server: %s" % e.strerror)
global_WorkerThread.stop()
sys.exit(1)
except socket.error:
try:
logger.info('Unable to listen on "", trying on "0.0.0.0" only...')
self.wsgiserver = WSGIServer(('0.0.0.0', config.config_port), self.app, spawn=Pool(), **ssl_args)
self.wsgiserver.serve_forever()
except (OSError, socket.error) as e:
logger.info("Error starting server: %s", e.strerror)
print("Error starting server: %s" % e.strerror)
global_WorkerThread.stop()
sys.exit(1)
except Exception:
log.exception("Unknown error while starting gevent")
logger.exception("Unknown error while starting gevent")
sys.exit(0)
def start_tornado(self):
log.info('Starting Tornado server')
logger.info('Starting Tornado server on %s', self.listening)
try:
# Max Buffersize set to 200MB
http_server = HTTPServer(WSGIContainer(self.app),
max_buffer_size = 209700000,
ssl_options=self.ssl_args)
http_server.listen(self.port)
address = config.get_config_ipaddress()
http_server.listen(self.port, address)
# self.access_log = logging.getLogger("tornado.access")
self.wsgiserver=IOLoop.instance()
self.wsgiserver.start()
# wait for stop signal
self.wsgiserver.close(True)
except socket.error as err:
log.exception("Error starting tornado server")
logger.exception("Error starting tornado server")
print("Error starting server: %s" % err.strerror)
global_WorkerThread.stop()
sys.exit(1)
......@@ -123,7 +137,7 @@ class server:
self.start_tornado()
if self.restart is True:
log.info("Performing restart of Calibre-Web")
logger.info("Performing restart of Calibre-Web")
global_WorkerThread.stop()
if os.name == 'nt':
arguments = ["\"" + sys.executable + "\""]
......@@ -133,7 +147,7 @@ class server:
else:
os.execl(sys.executable, sys.executable, *sys.argv)
else:
log.info("Performing shutdown of Calibre-Web")
logger.info("Performing shutdown of Calibre-Web")
global_WorkerThread.stop()
sys.exit(0)
......
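Two details of the server changes above: the gevent WSGIServer now receives the 'access' logger through its log= argument (gevent writes its per-request log lines to it, which the bare ACCESS_FORMATTER passes through unchanged), and Tornado simply gets the configured address passed to http_server.listen(). A rough sketch of the gevent wiring, assuming gevent is installed; the trivial WSGI app stands in for the Flask application:

import logging
from gevent.pywsgi import WSGIServer

def app(environ, start_response):
    # Placeholder WSGI application used only for this sketch.
    start_response('200 OK', [('Content-Type', 'text/plain')])
    return [b'ok']

access_logger = logging.getLogger("access")  # handlers attached by logger.setup()
server = WSGIServer(('0.0.0.0', 8083), app, log=access_logger)
# server.serve_forever()  # not started here so the sketch does not block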
......@@ -33,7 +33,7 @@ from .web import render_title_template
shelf = Blueprint('shelf', __name__)
log = logger.create()
# log = logger.create()
@shelf.route("/shelf/add/<int:shelf_id>/<int:book_id>")
......@@ -41,14 +41,14 @@ log = logger.create()
def add_to_shelf(shelf_id, book_id):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
if shelf is None:
log.error("Invalid shelf specified: %s", shelf_id)
logger.error("Invalid shelf specified: %s", shelf_id)
if not request.is_xhr:
flash(_(u"Invalid shelf specified"), category="error")
return redirect(url_for('web.index'))
return "Invalid shelf specified", 400
if not shelf.is_public and not shelf.user_id == int(current_user.id):
log.error("User %s not allowed to add a book to %s", current_user, shelf)
logger.error("User %s not allowed to add a book to %s", current_user, shelf)
if not request.is_xhr:
flash(_(u"Sorry you are not allowed to add a book to the the shelf: %(shelfname)s", shelfname=shelf.name),
category="error")
......@@ -56,7 +56,7 @@ def add_to_shelf(shelf_id, book_id):
return "Sorry you are not allowed to add a book to the the shelf: %s" % shelf.name, 403
if shelf.is_public and not current_user.role_edit_shelfs():
log.info("User %s not allowed to edit public shelves", current_user)
logger.info("User %s not allowed to edit public shelves", current_user)
if not request.is_xhr:
flash(_(u"You are not allowed to edit public shelves"), category="error")
return redirect(url_for('web.index'))
......@@ -65,7 +65,7 @@ def add_to_shelf(shelf_id, book_id):
book_in_shelf = ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id,
ub.BookShelf.book_id == book_id).first()
if book_in_shelf:
log.error("Book %s is already part of %s", book_id, shelf)
logger.error("Book %s is already part of %s", book_id, shelf)
if not request.is_xhr:
flash(_(u"Book is already part of the shelf: %(shelfname)s", shelfname=shelf.name), category="error")
return redirect(url_for('web.index'))
......@@ -94,17 +94,17 @@ def add_to_shelf(shelf_id, book_id):
def search_to_shelf(shelf_id):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
if shelf is None:
log.error("Invalid shelf specified: %s", shelf_id)
logger.error("Invalid shelf specified: %s", shelf_id)
flash(_(u"Invalid shelf specified"), category="error")
return redirect(url_for('web.index'))
if not shelf.is_public and not shelf.user_id == int(current_user.id):
log.error("User %s not allowed to add a book to %s", current_user, shelf)
logger.error("User %s not allowed to add a book to %s", current_user, shelf)
flash(_(u"You are not allowed to add a book to the the shelf: %(name)s", name=shelf.name), category="error")
return redirect(url_for('web.index'))
if shelf.is_public and not current_user.role_edit_shelfs():
log.error("User %s not allowed to edit public shelves", current_user)
logger.error("User %s not allowed to edit public shelves", current_user)
flash(_(u"User is not allowed to edit public shelves"), category="error")
return redirect(url_for('web.index'))
......@@ -122,7 +122,7 @@ def search_to_shelf(shelf_id):
books_for_shelf = searched_ids[current_user.id]
if not books_for_shelf:
log.error("Books are already part of %s", shelf)
logger.error("Books are already part of %s", shelf)
flash(_(u"Books are already part of the shelf: %(name)s", name=shelf.name), category="error")
return redirect(url_for('web.index'))
......@@ -148,7 +148,7 @@ def search_to_shelf(shelf_id):
def remove_from_shelf(shelf_id, book_id):
shelf = ub.session.query(ub.Shelf).filter(ub.Shelf.id == shelf_id).first()
if shelf is None:
log.error("Invalid shelf specified: %s", shelf_id)
logger.error("Invalid shelf specified: %s", shelf_id)
if not request.is_xhr:
return redirect(url_for('web.index'))
return "Invalid shelf specified", 400
......@@ -167,7 +167,7 @@ def remove_from_shelf(shelf_id, book_id):
ub.BookShelf.book_id == book_id).first()
if book_shelf is None:
log.error("Book %s already removed from %s", book_id, shelf)
logger.error("Book %s already removed from %s", book_id, shelf)
if not request.is_xhr:
return redirect(url_for('web.index'))
return "Book already removed from shelf", 410
......@@ -180,7 +180,7 @@ def remove_from_shelf(shelf_id, book_id):
return redirect(request.environ["HTTP_REFERER"])
return "", 204
else:
log.error("User %s not allowed to remove a book from %s", current_user, shelf)
logger.error("User %s not allowed to remove a book from %s", current_user, shelf)
if not request.is_xhr:
flash(_(u"Sorry you are not allowed to remove a book from this shelf: %(sname)s", sname=shelf.name),
category="error")
......@@ -262,7 +262,7 @@ def delete_shelf(shelf_id):
if deleted:
ub.session.query(ub.BookShelf).filter(ub.BookShelf.shelf == shelf_id).delete()
ub.session.commit()
log.info("successfully deleted %s", cur_shelf)
logger.info("successfully deleted %s", cur_shelf)
return redirect(url_for('web.index'))
# @shelf.route("/shelfdown/<int:shelf_id>")
......@@ -289,7 +289,7 @@ def show_shelf(shelf_type, shelf_id):
if cur_book:
result.append(cur_book)
else:
log.info('Not existing book %s in %s deleted', book.book_id, shelf)
logger.info('Not existing book %s in %s deleted', book.book_id, shelf)
ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book.book_id).delete()
ub.session.commit()
return render_title_template(page, entries=result, title=_(u"Shelf: '%(name)s'", name=shelf.name),
......
......@@ -23,16 +23,16 @@
<h3>{{_("In Library")}}</h3>
{% endif %}
<div class="filterheader hidden-xs hidden-sm">
<a id="new" class="btn btn-primary" href="{{url_for('web.books_list', data='author', sort='new')}}"><span class="glyphicon glyphicon-sort-by-order"></span></a>
<a id="old" class="btn btn-primary" href="{{url_for('web.books_list', data='author', sort='old')}}"><span class="glyphicon glyphicon-sort-by-order-alt"></span></a>
<a id="asc" class="btn btn-primary" href="{{url_for('web.books_list', data='author', sort='abc')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet"></span></a>
<a id="desc" class="btn btn-primary" href="{{url_for('web.books_list', data='author', sort='zyx')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></a>
<a id="pub_new" class="btn btn-primary" href="{{url_for('web.books_list', data='author', sort='pubnew')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order"></span></a>
<a id="pub_old" class="btn btn-primary" href="{{url_for('web.books_list', data='author', sort='pubold')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order-alt"></span></a>
<div class="btn-group character" role="group">
<a id="no_shelf" class="btn btn-primary" href="{{url_for('web.books_list', data='author', sort='pubold')}}"><span class="glyphicon glyphicon-list"></span><b>?</b></a>
<a id="new" class="btn btn-primary" href="{{url_for('web.books_list', data='author', book_id=id, sort='new')}}"><span class="glyphicon glyphicon-book"></span> <span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order"></span></a>
<a id="old" class="btn btn-primary" href="{{url_for('web.books_list', data='author', book_id=id, sort='old')}}"><span class="glyphicon glyphicon-book"></span> <span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order-alt"></span></a>
<a id="asc" class="btn btn-primary" href="{{url_for('web.books_list', data='author', book_id=id, sort='abc')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet"></span></a>
<a id="desc" class="btn btn-primary" href="{{url_for('web.books_list', data='author', book_id=id, sort='zyx')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></a>
<a id="pub_new" class="btn btn-primary" href="{{url_for('web.books_list', data='author', book_id=id, sort='pubnew')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order"></span></a>
<a id="pub_old" class="btn btn-primary" href="{{url_for('web.books_list', data='author', book_id=id, sort='pubold')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order-alt"></span></a>
<!--div class="btn-group character" role="group">
<a id="no_shelf" class="btn btn-primary" href="{{url_for('web.books_list', data='author', book_id=id, sort='pubold')}}"><span class="glyphicon glyphicon-list"></span><b>?</b></a>
<div id="all" class="btn btn-primary">{{_('All')}}</div>
</div>
</div-->
</div>
<div class="row">
{% if entries[0] %}
......
......@@ -127,7 +127,15 @@
</div>
<div class="form-group">
<label for="config_logfile">{{_('Location and name of logfile (calibre-web.log for no entry)')}}</label>
<input type="text" class="form-control" name="config_logfile" id="config_logfile" value="{% if config.config_logfile != None %}{{ config.config_logfile }}{% endif %}" autocomplete="off">
<input type="text" class="form-control" name="config_logfile" id="config_logfile" value="{% if config.config_logfile != None %}{{ config.config_logfile }}{% endif %}" autocomplete="off">
</div>
<div class="form-group">
<input type="checkbox" id="config_access_log" name="config_access_log" {% if config.config_access_log %}checked{% endif %}>
<label for="config_access_log">{{_('Enable Access Log')}}</label>
</div>
<div class="form-group">
<label for="config_access_logfile">{{_('Location and name of access logfile (access.log for no entry)')}}</label>
<input type="text" class="form-control" name="config_access_logfile" id="config_access_logfile" value="{% if config.config_access_logfile != None %}{{ config.config_access_logfile }}{% endif %}" autocomplete="off">
</div>
</div>
</div>
......
......@@ -54,15 +54,15 @@
<div class="discover load-more">
<h2 class="{{title}}">{{_(title)}}</h2>
<div class="filterheader hidden-xs hidden-sm">
<a data-toggle="tooltip" id="new" class="btn btn-primary" href="{{url_for('web.books_list', data=page, sort='new')}}"><span class="glyphicon glyphicon-sort-by-attributes"></span></a>
<a id="old" class="btn btn-primary" href="{{url_for('web.books_list', data=page, sort='old')}}"><span class="glyphicon glyphicon-sort-by-attributes-alt"></span></a>
<a id="asc" class="btn btn-primary" href="{{url_for('web.books_list', data=page, sort='abc')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet"></span></a>
<a id="desc" class="btn btn-primary" href="{{url_for('web.books_list', data=page, sort='zyx')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></a>
<a id="pub_new" class="btn btn-primary" href="{{url_for('web.books_list', data=page, sort='pubnew')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order"></span></a>
<a id="pub_old" class="btn btn-primary" href="{{url_for('web.books_list', data=page, sort='pubold')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order-alt"></span></a>
<div class="btn-group character">
<a id="no_shelf" class="btn btn-primary" href="{{url_for('web.books_list', data=page, sort='pubold')}}"><span class="glyphicon glyphicon-list"></span> <b>{{_('Group by series')}}</b></a>
</div>
<a data-toggle="tooltip" id="new" class="btn btn-primary" href="{{url_for('web.books_list', data=page, book_id=id, sort='new')}}"><span class="glyphicon glyphicon-book"></span> <span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order"></span></a>
<a id="old" class="btn btn-primary" href="{{url_for('web.books_list', data=page, book_id=id, sort='old')}}"><span class="glyphicon glyphicon-book"></span> <span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order-alt"></span></a>
<a id="asc" class="btn btn-primary" href="{{url_for('web.books_list', data=page, book_id=id, sort='abc')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet"></span></a>
<a id="desc" class="btn btn-primary" href="{{url_for('web.books_list', data=page, book_id=id, sort='zyx')}}"><span class="glyphicon glyphicon-font"></span><span class="glyphicon glyphicon-sort-by-alphabet-alt"></span></a>
<a id="pub_new" class="btn btn-primary" href="{{url_for('web.books_list', data=page, book_id=id, sort='pubnew')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order"></span></a>
<a id="pub_old" class="btn btn-primary" href="{{url_for('web.books_list', data=page, book_id=id, sort='pubold')}}"><span class="glyphicon glyphicon-calendar"></span><span class="glyphicon glyphicon-sort-by-order-alt"></span></a>
<!--div class="btn-group character">
<a id="no_shelf" class="btn btn-primary" href="{{url_for('web.books_list', data=page, book_id=id, sort='pubold')}}"><span class="glyphicon glyphicon-list"></span> <b>{{_('Group by series')}}</b></a>
</div-->
</div>
<div class="row">
......
......@@ -39,6 +39,7 @@ from sqlalchemy import String, Integer, SmallInteger, Boolean, DateTime
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from werkzeug.security import generate_password_hash
import logging
try:
import ldap
......@@ -330,6 +331,7 @@ class Settings(Base):
config_read_column = Column(Integer, default=0)
config_title_regex = Column(String, default=u'^(A|The|An|Der|Die|Das|Den|Ein|Eine|Einen|Dem|Des|Einem|Eines)\s+')
config_log_level = Column(SmallInteger, default=logger.DEFAULT_LOG_LEVEL)
config_access_log = Column(SmallInteger, default=0)
config_uploading = Column(SmallInteger, default=0)
config_anonbrowse = Column(SmallInteger, default=0)
config_public_reg = Column(SmallInteger, default=0)
......@@ -355,6 +357,7 @@ class Settings(Base):
config_google_oauth_client_secret = Column(String)
config_mature_content_tags = Column(String)
config_logfile = Column(String)
config_access_logfile = Column(String)
config_ebookconverter = Column(Integer, default=0)
config_converterpath = Column(String)
config_calibre = Column(String)
......@@ -404,6 +407,7 @@ class Config:
self.config_title_regex = data.config_title_regex
self.config_read_column = data.config_read_column
self.config_log_level = data.config_log_level
self.config_access_log = data.config_access_log
self.config_uploading = data.config_uploading
self.config_anonbrowse = data.config_anonbrowse
self.config_public_reg = data.config_public_reg
......@@ -438,10 +442,13 @@ class Config:
self.config_google_oauth_client_secret = data.config_google_oauth_client_secret
self.config_mature_content_tags = data.config_mature_content_tags or u''
self.config_logfile = data.config_logfile or u''
self.config_access_logfile = data.config_access_logfile or u''
self.config_rarfile_location = data.config_rarfile_location
self.config_theme = data.config_theme
self.config_updatechannel = data.config_updatechannel
logger.setup(self.config_logfile, self.config_log_level)
logger.setup(self.config_logfile, "general", self.config_log_level)
if self.config_access_log:
logger.setup("access.log", "access", logger.DEFAULT_ACCESS_LEVEL)
@property
def get_update_channel(self):
......@@ -465,6 +472,21 @@ class Config:
else:
return self.config_keyfile
def get_config_ipaddress(self, readable=False):
if not readable:
if cli.ipadress:
return cli.ipadress
else:
return ""
else:
answer="0.0.0.0"
if cli.ipadress:
if cli.ipv6:
answer = "["+cli.ipadress+"]"
else:
answer = cli.ipadress
return answer
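get_config_ipaddress() returns the raw CLI value for socket binding, and a human-readable form (empty value shown as 0.0.0.0, IPv6 wrapped in brackets) for the startup message built in server.init_app(). A small standalone sketch of that formatting with the cli module replaced by plain arguments:

def readable_listen_address(ipadress, ipv6, port):
    # Mirror Config.get_config_ipaddress(readable=True) plus the port suffix.
    if not ipadress:
        host = "0.0.0.0"
    elif ipv6:
        host = "[" + ipadress + "]"
    else:
        host = ipadress
    return host + ":" + str(port)

readable_listen_address(None, False, 8083)   # '0.0.0.0:8083'
readable_listen_address("::1", True, 8083)   # '[::1]:8083'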
def _has_role(self, role_flag):
return constants.has_flag(self.config_default_role, role_flag)
......@@ -588,8 +610,10 @@ def migrate_Database():
conn.execute("UPDATE user SET 'sidebar_view' = (random_books* :side_random + language_books * :side_lang "
"+ series_books * :side_series + category_books * :side_category + hot_books * "
":side_hot + :side_autor + :detail_random)"
,{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE, 'side_series': constants.SIDEBAR_SERIES,
'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT, 'side_autor': constants.SIDEBAR_AUTHOR,
,{'side_random': constants.SIDEBAR_RANDOM, 'side_lang': constants.SIDEBAR_LANGUAGE,
'side_series': constants.SIDEBAR_SERIES,
'side_category': constants.SIDEBAR_CATEGORY, 'side_hot': constants.SIDEBAR_HOT,
'side_autor': constants.SIDEBAR_AUTHOR,
'detail_random': constants.DETAIL_RANDOM})
session.commit()
try:
......@@ -672,6 +696,13 @@ def migrate_Database():
conn = engine.connect()
conn.execute("ALTER TABLE Settings ADD column `config_updatechannel` INTEGER DEFAULT 0")
session.commit()
try:
session.query(exists().where(Settings.config_access_log)).scalar()
except exc.OperationalError: # Database is not compatible, some rows are missing
conn = engine.connect()
conn.execute("ALTER TABLE Settings ADD column `config_access_log` INTEGER DEFAULT 0")
conn.execute("ALTER TABLE Settings ADD column `config_access_logfile` String DEFAULT ''")
session.commit()
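The migration above follows the pattern used throughout migrate_Database(): probe for the new column with a harmless query and, when the database raises OperationalError because the column is missing, ALTER the table and commit. A self-contained sketch of the same idea using sqlite3 directly (table and column names are illustrative):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE settings (id INTEGER PRIMARY KEY)")

try:
    # Probe: succeeds only if the column already exists.
    conn.execute("SELECT config_access_log FROM settings LIMIT 1")
except sqlite3.OperationalError:
    # Column missing -> the database predates this feature; add the new columns.
    conn.execute("ALTER TABLE settings ADD COLUMN config_access_log INTEGER DEFAULT 0")
    conn.execute("ALTER TABLE settings ADD COLUMN config_access_logfile TEXT DEFAULT ''")
    conn.commit()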
# Remove login capability of user Guest
conn = engine.connect()
......
This diff is collapsed.
......@@ -28,7 +28,7 @@ from . import logger, comic
from .constants import BookMeta
log = logger.create()
# log = logger.create()
try:
......@@ -42,7 +42,7 @@ try:
from wand.exceptions import PolicyError
use_generic_pdf_cover = False
except (ImportError, RuntimeError) as e:
log.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e)
logger.warning('cannot import Image, generating pdf covers for pdf uploads will not work: %s', e)
use_generic_pdf_cover = True
try:
......@@ -50,21 +50,21 @@ try:
from PyPDF2 import __version__ as PyPdfVersion
use_pdf_meta = True
except ImportError as e:
log.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e)
logger.warning('cannot import PyPDF2, extracting pdf metadata will not work: %s', e)
use_pdf_meta = False
try:
from . import epub
use_epub_meta = True
except ImportError as e:
log.warning('cannot import epub, extracting epub metadata will not work: %s', e)
logger.warning('cannot import epub, extracting epub metadata will not work: %s', e)
use_epub_meta = False
try:
from . import fb2
use_fb2_meta = True
except ImportError as e:
log.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e)
logger.warning('cannot import fb2, extracting fb2 metadata will not work: %s', e)
use_fb2_meta = False
try:
......@@ -72,7 +72,7 @@ try:
from PIL import __version__ as PILversion
use_PIL = True
except ImportError as e:
log.warning('cannot import Pillow, using png and webp images as cover will not work: %s', e)
logger.warning('cannot import Pillow, using png and webp images as cover will not work: %s', e)
use_generic_pdf_cover = True
use_PIL = False
......@@ -94,7 +94,7 @@ def process(tmp_file_path, original_file_name, original_file_extension):
meta = comic.get_comic_info(tmp_file_path, original_file_name, original_file_extension)
except Exception as ex:
log.warning('cannot parse metadata, using default: %s', ex)
logger.warning('cannot parse metadata, using default: %s', ex)
if meta and meta.title.strip() and meta.author.strip():
return meta
......@@ -198,10 +198,10 @@ def pdf_preview(tmp_file_path, tmp_dir):
img.save(filename=os.path.join(tmp_dir, cover_file_name))
return cover_file_name
except PolicyError as ex:
log.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex)
logger.warning('Pdf extraction forbidden by Imagemagick policy: %s', ex)
return None
except Exception as ex:
log.warning('Cannot extract cover image, using default: %s', ex)
logger.warning('Cannot extract cover image, using default: %s', ex)
return None
......
This diff is collapsed.
......@@ -48,7 +48,7 @@ from . import logger, config, db, gdriveutils
from .subproc_wrapper import process_open
log = logger.create()
# log = logger.create()
chunksize = 8192
# task 'status' consts
......@@ -90,8 +90,8 @@ def get_attachment(bookpath, filename):
data = file_.read()
file_.close()
except IOError as e:
log.exception(e) # traceback.print_exc()
log.error(u'The requested file could not be read. Maybe wrong permissions?')
logger.exception(e) # traceback.print_exc()
logger.error(u'The requested file could not be read. Maybe wrong permissions?')
return None
attachment = MIMEBase('application', 'octet-stream')
......@@ -116,7 +116,7 @@ class emailbase():
def send(self, strg):
"""Send `strg' to the server."""
log.debug('send: %r', strg[:300])
logger.debug('send: %r', strg[:300])
if hasattr(self, 'sock') and self.sock:
try:
if self.transferSize:
......@@ -142,7 +142,7 @@ class emailbase():
@classmethod
def _print_debug(self, *args):
log.debug(args)
logger.debug(args)
def getTransferStatus(self):
if self.transferSize:
......@@ -257,14 +257,14 @@ class WorkerThread(threading.Thread):
# if it does - mark the conversion task as complete and return a success
# this will allow send to kindle workflow to continue to work
if os.path.isfile(file_path + format_new_ext):
log.info("Book id %d already converted to %s", bookid, format_new_ext)
logger.info("Book id %d already converted to %s", bookid, format_new_ext)
cur_book = db.session.query(db.Books).filter(db.Books.id == bookid).first()
self.queue[self.current]['path'] = file_path
self.queue[self.current]['title'] = cur_book.title
self._handleSuccess()
return file_path + format_new_ext
else:
log.info("Book id %d - target format of %s does not exist. Moving forward with convert.", bookid, format_new_ext)
logger.info("Book id %d - target format of %s does not exist. Moving forward with convert.", bookid, format_new_ext)
# check if converter-executable is existing
if not os.path.exists(config.config_converterpath):
......@@ -320,13 +320,13 @@ class WorkerThread(threading.Thread):
if conv_error:
error_message = _(u"Kindlegen failed with Error %(error)s. Message: %(message)s",
error=conv_error.group(1), message=conv_error.group(2).strip())
log.debug("convert_kindlegen: %s", nextline)
logger.debug("convert_kindlegen: %s", nextline)
else:
while p.poll() is None:
nextline = p.stdout.readline()
if os.name == 'nt' and sys.version_info < (3, 0):
nextline = nextline.decode('windows-1252')
log.debug(nextline.strip('\r\n'))
logger.debug(nextline.strip('\r\n'))
# parse progress string from calibre-converter
progress = re.search("(\d+)%\s.*", nextline)
if progress:
......@@ -356,7 +356,7 @@ class WorkerThread(threading.Thread):
return file_path + format_new_ext
else:
error_message = format_new_ext.upper() + ' format not found on disk'
log.info("ebook converter failed with error while converting book")
logger.info("ebook converter failed with error while converting book")
if not error_message:
error_message = 'Ebook converter failed with unknown error'
self._handleError(error_message)
......@@ -460,7 +460,7 @@ class WorkerThread(threading.Thread):
self.asyncSMTP = email(obj['settings']["mail_server"], obj['settings']["mail_port"], timeout)
# link to logginglevel
if logger.is_debug_enabled():
if logger.is_debug_enabled('general'):
self.asyncSMTP.set_debuglevel(1)
if use_ssl == 1:
self.asyncSMTP.starttls()
......@@ -502,7 +502,7 @@ class WorkerThread(threading.Thread):
return retVal
def _handleError(self, error_message):
log.error(error_message)
logger.error(error_message)
self.UIqueue[self.current]['stat'] = STAT_FAIL
self.UIqueue[self.current]['progress'] = "100 %"
self.UIqueue[self.current]['runtime'] = self._formatRuntime(
......