
Commit

database: Added parameter default_maxage; Increased default value for max_delete_logentries to 1500
msinn committed Oct 31, 2022
1 parent 6c859e0 commit 4a4e16d
Showing 3 changed files with 57 additions and 11 deletions.
44 changes: 37 additions & 7 deletions database/__init__.py
@@ -103,6 +103,7 @@ def __init__(self, sh, *args, **kwargs):
self._precision = self.get_parameter_value('precision')
self.count_logentries = self.get_parameter_value('count_logentries')
self.max_delete_logentries = self.get_parameter_value('max_delete_logentries')
self._default_maxage = float(self.get_parameter_value('default_maxage'))

self.webif_pagelength = self.get_parameter_value('webif_pagelength')
self._webdata = {}
@@ -116,11 +117,12 @@ def __init__(self, sh, *args, **kwargs):

self.skipping_dump = False

self._handled_items = [] # items that have a 'database' attribute set
self._items_with_maxage = [] # items that have a 'database_maxage' attribute set
self._maxage_worklist = [] # work copy of self._items_with_maxage
self._item_logcount = {} # dict to store the number of log records for an item
self._items_total_entries = 0 # total number of log entries
self._handled_items = [] # items that have a 'database' attribute set
self._items_with_maxage = [] # items that have a 'database_maxage' attribute set
self._maxage_worklist = [] # work copy of self._items_with_maxage
self._item_logcount = {} # dict to store the number of log records for an item
self._items_total_entries = 0 # total number of log entries
self._items_still_counting = False  # True while the log entries are still being counted

self.cleanup_active = False

@@ -667,6 +669,25 @@ def readLatestLog(self, id, time=None, cur=None):
params = {'id': id, 'time': time}
return self._fetchall("SELECT max(time) FROM {log} WHERE item_id = :id AND time <= :time", params, cur=cur)[0][0]


def readTotalLogCount(self, id=None, time_start=None, time_end=None, cur=None):
"""
Read the database log count for the whole database
:param id: currently unused (the count covers all items)
:param time_start: currently unused
:param time_end: currently unused
:param cur: optional database cursor to reuse
:return: Number of log records
"""
params = {'id': id, 'time_start': time_start, 'time_end': time_end}
result = self._fetchall("SELECT count(*) FROM {log};", params, cur=cur)
if result == []:
return 0
return result[0][0]


def readLogCount(self, id, time_start=None, time_end=None, cur=None):
"""
Read database log count for given database ID
@@ -1289,7 +1310,10 @@ def remove_older_than_maxage(self):

if self._maxage_worklist == []:
# Fill work list, if it is empty
self._maxage_worklist = [i for i in self._items_with_maxage]
if self._default_maxage == 0:
self._maxage_worklist = [i for i in self._items_with_maxage]
else:
self._maxage_worklist = [i for i in self._handled_items]
self.logger.info(f"remove_older_than_maxage: Worklist filled with {len(self._items_with_maxage)} items")

item = self._maxage_worklist.pop(0)
@@ -1373,7 +1397,11 @@ def get_maxage_ts(self, item):
:return:
"""
maxage = self.get_iattr_value(item.conf, 'database_maxage')
if self.has_iattr(item.conf, 'database_maxage'):
maxage = self.get_iattr_value(item.conf, 'database_maxage')
elif self._default_maxage > 0:
maxage = self._default_maxage

if maxage:
dt = self.shtime.now()
dt = dt.replace(hour=0, minute=0, second=0, microsecond=0)
@@ -1389,6 +1417,7 @@ def _count_logentries(self):
called by scheduler once on start
"""
self.logger.info("_count_logentries: # handled items = {}".format(len(self._handled_items)))
self._items_still_counting = True
self._items_total_entries = 0
for item in self._handled_items:
item_id = self.id(item, create=False)
@@ -1397,6 +1426,7 @@
self._items_total_entries += logcount
self._webdata[item.id()].update({'logcount': logcount})

self._items_still_counting = False
return


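For illustration, a minimal standalone sketch of the fallback that the changes to get_maxage_ts() and remove_older_than_maxage() introduce: a per-item database_maxage attribute still takes precedence, and the new plugin parameter default_maxage only applies when it is greater than 0. The helper names and datetime handling below are simplified assumptions, not the plugin's actual API (which resolves the attribute via get_iattr_value() and uses shtime for the current time).

```python
# Sketch only: effective_maxage()/maxage_cutoff() are illustrative helpers,
# not part of the database plugin.
from datetime import datetime, timedelta

def effective_maxage(item_maxage, default_maxage):
    """Return the maxage in days that applies to an item, or None."""
    if item_maxage is not None:        # per-item 'database_maxage' wins
        return float(item_maxage)
    if default_maxage > 0:             # new plugin-wide fallback
        return float(default_maxage)
    return None                        # no pruning for this item

def maxage_cutoff(maxage_days, now=None):
    """Oldest timestamp to keep: today's midnight minus maxage_days."""
    now = now or datetime.now()
    midnight = now.replace(hour=0, minute=0, second=0, microsecond=0)
    return midnight - timedelta(days=maxage_days)

# Item without its own attribute, plugin configured with default_maxage = 365:
days = effective_maxage(None, 365)
if days is not None:
    print(maxage_cutoff(days))         # records older than this may be deleted
```

With default_maxage left at its default of 0, effective_maxage() returns None for items without the attribute, so nothing extra is pruned and the previous behaviour is preserved.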
10 changes: 9 additions & 1 deletion database/plugin.yaml
@@ -66,12 +66,20 @@ parameters:

max_delete_logentries:
type: int
default: 1000
default: 1500
valid_min: 20
description:
de: "Maximal auf einmal zu löschende Anzahl an Log Einträgen mit dem database_maxage Attribut, reduziert die Belastung der Datenbank bei alten Datenbeständen"
en: "Maximum number of Logentries to delete at once with database_maxage attribute, reduces load on database with old datasets"

default_maxage:
type: int
default: 0
valid_min: 0
description:
de: "Falls dieser Parameter einen Wert größer 0 enthält: Standard maxage für Items, die kein maxage gesetzt haben"
en: "If this parameter is > 0: maxage for Items that don't have a maxage set."

webif_pagelength:
type: int
default: 0
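For context, a hedged example of how the new parameter could be set in a SmartHomeNG etc/plugin.yaml. The section name, the driver and connect lines, and the chosen values are illustrative assumptions; only the parameter names default_maxage and max_delete_logentries come from this commit.

```yaml
# Hypothetical plugin configuration (etc/plugin.yaml); values are examples only
database:
    plugin_name: database
    driver: sqlite3
    connect:
      - "database:./var/db/smarthomeng.db"
    default_maxage: 365           # fallback maxage (days) for items without database_maxage
    max_delete_logentries: 1500   # matches the new default; caps deletions per cleanup pass
```

Items that declare their own database_maxage attribute keep that value; default_maxage only fills in where no per-item value is set.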
14 changes: 11 additions & 3 deletions database/webif/templates/index.html
@@ -71,8 +71,12 @@
- {{_( 'Die Datenbank enthält Daten zu {dbitems} Items', vars={'dbitems': p.readItemCount()}) }}
{% endif %}
{% if p._items_total_entries > 0 %}
- {{ "{:,}".format(p._items_total_entries).replace(",",".") }} Log Einträge zu dieser Instanz
{% endif %}
-
{% if p._items_still_counting %}
{{ _('Mindestens') }}
{% endif %}
{{ "{:,}".format(p._items_total_entries).replace(",",".") }} {{ _('Log Einträge zu dieser Instanz') }}
{% endif %}
</div>
<table id="itemtable">
<thead>
@@ -117,7 +121,11 @@
{% if p.get_iattr_value(item.conf, 'database_maxage') %}
<td class="py-1" id="{{ item }}_maxage">{{ p.get_iattr_value(item.conf, 'database_maxage') }} {{ _('Tage') }}: {{ p.get_maxage_ts(item).strftime('%d.%m.%Y %H:%M') }}</td>
{% else %}
<td class="py-1">-</td>
{% if p._default_maxage > 0 %}
<td class="py-1">default: {{ p._default_maxage }} {{ _('Tage') }}</td>
{% else %}
<td class="py-1">-</td>
{% endif %}
{% endif %}
<td class="py-1">
{% if p.id(item, create=False) == None %}
