* [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Christian Ebner @ 2023-12-06 11:31 UTC (permalink / raw)
To: pbs-devel
Warn about missing garbage collection schedule, prune job, or verify
job configurations in the datastore's summary panel.
Show the number of prune/verify job configurations if there are jobs
configured.
Signed-off-by: Christian Ebner <c.ebner@proxmox.com>
---
changes since version 2:
- move message string initialization to controllers `init` function
changes since version 1:
- add check mark icon for configured jobs/schedule
- add question mark icon for unknown configuration state
- fix typo in garbage collection variable name
- add missing trailing commas found by pve-eslint
- refactor message initialization to allow using format helper
functions
www/datastore/Summary.js | 96 ++++++++++++++++++++++++++++++++++++++++
1 file changed, 96 insertions(+)
diff --git a/www/datastore/Summary.js b/www/datastore/Summary.js
index a932b4e0..273a5bf0 100644
--- a/www/datastore/Summary.js
+++ b/www/datastore/Summary.js
@@ -49,12 +49,30 @@ Ext.define('PBS.DataStoreInfo', {
usage: {},
stillbad: 0,
mountpoint: "",
+ gcScheduleMsg: "",
+ pruneJobMsg: "",
+ verifyJobMsg: "",
},
},
controller: {
xclass: 'Ext.app.ViewController',
+ fmtConfigured: function(fmtString, params) {
+ let msg = Ext.String.format(fmtString, params);
+ return `<i class="fa fa-fw fa-lg fa-check-circle good"></i> ${msg}`;
+ },
+
+ fmtMissing: function(fmtString, params) {
+ let msg = Ext.String.format(fmtString, params);
+ return `<i class="fa fa-fw fa-lg fa-question-circle-o warning"></i> ${msg}`;
+ },
+
+ fmtWarning: function(fmtString, params) {
+ let msg = Ext.String.format(fmtString, params);
+ return `<i class="fa fa-fw fa-lg fa-exclamation-circle warning"></i> ${msg}`;
+ },
+
onLoad: function(store, data, success) {
let me = this;
if (!success) {
@@ -101,6 +119,50 @@ Ext.define('PBS.DataStoreInfo', {
vm.set('ctcount', countstext(counts.ct));
vm.set('vmcount', countstext(counts.vm));
vm.set('hostcount', countstext(counts.host));
+
+ Proxmox.Utils.API2Request({
+ url: `/config/datastore/${me.view.datastore}`,
+ success: function(response) {
+ if (response.result.data['gc-schedule']) {
+ vm.set('gcScheduleMsg', me.fmtConfigured(gettext('configured')));
+ } else {
+ vm.set('gcScheduleMsg', me.fmtWarning(gettext('none configured')));
+ }
+ },
+ failure: function() {
+ vm.set('gcScheduleMsg', me.fmtMissing(gettext('unknown')));
+ },
+ });
+
+ Proxmox.Utils.API2Request({
+ url: `/admin/prune?store=${me.view.datastore}`,
+ success: function(response) {
+ let len = response.result.data.length;
+ if (len > 0) {
+ vm.set('pruneJobMsg', me.fmtConfigured(gettext('{0} configured'), len));
+ } else {
+ vm.set('pruneJobMsg', me.fmtWarning(gettext('none configured')));
+ }
+ },
+ failure: function() {
+ vm.set('pruneJobMsg', me.fmtMissing(gettext('unknown')));
+ },
+ });
+
+ Proxmox.Utils.API2Request({
+ url: `/admin/verify?store=${me.view.datastore}`,
+ success: function(response) {
+ let len = response.result.data.length;
+ if (len > 0) {
+ vm.set('verifyJobMsg', me.fmtConfigured(gettext('{0} configured'), len));
+ } else {
+ vm.set('verifyJobMsg', me.fmtWarning(gettext('none configured')));
+ }
+ },
+ failure: function() {
+ vm.set('verifyJobMsg', me.fmtMissing(gettext('unknown')));
+ },
+ });
},
startStore: function() { this.store.startUpdate(); },
@@ -108,6 +170,12 @@ Ext.define('PBS.DataStoreInfo', {
init: function(view) {
let me = this;
+
+ let vm = me.getViewModel();
+ vm.set('gcScheduleMsg', me.fmtMissing(gettext('unknown')));
+ vm.set('pruneJobMsg', me.fmtMissing(gettext('unknown')));
+ vm.set('verifyJobMsg', me.fmtMissing(gettext('unknown')));
+
let datastore = encodeURIComponent(view.datastore);
me.store = Ext.create('Proxmox.data.ObjectStore', {
interval: 5*1000,
@@ -201,6 +269,34 @@ Ext.define('PBS.DataStoreInfo', {
visible: '{stillbad}',
},
},
+ {
+ title: gettext('Garbage Collection Schedule'),
+ printBar: false,
+ bind: {
+ data: {
+ text: '{gcScheduleMsg}',
+ },
+ },
+ padding: '10 0 0 0',
+ },
+ {
+ title: gettext('Prune Jobs'),
+ printBar: false,
+ bind: {
+ data: {
+ text: '{pruneJobMsg}',
+ },
+ },
+ },
+ {
+ title: gettext('Verify Jobs'),
+ printBar: false,
+ bind: {
+ data: {
+ text: '{verifyJobMsg}',
+ },
+ },
+ },
],
});
--
2.39.2
* Re: [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Gabriel Goller @ 2023-12-06 13:37 UTC (permalink / raw)
To: Proxmox Backup Server development discussion, Christian Ebner
LGTM!
Tested-by: Gabriel Goller <g.goller@proxmox.com>
Reviewed-by: Gabriel Goller <g.goller@proxmox.com>
On 12/6/23 12:31, Christian Ebner wrote:
> Warn about missing garbage collection schedule, prune job, or verify
> job configurations in the datastore's summary panel.
>
> Show the number of prune/verify job configurations if there are jobs
> configured.
>
> Signed-off-by: Christian Ebner <c.ebner@proxmox.com>
> ---
> changes since version 2:
> - move message string initialization to controllers `init` function
>
> changes since version 1:
> - add check mark icon for configured jobs/schedule
> - add question mark icon for unknown configuration state
> - fix typo in garbage collection variable name
> - add missing trailing commas found by pve-eslint
> - refactor message initialization to allow using format helper
> functions
> www/datastore/Summary.js | 96 ++++++++++++++++++++++++++++++++++++++++
> 1 file changed, 96 insertions(+)
>
> [..]
* Re: [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Fabian Grünbichler @ 2023-12-06 14:09 UTC (permalink / raw)
To: Proxmox Backup Server development discussion, Christian Ebner
> Christian Ebner <c.ebner@proxmox.com> wrote on 06.12.2023 12:31 CET:
>
> Warn about missing garbage collection schedule, prune job, or verify
> job configurations in the datastore's summary panel.
>
> Show the number of prune/verify job configurations if there are jobs
> configured.
for all of them there are possible reasons for not having them set up:
- verify: I trust my storage to protect me against bit rot, and I don't want to incur the performance penalty of doing a full verification just to ensure logical consistency
- prune: I only do client-side pruning
- prune+GC: this is an append-only backup storage, I never want to remove data
all of those could be improved of course (logical-verification as a verify mode or new job type, changing the warning to information if a prune action was done recently, allowing an explicit "append-only" flag that actually disallows pruning, ..)
all of them except for GC also don't take namespaces into account, but that might be harder to get right.
there's another source of confusion if I am an unprivileged user - I might not "see" the jobs that are defined, and get a warning as a result, even though everything is okay. that last one *could* be tackled by leaking the count, even if not leaking the details, but I am not sure if we want to do that ;) for GC at least we could differentiate based on status code and make the user aware that unknown is for lack of privs.
I'd also differentiate between "unknown because no request made/no response retrieved yet" and "unknown because request failed" - the latter case should be a "warning" as well (and ideally contain the error at least as tool tip?), and not "missing", except if we can match the failure to missing privileges..
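something like this in the failure handler would do, I think (an untested
sketch; it assumes the callback gets the raw response with the HTTP status
as `response.status` and the extracted error text as `response.htmlStatus`):

    failure: function(response) {
	// 403 means we lack the privilege to read the config - the jobs
	// may well exist, so say that instead of a generic "unknown"
	if (response.status === 403) {
	    vm.set('gcScheduleMsg', me.fmtMissing(gettext('unknown (no permission)')));
	} else {
	    // an actual request error - warn, and keep the error text
	    // reachable as tooltip via the data-qtip attribute
	    let err = Ext.htmlEncode(response.htmlStatus || gettext('request failed'));
	    vm.set('gcScheduleMsg',
		`<span data-qtip="${err}">${me.fmtWarning(gettext('unknown'))}</span>`);
	}
    },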
> [..]
* Re: [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Christian Ebner @ 2023-12-06 14:43 UTC (permalink / raw)
To: Fabian Grünbichler, Proxmox Backup Server development discussion
> On 06.12.2023 15:09 CET Fabian Grünbichler <f.gruenbichler@proxmox.com> wrote:
>
>
> for all of them there are possible reasons for not having them set up:
> - verify: I trust my storage to protect me against bit rot, and I don't want to incur the performance penalty of doing a full verification just to ensure logical consistency
> - prune: I only do client-side pruning
> - prune+GC: this is an append-only backup storage, I never want to remove data
True, and I am aware of that. That is why I opted for a warning rather than
showing these as errors. But of course an informational message might be even
more fitting here. Still, I would like to not hide the not-configured states
too much, as the main point of adding these is to inform the user that they
might be missing something, after all. Maybe I should also add a tool tip to
show some explanation for the warning/info when hovered?
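For example, the format helpers could take an optional tip and attach it via
data-qtip (a hypothetical extra parameter, not part of this patch):

    fmtWarning: function(fmtString, params, tip) {
	let msg = Ext.String.format(fmtString, params);
	// data-qtip is picked up by the ExtJS QuickTips singleton on hover
	let qtip = tip ? ` data-qtip="${Ext.htmlEncode(tip)}"` : '';
	return `<i class="fa fa-fw fa-lg fa-exclamation-circle warning"${qtip}></i> ${msg}`;
    },

called e.g. as fmtWarning(gettext('none configured'), undefined,
gettext('Without a prune job and GC schedule, no backups are ever removed')).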
>
> all of those could be improved of course (logical-verification as a verify mode or new job type, changing the warning to information if a prune action was done recently, allowing an explicit "append-only" flag that actually disallows pruning, ..)
>
> all of them except for GC also don't take namespaces into account, but that might be harder to get right.
Yes, these currently only cover the datastore as a whole. Another reason why
the green check mark might not be so ideal after all. I will have a look at
whether I can get the namespace information as well without having to perform
too many API calls, as otherwise this status update might produce too many
calls for users with many namespaces. Also, I am not sure that this
information can be displayed compactly, except maybe as a "there are
namespaces without jobs configured".
>
> there's another source of confusion if I am an unprivileged user - I might not "see" the jobs that are defined, and get a warning as a result, even though everything is okay. that last one *could* be tackled by leaking the count, even if not leaking the details, but I am not sure if we want to do that ;) for GC at least we could differentiate based on status code and make the user aware that unknown is for lack of privs.
Hmm, good point. Maybe I should not show the state information (and fetch via the API) for that job at all if the user has no privileges? Will have a look if I can exclude these.
>
> I'd also differentiate between "unknown because no request made/no response retrieved yet" and "unknown because request failed" - the latter case should be a "warning" as well (and ideally contain the error at least as tool tip?), and not "missing", except if we can match the failure to missing privileges..
Yes, will have a look at improving this as well. Showing the error message
might be a bit messy, but the suggested tool tip could be a viable option.
Thank you for the feedback!
Cheers,
Chris
* Re: [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Thomas Lamprecht @ 2023-12-11 9:09 UTC (permalink / raw)
To: Proxmox Backup Server development discussion, Christian Ebner,
Fabian Grünbichler
On 06/12/2023 at 15:43, Christian Ebner wrote:
>> On 06.12.2023 15:09 CET Fabian Grünbichler <f.gruenbichler@proxmox.com> wrote:
>> for all of them there are possible reasons for not having them set up:
>> - verify: I trust my storage to protect me against bit rot, and I don't
>>   want to incur the performance penalty of doing a full verification
>>   just to ensure logical consistency
>> - prune: I only do client-side pruning
>> - prune+GC: this is an append-only backup storage, I never want to
>>   remove data
>
> True, and I am aware of that. That is why I opted for a warning rather than
> showing these as errors. But of course an informational message might be
> even more fitting here. Still, I would like to not hide the not-configured
> states too much, as the main point of adding these is to inform the user
> that they might be missing something, after all. Maybe I should also add a
> tool tip to show some explanation for the warning/info when hovered?
IMO it should then really focus on being informative, not warning, and
tooltips are not very accessible.
I mean, what we actually want here is to avoid that a PBS gets full by
accident because the admin forgot, or did not understand, that they had to
set up a prune and gc job for anything to be deleted.
That might be better handled by generating a low-storage space notification
that is actively sent (with status quo over the mail forwarder, in the
future with Lukas' new notification stack) to the admin.
Such a notification could contain additional hints if there's no GC or no
prune jobs, or if we want to be fancy, that prune jobs do not cover all
namespaces (but IMO overkill for starters).
The when to send might be configurable, but probably something like < 10%
usable space left, by default.
This would help users that actually forgot to set such things up, which,
while rare, has a big impact, while not annoying/scare-mongering users that
chose that setup explicitly, for whatever use case that is.
>> there's another source of confusion if I am an unprivileged user - I
>> might not "see" the jobs that are defined, and get a warning as a result,
>> even though everything is okay. that last one *could* be tackled by
>> leaking the count, even if not leaking the details, but I am not sure if
>> we want to do that ;) for GC at least we could differentiate based on
>> status code and make the user aware that unknown is for lack of privs.
>
> Hmm, good point. Maybe I should not show the state information (and fetch
> via the API) for that job at all if the user has no privileges? Will have
> a look if I can exclude these.
This could also be fixed by polling the permissions on login and ticket
renewal and using those to hide some panels or disable some buttons if they
cannot work anyway.
Sorta like we do in PVE, but I'd avoid the odd heuristic there that merges
the available privs into some top-level object; rather, just load the whole
ACL-tree privilege info in a store and check it there, e.g., through a new
`Proxmox.ACL` module with a `check` (and maybe `checkAny` for any-of) method
that we can use at call sites like check(`/datastore/${name}`, 'Datastore.Audit').
If we have more complex cases in the future, we can also add a method that
takes an object to denote such things like:
{ any: ['Priv.Foo', 'Priv.Bar'], all: ['Priv.Baz'] }
But I'd start small; one can also just use local variables to query each
priv and then combine them with a boolean expression into whatever the
call sites need for now, so `check` and maybe `checkAny` should cover most
cases.
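A rough sketch of what I have in mind (the module name and the shape of the
/access/permissions response are assumptions here, not existing
widget-toolkit API):

    Ext.define('Proxmox.ACL', {
	singleton: true,

	// path -> { privName: 1, ... }, loaded on login and ticket renewal
	permissions: {},

	load: function() {
	    Proxmox.Utils.API2Request({
		url: '/access/permissions',
		method: 'GET',
		success: function(response) {
		    Proxmox.ACL.permissions = response.result.data;
		},
	    });
	},

	// all of the given privs must be granted on the path
	check: function(path, ...privs) {
	    let granted = Proxmox.ACL.permissions[path] || {};
	    return privs.every((priv) => !!granted[priv]);
	},

	// at least one of the given privs must be granted on the path
	checkAny: function(path, ...privs) {
	    let granted = Proxmox.ACL.permissions[path] || {};
	    return privs.some((priv) => !!granted[priv]);
	},
    });

with call sites then simply doing
Proxmox.ACL.check(`/datastore/${name}`, 'Datastore.Audit').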
* Re: [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Christian Ebner @ 2023-12-11 9:44 UTC (permalink / raw)
To: Thomas Lamprecht, Proxmox Backup Server development discussion,
Fabian Grünbichler
> On 11.12.2023 10:09 CET Thomas Lamprecht <t.lamprecht@proxmox.com> wrote:
>
>
> On 06/12/2023 at 15:43, Christian Ebner wrote:
> > True, and I am aware of that. That is why I opted for a warning rather
> > than showing these as errors. But of course an informational message
> > might be even more fitting here. Still, I would like to not hide the
> > not-configured states too much, as the main point of adding these is to
> > inform the user that they might be missing something, after all. Maybe I
> > should also add a tool tip to show some explanation for the warning/info
> > when hovered?
>
> IMO it should then really focus on being informative, not warning, and
> tooltips are not very accessible.
> I mean, what we actually want here is to avoid that a PBS gets full by
> accident because the admin forgot, or did not understand, that they had to
> set up a prune and gc job for anything to be deleted.
> That might be better handled by generating a low-storage space notification
> that is actively sent (with status quo over the mail forwarder, in the
> future with Lukas' new notification stack) to the admin.
Yes, that sounds like an even better approach.
However, how should the storage space be checked and the notification be
generated in that case?
One option would be to have a periodic job, which however once again can be
misconfigured and/or fail? Or do you have something else in mind here?
> Such a notification could contain additional hints if there's no GC or no
> prune jobs, or if we want to be fancy, that prune jobs do not cover all
> namespaces (but IMO overkill for starters).
;) I already worked on this last week; getting the namespaces and jobs and
checking if they are covered was not that complex after all. It only required
fetching the namespaces per datastore via the API and comparing the namespace
depth with the job's namespace level and configured max depth.
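The coverage check itself boils down to a small helper, roughly like this
(a sketch; `ns` and `max-depth` are the job config fields as I read them):

    // does a prune/verify job cover the given namespace path?
    let jobCoversNs = function(job, ns) {
	let jobNs = job.ns || '';
	let prefix = jobNs === '' ? '' : `${jobNs}/`;
	if (ns !== jobNs && !ns.startsWith(prefix)) {
	    return false; // namespace lies outside the job's subtree
	}
	let depth = ns === '' ? 0 : ns.split('/').length;
	let jobDepth = jobNs === '' ? 0 : jobNs.split('/').length;
	let maxDepth = job['max-depth'] ?? Infinity;
	return depth - jobDepth <= maxDepth;
    };

    // namespaces of a datastore that no configured job covers
    let uncovered = namespaces.filter((nsItem) =>
	!jobs.some((job) => jobCoversNs(job, nsItem.ns)));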
>
> The when to send might be configurable, but probably something like < 10%
> usable space left, by default.
> This would help users that actually forgot to set such things up, which,
> while rare, has a big impact, while not annoying/scare-mongering users that
> chose that setup explicitly, for whatever use case that is.
>
> >
> > Hmm, good point. Maybe I should not show the state information (and fetch
> > via the API) for that job at all if the user has no privileges? Will have
> > a look if I can exclude these.
>
> This could also be fixed by polling the permissions on login and ticket
> renewal and using those to hide some panels or disable some buttons if they
> cannot work anyway.
>
> Sorta like we do in PVE, but I'd avoid the odd heuristic there that merges
> the available privs into some top-level object; rather, just load the whole
> ACL-tree privilege info in a store and check it there, e.g., through a new
> `Proxmox.ACL` module with a `check` (and maybe `checkAny` for any-of) method
> that we can use at call sites like check(`/datastore/${name}`, 'Datastore.Audit').
> If we have more complex cases in the future, we can also add a method that
> takes an object to denote such things like:
> { any: ['Priv.Foo', 'Priv.Bar'], all: ['Priv.Baz'] }
> But I'd start small; one can also just use local variables to query each
> priv and then combine them with a boolean expression into whatever the
> call sites need for now, so `check` and maybe `checkAny` should cover most
> cases.
I also had a look at this last week, especially at how this is handled in
PVE, as I saw that we currently do not store this same information for the
PBS UI.
However, I noticed that without the 'Datastore.Audit' permission the user
cannot access the datastore summary panel at all, and with that permission,
read access to the jobs is granted as well, so it would not have been
necessary to check the permissions individually for this particular case.
* Re: [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Thomas Lamprecht @ 2023-12-11 10:43 UTC (permalink / raw)
To: Christian Ebner, Proxmox Backup Server development discussion,
Fabian Grünbichler
On 11/12/2023 at 10:44, Christian Ebner wrote:
>> On 11.12.2023 10:09 CET Thomas Lamprecht <t.lamprecht@proxmox.com> wrote:
>> On 06/12/2023 at 15:43, Christian Ebner wrote:
>>> True, and I am aware of that. That is why I opted for a warning rather
>>> than showing these as errors. But of course an informational message
>>> might be even more fitting here. Still, I would like to not hide the
>>> not-configured states too much, as the main point of adding these is to
>>> inform the user that they might be missing something, after all. Maybe I
>>> should also add a tool tip to show some explanation for the warning/info
>>> when hovered?
>>
>> IMO it should then really focus on being informative, not warning, and
>> tooltips are not very accessible.
>> I mean, what we actually want here is to avoid that a PBS gets full by
>> accident because the admin forgot, or did not understand, that they had to
>> set up a prune and gc job for anything to be deleted.
>> That might be better handled by generating a low-storage space notification
>> that is actively sent (with status quo over the mail forwarder, in the
>> future with Lukas' new notification stack) to the admin.
>
> Yes, that sounds like an even better approach.
>
> However, how should the storage space be checked and the notification be
> generated in that case?
> One option would be to have a periodic job, which however once again can be
> misconfigured and/or fail? Or do you have something else in mind here?
I'd just use the daily timer which we use for sending out notifications about
available apt updates.
I.e., just some fixed-schedule daily health check that, e.g., can be
configured in the node's config, but only for some params like the low-water
mark for when to send the alert.
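The threshold check itself is then trivial, something like this (JS just for
illustration here, the actual check would live in the daemon's daily task):

    // hypothetical helper: is a low-space notification due?
    let lowSpaceAlert = function(status, lowWaterPercent = 10) {
	// status as in the datastore status API: { total, used, avail }
	return (status.avail / status.total) * 100 < lowWaterPercent;
    };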
>> Sorta like we do in PVE, but I'd avoid the odd heuristic there that merges
>> the available privs into some top-level object; rather, just load the whole
>> ACL-tree privilege info in a store and check it there, e.g., through a new
>> `Proxmox.ACL` module with a `check` (and maybe `checkAny` for any-of) method
>> that we can use at call sites like check(`/datastore/${name}`, 'Datastore.Audit').
>> If we have more complex cases in the future, we can also add a method that
>> takes an object to denote such things like:
>> { any: ['Priv.Foo', 'Priv.Bar'], all: ['Priv.Baz'] }
>> But I'd start small; one can also just use local variables to query each
>> priv and then combine them with a boolean expression into whatever the
>> call sites need for now, so `check` and maybe `checkAny` should cover most
>> cases.
>
> I also had a look at this last week, especially at how this is handled in
> PVE, as I saw that we currently do not store this same information for the
> PBS UI.
>
> However, I noticed that without the 'Datastore.Audit' permission the user
> cannot access the datastore summary panel at all, and with that permission,
> read access to the jobs is granted as well, so it would not have been
> necessary to check the permissions individually for this particular case.
We use that in PVE too, i.e., we do not check for specific VMIDs, as access
there is covered by what the API returns anyway. Here the overall datastore
list would be the same, but what one can do within it would not, so yeah,
Datastore.Audit might not have been the best example here.
* Re: [pbs-devel] [PATCH v3 proxmox-backup] ui: warn of missing gc-schedule, prune/verify jobs
From: Christian Ebner @ 2023-12-11 10:59 UTC (permalink / raw)
To: Thomas Lamprecht, Proxmox Backup Server development discussion,
Fabian Grünbichler
> On 11.12.2023 11:43 CET Thomas Lamprecht <t.lamprecht@proxmox.com> wrote:
>
>
> I'd just use the daily timer which we use for sending out notifications about
> available apt updates.
>
> I.e., just some fixed-scheduled daily health checking, that, e.g., can be
> configured in the nodes' config, but only some params like the low-water
> mark for when to send the alert.
Okay, will integrate a low storage space datastore sanity check and prune + gc-schedule
verification there, sending a notification if the configured threshold is reached.
>
> We use that also in PVE, i.e., we do not check for specific VMIDs as there access
> is covered by what the API returns anyway, the overall datastore list would be
> the same, but what one can do within not, so yeah Datastore.Audit might not have
> been the best example here.
Thanks for the feedback!
Cheers,
Chris