From c79eb8f3cb2302482c79a293e1d0854ce0e24c8c Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 30 Apr 2019 15:43:54 +0100 Subject: [PATCH 001/183] Add "grid" db details and rename other to 'cloud' --- monitoring/publishing/views.py | 2 +- monitoring/settings.py | 9 ++++++++- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 81d1656f..7945f959 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -22,7 +22,7 @@ def list(self, request): print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): print 'Out of date' - fetchset = VAnonCloudRecord.objects.using('repository').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID), CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") + fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID), CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: CloudSite.objects.update_or_create(defaults={'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) else: diff --git a/monitoring/settings.py b/monitoring/settings.py index d76e5d17..649ae2d8 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -89,7 +89,14 @@ 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), }, - 'repository': { + 'grid': { + 'ENGINE': 'django.db.backends.mysql', + 'HOST': 'localhost', + 'PORT': '3306', + 
'NAME': 'django_test', + 'USER': 'root', + }, + 'cloud': { 'ENGINE': 'django.db.backends.mysql', 'HOST': 'localhost', 'PORT': '3306', From f798f30915e1ae40123b4d9af0b18693c5fba718 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 30 Apr 2019 16:51:04 +0100 Subject: [PATCH 002/183] Change ModelViewSet to ReadOnly version --- monitoring/publishing/views.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 7945f959..4558524f 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -11,8 +11,7 @@ from models import CloudSite, VAnonCloudRecord from serializers import CloudSiteSerializer - -class CloudSiteViewSet(viewsets.ModelViewSet): +class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = CloudSite.objects.all() serializer_class = CloudSiteSerializer template_name = 'cloudsites.html' From 58c79ddb783a84c973fb4033e2235a28e9d1e18a Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 30 Apr 2019 16:54:14 +0100 Subject: [PATCH 003/183] Add basic single site retrieve view method --- monitoring/publishing/views.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 4558524f..5aaaf890 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -32,3 +32,21 @@ def list(self, request): if type(request.accepted_renderer) is TemplateHTMLRenderer: response.data = {'sites': response.data, 'last_fetched': last_fetched} return response + + def retrieve(self, request, pk=None): + last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] + print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) + if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): + print 'Out of date' + fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, 
COUNT(DISTINCT VMUUID), CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") + for f in fetchset: + CloudSite.objects.update_or_create(defaults={'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) + else: + print 'No need to update' + + response = super(CloudSiteViewSet, self).retrieve(request) + # Wrap data in a dict so that it can display in template. + if type(request.accepted_renderer) is TemplateHTMLRenderer: + # Single result put in list to work with same HTML template. + response.data = {'sites': [response.data], 'last_fetched': last_fetched} + return response From 71268e26d98771b8ddf8d8ad874217207603dba1 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 30 Apr 2019 16:57:28 +0100 Subject: [PATCH 004/183] Add placeholder Nagios style result to site output --- monitoring/publishing/views.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 5aaaf890..dd5a613a 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -49,4 +49,8 @@ def retrieve(self, request, pk=None): if type(request.accepted_renderer) is TemplateHTMLRenderer: # Single result put in list to work with same HTML template. 
response.data = {'sites': [response.data], 'last_fetched': last_fetched} + + response.data['returncode'] = 3 + response.data['stdout'] = "UNKNOWN" + return response From 13415a830d27b53f9464dc593668e5a0e1de6251 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 1 May 2019 13:48:42 +0100 Subject: [PATCH 005/183] Add VMsInLastUpdate to cloud output --- monitoring/publishing/models.py | 8 ++++++-- monitoring/publishing/serializers.py | 2 +- monitoring/publishing/templates/cloudsites.html | 3 ++- monitoring/publishing/views.py | 10 ++++++---- 4 files changed, 15 insertions(+), 8 deletions(-) diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index ba65c9a3..58211948 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -7,6 +7,7 @@ class CloudSite(models.Model): fetched = models.DateTimeField(auto_now=True) name = models.CharField(max_length=255, primary_key=True) + vms = models.IntegerField(default=0) script = models.CharField(max_length=255) updated = models.DateTimeField() @@ -16,6 +17,7 @@ class Meta: class VAnonCloudRecord(models.Model): SiteName = models.CharField(max_length=255, primary_key=True) + VMs = models.IntegerField() CloudType = models.CharField(max_length=255) UpdateTime = models.DateTimeField() @@ -24,6 +26,8 @@ class Meta: db_table = 'vanoncloudrecords' def __str__(self): - return '%s, running "%s", updated at %s' % (self.SiteName, + return '%s running "%s" updated at %s with %s records' % ( + self.SiteName, self.CloudType, - self.UpdateTime) + self.UpdateTime, + self.VMs) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 40309bbe..07e14dd4 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -5,4 +5,4 @@ class CloudSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CloudSite - fields = ('url', 'name', 'script', 'updated') + fields = ('url', 'name', 'vms', 'script', 
'updated') diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index e146485f..b0b58cfc 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -7,11 +7,12 @@

Sites publishing cloud accounting records from 2018-06-19 onwards

Page last updated: {{ last_fetched|date:"c" }}

- + {% for site in sites %} + diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index dd5a613a..c19361d5 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -11,6 +11,7 @@ from models import CloudSite, VAnonCloudRecord from serializers import CloudSiteSerializer + class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = CloudSite.objects.all() serializer_class = CloudSiteSerializer @@ -21,9 +22,10 @@ def list(self, request): print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): print 'Out of date' - fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID), CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") + fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") + print fetchset for f in fetchset: - CloudSite.objects.update_or_create(defaults={'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) + CloudSite.objects.update_or_create(defaults={'vms': f.VMs, 'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) else: print 'No need to update' @@ -38,9 +40,9 @@ def retrieve(self, request, pk=None): print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): print 'Out of date' - 
fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID), CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") + fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: - CloudSite.objects.update_or_create(defaults={'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) + CloudSite.objects.update_or_create(defaults={'vms': f.VMs, 'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) else: print 'No need to update' From 670853770dcb0f5934eb95ae209d60a7d3441e52 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 1 May 2019 13:49:10 +0100 Subject: [PATCH 006/183] Change schema that grid db setting uses --- monitoring/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index 649ae2d8..7a243d9d 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -93,7 +93,7 @@ 'ENGINE': 'django.db.backends.mysql', 'HOST': 'localhost', 'PORT': '3306', - 'NAME': 'django_test', + 'NAME': 'django_test_grid', 'USER': 'root', }, 'cloud': { From 0ac742e587f7091f36c6bbe91c14200f60df0b90 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 1 May 2019 14:08:30 +0100 Subject: [PATCH 007/183] Clean up --- monitoring/publishing/views.py | 1 - monitoring/settings.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 
c19361d5..051d6ff6 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -23,7 +23,6 @@ def list(self, request): if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): print 'Out of date' fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") - print fetchset for f in fetchset: CloudSite.objects.update_or_create(defaults={'vms': f.VMs, 'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) else: diff --git a/monitoring/settings.py b/monitoring/settings.py index 7a243d9d..9c123cc1 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -38,7 +38,7 @@ 'django.contrib.messages', 'django.contrib.staticfiles', 'rest_framework', - 'monitoring.publishing' + 'monitoring.publishing', ] REST_FRAMEWORK = { From c256acf4a660a6b79b70ae287b033e39e138a7ab Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 1 May 2019 14:15:33 +0100 Subject: [PATCH 008/183] Initial work on grid monitoring --- monitoring/publishing/models.py | 6 ++++++ monitoring/publishing/serializers.py | 7 +++++++ monitoring/publishing/urls.py | 1 + monitoring/publishing/views.py | 10 ++++++++-- 4 files changed, 22 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index 58211948..0a73d8dd 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -4,6 +4,12 @@ from django.db import models +class GridSite(models.Model): + fetched = models.DateTimeField(auto_now=True) + name = models.CharField(max_length=255, primary_key=True) + updated = models.DateTimeField() + + class CloudSite(models.Model): fetched = 
models.DateTimeField(auto_now=True) name = models.CharField(max_length=255, primary_key=True) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 07e14dd4..38eb3ff8 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -2,6 +2,13 @@ from models import CloudSite + +class GridSiteSerializer(serializers.HyperlinkedModelSerializer): + class Meta: + model = CloudSite + fields = ('url', 'name', 'updated') + + class CloudSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CloudSite diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index c45044e0..154fad47 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -6,6 +6,7 @@ router = routers.SimpleRouter() router.register(r'cloud', views.CloudSiteViewSet) +router.register(r'grid', views.GridSiteViewSet) urlpatterns = [ diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 051d6ff6..2ef951b4 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -8,8 +8,14 @@ from rest_framework import viewsets from rest_framework.renderers import TemplateHTMLRenderer -from models import CloudSite, VAnonCloudRecord -from serializers import CloudSiteSerializer +from models import GridSite, CloudSite, VAnonCloudRecord +from serializers import GridSiteSerializer, CloudSiteSerializer + + +class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): + queryset = GridSite.objects.all() + serializer_class = GridSiteSerializer + template_name = 'gridsites.html' class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): From 6a4c786178d11933ccf6cd2a434a3fd4fdc24616 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 1 May 2019 14:44:20 +0100 Subject: [PATCH 009/183] Add basic grid data retrieval --- monitoring/publishing/models.py | 9 +++++++++ monitoring/publishing/views.py | 26 +++++++++++++++++++++++++- 2 files changed, 34 insertions(+), 1 
deletion(-) diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index 0a73d8dd..1624b5aa 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -10,6 +10,15 @@ class GridSite(models.Model): updated = models.DateTimeField() +class VSuperSummaries(models.Model): + Site = models.CharField(max_length=255, primary_key=True) + LatestPublish = models.DateTimeField() + + class Meta: + managed = False + db_table = 'VSuperSummaries' + + class CloudSite(models.Model): fetched = models.DateTimeField(auto_now=True) name = models.CharField(max_length=255, primary_key=True) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 2ef951b4..93bdb03c 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -8,7 +8,7 @@ from rest_framework import viewsets from rest_framework.renderers import TemplateHTMLRenderer -from models import GridSite, CloudSite, VAnonCloudRecord +from models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord from serializers import GridSiteSerializer, CloudSiteSerializer @@ -17,6 +17,30 @@ class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): serializer_class = GridSiteSerializer template_name = 'gridsites.html' + def retrieve(self, request, pk=None): + last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] + # If there's no data then last_fetched is None. 
+ if last_fetched is not None: + print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) + if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): + print 'Out of date' + fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") + for f in fetchset: + GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, name=f.Site) + else: + print 'No need to update' + + response = super(GridSiteViewSet, self).retrieve(request) + # Wrap data in a dict so that it can display in template. + if type(request.accepted_renderer) is TemplateHTMLRenderer: + # Single result put in list to work with same HTML template. + response.data = {'sites': [response.data], 'last_fetched': last_fetched} + + response.data['returncode'] = 3 + response.data['stdout'] = "UNKNOWN" + + return response + class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = CloudSite.objects.all() From c2a0a23f8f90d0e8b431a44eb2e44944f4c40e11 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 1 May 2019 17:34:40 +0100 Subject: [PATCH 010/183] Add HTML template for grid --- .../publishing/templates/gridsites.html | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 monitoring/publishing/templates/gridsites.html diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html new file mode 100644 index 00000000..3150b3cd --- /dev/null +++ b/monitoring/publishing/templates/gridsites.html @@ -0,0 +1,35 @@ + + + + + +APEL Publication Summary : {{ sites.0.name }} + + + + +

APEL Publication Test

+
    +
  • Displays the last time the site published accounting data to the GOC. +
  • A warning / error is raised if the site has not published accounting data for 7 / 31 days, if a site has not published data for 31 days, which usually signifies a problem with APEL or RGMA services. +
  • Information about APEL APEL Wiki +
  • Contact: apel-admins [at] stfc.ac.uk +
  • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

+ +
SiteCloudTypeLastUpdatedSiteVMsInLastUpdateCloudTypeLastUpdated
{{ site.name }}{{ site.vms }} {{ site.script }} {{ site.updated }}
+ + + + + + + + + + + + + +
{{ sites.0.name }}
ExecutingSiteMeasurementDateMeasurementTimePublication
Status
{{ sites.0.name }}{{ sites.0.updated }}{{ sites.0.updated }}OK [ last published 1 days ago: 2019-04-30 ]
+ + \ No newline at end of file From 71d66a97c3a05fa5c906a5beea2587044ae8591d Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 1 May 2019 17:35:40 +0100 Subject: [PATCH 011/183] Tweak datetime filter --- monitoring/publishing/templates/cloudsites.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index b0b58cfc..7356e02f 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -4,7 +4,7 @@

Sites publishing cloud accounting records from 2018-06-19 onwards

-

Page last updated: {{ last_fetched|date:"c" }}

+

Page last updated: {{ last_fetched|date:"Y-m-d H:i:s.u" }}

From bad274e65999033a4eb32d08fa3d0579cebdf06d Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 11 Jun 2019 16:33:35 +0100 Subject: [PATCH 012/183] Update Django version --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 62119312..5e667054 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.11.20 +Django==1.11.21 djangorestframework==3.9.3 mysqlclient==1.3.4 pytz==2019.1 From ea841a37191a14465efc2d6b9e79710a959266ba Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jul 2019 21:19:26 +0000 Subject: [PATCH 013/183] Bump django from 1.11.21 to 1.11.22 Bumps [django](https://github.com/django/django) from 1.11.21 to 1.11.22. - [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/1.11.21...1.11.22) Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 5e667054..a364e0c7 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.11.21 +Django==1.11.22 djangorestframework==3.9.3 mysqlclient==1.3.4 pytz==2019.1 From 31cf23de4a71bbfe7b9c8d1101f753fdc3f7466b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 6 Aug 2019 02:14:29 +0000 Subject: [PATCH 014/183] Bump django from 1.11.22 to 1.11.23 Bumps [django](https://github.com/django/django) from 1.11.22 to 1.11.23. 
- [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/1.11.22...1.11.23) Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index a364e0c7..ccf1cf44 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.11.22 +Django==1.11.23 djangorestframework==3.9.3 mysqlclient==1.3.4 pytz==2019.1 From 2e8a5b91386e06a92fc734ca73ff59f6e422934b Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 11:33:38 +0000 Subject: [PATCH 015/183] Update requirements --- requirements.txt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/requirements.txt b/requirements.txt index ccf1cf44..3d8bf06d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.11.23 -djangorestframework==3.9.3 +Django==1.11.26 +djangorestframework==3.9.4 mysqlclient==1.3.4 -pytz==2019.1 +pytz==2019.3 From 77c200f6e5dc638fd2699860d7c21d67e1e86f30 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 13:03:56 +0000 Subject: [PATCH 016/183] Update mysqlclient requirement --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 3d8bf06d..00f75410 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ Django==1.11.26 djangorestframework==3.9.4 -mysqlclient==1.3.4 +mysqlclient==1.3.9 pytz==2019.3 From 18b838fa4284be8900e15efc99ea87da4e51f9e9 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 16:44:05 +0000 Subject: [PATCH 017/183] Fix formatting of datetime in cloud HTML --- monitoring/publishing/serializers.py | 5 +++++ monitoring/publishing/templates/cloudsites.html | 2 +- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 38eb3ff8..7676f486 100644 --- 
a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -10,6 +10,11 @@ class Meta: class CloudSiteSerializer(serializers.HyperlinkedModelSerializer): + # Override default format with None so that Python datetime is used as + # ouput format. Encoding will be determined by the renderer and can be + # formatted by a template filter. + updated = serializers.DateTimeField(format=None) + class Meta: model = CloudSite fields = ('url', 'name', 'vms', 'script', 'updated') diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index 7356e02f..a10ae1cf 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -14,7 +14,7 @@

Sites publishing cloud accounting records from 2018-06-19 onwards

- + {% endfor %}
SiteVMsInLastUpdateCloudTypeLastUpdated{{ site.name }} {{ site.vms }} {{ site.script }}{{ site.updated }}{{ site.updated|date:"Y-m-d H:i:s" }}
From 9f9dc46882638cbb4279e61afc5bc07eefb58b89 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 16:45:28 +0000 Subject: [PATCH 018/183] Correct model used for grid serializer --- monitoring/publishing/serializers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 7676f486..6e5ad2ca 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -1,11 +1,11 @@ from rest_framework import serializers -from models import CloudSite +from models import CloudSite, GridSite class GridSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: - model = CloudSite + model = GridSite fields = ('url', 'name', 'updated') From 4cd87e95bf9039cc4edae2f26df35d9f83f23f0e Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 17:00:21 +0000 Subject: [PATCH 019/183] Add header to cloud HTML --- monitoring/publishing/templates/cloudsites.html | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index a10ae1cf..e9c2bb7b 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -1,3 +1,8 @@ + + + + + Sites publishing cloud accounting records From fb4d50fbc0fd1d27b579862095de5917c8c521b3 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 17:13:46 +0000 Subject: [PATCH 020/183] Fix datetime fields for grid --- monitoring/publishing/serializers.py | 5 +++++ monitoring/publishing/templates/gridsites.html | 4 ++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 6e5ad2ca..3e6eb060 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -4,6 +4,11 @@ class 
GridSiteSerializer(serializers.HyperlinkedModelSerializer): + # Override default format with None so that Python datetime is used as + # ouput format. Encoding will be determined by the renderer and can be + # formatted by a template filter. + updated = serializers.DateTimeField(format=None) + class Meta: model = GridSite fields = ('url', 'name', 'updated') diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index 3150b3cd..696cbcdd 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -26,8 +26,8 @@

APEL Publication Test

{{ sites.0.name }} - {{ sites.0.updated }} - {{ sites.0.updated }} + {{ sites.0.updated|date:"Y-m-d" }} + {{ sites.0.updated|date:"G:i:s" }} OK [ last published 1 days ago: 2019-04-30 ] From 0879c1368d71d260210b48aefef263a68d781e70 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 17:36:07 +0000 Subject: [PATCH 021/183] Change what's shown in grid status column --- monitoring/publishing/templates/gridsites.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index 696cbcdd..b06f394a 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -28,7 +28,7 @@

APEL Publication Test

{{ sites.0.name }} {{ sites.0.updated|date:"Y-m-d" }} {{ sites.0.updated|date:"G:i:s" }} - OK [ last published 1 days ago: 2019-04-30 ] + {{ stdout }} From 63c08d2545ed042467cdb1445dcc627d62fb8e84 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 18:14:43 +0000 Subject: [PATCH 022/183] Change which field is used for measurement --- monitoring/publishing/templates/gridsites.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index b06f394a..ad5d5a5f 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -26,8 +26,8 @@

APEL Publication Test

{{ sites.0.name }} - {{ sites.0.updated|date:"Y-m-d" }} - {{ sites.0.updated|date:"G:i:s" }} + {{ last_fetched|date:"Y-m-d" }} + {{ last_fetched|date:"G:i:s" }} {{ stdout }} From c7fe6cae9faaf9d71278b54962173fedbdf2dbf7 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Fri, 22 Nov 2019 18:15:01 +0000 Subject: [PATCH 023/183] Add basic checking of publishing time --- monitoring/publishing/views.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 93bdb03c..2d26a94e 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -31,13 +31,23 @@ def retrieve(self, request, pk=None): print 'No need to update' response = super(GridSiteViewSet, self).retrieve(request) + date = response.data['updated'].replace(tzinfo=None) + # Wrap data in a dict so that it can display in template. if type(request.accepted_renderer) is TemplateHTMLRenderer: # Single result put in list to work with same HTML template. response.data = {'sites': [response.data], 'last_fetched': last_fetched} - response.data['returncode'] = 3 - response.data['stdout'] = "UNKNOWN" + diff = datetime.today() - date + if diff <= timedelta(days=7): + response.data['returncode'] = 0 + response.data['stdout'] = "OK [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) + elif diff > timedelta(days=7): + response.data['returncode'] = 1 + response.data['stdout'] = "WARNING [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) + else: + response.data['returncode'] = 3 + response.data['stdout'] = "UNKNOWN" return response From 24cc4dd24269fedcee9ae804265f66a217e8aeb6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Feb 2020 09:28:39 +0000 Subject: [PATCH 024/183] Bump django from 1.11.26 to 1.11.28 Bumps [django](https://github.com/django/django) from 1.11.26 to 1.11.28. 
- [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/1.11.26...1.11.28) Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 00f75410..8645be01 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.11.26 +Django==1.11.28 djangorestframework==3.9.4 mysqlclient==1.3.9 pytz==2019.3 From 3719da0f1f0dae48202e9f0827feac432924ebd6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 5 Jun 2020 18:46:14 +0000 Subject: [PATCH 025/183] Bump django from 1.11.28 to 1.11.29 Bumps [django](https://github.com/django/django) from 1.11.28 to 1.11.29. - [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/1.11.28...1.11.29) Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 8645be01..c565648b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.11.28 +Django==1.11.29 djangorestframework==3.9.4 mysqlclient==1.3.9 pytz==2019.3 From c6949f3255403dc1e4dc82130cba8794092730fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 19 Mar 2021 22:05:52 +0000 Subject: [PATCH 026/183] Bump djangorestframework from 3.9.4 to 3.11.2 Bumps [djangorestframework](https://github.com/encode/django-rest-framework) from 3.9.4 to 3.11.2. 
- [Release notes](https://github.com/encode/django-rest-framework/releases) - [Commits](https://github.com/encode/django-rest-framework/compare/3.9.4...3.11.2) Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c565648b..d21814a6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ Django==1.11.29 -djangorestframework==3.9.4 +djangorestframework==3.11.2 mysqlclient==1.3.9 pytz==2019.3 From c15c99d3715389d2fff308e998b760e5b4c37428 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Jun 2021 20:41:40 +0000 Subject: [PATCH 027/183] Bump django from 1.11.29 to 2.2.24 Bumps [django](https://github.com/django/django) from 1.11.29 to 2.2.24. - [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/1.11.29...2.2.24) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index d21814a6..ac2019ee 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==1.11.29 +Django==2.2.24 djangorestframework==3.11.2 mysqlclient==1.3.9 pytz==2019.3 From 184a691e517128ea868212bbe98780b14fa7639a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 10 Feb 2022 08:18:45 +0000 Subject: [PATCH 028/183] Bump django from 2.2.24 to 2.2.27 Bumps [django](https://github.com/django/django) from 2.2.24 to 2.2.27. - [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/2.2.24...2.2.27) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index ac2019ee..837bf173 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==2.2.24 +Django==2.2.27 djangorestframework==3.11.2 mysqlclient==1.3.9 pytz==2019.3 From 4093e61f9d173ec1513990c90d8146792595ddf9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 27 Apr 2022 09:26:17 +0100 Subject: [PATCH 029/183] Bump django from 2.2.27 to 2.2.28 (#11) Bumps [django](https://github.com/django/django) from 2.2.27 to 2.2.28. - [Release notes](https://github.com/django/django/releases) - [Commits](https://github.com/django/django/compare/2.2.27...2.2.28) --- updated-dependencies: - dependency-name: django dependency-type: direct:production ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 837bf173..54be2963 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -Django==2.2.27 +Django==2.2.28 djangorestframework==3.11.2 mysqlclient==1.3.9 pytz==2019.3 From b6d23f1d7141af674c754619041a43ce4c2d9533 Mon Sep 17 00:00:00 2001 From: Letizia Protopapa Date: Wed, 22 Feb 2023 16:13:30 +0000 Subject: [PATCH 030/183] Add file explaining how to get the app working on an aq machine. 
--- docs/setting_up_VM_with_app.md | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 docs/setting_up_VM_with_app.md diff --git a/docs/setting_up_VM_with_app.md b/docs/setting_up_VM_with_app.md new file mode 100644 index 00000000..be7f3f3c --- /dev/null +++ b/docs/setting_up_VM_with_app.md @@ -0,0 +1,25 @@ +## Steps to follow to get a VM with monitoring app running on Apache + +To get a 'prototype' of the monitoring app running with Apache, follow these steps: +- create a cloud VM of the type: scientific-linux-7-aq +- continue by selecting sandbox 'testing_personality_2', archetype 'ral-tier1', personality 'apel-data-validation-test' + +Allow 15 minutes after the machine is created, then follow these steps from within the machine: + +- yum remove git && quattor-fetch && quattor-configure --all +- cd /usr/share/DJANGO_MONITORING_APP/monitoring +- modify the file settings.py, specifically the dict called DATABASES, to include the right credentials so as to access the right dbs +- cd .. +- source venv/bin/activate +- systemctl restart httpd +- sudo chown apache . + +At this point the app should be working, so just get the ip address by writing "hostname -I" within the machine and the app should be already running at that address. + + +## What to do if the app seems to stop working +If the VM is shut down, next time we try to open the app, Apache might give the error message "Unable to open the database file". +If this happens, just follow these steps on the machine: +- cd /usr/share/DJANGO_MONITORING_APP +- source venv/bin/activate +- sudo chown apache . From 282dc43c7a46ac334d2f18592d0ec8632cbb2cde Mon Sep 17 00:00:00 2001 From: Letizia Protopapa Date: Wed, 22 Feb 2023 16:16:12 +0000 Subject: [PATCH 031/183] Add file explaining how to access pages of the monitoring app. 
--- docs/what_pages_can_be_accessed.md | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 docs/what_pages_can_be_accessed.md diff --git a/docs/what_pages_can_be_accessed.md b/docs/what_pages_can_be_accessed.md new file mode 100644 index 00000000..0e211cc4 --- /dev/null +++ b/docs/what_pages_can_be_accessed.md @@ -0,0 +1,22 @@ +## Pages that can be accessed through the app + +The following urls are the ones that can be accessed without passing any parameter: +- http://ip-address/publishing/cloud/ +- http://ip-address/publishing/gridsync/ + +These pages show info for a number of sites, so do not require a site name to be specified within the url. + +The url http://ip-address/publishing/grid/ , instead, should be used together with the name of the site we are looking for. +For example: http://ip-address/publishing/grid/BelGrid-UCL/ +It is not supposed to be used without passing the name of the site. + +The url http://ip-address/publishing/gridsync/ shows a sync table, and it's probably the most important bit of the personality 'apel-data-validation'. +This table contains data related to many sites, specifically number of jobs being published vs in the db, and this number is shown for every (available) month of each site. + +Clicking on any name in the first column (containing site names) allows to access a similar table which only shows the data relative to that site. +This more specific table is such that the first columns shows the months for which we have data (for that site). +Clicking on the month allows to open another table that shows data for that month and site only, divided by submithost. + +The pages accessed through the links can of course be accessed by typing directly the url. 
For example, if we want data related to the site 'CSCS-LCG2' and month '2013-11', we would type : +http://ip-address/publishing/gridsync/CSCS-LCG2/2013-11/ +However, in this case if there is no data for the month we are looking for, we would get an error. From d94b4dc9c027ed982ad6d80ae64207a25d0166ad Mon Sep 17 00:00:00 2001 From: Letizia Protopapa Date: Wed, 22 Feb 2023 16:33:52 +0000 Subject: [PATCH 032/183] Add file listing packages installed by Quattor. --- docs/what_gets_installed.md | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 docs/what_gets_installed.md diff --git a/docs/what_gets_installed.md b/docs/what_gets_installed.md new file mode 100644 index 00000000..c096610f --- /dev/null +++ b/docs/what_gets_installed.md @@ -0,0 +1,23 @@ +For Django to work with apache, it is common to have a venv within the app, where django and djangorestframework get installed. Other packages needed for the app to work are installed by Aquilon outside the venv. + +## Packages installed by Aquilon outside the venv +Following the config file that Aquilon uses, the following are the packages installed, in this order: +- httpd +- python3-mod_wsgi (for apache to work with django) +- python3-devel +- gcc (needed for dependencies) +- mariadb. + + +## Packages installed within the venv +Within venv, the following are installed through pip: +- djangorestframework (3.11.2) +- pymysql (needed for mariadb to work) +- pandas (needed by the app) +- django (2.1.*). + +Note that when the version of the packages is specified, the app would not work with a different version (due to dependencies conflicts). +Also, as long as mariadb is installed (both client and server), it is not necessary to install mysqlclient (at least when the OS is Scientific Linux). + +Is is also important to note that different types of OS require different packages to be installed. +The above are the packages that allow the app to work on a scientific linux 7 machine. 
A Centos machine would require slightly different packages. From 7422c4be36f34827d202fcc0d1c9ae315810118a Mon Sep 17 00:00:00 2001 From: Letizia Protopapa Date: Thu, 23 Feb 2023 13:58:54 +0000 Subject: [PATCH 033/183] Add reminder about modifying security groups. --- docs/setting_up_VM_with_app.md | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docs/setting_up_VM_with_app.md b/docs/setting_up_VM_with_app.md index be7f3f3c..6af00c00 100644 --- a/docs/setting_up_VM_with_app.md +++ b/docs/setting_up_VM_with_app.md @@ -2,10 +2,10 @@ To get a 'prototype' of the monitoring app running with Apache, follow these steps: - create a cloud VM of the type: scientific-linux-7-aq -- continue by selecting sandbox 'testing_personality_2', archetype 'ral-tier1', personality 'apel-data-validation-test' - -Allow 15 minutes after the machine is created, then follow these steps from within the machine: +- continue by selecting sandbox 'testing_personality_2', archetype 'ral-tier1', personality 'apel-data-validation-test' +Allow 15 minutes after the machine is created, then remember to edit security groups from OpenStack to allow Apache to work. +Then follow these steps from within the machine: - yum remove git && quattor-fetch && quattor-configure --all - cd /usr/share/DJANGO_MONITORING_APP/monitoring - modify the file settings.py, specifically the dict called DATABASES, to include the right credentials so as to access the right dbs @@ -14,7 +14,7 @@ Allow 15 minutes after the machine is created, then follow these steps from with - systemctl restart httpd - sudo chown apache . -At this point the app should be working, so just get the ip address by writing "hostname -I" within the machine and the app should be already running at that address. +At this point the app should be working, so just get the ip address by writing "hostname -I" within the machine and the app should be already running at that address. 
## What to do if the app seems to stop working From be3e742f73a1b0a5fbb6f862f037c8b69073daa5 Mon Sep 17 00:00:00 2001 From: Letizia Protopapa Date: Thu, 23 Feb 2023 13:59:59 +0000 Subject: [PATCH 034/183] Improve clarity of content. --- docs/setting_up_VM_with_app.md | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/docs/setting_up_VM_with_app.md b/docs/setting_up_VM_with_app.md index 6af00c00..d6d1d126 100644 --- a/docs/setting_up_VM_with_app.md +++ b/docs/setting_up_VM_with_app.md @@ -8,7 +8,7 @@ Allow 15 minutes after the machine is created, then remember to edit security gr Then follow these steps from within the machine: - yum remove git && quattor-fetch && quattor-configure --all - cd /usr/share/DJANGO_MONITORING_APP/monitoring -- modify the file settings.py, specifically the dict called DATABASES, to include the right credentials so as to access the right dbs +- modify the file settings.py, specifically the dict called DATABASES, to include the right credentials and database names - cd .. - source venv/bin/activate - systemctl restart httpd @@ -17,9 +17,12 @@ Then follow these steps from within the machine: At this point the app should be working, so just get the ip address by writing "hostname -I" within the machine and the app should be already running at that address. -## What to do if the app seems to stop working +## What to do if the app seems to stop working after closing the VM If the VM is shut down, next time we try to open the app, Apache might give the error message "Unable to open the database file". If this happens, just follow these steps on the machine: -- cd /usr/share/DJANGO_MONITORING_APP -- source venv/bin/activate -- sudo chown apache . +1. cd /usr/share/DJANGO_MONITORING_APP +2. source venv/bin/activate +3. sudo chown apache . + +Note that step 2 is necessary for step 3 to work and the error message to disappear. 
+ From ef2932d0004dc8ff38aaa51688005048241df909 Mon Sep 17 00:00:00 2001 From: Letizia Protopapa Date: Thu, 23 Feb 2023 14:31:40 +0000 Subject: [PATCH 035/183] Remove whitespaces and unnecessary blank lines. --- docs/setting_up_VM_with_app.md | 1 - docs/what_pages_can_be_accessed.md | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/docs/setting_up_VM_with_app.md b/docs/setting_up_VM_with_app.md index d6d1d126..e8346f8e 100644 --- a/docs/setting_up_VM_with_app.md +++ b/docs/setting_up_VM_with_app.md @@ -25,4 +25,3 @@ If this happens, just follow these steps on the machine: 3. sudo chown apache . Note that step 2 is necessary for step 3 to work and the error message to disappear. - diff --git a/docs/what_pages_can_be_accessed.md b/docs/what_pages_can_be_accessed.md index 0e211cc4..6f83fb75 100644 --- a/docs/what_pages_can_be_accessed.md +++ b/docs/what_pages_can_be_accessed.md @@ -6,7 +6,7 @@ The following urls are the ones that can be accessed without passing any paramet These pages show info for a number of sites, so do not require a site name to be specified within the url. -The url http://ip-address/publishing/grid/ , instead, should be used together with the name of the site we are looking for. +The url http://ip-address/publishing/grid/ , instead, should be used together with the name of the site we are looking for. For example: http://ip-address/publishing/grid/BelGrid-UCL/ It is not supposed to be used without passing the name of the site. @@ -14,9 +14,9 @@ The url http://ip-address/publishing/gridsync/ shows a sync table, and it's prob This table contains data related to many sites, specifically number of jobs being published vs in the db, and this number is shown for every (available) month of each site. Clicking on any name in the first column (containing site names) allows to access a similar table which only shows the data relative to that site. 
-This more specific table is such that the first columns shows the months for which we have data (for that site). +This more specific table is such that the first columns shows the months for which we have data (for that site). Clicking on the month allows to open another table that shows data for that month and site only, divided by submithost. The pages accessed through the links can of course be accessed by typing directly the url. For example, if we want data related to the site 'CSCS-LCG2' and month '2013-11', we would type : -http://ip-address/publishing/gridsync/CSCS-LCG2/2013-11/ +http://ip-address/publishing/gridsync/CSCS-LCG2/2013-11/ However, in this case if there is no data for the month we are looking for, we would get an error. From b751833ede88dd9514144d596f022c873a7abdd3 Mon Sep 17 00:00:00 2001 From: Adrian Coveney <4836233+tofu-rocketry@users.noreply.github.com> Date: Thu, 18 Jan 2024 16:11:54 +0000 Subject: [PATCH 036/183] Apply suggestions from code review Co-authored-by: gregcorbett --- docs/setting_up_VM_with_app.md | 2 +- docs/what_gets_installed.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/setting_up_VM_with_app.md b/docs/setting_up_VM_with_app.md index e8346f8e..254bf0fa 100644 --- a/docs/setting_up_VM_with_app.md +++ b/docs/setting_up_VM_with_app.md @@ -6,7 +6,7 @@ To get a 'prototype' of the monitoring app running with Apache, follow these ste Allow 15 minutes after the machine is created, then remember to edit security groups from OpenStack to allow Apache to work. Then follow these steps from within the machine: -- yum remove git && quattor-fetch && quattor-configure --all +- quattor-fetch && quattor-configure --all - cd /usr/share/DJANGO_MONITORING_APP/monitoring - modify the file settings.py, specifically the dict called DATABASES, to include the right credentials and database names - cd .. 
diff --git a/docs/what_gets_installed.md b/docs/what_gets_installed.md index c096610f..0d43cce5 100644 --- a/docs/what_gets_installed.md +++ b/docs/what_gets_installed.md @@ -1,7 +1,7 @@ For Django to work with apache, it is common to have a venv within the app, where django and djangorestframework get installed. Other packages needed for the app to work are installed by Aquilon outside the venv. ## Packages installed by Aquilon outside the venv -Following the config file that Aquilon uses, the following are the packages installed, in this order: +Following the config file that Aquilon uses, the following are the packages installed: - httpd - python3-mod_wsgi (for apache to work with django) - python3-devel From 09a0b8cc5b9e2397ff02992877d2ca9f69e00468 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 6 Jan 2023 11:39:55 +0000 Subject: [PATCH 037/183] Ensure Python3 compatibility. --- monitoring/publishing/views.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 2d26a94e..e0e1d5d6 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -21,14 +21,14 @@ def retrieve(self, request, pk=None): last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] # If there's no data then last_fetched is None. 
if last_fetched is not None: - print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): - print 'Out of date' + print('Out of date') fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") for f in fetchset: GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, name=f.Site) else: - print 'No need to update' + print('No need to update') response = super(GridSiteViewSet, self).retrieve(request) date = response.data['updated'].replace(tzinfo=None) @@ -59,14 +59,14 @@ class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): def list(self, request): last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] - print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): - print 'Out of date' + print('Out of date') fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: CloudSite.objects.update_or_create(defaults={'vms': f.VMs, 'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) else: - print 'No need to update' + print('No need to update') response = super(CloudSiteViewSet, self).list(request) # Wrap data in a dict so that it can display in template. 
@@ -76,14 +76,14 @@ def list(self, request): def retrieve(self, request, pk=None): last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] - print last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20) + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): - print 'Out of date' + print('Out of date') fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: CloudSite.objects.update_or_create(defaults={'vms': f.VMs, 'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) else: - print 'No need to update' + print('No need to update') response = super(CloudSiteViewSet, self).retrieve(request) # Wrap data in a dict so that it can display in template. From abbe8c0c472e3bcc431107873f50944fd5915150 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 6 Jan 2023 11:44:43 +0000 Subject: [PATCH 038/183] Make imports more explicit. 
--- monitoring/availability/urls.py | 4 +++- monitoring/publishing/serializers.py | 2 +- monitoring/publishing/urls.py | 2 +- monitoring/publishing/views.py | 4 ++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/monitoring/availability/urls.py b/monitoring/availability/urls.py index 0c9a4030..edf402e3 100644 --- a/monitoring/availability/urls.py +++ b/monitoring/availability/urls.py @@ -1,6 +1,8 @@ from django.conf.urls import url -import views +import sys +sys.path.append('/usr/share/DJANGO_MONITORING_APP') +from monitoring.availability import views urlpatterns = [ url(r'^$', views.status), diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 3e6eb060..aef7ae85 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -1,6 +1,6 @@ from rest_framework import serializers -from models import CloudSite, GridSite +from monitoring.publishing.models import CloudSite, GridSite class GridSiteSerializer(serializers.HyperlinkedModelSerializer): diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 154fad47..9691e8da 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -2,7 +2,7 @@ from rest_framework import routers -import views +from monitoring.publishing import views router = routers.SimpleRouter() router.register(r'cloud', views.CloudSiteViewSet) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index e0e1d5d6..f296fadf 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -8,8 +8,8 @@ from rest_framework import viewsets from rest_framework.renderers import TemplateHTMLRenderer -from models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord -from serializers import GridSiteSerializer, CloudSiteSerializer +from monitoring.publishing.models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord +from monitoring.publishing.serializers import 
GridSiteSerializer, CloudSiteSerializer class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): From 9bd9ec0ed53467bedc41dce8c266397d8e6be391 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 6 Jan 2023 11:45:40 +0000 Subject: [PATCH 039/183] Add allowed hosts. --- monitoring/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index 9c123cc1..bbeaadb1 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -25,7 +25,7 @@ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True -ALLOWED_HOSTS = [] +ALLOWED_HOSTS = [ '127.0.0.1', 'localhost'] # Application definition From 96e1b4a146dabb6776eeb424b1064e8b6137b28d Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 16:43:32 +0000 Subject: [PATCH 040/183] Add static root to be able to run "manage.py collectstatic". --- monitoring/settings.py | 1 + 1 file changed, 1 insertion(+) diff --git a/monitoring/settings.py b/monitoring/settings.py index bbeaadb1..e1da5662 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -143,3 +143,4 @@ # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' +STATIC_ROOT = os.path.join(BASE_DIR, "static/") \ No newline at end of file From c8a79d8a61d6668cf90cce49747bde31a3dff73d Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 6 Jan 2023 11:47:16 +0000 Subject: [PATCH 041/183] Ensure Python3 compatibility for CloudSite view. 
--- monitoring/publishing/views.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index f296fadf..188fd67d 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -59,8 +59,9 @@ class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): def list(self, request): last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] - print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): + if last_fetched is not None: + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): print('Out of date') fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: From 6681204d0f6aefcec57860d80f5724678aa08161 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 6 Jan 2023 14:16:41 +0000 Subject: [PATCH 042/183] Ensure the grid site works when retrieving multiple records. 
--- monitoring/publishing/views.py | 37 +++++++++++++++++++++++++++++++++- 1 file changed, 36 insertions(+), 1 deletion(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 188fd67d..25d1e382 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -17,6 +17,41 @@ class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): serializer_class = GridSiteSerializer template_name = 'gridsites.html' + def list(self, request): + last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] + if last_fetched is not None: + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): + fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") + for f in fetchset: + GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, name=f.Site) + else: + print('No need to update') + + final_response = [] + response = super(GridSiteViewSet, self).list(request) + + for single_dict in response.data: + date = single_dict.get('updated').replace(tzinfo=None) + + diff = datetime.today() - date + if diff <= timedelta(days=7): + single_dict['returncode'] = 0 + single_dict['stdout'] = "OK [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) + elif diff > timedelta(days=7): + single_dict['returncode'] = 1 + single_dict['stdout'] = "WARNING [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) + else: + single_dict['returncode'] = 3 + single_dict['stdout'] = "UNKNOWN" + final_response.append(single_dict) + + if type(request.accepted_renderer) is TemplateHTMLRenderer: + response.data = {'sites': final_response, 'last_fetched': last_fetched} + + return response + + def retrieve(self, request, pk=None): last_fetched = 
GridSite.objects.aggregate(Max('fetched'))['fetched__max'] # If there's no data then last_fetched is None. @@ -95,4 +130,4 @@ def retrieve(self, request, pk=None): response.data['returncode'] = 3 response.data['stdout'] = "UNKNOWN" - return response + return response \ No newline at end of file From e9894a92e967ead2f566f77a4a97c5e61475e65d Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:36:54 +0000 Subject: [PATCH 043/183] Add import for pymysql. --- monitoring/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/monitoring/__init__.py b/monitoring/__init__.py index e69de29b..c45523b2 100644 --- a/monitoring/__init__.py +++ b/monitoring/__init__.py @@ -0,0 +1,2 @@ +import pymysql +pymysql.install_as_MySQLdb() \ No newline at end of file From 8eaf7449f8dfc145d2e893f112fdd4c968e10175 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:37:59 +0000 Subject: [PATCH 044/183] Add models for the sync page. --- monitoring/publishing/models.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index 1624b5aa..e4f6ecd1 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -13,12 +13,44 @@ class GridSite(models.Model): class VSuperSummaries(models.Model): Site = models.CharField(max_length=255, primary_key=True) LatestPublish = models.DateTimeField() + Month = models.IntegerField() + Year = models.IntegerField() + RecordStart = models.DateTimeField() + RecordEnd = models.DateTimeField() + RecordCountPublished = models.IntegerField() class Meta: managed = False db_table = 'VSuperSummaries' +class GridSiteSync(models.Model): + fetched = models.DateTimeField(auto_now=True) + SiteName = models.CharField(max_length=255) + YearMonth = models.CharField(max_length=255) + Year = models.IntegerField() + Month = models.IntegerField() + RecordStart = models.DateTimeField() + RecordEnd = models.DateTimeField() + 
RecordCountPublished = models.IntegerField() + RecordCountInDb = models.IntegerField() + SyncStatus = models.CharField(max_length=255) + + class Meta: + # Descending order of Year and Month to display latest data first + ordering = ('SiteName', '-Year', '-Month') + unique_together = ('SiteName', 'YearMonth') + + +class VSyncRecords(models.Model): + Site = models.CharField(max_length=255, primary_key=True) + RecordCountInDb = models.IntegerField() + + class Meta: + managed = False + db_table = 'VSyncRecords' + + class CloudSite(models.Model): fetched = models.DateTimeField(auto_now=True) name = models.CharField(max_length=255, primary_key=True) From 58417ca89d9fd9ee7c6400af906efddd1e799643 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:38:49 +0000 Subject: [PATCH 045/183] Modify names to ensure consistency with the new models (GridSiteSync). --- monitoring/publishing/models.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index e4f6ecd1..f0e1eb7a 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -6,7 +6,7 @@ class GridSite(models.Model): fetched = models.DateTimeField(auto_now=True) - name = models.CharField(max_length=255, primary_key=True) + SiteName = models.CharField(max_length=255, primary_key=True) updated = models.DateTimeField() @@ -18,7 +18,7 @@ class VSuperSummaries(models.Model): RecordStart = models.DateTimeField() RecordEnd = models.DateTimeField() RecordCountPublished = models.IntegerField() - + class Meta: managed = False db_table = 'VSuperSummaries' @@ -53,13 +53,13 @@ class Meta: class CloudSite(models.Model): fetched = models.DateTimeField(auto_now=True) - name = models.CharField(max_length=255, primary_key=True) - vms = models.IntegerField(default=0) - script = models.CharField(max_length=255) + SiteName = models.CharField(max_length=255, primary_key=True) + Vms = models.IntegerField(default=0) + 
Script = models.CharField(max_length=255) updated = models.DateTimeField() class Meta: - ordering = ('name',) + ordering = ('SiteName',) class VAnonCloudRecord(models.Model): @@ -78,3 +78,4 @@ def __str__(self): self.CloudType, self.UpdateTime, self.VMs) + \ No newline at end of file From 274dc73dd4f299a6dd57843d8cf25439101bab24 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:40:13 +0000 Subject: [PATCH 046/183] Add serializer for gridsync site. --- monitoring/publishing/serializers.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index aef7ae85..dbd2fc20 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -1,6 +1,6 @@ from rest_framework import serializers -from monitoring.publishing.models import CloudSite, GridSite +from monitoring.publishing.models import CloudSite, GridSite, GridSiteSync class GridSiteSerializer(serializers.HyperlinkedModelSerializer): @@ -14,6 +14,22 @@ class Meta: fields = ('url', 'name', 'updated') +class GridSiteSyncSerializer(serializers.HyperlinkedModelSerializer): + # Override default format with None so that Python datetime is used as + # ouput format. Encoding will be determined by the renderer and can be + # formatted by a template filter. + + class Meta: + model = GridSiteSync + fields = ('url', 'SiteName', 'YearMonth', 'RecordStart', 'RecordEnd', 'RecordCountPublished', 'RecordCountInDb', 'SyncStatus') + + # Sitename substitutes pk + lookup_field = 'SiteName' + extra_kwargs = { + 'url': {'lookup_field': 'SiteName'} + } + + class CloudSiteSerializer(serializers.HyperlinkedModelSerializer): # Override default format with None so that Python datetime is used as # ouput format. 
Encoding will be determined by the renderer and can be From 40fa9725570ee617b5f1865eb8ec45ca5091bee1 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:41:41 +0000 Subject: [PATCH 047/183] Modify names to ensure consistency with modified models. --- monitoring/publishing/serializers.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index dbd2fc20..3da01dc2 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -11,7 +11,7 @@ class GridSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = GridSite - fields = ('url', 'name', 'updated') + fields = ('url', 'SiteName', 'updated') class GridSiteSyncSerializer(serializers.HyperlinkedModelSerializer): @@ -38,4 +38,4 @@ class CloudSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CloudSite - fields = ('url', 'name', 'vms', 'script', 'updated') + fields = ('url', 'SiteName', 'Vms', 'Script', 'updated') From 14788f3cb07f277ce52c0ac0b55c15c712975e1a Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:45:45 +0000 Subject: [PATCH 048/183] Add view for the gridsync table, for both single site and all sites. 
--- monitoring/publishing/views.py | 176 ++++++++++++++++++++++++++++++++- 1 file changed, 173 insertions(+), 3 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 25d1e382..244d307f 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -4,12 +4,13 @@ from datetime import datetime, timedelta from django.db.models import Max +import pandas as pd -from rest_framework import viewsets +from rest_framework import viewsets, generics from rest_framework.renderers import TemplateHTMLRenderer -from monitoring.publishing.models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord -from monitoring.publishing.serializers import GridSiteSerializer, CloudSiteSerializer +from monitoring.publishing.models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord, GridSiteSync, VSyncRecords +from monitoring.publishing.serializers import GridSiteSerializer, CloudSiteSerializer, GridSiteSyncSerializer class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): @@ -87,6 +88,175 @@ def retrieve(self, request, pk=None): return response + +class GridSiteSyncViewSet(viewsets.ReadOnlyModelViewSet): + queryset = GridSiteSync.objects.all() + serializer_class = GridSiteSyncSerializer + lookup_field = 'SiteName' + + # When a single site is showed (retrieve function used), the template + # is different than the one used when showing a list of sites + def get_template_names(self): + if self.action == 'list': + return ['gridsync.html'] + elif self.action == 'retrieve': + return ['gridsync_singlesite.html'] + + # Combine Year and Month into one string (display purposes) + def get_year_month_string(self, year, month): + year_string = str(year) + month_string = str(month) + if len(month_string)==1: + month_string = '0'+month_string + return year_string+ '-' +month_string + + def list(self, request): + last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] + n_sites = 
GridSiteSync.objects.values('SiteName').distinct().count() + + if last_fetched is not None: + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or n_sites==1: + print('Out of date') + + # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' + fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01' GROUP BY Site, Year, Month;") + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") + + # Create empty dicts that will become dfs to be combined + Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"RecordCountInDb":[]} + + # Fill the dicts with the fetched data + for row in fetchset_Summaries: + Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] + Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] + Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] + + for row in fetchset_SyncRecords: + SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] + SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] + SyncRecords_dict["Year"] = 
SyncRecords_dict.get("Year") + [row.Year] + SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] + + # Merge data from VSuperSummaries and VSyncRecords into one df + df_Summaries = pd.DataFrame.from_dict(Summaries_dict) + df_SyncRecords = pd.DataFrame.from_dict(SyncRecords_dict) + df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year'], right_on=['Site', 'Month', 'Year'], how='inner') + fetchset = df_all.to_dict('index') + + # Delete all data if table not empty (as this function lists all sites) + GridSiteSync.objects.all().delete() + + # Determine SyncStatus based on the difference between records published and in db + for f in fetchset.values(): + rel_diff1 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountInDb")) + rel_diff2 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountPublished")) + if rel_diff1 < 0.01 or rel_diff2 < 0.01: + f['SyncStatus']='OK' + else: + f['SyncStatus']='Error' + + # Combined primary keys outside the default dict + GridSiteSync.objects.update_or_create( + defaults={ + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), + 'RecordCountPublished': f.get("RecordCountPublished"), + 'RecordCountInDb': f.get("RecordCountInDb"), + 'SyncStatus': f.get("SyncStatus"), + }, + YearMonth = self.get_year_month_string(f.get("Year"), f.get("Month")), + SiteName=f.get("Site"), + Month=f.get("Month"), + Year=f.get("Year"), + ) + + else: + print('No need to update') + + response = super(GridSiteSyncViewSet, self).list(request) + response.data = {'records': response.data, 'last_fetched': last_fetched} + return response + + + + def retrieve(self, request, SiteName=None): + lookup_field = 'SiteName' + last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] + row_1 = GridSiteSync.objects.filter()[:1].get() + n_sites = GridSiteSync.objects.values('SiteName').distinct().count() + + if 
last_fetched is not None: + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or n_sites > 1 or SiteName != row_1.SiteName: + print('Out of date') + + # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' + fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01'GROUP BY Site, Month, Year;".format(SiteName)) + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) + + Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[], "RecordCountInDb":[]} + + for row in fetchset_Summaries: + Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] + Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] + Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] + + for row in fetchset_SyncRecords: + SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] + SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] + SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] + 
SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] + + df_Summaries = pd.DataFrame.from_dict(Summaries_dict) + df_SyncRecords = pd.DataFrame.from_dict(SyncRecords_dict) + df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year'], right_on=['Site', 'Month', 'Year'], how='inner') + fetchset = df_all.to_dict('index') + + # Ensure we list only the data for one site + first_row = GridSiteSync.objects.first() + if first_row is not None: + if first_row.SiteName != SiteName: + GridSiteSync.objects.all().delete() + + for f in fetchset.values(): + rel_diff1 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountInDb")) + rel_diff2 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountPublished")) + if rel_diff1 <= 0.01 or rel_diff2 <= 0.01: + f['SyncStatus']='OK' + else: + f['SyncStatus']='Error' + + GridSiteSync.objects.update_or_create( + defaults={ + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), + 'RecordCountPublished': f.get("RecordCountPublished"), + 'RecordCountInDb': f.get("RecordCountInDb"), + 'SyncStatus': f.get("SyncStatus"), + }, + YearMonth = self.get_year_month_string(f.get("Year"), f.get("Month")), + SiteName=f.get("Site"), + Month=f.get("Month"), + Year=f.get("Year"), + ) + + else: + print('No need to update') + + response = super(GridSiteSyncViewSet, self).list(request) + response.data = {'records': response.data, 'last_fetched': last_fetched} + return response + + class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = CloudSite.objects.all() serializer_class = CloudSiteSerializer From 39f77d237428026ffa080021be925f7bca05c1ec Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:47:04 +0000 Subject: [PATCH 049/183] Update field names to ensure consistency with modified models and serialisers. 
--- monitoring/publishing/views.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 244d307f..f3f456a1 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -25,7 +25,7 @@ def list(self, request): if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") for f in fetchset: - GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, name=f.Site) + GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, SiteName=f.Site) else: print('No need to update') @@ -62,7 +62,7 @@ def retrieve(self, request, pk=None): print('Out of date') fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") for f in fetchset: - GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, name=f.Site) + GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, SiteName=f.Site) else: print('No need to update') @@ -270,7 +270,7 @@ def list(self, request): print('Out of date') fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: - CloudSite.objects.update_or_create(defaults={'vms': f.VMs, 'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) + CloudSite.objects.update_or_create(defaults={'Vms': f.VMs, 'Script': f.CloudType, 'updated': f.UpdateTime}, SiteName=f.SiteName) 
else: print('No need to update') @@ -287,7 +287,7 @@ def retrieve(self, request, pk=None): print('Out of date') fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: - CloudSite.objects.update_or_create(defaults={'vms': f.VMs, 'script': f.CloudType, 'updated': f.UpdateTime}, name=f.SiteName) + CloudSite.objects.update_or_create(defaults={'Vms': f.VMs, 'Script': f.CloudType, 'updated': f.UpdateTime}, SiteName=f.SiteName) else: print('No need to update') From b55b90d88d0eb1651b20f2097166239b48fb3d9a Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:49:08 +0000 Subject: [PATCH 050/183] Add html templates for gridsync, both all sites page and single site page. --- monitoring/publishing/templates/gridsync.html | 43 +++++++++++++++++++ .../templates/gridsync_singlesite.html | 41 ++++++++++++++++++ 2 files changed, 84 insertions(+) create mode 100644 monitoring/publishing/templates/gridsync.html create mode 100644 monitoring/publishing/templates/gridsync_singlesite.html diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html new file mode 100644 index 00000000..1b43e972 --- /dev/null +++ b/monitoring/publishing/templates/gridsync.html @@ -0,0 +1,43 @@ + + + + + APEL Publication Summary + + + + +

APEL Synchronisation Test

+
    +
  • A comparison is made between your local APEL database, and the data that you have published to the GOC. +
  • Major differences are flagged with Error. +
  • Information about APEL APEL Wiki +
  • Contact: apel-admins [at] stfc.ac.uk +
  • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

+ + + + + + + + + + + + + + {% for record in records %} + + + + + + + + + + + {% endfor %} + +
All sites
Site NameMonthRecord StartRecord EndRecord Count
In Your Database
Record Count
What You Published
Synchronisation
Status
{{ record.SiteName }}{{ record.YearMonth }} {{ record.RecordStart }}{{ record.RecordEnd }}{{ record.RecordCountInDb }}{{ record.RecordCountPublished }}{{ record.SyncStatus }}
\ No newline at end of file diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html new file mode 100644 index 00000000..e079a0be --- /dev/null +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -0,0 +1,41 @@ + + + + + APEL Publication Summary + + + + +

APEL Synchronisation Test

+
    +
  • A comparison is made between your local APEL database, and the data that you have published to the GOC. +
  • Major differences are flagged with Error. +
  • Information about APEL APEL Wiki +
  • Contact: apel-admins [at] stfc.ac.uk +
  • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

+ + + + + + + + + + + + + {% for record in records %} + + + + + + + + + + {% endfor %} + +
{{ records.0.SiteName }}
MonthRecord StartRecord EndRecord Count
In Your Database
Record Count
What You Published
Synchronisation
Status
{{ record.YearMonth }} {{ record.RecordStart }}{{ record.RecordEnd }}{{ record.RecordCountInDb }}{{ record.RecordCountPublished }}{{ record.SyncStatus }}
\ No newline at end of file From 3a231a861d51db7152681463bceb4af53bb21979 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 10:50:05 +0000 Subject: [PATCH 051/183] Update field names to ensure consistency with modified views. --- monitoring/publishing/templates/cloudsites.html | 6 +++--- monitoring/publishing/templates/gridsites.html | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index e9c2bb7b..5fa8680e 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -16,9 +16,9 @@

Sites publishing cloud accounting records from 2018-06-19 onwards

{% for site in sites %} - {{ site.name }} - {{ site.vms }} - {{ site.script }} + {{ site.SiteName }} + {{ site.Vms }} + {{ site.Script }} {{ site.updated|date:"Y-m-d H:i:s" }} {% endfor %} diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index ad5d5a5f..0ec21b0b 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -17,7 +17,7 @@

APEL Publication Test

  • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}
    - + @@ -25,7 +25,7 @@

    APEL Publication Test

    - + From acecbd3d612c44b3f0d7cab7cf7fa56da8c2e9d0 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 11:11:28 +0000 Subject: [PATCH 052/183] Add urls for gridsync pages. --- monitoring/publishing/urls.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 9691e8da..942c7987 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -3,12 +3,16 @@ from rest_framework import routers from monitoring.publishing import views +from django.urls import re_path router = routers.SimpleRouter() router.register(r'cloud', views.CloudSiteViewSet) router.register(r'grid', views.GridSiteViewSet) +router.register(r'gridsync', views.GridSiteSyncViewSet) - +# Necessary to retrieve one site only urlpatterns = [ - url(r'^', include(router.urls)), + re_path(r'^gridsync/(?P[a-zA-Z0-9-]+)/$', views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), name='gridsync_singlesite'), ] + +urlpatterns += router.urls From fc13eb71c4cee229ef9d6af2742c3e32c4a14544 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 15:51:46 +0000 Subject: [PATCH 053/183] Add model for the table with submithosts. 
--- monitoring/publishing/models.py | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index f0e1eb7a..1978624a 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -78,4 +78,20 @@ def __str__(self): self.CloudType, self.UpdateTime, self.VMs) - \ No newline at end of file + + +class GridSiteSyncSubmitH(models.Model): + fetched = models.DateTimeField(auto_now=True) + SiteName = models.CharField(max_length=255) + YearMonth = models.CharField(max_length=255) + Year = models.IntegerField() + Month = models.IntegerField() + RecordStart = models.DateTimeField() + RecordEnd = models.DateTimeField() + RecordCountPublished = models.IntegerField() + RecordCountInDb = models.IntegerField() + SubmitHost = models.CharField(max_length=255) + + class Meta: + ordering = ('SiteName', '-Year', '-Month') + unique_together = ('SiteName', 'YearMonth', 'SubmitHost') \ No newline at end of file From 2a2caea72442b0903ff645e132f0a62f88afc312 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 15:52:27 +0000 Subject: [PATCH 054/183] Add serialiser for the table with submithosts. 
--- monitoring/publishing/serializers.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 3da01dc2..18bf877c 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -1,6 +1,6 @@ from rest_framework import serializers -from monitoring.publishing.models import CloudSite, GridSite, GridSiteSync +from monitoring.publishing.models import CloudSite, GridSite, GridSiteSync, GridSiteSyncSubmitH class GridSiteSerializer(serializers.HyperlinkedModelSerializer): @@ -39,3 +39,15 @@ class CloudSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CloudSite fields = ('url', 'SiteName', 'Vms', 'Script', 'updated') + + +class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): + # Override default format with None so that Python datetime is used as + # ouput format. Encoding will be determined by the renderer and can be + # formatted by a template filter. + + class Meta: + model = GridSiteSyncSubmitH + fields = ('url', 'SiteName', 'YearMonth', 'RecordStart', 'RecordEnd', 'RecordCountPublished', 'RecordCountInDb', 'SubmitHost') + + lookup_fields = ('SiteName', 'YearMonth') \ No newline at end of file From 247509365772ef3f812094df811506540aa6cc1b Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 15:54:26 +0000 Subject: [PATCH 055/183] Add view for the sync table showing submithosts. 
--- monitoring/publishing/views.py | 115 ++++++++++++++++++++++++++++++++- 1 file changed, 113 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index f3f456a1..248df2c9 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -9,8 +9,8 @@ from rest_framework import viewsets, generics from rest_framework.renderers import TemplateHTMLRenderer -from monitoring.publishing.models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord, GridSiteSync, VSyncRecords -from monitoring.publishing.serializers import GridSiteSerializer, CloudSiteSerializer, GridSiteSyncSerializer +from monitoring.publishing.models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord, GridSiteSync, VSyncRecords, GridSiteSyncSubmitH +from monitoring.publishing.serializers import GridSiteSerializer, CloudSiteSerializer, GridSiteSyncSerializer, GridSiteSyncSubmitHSerializer class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): @@ -257,6 +257,117 @@ def retrieve(self, request, SiteName=None): return response +# Needed for passing two parameters to a viewset (GridSiteSyncSubmitHViewSet) +class MultipleFieldLookupMixin: + """ + Apply this mixin to any view or viewset to get multiple field filtering + based on a `lookup_fields` attribute, instead of the default single field filtering. 
+ """ + def get_object(self): + queryset = self.get_queryset() + queryset = self.filter_queryset(queryset) + filter = {} + for field in self.lookup_fields: + if self.kwargs.get(field): + filter[field] = self.kwargs[field] + obj = get_object_or_404(queryset, **filter) + self.check_object_permissions(self.request, obj) + return obj + + +class GridSiteSyncSubmitHViewSet(MultipleFieldLookupMixin, viewsets.ReadOnlyModelViewSet): + queryset = GridSiteSyncSubmitH.objects.all() + serializer_class = GridSiteSyncSubmitHSerializer + template_name = 'gridsync_submithost.html' + + def list(self, request): + response = super(GridSiteSyncSubmitHViewSet, self).list(request) + response.data = {'submisthosts': response.data, 'last_fetched': last_fetched} + return response + + def retrieve(self, request, SiteName=None, YearMonth=None): + + lookup_fields = ('SiteName', 'YearMonth') + last_fetched = GridSiteSyncSubmitH.objects.aggregate(Max('fetched'))['fetched__max'] + Year, Month = YearMonth.replace('-', ' ').split(' ') + sitename_in_table = None + yearmonth_in_table = None + + # This is to ensure the data is updated when changing (or clicking on another) month + if GridSiteSyncSubmitH.objects.count() > 0: + row_1 = GridSiteSyncSubmitH.objects.filter()[:1].get() + sitename_in_table = row_1.SiteName + yearmonth_in_table = row_1.YearMonth + + if last_fetched is not None: + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or (sitename_in_table!=SiteName) or (yearmonth_in_table!=YearMonth) : + print('Out of date') + + fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY 
SubmitHost;".format(SiteName, Month, Year)) + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) + + Summaries_dict = {"Site":[], "Month":[], "Year":[], "SubmitHostSumm":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"SubmitHostSync":[], "RecordCountInDb":[]} + + for row in fetchset_Summaries: + Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] + Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] + Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] + Summaries_dict["SubmitHostSumm"] = Summaries_dict.get("SubmitHostSumm") + [row.SubmitHostSumm] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] + + for row in fetchset_SyncRecords: + SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] + SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] + SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] + SyncRecords_dict["SubmitHostSync"] = SyncRecords_dict.get("SubmitHostSync") + [row.SubmitHostSync] + SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] + + df_Summaries = pd.DataFrame.from_dict(Summaries_dict) + df_SyncRecords = pd.DataFrame.from_dict(SyncRecords_dict) + df_Summaries.dropna(inplace=True) + df_SyncRecords.dropna(inplace=True) + + df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year', 'SubmitHostSumm'], right_on=['Site', 'Month', 'Year', 'SubmitHostSync'], how='outer') + 
fetchset = df_all.to_dict('index') + + # This is to list only data for one month + GridSiteSyncSubmitH.objects.all().delete() + + def get_year_month_string(year, month): + year_string = str(year) + month_string = str(month) + if len(month_string)==1: + month_string = '0'+month_string + return year_string+ '-' +month_string + + for f in fetchset.values(): + GridSiteSyncSubmitH.objects.update_or_create( + defaults={ + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), + 'RecordCountPublished': f.get("RecordCountPublished"), + 'RecordCountInDb': f.get("RecordCountInDb"), + }, + SiteName=f.get("Site"), + YearMonth = get_year_month_string(f.get("Year"), f.get("Month")), + Month=f.get("Month"), + Year=f.get("Year"), + SubmitHost = f.get("SubmitHostSumm"), + ) + + else: + print('No need to update') + + response = super(GridSiteSyncSubmitHViewSet, self).list(request) + response.data = {'submisthosts': response.data, 'last_fetched': last_fetched} + return response + + + class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = CloudSite.objects.all() serializer_class = CloudSiteSerializer From 6ebb2d153c42a33dca15266d63ab5a283830a67f Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 15:55:50 +0000 Subject: [PATCH 056/183] Add hyperlink to table showing submithosts. --- monitoring/publishing/templates/gridsync_singlesite.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html index e079a0be..00f7aca7 100644 --- a/monitoring/publishing/templates/gridsync_singlesite.html +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -28,7 +28,7 @@

    APEL Synchronisation Test

    {% for record in records %}
    - + From 671b4abcbc839cf7c217c20a6733f44e2edc6540 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 15:56:17 +0000 Subject: [PATCH 057/183] Add template for table showing submithosts. --- .../templates/gridsync_submithost.html | 43 +++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 monitoring/publishing/templates/gridsync_submithost.html diff --git a/monitoring/publishing/templates/gridsync_submithost.html b/monitoring/publishing/templates/gridsync_submithost.html new file mode 100644 index 00000000..a919ea6a --- /dev/null +++ b/monitoring/publishing/templates/gridsync_submithost.html @@ -0,0 +1,43 @@ + + + + + APEL Publication Summary + + + + +

    APEL Synchronisation Test

    +
      +
    • A comparison is made between your local APEL database, and the data that you have published to the GOC. +
    • Major differences are flagged with FAIL. +
    • Information about APEL APEL Wiki +
    • Contact: apel-admins [at] stfc.ac.uk +
    • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

    + +
    {{ sites.0.name }}
    {{ sites.0.SiteName }}
    ExecutingSite MeasurementDate Publication
    Status
    {{ sites.0.name }}{{ sites.0.SiteName }} {{ last_fetched|date:"Y-m-d" }} {{ last_fetched|date:"G:i:s" }} {{ stdout }}
    {{ record.YearMonth }} {{ record.YearMonth }} {{ record.RecordStart }} {{ record.RecordEnd }} {{ record.RecordCountPublished }}
    + + + + + + + + + + + {% for host in submisthosts %} + + + + + + + + + + + + {% endfor %} + +
    {{submisthosts.0.SiteName}}, {{submisthosts.0.YearMonth}}
    MonthSubmitHostRecordStartRecordEndRecord Count
    In Your Database
    Record Count
    What You Published
    {{ host.YearMonth }} {{ host.SubmitHost }}{{ host.RecordStart }}{{ host.RecordEnd }}{{ host.RecordCountPublished }}{{ host.RecordCountInDb }}
    \ No newline at end of file From b37dde27df3386f3d455234a701c222453156a15 Mon Sep 17 00:00:00 2001 From: Letizia Date: Wed, 25 Jan 2023 16:04:30 +0000 Subject: [PATCH 058/183] Add url to the page showing submithosts. --- monitoring/publishing/urls.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 942c7987..449ded60 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -9,10 +9,11 @@ router.register(r'cloud', views.CloudSiteViewSet) router.register(r'grid', views.GridSiteViewSet) router.register(r'gridsync', views.GridSiteSyncViewSet) +router.register(r'gridsync', views.GridSiteSyncSubmitHViewSet) -# Necessary to retrieve one site only urlpatterns = [ re_path(r'^gridsync/(?P[a-zA-Z0-9-]+)/$', views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), name='gridsync_singlesite'), + re_path(r'^gridsync/(?P[a-zA-Z0-9-]+)/(?P[0-9-]+)/$', views.GridSiteSyncSubmitHViewSet.as_view({'get': 'retrieve'}), name='gridsync_submithost'), ] urlpatterns += router.urls From 7db5eba1cf79654b532ae5bed4df9405be46633b Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 12:13:34 +0000 Subject: [PATCH 059/183] Trim trailing whitespaces. 
--- monitoring/publishing/models.py | 22 ++--- monitoring/publishing/serializers.py | 2 +- monitoring/publishing/views.py | 124 +++++++++++++-------------- monitoring/settings.py | 2 +- 4 files changed, 75 insertions(+), 75 deletions(-) diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index 1978624a..26bbb69a 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -15,10 +15,10 @@ class VSuperSummaries(models.Model): LatestPublish = models.DateTimeField() Month = models.IntegerField() Year = models.IntegerField() - RecordStart = models.DateTimeField() - RecordEnd = models.DateTimeField() - RecordCountPublished = models.IntegerField() - + RecordStart = models.DateTimeField() + RecordEnd = models.DateTimeField() + RecordCountPublished = models.IntegerField() + class Meta: managed = False db_table = 'VSuperSummaries' @@ -30,8 +30,8 @@ class GridSiteSync(models.Model): YearMonth = models.CharField(max_length=255) Year = models.IntegerField() Month = models.IntegerField() - RecordStart = models.DateTimeField() - RecordEnd = models.DateTimeField() + RecordStart = models.DateTimeField() + RecordEnd = models.DateTimeField() RecordCountPublished = models.IntegerField() RecordCountInDb = models.IntegerField() SyncStatus = models.CharField(max_length=255) @@ -40,7 +40,7 @@ class Meta: # Descending order of Year and Month to display latest data first ordering = ('SiteName', '-Year', '-Month') unique_together = ('SiteName', 'YearMonth') - + class VSyncRecords(models.Model): Site = models.CharField(max_length=255, primary_key=True) @@ -78,16 +78,16 @@ def __str__(self): self.CloudType, self.UpdateTime, self.VMs) - - + + class GridSiteSyncSubmitH(models.Model): fetched = models.DateTimeField(auto_now=True) SiteName = models.CharField(max_length=255) YearMonth = models.CharField(max_length=255) Year = models.IntegerField() Month = models.IntegerField() - RecordStart = models.DateTimeField() - RecordEnd = 
models.DateTimeField() + RecordStart = models.DateTimeField() + RecordEnd = models.DateTimeField() RecordCountPublished = models.IntegerField() RecordCountInDb = models.IntegerField() SubmitHost = models.CharField(max_length=255) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 18bf877c..f01fee01 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -49,5 +49,5 @@ class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = GridSiteSyncSubmitH fields = ('url', 'SiteName', 'YearMonth', 'RecordStart', 'RecordEnd', 'RecordCountPublished', 'RecordCountInDb', 'SubmitHost') - + lookup_fields = ('SiteName', 'YearMonth') \ No newline at end of file diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 248df2c9..7abb7a34 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -31,7 +31,7 @@ def list(self, request): final_response = [] response = super(GridSiteViewSet, self).list(request) - + for single_dict in response.data: date = single_dict.get('updated').replace(tzinfo=None) @@ -46,13 +46,13 @@ def list(self, request): single_dict['returncode'] = 3 single_dict['stdout'] = "UNKNOWN" final_response.append(single_dict) - + if type(request.accepted_renderer) is TemplateHTMLRenderer: response.data = {'sites': final_response, 'last_fetched': last_fetched} return response - + def retrieve(self, request, pk=None): last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] # If there's no data then last_fetched is None. 
@@ -92,14 +92,14 @@ def retrieve(self, request, pk=None): class GridSiteSyncViewSet(viewsets.ReadOnlyModelViewSet): queryset = GridSiteSync.objects.all() serializer_class = GridSiteSyncSerializer - lookup_field = 'SiteName' + lookup_field = 'SiteName' - # When a single site is showed (retrieve function used), the template + # When a single site is showed (retrieve function used), the template # is different than the one used when showing a list of sites def get_template_names(self): - if self.action == 'list': + if self.action == 'list': return ['gridsync.html'] - elif self.action == 'retrieve': + elif self.action == 'retrieve': return ['gridsync_singlesite.html'] # Combine Year and Month into one string (display purposes) @@ -110,10 +110,10 @@ def get_year_month_string(self, year, month): month_string = '0'+month_string return year_string+ '-' +month_string - def list(self, request): + def list(self, request): last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] n_sites = GridSiteSync.objects.values('SiteName').distinct().count() - + if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or n_sites==1: @@ -121,25 +121,25 @@ def list(self, request): # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01' GROUP BY Site, Year, Month;") - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") + 
fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") # Create empty dicts that will become dfs to be combined - Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"RecordCountInDb":[]} - + Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"RecordCountInDb":[]} + # Fill the dicts with the fetched data for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] for row in fetchset_SyncRecords: - SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] - SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] - SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] + SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] + SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] + SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] # Merge data from VSuperSummaries and VSyncRecords into one df 
@@ -163,12 +163,12 @@ def list(self, request): # Combined primary keys outside the default dict GridSiteSync.objects.update_or_create( defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), + 'RecordCountInDb': f.get("RecordCountInDb"), 'SyncStatus': f.get("SyncStatus"), - }, + }, YearMonth = self.get_year_month_string(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), @@ -183,10 +183,10 @@ def list(self, request): return response - + def retrieve(self, request, SiteName=None): lookup_field = 'SiteName' - last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] + last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] row_1 = GridSiteSync.objects.filter()[:1].get() n_sites = GridSiteSync.objects.values('SiteName').distinct().count() @@ -197,23 +197,23 @@ def retrieve(self, request, SiteName=None): # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01'GROUP BY Site, Month, Year;".format(SiteName)) - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) - Summaries_dict = 
{"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[], "RecordCountInDb":[]} + Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[], "RecordCountInDb":[]} for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] for row in fetchset_SyncRecords: - SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] - SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] - SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] + SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] + SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] + SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] df_Summaries = pd.DataFrame.from_dict(Summaries_dict) @@ -237,12 +237,12 @@ def retrieve(self, request, SiteName=None): GridSiteSync.objects.update_or_create( defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), + 'RecordCountInDb': 
f.get("RecordCountInDb"), 'SyncStatus': f.get("SyncStatus"), - }, + }, YearMonth = self.get_year_month_string(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), @@ -255,7 +255,7 @@ def retrieve(self, request, SiteName=None): response = super(GridSiteSyncViewSet, self).list(request) response.data = {'records': response.data, 'last_fetched': last_fetched} return response - + # Needed for passing two parameters to a viewset (GridSiteSyncSubmitHViewSet) class MultipleFieldLookupMixin: @@ -264,13 +264,13 @@ class MultipleFieldLookupMixin: based on a `lookup_fields` attribute, instead of the default single field filtering. """ def get_object(self): - queryset = self.get_queryset() - queryset = self.filter_queryset(queryset) + queryset = self.get_queryset() + queryset = self.filter_queryset(queryset) filter = {} for field in self.lookup_fields: if self.kwargs.get(field): filter[field] = self.kwargs[field] - obj = get_object_or_404(queryset, **filter) + obj = get_object_or_404(queryset, **filter) self.check_object_permissions(self.request, obj) return obj @@ -279,13 +279,13 @@ class GridSiteSyncSubmitHViewSet(MultipleFieldLookupMixin, viewsets.ReadOnlyMode queryset = GridSiteSyncSubmitH.objects.all() serializer_class = GridSiteSyncSubmitHSerializer template_name = 'gridsync_submithost.html' - - def list(self, request): + + def list(self, request): response = super(GridSiteSyncSubmitHViewSet, self).list(request) response.data = {'submisthosts': response.data, 'last_fetched': last_fetched} return response - def retrieve(self, request, SiteName=None, YearMonth=None): + def retrieve(self, request, SiteName=None, YearMonth=None): lookup_fields = ('SiteName', 'YearMonth') last_fetched = GridSiteSyncSubmitH.objects.aggregate(Max('fetched'))['fetched__max'] @@ -293,10 +293,10 @@ def retrieve(self, request, SiteName=None, YearMonth=None): sitename_in_table = None yearmonth_in_table = None - # This is to ensure the data is updated when changing (or 
clicking on another) month + # This is to ensure the data is updated when changing (or clicking on another) month if GridSiteSyncSubmitH.objects.count() > 0: row_1 = GridSiteSyncSubmitH.objects.filter()[:1].get() - sitename_in_table = row_1.SiteName + sitename_in_table = row_1.SiteName yearmonth_in_table = row_1.YearMonth if last_fetched is not None: @@ -304,26 +304,26 @@ def retrieve(self, request, SiteName=None, YearMonth=None): if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or (sitename_in_table!=SiteName) or (yearmonth_in_table!=YearMonth) : print('Out of date') - fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) + fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) - Summaries_dict = {"Site":[], "Month":[], "Year":[], "SubmitHostSumm":[], 
"RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"SubmitHostSync":[], "RecordCountInDb":[]} + Summaries_dict = {"Site":[], "Month":[], "Year":[], "SubmitHostSumm":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"SubmitHostSync":[], "RecordCountInDb":[]} for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] Summaries_dict["SubmitHostSumm"] = Summaries_dict.get("SubmitHostSumm") + [row.SubmitHostSumm] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] for row in fetchset_SyncRecords: - SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] - SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] + SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] + SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] - SyncRecords_dict["SubmitHostSync"] = SyncRecords_dict.get("SubmitHostSync") + [row.SubmitHostSync] + SyncRecords_dict["SubmitHostSync"] = SyncRecords_dict.get("SubmitHostSync") + [row.SubmitHostSync] SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] df_Summaries = pd.DataFrame.from_dict(Summaries_dict) @@ -333,10 +333,10 @@ def retrieve(self, 
request, SiteName=None, YearMonth=None): df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year', 'SubmitHostSumm'], right_on=['Site', 'Month', 'Year', 'SubmitHostSync'], how='outer') fetchset = df_all.to_dict('index') - - # This is to list only data for one month + + # This is to list only data for one month GridSiteSyncSubmitH.objects.all().delete() - + def get_year_month_string(year, month): year_string = str(year) month_string = str(month) @@ -347,13 +347,13 @@ def get_year_month_string(year, month): for f in fetchset.values(): GridSiteSyncSubmitH.objects.update_or_create( defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), - }, + 'RecordCountInDb': f.get("RecordCountInDb"), + }, SiteName=f.get("Site"), - YearMonth = get_year_month_string(f.get("Year"), f.get("Month")), + YearMonth = get_year_month_string(f.get("Year"), f.get("Month")), Month=f.get("Month"), Year=f.get("Year"), SubmitHost = f.get("SubmitHostSumm"), @@ -365,7 +365,7 @@ def get_year_month_string(year, month): response = super(GridSiteSyncSubmitHViewSet, self).list(request) response.data = {'submisthosts': response.data, 'last_fetched': last_fetched} return response - + class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): diff --git a/monitoring/settings.py b/monitoring/settings.py index e1da5662..89849fb3 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -143,4 +143,4 @@ # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' -STATIC_ROOT = os.path.join(BASE_DIR, "static/") \ No newline at end of file +STATIC_ROOT = os.path.join(BASE_DIR, "static/") \ No newline at end of file From bc1a8ac86d7191d30602b07c714aa97ad33d2564 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 12:20:13 +0000 Subject: [PATCH 060/183] 
Add newline at end of file. --- monitoring/__init__.py | 2 +- monitoring/publishing/models.py | 2 +- monitoring/publishing/serializers.py | 2 +- monitoring/publishing/views.py | 2 +- monitoring/settings.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/monitoring/__init__.py b/monitoring/__init__.py index c45523b2..063cd2cc 100644 --- a/monitoring/__init__.py +++ b/monitoring/__init__.py @@ -1,2 +1,2 @@ import pymysql -pymysql.install_as_MySQLdb() \ No newline at end of file +pymysql.install_as_MySQLdb() diff --git a/monitoring/publishing/models.py b/monitoring/publishing/models.py index 26bbb69a..195872db 100644 --- a/monitoring/publishing/models.py +++ b/monitoring/publishing/models.py @@ -94,4 +94,4 @@ class GridSiteSyncSubmitH(models.Model): class Meta: ordering = ('SiteName', '-Year', '-Month') - unique_together = ('SiteName', 'YearMonth', 'SubmitHost') \ No newline at end of file + unique_together = ('SiteName', 'YearMonth', 'SubmitHost') diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index f01fee01..20ed09fc 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -50,4 +50,4 @@ class Meta: model = GridSiteSyncSubmitH fields = ('url', 'SiteName', 'YearMonth', 'RecordStart', 'RecordEnd', 'RecordCountPublished', 'RecordCountInDb', 'SubmitHost') - lookup_fields = ('SiteName', 'YearMonth') \ No newline at end of file + lookup_fields = ('SiteName', 'YearMonth') diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 7abb7a34..0fe7ac0e 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -411,4 +411,4 @@ def retrieve(self, request, pk=None): response.data['returncode'] = 3 response.data['stdout'] = "UNKNOWN" - return response \ No newline at end of file + return response diff --git a/monitoring/settings.py b/monitoring/settings.py index 89849fb3..a476471c 100644 --- a/monitoring/settings.py +++ 
b/monitoring/settings.py @@ -143,4 +143,4 @@ # https://docs.djangoproject.com/en/1.11/howto/static-files/ STATIC_URL = '/static/' -STATIC_ROOT = os.path.join(BASE_DIR, "static/") \ No newline at end of file +STATIC_ROOT = os.path.join(BASE_DIR, "static/") From c611943672ba4888a6b8380b52865bd061589fbb Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 12:30:16 +0000 Subject: [PATCH 061/183] Remove whitespace(s) where not necessary or too many. --- monitoring/publishing/views.py | 26 +++++++++++++------------- monitoring/settings.py | 2 +- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 0fe7ac0e..85f5b51f 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -124,16 +124,16 @@ def list(self, request): fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") # Create empty dicts that will become dfs to be combined - Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"RecordCountInDb":[]} + Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"RecordCountInDb":[]} # Fill the dicts with the fetched data for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordCountPublished"] = 
Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] for row in fetchset_SyncRecords: @@ -199,15 +199,15 @@ def retrieve(self, request, SiteName=None): fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01'GROUP BY Site, Month, Year;".format(SiteName)) fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) - Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[], "RecordCountInDb":[]} + Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[], "RecordCountInDb":[]} for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] for 
row in fetchset_SyncRecords: @@ -301,22 +301,22 @@ def retrieve(self, request, SiteName=None, YearMonth=None): if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or (sitename_in_table!=SiteName) or (yearmonth_in_table!=YearMonth) : + if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or (sitename_in_table!=SiteName) or (yearmonth_in_table!=YearMonth): print('Out of date') fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) - Summaries_dict = {"Site":[], "Month":[], "Year":[], "SubmitHostSumm":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"SubmitHostSync":[], "RecordCountInDb":[]} + Summaries_dict = {"Site":[], "Month":[], "Year":[], "SubmitHostSumm":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} + SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"SubmitHostSync":[], "RecordCountInDb":[]} for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] Summaries_dict["SubmitHostSumm"] = 
Summaries_dict.get("SubmitHostSumm") + [row.SubmitHostSumm] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] + Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] + Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] for row in fetchset_SyncRecords: diff --git a/monitoring/settings.py b/monitoring/settings.py index a476471c..5d3d44f5 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -25,7 +25,7 @@ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True -ALLOWED_HOSTS = [ '127.0.0.1', 'localhost'] +ALLOWED_HOSTS = ['127.0.0.1', 'localhost'] # Application definition From 2dbb9cfaa3b21a8f5269bbceafb01d5a76875288 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 12:38:30 +0000 Subject: [PATCH 062/183] Add whitespace(s) around operators where necessary. 
--- monitoring/publishing/views.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 85f5b51f..be8a1cce 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -106,9 +106,9 @@ def get_template_names(self): def get_year_month_string(self, year, month): year_string = str(year) month_string = str(month) - if len(month_string)==1: - month_string = '0'+month_string - return year_string+ '-' +month_string + if len(month_string) == 1: + month_string = '0' + month_string + return year_string + '-' + month_string def list(self, request): last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] @@ -116,7 +116,7 @@ def list(self, request): if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or n_sites==1: + if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or n_sites == 1: print('Out of date') # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' @@ -156,9 +156,9 @@ def list(self, request): rel_diff1 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountInDb")) rel_diff2 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountPublished")) if rel_diff1 < 0.01 or rel_diff2 < 0.01: - f['SyncStatus']='OK' + f['SyncStatus'] = 'OK' else: - f['SyncStatus']='Error' + f['SyncStatus'] = 'Error' # Combined primary keys outside the default dict GridSiteSync.objects.update_or_create( @@ -231,9 +231,9 @@ def retrieve(self, request, SiteName=None): rel_diff1 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountInDb")) rel_diff2 = 
abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountPublished")) if rel_diff1 <= 0.01 or rel_diff2 <= 0.01: - f['SyncStatus']='OK' + f['SyncStatus'] = 'OK' else: - f['SyncStatus']='Error' + f['SyncStatus'] = 'Error' GridSiteSync.objects.update_or_create( defaults={ @@ -301,7 +301,7 @@ def retrieve(self, request, SiteName=None, YearMonth=None): if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or (sitename_in_table!=SiteName) or (yearmonth_in_table!=YearMonth): + if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or (sitename_in_table != SiteName) or (yearmonth_in_table != YearMonth): print('Out of date') fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) @@ -340,9 +340,9 @@ def retrieve(self, request, SiteName=None, YearMonth=None): def get_year_month_string(year, month): year_string = str(year) month_string = str(month) - if len(month_string)==1: - month_string = '0'+month_string - return year_string+ '-' +month_string + if len(month_string) == 1: + month_string = '0' + month_string + return year_string + '-' + month_string for f in fetchset.values(): GridSiteSyncSubmitH.objects.update_or_create( From d55d6ab339929cf2982e355970d8a0e1f0d7b593 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 12:47:26 +0000 Subject: [PATCH 063/183] Remove spaces around keyword / parameter equals. 
--- monitoring/publishing/views.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index be8a1cce..b4b6856f 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -169,7 +169,7 @@ def list(self, request): 'RecordCountInDb': f.get("RecordCountInDb"), 'SyncStatus': f.get("SyncStatus"), }, - YearMonth = self.get_year_month_string(f.get("Year"), f.get("Month")), + YearMonth=self.get_year_month_string(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), Year=f.get("Year"), @@ -243,7 +243,7 @@ def retrieve(self, request, SiteName=None): 'RecordCountInDb': f.get("RecordCountInDb"), 'SyncStatus': f.get("SyncStatus"), }, - YearMonth = self.get_year_month_string(f.get("Year"), f.get("Month")), + YearMonth=self.get_year_month_string(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), Year=f.get("Year"), @@ -353,10 +353,10 @@ def get_year_month_string(year, month): 'RecordCountInDb': f.get("RecordCountInDb"), }, SiteName=f.get("Site"), - YearMonth = get_year_month_string(f.get("Year"), f.get("Month")), + YearMonth=get_year_month_string(f.get("Year"), f.get("Month")), Month=f.get("Month"), Year=f.get("Year"), - SubmitHost = f.get("SubmitHostSumm"), + SubmitHost=f.get("SubmitHostSumm"), ) else: From 964d49886d9f2b288feb867d2187a1d49f55de30 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 12:56:23 +0000 Subject: [PATCH 064/183] Add space after punctuation where necessary. 
--- monitoring/publishing/views.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index b4b6856f..3a994214 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -124,8 +124,8 @@ def list(self, request): fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") # Create empty dicts that will become dfs to be combined - Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"RecordCountInDb":[]} + Summaries_dict = {"Site": [], "Month": [], "Year": [], "RecordCountPublished": [], "RecordStart": [], "RecordEnd": []} + SyncRecords_dict = {"Site": [], "Month": [], "Year": [], "RecordCountInDb": []} # Fill the dicts with the fetched data for row in fetchset_Summaries: @@ -199,8 +199,8 @@ def retrieve(self, request, SiteName=None): fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01'GROUP BY Site, Month, Year;".format(SiteName)) fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) - Summaries_dict = {"Site":[], "Month":[], "Year":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[], "RecordCountInDb":[]} + Summaries_dict = {"Site": [], "Month": [], "Year": [], "RecordCountPublished": [], "RecordStart": [], "RecordEnd": []} + SyncRecords_dict = {"Site": [], "Month": 
[], "Year": [], "RecordCountInDb": []} for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] @@ -307,8 +307,8 @@ def retrieve(self, request, SiteName=None, YearMonth=None): fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) - Summaries_dict = {"Site":[], "Month":[], "Year":[], "SubmitHostSumm":[], "RecordCountPublished":[],"RecordStart":[], "RecordEnd":[]} - SyncRecords_dict = {"Site":[], "Month":[], "Year":[],"SubmitHostSync":[], "RecordCountInDb":[]} + Summaries_dict = {"Site": [], "Month": [], "Year": [], "SubmitHostSumm": [], "RecordCountPublished": [], "RecordStart": [], "RecordEnd": []} + SyncRecords_dict = {"Site": [], "Month": [], "Year": [], "SubmitHostSync": [], "RecordCountInDb": []} for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] From 581cd046bd4997d9d05c6646c4da3bc56c2aa61b Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 13:02:54 +0000 Subject: [PATCH 065/183] Remove unnecessary blank lines. 
--- monitoring/publishing/views.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 3a994214..8843976f 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -52,7 +52,6 @@ def list(self, request): return response - def retrieve(self, request, pk=None): last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] # If there's no data then last_fetched is None. @@ -88,7 +87,6 @@ def retrieve(self, request, pk=None): return response - class GridSiteSyncViewSet(viewsets.ReadOnlyModelViewSet): queryset = GridSiteSync.objects.all() serializer_class = GridSiteSyncSerializer @@ -182,8 +180,6 @@ def list(self, request): response.data = {'records': response.data, 'last_fetched': last_fetched} return response - - def retrieve(self, request, SiteName=None): lookup_field = 'SiteName' last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] @@ -367,7 +363,6 @@ def get_year_month_string(year, month): return response - class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = CloudSite.objects.all() serializer_class = CloudSiteSerializer From 2d3f9610d9522b0a12ac0acc3a7305614ae08927 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 13:05:43 +0000 Subject: [PATCH 066/183] Move import to the top of file. 
--- monitoring/availability/urls.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/availability/urls.py b/monitoring/availability/urls.py index edf402e3..12d45b8b 100644 --- a/monitoring/availability/urls.py +++ b/monitoring/availability/urls.py @@ -1,9 +1,9 @@ from django.conf.urls import url - import sys -sys.path.append('/usr/share/DJANGO_MONITORING_APP') from monitoring.availability import views +sys.path.append('/usr/share/DJANGO_MONITORING_APP') + urlpatterns = [ url(r'^$', views.status), ] From 58af9a8e8d4e6be0723fe0e8982ba146e7be3c14 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 13:08:39 +0000 Subject: [PATCH 067/183] Rearrange/refactor imports to shorten lines. --- monitoring/publishing/serializers.py | 7 ++++++- monitoring/publishing/views.py | 18 ++++++++++++++++-- 2 files changed, 22 insertions(+), 3 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 20ed09fc..23b74b04 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -1,6 +1,11 @@ from rest_framework import serializers -from monitoring.publishing.models import CloudSite, GridSite, GridSiteSync, GridSiteSyncSubmitH +from monitoring.publishing.models import ( + CloudSite, + GridSite, + GridSiteSync, + GridSiteSyncSubmitH +) class GridSiteSerializer(serializers.HyperlinkedModelSerializer): diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 8843976f..cb88d527 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -9,8 +9,22 @@ from rest_framework import viewsets, generics from rest_framework.renderers import TemplateHTMLRenderer -from monitoring.publishing.models import GridSite, VSuperSummaries, CloudSite, VAnonCloudRecord, GridSiteSync, VSyncRecords, GridSiteSyncSubmitH -from monitoring.publishing.serializers import GridSiteSerializer, CloudSiteSerializer, GridSiteSyncSerializer, 
GridSiteSyncSubmitHSerializer +from monitoring.publishing.models import ( + GridSite, + VSuperSummaries, + CloudSite, + VAnonCloudRecord, + GridSiteSync, + VSyncRecords, + GridSiteSyncSubmitH +) + +from monitoring.publishing.serializers import ( + GridSiteSerializer, + CloudSiteSerializer, + GridSiteSyncSerializer, + GridSiteSyncSubmitHSerializer +) class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): From 0cc3f30056aa34b2544c59782b83c957bf62c18a Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 13:10:22 +0000 Subject: [PATCH 068/183] Rearrange/refactor serialiser fields to shorten lines. --- monitoring/publishing/serializers.py | 36 ++++++++++++++++++++++++---- 1 file changed, 32 insertions(+), 4 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 23b74b04..de73fd13 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -16,7 +16,11 @@ class GridSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = GridSite - fields = ('url', 'SiteName', 'updated') + fields = ( + 'url', + 'SiteName', + 'updated' + ) class GridSiteSyncSerializer(serializers.HyperlinkedModelSerializer): @@ -26,7 +30,16 @@ class GridSiteSyncSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = GridSiteSync - fields = ('url', 'SiteName', 'YearMonth', 'RecordStart', 'RecordEnd', 'RecordCountPublished', 'RecordCountInDb', 'SyncStatus') + fields = ( + 'url', + 'SiteName', + 'YearMonth', + 'RecordStart', + 'RecordEnd', + 'RecordCountPublished', + 'RecordCountInDb', + 'SyncStatus' + ) # Sitename substitutes pk lookup_field = 'SiteName' @@ -43,7 +56,13 @@ class CloudSiteSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CloudSite - fields = ('url', 'SiteName', 'Vms', 'Script', 'updated') + fields = ( + 'url', + 'SiteName', + 'Vms', + 'Script', + 'updated' + ) class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): @@ 
-53,6 +72,15 @@ class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = GridSiteSyncSubmitH - fields = ('url', 'SiteName', 'YearMonth', 'RecordStart', 'RecordEnd', 'RecordCountPublished', 'RecordCountInDb', 'SubmitHost') + fields = ( + 'url', + 'SiteName', + 'YearMonth', + 'RecordStart', + 'RecordEnd', + 'RecordCountPublished', + 'RecordCountInDb', + 'SubmitHost' + ) lookup_fields = ('SiteName', 'YearMonth') From 0933fb800098bc3e1996f007ec904d0b39954561 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 13:15:36 +0000 Subject: [PATCH 069/183] Rearrange/refactor urlpatterns to shorten lines. --- monitoring/publishing/urls.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 449ded60..7f8d4e4a 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -12,8 +12,16 @@ router.register(r'gridsync', views.GridSiteSyncSubmitHViewSet) urlpatterns = [ - re_path(r'^gridsync/(?P[a-zA-Z0-9-]+)/$', views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), name='gridsync_singlesite'), - re_path(r'^gridsync/(?P[a-zA-Z0-9-]+)/(?P[0-9-]+)/$', views.GridSiteSyncSubmitHViewSet.as_view({'get': 'retrieve'}), name='gridsync_submithost'), + re_path( + r'^gridsync/(?P[a-zA-Z0-9-]+)/$', + views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), + name='gridsync_singlesite' + ), + re_path( + r'^gridsync/(?P[a-zA-Z0-9-]+)/(?P[0-9-]+)/$', + views.GridSiteSyncSubmitHViewSet.as_view({'get': 'retrieve'}), + name='gridsync_submithost' + ), ] urlpatterns += router.urls From 6982316e8c7b5d8211506c35d3f205ba3e5c0e09 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 13:38:35 +0000 Subject: [PATCH 070/183] Rearrange dicts to shorten lines. 
--- monitoring/publishing/views.py | 127 +++++++++++++++++++++++++-------- 1 file changed, 96 insertions(+), 31 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index cb88d527..818f61f5 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -62,7 +62,10 @@ def list(self, request): final_response.append(single_dict) if type(request.accepted_renderer) is TemplateHTMLRenderer: - response.data = {'sites': final_response, 'last_fetched': last_fetched} + response.data = { + 'sites': final_response, + 'last_fetched': last_fetched + } return response @@ -85,7 +88,10 @@ def retrieve(self, request, pk=None): # Wrap data in a dict so that it can display in template. if type(request.accepted_renderer) is TemplateHTMLRenderer: # Single result put in list to work with same HTML template. - response.data = {'sites': [response.data], 'last_fetched': last_fetched} + response.data = { + 'sites': [response.data], + 'last_fetched': last_fetched + } diff = datetime.today() - date if diff <= timedelta(days=7): @@ -136,8 +142,21 @@ def list(self, request): fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") # Create empty dicts that will become dfs to be combined - Summaries_dict = {"Site": [], "Month": [], "Year": [], "RecordCountPublished": [], "RecordStart": [], "RecordEnd": []} - SyncRecords_dict = {"Site": [], "Month": [], "Year": [], "RecordCountInDb": []} + Summaries_dict = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountPublished": [], + "RecordStart": [], + "RecordEnd": [] + } + + SyncRecords_dict = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountInDb": [] + } # Fill the dicts with the fetched data for row in fetchset_Summaries: @@ -175,12 +194,12 @@ def list(self, request): # Combined primary keys outside the default dict GridSiteSync.objects.update_or_create( 
defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), - 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), - 'SyncStatus': f.get("SyncStatus"), - }, + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), + 'RecordCountPublished': f.get("RecordCountPublished"), + 'RecordCountInDb': f.get("RecordCountInDb"), + 'SyncStatus': f.get("SyncStatus"), + }, YearMonth=self.get_year_month_string(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), @@ -191,7 +210,10 @@ def list(self, request): print('No need to update') response = super(GridSiteSyncViewSet, self).list(request) - response.data = {'records': response.data, 'last_fetched': last_fetched} + response.data = { + 'records': response.data, + 'last_fetched': last_fetched + } return response def retrieve(self, request, SiteName=None): @@ -209,8 +231,21 @@ def retrieve(self, request, SiteName=None): fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01'GROUP BY Site, Month, Year;".format(SiteName)) fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) - Summaries_dict = {"Site": [], "Month": [], "Year": [], "RecordCountPublished": [], "RecordStart": [], "RecordEnd": []} - SyncRecords_dict = {"Site": [], "Month": [], "Year": [], "RecordCountInDb": []} + Summaries_dict = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountPublished": [], + "RecordStart": [], + "RecordEnd": [] + } + + SyncRecords_dict = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountInDb": [] + } for row in 
fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] @@ -247,12 +282,12 @@ def retrieve(self, request, SiteName=None): GridSiteSync.objects.update_or_create( defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), - 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), - 'SyncStatus': f.get("SyncStatus"), - }, + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), + 'RecordCountPublished': f.get("RecordCountPublished"), + 'RecordCountInDb': f.get("RecordCountInDb"), + 'SyncStatus': f.get("SyncStatus"), + }, YearMonth=self.get_year_month_string(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), @@ -263,7 +298,10 @@ def retrieve(self, request, SiteName=None): print('No need to update') response = super(GridSiteSyncViewSet, self).list(request) - response.data = {'records': response.data, 'last_fetched': last_fetched} + response.data = { + 'records': response.data, + 'last_fetched': last_fetched + } return response @@ -292,7 +330,10 @@ class GridSiteSyncSubmitHViewSet(MultipleFieldLookupMixin, viewsets.ReadOnlyMode def list(self, request): response = super(GridSiteSyncSubmitHViewSet, self).list(request) - response.data = {'submisthosts': response.data, 'last_fetched': last_fetched} + response.data = { + 'submisthosts': response.data, + 'last_fetched': last_fetched + } return response def retrieve(self, request, SiteName=None, YearMonth=None): @@ -317,8 +358,23 @@ def retrieve(self, request, SiteName=None, YearMonth=None): fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) fetchset_SyncRecords = 
VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) - Summaries_dict = {"Site": [], "Month": [], "Year": [], "SubmitHostSumm": [], "RecordCountPublished": [], "RecordStart": [], "RecordEnd": []} - SyncRecords_dict = {"Site": [], "Month": [], "Year": [], "SubmitHostSync": [], "RecordCountInDb": []} + Summaries_dict = { + "Site": [], + "Month": [], + "Year": [], + "SubmitHostSumm": [], + "RecordCountPublished": [], + "RecordStart": [], + "RecordEnd": [] + } + + SyncRecords_dict = { + "Site": [], + "Month": [], + "Year": [], + "SubmitHostSync": [], + "RecordCountInDb": [] + } for row in fetchset_Summaries: Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] @@ -357,11 +413,11 @@ def get_year_month_string(year, month): for f in fetchset.values(): GridSiteSyncSubmitH.objects.update_or_create( defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), - 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), - }, + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), + 'RecordCountPublished': f.get("RecordCountPublished"), + 'RecordCountInDb': f.get("RecordCountInDb"), + }, SiteName=f.get("Site"), YearMonth=get_year_month_string(f.get("Year"), f.get("Month")), Month=f.get("Month"), @@ -373,7 +429,10 @@ def get_year_month_string(year, month): print('No need to update') response = super(GridSiteSyncSubmitHViewSet, self).list(request) - response.data = {'submisthosts': response.data, 'last_fetched': last_fetched} + response.data = { + 'submisthosts': response.data, + 'last_fetched': last_fetched + } return response @@ -397,7 +456,10 @@ def list(self, request): response = super(CloudSiteViewSet, self).list(request) # Wrap data in a dict so that it can display in template. 
if type(request.accepted_renderer) is TemplateHTMLRenderer: - response.data = {'sites': response.data, 'last_fetched': last_fetched} + response.data = { + 'sites': response.data, + 'last_fetched': last_fetched + } return response def retrieve(self, request, pk=None): @@ -415,7 +477,10 @@ def retrieve(self, request, pk=None): # Wrap data in a dict so that it can display in template. if type(request.accepted_renderer) is TemplateHTMLRenderer: # Single result put in list to work with same HTML template. - response.data = {'sites': [response.data], 'last_fetched': last_fetched} + response.data = { + 'sites': [response.data], + 'last_fetched': last_fetched + } response.data['returncode'] = 3 response.data['stdout'] = "UNKNOWN" From c10e9e92322c51093ce84a3c028ccea60ef72773 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 14:07:56 +0000 Subject: [PATCH 071/183] Refactor parts to create/fill dicts, to reduce duplication and file length. --- monitoring/publishing/views.py | 165 +++++++++++++++------------------ 1 file changed, 76 insertions(+), 89 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 818f61f5..37a16083 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -26,6 +26,57 @@ GridSiteSyncSubmitHSerializer ) +summaries_dict_standard = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountPublished": [], + "RecordStart": [], + "RecordEnd": [], + "SubmitHostSumm": [], +} + +syncrecords_dict_standard = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountInDb": [], + "SubmitHostSync": [] +} + + +def fill_summaries_dict(inpDict, row): + inpDict["Site"] = inpDict.get("Site") + [row.Site] + inpDict["Month"] = inpDict.get("Month") + [row.Month] + inpDict["Year"] = inpDict.get("Year") + [row.Year] + inpDict["RecordCountPublished"] = inpDict.get("RecordCountPublished") + [row.RecordCountPublished] + inpDict["RecordStart"] = inpDict.get("RecordStart") + [row.RecordStart] + 
inpDict["RecordEnd"] = inpDict.get("RecordEnd") + [row.RecordEnd] + if hasattr(row, "SubmitHostSumm"): + inpDict["SubmitHostSumm"] = inpDict.get("SubmitHostSumm") + [row.SubmitHostSumm] + + return inpDict + + +def fill_syncrecords_dict(inpDict, row): + inpDict["Site"] = inpDict.get("Site") + [row.Site] + inpDict["Month"] = inpDict.get("Month") + [row.Month] + inpDict["Year"] = inpDict.get("Year") + [row.Year] + inpDict["RecordCountInDb"] = inpDict.get("RecordCountInDb") + [row.RecordCountInDb] + if hasattr(row, "SubmitHostSync"): + inpDict["SubmitHostSync"] = inpDict.get("SubmitHostSync") + [row.SubmitHostSync] + return inpDict + + +def correct_dict(inpDict): + keys_to_remove = [] + for key, val in inpDict.items(): + if len(val) == 0: + keys_to_remove.append(key) + for key in keys_to_remove: + inpDict.pop(key) + return inpDict + class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = GridSite.objects.all() @@ -142,40 +193,20 @@ def list(self, request): fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") # Create empty dicts that will become dfs to be combined - Summaries_dict = { - "Site": [], - "Month": [], - "Year": [], - "RecordCountPublished": [], - "RecordStart": [], - "RecordEnd": [] - } - - SyncRecords_dict = { - "Site": [], - "Month": [], - "Year": [], - "RecordCountInDb": [] - } + summaries_dict = summaries_dict_standard.copy() + syncrecords_dict = syncrecords_dict_standard.copy() # Fill the dicts with the fetched data for row in fetchset_Summaries: - Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] - Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] - Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") 
+ [row.RecordStart] - Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] - + summaries_dict = fill_summaries_dict(summaries_dict, row) + summaries_dict = correct_dict(summaries_dict) for row in fetchset_SyncRecords: - SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] - SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] - SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] - SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] + syncrecords_dict = fill_syncrecords_dict(syncrecords_dict, row) + syncrecords_dict = correct_dict(syncrecords_dict) # Merge data from VSuperSummaries and VSyncRecords into one df - df_Summaries = pd.DataFrame.from_dict(Summaries_dict) - df_SyncRecords = pd.DataFrame.from_dict(SyncRecords_dict) + df_Summaries = pd.DataFrame.from_dict(summaries_dict) + df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year'], right_on=['Site', 'Month', 'Year'], how='inner') fetchset = df_all.to_dict('index') @@ -231,38 +262,18 @@ def retrieve(self, request, SiteName=None): fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01'GROUP BY Site, Month, Year;".format(SiteName)) fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) - Summaries_dict = { - "Site": [], - "Month": [], - "Year": [], - "RecordCountPublished": [], - "RecordStart": [], - "RecordEnd": [] - } - - SyncRecords_dict = { - "Site": [], - "Month": [], - "Year": [], - "RecordCountInDb": [] - } + 
summaries_dict = summaries_dict_standard.copy() + syncrecords_dict = syncrecords_dict_standard.copy() for row in fetchset_Summaries: - Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] - Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] - Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] - Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] - + summaries_dict = fill_summaries_dict(summaries_dict, row) + summaries_dict = correct_dict(summaries_dict) for row in fetchset_SyncRecords: - SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] - SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] - SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] - SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] + syncrecords_dict = fill_syncrecords_dict(syncrecords_dict, row) + syncrecords_dict = correct_dict(syncrecords_dict) - df_Summaries = pd.DataFrame.from_dict(Summaries_dict) - df_SyncRecords = pd.DataFrame.from_dict(SyncRecords_dict) + df_Summaries = pd.DataFrame.from_dict(summaries_dict) + df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year'], right_on=['Site', 'Month', 'Year'], how='inner') fetchset = df_all.to_dict('index') @@ -358,42 +369,18 @@ def retrieve(self, request, SiteName=None, YearMonth=None): fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY 
SubmitHost;".format(SiteName, Month, Year)) fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) - Summaries_dict = { - "Site": [], - "Month": [], - "Year": [], - "SubmitHostSumm": [], - "RecordCountPublished": [], - "RecordStart": [], - "RecordEnd": [] - } - - SyncRecords_dict = { - "Site": [], - "Month": [], - "Year": [], - "SubmitHostSync": [], - "RecordCountInDb": [] - } + summaries_dict = summaries_dict_standard.copy() + syncrecords_dict = syncrecords_dict_standard.copy() for row in fetchset_Summaries: - Summaries_dict["Site"] = Summaries_dict.get("Site") + [row.Site] - Summaries_dict["Month"] = Summaries_dict.get("Month") + [row.Month] - Summaries_dict["Year"] = Summaries_dict.get("Year") + [row.Year] - Summaries_dict["SubmitHostSumm"] = Summaries_dict.get("SubmitHostSumm") + [row.SubmitHostSumm] - Summaries_dict["RecordCountPublished"] = Summaries_dict.get("RecordCountPublished") + [row.RecordCountPublished] - Summaries_dict["RecordStart"] = Summaries_dict.get("RecordStart") + [row.RecordStart] - Summaries_dict["RecordEnd"] = Summaries_dict.get("RecordEnd") + [row.RecordEnd] - + summaries_dict = fill_summaries_dict(summaries_dict, row) + summaries_dict = correct_dict(summaries_dict) for row in fetchset_SyncRecords: - SyncRecords_dict["Site"] = SyncRecords_dict.get("Site") + [row.Site] - SyncRecords_dict["Month"] = SyncRecords_dict.get("Month") + [row.Month] - SyncRecords_dict["Year"] = SyncRecords_dict.get("Year") + [row.Year] - SyncRecords_dict["SubmitHostSync"] = SyncRecords_dict.get("SubmitHostSync") + [row.SubmitHostSync] - SyncRecords_dict["RecordCountInDb"] = SyncRecords_dict.get("RecordCountInDb") + [row.RecordCountInDb] - - df_Summaries = pd.DataFrame.from_dict(Summaries_dict) - df_SyncRecords = pd.DataFrame.from_dict(SyncRecords_dict) 
+ syncrecords_dict = fill_syncrecords_dict(syncrecords_dict, row) + syncrecords_dict = correct_dict(syncrecords_dict) + + df_Summaries = pd.DataFrame.from_dict(summaries_dict) + df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) df_Summaries.dropna(inplace=True) df_SyncRecords.dropna(inplace=True) From d8c409ed26d4330a2e93e96a2700527414a9d266 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 14:24:57 +0000 Subject: [PATCH 072/183] Bring functions outside class to avoid duplication. --- monitoring/publishing/views.py | 30 ++++++++++++------------------ 1 file changed, 12 insertions(+), 18 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 37a16083..db10f1f7 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -78,6 +78,15 @@ def correct_dict(inpDict): return inpDict +# Combine Year and Month into one string (display purposes) +def get_year_month_str(year, month): + year_string = str(year) + month_string = str(month) + if len(month_string) == 1: + month_string = '0' + month_string + return year_string + '-' + month_string + + class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = GridSite.objects.all() serializer_class = GridSiteSerializer @@ -171,14 +180,6 @@ def get_template_names(self): elif self.action == 'retrieve': return ['gridsync_singlesite.html'] - # Combine Year and Month into one string (display purposes) - def get_year_month_string(self, year, month): - year_string = str(year) - month_string = str(month) - if len(month_string) == 1: - month_string = '0' + month_string - return year_string + '-' + month_string - def list(self, request): last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] n_sites = GridSiteSync.objects.values('SiteName').distinct().count() @@ -231,7 +232,7 @@ def list(self, request): 'RecordCountInDb': f.get("RecordCountInDb"), 'SyncStatus': f.get("SyncStatus"), }, - 
YearMonth=self.get_year_month_string(f.get("Year"), f.get("Month")), + YearMonth=get_year_month_str(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), Year=f.get("Year"), @@ -299,7 +300,7 @@ def retrieve(self, request, SiteName=None): 'RecordCountInDb': f.get("RecordCountInDb"), 'SyncStatus': f.get("SyncStatus"), }, - YearMonth=self.get_year_month_string(f.get("Year"), f.get("Month")), + YearMonth=get_year_month_str(f.get("Year"), f.get("Month")), SiteName=f.get("Site"), Month=f.get("Month"), Year=f.get("Year"), @@ -390,13 +391,6 @@ def retrieve(self, request, SiteName=None, YearMonth=None): # This is to list only data for one month GridSiteSyncSubmitH.objects.all().delete() - def get_year_month_string(year, month): - year_string = str(year) - month_string = str(month) - if len(month_string) == 1: - month_string = '0' + month_string - return year_string + '-' + month_string - for f in fetchset.values(): GridSiteSyncSubmitH.objects.update_or_create( defaults={ @@ -406,7 +400,7 @@ def get_year_month_string(year, month): 'RecordCountInDb': f.get("RecordCountInDb"), }, SiteName=f.get("Site"), - YearMonth=get_year_month_string(f.get("Year"), f.get("Month")), + YearMonth=get_year_month_str(f.get("Year"), f.get("Month")), Month=f.get("Month"), Year=f.get("Year"), SubmitHost=f.get("SubmitHostSumm"), From 0761c70f3f7ea0d497b9a69e4d7300bf7790af77 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 14:29:54 +0000 Subject: [PATCH 073/183] Refactor to reduce line length. 
--- monitoring/publishing/views.py | 49 +++++++++++++++++++++++++++++----- 1 file changed, 42 insertions(+), 7 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index db10f1f7..f12b8528 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -99,7 +99,10 @@ def list(self, request): if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") for f in fetchset: - GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, SiteName=f.Site) + GridSite.objects.update_or_create( + defaults={'updated': f.LatestPublish}, + SiteName=f.Site + ) else: print('No need to update') @@ -138,7 +141,10 @@ def retrieve(self, request, pk=None): print('Out of date') fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") for f in fetchset: - GridSite.objects.update_or_create(defaults={'updated': f.LatestPublish}, SiteName=f.Site) + GridSite.objects.update_or_create( + defaults={'updated': f.LatestPublish}, + SiteName=f.Site + ) else: print('No need to update') @@ -208,7 +214,12 @@ def list(self, request): # Merge data from VSuperSummaries and VSyncRecords into one df df_Summaries = pd.DataFrame.from_dict(summaries_dict) df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) - df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year'], right_on=['Site', 'Month', 'Year'], how='inner') + df_all = df_Summaries.merge( + df_SyncRecords, + left_on=['Site', 'Month', 'Year'], + right_on=['Site', 'Month', 'Year'], + how='inner' + ) fetchset = df_all.to_dict('index') # Delete all data if table not empty (as this function lists all sites) @@ -275,7 +286,12 @@ def retrieve(self, request, 
SiteName=None): df_Summaries = pd.DataFrame.from_dict(summaries_dict) df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) - df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year'], right_on=['Site', 'Month', 'Year'], how='inner') + df_all = df_Summaries.merge( + df_SyncRecords, + left_on=['Site', 'Month', 'Year'], + right_on=['Site', 'Month', 'Year'], + how='inner' + ) fetchset = df_all.to_dict('index') # Ensure we list only the data for one site @@ -385,7 +401,12 @@ def retrieve(self, request, SiteName=None, YearMonth=None): df_Summaries.dropna(inplace=True) df_SyncRecords.dropna(inplace=True) - df_all = df_Summaries.merge(df_SyncRecords, left_on=['Site', 'Month', 'Year', 'SubmitHostSumm'], right_on=['Site', 'Month', 'Year', 'SubmitHostSync'], how='outer') + df_all = df_Summaries.merge( + df_SyncRecords, + left_on=['Site', 'Month', 'Year', 'SubmitHostSumm'], + right_on=['Site', 'Month', 'Year', 'SubmitHostSync'], + how='outer' + ) fetchset = df_all.to_dict('index') # This is to list only data for one month @@ -430,7 +451,14 @@ def list(self, request): print('Out of date') fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: - CloudSite.objects.update_or_create(defaults={'Vms': f.VMs, 'Script': f.CloudType, 'updated': f.UpdateTime}, SiteName=f.SiteName) + CloudSite.objects.update_or_create( + defaults={ + 'Vms': f.VMs, + 'Script': f.CloudType, + 'updated': f.UpdateTime + }, + SiteName=f.SiteName + ) else: print('No need to update') @@ -450,7 +478,14 @@ def retrieve(self, request, pk=None): print('Out of date') fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as 
VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") for f in fetchset: - CloudSite.objects.update_or_create(defaults={'Vms': f.VMs, 'Script': f.CloudType, 'updated': f.UpdateTime}, SiteName=f.SiteName) + CloudSite.objects.update_or_create( + defaults={ + 'Vms': f.VMs, + 'Script': f.CloudType, + 'updated': f.UpdateTime + }, + SiteName=f.SiteName + ) else: print('No need to update') From 2ac746214075c06ca0087f4f6c32e2f33ea9019e Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 14:33:46 +0000 Subject: [PATCH 074/183] Merge if statements to ensure clarity. --- monitoring/publishing/views.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index f12b8528..19b8b8a7 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -296,9 +296,8 @@ def retrieve(self, request, SiteName=None): # Ensure we list only the data for one site first_row = GridSiteSync.objects.first() - if first_row is not None: - if first_row.SiteName != SiteName: - GridSiteSync.objects.all().delete() + if hasattr(first_row, "SiteName") and first_row.SiteName != SiteName: + GridSiteSync.objects.all().delete() for f in fetchset.values(): rel_diff1 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountInDb")) From 22d1383ec5fc14e46762b9c068c3b4f655ca685b Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 16:03:28 +0000 Subject: [PATCH 075/183] Rearrange SQL queries to reduce line length. 
--- monitoring/publishing/views.py | 142 ++++++++++++++++++++++++++++++--- 1 file changed, 133 insertions(+), 9 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 19b8b8a7..9a555608 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -97,7 +97,16 @@ def list(self, request): if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): - fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") + sql_query = """ + SELECT + Site, + max(LatestEndTime) AS LatestPublish + FROM VSuperSummaries + WHERE Year=2019 + GROUP BY 1; + """ + fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) + for f in fetchset: GridSite.objects.update_or_create( defaults={'updated': f.LatestPublish}, @@ -139,7 +148,16 @@ def retrieve(self, request, pk=None): print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): print('Out of date') - fetchset = VSuperSummaries.objects.using('grid').raw("SELECT Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries WHERE Year=2019 GROUP BY 1;") + sql_query = """ + SELECT + Site, + max(LatestEndTime) AS LatestPublish + FROM VSuperSummaries + WHERE Year=2019 + GROUP BY 1; + """ + fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) + for f in fetchset: GridSite.objects.update_or_create( defaults={'updated': f.LatestPublish}, @@ -196,8 +214,33 @@ def list(self, request): print('Out of date') # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' - fetchset_Summaries = 
VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01' GROUP BY Site, Year, Month;") - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords GROUP BY Site, Year, Month") + sql_query_summaries = """ + SELECT + Site, + Month, Year, + SUM(NumberOfJobs) AS RecordCountPublished, + MIN(EarliestEndTime) AS RecordStart, + MAX(LatestEndTime) AS RecordEnd + FROM VSuperSummaries + WHERE + EarliestEndTime>'1900-01-01' AND + LatestEndTime>'1900-01-01' + GROUP BY + Site, Year, Month; + """ + fetchset_Summaries = VSuperSummaries.objects.using('apel').raw(sql_query_summaries) + + sql_query_syncrec = """ + SELECT + Site, + Month, + Year, + SUM(NumberOfJobs) AS RecordCountInDb + FROM VSyncRecords + GROUP BY + Site, Year, Month; + """ + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw(sql_query_syncrec) # Create empty dicts that will become dfs to be combined summaries_dict = summaries_dict_standard.copy() @@ -271,8 +314,36 @@ def retrieve(self, request, SiteName=None): print('Out of date') # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' - fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01'GROUP BY Site, Month, Year;".format(SiteName)) - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE Site='{}' GROUP BY Site, Month, Year;".format(SiteName)) + 
sql_query_summaries = """ + SELECT + Site, + Month, + Year, + SUM(NumberOfJobs) AS RecordCountPublished, + MIN(EarliestEndTime) AS RecordStart, + MAX(LatestEndTime) AS RecordEnd + FROM VSuperSummaries + WHERE + Site='{}' AND + EarliestEndTime>'1900-01-01' AND + LatestEndTime>'1900-01-01' + GROUP BY + Site, Year, Month; + """.format(SiteName) + fetchset_Summaries = VSuperSummaries.objects.using('apel').raw(sql_query_summaries) + + sql_query_syncrecords = """ + SELECT + Site, + Month, + Year, + SUM(NumberOfJobs) AS RecordCountInDb + FROM VSyncRecords + WHERE Site='{}' + GROUP BY + Site, Year, Month; + """.format(SiteName) + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw(sql_query_syncrecords) summaries_dict = summaries_dict_standard.copy() syncrecords_dict = syncrecords_dict_standard.copy() @@ -382,8 +453,39 @@ def retrieve(self, request, SiteName=None, YearMonth=None): if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or (sitename_in_table != SiteName) or (yearmonth_in_table != YearMonth): print('Out of date') - fetchset_Summaries = VSuperSummaries.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountPublished, SubmitHost AS SubmitHostSumm, MIN(EarliestEndTime) AS RecordStart, MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw("SELECT Site, Month, Year, SUM(NumberOfJobs) AS RecordCountInDb, SubmitHost AS SubmitHostSync FROM VSyncRecords WHERE Site='{}' AND Month='{}' AND Year='{}' GROUP BY SubmitHost;".format(SiteName, Month, Year)) + sql_query_summaries = """ + SELECT + Site, + Month, + Year, + SUM(NumberOfJobs) AS RecordCountPublished, + SubmitHost AS SubmitHostSumm, + MIN(EarliestEndTime) AS RecordStart, + MAX(LatestEndTime) AS RecordEnd + FROM VSuperSummaries + WHERE + Site='{}' AND + 
Month='{}' AND + Year='{}' + GROUP BY SubmitHost; + """.format(SiteName, Month, Year) + fetchset_Summaries = VSuperSummaries.objects.using('apel').raw(sql_query_summaries) + + sql_query_syncrecords = """ + SELECT + Site, + Month, + Year, + SUM(NumberOfJobs) AS RecordCountInDb, + SubmitHost AS SubmitHostSync + FROM VSyncRecords + WHERE + Site='{}' AND + Month='{}' AND + Year='{}' + GROUP BY SubmitHost; + """.format(SiteName, Month, Year) + fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw(sql_query_syncrecords) summaries_dict = summaries_dict_standard.copy() syncrecords_dict = syncrecords_dict_standard.copy() @@ -448,7 +550,29 @@ def list(self, request): print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): print('Out of date') - fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") + + sql_query = """ + SELECT + b.SiteName, + COUNT(DISTINCT VMUUID) as VMs, + CloudType, + b.UpdateTime + FROM( + SELECT + SiteName, + MAX(UpdateTime) AS latest + FROM VAnonCloudRecords + WHERE UpdateTime>'2018-07-25' + GROUP BY SiteName + ) + AS a + INNER JOIN VAnonCloudRecords + AS b + ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest + GROUP BY SiteName; + """ + fetchset = VAnonCloudRecord.objects.using('cloud').raw(sql_query) + for f in fetchset: CloudSite.objects.update_or_create( defaults={ From 5ca8c666f861dd8ca86ca094216e196a185d8f70 Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 16:55:16 +0000 Subject: [PATCH 076/183] Create separate function for sync_status to reduce duplication. 
--- monitoring/publishing/views.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 9a555608..88e6694c 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -78,6 +78,18 @@ def correct_dict(inpDict): return inpDict +def determine_sync_status(f): + RecordCountPublished = f.get("RecordCountPublished") + RecordCountInDb = f.get("RecordCountInDb") + rel_diff1 = abs(RecordCountPublished - RecordCountInDb)/RecordCountInDb + rel_diff2 = abs(RecordCountPublished - RecordCountInDb)/RecordCountPublished + if rel_diff1 < 0.01 or rel_diff2 < 0.01: + syncstatus = 'OK' + else: + syncstatus = 'Error' + return syncstatus + + # Combine Year and Month into one string (display purposes) def get_year_month_str(year, month): year_string = str(year) @@ -270,12 +282,7 @@ def list(self, request): # Determine SyncStatus based on the difference between records published and in db for f in fetchset.values(): - rel_diff1 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountInDb")) - rel_diff2 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountPublished")) - if rel_diff1 < 0.01 or rel_diff2 < 0.01: - f['SyncStatus'] = 'OK' - else: - f['SyncStatus'] = 'Error' + f['SyncStatus'] = determine_sync_status(f) # Combined primary keys outside the default dict GridSiteSync.objects.update_or_create( @@ -371,12 +378,7 @@ def retrieve(self, request, SiteName=None): GridSiteSync.objects.all().delete() for f in fetchset.values(): - rel_diff1 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountInDb")) - rel_diff2 = abs(f.get("RecordCountPublished") - f.get("RecordCountInDb"))/(f.get("RecordCountPublished")) - if rel_diff1 <= 0.01 or rel_diff2 <= 0.01: - f['SyncStatus'] = 'OK' - else: - f['SyncStatus'] = 'Error' + f['SyncStatus'] = determine_sync_status(f) 
GridSiteSync.objects.update_or_create( defaults={ From 5799dbb5dd6bd6575350100466c5dccea039700e Mon Sep 17 00:00:00 2001 From: Letizia Date: Fri, 3 Feb 2023 17:29:45 +0000 Subject: [PATCH 077/183] Create separate function foupdating stdout and returncode. --- monitoring/publishing/views.py | 40 +++++++++++++++------------------- 1 file changed, 18 insertions(+), 22 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 88e6694c..20c29a0a 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -45,6 +45,22 @@ } +def update_dict_stdout_and_returncode(single_dict, date): + diff = datetime.today() - date + date = date.strftime("%Y-%m-%d") + + if diff <= timedelta(days=7): + single_dict['returncode'] = 0 + single_dict['stdout'] = "OK [ last published %s days ago: %s ]" % (diff.days, date) + elif diff > timedelta(days=7): + single_dict['returncode'] = 1 + single_dict['stdout'] = "WARNING [ last published %s days ago: %s ]" % (diff.days, date) + else: + single_dict['returncode'] = 3 + single_dict['stdout'] = "UNKNOWN" + return single_dict + + def fill_summaries_dict(inpDict, row): inpDict["Site"] = inpDict.get("Site") + [row.Site] inpDict["Month"] = inpDict.get("Month") + [row.Month] @@ -132,17 +148,7 @@ def list(self, request): for single_dict in response.data: date = single_dict.get('updated').replace(tzinfo=None) - - diff = datetime.today() - date - if diff <= timedelta(days=7): - single_dict['returncode'] = 0 - single_dict['stdout'] = "OK [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) - elif diff > timedelta(days=7): - single_dict['returncode'] = 1 - single_dict['stdout'] = "WARNING [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) - else: - single_dict['returncode'] = 3 - single_dict['stdout'] = "UNKNOWN" + single_dict = update_dict_stdout_and_returncode(single_dict, date) final_response.append(single_dict) if type(request.accepted_renderer) 
is TemplateHTMLRenderer: @@ -189,17 +195,7 @@ def retrieve(self, request, pk=None): 'last_fetched': last_fetched } - diff = datetime.today() - date - if diff <= timedelta(days=7): - response.data['returncode'] = 0 - response.data['stdout'] = "OK [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) - elif diff > timedelta(days=7): - response.data['returncode'] = 1 - response.data['stdout'] = "WARNING [ last published %s days ago: %s ]" % (diff.days, date.strftime("%Y-%m-%d")) - else: - response.data['returncode'] = 3 - response.data['stdout'] = "UNKNOWN" - + response.data = update_dict_stdout_and_returncode(response.data, date) return response From 12b166eb1f53b6fed843506abc124114c993fd1e Mon Sep 17 00:00:00 2001 From: Letizia Date: Tue, 7 Feb 2023 13:21:20 +0000 Subject: [PATCH 078/183] Refactor to reduce length of lines. --- monitoring/publishing/views.py | 21 ++++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 20c29a0a..e82538d6 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -62,12 +62,19 @@ def update_dict_stdout_and_returncode(single_dict, date): def fill_summaries_dict(inpDict, row): - inpDict["Site"] = inpDict.get("Site") + [row.Site] - inpDict["Month"] = inpDict.get("Month") + [row.Month] - inpDict["Year"] = inpDict.get("Year") + [row.Year] - inpDict["RecordCountPublished"] = inpDict.get("RecordCountPublished") + [row.RecordCountPublished] - inpDict["RecordStart"] = inpDict.get("RecordStart") + [row.RecordStart] - inpDict["RecordEnd"] = inpDict.get("RecordEnd") + [row.RecordEnd] + + fields_to_update_and_value_to_add = { + "Site": row.Site, + "Month": row.Month, + "Year": row.Year, + "RecordCountPublished": row.RecordCountPublished, + "RecordStart": row.RecordStart, + "RecordEnd": row.RecordEnd, + } + + for field, value in fields_to_update_and_value_to_add.items(): + inpDict[field] = 
inpDict.get(field) + [value] + if hasattr(row, "SubmitHostSumm"): inpDict["SubmitHostSumm"] = inpDict.get("SubmitHostSumm") + [row.SubmitHostSumm] @@ -440,7 +447,7 @@ def retrieve(self, request, SiteName=None, YearMonth=None): sitename_in_table = None yearmonth_in_table = None - # This is to ensure the data is updated when changing (or clicking on another) month + # This is to ensure the data is updated when changing month if GridSiteSyncSubmitH.objects.count() > 0: row_1 = GridSiteSyncSubmitH.objects.filter()[:1].get() sitename_in_table = row_1.SiteName From b03791d662aa6750b01e0f99dbed3a489dced564 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 22 Nov 2023 11:19:01 +0000 Subject: [PATCH 079/183] Add missing objects that weren't created/imported --- monitoring/publishing/views.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index e82538d6..fd596896 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -4,6 +4,7 @@ from datetime import datetime, timedelta from django.db.models import Max +from django.shortcuts import get_object_or_404 import pandas as pd from rest_framework import viewsets, generics @@ -432,6 +433,7 @@ class GridSiteSyncSubmitHViewSet(MultipleFieldLookupMixin, viewsets.ReadOnlyMode template_name = 'gridsync_submithost.html' def list(self, request): + last_fetched = GridSiteSyncSubmitH.objects.aggregate(Max('fetched'))['fetched__max'] response = super(GridSiteSyncSubmitHViewSet, self).list(request) response.data = { 'submisthosts': response.data, From ee92a54666db1a896a1e24409b3f2acddf5b8ef9 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 22 Nov 2023 11:20:37 +0000 Subject: [PATCH 080/183] Correct reference to grid database from settings --- monitoring/publishing/views.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 
fd596896..9039c7c9 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -244,7 +244,7 @@ def list(self, request): GROUP BY Site, Year, Month; """ - fetchset_Summaries = VSuperSummaries.objects.using('apel').raw(sql_query_summaries) + fetchset_Summaries = VSuperSummaries.objects.using('grid').raw(sql_query_summaries) sql_query_syncrec = """ SELECT @@ -256,7 +256,7 @@ def list(self, request): GROUP BY Site, Year, Month; """ - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw(sql_query_syncrec) + fetchset_SyncRecords = VSyncRecords.objects.using('grid').raw(sql_query_syncrec) # Create empty dicts that will become dfs to be combined summaries_dict = summaries_dict_standard.copy() @@ -341,7 +341,7 @@ def retrieve(self, request, SiteName=None): GROUP BY Site, Year, Month; """.format(SiteName) - fetchset_Summaries = VSuperSummaries.objects.using('apel').raw(sql_query_summaries) + fetchset_Summaries = VSuperSummaries.objects.using('grid').raw(sql_query_summaries) sql_query_syncrecords = """ SELECT @@ -354,7 +354,7 @@ def retrieve(self, request, SiteName=None): GROUP BY Site, Year, Month; """.format(SiteName) - fetchset_SyncRecords = VSyncRecords.objects.using('apel').raw(sql_query_syncrecords) + fetchset_SyncRecords = VSyncRecords.objects.using('grid').raw(sql_query_syncrecords) summaries_dict = summaries_dict_standard.copy() syncrecords_dict = syncrecords_dict_standard.copy() @@ -476,7 +476,7 @@ def retrieve(self, request, SiteName=None, YearMonth=None): Year='{}' GROUP BY SubmitHost; """.format(SiteName, Month, Year) - fetchset_Summaries = VSuperSummaries.objects.using('apel').raw(sql_query_summaries) + fetchset_Summaries = VSuperSummaries.objects.using('grid').raw(sql_query_summaries) sql_query_syncrecords = """ SELECT @@ -492,7 +492,7 @@ def retrieve(self, request, SiteName=None, YearMonth=None): Year='{}' GROUP BY SubmitHost; """.format(SiteName, Month, Year) - fetchset_SyncRecords = 
VSyncRecords.objects.using('apel').raw(sql_query_syncrecords) + fetchset_SyncRecords = VSyncRecords.objects.using('grid').raw(sql_query_syncrecords) summaries_dict = summaries_dict_standard.copy() syncrecords_dict = syncrecords_dict_standard.copy() From 3b3453db70d6859f03cdccc07be8878b0f1aac1f Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 22 Nov 2023 11:56:47 +0000 Subject: [PATCH 081/183] Automate venv activation Putting venv activation in the wsgi file means that Apache can invoke the venv and it doesn't need to be done manually. --- monitoring/wsgi.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/monitoring/wsgi.py b/monitoring/wsgi.py index ed72d1be..5fa221b9 100644 --- a/monitoring/wsgi.py +++ b/monitoring/wsgi.py @@ -11,6 +11,12 @@ from django.core.wsgi import get_wsgi_application + +# Activate virtualenv +activate_path = os.path.expanduser("/usr/share/DJANGO_MONITORING_APP/venv/bin/activate_this.py") +with open(activate_path) as act: + exec(act.read(), dict(__file__=activate_path)) + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "monitoring.settings") application = get_wsgi_application() From 9d647a609a9e60da18678c5c95a3dcc8300dddd6 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 22 Nov 2023 12:48:45 +0000 Subject: [PATCH 082/183] Tidy up settings --- monitoring/settings.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index 5d3d44f5..122180cb 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -25,7 +25,7 @@ # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True -ALLOWED_HOSTS = ['127.0.0.1', 'localhost'] +ALLOWED_HOSTS = [] # Application definition @@ -91,17 +91,19 @@ }, 'grid': { 'ENGINE': 'django.db.backends.mysql', - 'HOST': 'localhost', + 'HOST': '', 'PORT': '3306', - 'NAME': 'django_test_grid', - 'USER': 'root', + 'NAME': '', + 'USER': '', + 'PASSWORD': '', }, 'cloud': { 'ENGINE': 'django.db.backends.mysql', - 'HOST': 'localhost', + 'HOST': '', 'PORT': '3306', - 'NAME': 'django_test', - 'USER': 'root', + 'NAME': '', + 'USER': '', + 'PASSWORD': '', }, } From bd77d79f958a734fd364c5c97314d2992bf62f98 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 22 Nov 2023 15:14:56 +0000 Subject: [PATCH 083/183] Change sync status to match old pages --- monitoring/publishing/views.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 9039c7c9..8e2c2cfb 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -108,9 +108,9 @@ def determine_sync_status(f): rel_diff1 = abs(RecordCountPublished - RecordCountInDb)/RecordCountInDb rel_diff2 = abs(RecordCountPublished - RecordCountInDb)/RecordCountPublished if rel_diff1 < 0.01 or rel_diff2 < 0.01: - syncstatus = 'OK' + syncstatus = "OK" else: - syncstatus = 'Error' + syncstatus = "ERROR [ Please use the Gap Publisher to synchronise this dataset]" return syncstatus From 4b2902c044bff38c8e11c1c996680949d3595591 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 22 Nov 2023 17:49:56 +0000 Subject: [PATCH 084/183] Revert changes to availability app urls --- monitoring/availability/urls.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/monitoring/availability/urls.py b/monitoring/availability/urls.py index 12d45b8b..3b2d5752 100644 --- a/monitoring/availability/urls.py +++ b/monitoring/availability/urls.py @@ -1,8 +1,6 @@ from django.conf.urls import url -import sys -from monitoring.availability import views 
-sys.path.append('/usr/share/DJANGO_MONITORING_APP') +from monitoring.availability import views urlpatterns = [ url(r'^$', views.status), From afd03fa775a440296e6bb83bdf9397ea98859603 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 24 Jan 2024 16:11:32 +0000 Subject: [PATCH 085/183] Simplify availability app to just return "OK" 200 --- monitoring/availability/views.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/monitoring/availability/views.py b/monitoring/availability/views.py index 380575e3..ba6a05be 100644 --- a/monitoring/availability/views.py +++ b/monitoring/availability/views.py @@ -1,15 +1,10 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -import time - from rest_framework.decorators import api_view from rest_framework.response import Response @api_view() def status(requst): - if int(time.time()) % 2: - return Response("Everything OK") - else: - return Response("Everything NOT ok.") + return Response("OK", status=200) From f933dd72051669d048ae1e53965b410ab570ef89 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 24 Jan 2024 16:15:48 +0000 Subject: [PATCH 086/183] Remove year limitation on grid pub SQL query --- monitoring/publishing/views.py | 1 - 1 file changed, 1 deletion(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 8e2c2cfb..26cd927b 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -138,7 +138,6 @@ def list(self, request): Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries - WHERE Year=2019 GROUP BY 1; """ fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) From 0c39eac1d4436df925ee72331808514670afe0ae Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 24 Jan 2024 16:47:02 +0000 Subject: [PATCH 087/183] Add basic HTML template for status page --- monitoring/availability/templates/status.html | 1 + monitoring/availability/views.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) 
create mode 100644 monitoring/availability/templates/status.html diff --git a/monitoring/availability/templates/status.html b/monitoring/availability/templates/status.html new file mode 100644 index 00000000..4fae6b62 --- /dev/null +++ b/monitoring/availability/templates/status.html @@ -0,0 +1 @@ +{{ message }} diff --git a/monitoring/availability/views.py b/monitoring/availability/views.py index ba6a05be..eff30944 100644 --- a/monitoring/availability/views.py +++ b/monitoring/availability/views.py @@ -7,4 +7,4 @@ @api_view() def status(requst): - return Response("OK", status=200) + return Response({"message": "OK"}, status=200, template_name="status.html") From 4d9f151f49e4cd88d845057f7136c8eb07909c8a Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 24 Jan 2024 16:57:26 +0000 Subject: [PATCH 088/183] Add availability app to installed apps This will hopefully register the templates it contains --- monitoring/settings.py | 1 + 1 file changed, 1 insertion(+) diff --git a/monitoring/settings.py b/monitoring/settings.py index 122180cb..ac611091 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -39,6 +39,7 @@ 'django.contrib.staticfiles', 'rest_framework', 'monitoring.publishing', + 'monitoring.availability', ] REST_FRAMEWORK = { From 40a7c6d62726254814f3b76ac007dc5180d7c121 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 28 Feb 2024 10:13:58 +0000 Subject: [PATCH 089/183] Update cutoff date for cloud query --- monitoring/publishing/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 26cd927b..debc0b12 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -568,7 +568,7 @@ def list(self, request): SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords - WHERE UpdateTime>'2018-07-25' + WHERE UpdateTime>'2023-01-01' GROUP BY SiteName ) AS a From 20857e512145cf51a2a19cf044f3b939916ce03f Mon Sep 17 00:00:00 2001 
From: =?UTF-8?q?=E2=80=9CSae126V=E2=80=9D?= <“saitejav2021@gmail.com”> Date: Mon, 28 Oct 2024 17:45:45 +0000 Subject: [PATCH 090/183] Add configuration file --- conf/common.cfg | 27 +++++++++++++++++++++++++++ monitoring/settings.py | 31 ++++++++++++++++++------------- 2 files changed, 45 insertions(+), 13 deletions(-) create mode 100644 conf/common.cfg diff --git a/conf/common.cfg b/conf/common.cfg new file mode 100644 index 00000000..f1ac589d --- /dev/null +++ b/conf/common.cfg @@ -0,0 +1,27 @@ +[db] +# type of database +backend_grid = django.db.backends.mysql +backend_cloud = django.db.backends.mysql + +# host with database +hostname_grid = localhost +hostname_cloud = localhost + +# port to connect to +port_grid = 3306 +port_cloud = 3306 + +# database name +name_grid = +name_cloud = + +# database user +username_grid = root +username_cloud = root + +# password for database +password_grid = +password_cloud = + +[common] +allowed_hosts = diff --git a/monitoring/settings.py b/monitoring/settings.py index ac611091..fb7b1eed 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -11,10 +11,15 @@ """ import os +import configparser # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +# Read configuration from the file +cp = configparser.ConfigParser() +file_path = os.path.join(BASE_DIR, 'conf', 'common.cfg') +cp.read(file_path) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ @@ -25,7 +30,7 @@ # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True -ALLOWED_HOSTS = [] +ALLOWED_HOSTS = cp.get('common', 'allowed_hosts').split(',') # Application definition @@ -91,20 +96,20 @@ 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), }, 'grid': { - 'ENGINE': 'django.db.backends.mysql', - 'HOST': '', - 'PORT': '3306', - 'NAME': '', - 'USER': '', - 'PASSWORD': '', + 'ENGINE': cp.get('db', 'backend_grid'), + 'HOST': cp.get('db', 'hostname_grid'), + 'PORT': cp.get('db', 'port_grid'), + 'NAME': cp.get('db', 'name_grid'), + 'USER': cp.get('db', 'username_grid'), + 'PASSWORD': cp.get('db', 'password_grid'), }, 'cloud': { - 'ENGINE': 'django.db.backends.mysql', - 'HOST': '', - 'PORT': '3306', - 'NAME': '', - 'USER': '', - 'PASSWORD': '', + 'ENGINE': cp.get('db', 'backend_cloud'), + 'HOST': cp.get('db', 'hostname_cloud'), + 'PORT': cp.get('db', 'port_cloud'), + 'NAME': cp.get('db', 'name_cloud'), + 'USER': cp.get('db', 'username_cloud'), + 'PASSWORD': cp.get('db', 'password_cloud'), }, } From f42d9a6bad2d79678c3e1b383b822928c707bd28 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CSae126V=E2=80=9D?= <“saitejav2021@gmail.com”> Date: Tue, 29 Oct 2024 12:37:20 +0000 Subject: [PATCH 091/183] Update code to have try and catch block and address comments Update the catch block to handle the OSError(s) --- conf/common.cfg | 27 --------------- monitoring/settings.ini | 29 ++++++++++++++++ monitoring/settings.py | 74 +++++++++++++++++++++++------------------ 3 files changed, 70 insertions(+), 60 deletions(-) delete mode 100644 conf/common.cfg create mode 100644 monitoring/settings.ini diff --git a/conf/common.cfg b/conf/common.cfg deleted file mode 100644 index f1ac589d..00000000 --- a/conf/common.cfg +++ /dev/null @@ -1,27 +0,0 @@ -[db] -# type of database -backend_grid = django.db.backends.mysql -backend_cloud = django.db.backends.mysql - -# host with database -hostname_grid = localhost -hostname_cloud = localhost - -# port to connect to -port_grid = 3306 -port_cloud = 3306 - -# database name -name_grid = -name_cloud = - -# 
database user -username_grid = root -username_cloud = root - -# password for database -password_grid = -password_cloud = - -[common] -allowed_hosts = diff --git a/monitoring/settings.ini b/monitoring/settings.ini new file mode 100644 index 00000000..e1b12a52 --- /dev/null +++ b/monitoring/settings.ini @@ -0,0 +1,29 @@ +# This file will be parsed by settings.py. + +[common] +# `allowed_hosts` values should be comma separated list of hostnames (fqdn's) +allowed_hosts = + + +# Information about the database connection - grid +[db grid] +# type of database - refers to the Django db backend +backend = django.db.backends.mysql + +hostname = localhost +port = 3306 +name = +username = root +password = + + +# Information about the database connection - cloud +[db cloud] +# type of database refers to the Django db backend +backend = django.db.backends.mysql + +hostname = localhost +port = 3306 +name = +username = root +password = diff --git a/monitoring/settings.py b/monitoring/settings.py index fb7b1eed..791cc6bf 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -12,15 +12,11 @@ import os import configparser +import sys # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) -# Read configuration from the file -cp = configparser.ConfigParser() -file_path = os.path.join(BASE_DIR, 'conf', 'common.cfg') -cp.read(file_path) - # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ @@ -30,8 +26,47 @@ # SECURITY WARNING: don't run with debug turned on in production! 
DEBUG = True -ALLOWED_HOSTS = cp.get('common', 'allowed_hosts').split(',') +try: + + # Read configuration from the file + cp = configparser.ConfigParser() + file_path = os.path.join(BASE_DIR, 'monitoring', 'settings.ini') + + if not os.path.isfile(file_path): + raise FileNotFoundError(f'Configuration file NOT found: %s' % file_path) + + cp.read(file_path) + + ALLOWED_HOSTS = cp.get('common', 'allowed_hosts').split(',') + + # Database + # https://docs.djangoproject.com/en/1.11/ref/settings/#databases + DATABASES = { + 'default': { + 'ENGINE': 'django.db.backends.sqlite3', + 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), + }, + 'grid': { + 'ENGINE': cp.get('db grid', 'backend'), + 'HOST': cp.get('db grid', 'hostname'), + 'PORT': cp.get('db grid', 'port'), + 'NAME': cp.get('db grid', 'name'), + 'USER': cp.get('db grid', 'username'), + 'PASSWORD': cp.get('db grid', 'password'), + }, + 'cloud': { + 'ENGINE': cp.get('db cloud', 'backend'), + 'HOST': cp.get('db cloud', 'hostname'), + 'PORT': cp.get('db cloud', 'port'), + 'NAME': cp.get('db cloud', 'name'), + 'USER': cp.get('db cloud', 'username'), + 'PASSWORD': cp.get('db cloud', 'password'), + }, + } +except (OSError, configparser.Error) as err: + print("Error in configuration file: %s" % err) + sys.exit(1) # Application definition @@ -87,33 +122,6 @@ WSGI_APPLICATION = 'monitoring.wsgi.application' -# Database -# https://docs.djangoproject.com/en/1.11/ref/settings/#databases - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), - }, - 'grid': { - 'ENGINE': cp.get('db', 'backend_grid'), - 'HOST': cp.get('db', 'hostname_grid'), - 'PORT': cp.get('db', 'port_grid'), - 'NAME': cp.get('db', 'name_grid'), - 'USER': cp.get('db', 'username_grid'), - 'PASSWORD': cp.get('db', 'password_grid'), - }, - 'cloud': { - 'ENGINE': cp.get('db', 'backend_cloud'), - 'HOST': cp.get('db', 'hostname_cloud'), - 'PORT': cp.get('db', 'port_cloud'), - 'NAME': cp.get('db', 
'name_cloud'), - 'USER': cp.get('db', 'username_cloud'), - 'PASSWORD': cp.get('db', 'password_cloud'), - }, -} - - # Password validation # https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators From ac19a35a5624a1b382872bb53e4fea7b61b7c200 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CSae126V=E2=80=9D?= <“saitejav2021@gmail.com”> Date: Tue, 29 Oct 2024 14:01:03 +0000 Subject: [PATCH 092/183] Rename section headers and fix the file path --- monitoring/settings.ini | 4 ++-- monitoring/settings.py | 30 +++++++++++++----------------- 2 files changed, 15 insertions(+), 19 deletions(-) diff --git a/monitoring/settings.ini b/monitoring/settings.ini index e1b12a52..33ac1ba9 100644 --- a/monitoring/settings.ini +++ b/monitoring/settings.ini @@ -6,7 +6,7 @@ allowed_hosts = # Information about the database connection - grid -[db grid] +[db_grid] # type of database - refers to the Django db backend backend = django.db.backends.mysql @@ -18,7 +18,7 @@ password = # Information about the database connection - cloud -[db cloud] +[db_cloud] # type of database refers to the Django db backend backend = django.db.backends.mysql diff --git a/monitoring/settings.py b/monitoring/settings.py index 791cc6bf..43ad7452 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -31,10 +31,6 @@ # Read configuration from the file cp = configparser.ConfigParser() file_path = os.path.join(BASE_DIR, 'monitoring', 'settings.ini') - - if not os.path.isfile(file_path): - raise FileNotFoundError(f'Configuration file NOT found: %s' % file_path) - cp.read(file_path) ALLOWED_HOSTS = cp.get('common', 'allowed_hosts').split(',') @@ -47,24 +43,24 @@ 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), }, 'grid': { - 'ENGINE': cp.get('db grid', 'backend'), - 'HOST': cp.get('db grid', 'hostname'), - 'PORT': cp.get('db grid', 'port'), - 'NAME': cp.get('db grid', 'name'), - 'USER': cp.get('db grid', 'username'), - 'PASSWORD': cp.get('db grid', 'password'), + 'ENGINE': 
cp.get('db_grid', 'backend'), + 'HOST': cp.get('db_grid', 'hostname'), + 'PORT': cp.get('db_grid', 'port'), + 'NAME': cp.get('db_grid', 'name'), + 'USER': cp.get('db_grid', 'username'), + 'PASSWORD': cp.get('db_grid', 'password'), }, 'cloud': { - 'ENGINE': cp.get('db cloud', 'backend'), - 'HOST': cp.get('db cloud', 'hostname'), - 'PORT': cp.get('db cloud', 'port'), - 'NAME': cp.get('db cloud', 'name'), - 'USER': cp.get('db cloud', 'username'), - 'PASSWORD': cp.get('db cloud', 'password'), + 'ENGINE': cp.get('db_cloud', 'backend'), + 'HOST': cp.get('db_cloud', 'hostname'), + 'PORT': cp.get('db_cloud', 'port'), + 'NAME': cp.get('db_cloud', 'name'), + 'USER': cp.get('db_cloud', 'username'), + 'PASSWORD': cp.get('db_cloud', 'password'), }, } -except (OSError, configparser.Error) as err: +except (configparser.NoSectionError) as err: print("Error in configuration file: %s" % err) sys.exit(1) From b8f88b0e6452d227df1f2ea691bdccd142865592 Mon Sep 17 00:00:00 2001 From: Saiteja Reddy Vennapusa <127398882+Sae126V@users.noreply.github.com> Date: Wed, 13 Nov 2024 11:00:36 +0000 Subject: [PATCH 093/183] Apply suggestions from code review Co-authored-by: Adrian Coveney <4836233+tofu-rocketry@users.noreply.github.com> --- monitoring/settings.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index 43ad7452..29dd4b6e 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -10,8 +10,8 @@ https://docs.djangoproject.com/en/1.11/ref/settings/ """ -import os import configparser +import os import sys # Build paths inside the project like this: os.path.join(BASE_DIR, ...) @@ -61,7 +61,7 @@ } except (configparser.NoSectionError) as err: - print("Error in configuration file: %s" % err) + print("Error in configuration file. 
Check that file exists first: %s" % err) sys.exit(1) # Application definition From e1299ae43a9d4fee5ae649eececffdfd8209d8fb Mon Sep 17 00:00:00 2001 From: Saiteja Reddy Vennapusa <127398882+Sae126V@users.noreply.github.com> Date: Thu, 14 Nov 2024 10:26:42 +0000 Subject: [PATCH 094/183] Update packages to work with py3.6 (#23) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update packages to work with py3.6 * Remove mysqlclient as it been replaced with pymsql --------- Co-authored-by: “Sae126V” <“saitejav2021@gmail.com”> --- docs/what_gets_installed.md | 16 ++++++++-------- requirements.txt | 10 ++++++---- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/docs/what_gets_installed.md b/docs/what_gets_installed.md index 0d43cce5..381be5a8 100644 --- a/docs/what_gets_installed.md +++ b/docs/what_gets_installed.md @@ -6,18 +6,18 @@ Following the config file that Aquilon uses, the following are the packages inst - python3-mod_wsgi (for apache to work with django) - python3-devel - gcc (needed for dependencies) -- mariadb. - +- mariadb +- tar. ## Packages installed within the venv Within venv, the following are installed through pip: -- djangorestframework (3.11.2) -- pymysql (needed for mariadb to work) -- pandas (needed by the app) -- django (2.1.*). +- djangorestframework (3.15.1) +- pymysql (1.0.2) (needed for mariadb to work) +- pandas (1.1.5) (needed by the app) +- django (3.1.14) +- pytz (2024.2). Note that when the version of the packages is specified, the app would not work with a different version (due to dependencies conflicts). -Also, as long as mariadb is installed (both client and server), it is not necessary to install mysqlclient (at least when the OS is Scientific Linux). Is is also important to note that different types of OS require different packages to be installed. -The above are the packages that allow the app to work on a scientific linux 7 machine. 
A Centos machine would require slightly different packages. +The above are the packages that allow the app to work on a Rocky8. diff --git a/requirements.txt b/requirements.txt index 54be2963..581d42c5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,6 @@ -Django==2.2.28 -djangorestframework==3.11.2 -mysqlclient==1.3.9 -pytz==2019.3 +# Pin packages to support and work with py3.6. +Django==3.1.14 +djangorestframework==3.15.1 +pytz==2024.2 +PyMySQL==1.0.2 +pandas==1.1.5 From e244961b82aa83a5526aa1372a3ca389234314df Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 28 Feb 2024 16:34:10 +0000 Subject: [PATCH 095/183] Add styles sheets and fix references to them --- monitoring/publishing/static/style.css | 53 +++++ monitoring/publishing/static/stylesheet.css | 184 ++++++++++++++++++ .../publishing/templates/gridsites.html | 6 +- monitoring/publishing/templates/gridsync.html | 6 +- .../templates/gridsync_singlesite.html | 6 +- .../templates/gridsync_submithost.html | 6 +- 6 files changed, 253 insertions(+), 8 deletions(-) create mode 100644 monitoring/publishing/static/style.css create mode 100644 monitoring/publishing/static/stylesheet.css diff --git a/monitoring/publishing/static/style.css b/monitoring/publishing/static/style.css new file mode 100644 index 00000000..cdc2843b --- /dev/null +++ b/monitoring/publishing/static/style.css @@ -0,0 +1,53 @@ +/*- Menu Tabs E--------------------------- */ + + #tabsE { + float:left; + width:100%; + background:#333; + font-size:93%; + line-height:normal; + + } + #tabsE ul { + margin:0; + padding:10px 10px 0 50px; + list-style:none; + } + #tabsE li { + display:inline; + margin:0; + padding:0; + } + #tabsE a { + float:left; + background:url("tableftE.gif") no-repeat left top; + margin:0; + padding:0 0 0 4px; + text-decoration:none; + } + #tabsE a span { + float:left; + display:block; + background:url("tabrightE.gif") no-repeat right top; + padding:5px 15px 4px 6px; + color:#fff; + } + /* Commented Backslash 
Hack hides rule from IE5-Mac \*/ + #tabsE a span {float:none;} + /* End IE5-Mac hack */ + #tabsE a:hover span { + color:#FFF; + } + #tabsE a:hover { + background-position:0% -42px; + } + #tabsE a:hover span { + background-position:100% -42px; + } + + #tabsE #current a { + background-position:0% -42px; + } + #tabsE #current a span { + background-position:100% -42px; + } diff --git a/monitoring/publishing/static/stylesheet.css b/monitoring/publishing/static/stylesheet.css new file mode 100644 index 00000000..67b62dc7 --- /dev/null +++ b/monitoring/publishing/static/stylesheet.css @@ -0,0 +1,184 @@ +h12 { + font-size: 22px; + color: #336699; + background-color: #FFFFFF; + font-family: Arial, Helvetica, sans-serif; + font-weight: bold +} +h1 { + font-size: 1.2em; + color: #336699; + background-color: #FFFFFF; + font-family: Arial, Helvetica, sans-serif; +} +h2 { + font-size: 1em; +} + + +h6 { + align: right; + font-size: 0.6em; +} +body { + font-family: Arial, Helvetica, sans-serif; + background-color: #FFFFFF; + color: #000000; + font-size: .5px; + font-size: 14px; +} +th { + font-family: Arial, Helvetica, sans-serif; + font-size: 12px; + color: #009999; + background-color: #FFFFFF; + font-weight: bold + align: left; +} +a:link { + text-decoration: none; + color: #336699; + font-weight: bold; +} +a:active { + text-decoration: none; + color: #336699; + font-weight: bold; + background-color: #FFFFFF; +} +a:visited { + text-decoration: none; + color: #996699; + font-weight: bold; +} +a:hover { + color : #666666; + background-color: #CCCCCC; + font-weight: bold; +} +hr { + color: #666666; + background-color: #FFFFFF; +} +.outlined { + border: 1px solid #000000; +} +.navbar-title { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + color: #FFFFFF; + font-weight: bold; + + +} +.navbar { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.6em; + background-color: #FFFFFF; + +} +p { + font-family: Arial, Helvetica, sans-serif; + color: #000000; + 
font-size: 14px; + font-weight: normal; +} +li { + font-family: Arial, Helvetica, sans-serif; + font-size: 1em; + color: #111111; + list-style-type: square; + font-weight: normal; +} +.sidebar-orange { + background-color: #ffe5b2; + background-image: url(images/orange-globe.jpg); + background-position: right center; + background-repeat: no-repeat; + +} +.sidebar-green { + background-image: url(images/green-pulse.jpg); + background-repeat: repeat-x; + background-position: center bottom; + background-color: #dcf6de; +} +.sidebar-blue { + background-color: #dcf4f6; + background-image: url(images/blue-bars.jpg); + background-repeat: no-repeat; + background-position: right bottom; +} +.sidebar-pink { + background-color: #efdcf6; + background-image: url(images/pink-news.jpg); + background-repeat: no-repeat; + background-position: left bottom; +} +.note { + background-color: #E4E4E4; + border: 1px dotted #000000; + padding: 5px; +} +.tabletext { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + color: #000000; + background-color: #DDDDDD; +} +.tabletextwarning { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + color: #000000; + background-color: #FFFF00; +} +.tabletextok { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + color: #000000; + background-color: #00FF00; +} +.tabletexterror { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + color: #000000; + background-color: #FF0000; +} +.tabletextinfo { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + color: #000000; + background-color: #00CCFF; +} +.tableheader { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + color: #FFFFFF; + background-color: #000000; + font-weight: bold; + text-align: center; +} +.navbar-heading { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.8em; + background-color: #FFFFFF; + font-weight: bold; + color: #000000; +} +.feintoutlined { + border: 1px dotted #CCCCCC; +} +.newsHeader { + 
font-family: Arial, Helvetica, sans-serif; + font-size: 0.9em; + background-color: #97A6CD; + font-weight: bold; +} +.newsBody { + font-family: Arial, Helvetica, sans-serif; + font-size: 0.7em; + background-color: #ACBBDA; +} +.invisibleBorder { + border: thin dashed #EEEEEE; +} diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index 0ec21b0b..0de8d173 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -1,11 +1,13 @@ +{% load static %} + APEL Publication Summary : {{ sites.0.name }} - - + +

    APEL Publication Test

    diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index 1b43e972..d7fd074f 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -1,10 +1,12 @@ +{% load static %} + APEL Publication Summary - - + +

    APEL Synchronisation Test

    diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html index 00f7aca7..559807e4 100644 --- a/monitoring/publishing/templates/gridsync_singlesite.html +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -1,10 +1,12 @@ +{% load static %} + APEL Publication Summary - - + +

    APEL Synchronisation Test

    diff --git a/monitoring/publishing/templates/gridsync_submithost.html b/monitoring/publishing/templates/gridsync_submithost.html index a919ea6a..89ceed08 100644 --- a/monitoring/publishing/templates/gridsync_submithost.html +++ b/monitoring/publishing/templates/gridsync_submithost.html @@ -1,10 +1,12 @@ +{% load static %} + APEL Publication Summary - - + +

    APEL Synchronisation Test

    From 7f8ee867e41fa6bd4a1e5a4558c0301e59d3e51c Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 14 Nov 2024 10:18:14 +0000 Subject: [PATCH 096/183] Add pre-commit config --- .pre-commit-config.yaml | 38 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) create mode 100644 .pre-commit-config.yaml diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..545b8f81 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,38 @@ +# See https://pre-commit.com for more information +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 # Python 3.6 compatible + hooks: + # Python related checks + - id: check-ast + - id: check-builtin-literals + - id: check-docstring-first + - id: name-tests-test + name: Check unit tests start with 'test_' + args: ['--django'] + files: 'test/.*' + # Other checks + - id: check-added-large-files + - id: check-case-conflict + - id: check-merge-conflict + - id: check-yaml + - id: debug-statements + - id: detect-private-key + - id: end-of-file-fixer + - id: mixed-line-ending + name: Force line endings to LF + args: ['--fix=lf'] + - id: trailing-whitespace + +- repo: https://github.com/pre-commit/pygrep-hooks + rev: v1.10.0 + hooks: + - id: python-check-mock-methods + - id: python-no-eval + - id: python-no-log-warn + - id: python-use-type-annotations + +# Pre-commit CI config, see https://pre-commit.ci/ +ci: + autofix_prs: false + autoupdate_schedule: quarterly From 5d7228ebea82d8dfe4427db4a28634191d8a5ff3 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 14 Nov 2024 10:20:35 +0000 Subject: [PATCH 097/183] Tidy whitespace, line endings and file endings --- docs/what_gets_installed.md | 2 +- monitoring/availability/templates/status.html | 2 +- monitoring/publishing/static/style.css | 2 +- monitoring/publishing/static/stylesheet.css | 10 +++++----- monitoring/publishing/templates/gridsites.html | 2 +- monitoring/publishing/templates/gridsync.html | 6 
+++--- .../publishing/templates/gridsync_singlesite.html | 6 +++--- .../publishing/templates/gridsync_submithost.html | 4 ++-- 8 files changed, 17 insertions(+), 17 deletions(-) diff --git a/docs/what_gets_installed.md b/docs/what_gets_installed.md index 381be5a8..ade3ecd2 100644 --- a/docs/what_gets_installed.md +++ b/docs/what_gets_installed.md @@ -19,5 +19,5 @@ Within venv, the following are installed through pip: Note that when the version of the packages is specified, the app would not work with a different version (due to dependencies conflicts). -Is is also important to note that different types of OS require different packages to be installed. +Is is also important to note that different types of OS require different packages to be installed. The above are the packages that allow the app to work on a Rocky8. diff --git a/monitoring/availability/templates/status.html b/monitoring/availability/templates/status.html index 4fae6b62..9b504ca4 100644 --- a/monitoring/availability/templates/status.html +++ b/monitoring/availability/templates/status.html @@ -1 +1 @@ -{{ message }} +{{ message }} diff --git a/monitoring/publishing/static/style.css b/monitoring/publishing/static/style.css index cdc2843b..85a5cf9a 100644 --- a/monitoring/publishing/static/style.css +++ b/monitoring/publishing/static/style.css @@ -43,7 +43,7 @@ } #tabsE a:hover span { background-position:100% -42px; - } + } #tabsE #current a { background-position:0% -42px; diff --git a/monitoring/publishing/static/stylesheet.css b/monitoring/publishing/static/stylesheet.css index 67b62dc7..88f63afd 100644 --- a/monitoring/publishing/static/stylesheet.css +++ b/monitoring/publishing/static/stylesheet.css @@ -32,27 +32,27 @@ th { font-size: 12px; color: #009999; background-color: #FFFFFF; - font-weight: bold + font-weight: bold align: left; } a:link { - text-decoration: none; + text-decoration: none; color: #336699; font-weight: bold; } a:active { - text-decoration: none; + text-decoration: none; color: 
#336699; font-weight: bold; background-color: #FFFFFF; } a:visited { - text-decoration: none; + text-decoration: none; color: #996699; font-weight: bold; } a:hover { - color : #666666; + color : #666666; background-color: #CCCCCC; font-weight: bold; } diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index 0de8d173..8abf32b5 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -34,4 +34,4 @@

    APEL Publication Test

    - \ No newline at end of file + diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index d7fd074f..7edccdab 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -32,14 +32,14 @@

    APEL Synchronisation Test

    {% for record in records %} {{ record.SiteName }} - {{ record.YearMonth }} + {{ record.YearMonth }} {{ record.RecordStart }} {{ record.RecordEnd }} {{ record.RecordCountPublished }} {{ record.RecordCountInDb }} {{ record.SyncStatus }} - + {% endfor %} - \ No newline at end of file + diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html index 559807e4..f6e9dc5b 100644 --- a/monitoring/publishing/templates/gridsync_singlesite.html +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -30,14 +30,14 @@

    APEL Synchronisation Test

    {% for record in records %} - {{ record.YearMonth }} + {{ record.YearMonth }} {{ record.RecordStart }} {{ record.RecordEnd }} {{ record.RecordCountPublished }} {{ record.RecordCountInDb }} {{ record.SyncStatus }} - + {% endfor %} - \ No newline at end of file + diff --git a/monitoring/publishing/templates/gridsync_submithost.html b/monitoring/publishing/templates/gridsync_submithost.html index 89ceed08..6bb115ed 100644 --- a/monitoring/publishing/templates/gridsync_submithost.html +++ b/monitoring/publishing/templates/gridsync_submithost.html @@ -30,7 +30,7 @@

    APEL Synchronisation Test

    {% for host in submisthosts %} - + {{ host.YearMonth }} {{ host.SubmitHost }} {{ host.RecordStart }} @@ -42,4 +42,4 @@

    APEL Synchronisation Test

    {% endfor %} - \ No newline at end of file + From 420075af15f4a06b915705c28cdf4b31720c4483 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 4 Dec 2024 10:33:31 +0000 Subject: [PATCH 098/183] Change DEBUG to False --- monitoring/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index 29dd4b6e..724f7d96 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -24,7 +24,7 @@ SECRET_KEY = 'ge^fd9rf)htmxji8kf=jk8frh3=^11@^n=h14gu*fqt^0-lnr$' # SECURITY WARNING: don't run with debug turned on in production! -DEBUG = True +DEBUG = False try: From 64d78718f26d355ee076cadade54773213bf4c22 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 4 Dec 2024 10:34:21 +0000 Subject: [PATCH 099/183] Move SECRET_KEY to settings.ini for config man Move SECRET_KEY to be set from settings.ini so that configuration managment can set it from a secrets file. Also update link to docs for deployment checklist. --- monitoring/settings.ini | 3 +++ monitoring/settings.py | 8 ++++---- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/monitoring/settings.ini b/monitoring/settings.ini index 33ac1ba9..222b25c2 100644 --- a/monitoring/settings.ini +++ b/monitoring/settings.ini @@ -1,6 +1,9 @@ # This file will be parsed by settings.py. [common] +# A new key can be generated using django.core.management.utils.get_random_secret_key() +secret_key = + # `allowed_hosts` values should be comma separated list of hostnames (fqdn's) allowed_hosts = diff --git a/monitoring/settings.py b/monitoring/settings.py index 724f7d96..fcd17649 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -18,10 +18,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! 
-SECRET_KEY = 'ge^fd9rf)htmxji8kf=jk8frh3=^11@^n=h14gu*fqt^0-lnr$' +# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: don't run with debug turned on in production! DEBUG = False @@ -33,6 +30,9 @@ file_path = os.path.join(BASE_DIR, 'monitoring', 'settings.ini') cp.read(file_path) + # SECURITY WARNING: keep the secret key used in production secret! + SECRET_KEY = cp.get('common', 'secret_key') + ALLOWED_HOSTS = cp.get('common', 'allowed_hosts').split(',') # Database From 525e17f5bbcf70965d3c48bcc452ec36fc714f8d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9CSae126V=E2=80=9D?= <“saitejav2021@gmail.com”> Date: Wed, 12 Feb 2025 11:10:40 +0000 Subject: [PATCH 100/183] Fix issue with ConfigParser interpolation by disabling - Some keys in SECRET_KEY can contain %, which configparser assumes are interpolated values --- monitoring/settings.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index fcd17649..f4272680 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -26,7 +26,7 @@ try: # Read configuration from the file - cp = configparser.ConfigParser() + cp = configparser.ConfigParser(interpolation=None) file_path = os.path.join(BASE_DIR, 'monitoring', 'settings.ini') cp.read(file_path) From ab0697ff026605ebe9c8f71798a872cc176e580e Mon Sep 17 00:00:00 2001 From: Sae126V Date: Wed, 26 Feb 2025 21:19:02 +0000 Subject: [PATCH 101/183] Remove unused imports in url.py --- monitoring/publishing/urls.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 7f8d4e4a..8f47f569 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -1,5 +1,3 @@ -from django.conf.urls import include, url - from rest_framework import routers from monitoring.publishing import views From 92b6e55aad02f11f1ef2556a06c9da10b99bdc1a Mon Sep 17 00:00:00 2001 From: Sae126V Date: Wed, 26 Feb 2025 
21:35:17 +0000 Subject: [PATCH 102/183] Update code to allow dot(s) in regex pattern - We need this currently because it is breaking the current code flow and returning an internal server error. - In the DB we have few sites with dot in their SiteName - Perhaps, Nice-To-Have is either from the codebase we restrict showing the sites with dot(Unnecessary load to fetch) or restrict the sites to make their way to DB. --- monitoring/publishing/urls.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 8f47f569..21c78cdb 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -11,12 +11,12 @@ urlpatterns = [ re_path( - r'^gridsync/(?P[a-zA-Z0-9-]+)/$', + r'^gridsync/(?P[a-zA-Z0-9.-]+)/$', views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), name='gridsync_singlesite' ), re_path( - r'^gridsync/(?P[a-zA-Z0-9-]+)/(?P[0-9-]+)/$', + r'^gridsync/(?P[a-zA-Z0-9.-]+)/(?P[0-9-]+)/$', views.GridSiteSyncSubmitHViewSet.as_view({'get': 'retrieve'}), name='gridsync_submithost' ), From 3b20da70d1b3d3bc2146c6e69159248ed7d859f1 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Wed, 26 Feb 2025 22:22:50 +0000 Subject: [PATCH 103/183] Update code to use SiteName as a lookup_field - Removed the restriction on grid/ fetching the data with one year ONLY. - urls.py: We need this re_path() for grid and cloud to avoid getting the message saying "Could not resolve URL for hyperlinked relationship" - We need Django Rest Framework fetching the data based on the SiteName - We need this for couple of reasons 1) To avoid showing the data with the URL containing pk(As a Number like) in it 2) To fetch the data using SiteName as primary lookup field. 
--- monitoring/publishing/serializers.py | 10 ++++++++++ monitoring/publishing/urls.py | 10 ++++++++++ monitoring/publishing/views.py | 8 ++++---- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index de73fd13..24c9bc48 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -22,6 +22,11 @@ class Meta: 'updated' ) + # Sitename substitutes pk + lookup_field = 'SiteName' + extra_kwargs = { + 'url': {'view_name': 'gridsite-detail','lookup_field': 'SiteName'} + } class GridSiteSyncSerializer(serializers.HyperlinkedModelSerializer): # Override default format with None so that Python datetime is used as @@ -64,6 +69,11 @@ class Meta: 'updated' ) + # Sitename substitutes pk + lookup_field = 'SiteName' + extra_kwargs = { + 'url': {'view_name': 'cloudsite-detail', 'lookup_field': 'SiteName'} + } class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): # Override default format with None so that Python datetime is used as diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 21c78cdb..f53981a0 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -10,6 +10,16 @@ router.register(r'gridsync', views.GridSiteSyncSubmitHViewSet) urlpatterns = [ + re_path( + r'^cloud/(?P[a-zA-Z0-9.-]+)/$', + views.CloudSiteViewSet.as_view({'get': 'retrieve'}), + name='cloudsite-detail' + ), + re_path( + r'^grid/(?P[a-zA-Z0-9.-]+)/$', + views.GridSiteViewSet.as_view({'get': 'retrieve'}), + name='gridsite-detail' + ), re_path( r'^gridsync/(?P[a-zA-Z0-9.-]+)/$', views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index debc0b12..c492f88b 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -127,6 +127,7 @@ class GridSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = 
GridSite.objects.all() serializer_class = GridSiteSerializer template_name = 'gridsites.html' + lookup_field = 'SiteName' def list(self, request): last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] @@ -166,7 +167,7 @@ def list(self, request): return response - def retrieve(self, request, pk=None): + def retrieve(self, request, SiteName=None): last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] # If there's no data then last_fetched is None. if last_fetched is not None: @@ -178,7 +179,6 @@ def retrieve(self, request, pk=None): Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries - WHERE Year=2019 GROUP BY 1; """ fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) @@ -313,7 +313,6 @@ def list(self, request): return response def retrieve(self, request, SiteName=None): - lookup_field = 'SiteName' last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] row_1 = GridSiteSync.objects.filter()[:1].get() n_sites = GridSiteSync.objects.values('SiteName').distinct().count() @@ -549,6 +548,7 @@ class CloudSiteViewSet(viewsets.ReadOnlyModelViewSet): queryset = CloudSite.objects.all() serializer_class = CloudSiteSerializer template_name = 'cloudsites.html' + lookup_field = 'SiteName' def list(self, request): last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] @@ -600,7 +600,7 @@ def list(self, request): } return response - def retrieve(self, request, pk=None): + def retrieve(self, request, SiteName=None): last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): From e336bcdd5058862622421b4de507f161ec35fd49 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Wed, 26 Feb 2025 22:45:21 +0000 Subject: [PATCH 104/183] Update code to make gridsync URL work with the sitename(s) - 
Sometimes gridsync/ fetches the whole list again, ignoring the lookup_up field because we were using list() - We are using get_serializer() with `many=True` to handle complex data which is returning multiple objects with the single siteName. --- monitoring/publishing/serializers.py | 2 +- monitoring/publishing/urls.py | 3 +-- monitoring/publishing/views.py | 11 +++++++---- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 24c9bc48..a4092d15 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -49,7 +49,7 @@ class Meta: # Sitename substitutes pk lookup_field = 'SiteName' extra_kwargs = { - 'url': {'lookup_field': 'SiteName'} + 'url': {'view_name': 'gridsitesync-detail', 'lookup_field': 'SiteName'} } diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index f53981a0..94c0d4b8 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -7,7 +7,6 @@ router.register(r'cloud', views.CloudSiteViewSet) router.register(r'grid', views.GridSiteViewSet) router.register(r'gridsync', views.GridSiteSyncViewSet) -router.register(r'gridsync', views.GridSiteSyncSubmitHViewSet) urlpatterns = [ re_path( @@ -23,7 +22,7 @@ re_path( r'^gridsync/(?P[a-zA-Z0-9.-]+)/$', views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), - name='gridsync_singlesite' + name='gridsitesync-detail' ), re_path( r'^gridsync/(?P[a-zA-Z0-9.-]+)/(?P[0-9-]+)/$', diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index c492f88b..ac47b780 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -9,6 +9,7 @@ from rest_framework import viewsets, generics from rest_framework.renderers import TemplateHTMLRenderer +from rest_framework.response import Response from monitoring.publishing.models import ( GridSite, @@ -399,12 +400,14 @@ def retrieve(self, request, SiteName=None): else: 
print('No need to update') - response = super(GridSiteSyncViewSet, self).list(request) - response.data = { - 'records': response.data, + sites_list_qs = GridSiteSync.objects.filter(SiteName=SiteName) + sites_list_serializer = self.get_serializer(sites_list_qs, many=True) + + response = { + 'records': sites_list_serializer.data, 'last_fetched': last_fetched } - return response + return Response(response) # Needed for passing two parameters to a viewset (GridSiteSyncSubmitHViewSet) From dcf6013a98e1167a2692eaac91d14881cf947ec8 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Thu, 27 Feb 2025 01:03:07 +0000 Subject: [PATCH 105/183] Update code to make gridsync work with sitename and yearmonth - Fixed issue with HyperlinkedModelSerializer, it seems to NOT able to work with two lookup_fields before. - Simplified the split() functionality. Desc --- monitoring/publishing/serializers.py | 24 +++++++++++++++++++++--- monitoring/publishing/urls.py | 2 +- monitoring/publishing/views.py | 6 +++--- 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index a4092d15..c0e6b5f1 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -1,5 +1,5 @@ from rest_framework import serializers - +from rest_framework.reverse import reverse from monitoring.publishing.models import ( CloudSite, GridSite, @@ -80,6 +80,26 @@ class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): # ouput format. Encoding will be determined by the renderer and can be # formatted by a template filter. 
+ # We need this because HyperlinkedModelSerializer seems to NOT able to work with two lookup_fields + class MultipleFieldLookup(serializers.HyperlinkedIdentityField): + # To match or construct the absolute URL based on the `SiteName` and `YearMonth` + def get_url(self, obj, view_name, request, format): + if not obj.SiteName or not obj.YearMonth: + return None + + return request.build_absolute_uri( + reverse( + view_name, + kwargs={ + 'SiteName': obj.SiteName, + 'YearMonth': obj.YearMonth + }, + request=request, + format=format + )) + + url = MultipleFieldLookup(view_name='gridsync-submithost') + class Meta: model = GridSiteSyncSubmitH fields = ( @@ -92,5 +112,3 @@ class Meta: 'RecordCountInDb', 'SubmitHost' ) - - lookup_fields = ('SiteName', 'YearMonth') diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 94c0d4b8..09f7e30d 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -27,7 +27,7 @@ re_path( r'^gridsync/(?P[a-zA-Z0-9.-]+)/(?P[0-9-]+)/$', views.GridSiteSyncSubmitHViewSet.as_view({'get': 'retrieve'}), - name='gridsync_submithost' + name='gridsync-submithost' ), ] diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index ac47b780..6de1a06d 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -432,6 +432,7 @@ class GridSiteSyncSubmitHViewSet(MultipleFieldLookupMixin, viewsets.ReadOnlyMode queryset = GridSiteSyncSubmitH.objects.all() serializer_class = GridSiteSyncSubmitHSerializer template_name = 'gridsync_submithost.html' + lookup_fields = ('SiteName', 'YearMonth') def list(self, request): last_fetched = GridSiteSyncSubmitH.objects.aggregate(Max('fetched'))['fetched__max'] @@ -443,10 +444,8 @@ def list(self, request): return response def retrieve(self, request, SiteName=None, YearMonth=None): - - lookup_fields = ('SiteName', 'YearMonth') last_fetched = GridSiteSyncSubmitH.objects.aggregate(Max('fetched'))['fetched__max'] - Year, Month = 
YearMonth.replace('-', ' ').split(' ') + Year, Month = YearMonth.split('-') sitename_in_table = None yearmonth_in_table = None @@ -516,6 +515,7 @@ def retrieve(self, request, SiteName=None, YearMonth=None): right_on=['Site', 'Month', 'Year', 'SubmitHostSync'], how='outer' ) + fetchset = df_all.to_dict('index') # This is to list only data for one month From cb63154f0716567f4cb61de558b1b00ee38ac6a3 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Thu, 27 Feb 2025 10:00:12 +0000 Subject: [PATCH 106/183] Fix codeclimate issues --- monitoring/publishing/serializers.py | 6 ++++-- monitoring/publishing/views.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index c0e6b5f1..71b4cd71 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -25,9 +25,10 @@ class Meta: # Sitename substitutes pk lookup_field = 'SiteName' extra_kwargs = { - 'url': {'view_name': 'gridsite-detail','lookup_field': 'SiteName'} + 'url': {'view_name': 'gridsite-detail', 'lookup_field': 'SiteName'} } + class GridSiteSyncSerializer(serializers.HyperlinkedModelSerializer): # Override default format with None so that Python datetime is used as # ouput format. Encoding will be determined by the renderer and can be @@ -75,6 +76,7 @@ class Meta: 'url': {'view_name': 'cloudsite-detail', 'lookup_field': 'SiteName'} } + class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): # Override default format with None so that Python datetime is used as # ouput format. 
Encoding will be determined by the renderer and can be @@ -92,7 +94,7 @@ def get_url(self, obj, view_name, request, format): view_name, kwargs={ 'SiteName': obj.SiteName, - 'YearMonth': obj.YearMonth + 'YearMonth': obj.YearMonth }, request=request, format=format diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 6de1a06d..61d639c0 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -402,7 +402,7 @@ def retrieve(self, request, SiteName=None): sites_list_qs = GridSiteSync.objects.filter(SiteName=SiteName) sites_list_serializer = self.get_serializer(sites_list_qs, many=True) - + response = { 'records': sites_list_serializer.data, 'last_fetched': last_fetched From 62edf14fb403ca3b4ebbe3b2103c8e920e25fadb Mon Sep 17 00:00:00 2001 From: Sae126V Date: Thu, 27 Feb 2025 17:04:19 +0000 Subject: [PATCH 107/183] Move MultipleFieldLookup class outside --- monitoring/publishing/serializers.py | 42 ++++++++++++++++------------ 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 71b4cd71..74b70bd9 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -1,5 +1,6 @@ from rest_framework import serializers from rest_framework.reverse import reverse + from monitoring.publishing.models import ( CloudSite, GridSite, @@ -77,29 +78,34 @@ class Meta: } +class MultipleFieldLookup(serializers.HyperlinkedIdentityField): + # HyperlinkedModelSerializer seems to NOT able to work with two lookup_fields + # This class is ONLY capable to match object instance to its URL representation. + # i.e, `SiteName` and `YearMonth` ONLY + # + # Overriding the get_url() method - To match object instance to its URL representation. 
+ def get_url(self, obj, view_name, request, format): + if not obj.SiteName or not obj.YearMonth: + return None + + return request.build_absolute_uri( + reverse( + view_name, + kwargs={ + 'SiteName': obj.SiteName, + 'YearMonth': obj.YearMonth + }, + request=request, + format=format + )) + + class GridSiteSyncSubmitHSerializer(serializers.HyperlinkedModelSerializer): # Override default format with None so that Python datetime is used as # ouput format. Encoding will be determined by the renderer and can be # formatted by a template filter. - # We need this because HyperlinkedModelSerializer seems to NOT able to work with two lookup_fields - class MultipleFieldLookup(serializers.HyperlinkedIdentityField): - # To match or construct the absolute URL based on the `SiteName` and `YearMonth` - def get_url(self, obj, view_name, request, format): - if not obj.SiteName or not obj.YearMonth: - return None - - return request.build_absolute_uri( - reverse( - view_name, - kwargs={ - 'SiteName': obj.SiteName, - 'YearMonth': obj.YearMonth - }, - request=request, - format=format - )) - + # This helps us to match or construct the absolute URL based on the `SiteName` and `YearMonth` url = MultipleFieldLookup(view_name='gridsync-submithost') class Meta: From a1821cf02bfd1263aff57263d28d8e050ba26448 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Wed, 19 Mar 2025 16:01:08 +0000 Subject: [PATCH 108/183] Update code to allow underscore in an regex pattern --- monitoring/publishing/urls.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/monitoring/publishing/urls.py b/monitoring/publishing/urls.py index 09f7e30d..cf0f6429 100644 --- a/monitoring/publishing/urls.py +++ b/monitoring/publishing/urls.py @@ -10,22 +10,22 @@ urlpatterns = [ re_path( - r'^cloud/(?P[a-zA-Z0-9.-]+)/$', + r'^cloud/(?P[a-zA-Z0-9._-]+)/$', views.CloudSiteViewSet.as_view({'get': 'retrieve'}), name='cloudsite-detail' ), re_path( - r'^grid/(?P[a-zA-Z0-9.-]+)/$', + r'^grid/(?P[a-zA-Z0-9._-]+)/$', 
views.GridSiteViewSet.as_view({'get': 'retrieve'}), name='gridsite-detail' ), re_path( - r'^gridsync/(?P[a-zA-Z0-9.-]+)/$', + r'^gridsync/(?P[a-zA-Z0-9._-]+)/$', views.GridSiteSyncViewSet.as_view({'get': 'retrieve'}), name='gridsitesync-detail' ), re_path( - r'^gridsync/(?P[a-zA-Z0-9.-]+)/(?P[0-9-]+)/$', + r'^gridsync/(?P[a-zA-Z0-9._-]+)/(?P[0-9-]+)/$', views.GridSiteSyncSubmitHViewSet.as_view({'get': 'retrieve'}), name='gridsync-submithost' ), From 2d278b134d78d11148a88675d3d92cbbb081cf55 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Mon, 14 Apr 2025 11:17:28 +0100 Subject: [PATCH 109/183] Update code to fix grammer for the comments --- monitoring/publishing/serializers.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index 74b70bd9..f3c42a70 100644 --- a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -23,7 +23,7 @@ class Meta: 'updated' ) - # Sitename substitutes pk + # Sitename substitutes for pk lookup_field = 'SiteName' extra_kwargs = { 'url': {'view_name': 'gridsite-detail', 'lookup_field': 'SiteName'} @@ -48,7 +48,7 @@ class Meta: 'SyncStatus' ) - # Sitename substitutes pk + # Sitename substitutes for pk lookup_field = 'SiteName' extra_kwargs = { 'url': {'view_name': 'gridsitesync-detail', 'lookup_field': 'SiteName'} @@ -71,7 +71,7 @@ class Meta: 'updated' ) - # Sitename substitutes pk + # Sitename substitutes for pk lookup_field = 'SiteName' extra_kwargs = { 'url': {'view_name': 'cloudsite-detail', 'lookup_field': 'SiteName'} From 3b5c92f2650a90199d82f5532c26ecc4db9a835d Mon Sep 17 00:00:00 2001 From: Sae126V Date: Tue, 29 Apr 2025 08:45:12 +0100 Subject: [PATCH 110/183] Remove lookup_field as it is been overridden --- monitoring/publishing/serializers.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/monitoring/publishing/serializers.py b/monitoring/publishing/serializers.py index f3c42a70..b273462c 100644 --- 
a/monitoring/publishing/serializers.py +++ b/monitoring/publishing/serializers.py @@ -24,7 +24,6 @@ class Meta: ) # Sitename substitutes for pk - lookup_field = 'SiteName' extra_kwargs = { 'url': {'view_name': 'gridsite-detail', 'lookup_field': 'SiteName'} } @@ -49,7 +48,6 @@ class Meta: ) # Sitename substitutes for pk - lookup_field = 'SiteName' extra_kwargs = { 'url': {'view_name': 'gridsitesync-detail', 'lookup_field': 'SiteName'} } @@ -72,7 +70,6 @@ class Meta: ) # Sitename substitutes for pk - lookup_field = 'SiteName' extra_kwargs = { 'url': {'view_name': 'cloudsite-detail', 'lookup_field': 'SiteName'} } From b7fd803de0e9d9b17ef1cffabadad2fbea039c00 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Wed, 30 Apr 2025 14:20:01 +0100 Subject: [PATCH 111/183] Update code to make html queryparams work --- monitoring/publishing/templates/gridsites.html | 13 +++++++------ monitoring/publishing/templates/gridsync.html | 2 +- .../publishing/templates/gridsync_singlesite.html | 2 +- monitoring/publishing/views.py | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index 8abf32b5..f2330a9b 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -5,7 +5,7 @@ -APEL Publication Summary : {{ sites.0.name }} +APEL Publication Summary @@ -19,19 +19,20 @@

    APEL Publication Test

  • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}
    - + {% for site in sites %} - - - - + + + + + {% endfor %}
    {{ sites.0.SiteName }}
    ExecutingSite MeasurementDate MeasurementTime Publication
    Status
    {{ sites.0.SiteName }}{{ last_fetched|date:"Y-m-d" }}{{ last_fetched|date:"G:i:s" }}{{ stdout }}{{ site.SiteName }}{{ site.updated|date:"Y-m-d" }}{{ site.updated|date:"G:i:s" }}{{ site.stdout }}
    diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index 7edccdab..cf9f25bf 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -31,7 +31,7 @@

    APEL Synchronisation Test

    {% for record in records %} - {{ record.SiteName }} + {{ record.SiteName }} {{ record.YearMonth }} {{ record.RecordStart }} {{ record.RecordEnd }} diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html index f6e9dc5b..169a043b 100644 --- a/monitoring/publishing/templates/gridsync_singlesite.html +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -30,7 +30,7 @@

    APEL Synchronisation Test

    {% for record in records %} - {{ record.YearMonth }} + {{ record.YearMonth }} {{ record.RecordStart }} {{ record.RecordEnd }} {{ record.RecordCountPublished }} diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 61d639c0..2a44da94 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -194,6 +194,7 @@ def retrieve(self, request, SiteName=None): response = super(GridSiteViewSet, self).retrieve(request) date = response.data['updated'].replace(tzinfo=None) + response.data = update_dict_stdout_and_returncode(response.data, date) # Wrap data in a dict so that it can display in template. if type(request.accepted_renderer) is TemplateHTMLRenderer: @@ -203,7 +204,6 @@ def retrieve(self, request, SiteName=None): 'last_fetched': last_fetched } - response.data = update_dict_stdout_and_returncode(response.data, date) return response From 047d59bb9fd2c0ffd8f186ce47499b18f3383ba3 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Tue, 29 Apr 2025 10:18:39 +0100 Subject: [PATCH 112/183] Initial commit to work around slow SQL queries This change adds a standalone script to update the local Django database from the APEL database on a cron. This means that the webpages remain responsive. --- monitoring/publishing/db_update_sqlite.py | 279 ++++++++++++++++++++++ monitoring/publishing/views.py | 250 +------------------ monitoring/settings.ini | 2 + 3 files changed, 283 insertions(+), 248 deletions(-) create mode 100644 monitoring/publishing/db_update_sqlite.py diff --git a/monitoring/publishing/db_update_sqlite.py b/monitoring/publishing/db_update_sqlite.py new file mode 100644 index 00000000..71b58826 --- /dev/null +++ b/monitoring/publishing/db_update_sqlite.py @@ -0,0 +1,279 @@ +# -*- coding: utf-8 -*- +""" +`db_update_sqlite.py` - Syncs data from external database into local SQLite DB. + - It will be run as a standalone operation via cron. 
+""" +import configparser +import logging +import os +import sys + +import pandas as pd +import django +from django.db import DatabaseError + + +BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) + +# Find the root and the Django project +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))) + +try: + # Read configuration from the file + cp = configparser.ConfigParser(interpolation=None) + file_path = os.path.join(BASE_DIR, 'monitoring', 'settings.ini') + cp.read(file_path) + +except (configparser.NoSectionError) as err: + print("Error in configuration file. Check that file exists first: %s" % err) + sys.exit(1) + +# Set up basic logging config +logging.basicConfig( + filename=cp.get('common', 'logfile'), + level=logging.INFO, + format='%(asctime)s - %(levelname)s - %(message)s' +) + +# set up the logger +log = logging.getLogger(__name__) + +# Set up Django settings to run this `db_update_sqlite.py` as standalone file +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "monitoring.settings") + +# Initialize and setup Django +django.setup() + + +from monitoring.publishing.models import ( + GridSite, + CloudSite, + GridSiteSync, + VAnonCloudRecord, + VSuperSummaries, + VSyncRecords +) + +summaries_dict_standard = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountPublished": [], + "RecordStart": [], + "RecordEnd": [], + "SubmitHostSumm": [], +} + +syncrecords_dict_standard = { + "Site": [], + "Month": [], + "Year": [], + "RecordCountInDb": [], + "SubmitHostSync": [] +} + + +def fill_summaries_dict(inpDict, row): + fields_to_update_and_value_to_add = { + "Site": row.Site, + "Month": row.Month, + "Year": row.Year, + "RecordCountPublished": row.RecordCountPublished, + "RecordStart": row.RecordStart, + "RecordEnd": row.RecordEnd, + } + + for field, value in fields_to_update_and_value_to_add.items(): + inpDict[field] = inpDict.get(field) + [value] + + if hasattr(row, "SubmitHostSumm"): + 
inpDict["SubmitHostSumm"] = inpDict.get("SubmitHostSumm") + [row.SubmitHostSumm] + + return inpDict + +def fill_syncrecords_dict(inpDict, row): + inpDict["Site"] = inpDict.get("Site") + [row.Site] + inpDict["Month"] = inpDict.get("Month") + [row.Month] + inpDict["Year"] = inpDict.get("Year") + [row.Year] + inpDict["RecordCountInDb"] = inpDict.get("RecordCountInDb") + [row.RecordCountInDb] + if hasattr(row, "SubmitHostSync"): + inpDict["SubmitHostSync"] = inpDict.get("SubmitHostSync") + [row.SubmitHostSync] + return inpDict + +def correct_dict(inpDict): + keys_to_remove = [] + for key, val in inpDict.items(): + if len(val) == 0: + keys_to_remove.append(key) + for key in keys_to_remove: + inpDict.pop(key) + return inpDict + +def get_year_month_str(year, month): + year_string = str(year) + month_string = str(month) + if len(month_string) == 1: + month_string = '0' + month_string + return year_string + '-' + month_string + +def determine_sync_status(f): + RecordCountPublished = f.get("RecordCountPublished") + RecordCountInDb = f.get("RecordCountInDb") + rel_diff1 = abs(RecordCountPublished - RecordCountInDb) / RecordCountInDb + rel_diff2 = abs(RecordCountPublished - RecordCountInDb) / RecordCountPublished + if rel_diff1 < 0.01 or rel_diff2 < 0.01: + return "OK" + return "ERROR [ Please use the Gap Publisher to synchronise this dataset]" + + +def refresh_gridsite(): + try: + sql_query = """ + SELECT + Site, + max(LatestEndTime) AS LatestPublish + FROM VSuperSummaries + GROUP BY 1; + """ + fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) + + for f in fetchset: + GridSite.objects.update_or_create( + defaults={'updated': f.LatestPublish}, + SiteName=f.Site + ) + + log.info("Refreshed GridSite") + + except DatabaseError: + log.exception('Error while trying to refresh GridSite') + + +def refresh_cloudsite(): + try: + sql_query = """ + SELECT + b.SiteName, + COUNT(DISTINCT VMUUID) as VMs, + CloudType, + b.UpdateTime + FROM( + SELECT + SiteName, + 
MAX(UpdateTime) AS latest + FROM VAnonCloudRecords + WHERE UpdateTime>'2023-01-01' + GROUP BY SiteName + ) + AS a + INNER JOIN VAnonCloudRecords + AS b + ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest + GROUP BY SiteName; + """ + fetchset = VAnonCloudRecord.objects.using('cloud').raw(sql_query) + + for f in fetchset: + CloudSite.objects.update_or_create( + defaults={ + 'Vms': f.VMs, + 'Script': f.CloudType, + 'updated': f.UpdateTime + }, + SiteName=f.SiteName + ) + + log.info("Refreshed CloudSite") + + except DatabaseError: + log.exception('Error while trying to refresh CloudSite') + + +def refresh_gridsitesync(): + try: + # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' + sql_query_summaries = """ + SELECT + Site, + Month, + Year, + SUM(NumberOfJobs) AS RecordCountPublished, + MIN(EarliestEndTime) AS RecordStart, + MAX(LatestEndTime) AS RecordEnd + FROM VSuperSummaries + WHERE + EarliestEndTime>'1900-01-01' AND + LatestEndTime>'1900-01-01' + GROUP BY + Site, Year, Month; + """ + fetchset_Summaries = VSuperSummaries.objects.using('grid').raw(sql_query_summaries) + + sql_query_syncrec = """ + SELECT + Site, + Month, + Year, + SUM(NumberOfJobs) AS RecordCountInDb + FROM VSyncRecords + GROUP BY + Site, Year, Month; + """ + fetchset_SyncRecords = VSyncRecords.objects.using('grid').raw(sql_query_syncrec) + + # Create empty dicts that will become dfs to be combined + summaries_dict = summaries_dict_standard.copy() + syncrecords_dict = syncrecords_dict_standard.copy() + + # Fill the dicts with the fetched data + for row in fetchset_Summaries: + summaries_dict = fill_summaries_dict(summaries_dict, row) + summaries_dict = correct_dict(summaries_dict) + for row in fetchset_SyncRecords: + syncrecords_dict = fill_syncrecords_dict(syncrecords_dict, row) + syncrecords_dict = correct_dict(syncrecords_dict) + + # Merge data from VSuperSummaries and VSyncRecords into one df + df_Summaries = 
pd.DataFrame.from_dict(summaries_dict) + df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) + df_all = df_Summaries.merge( + df_SyncRecords, + left_on=['Site', 'Month', 'Year'], + right_on=['Site', 'Month', 'Year'], + how='inner' + ) + fetchset = df_all.to_dict('index') + + # Determine SyncStatus based on the difference between records published and in db + for f in fetchset.values(): + f['SyncStatus'] = determine_sync_status(f) + + # Combined primary keys outside the default dict + GridSiteSync.objects.update_or_create( + defaults={ + 'RecordStart': f.get("RecordStart"), + 'RecordEnd': f.get("RecordEnd"), + 'RecordCountPublished': f.get("RecordCountPublished"), + 'RecordCountInDb': f.get("RecordCountInDb"), + 'SyncStatus': f.get("SyncStatus"), + }, + YearMonth=get_year_month_str(f.get("Year"), f.get("Month")), + SiteName=f.get("Site"), + Month=f.get("Month"), + Year=f.get("Year"), + ) + log.info("Refreshed GridSiteSync") + + except DatabaseError: + log.exception('Error while trying to refresh GridSiteSync') + +if __name__ == "__main__": + + refresh_gridsite() + refresh_cloudsite() + refresh_gridsitesync() + + log.info( + "Data retrieval from the database backend(s) is completed and " + "successfully synchronized with the local SQLite database." 
+ ) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 2a44da94..37fc0141 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -7,15 +7,15 @@ from django.shortcuts import get_object_or_404 import pandas as pd -from rest_framework import viewsets, generics +from rest_framework import viewsets from rest_framework.renderers import TemplateHTMLRenderer from rest_framework.response import Response + from monitoring.publishing.models import ( GridSite, VSuperSummaries, CloudSite, - VAnonCloudRecord, GridSiteSync, VSyncRecords, GridSiteSyncSubmitH @@ -134,23 +134,6 @@ def list(self, request): last_fetched = GridSite.objects.aggregate(Max('fetched'))['fetched__max'] if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): - sql_query = """ - SELECT - Site, - max(LatestEndTime) AS LatestPublish - FROM VSuperSummaries - GROUP BY 1; - """ - fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) - - for f in fetchset: - GridSite.objects.update_or_create( - defaults={'updated': f.LatestPublish}, - SiteName=f.Site - ) - else: - print('No need to update') final_response = [] response = super(GridSiteViewSet, self).list(request) @@ -173,24 +156,6 @@ def retrieve(self, request, SiteName=None): # If there's no data then last_fetched is None. 
if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): - print('Out of date') - sql_query = """ - SELECT - Site, - max(LatestEndTime) AS LatestPublish - FROM VSuperSummaries - GROUP BY 1; - """ - fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) - - for f in fetchset: - GridSite.objects.update_or_create( - defaults={'updated': f.LatestPublish}, - SiteName=f.Site - ) - else: - print('No need to update') response = super(GridSiteViewSet, self).retrieve(request) date = response.data['updated'].replace(tzinfo=None) @@ -222,89 +187,9 @@ def get_template_names(self): def list(self, request): last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] - n_sites = GridSiteSync.objects.values('SiteName').distinct().count() if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or n_sites == 1: - print('Out of date') - - # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' - sql_query_summaries = """ - SELECT - Site, - Month, Year, - SUM(NumberOfJobs) AS RecordCountPublished, - MIN(EarliestEndTime) AS RecordStart, - MAX(LatestEndTime) AS RecordEnd - FROM VSuperSummaries - WHERE - EarliestEndTime>'1900-01-01' AND - LatestEndTime>'1900-01-01' - GROUP BY - Site, Year, Month; - """ - fetchset_Summaries = VSuperSummaries.objects.using('grid').raw(sql_query_summaries) - - sql_query_syncrec = """ - SELECT - Site, - Month, - Year, - SUM(NumberOfJobs) AS RecordCountInDb - FROM VSyncRecords - GROUP BY - Site, Year, Month; - """ - fetchset_SyncRecords = VSyncRecords.objects.using('grid').raw(sql_query_syncrec) - - # Create 
empty dicts that will become dfs to be combined - summaries_dict = summaries_dict_standard.copy() - syncrecords_dict = syncrecords_dict_standard.copy() - - # Fill the dicts with the fetched data - for row in fetchset_Summaries: - summaries_dict = fill_summaries_dict(summaries_dict, row) - summaries_dict = correct_dict(summaries_dict) - for row in fetchset_SyncRecords: - syncrecords_dict = fill_syncrecords_dict(syncrecords_dict, row) - syncrecords_dict = correct_dict(syncrecords_dict) - - # Merge data from VSuperSummaries and VSyncRecords into one df - df_Summaries = pd.DataFrame.from_dict(summaries_dict) - df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) - df_all = df_Summaries.merge( - df_SyncRecords, - left_on=['Site', 'Month', 'Year'], - right_on=['Site', 'Month', 'Year'], - how='inner' - ) - fetchset = df_all.to_dict('index') - - # Delete all data if table not empty (as this function lists all sites) - GridSiteSync.objects.all().delete() - - # Determine SyncStatus based on the difference between records published and in db - for f in fetchset.values(): - f['SyncStatus'] = determine_sync_status(f) - - # Combined primary keys outside the default dict - GridSiteSync.objects.update_or_create( - defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), - 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), - 'SyncStatus': f.get("SyncStatus"), - }, - YearMonth=get_year_month_str(f.get("Year"), f.get("Month")), - SiteName=f.get("Site"), - Month=f.get("Month"), - Year=f.get("Year"), - ) - - else: - print('No need to update') response = super(GridSiteSyncViewSet, self).list(request) response.data = { @@ -315,90 +200,9 @@ def list(self, request): def retrieve(self, request, SiteName=None): last_fetched = GridSiteSync.objects.aggregate(Max('fetched'))['fetched__max'] - row_1 = GridSiteSync.objects.filter()[:1].get() - n_sites = GridSiteSync.objects.values('SiteName').distinct().count() if 
last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)) or n_sites > 1 or SiteName != row_1.SiteName: - print('Out of date') - - # The condition on EarliestEndTime and LatestEndTime is necessary to avoid error by pytz because of dates like '00-00-00' - sql_query_summaries = """ - SELECT - Site, - Month, - Year, - SUM(NumberOfJobs) AS RecordCountPublished, - MIN(EarliestEndTime) AS RecordStart, - MAX(LatestEndTime) AS RecordEnd - FROM VSuperSummaries - WHERE - Site='{}' AND - EarliestEndTime>'1900-01-01' AND - LatestEndTime>'1900-01-01' - GROUP BY - Site, Year, Month; - """.format(SiteName) - fetchset_Summaries = VSuperSummaries.objects.using('grid').raw(sql_query_summaries) - - sql_query_syncrecords = """ - SELECT - Site, - Month, - Year, - SUM(NumberOfJobs) AS RecordCountInDb - FROM VSyncRecords - WHERE Site='{}' - GROUP BY - Site, Year, Month; - """.format(SiteName) - fetchset_SyncRecords = VSyncRecords.objects.using('grid').raw(sql_query_syncrecords) - - summaries_dict = summaries_dict_standard.copy() - syncrecords_dict = syncrecords_dict_standard.copy() - - for row in fetchset_Summaries: - summaries_dict = fill_summaries_dict(summaries_dict, row) - summaries_dict = correct_dict(summaries_dict) - for row in fetchset_SyncRecords: - syncrecords_dict = fill_syncrecords_dict(syncrecords_dict, row) - syncrecords_dict = correct_dict(syncrecords_dict) - - df_Summaries = pd.DataFrame.from_dict(summaries_dict) - df_SyncRecords = pd.DataFrame.from_dict(syncrecords_dict) - df_all = df_Summaries.merge( - df_SyncRecords, - left_on=['Site', 'Month', 'Year'], - right_on=['Site', 'Month', 'Year'], - how='inner' - ) - fetchset = df_all.to_dict('index') - - # Ensure we list only the data for one site - first_row = GridSiteSync.objects.first() - if hasattr(first_row, "SiteName") and first_row.SiteName != 
SiteName: - GridSiteSync.objects.all().delete() - - for f in fetchset.values(): - f['SyncStatus'] = determine_sync_status(f) - - GridSiteSync.objects.update_or_create( - defaults={ - 'RecordStart': f.get("RecordStart"), - 'RecordEnd': f.get("RecordEnd"), - 'RecordCountPublished': f.get("RecordCountPublished"), - 'RecordCountInDb': f.get("RecordCountInDb"), - 'SyncStatus': f.get("SyncStatus"), - }, - YearMonth=get_year_month_str(f.get("Year"), f.get("Month")), - SiteName=f.get("Site"), - Month=f.get("Month"), - Year=f.get("Year"), - ) - - else: - print('No need to update') sites_list_qs = GridSiteSync.objects.filter(SiteName=SiteName) sites_list_serializer = self.get_serializer(sites_list_qs, many=True) @@ -557,42 +361,6 @@ def list(self, request): last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched is None or (last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20))): - print('Out of date') - - sql_query = """ - SELECT - b.SiteName, - COUNT(DISTINCT VMUUID) as VMs, - CloudType, - b.UpdateTime - FROM( - SELECT - SiteName, - MAX(UpdateTime) AS latest - FROM VAnonCloudRecords - WHERE UpdateTime>'2023-01-01' - GROUP BY SiteName - ) - AS a - INNER JOIN VAnonCloudRecords - AS b - ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest - GROUP BY SiteName; - """ - fetchset = VAnonCloudRecord.objects.using('cloud').raw(sql_query) - - for f in fetchset: - CloudSite.objects.update_or_create( - defaults={ - 'Vms': f.VMs, - 'Script': f.CloudType, - 'updated': f.UpdateTime - }, - SiteName=f.SiteName - ) - else: - print('No need to update') response = super(CloudSiteViewSet, self).list(request) # Wrap data in a dict so that it can display in template. 
@@ -606,20 +374,6 @@ def list(self, request): def retrieve(self, request, SiteName=None): last_fetched = CloudSite.objects.aggregate(Max('fetched'))['fetched__max'] print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - if last_fetched.replace(tzinfo=None) < (datetime.today() - timedelta(hours=1, seconds=20)): - print('Out of date') - fetchset = VAnonCloudRecord.objects.using('cloud').raw("SELECT b.SiteName, COUNT(DISTINCT VMUUID) as VMs, CloudType, b.UpdateTime FROM (SELECT SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords WHERE UpdateTime>'2018-07-25' GROUP BY SiteName) AS a INNER JOIN VAnonCloudRecords AS b ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName") - for f in fetchset: - CloudSite.objects.update_or_create( - defaults={ - 'Vms': f.VMs, - 'Script': f.CloudType, - 'updated': f.UpdateTime - }, - SiteName=f.SiteName - ) - else: - print('No need to update') response = super(CloudSiteViewSet, self).retrieve(request) # Wrap data in a dict so that it can display in template. diff --git a/monitoring/settings.ini b/monitoring/settings.ini index 222b25c2..fdb8b354 100644 --- a/monitoring/settings.ini +++ b/monitoring/settings.ini @@ -7,6 +7,8 @@ secret_key = # `allowed_hosts` values should be comma separated list of hostnames (fqdn's) allowed_hosts = +# Path to the log file for `db_update_sqlite.py` execution info. +logfile = # Information about the database connection - grid [db_grid] From 3d0cec1b90da34b39b8e9423329fe68f73b2cdef Mon Sep 17 00:00:00 2001 From: Sae126V Date: Fri, 2 May 2025 09:18:44 +0100 Subject: [PATCH 113/183] Add pagination to gridsync records This page returns a large number of results so needs pagination to not grind to a halt. 
--- monitoring/publishing/db_update_sqlite.py | 4 ++++ monitoring/publishing/templates/gridsync.html | 2 +- monitoring/publishing/views.py | 6 ++++++ 3 files changed, 11 insertions(+), 1 deletion(-) diff --git a/monitoring/publishing/db_update_sqlite.py b/monitoring/publishing/db_update_sqlite.py index 71b58826..0d415aa8 100644 --- a/monitoring/publishing/db_update_sqlite.py +++ b/monitoring/publishing/db_update_sqlite.py @@ -91,6 +91,7 @@ def fill_summaries_dict(inpDict, row): return inpDict + def fill_syncrecords_dict(inpDict, row): inpDict["Site"] = inpDict.get("Site") + [row.Site] inpDict["Month"] = inpDict.get("Month") + [row.Month] @@ -100,6 +101,7 @@ def fill_syncrecords_dict(inpDict, row): inpDict["SubmitHostSync"] = inpDict.get("SubmitHostSync") + [row.SubmitHostSync] return inpDict + def correct_dict(inpDict): keys_to_remove = [] for key, val in inpDict.items(): @@ -109,6 +111,7 @@ def correct_dict(inpDict): inpDict.pop(key) return inpDict + def get_year_month_str(year, month): year_string = str(year) month_string = str(month) @@ -267,6 +270,7 @@ def refresh_gridsitesync(): except DatabaseError: log.exception('Error while trying to refresh GridSiteSync') + if __name__ == "__main__": refresh_gridsite() diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index cf9f25bf..c663b1c1 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -29,7 +29,7 @@

    APEL Synchronisation Test

    Synchronisation
    Status - {% for record in records %} + {% for record in records.results %} {{ record.SiteName }} {{ record.YearMonth }} diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 37fc0141..8732b21f 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -10,6 +10,7 @@ from rest_framework import viewsets from rest_framework.renderers import TemplateHTMLRenderer from rest_framework.response import Response +from rest_framework.pagination import PageNumberPagination from monitoring.publishing.models import ( @@ -172,10 +173,15 @@ def retrieve(self, request, SiteName=None): return response +class GridSiteSyncPagination(PageNumberPagination): + page_size = 1000 # Number of items to be fetched per page + + class GridSiteSyncViewSet(viewsets.ReadOnlyModelViewSet): queryset = GridSiteSync.objects.all() serializer_class = GridSiteSyncSerializer lookup_field = 'SiteName' + pagination_class = GridSiteSyncPagination # When a single site is showed (retrieve function used), the template # is different than the one used when showing a list of sites From 69f4b74ff4c2db3e7b1508a8382e140b0c2d0273 Mon Sep 17 00:00:00 2001 From: Sae126V Date: Thu, 8 May 2025 18:20:40 +0100 Subject: [PATCH 114/183] Improve styling and comments, and reuse functions --- monitoring/publishing/db_update_sqlite.py | 131 ++++++---------------- monitoring/publishing/views.py | 2 +- 2 files changed, 36 insertions(+), 97 deletions(-) diff --git a/monitoring/publishing/db_update_sqlite.py b/monitoring/publishing/db_update_sqlite.py index 0d415aa8..1f650769 100644 --- a/monitoring/publishing/db_update_sqlite.py +++ b/monitoring/publishing/db_update_sqlite.py @@ -8,15 +8,44 @@ import os import sys -import pandas as pd import django from django.db import DatabaseError +import pandas as pd -BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) # 
Find the root and the Django project -sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))) +sys.path.append(BASE_DIR) + +# Set up Django settings to run this `db_update_sqlite.py` as standalone file +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "monitoring.settings") + +# Initialize and setup Django +django.setup() + + +# Cron jobs run in a minimal environment and lack access to Django settings. +# To ensure proper model imports and database interactions, we MUST initialize and setup Django first. +from monitoring.publishing.models import ( + GridSite, + CloudSite, + GridSiteSync, + VAnonCloudRecord, + VSuperSummaries, + VSyncRecords +) + +from monitoring.publishing.views import ( + summaries_dict_standard, + syncrecords_dict_standard, + correct_dict, + determine_sync_status, + fill_summaries_dict, + fill_syncrecords_dict, + get_year_month_str +) + try: # Read configuration from the file @@ -38,96 +67,6 @@ # set up the logger log = logging.getLogger(__name__) -# Set up Django settings to run this `db_update_sqlite.py` as standalone file -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "monitoring.settings") - -# Initialize and setup Django -django.setup() - - -from monitoring.publishing.models import ( - GridSite, - CloudSite, - GridSiteSync, - VAnonCloudRecord, - VSuperSummaries, - VSyncRecords -) - -summaries_dict_standard = { - "Site": [], - "Month": [], - "Year": [], - "RecordCountPublished": [], - "RecordStart": [], - "RecordEnd": [], - "SubmitHostSumm": [], -} - -syncrecords_dict_standard = { - "Site": [], - "Month": [], - "Year": [], - "RecordCountInDb": [], - "SubmitHostSync": [] -} - - -def fill_summaries_dict(inpDict, row): - fields_to_update_and_value_to_add = { - "Site": row.Site, - "Month": row.Month, - "Year": row.Year, - "RecordCountPublished": row.RecordCountPublished, - "RecordStart": row.RecordStart, - "RecordEnd": row.RecordEnd, - } - - for field, value in fields_to_update_and_value_to_add.items(): - 
inpDict[field] = inpDict.get(field) + [value] - - if hasattr(row, "SubmitHostSumm"): - inpDict["SubmitHostSumm"] = inpDict.get("SubmitHostSumm") + [row.SubmitHostSumm] - - return inpDict - - -def fill_syncrecords_dict(inpDict, row): - inpDict["Site"] = inpDict.get("Site") + [row.Site] - inpDict["Month"] = inpDict.get("Month") + [row.Month] - inpDict["Year"] = inpDict.get("Year") + [row.Year] - inpDict["RecordCountInDb"] = inpDict.get("RecordCountInDb") + [row.RecordCountInDb] - if hasattr(row, "SubmitHostSync"): - inpDict["SubmitHostSync"] = inpDict.get("SubmitHostSync") + [row.SubmitHostSync] - return inpDict - - -def correct_dict(inpDict): - keys_to_remove = [] - for key, val in inpDict.items(): - if len(val) == 0: - keys_to_remove.append(key) - for key in keys_to_remove: - inpDict.pop(key) - return inpDict - - -def get_year_month_str(year, month): - year_string = str(year) - month_string = str(month) - if len(month_string) == 1: - month_string = '0' + month_string - return year_string + '-' + month_string - -def determine_sync_status(f): - RecordCountPublished = f.get("RecordCountPublished") - RecordCountInDb = f.get("RecordCountInDb") - rel_diff1 = abs(RecordCountPublished - RecordCountInDb) / RecordCountInDb - rel_diff2 = abs(RecordCountPublished - RecordCountInDb) / RecordCountPublished - if rel_diff1 < 0.01 or rel_diff2 < 0.01: - return "OK" - return "ERROR [ Please use the Gap Publisher to synchronise this dataset]" - def refresh_gridsite(): try: @@ -174,7 +113,7 @@ def refresh_cloudsite(): ON b.SiteName = a.SiteName AND b.UpdateTime = a.latest GROUP BY SiteName; """ - fetchset = VAnonCloudRecord.objects.using('cloud').raw(sql_query) + fetchset = VAnonCloudRecord.objects.using('cloud').raw(sql_query) for f in fetchset: CloudSite.objects.update_or_create( @@ -278,6 +217,6 @@ def refresh_gridsitesync(): refresh_gridsitesync() log.info( - "Data retrieval from the database backend(s) is completed and " - "successfully synchronized with the local SQLite 
database." + "Data retrieval and processing attempted. " + "Check the above logs for details on the sync status" ) diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 8732b21f..6755a024 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -174,7 +174,7 @@ def retrieve(self, request, SiteName=None): class GridSiteSyncPagination(PageNumberPagination): - page_size = 1000 # Number of items to be fetched per page + page_size = 1000 # Number of items to be fetched per page class GridSiteSyncViewSet(viewsets.ReadOnlyModelViewSet): From 1a81643e3a034ed0b15437a0e6330f8e4a45f3ae Mon Sep 17 00:00:00 2001 From: Sae126V Date: Fri, 16 May 2025 17:39:04 +0100 Subject: [PATCH 115/183] Add log separators and move function to script The function is no-longer used in views, so move it to the script it's used in. --- monitoring/publishing/db_update_sqlite.py | 18 +++++++++++++++++- monitoring/publishing/views.py | 12 ------------ 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/monitoring/publishing/db_update_sqlite.py b/monitoring/publishing/db_update_sqlite.py index 1f650769..2bd75049 100644 --- a/monitoring/publishing/db_update_sqlite.py +++ b/monitoring/publishing/db_update_sqlite.py @@ -40,7 +40,6 @@ summaries_dict_standard, syncrecords_dict_standard, correct_dict, - determine_sync_status, fill_summaries_dict, fill_syncrecords_dict, get_year_month_str @@ -68,6 +67,21 @@ log = logging.getLogger(__name__) +def determine_sync_status(f): + """ + Helper to determine sync status between published and the database record counts. 
+ """ + RecordCountPublished = f.get("RecordCountPublished") + RecordCountInDb = f.get("RecordCountInDb") + rel_diff1 = abs(RecordCountPublished - RecordCountInDb)/RecordCountInDb + rel_diff2 = abs(RecordCountPublished - RecordCountInDb)/RecordCountPublished + if rel_diff1 < 0.01 or rel_diff2 < 0.01: + syncstatus = "OK" + else: + syncstatus = "ERROR [ Please use the Gap Publisher to synchronise this dataset]" + return syncstatus + + def refresh_gridsite(): try: sql_query = """ @@ -211,6 +225,7 @@ def refresh_gridsitesync(): if __name__ == "__main__": + log.info('=====================') refresh_gridsite() refresh_cloudsite() @@ -220,3 +235,4 @@ def refresh_gridsitesync(): "Data retrieval and processing attempted. " "Check the above logs for details on the sync status" ) + log.info('=====================') diff --git a/monitoring/publishing/views.py b/monitoring/publishing/views.py index 6755a024..0e07e74e 100644 --- a/monitoring/publishing/views.py +++ b/monitoring/publishing/views.py @@ -104,18 +104,6 @@ def correct_dict(inpDict): return inpDict -def determine_sync_status(f): - RecordCountPublished = f.get("RecordCountPublished") - RecordCountInDb = f.get("RecordCountInDb") - rel_diff1 = abs(RecordCountPublished - RecordCountInDb)/RecordCountInDb - rel_diff2 = abs(RecordCountPublished - RecordCountInDb)/RecordCountPublished - if rel_diff1 < 0.01 or rel_diff2 < 0.01: - syncstatus = "OK" - else: - syncstatus = "ERROR [ Please use the Gap Publisher to synchronise this dataset]" - return syncstatus - - # Combine Year and Month into one string (display purposes) def get_year_month_str(year, month): year_string = str(year) From ed2c9c2e5daf910793c7c0cecffc71fae175738a Mon Sep 17 00:00:00 2001 From: Sae126V Date: Fri, 13 Jun 2025 12:41:53 +0100 Subject: [PATCH 116/183] Set HTML as a default renderer and remove admin renderer --- monitoring/settings.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index 
f4272680..627800e6 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -80,10 +80,9 @@ REST_FRAMEWORK = { 'DEFAULT_RENDERER_CLASSES': ( + 'rest_framework.renderers.TemplateHTMLRenderer', 'rest_framework.renderers.JSONRenderer', 'rest_framework.renderers.BrowsableAPIRenderer', - 'rest_framework.renderers.TemplateHTMLRenderer', - 'rest_framework.renderers.AdminRenderer', ) } From 02062b53e87db4351a70529e2c627b9313beb583 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 17 Jun 2025 17:20:51 +0100 Subject: [PATCH 117/183] Update version number to 0.2 This should have been done before tagging the v0.2 release. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 168bf46b..307434ff 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setup( name='monitoring', - version='0.1', + version='0.2', packages=find_packages(), scripts=['manage.py'], ) From 946ee5a079052d7cb1c77e288425af7f02b507e3 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 17 Jun 2025 17:08:41 +0100 Subject: [PATCH 118/183] Update reqs to last versions that support Py3.6 This is a stepping stone to updating the host OS and then the Python version. --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 581d42c5..41b5b0c6 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ # Pin packages to support and work with py3.6. -Django==3.1.14 +Django==3.2.25 djangorestframework==3.15.1 -pytz==2024.2 +pytz==2025.2 PyMySQL==1.0.2 pandas==1.1.5 From 323ee1e789f3ac76e84e487a85aded8ebf5547ca Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Tue, 17 Jun 2025 17:21:08 +0100 Subject: [PATCH 119/183] Set app names to dotted paths relative to base dir Later versions of Django are particular about requiring the app name in the app's Config to be a full path from the root (where manage.py is). 
--- monitoring/availability/apps.py | 2 +- monitoring/publishing/apps.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/availability/apps.py b/monitoring/availability/apps.py index c7eacd8a..53f90fae 100644 --- a/monitoring/availability/apps.py +++ b/monitoring/availability/apps.py @@ -5,4 +5,4 @@ class AvailabilityConfig(AppConfig): - name = 'availability' + name = 'monitoring.availability' diff --git a/monitoring/publishing/apps.py b/monitoring/publishing/apps.py index 57c17f71..d5b06c3b 100644 --- a/monitoring/publishing/apps.py +++ b/monitoring/publishing/apps.py @@ -5,4 +5,4 @@ class PublishingConfig(AppConfig): - name = 'publishing' + name = 'monitoring.publishing' From 5b23760892f9c44c3d9e1dad108180f074e4b602 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 18 Jun 2025 14:42:09 +0100 Subject: [PATCH 120/183] Update version numbers and formatting in docs --- docs/what_gets_installed.md | 22 +++++++++++----------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/what_gets_installed.md b/docs/what_gets_installed.md index ade3ecd2..38d7d387 100644 --- a/docs/what_gets_installed.md +++ b/docs/what_gets_installed.md @@ -2,20 +2,20 @@ For Django to work with apache, it is common to have a venv within the app, wher ## Packages installed by Aquilon outside the venv Following the config file that Aquilon uses, the following are the packages installed: -- httpd -- python3-mod_wsgi (for apache to work with django) -- python3-devel -- gcc (needed for dependencies) -- mariadb -- tar. +- `httpd` +- `python3-mod_wsgi` (for apache to work with django) +- `python3-devel` +- `gcc` (needed for dependencies) +- `mariadb` +- `tar` ## Packages installed within the venv Within venv, the following are installed through pip: -- djangorestframework (3.15.1) -- pymysql (1.0.2) (needed for mariadb to work) -- pandas (1.1.5) (needed by the app) -- django (3.1.14) -- pytz (2024.2). 
+- `djangorestframework` (3.15.1) +- `pymysql` (1.0.2) (needed for mariadb to work) +- `pandas` (1.1.5) (needed by the app) +- `django` (3.2.25) +- `pytz` (2025.2) Note that when the version of the packages is specified, the app would not work with a different version (due to dependencies conflicts). From 0170e950141d4fc69130e061548f5fc1de4acecc Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 19 Jun 2025 11:08:12 +0100 Subject: [PATCH 121/183] Update version number for v0.3 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 307434ff..030d2c45 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setup( name='monitoring', - version='0.2', + version='0.3', packages=find_packages(), scripts=['manage.py'], ) From f5d55907ddadbaf96ace1df318c83b842e18cf3c Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 19 Jun 2025 12:47:47 +0100 Subject: [PATCH 122/183] Filter all db update queries to last 3 years We don't really care about data past this point, so fetching less should improve performance a bit and not overwhelm the interface. The description on the cloud page is updated to match. 
--- monitoring/publishing/db_update_sqlite.py | 6 +++++- monitoring/publishing/templates/cloudsites.html | 2 +- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/db_update_sqlite.py b/monitoring/publishing/db_update_sqlite.py index 2bd75049..81a597fe 100644 --- a/monitoring/publishing/db_update_sqlite.py +++ b/monitoring/publishing/db_update_sqlite.py @@ -89,6 +89,7 @@ def refresh_gridsite(): Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries + WHERE LatestEndTime > DATE_SUB(NOW(), INTERVAL 3 YEAR) GROUP BY 1; """ fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) @@ -118,7 +119,7 @@ def refresh_cloudsite(): SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords - WHERE UpdateTime>'2023-01-01' + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 YEAR) GROUP BY SiteName ) AS a @@ -158,6 +159,7 @@ def refresh_gridsitesync(): MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE + Year >= YEAR(NOW()) - 3 AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01' GROUP BY @@ -172,6 +174,8 @@ def refresh_gridsitesync(): Year, SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords + WHERE + Year >= YEAR(NOW()) - 3 GROUP BY Site, Year, Month; """ diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index 5fa8680e..871ad9d6 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -8,7 +8,7 @@ -

    Sites publishing cloud accounting records from 2018-06-19 onwards

    +

    Sites publishing cloud accounting records in last 3 years

    Page last updated: {{ last_fetched|date:"Y-m-d H:i:s.u" }}

    From 3005e4cf4a277c6521924026117a4ddfe5c3a8ae Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 19 Jun 2025 16:28:51 +0100 Subject: [PATCH 123/183] Update version to v0.4 --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 030d2c45..f21ce6e3 100644 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ setup( name='monitoring', - version='0.3', + version='0.4', packages=find_packages(), scripts=['manage.py'], ) From f51396eef73c4f2e4c5c6585756c57ee4da2f08c Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Mon, 23 Jun 2025 17:22:10 +0100 Subject: [PATCH 124/183] Pin numpy version for Py3.6 compatibility --- requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements.txt b/requirements.txt index 41b5b0c6..69f59228 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,3 +4,4 @@ djangorestframework==3.15.1 pytz==2025.2 PyMySQL==1.0.2 pandas==1.1.5 +numpy==1.19.5 # pandas dependency From 7583b39a53d835ba6bd759daa89c48824ddd53d9 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Mon, 23 Jun 2025 17:22:39 +0100 Subject: [PATCH 125/183] Reduce time filters - Reduce grid and cloud publishing filters to 1 year as we only care about quite recent data for those views. - Reduce filter for sync to 2 years as we should be sorting out publishing issues within that time frame. 
--- monitoring/publishing/db_update_sqlite.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/monitoring/publishing/db_update_sqlite.py b/monitoring/publishing/db_update_sqlite.py index 81a597fe..9f3098f2 100644 --- a/monitoring/publishing/db_update_sqlite.py +++ b/monitoring/publishing/db_update_sqlite.py @@ -89,7 +89,7 @@ def refresh_gridsite(): Site, max(LatestEndTime) AS LatestPublish FROM VSuperSummaries - WHERE LatestEndTime > DATE_SUB(NOW(), INTERVAL 3 YEAR) + WHERE LatestEndTime > DATE_SUB(NOW(), INTERVAL 1 YEAR) GROUP BY 1; """ fetchset = VSuperSummaries.objects.using('grid').raw(sql_query) @@ -119,7 +119,7 @@ def refresh_cloudsite(): SiteName, MAX(UpdateTime) AS latest FROM VAnonCloudRecords - WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 YEAR) + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 1 YEAR) GROUP BY SiteName ) AS a @@ -159,7 +159,7 @@ def refresh_gridsitesync(): MAX(LatestEndTime) AS RecordEnd FROM VSuperSummaries WHERE - Year >= YEAR(NOW()) - 3 AND + Year >= YEAR(NOW()) - 2 AND EarliestEndTime>'1900-01-01' AND LatestEndTime>'1900-01-01' GROUP BY @@ -175,7 +175,7 @@ def refresh_gridsitesync(): SUM(NumberOfJobs) AS RecordCountInDb FROM VSyncRecords WHERE - Year >= YEAR(NOW()) - 3 + Year >= YEAR(NOW()) - 2 GROUP BY Site, Year, Month; """ From 82e175dbc9129b73ffa27474648048f96fd4151c Mon Sep 17 00:00:00 2001 From: Sae126V Date: Thu, 14 Aug 2025 15:25:23 +0100 Subject: [PATCH 126/183] Move db_update_sqlite file to app level --- monitoring/{publishing => }/db_update_sqlite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename monitoring/{publishing => }/db_update_sqlite.py (99%) diff --git a/monitoring/publishing/db_update_sqlite.py b/monitoring/db_update_sqlite.py similarity index 99% rename from monitoring/publishing/db_update_sqlite.py rename to monitoring/db_update_sqlite.py index 9f3098f2..d608557f 100644 --- a/monitoring/publishing/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -13,7 +13,7 @@ import 
pandas as pd -BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) +BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) # Find the root and the Django project sys.path.append(BASE_DIR) From 34631fb23c1a95f31a4caa308288168bf576e889 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 10:32:37 +0100 Subject: [PATCH 127/183] Add benchmarks app --- monitoring/benchmarks/__init__.py | 0 monitoring/benchmarks/admin.py | 3 + monitoring/benchmarks/apps.py | 6 ++ monitoring/benchmarks/migrations/__init__.py | 0 monitoring/benchmarks/models.py | 13 ++++ monitoring/benchmarks/serializers.py | 21 +++++++ .../templates/benchmarksBySubmithost.html | 28 +++++++++ monitoring/benchmarks/tests.py | 3 + monitoring/benchmarks/urls.py | 17 +++++ monitoring/benchmarks/views.py | 63 +++++++++++++++++++ monitoring/settings.py | 1 + monitoring/urls.py | 1 + 12 files changed, 156 insertions(+) create mode 100644 monitoring/benchmarks/__init__.py create mode 100644 monitoring/benchmarks/admin.py create mode 100644 monitoring/benchmarks/apps.py create mode 100644 monitoring/benchmarks/migrations/__init__.py create mode 100644 monitoring/benchmarks/models.py create mode 100644 monitoring/benchmarks/serializers.py create mode 100644 monitoring/benchmarks/templates/benchmarksBySubmithost.html create mode 100644 monitoring/benchmarks/tests.py create mode 100644 monitoring/benchmarks/urls.py create mode 100644 monitoring/benchmarks/views.py diff --git a/monitoring/benchmarks/__init__.py b/monitoring/benchmarks/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/monitoring/benchmarks/admin.py b/monitoring/benchmarks/admin.py new file mode 100644 index 00000000..8c38f3f3 --- /dev/null +++ b/monitoring/benchmarks/admin.py @@ -0,0 +1,3 @@ +from django.contrib import admin + +# Register your models here. 
diff --git a/monitoring/benchmarks/apps.py b/monitoring/benchmarks/apps.py new file mode 100644 index 00000000..9a0ebb83 --- /dev/null +++ b/monitoring/benchmarks/apps.py @@ -0,0 +1,6 @@ +from django.apps import AppConfig + + +class BenchmarksConfig(AppConfig): + default_auto_field = 'django.db.models.BigAutoField' + name = 'benchmarks' diff --git a/monitoring/benchmarks/migrations/__init__.py b/monitoring/benchmarks/migrations/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py new file mode 100644 index 00000000..c5c803e3 --- /dev/null +++ b/monitoring/benchmarks/models.py @@ -0,0 +1,13 @@ +from django.db import models + +class BenchmarksBySubmithost(models.Model): + fetched = models.DateTimeField(auto_now=True) + SiteName = models.CharField(max_length=255) + SubmitHost = models.CharField(max_length=255) + ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) + ServiceLevel = models.CharField(max_length=50) + SourceView = models.CharField(max_length=50) + UpdateTime = models.DateTimeField() + + class Meta: + ordering = ('SiteName',) diff --git a/monitoring/benchmarks/serializers.py b/monitoring/benchmarks/serializers.py new file mode 100644 index 00000000..cf57253e --- /dev/null +++ b/monitoring/benchmarks/serializers.py @@ -0,0 +1,21 @@ +from rest_framework import serializers + +from monitoring.benchmarks.models import BenchmarksBySubmithost + + +class BenchmarksBySubmithostSerializer(serializers.HyperlinkedModelSerializer): + # Override default format with None so that Python datetime is used as + # ouput format. Encoding will be determined by the renderer and can be + # formatted by a template filter. 
+ updated = serializers.DateTimeField(format=None) + + class Meta: + model = BenchmarksBySubmithost + fields = ( + 'SiteName', + 'SubmitHost', + 'ServiceLevelType', + 'ServiceLevel', + 'SourceView', + 'UpdateTime', + ) diff --git a/monitoring/benchmarks/templates/benchmarksBySubmithost.html b/monitoring/benchmarks/templates/benchmarksBySubmithost.html new file mode 100644 index 00000000..48f33e41 --- /dev/null +++ b/monitoring/benchmarks/templates/benchmarksBySubmithost.html @@ -0,0 +1,28 @@ + + + + + + + Sites publishing benchmark records + + + +

    Sites publishing benchmark records in last 2 months

    +

    Page last updated: {{ last_fetched|date:"Y-m-d H:i:s.u" }}

    +
    + + + + {% for site in sites %} + + + + + + + + + {% endfor %} +
    SiteSubmit hostService level typeService levelSource viewLast updated
    {{ benchmark.SiteName }}{{ benchmark.SubmitHost }}{{ benchmark.ServiceLevelType }}{{ benchmark.ServiceLevel }}{{ benchmark.SourceView }}{{ benchmark.UpdateTime|date:"Y-m-d H:i:s" }}
    + \ No newline at end of file diff --git a/monitoring/benchmarks/tests.py b/monitoring/benchmarks/tests.py new file mode 100644 index 00000000..7ce503c2 --- /dev/null +++ b/monitoring/benchmarks/tests.py @@ -0,0 +1,3 @@ +from django.test import TestCase + +# Create your tests here. diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py new file mode 100644 index 00000000..fa24785c --- /dev/null +++ b/monitoring/benchmarks/urls.py @@ -0,0 +1,17 @@ +from rest_framework import routers + +from monitoring.benchmarks import views +from django.urls import re_path + +router = routers.SimpleRouter() +router.register(r'benchmarks', views.BenchmarksViewSet) + +urlpatterns = [ + re_path( + r'^benchmarks/(?P[a-zA-Z0-9._-]+)/$', + views.BenchmarksViewSet.as_view({'get': 'retrieve'}), + name='site-benchmarks-detail' + ), +] + +urlpatterns += router.urls diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py new file mode 100644 index 00000000..511fbb6a --- /dev/null +++ b/monitoring/benchmarks/views.py @@ -0,0 +1,63 @@ +from django.shortcuts import render +from datetime import datetime, timedelta + +from django.db.models import Max +from django.shortcuts import get_object_or_404 +import pandas as pd + +from rest_framework import viewsets +from rest_framework.renderers import TemplateHTMLRenderer +from rest_framework.response import Response +from rest_framework.pagination import PageNumberPagination + + +from monitoring.benchmarks.models import BenchmarksBySubmithost + +from monitoring.benchmarks.serializers import BenchmarksBySubmithostSerializer + +class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): + queryset = BenchmarksBySubmithost.objects.all() + serializer_class = BenchmarksBySubmithostSerializer + template_name = 'benchmarksBySubmithost.html' + lookup_field = 'SiteName' + + def list(self, request): + last_fetched = BenchmarksBySubmithost.objects.aggregate(Max('fetched'))['fetched__max'] + if last_fetched is not None: + 
print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + + final_response = [] + response = super(BenchmarksViewSet, self).list(request) + + for single_dict in response.data: + date = single_dict.get('UpdateTime').replace(tzinfo=None) + # single_dict = update_dict_stdout_and_returncode(single_dict, date) + final_response.append(date) + + if type(request.accepted_renderer) is TemplateHTMLRenderer: + response.data = { + 'benchmark': final_response, + 'last_fetched': last_fetched + } + + return response + + # def retrieve(self, request, SiteName=None): + # last_fetched = BenchmarksBySubmithost.objects.aggregate(Max('fetched'))['fetched__max'] + # # If there's no data then last_fetched is None. + # if last_fetched is not None: + # print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + + # response = super(GridSiteViewSet, self).retrieve(request) + # date = response.data['updated'].replace(tzinfo=None) + # response.data = update_dict_stdout_and_returncode(response.data, date) + + # # Wrap data in a dict so that it can display in template. + # if type(request.accepted_renderer) is TemplateHTMLRenderer: + # # Single result put in list to work with same HTML template. 
+ # response.data = { + # 'benchmark': [response.data], + # 'last_fetched': last_fetched + # } + + # return response diff --git a/monitoring/settings.py b/monitoring/settings.py index 627800e6..e00bb9da 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -76,6 +76,7 @@ 'rest_framework', 'monitoring.publishing', 'monitoring.availability', + 'monitoring.benchmarks', ] REST_FRAMEWORK = { diff --git a/monitoring/urls.py b/monitoring/urls.py index 26e40557..7d08c4f8 100644 --- a/monitoring/urls.py +++ b/monitoring/urls.py @@ -20,6 +20,7 @@ url(r'^admin/', admin.site.urls), url(r'^availability/', include('monitoring.availability.urls')), url(r'^publishing/', include('monitoring.publishing.urls')), + url(r'^benchmarks/', include('monitoring.benchmarks.urls')), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), ] From 5e36e49d16c0454b0301e27ff2a7a379aebba51b Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 11:12:19 +0100 Subject: [PATCH 128/183] Update app name --- monitoring/benchmarks/apps.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/monitoring/benchmarks/apps.py b/monitoring/benchmarks/apps.py index 9a0ebb83..bb0e0b34 100644 --- a/monitoring/benchmarks/apps.py +++ b/monitoring/benchmarks/apps.py @@ -2,5 +2,4 @@ class BenchmarksConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'benchmarks' + name = 'monitoring.benchmarks' From c1e7bada8802199946820dd0c2acd2e33768e625 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 11:24:21 +0100 Subject: [PATCH 129/183] Add extra_kwargs --- monitoring/benchmarks/serializers.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/monitoring/benchmarks/serializers.py b/monitoring/benchmarks/serializers.py index cf57253e..80472a53 100644 --- a/monitoring/benchmarks/serializers.py +++ b/monitoring/benchmarks/serializers.py @@ -19,3 +19,8 @@ class Meta: 'SourceView', 'UpdateTime', ) + + # Sitename 
substitutes for pk + extra_kwargs = { + 'url': {'view_name': 'site-benchmarks-detail', 'lookup_field': 'SiteName'} + } From 9272d9b213eeb2dc2430b6e46b81154a71bf043e Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 11:29:24 +0100 Subject: [PATCH 130/183] Update view name --- monitoring/benchmarks/serializers.py | 2 +- monitoring/benchmarks/urls.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/benchmarks/serializers.py b/monitoring/benchmarks/serializers.py index 80472a53..fd765ffa 100644 --- a/monitoring/benchmarks/serializers.py +++ b/monitoring/benchmarks/serializers.py @@ -22,5 +22,5 @@ class Meta: # Sitename substitutes for pk extra_kwargs = { - 'url': {'view_name': 'site-benchmarks-detail', 'lookup_field': 'SiteName'} + 'url': {'view_name': 'benchmarksbysubmithost-list', 'lookup_field': 'SiteName'} } diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index fa24785c..58b18396 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -10,7 +10,7 @@ re_path( r'^benchmarks/(?P[a-zA-Z0-9._-]+)/$', views.BenchmarksViewSet.as_view({'get': 'retrieve'}), - name='site-benchmarks-detail' + name='benchmarksbysubmithost-list' ), ] From 8e9703a13486cafc9e605d97218db5ffe4d4692d Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 13:10:22 +0100 Subject: [PATCH 131/183] Rename view --- monitoring/benchmarks/serializers.py | 2 +- monitoring/benchmarks/urls.py | 2 +- monitoring/benchmarks/views.py | 60 ++++++++++++++++++---------- 3 files changed, 40 insertions(+), 24 deletions(-) diff --git a/monitoring/benchmarks/serializers.py b/monitoring/benchmarks/serializers.py index fd765ffa..0208f1e8 100644 --- a/monitoring/benchmarks/serializers.py +++ b/monitoring/benchmarks/serializers.py @@ -22,5 +22,5 @@ class Meta: # Sitename substitutes for pk extra_kwargs = { - 'url': {'view_name': 'benchmarksbysubmithost-list', 'lookup_field': 'SiteName'} + 'url': {'view_name': 
'benchmarksbysubmithost-details', 'lookup_field': 'SiteName'} } diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index 58b18396..24398ab4 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -10,7 +10,7 @@ re_path( r'^benchmarks/(?P[a-zA-Z0-9._-]+)/$', views.BenchmarksViewSet.as_view({'get': 'retrieve'}), - name='benchmarksbysubmithost-list' + name='benchmarksbysubmithost-details' ), ] diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 511fbb6a..fb922d7b 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -15,10 +15,26 @@ from monitoring.benchmarks.serializers import BenchmarksBySubmithostSerializer +def update_dict_stdout_and_returncode(single_dict, date): + diff = datetime.today() - date + date = date.strftime("%Y-%m-%d") + + if diff <= timedelta(days=7): + single_dict['returncode'] = 0 + single_dict['stdout'] = "OK [ last published %s days ago: %s ]" % (diff.days, date) + elif diff > timedelta(days=7): + single_dict['returncode'] = 1 + single_dict['stdout'] = "WARNING [ last published %s days ago: %s ]" % (diff.days, date) + else: + single_dict['returncode'] = 3 + single_dict['stdout'] = "UNKNOWN" + return single_dict + + class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): queryset = BenchmarksBySubmithost.objects.all() serializer_class = BenchmarksBySubmithostSerializer - template_name = 'benchmarksBySubmithost.html' + template_name = 'benchmarksbysubmithost.html' lookup_field = 'SiteName' def list(self, request): @@ -31,8 +47,8 @@ def list(self, request): for single_dict in response.data: date = single_dict.get('UpdateTime').replace(tzinfo=None) - # single_dict = update_dict_stdout_and_returncode(single_dict, date) - final_response.append(date) + single_dict = update_dict_stdout_and_returncode(single_dict, date) + final_response.append(single_dict) if type(request.accepted_renderer) is TemplateHTMLRenderer: response.data = { @@ -42,22 
+58,22 @@ def list(self, request): return response - # def retrieve(self, request, SiteName=None): - # last_fetched = BenchmarksBySubmithost.objects.aggregate(Max('fetched'))['fetched__max'] - # # If there's no data then last_fetched is None. - # if last_fetched is not None: - # print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - - # response = super(GridSiteViewSet, self).retrieve(request) - # date = response.data['updated'].replace(tzinfo=None) - # response.data = update_dict_stdout_and_returncode(response.data, date) - - # # Wrap data in a dict so that it can display in template. - # if type(request.accepted_renderer) is TemplateHTMLRenderer: - # # Single result put in list to work with same HTML template. - # response.data = { - # 'benchmark': [response.data], - # 'last_fetched': last_fetched - # } - - # return response + def retrieve(self, request, SiteName=None): + last_fetched = BenchmarksBySubmithost.objects.aggregate(Max('fetched'))['fetched__max'] + # If there's no data then last_fetched is None. + if last_fetched is not None: + print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) + + response = super(BenchmarksViewSet, self).retrieve(request) + date = response.data['UpdateTime'].replace(tzinfo=None) + response.data = update_dict_stdout_and_returncode(response.data, date) + + # Wrap data in a dict so that it can display in template. + if type(request.accepted_renderer) is TemplateHTMLRenderer: + # Single result put in list to work with same HTML template. 
+ response.data = { + 'benchmark': [response.data], + 'last_fetched': last_fetched + } + + return response From 07d9191b59d7328a12b8cfea68c607678486c4b1 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 13:19:11 +0100 Subject: [PATCH 132/183] Update url --- monitoring/benchmarks/urls.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index 24398ab4..793ba206 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -4,11 +4,11 @@ from django.urls import re_path router = routers.SimpleRouter() -router.register(r'benchmarks', views.BenchmarksViewSet) +router.register('', views.BenchmarksViewSet) urlpatterns = [ re_path( - r'^benchmarks/(?P[a-zA-Z0-9._-]+)/$', + r'/(?P[a-zA-Z0-9._-]+)/$', views.BenchmarksViewSet.as_view({'get': 'retrieve'}), name='benchmarksbysubmithost-details' ), From d3c0a1cc0a3bdb2dc743db332b6d63f0ce0fd233 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 13:38:03 +0100 Subject: [PATCH 133/183] Rename template --- ...enchmarksBySubmithost.html => benchmarks_by_submithost.html} | 2 +- monitoring/benchmarks/urls.py | 2 +- monitoring/benchmarks/views.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) rename monitoring/benchmarks/templates/{benchmarksBySubmithost.html => benchmarks_by_submithost.html} (96%) diff --git a/monitoring/benchmarks/templates/benchmarksBySubmithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html similarity index 96% rename from monitoring/benchmarks/templates/benchmarksBySubmithost.html rename to monitoring/benchmarks/templates/benchmarks_by_submithost.html index 48f33e41..31ccb120 100644 --- a/monitoring/benchmarks/templates/benchmarksBySubmithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -14,7 +14,7 @@

    Sites publishing benchmark records in last 2 months

    SiteSubmit hostService level typeService levelSource viewLast updated - {% for site in sites %} + {% for benchmark in benchmarks %} {{ benchmark.SiteName }} {{ benchmark.SubmitHost }} diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index 793ba206..e41f0a4c 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -8,7 +8,7 @@ urlpatterns = [ re_path( - r'/(?P[a-zA-Z0-9._-]+)/$', + r'^/(?P[a-zA-Z0-9._-]+)/$', views.BenchmarksViewSet.as_view({'get': 'retrieve'}), name='benchmarksbysubmithost-details' ), diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index fb922d7b..991dcc1d 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -34,7 +34,7 @@ def update_dict_stdout_and_returncode(single_dict, date): class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): queryset = BenchmarksBySubmithost.objects.all() serializer_class = BenchmarksBySubmithostSerializer - template_name = 'benchmarksbysubmithost.html' + template_name = 'benchmarks_by_submithost.html' lookup_field = 'SiteName' def list(self, request): From 76ad09e4844e9969880d75ccdb4433fb1d732824 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 14:56:13 +0100 Subject: [PATCH 134/183] Refresh benchmarks data --- monitoring/benchmarks/models.py | 33 ++++++++++++++++++++++++++++ monitoring/db_update_sqlite.py | 39 +++++++++++++++++++++++++++++++++ 2 files changed, 72 insertions(+) diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py index c5c803e3..00fe24c7 100644 --- a/monitoring/benchmarks/models.py +++ b/monitoring/benchmarks/models.py @@ -11,3 +11,36 @@ class BenchmarksBySubmithost(models.Model): class Meta: ordering = ('SiteName',) + +class VJobRecords(models.Model): + Site = models.CharField(max_length=255, primary_key=True) + SubmitHost = models.CharField(max_length=255) + ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) + ServiceLevel = 
models.CharField(max_length=50) + UpdateTime = models.DateTimeField() + + class Meta: + managed = False + db_table = 'VJobRecords' + +class VSummaries(models.Model): + Site = models.CharField(max_length=255, primary_key=True) + SubmitHost = models.CharField(max_length=255) + ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) + ServiceLevel = models.CharField(max_length=50) + UpdateTime = models.DateTimeField() + + class Meta: + managed = False + db_table = 'VSummaries' + +class VNormalisedSummaries(models.Model): + Site = models.CharField(max_length=255, primary_key=True) + SubmitHost = models.CharField(max_length=255) + ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) + ServiceLevel = models.CharField(max_length=50) + UpdateTime = models.DateTimeField() + + class Meta: + managed = False + db_table = 'VNormalisedSummaries' diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index d608557f..9a1d9a2f 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -45,6 +45,12 @@ get_year_month_str ) +from monitoring.benchmarks.models import ( + BenchmarksBySubmithost, + VJobRecords, + VSummaries, + VNormalisedSummaries, +) try: # Read configuration from the file @@ -227,6 +233,38 @@ def refresh_gridsitesync(): except DatabaseError: log.exception('Error while trying to refresh GridSiteSync') +def refresh_BenchmarksBySubmitHost(): + try: + sql_query = """ + SELECT + Site, + SubmitHost, + ServiceLevelType, + ServiceLevel, + max(UpdateTime) AS LatestPublish + FROM VJobRecords + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 2 MONTH) + GROUP BY 1; + """ + fetchset = VJobRecords.objects.raw(sql_query) + + for f in fetchset: + BenchmarksBySubmithost.objects.update_or_create( + defaults={ + 'UpdateTime': f.LatestPublish, + 'SourceView': 'VJobRecords' + }, + SiteName=f.Site, + SubmitHost=f.SubmitHost, + ServiceLevelType=f.ServiceLevelType, + ServiceLevel=f.ServiceLevel, + ) + + 
log.info("Refreshed BenchmarksBySubmitHost") + + except DatabaseError: + log.exception('Error while trying to refresh BenchmarksBySubmitHost') + if __name__ == "__main__": log.info('=====================') @@ -234,6 +272,7 @@ def refresh_gridsitesync(): refresh_gridsite() refresh_cloudsite() refresh_gridsitesync() + refresh_BenchmarksBySubmitHost() log.info( "Data retrieval and processing attempted. " From 43f18bbcc8a81273bbf6d2360b31e6b7184a21d4 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Wed, 13 Aug 2025 16:28:15 +0100 Subject: [PATCH 135/183] Fetch data from VSummaries --- monitoring/benchmarks/models.py | 1 + monitoring/db_update_sqlite.py | 23 ++++++++++++++++++----- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py index 00fe24c7..778f6305 100644 --- a/monitoring/benchmarks/models.py +++ b/monitoring/benchmarks/models.py @@ -18,6 +18,7 @@ class VJobRecords(models.Model): ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) ServiceLevel = models.CharField(max_length=50) UpdateTime = models.DateTimeField() + EndTime = models.DateTimeField() class Meta: managed = False diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 9a1d9a2f..afbbf935 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -235,6 +235,19 @@ def refresh_gridsitesync(): def refresh_BenchmarksBySubmitHost(): try: + # sql_query = """ + # SELECT + # Site, + # SubmitHost, + # ServiceLevelType, + # ServiceLevel, + # max(UpdateTime) AS LatestPublish + # FROM VJobRecords + # WHERE EndTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + # AND UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + # GROUP BY Site, SubmitHost; + # """ + # fetchset = VJobRecords.objects.raw(sql_query) sql_query = """ SELECT Site, @@ -242,17 +255,17 @@ def refresh_BenchmarksBySubmitHost(): ServiceLevelType, ServiceLevel, max(UpdateTime) AS LatestPublish - FROM VJobRecords - 
WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 2 MONTH) - GROUP BY 1; + FROM VSummaries + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + GROUP BY Site, SubmitHost; """ - fetchset = VJobRecords.objects.raw(sql_query) + fetchset = VSummaries.objects.raw(sql_query) for f in fetchset: BenchmarksBySubmithost.objects.update_or_create( defaults={ 'UpdateTime': f.LatestPublish, - 'SourceView': 'VJobRecords' + 'SourceView': 'VSummaries' }, SiteName=f.Site, SubmitHost=f.SubmitHost, From 0ccdf751096eb82a91a07d778d487f65d32dfaeb Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 14 Aug 2025 16:37:41 +0100 Subject: [PATCH 136/183] Use grid database --- monitoring/db_update_sqlite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index afbbf935..e71547cd 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -259,7 +259,7 @@ def refresh_BenchmarksBySubmitHost(): WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ - fetchset = VSummaries.objects.raw(sql_query) + fetchset = VSummaries.objects.using('grid').raw(sql_query) for f in fetchset: BenchmarksBySubmithost.objects.update_or_create( From 281993adaab646da76e2e30a57fb9c9dcf292144 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 14 Aug 2025 17:20:01 +0100 Subject: [PATCH 137/183] Fix the datatype --- monitoring/benchmarks/models.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py index 778f6305..5ee40be5 100644 --- a/monitoring/benchmarks/models.py +++ b/monitoring/benchmarks/models.py @@ -4,8 +4,8 @@ class BenchmarksBySubmithost(models.Model): fetched = models.DateTimeField(auto_now=True) SiteName = models.CharField(max_length=255) SubmitHost = models.CharField(max_length=255) - ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) - ServiceLevel = 
models.CharField(max_length=50) + ServiceLevelType = models.CharField(max_length=50) + ServiceLevel = models.DecimalField(max_digits=10, decimal_places=3) SourceView = models.CharField(max_length=50) UpdateTime = models.DateTimeField() @@ -15,8 +15,8 @@ class Meta: class VJobRecords(models.Model): Site = models.CharField(max_length=255, primary_key=True) SubmitHost = models.CharField(max_length=255) - ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) - ServiceLevel = models.CharField(max_length=50) + ServiceLevelType = models.CharField(max_length=50) + ServiceLevel = models.DecimalField(max_digits=10, decimal_places=3) UpdateTime = models.DateTimeField() EndTime = models.DateTimeField() @@ -27,8 +27,8 @@ class Meta: class VSummaries(models.Model): Site = models.CharField(max_length=255, primary_key=True) SubmitHost = models.CharField(max_length=255) - ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) - ServiceLevel = models.CharField(max_length=50) + ServiceLevelType = models.CharField(max_length=50) + ServiceLevel = models.DecimalField(max_digits=10, decimal_places=3) UpdateTime = models.DateTimeField() class Meta: @@ -38,8 +38,8 @@ class Meta: class VNormalisedSummaries(models.Model): Site = models.CharField(max_length=255, primary_key=True) SubmitHost = models.CharField(max_length=255) - ServiceLevelType = models.DecimalField(max_digits=10, decimal_places=3) - ServiceLevel = models.CharField(max_length=50) + ServiceLevelType = models.CharField(max_length=50) + ServiceLevel = models.DecimalField(max_digits=10, decimal_places=3) UpdateTime = models.DateTimeField() class Meta: From b70d9fb9546d469d19e87ed6081f410d649954a4 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 14 Aug 2025 21:57:55 +0100 Subject: [PATCH 138/183] Update variable name --- monitoring/benchmarks/serializers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/benchmarks/serializers.py 
b/monitoring/benchmarks/serializers.py index 0208f1e8..3dc31bda 100644 --- a/monitoring/benchmarks/serializers.py +++ b/monitoring/benchmarks/serializers.py @@ -7,7 +7,7 @@ class BenchmarksBySubmithostSerializer(serializers.HyperlinkedModelSerializer): # Override default format with None so that Python datetime is used as # ouput format. Encoding will be determined by the renderer and can be # formatted by a template filter. - updated = serializers.DateTimeField(format=None) + UpdateTime = serializers.DateTimeField(format=None) class Meta: model = BenchmarksBySubmithost From 31e499692753fb7b3cfdff2c79fa859276a1544e Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 14 Aug 2025 22:09:55 +0100 Subject: [PATCH 139/183] Update variable name --- monitoring/benchmarks/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 991dcc1d..42cb9d94 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -52,7 +52,7 @@ def list(self, request): if type(request.accepted_renderer) is TemplateHTMLRenderer: response.data = { - 'benchmark': final_response, + 'benchmarks': final_response, 'last_fetched': last_fetched } From 1f380d50ee2269fa5e6da822b7ecf4afa1b3dd4b Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Fri, 15 Aug 2025 12:02:38 +0100 Subject: [PATCH 140/183] Fetch data from two tables --- monitoring/benchmarks/views.py | 37 ++++++++++--------------------- monitoring/db_update_sqlite.py | 40 ++++++++++++++++------------------ 2 files changed, 31 insertions(+), 46 deletions(-) diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 42cb9d94..cb1d3fe9 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -15,22 +15,6 @@ from monitoring.benchmarks.serializers import BenchmarksBySubmithostSerializer -def update_dict_stdout_and_returncode(single_dict, date): - diff = datetime.today() - date - date = 
date.strftime("%Y-%m-%d") - - if diff <= timedelta(days=7): - single_dict['returncode'] = 0 - single_dict['stdout'] = "OK [ last published %s days ago: %s ]" % (diff.days, date) - elif diff > timedelta(days=7): - single_dict['returncode'] = 1 - single_dict['stdout'] = "WARNING [ last published %s days ago: %s ]" % (diff.days, date) - else: - single_dict['returncode'] = 3 - single_dict['stdout'] = "UNKNOWN" - return single_dict - - class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): queryset = BenchmarksBySubmithost.objects.all() serializer_class = BenchmarksBySubmithostSerializer @@ -47,8 +31,7 @@ def list(self, request): for single_dict in response.data: date = single_dict.get('UpdateTime').replace(tzinfo=None) - single_dict = update_dict_stdout_and_returncode(single_dict, date) - final_response.append(single_dict) + final_response.append(date) if type(request.accepted_renderer) is TemplateHTMLRenderer: response.data = { @@ -63,17 +46,21 @@ def retrieve(self, request, SiteName=None): # If there's no data then last_fetched is None. if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - - response = super(BenchmarksViewSet, self).retrieve(request) - date = response.data['UpdateTime'].replace(tzinfo=None) - response.data = update_dict_stdout_and_returncode(response.data, date) + + final_response = [] + sites_list_qs = BenchmarksBySubmithost.objects.filter(SiteName=SiteName) + sites_list_serializer = self.get_serializer(sites_list_qs, many=True) + + for single_dict in sites_list_serializer.data: + date = single_dict.get('UpdateTime').replace(tzinfo=None) + final_response.append(date) # Wrap data in a dict so that it can display in template. if type(request.accepted_renderer) is TemplateHTMLRenderer: # Single result put in list to work with same HTML template. 
- response.data = { - 'benchmark': [response.data], + response = { + 'benchmarks': final_response, 'last_fetched': last_fetched } - return response + return Response(response) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index e71547cd..02780f62 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -233,50 +233,48 @@ def refresh_gridsitesync(): except DatabaseError: log.exception('Error while trying to refresh GridSiteSync') + def refresh_BenchmarksBySubmitHost(): + # views = ['VSummaries', 'VJobRecords', 'VNormalisedSummaries'] + views = ['VSummaries', 'VNormalisedSummaries'] + for view in views: + refresh_BenchmarksBySubmitHost_from_view(view) + + +def refresh_BenchmarksBySubmitHost_from_view(view_name): try: - # sql_query = """ - # SELECT - # Site, - # SubmitHost, - # ServiceLevelType, - # ServiceLevel, - # max(UpdateTime) AS LatestPublish - # FROM VJobRecords - # WHERE EndTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) - # AND UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) - # GROUP BY Site, SubmitHost; - # """ - # fetchset = VJobRecords.objects.raw(sql_query) - sql_query = """ + sql_query = f""" SELECT Site, SubmitHost, ServiceLevelType, ServiceLevel, max(UpdateTime) AS LatestPublish - FROM VSummaries + FROM {view_name} WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ - fetchset = VSummaries.objects.using('grid').raw(sql_query) + + # Dynamically get the model class from globals + model_class = globals()[view_name] + fetchset = model_class.objects.using('grid').raw(sql_query) for f in fetchset: BenchmarksBySubmithost.objects.update_or_create( defaults={ 'UpdateTime': f.LatestPublish, - 'SourceView': 'VSummaries' - }, + 'SourceView': view_name + }, SiteName=f.Site, SubmitHost=f.SubmitHost, ServiceLevelType=f.ServiceLevelType, ServiceLevel=f.ServiceLevel, ) - log.info("Refreshed BenchmarksBySubmitHost") + log.info(f"Refreshed BenchmarksBySubmitHost from {view_name}") - 
except DatabaseError: - log.exception('Error while trying to refresh BenchmarksBySubmitHost') + except Exception: + log.exception(f'Error while trying to refresh BenchmarksBySubmitHost from {view_name}') if __name__ == "__main__": From 736261facc0e4dda7ac7cff6f7819be193cfaad3 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Fri, 15 Aug 2025 13:45:37 +0100 Subject: [PATCH 141/183] Refactoring --- monitoring/benchmarks/views.py | 15 ++------------- monitoring/db_update_sqlite.py | 32 ++++++++++++++++++++++++++++++-- 2 files changed, 32 insertions(+), 15 deletions(-) diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index cb1d3fe9..62afd952 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -26,16 +26,11 @@ def list(self, request): if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - final_response = [] response = super(BenchmarksViewSet, self).list(request) - for single_dict in response.data: - date = single_dict.get('UpdateTime').replace(tzinfo=None) - final_response.append(date) - if type(request.accepted_renderer) is TemplateHTMLRenderer: response.data = { - 'benchmarks': final_response, + 'benchmarks': response.data, 'last_fetched': last_fetched } @@ -47,19 +42,13 @@ def retrieve(self, request, SiteName=None): if last_fetched is not None: print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - final_response = [] sites_list_qs = BenchmarksBySubmithost.objects.filter(SiteName=SiteName) sites_list_serializer = self.get_serializer(sites_list_qs, many=True) - for single_dict in sites_list_serializer.data: - date = single_dict.get('UpdateTime').replace(tzinfo=None) - final_response.append(date) - # Wrap data in a dict so that it can display in template. if type(request.accepted_renderer) is TemplateHTMLRenderer: - # Single result put in list to work with same HTML template. 
response = { - 'benchmarks': final_response, + 'benchmarks': sites_list_serializer, 'last_fetched': last_fetched } diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 02780f62..cebc24e7 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -242,8 +242,21 @@ def refresh_BenchmarksBySubmitHost(): def refresh_BenchmarksBySubmitHost_from_view(view_name): - try: - sql_query = f""" + try: + if view_name == 'VSummaries': + sql_query = f""" + SELECT + Site, + SubmitHost, + ServiceLevelType, + ServiceLevel, + max(UpdateTime) AS LatestPublish + FROM {view_name} + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + GROUP BY Site, SubmitHost; + """ + elif view_name == 'VJobRecords': + sql_query = f""" SELECT Site, SubmitHost, @@ -254,6 +267,21 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ + elif view_name == 'VNormalisedSummaries': + sql_query = f""" + SELECT + Site, + SubmitHost, + ServiceLevelType, + CpuDuration AS ServiceLevel, + max(UpdateTime) AS LatestPublish + FROM {view_name} + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + GROUP BY Site, SubmitHost; + """ + else: + log.warning(f"Unknown view name: {view_name}") + return # Dynamically get the model class from globals model_class = globals()[view_name] From 85a493b1c175d23b3d00b1cb20975ef98c9255f6 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Fri, 15 Aug 2025 14:31:12 +0100 Subject: [PATCH 142/183] Update select query for VNormalisedSummaries --- monitoring/db_update_sqlite.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index cebc24e7..5948dace 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -273,10 +273,11 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): Site, SubmitHost, ServiceLevelType, - CpuDuration AS 
ServiceLevel, + (NormalisedWallDuration / WallDuration) AS ServiceLevel, max(UpdateTime) AS LatestPublish FROM {view_name} WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + AND WallDuration > 0 GROUP BY Site, SubmitHost; """ else: From 6798431118a905ab9dc8e81013b3cf615ef6c0b2 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Mon, 18 Aug 2025 13:50:37 +0100 Subject: [PATCH 143/183] Refactoring --- .../templates/benchmarks_by_submithost.html | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/monitoring/benchmarks/templates/benchmarks_by_submithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html index 31ccb120..e8a3c966 100644 --- a/monitoring/benchmarks/templates/benchmarks_by_submithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -1,9 +1,8 @@ - - + Sites publishing benchmark records @@ -12,7 +11,12 @@

    Sites publishing benchmark records in last 2 months

    Page last updated: {{ last_fetched|date:"Y-m-d H:i:s.u" }}

    - + + + + + + {% for benchmark in benchmarks %} @@ -25,4 +29,6 @@

    Sites publishing benchmark records in last 2 months

    {% endfor %}
    SiteSubmit hostService level typeService levelSource viewLast updatedSiteSubmit hostService level typeService levelSource viewLast updated
    - \ No newline at end of file + + + From d6d0c94142ce432d783da06d49ff56aee25f340b Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 12 Aug 2025 14:56:13 +0100 Subject: [PATCH 144/183] Refresh benchmarks data --- monitoring/db_update_sqlite.py | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 5948dace..9c64c71a 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -233,6 +233,38 @@ def refresh_gridsitesync(): except DatabaseError: log.exception('Error while trying to refresh GridSiteSync') +def refresh_BenchmarksBySubmitHost(): + try: + sql_query = """ + SELECT + Site, + SubmitHost, + ServiceLevelType, + ServiceLevel, + max(UpdateTime) AS LatestPublish + FROM VJobRecords + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 2 MONTH) + GROUP BY 1; + """ + fetchset = VJobRecords.objects.raw(sql_query) + + for f in fetchset: + BenchmarksBySubmithost.objects.update_or_create( + defaults={ + 'UpdateTime': f.LatestPublish, + 'SourceView': 'VJobRecords' + }, + SiteName=f.Site, + SubmitHost=f.SubmitHost, + ServiceLevelType=f.ServiceLevelType, + ServiceLevel=f.ServiceLevel, + ) + + log.info("Refreshed BenchmarksBySubmitHost") + + except DatabaseError: + log.exception('Error while trying to refresh BenchmarksBySubmitHost') + def refresh_BenchmarksBySubmitHost(): # views = ['VSummaries', 'VJobRecords', 'VNormalisedSummaries'] From 2c51440d584179cdec520385088675fa9bfea912 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Wed, 13 Aug 2025 16:28:15 +0100 Subject: [PATCH 145/183] Fetch data from VSummaries --- monitoring/db_update_sqlite.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 9c64c71a..7192beea 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -235,6 +235,19 @@ def refresh_gridsitesync(): def 
refresh_BenchmarksBySubmitHost(): try: + # sql_query = """ + # SELECT + # Site, + # SubmitHost, + # ServiceLevelType, + # ServiceLevel, + # max(UpdateTime) AS LatestPublish + # FROM VJobRecords + # WHERE EndTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + # AND UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + # GROUP BY Site, SubmitHost; + # """ + # fetchset = VJobRecords.objects.raw(sql_query) sql_query = """ SELECT Site, @@ -242,17 +255,17 @@ def refresh_BenchmarksBySubmitHost(): ServiceLevelType, ServiceLevel, max(UpdateTime) AS LatestPublish - FROM VJobRecords - WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 2 MONTH) - GROUP BY 1; + FROM VSummaries + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + GROUP BY Site, SubmitHost; """ - fetchset = VJobRecords.objects.raw(sql_query) + fetchset = VSummaries.objects.raw(sql_query) for f in fetchset: BenchmarksBySubmithost.objects.update_or_create( defaults={ 'UpdateTime': f.LatestPublish, - 'SourceView': 'VJobRecords' + 'SourceView': 'VSummaries' }, SiteName=f.Site, SubmitHost=f.SubmitHost, From d3bff6339c747a4bb798034bba91b92f779098c1 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 14 Aug 2025 16:37:41 +0100 Subject: [PATCH 146/183] Use grid database --- monitoring/db_update_sqlite.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 7192beea..002295f6 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -259,7 +259,7 @@ def refresh_BenchmarksBySubmitHost(): WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ - fetchset = VSummaries.objects.raw(sql_query) + fetchset = VSummaries.objects.using('grid').raw(sql_query) for f in fetchset: BenchmarksBySubmithost.objects.update_or_create( From 3917a3127980a3ee0a4766fa50f0f0bde0532f5d Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Fri, 15 Aug 2025 12:02:38 +0100 Subject: [PATCH 147/183] Fetch data from two tables --- 
monitoring/db_update_sqlite.py | 40 ++++++++++++++++------------------ 1 file changed, 19 insertions(+), 21 deletions(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 002295f6..714d5f1e 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -233,50 +233,48 @@ def refresh_gridsitesync(): except DatabaseError: log.exception('Error while trying to refresh GridSiteSync') + def refresh_BenchmarksBySubmitHost(): + # views = ['VSummaries', 'VJobRecords', 'VNormalisedSummaries'] + views = ['VSummaries', 'VNormalisedSummaries'] + for view in views: + refresh_BenchmarksBySubmitHost_from_view(view) + + +def refresh_BenchmarksBySubmitHost_from_view(view_name): try: - # sql_query = """ - # SELECT - # Site, - # SubmitHost, - # ServiceLevelType, - # ServiceLevel, - # max(UpdateTime) AS LatestPublish - # FROM VJobRecords - # WHERE EndTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) - # AND UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) - # GROUP BY Site, SubmitHost; - # """ - # fetchset = VJobRecords.objects.raw(sql_query) - sql_query = """ + sql_query = f""" SELECT Site, SubmitHost, ServiceLevelType, ServiceLevel, max(UpdateTime) AS LatestPublish - FROM VSummaries + FROM {view_name} WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ - fetchset = VSummaries.objects.using('grid').raw(sql_query) + + # Dynamically get the model class from globals + model_class = globals()[view_name] + fetchset = model_class.objects.using('grid').raw(sql_query) for f in fetchset: BenchmarksBySubmithost.objects.update_or_create( defaults={ 'UpdateTime': f.LatestPublish, - 'SourceView': 'VSummaries' - }, + 'SourceView': view_name + }, SiteName=f.Site, SubmitHost=f.SubmitHost, ServiceLevelType=f.ServiceLevelType, ServiceLevel=f.ServiceLevel, ) - log.info("Refreshed BenchmarksBySubmitHost") + log.info(f"Refreshed BenchmarksBySubmitHost from {view_name}") - except DatabaseError: - log.exception('Error while trying to 
refresh BenchmarksBySubmitHost') + except Exception: + log.exception(f'Error while trying to refresh BenchmarksBySubmitHost from {view_name}') def refresh_BenchmarksBySubmitHost(): From eb5e1b09203e971d63f28cd55b7bc504f3b14452 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Fri, 15 Aug 2025 13:45:37 +0100 Subject: [PATCH 148/183] Refactoring --- monitoring/db_update_sqlite.py | 32 ++++++++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 714d5f1e..e120efd0 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -242,8 +242,21 @@ def refresh_BenchmarksBySubmitHost(): def refresh_BenchmarksBySubmitHost_from_view(view_name): - try: - sql_query = f""" + try: + if view_name == 'VSummaries': + sql_query = f""" + SELECT + Site, + SubmitHost, + ServiceLevelType, + ServiceLevel, + max(UpdateTime) AS LatestPublish + FROM {view_name} + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + GROUP BY Site, SubmitHost; + """ + elif view_name == 'VJobRecords': + sql_query = f""" SELECT Site, SubmitHost, @@ -254,6 +267,21 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ + elif view_name == 'VNormalisedSummaries': + sql_query = f""" + SELECT + Site, + SubmitHost, + ServiceLevelType, + CpuDuration AS ServiceLevel, + max(UpdateTime) AS LatestPublish + FROM {view_name} + WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + GROUP BY Site, SubmitHost; + """ + else: + log.warning(f"Unknown view name: {view_name}") + return # Dynamically get the model class from globals model_class = globals()[view_name] From a477d2b77ef849c07509b39333e186500599c3e9 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Fri, 15 Aug 2025 14:31:12 +0100 Subject: [PATCH 149/183] Update select query for VNormalisedSummaries --- monitoring/db_update_sqlite.py | 3 ++- 1 file changed, 2 
insertions(+), 1 deletion(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index e120efd0..4c98816c 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -273,10 +273,11 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): Site, SubmitHost, ServiceLevelType, - CpuDuration AS ServiceLevel, + (NormalisedWallDuration / WallDuration) AS ServiceLevel, max(UpdateTime) AS LatestPublish FROM {view_name} WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + AND WallDuration > 0 GROUP BY Site, SubmitHost; """ else: From f8cd8e9dc1914178472582badeb6638204db13ab Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Mon, 18 Aug 2025 16:59:15 +0100 Subject: [PATCH 150/183] Add verbose_name --- monitoring/benchmarks/models.py | 5 ++++- monitoring/benchmarks/serializers.py | 5 ----- monitoring/benchmarks/urls.py | 16 +++------------- monitoring/benchmarks/views.py | 22 ---------------------- monitoring/db_update_sqlite.py | 2 +- 5 files changed, 8 insertions(+), 42 deletions(-) diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py index 5ee40be5..afc51e17 100644 --- a/monitoring/benchmarks/models.py +++ b/monitoring/benchmarks/models.py @@ -23,6 +23,7 @@ class VJobRecords(models.Model): class Meta: managed = False db_table = 'VJobRecords' + verbose_name = 'Job Record' class VSummaries(models.Model): Site = models.CharField(max_length=255, primary_key=True) @@ -34,6 +35,7 @@ class VSummaries(models.Model): class Meta: managed = False db_table = 'VSummaries' + verbose_name = 'Summary' class VNormalisedSummaries(models.Model): Site = models.CharField(max_length=255, primary_key=True) @@ -44,4 +46,5 @@ class VNormalisedSummaries(models.Model): class Meta: managed = False - db_table = 'VNormalisedSummaries' + db_table = 'VNormalisedSummaries' + verbose_name = 'Normalised Summary' diff --git a/monitoring/benchmarks/serializers.py b/monitoring/benchmarks/serializers.py index 3dc31bda..0c66725b 100644 
--- a/monitoring/benchmarks/serializers.py +++ b/monitoring/benchmarks/serializers.py @@ -19,8 +19,3 @@ class Meta: 'SourceView', 'UpdateTime', ) - - # Sitename substitutes for pk - extra_kwargs = { - 'url': {'view_name': 'benchmarksbysubmithost-details', 'lookup_field': 'SiteName'} - } diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index e41f0a4c..40dff782 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -1,17 +1,7 @@ -from rest_framework import routers +from django.urls import path from monitoring.benchmarks import views -from django.urls import re_path - -router = routers.SimpleRouter() -router.register('', views.BenchmarksViewSet) urlpatterns = [ - re_path( - r'^/(?P[a-zA-Z0-9._-]+)/$', - views.BenchmarksViewSet.as_view({'get': 'retrieve'}), - name='benchmarksbysubmithost-details' - ), -] - -urlpatterns += router.urls + path('', views.BenchmarksViewSet), +] \ No newline at end of file diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 62afd952..8b401b4e 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -2,13 +2,9 @@ from datetime import datetime, timedelta from django.db.models import Max -from django.shortcuts import get_object_or_404 -import pandas as pd from rest_framework import viewsets from rest_framework.renderers import TemplateHTMLRenderer -from rest_framework.response import Response -from rest_framework.pagination import PageNumberPagination from monitoring.benchmarks.models import BenchmarksBySubmithost @@ -35,21 +31,3 @@ def list(self, request): } return response - - def retrieve(self, request, SiteName=None): - last_fetched = BenchmarksBySubmithost.objects.aggregate(Max('fetched'))['fetched__max'] - # If there's no data then last_fetched is None. 
- if last_fetched is not None: - print(last_fetched.replace(tzinfo=None), datetime.today() - timedelta(hours=1, seconds=20)) - - sites_list_qs = BenchmarksBySubmithost.objects.filter(SiteName=SiteName) - sites_list_serializer = self.get_serializer(sites_list_qs, many=True) - - # Wrap data in a dict so that it can display in template. - if type(request.accepted_renderer) is TemplateHTMLRenderer: - response = { - 'benchmarks': sites_list_serializer, - 'last_fetched': last_fetched - } - - return Response(response) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 4c98816c..18452d57 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -292,7 +292,7 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): BenchmarksBySubmithost.objects.update_or_create( defaults={ 'UpdateTime': f.LatestPublish, - 'SourceView': view_name + 'SourceView': model_class._meta.verbose_name }, SiteName=f.Site, SubmitHost=f.SubmitHost, From 9516de8e8fa6f537d49e21fe8ed1224564dcd980 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Mon, 18 Aug 2025 21:18:57 +0100 Subject: [PATCH 151/183] Fix the url --- monitoring/benchmarks/urls.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index 40dff782..8f3b790a 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -1,7 +1,11 @@ -from django.urls import path +from rest_framework import routers +from django.urls import path, include from monitoring.benchmarks import views +router = routers.SimpleRouter() +router.register('', views.BenchmarksViewSet) + urlpatterns = [ - path('', views.BenchmarksViewSet), + path('', include(router.urls)), ] \ No newline at end of file From a5389c32e7fd41038e341f9b1600d31db57ca936 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 19 Aug 2025 09:11:39 +0100 Subject: [PATCH 152/183] Update query for VJobRecords --- monitoring/db_update_sqlite.py 
| 92 +++------------------------------- 1 file changed, 8 insertions(+), 84 deletions(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 18452d57..4333249f 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -11,6 +11,7 @@ import django from django.db import DatabaseError import pandas as pd +from django.utils.timezone import make_aware, is_naive BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) @@ -45,12 +46,7 @@ get_year_month_str ) -from monitoring.benchmarks.models import ( - BenchmarksBySubmithost, - VJobRecords, - VSummaries, - VNormalisedSummaries, -) +from monitoring.benchmarks.models import BenchmarksBySubmithost try: # Read configuration from the file @@ -235,8 +231,7 @@ def refresh_gridsitesync(): def refresh_BenchmarksBySubmitHost(): - # views = ['VSummaries', 'VJobRecords', 'VNormalisedSummaries'] - views = ['VSummaries', 'VNormalisedSummaries'] + views = ['VSummaries', 'VJobRecords', 'VNormalisedSummaries'] for view in views: refresh_BenchmarksBySubmitHost_from_view(view) @@ -264,7 +259,8 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): ServiceLevel, max(UpdateTime) AS LatestPublish FROM {view_name} - WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + WHERE EndTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + AND UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ elif view_name == 'VNormalisedSummaries': @@ -291,7 +287,7 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): for f in fetchset: BenchmarksBySubmithost.objects.update_or_create( defaults={ - 'UpdateTime': f.LatestPublish, + 'UpdateTime': make_aware(f.LatestPublish) if is_naive(f.LatestPublish) else f.LatestPublish, 'SourceView': model_class._meta.verbose_name }, SiteName=f.Site, @@ -303,79 +299,7 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): log.info(f"Refreshed BenchmarksBySubmitHost from {view_name}") except Exception: - log.exception(f'Error 
while trying to refresh BenchmarksBySubmitHost from {view_name}') - - -def refresh_BenchmarksBySubmitHost(): - # views = ['VSummaries', 'VJobRecords', 'VNormalisedSummaries'] - views = ['VSummaries', 'VNormalisedSummaries'] - for view in views: - refresh_BenchmarksBySubmitHost_from_view(view) - - -def refresh_BenchmarksBySubmitHost_from_view(view_name): - try: - if view_name == 'VSummaries': - sql_query = f""" - SELECT - Site, - SubmitHost, - ServiceLevelType, - ServiceLevel, - max(UpdateTime) AS LatestPublish - FROM {view_name} - WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) - GROUP BY Site, SubmitHost; - """ - elif view_name == 'VJobRecords': - sql_query = f""" - SELECT - Site, - SubmitHost, - ServiceLevelType, - ServiceLevel, - max(UpdateTime) AS LatestPublish - FROM {view_name} - WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) - GROUP BY Site, SubmitHost; - """ - elif view_name == 'VNormalisedSummaries': - sql_query = f""" - SELECT - Site, - SubmitHost, - ServiceLevelType, - (NormalisedWallDuration / WallDuration) AS ServiceLevel, - max(UpdateTime) AS LatestPublish - FROM {view_name} - WHERE UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) - AND WallDuration > 0 - GROUP BY Site, SubmitHost; - """ - else: - log.warning(f"Unknown view name: {view_name}") - return - - # Dynamically get the model class from globals - model_class = globals()[view_name] - fetchset = model_class.objects.using('grid').raw(sql_query) - - for f in fetchset: - BenchmarksBySubmithost.objects.update_or_create( - defaults={ - 'UpdateTime': f.LatestPublish, - 'SourceView': view_name - }, - SiteName=f.Site, - SubmitHost=f.SubmitHost, - ServiceLevelType=f.ServiceLevelType, - ServiceLevel=f.ServiceLevel, - ) - - log.info(f"Refreshed BenchmarksBySubmitHost from {view_name}") - - except Exception: - log.exception(f'Error while trying to refresh BenchmarksBySubmitHost from {view_name}') + log.exception(f'Error while trying to refresh BenchmarksBySubmitHost from {view_name}') if __name__ 
== "__main__": @@ -390,4 +314,4 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): "Data retrieval and processing attempted. " "Check the above logs for details on the sync status" ) - log.info('=====================') + log.info('=====================') \ No newline at end of file From 34886bd6e3d17be945e4c1d484213ee32fe1afa9 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 19 Aug 2025 09:21:45 +0100 Subject: [PATCH 153/183] Import models --- monitoring/db_update_sqlite.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 4333249f..caf7ce9c 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -46,7 +46,12 @@ get_year_month_str ) -from monitoring.benchmarks.models import BenchmarksBySubmithost +from monitoring.benchmarks.models import ( + BenchmarksBySubmithost, + VJobRecords, + VSummaries, + VNormalisedSummaries, +) try: # Read configuration from the file From 9e99240b0af81e6db9e46df77d53b4fc44aa15dc Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 19 Aug 2025 10:55:31 +0100 Subject: [PATCH 154/183] Update header --- monitoring/benchmarks/templates/benchmarks_by_submithost.html | 2 +- monitoring/benchmarks/views.py | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/monitoring/benchmarks/templates/benchmarks_by_submithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html index e8a3c966..882b2065 100644 --- a/monitoring/benchmarks/templates/benchmarks_by_submithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -7,7 +7,7 @@ -

    Sites publishing benchmark records in last 2 months

    +

    Sites publishing benchmark records in last 3 months

    Page last updated: {{ last_fetched|date:"Y-m-d H:i:s.u" }}

    diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 8b401b4e..0725898a 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -15,7 +15,6 @@ class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): queryset = BenchmarksBySubmithost.objects.all() serializer_class = BenchmarksBySubmithostSerializer template_name = 'benchmarks_by_submithost.html' - lookup_field = 'SiteName' def list(self, request): last_fetched = BenchmarksBySubmithost.objects.aggregate(Max('fetched'))['fetched__max'] From 3173166a4ac655552e92f5985c812664a1ba46bc Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Tue, 19 Aug 2025 13:22:53 +0100 Subject: [PATCH 155/183] Rename columns and add new line --- monitoring/benchmarks/models.py | 4 ++++ .../benchmarks/templates/benchmarks_by_submithost.html | 6 +++--- monitoring/benchmarks/urls.py | 2 +- monitoring/db_update_sqlite.py | 2 +- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py index afc51e17..47e2ca06 100644 --- a/monitoring/benchmarks/models.py +++ b/monitoring/benchmarks/models.py @@ -1,5 +1,6 @@ from django.db import models + class BenchmarksBySubmithost(models.Model): fetched = models.DateTimeField(auto_now=True) SiteName = models.CharField(max_length=255) @@ -12,6 +13,7 @@ class BenchmarksBySubmithost(models.Model): class Meta: ordering = ('SiteName',) + class VJobRecords(models.Model): Site = models.CharField(max_length=255, primary_key=True) SubmitHost = models.CharField(max_length=255) @@ -25,6 +27,7 @@ class Meta: db_table = 'VJobRecords' verbose_name = 'Job Record' + class VSummaries(models.Model): Site = models.CharField(max_length=255, primary_key=True) SubmitHost = models.CharField(max_length=255) @@ -37,6 +40,7 @@ class Meta: db_table = 'VSummaries' verbose_name = 'Summary' + class VNormalisedSummaries(models.Model): Site = models.CharField(max_length=255, primary_key=True) SubmitHost = 
models.CharField(max_length=255) diff --git a/monitoring/benchmarks/templates/benchmarks_by_submithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html index 882b2065..85568307 100644 --- a/monitoring/benchmarks/templates/benchmarks_by_submithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -13,9 +13,9 @@

    Sites publishing benchmark records in last 3 months

    - - - + + + {% for benchmark in benchmarks %} diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index 8f3b790a..a03135d4 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -8,4 +8,4 @@ urlpatterns = [ path('', include(router.urls)), -] \ No newline at end of file +] diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index caf7ce9c..6b250704 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -319,4 +319,4 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): "Data retrieval and processing attempted. " "Check the above logs for details on the sync status" ) - log.info('=====================') \ No newline at end of file + log.info('=====================') From 3671d89ccd3940d25c26e59516719ef0cb26df92 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Wed, 20 Aug 2025 09:36:51 +0100 Subject: [PATCH 156/183] Trim trailing whitespace --- monitoring/benchmarks/models.py | 2 +- monitoring/db_update_sqlite.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py index 47e2ca06..2b59d49b 100644 --- a/monitoring/benchmarks/models.py +++ b/monitoring/benchmarks/models.py @@ -51,4 +51,4 @@ class VNormalisedSummaries(models.Model): class Meta: managed = False db_table = 'VNormalisedSummaries' - verbose_name = 'Normalised Summary' + verbose_name = 'Normalised Summary' diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 6b250704..c0166d90 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -304,7 +304,7 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): log.info(f"Refreshed BenchmarksBySubmitHost from {view_name}") except Exception: - log.exception(f'Error while trying to refresh BenchmarksBySubmitHost from {view_name}') + log.exception(f'Error while trying to refresh BenchmarksBySubmitHost from {view_name}') 
if __name__ == "__main__": From 5a00011c5402c17528e7ba572b803ccf3b3d6b4b Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Wed, 20 Aug 2025 11:20:04 +0100 Subject: [PATCH 157/183] Make the site sorting case insensitive --- monitoring/benchmarks/views.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 0725898a..677bb935 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta from django.db.models import Max +from django.db.models.functions import Lower from rest_framework import viewsets from rest_framework.renderers import TemplateHTMLRenderer @@ -12,7 +13,8 @@ from monitoring.benchmarks.serializers import BenchmarksBySubmithostSerializer class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): - queryset = BenchmarksBySubmithost.objects.all() + queryset = BenchmarksBySubmithost.objects.all().annotate(site_name_lower=Lower('SiteName')).order_by('site_name_lower') + serializer_class = BenchmarksBySubmithostSerializer template_name = 'benchmarks_by_submithost.html' From e7ddcf48ddcce798abcfbf4a202de2cd058f3724 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 21 Aug 2025 10:28:21 +0100 Subject: [PATCH 158/183] Rename columns --- monitoring/benchmarks/models.py | 6 +++--- monitoring/benchmarks/serializers.py | 6 +++--- .../benchmarks/templates/benchmarks_by_submithost.html | 6 +++--- monitoring/db_update_sqlite.py | 6 +++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/monitoring/benchmarks/models.py b/monitoring/benchmarks/models.py index 2b59d49b..72f9f645 100644 --- a/monitoring/benchmarks/models.py +++ b/monitoring/benchmarks/models.py @@ -5,9 +5,9 @@ class BenchmarksBySubmithost(models.Model): fetched = models.DateTimeField(auto_now=True) SiteName = models.CharField(max_length=255) SubmitHost = models.CharField(max_length=255) - ServiceLevelType = 
models.CharField(max_length=50) - ServiceLevel = models.DecimalField(max_digits=10, decimal_places=3) - SourceView = models.CharField(max_length=50) + BenchmarkType = models.CharField(max_length=50) + BenchmarkValue = models.DecimalField(max_digits=10, decimal_places=3) + RecordType = models.CharField(max_length=50) UpdateTime = models.DateTimeField() class Meta: diff --git a/monitoring/benchmarks/serializers.py b/monitoring/benchmarks/serializers.py index 0c66725b..e7eb7cc9 100644 --- a/monitoring/benchmarks/serializers.py +++ b/monitoring/benchmarks/serializers.py @@ -14,8 +14,8 @@ class Meta: fields = ( 'SiteName', 'SubmitHost', - 'ServiceLevelType', - 'ServiceLevel', - 'SourceView', + 'BenchmarkType', + 'BenchmarkValue', + 'RecordType', 'UpdateTime', ) diff --git a/monitoring/benchmarks/templates/benchmarks_by_submithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html index 85568307..c87a068a 100644 --- a/monitoring/benchmarks/templates/benchmarks_by_submithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -22,9 +22,9 @@

    Sites publishing benchmark records in last 3 months

    - - - + + + {% endfor %} diff --git a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index c0166d90..2d15ca5b 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -293,12 +293,12 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): BenchmarksBySubmithost.objects.update_or_create( defaults={ 'UpdateTime': make_aware(f.LatestPublish) if is_naive(f.LatestPublish) else f.LatestPublish, - 'SourceView': model_class._meta.verbose_name + 'RecordType': model_class._meta.verbose_name }, SiteName=f.Site, SubmitHost=f.SubmitHost, - ServiceLevelType=f.ServiceLevelType, - ServiceLevel=f.ServiceLevel, + BenchmarkType=f.ServiceLevelType, + BenchmarkValue=f.ServiceLevel, ) log.info(f"Refreshed BenchmarksBySubmitHost from {view_name}") From 41440a21f37985e47945b1177b0b8ed319d31565 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 21 Aug 2025 11:22:57 +0100 Subject: [PATCH 159/183] Remove annotate --- monitoring/benchmarks/views.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 677bb935..866cfadb 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -13,7 +13,7 @@ from monitoring.benchmarks.serializers import BenchmarksBySubmithostSerializer class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): - queryset = BenchmarksBySubmithost.objects.all().annotate(site_name_lower=Lower('SiteName')).order_by('site_name_lower') + queryset = BenchmarksBySubmithost.objects.all().order_by(Lower('SiteName')) serializer_class = BenchmarksBySubmithostSerializer template_name = 'benchmarks_by_submithost.html' From 18c209d39330bda3400c2cad6b1fe9d5521f6780 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 15:11:08 +0100 Subject: [PATCH 160/183] Remove trailing whitespace in update script --- monitoring/db_update_sqlite.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git 
a/monitoring/db_update_sqlite.py b/monitoring/db_update_sqlite.py index 2d15ca5b..2fc2a772 100644 --- a/monitoring/db_update_sqlite.py +++ b/monitoring/db_update_sqlite.py @@ -51,7 +51,7 @@ VJobRecords, VSummaries, VNormalisedSummaries, -) +) try: # Read configuration from the file @@ -242,7 +242,7 @@ def refresh_BenchmarksBySubmitHost(): def refresh_BenchmarksBySubmitHost_from_view(view_name): - try: + try: if view_name == 'VSummaries': sql_query = f""" SELECT @@ -264,7 +264,7 @@ def refresh_BenchmarksBySubmitHost_from_view(view_name): ServiceLevel, max(UpdateTime) AS LatestPublish FROM {view_name} - WHERE EndTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) + WHERE EndTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) AND UpdateTime > DATE_SUB(NOW(), INTERVAL 3 MONTH) GROUP BY Site, SubmitHost; """ From af41d31dc8e13bc17ee7538a474837934c7a8f4e Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 21 Aug 2025 13:25:27 +0100 Subject: [PATCH 161/183] Add site counts by record type --- .../templates/benchmarks_by_submithost.html | 13 +++++++++++++ monitoring/benchmarks/views.py | 13 +++++++++++-- 2 files changed, 24 insertions(+), 2 deletions(-) diff --git a/monitoring/benchmarks/templates/benchmarks_by_submithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html index c87a068a..9b8d0d71 100644 --- a/monitoring/benchmarks/templates/benchmarks_by_submithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -9,6 +9,19 @@

    Sites publishing benchmark records in last 3 months

    Page last updated: {{ last_fetched|date:"Y-m-d H:i:s.u" }}

    +
    Site Submit hostService level typeService levelSource viewBenchmark typeBenchmark valueRecord type Last updated
    {{ benchmark.SiteName }} {{ benchmark.SubmitHost }}{{ benchmark.ServiceLevelType }}{{ benchmark.ServiceLevel }}{{ benchmark.SourceView }}{{ benchmark.BenchmarkType }}{{ benchmark.BenchmarkValue }}{{ benchmark.RecordType }} {{ benchmark.UpdateTime|date:"Y-m-d H:i:s" }}
    + + + + + {% for item in site_counts_by_record_type %} + + + + + {% endfor %} +
    Record typeNumber of Sites
    {{ item.RecordType }}{{ item.site_count }}
    + diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index 866cfadb..d45ab66c 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -1,7 +1,7 @@ from django.shortcuts import render from datetime import datetime, timedelta -from django.db.models import Max +from django.db.models import Max, Count from django.db.models.functions import Lower from rest_framework import viewsets @@ -25,10 +25,19 @@ def list(self, request): response = super(BenchmarksViewSet, self).list(request) + # Count number of distinct sites per RecordType + site_counts_by_record_type = ( + BenchmarksBySubmithost.objects + .values('RecordType') + .annotate(site_count=Count('SiteName', distinct=True)) + .order_by('RecordType') + ) + if type(request.accepted_renderer) is TemplateHTMLRenderer: response.data = { 'benchmarks': response.data, - 'last_fetched': last_fetched + 'last_fetched': last_fetched, + 'site_counts_by_record_type': site_counts_by_record_type } return response From 8f8efa64e8e677ac340eb53526fe90f3838d7f35 Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 21 Aug 2025 15:18:04 +0100 Subject: [PATCH 162/183] Add new line --- monitoring/benchmarks/templates/benchmarks_by_submithost.html | 2 ++ 1 file changed, 2 insertions(+) diff --git a/monitoring/benchmarks/templates/benchmarks_by_submithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html index 9b8d0d71..c4218011 100644 --- a/monitoring/benchmarks/templates/benchmarks_by_submithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -22,6 +22,8 @@

    Sites publishing benchmark records in last 3 months

    {% endfor %}
    Site
    +
    + From 159240893abfc3e8bcfaee6eaa8c1ab5ae15b1ac Mon Sep 17 00:00:00 2001 From: Manoj Garai Date: Thu, 21 Aug 2025 16:10:42 +0100 Subject: [PATCH 163/183] Trim trailing whitespace --- monitoring/benchmarks/templates/benchmarks_by_submithost.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/benchmarks/templates/benchmarks_by_submithost.html b/monitoring/benchmarks/templates/benchmarks_by_submithost.html index c4218011..f2e3f81f 100644 --- a/monitoring/benchmarks/templates/benchmarks_by_submithost.html +++ b/monitoring/benchmarks/templates/benchmarks_by_submithost.html @@ -22,7 +22,7 @@

    Sites publishing benchmark records in last 3 months

    {% endfor %}
    Site
    -
    +
    From d8c31145798df5d9bd13e210b1b5b025d7f15317 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 11:21:13 +0100 Subject: [PATCH 164/183] Improve HTML template for cloud page - Correct info line on how far back the data goes. - Set last update time to be in ISO 8601 format. - Change "page last updated" to "data last fetched" as it's more accurate. --- monitoring/publishing/templates/cloudsites.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index 871ad9d6..368c3f9b 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -8,8 +8,8 @@ -

    Sites publishing cloud accounting records in last 3 years

    -

    Page last updated: {{ last_fetched|date:"Y-m-d H:i:s.u" }}

    +

    Sites publishing cloud accounting records in the last year

    +

    Data last fetched: {{ last_fetched|date:"c" }}

    From 502ba90235909e222ba18197970aff0359ef5213 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 12:23:12 +0100 Subject: [PATCH 165/183] Tidy HTML and improve text - Standarise indents to two spaces. - Remove extra html tag. - Add closing html tag. - Tweak column headers. - Tidy formatting. - Standardise last fetch timestampts to ISO-8601. --- .../publishing/templates/gridsites.html | 35 ++++++++--------- monitoring/publishing/templates/gridsync.html | 32 ++++++++-------- .../templates/gridsync_singlesite.html | 32 ++++++++-------- .../templates/gridsync_submithost.html | 38 +++++++++---------- 4 files changed, 71 insertions(+), 66 deletions(-) diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index f2330a9b..7b742e33 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -1,29 +1,30 @@ {% load static %} - - - - -APEL Publication Summary - - - + + + + APEL Publication Summary + + +

    APEL Publication Test

      -
    • Displays the last time the site published accounting data to the GOC. -
    • A warning / error is raised if the site has not published accounting data for 7 / 31 days, if a site has not published data for 31 days, which usually signifies a problem with APEL or RGMA services. -
    • Information about APEL APEL Wiki -
    • Contact: apel-admins [at] stfc.ac.uk -
    • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

    +
  • Displays the last time the site published accounting data to APEL. +
  • A warning / error is raised if the site has not published accounting data for 7 / 31 days, if a site has not published data for 31 days. +
  • Information about APEL: EGI Documentation +
  • Data last fetched: {{ last_fetched|date:"c"|slice:":22" }} + + +
  • SiteVMsInLastUpdateCloudTypeLastUpdated
    - - - - + + + + {% for site in sites %} diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index c663b1c1..af503be5 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -1,21 +1,23 @@ {% load static %} - - - - APEL Publication Summary - - - + + + + APEL Publication Summary + + +

    APEL Synchronisation Test

      -
    • A comparison is made between your local APEL database, and the data that you have published to the GOC. -
    • Major differences are flagged with FAIL. -
    • Information about APEL APEL Wiki -
    • Contact: apel-admins [at] stfc.ac.uk -
    • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

    +
  • A comparison is made between your local accounting, and the data that you have published to APEL. +
  • Major differences are flagged with FAIL. +
  • Information about APEL: EGI Documentation +
  • Data last fetched: {{ last_fetched|date:"c"|slice:":22" }} + + +
  • ExecutingSiteMeasurementDateMeasurementTimePublication
    Status
    SiteMeasurement DateMeasurement TimePublication Status
    @@ -37,9 +39,9 @@

    APEL Synchronisation Test

    - - + {% endfor %} -
    All sites
    {{ record.RecordEnd }} {{ record.RecordCountPublished }} {{ record.RecordCountInDb }}{{ record.SyncStatus }}{{ record.SyncStatus }}
    + + diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html index 169a043b..fd0b7ecd 100644 --- a/monitoring/publishing/templates/gridsync_singlesite.html +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -1,21 +1,23 @@ {% load static %} - - - - APEL Publication Summary - - - + + + + APEL Publication Summary + + +

    APEL Synchronisation Test

      -
    • A comparison is made between your local APEL database, and the data that you have published to the GOC. -
    • Major differences are flagged with FAIL. -
    • Information about APEL APEL Wiki -
    • Contact: apel-admins [at] stfc.ac.uk -
    • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

    +
  • A comparison is made between your local accounting, and the data that you have published to APEL. +
  • Major differences are flagged with FAIL. +
  • Information about APEL: EGI Documentation +
  • Data last fetched: {{ last_fetched|date:"c"|slice:":22" }} + + +
    @@ -35,9 +37,9 @@

    APEL Synchronisation Test

    - - + {% endfor %} -
    {{ records.0.SiteName }}
    {{ record.RecordEnd }} {{ record.RecordCountPublished }} {{ record.RecordCountInDb }}{{ record.SyncStatus }}{{ record.SyncStatus }}
    + + diff --git a/monitoring/publishing/templates/gridsync_submithost.html b/monitoring/publishing/templates/gridsync_submithost.html index 6bb115ed..6ed0cd7a 100644 --- a/monitoring/publishing/templates/gridsync_submithost.html +++ b/monitoring/publishing/templates/gridsync_submithost.html @@ -1,45 +1,45 @@ {% load static %} - - - - APEL Publication Summary - - - + + + + APEL Publication Summary + + +

    APEL Synchronisation Test

      -
    • A comparison is made between your local APEL database, and the data that you have published to the GOC. -
    • Major differences are flagged with FAIL. -
    • Information about APEL APEL Wiki -
    • Contact: apel-admins [at] stfc.ac.uk -
    • lastBuild : {{ last_fetched|date:"Y-m-d H:i:s.u"|slice:":22" }}

    +
  • A comparison is made between your local accounting, and the data that you have published to APEL. +
  • Major differences are flagged with FAIL. +
  • Information about APEL: EGI Documentation +
  • Data last fetched: {{ last_fetched|date:"c"|slice:":22" }} + + +
    - + - - + + {% for host in submisthosts %} - - - {% endfor %} -
    {{submisthosts.0.SiteName}}, {{submisthosts.0.YearMonth}}
    {{ submisthosts.0.SiteName }}, {{ submisthosts.0.YearMonth }}
    Month SubmitHostRecordStartRecordEndRecord StartRecord End Record Count
    In Your Database
    Record Count
    What You Published
    {{ host.YearMonth }} {{ host.SubmitHost }} {{ host.RecordStart }} {{ host.RecordEnd }} {{ host.RecordCountPublished }} {{ host.RecordCountInDb }}
    + + From f50e8edf21f903ac432169433d25459568db5ab9 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 12:31:10 +0100 Subject: [PATCH 166/183] Add missing semi-colon to stylesheet --- monitoring/publishing/static/stylesheet.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/publishing/static/stylesheet.css b/monitoring/publishing/static/stylesheet.css index 88f63afd..ad8e70a8 100644 --- a/monitoring/publishing/static/stylesheet.css +++ b/monitoring/publishing/static/stylesheet.css @@ -32,7 +32,7 @@ th { font-size: 12px; color: #009999; background-color: #FFFFFF; - font-weight: bold + font-weight: bold; align: left; } a:link { From 84fe8765eae7b59bf77adf2cf56745dbc77008d6 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 12:37:12 +0100 Subject: [PATCH 167/183] Fix cloud page tag(s) --- monitoring/publishing/templates/cloudsites.html | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/monitoring/publishing/templates/cloudsites.html b/monitoring/publishing/templates/cloudsites.html index 368c3f9b..0ffc1ef5 100644 --- a/monitoring/publishing/templates/cloudsites.html +++ b/monitoring/publishing/templates/cloudsites.html @@ -1,9 +1,7 @@ - - - + Sites publishing cloud accounting records From bbe9f1ab55c4a94a7635ed8b7b87a51bbbca8b71 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 12:42:27 +0100 Subject: [PATCH 168/183] Fix misplaced link tag --- monitoring/publishing/templates/gridsync.html | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index af503be5..6ca4d06b 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -33,8 +33,8 @@

    APEL Synchronisation Test

    {% for record in records.results %} - {{ record.SiteName }} - {{ record.YearMonth }} + {{ record.SiteName }} + {{ record.YearMonth }} {{ record.RecordStart }} {{ record.RecordEnd }} {{ record.RecordCountPublished }} From 4e1c338d602bcf496e85c0881b46ab65810f3ef3 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 13:52:13 +0100 Subject: [PATCH 169/183] Increase font size to match other text --- monitoring/publishing/static/stylesheet.css | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/monitoring/publishing/static/stylesheet.css b/monitoring/publishing/static/stylesheet.css index ad8e70a8..11ce5f07 100644 --- a/monitoring/publishing/static/stylesheet.css +++ b/monitoring/publishing/static/stylesheet.css @@ -65,7 +65,7 @@ hr { } .navbar-title { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; color: #FFFFFF; font-weight: bold; @@ -122,37 +122,37 @@ li { } .tabletext { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; color: #000000; background-color: #DDDDDD; } .tabletextwarning { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; color: #000000; background-color: #FFFF00; } .tabletextok { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; color: #000000; background-color: #00FF00; } .tabletexterror { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; color: #000000; background-color: #FF0000; } .tabletextinfo { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; color: #000000; background-color: #00CCFF; } .tableheader { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; color: #FFFFFF; background-color: #000000; font-weight: bold; @@ -160,7 +160,7 @@ li { } .navbar-heading { font-family: Arial, Helvetica, sans-serif; - font-size: 0.8em; + font-size: 1em; background-color: #FFFFFF; font-weight: bold; 
color: #000000; From 84668d29f9b8981aeb3da3f3139ff09af40b0441 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 13:52:38 +0100 Subject: [PATCH 170/183] Add missing style to table contents --- monitoring/publishing/templates/gridsync.html | 16 ++++++++-------- .../templates/gridsync_singlesite.html | 16 +++++++++------- .../templates/gridsync_submithost.html | 12 ++++++------ 3 files changed, 23 insertions(+), 21 deletions(-) diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index 6ca4d06b..1713bd76 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -28,18 +28,18 @@

    APEL Synchronisation Test

    Record End Record Count
    In Your Database Record Count
    What You Published - Synchronisation
    Status + Synchronisation Status {% for record in records.results %} - {{ record.SiteName }} - {{ record.YearMonth }} - {{ record.RecordStart }} - {{ record.RecordEnd }} - {{ record.RecordCountPublished }} - {{ record.RecordCountInDb }} - {{ record.SyncStatus }} + {{ record.SiteName }} + {{ record.YearMonth }} + {{ record.RecordStart }} + {{ record.RecordEnd }} + {{ record.RecordCountPublished }} + {{ record.RecordCountInDb }} + {{ record.SyncStatus }} {% endfor %} diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html index fd0b7ecd..c9d37b3e 100644 --- a/monitoring/publishing/templates/gridsync_singlesite.html +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -27,17 +27,19 @@

    APEL Synchronisation Test

    Record End Record Count
    In Your Database Record Count
    What You Published - Synchronisation
    Status + Synchronisation Status {% for record in records %} - {{ record.YearMonth }} - {{ record.RecordStart }} - {{ record.RecordEnd }} - {{ record.RecordCountPublished }} - {{ record.RecordCountInDb }} - {{ record.SyncStatus }} + + {{ record.YearMonth }} + + {{ record.RecordStart }} + {{ record.RecordEnd }} + {{ record.RecordCountPublished }} + {{ record.RecordCountInDb }} + {{ record.SyncStatus }} {% endfor %} diff --git a/monitoring/publishing/templates/gridsync_submithost.html b/monitoring/publishing/templates/gridsync_submithost.html index 6ed0cd7a..cedb8dfb 100644 --- a/monitoring/publishing/templates/gridsync_submithost.html +++ b/monitoring/publishing/templates/gridsync_submithost.html @@ -32,12 +32,12 @@

    APEL Synchronisation Test

    {% for host in submisthosts %} - {{ host.YearMonth }} - {{ host.SubmitHost }} - {{ host.RecordStart }} - {{ host.RecordEnd }} - {{ host.RecordCountPublished }} - {{ host.RecordCountInDb }} + {{ host.YearMonth }} + {{ host.SubmitHost }} + {{ host.RecordStart }} + {{ host.RecordEnd }} + {{ host.RecordCountPublished }} + {{ host.RecordCountInDb }} {% endfor %} From 370657ff8827178b38adeaf384fe79a43125732c Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 14:16:45 +0100 Subject: [PATCH 171/183] Add humanised formatting to the large numbers --- monitoring/publishing/templates/gridsync.html | 5 +++-- monitoring/publishing/templates/gridsync_singlesite.html | 5 +++-- monitoring/publishing/templates/gridsync_submithost.html | 5 +++-- monitoring/settings.py | 1 + 4 files changed, 10 insertions(+), 6 deletions(-) diff --git a/monitoring/publishing/templates/gridsync.html b/monitoring/publishing/templates/gridsync.html index 1713bd76..8c49d4ea 100644 --- a/monitoring/publishing/templates/gridsync.html +++ b/monitoring/publishing/templates/gridsync.html @@ -1,4 +1,5 @@ {% load static %} +{% load humanize %} @@ -37,8 +38,8 @@

    APEL Synchronisation Test

    {{ record.YearMonth }} {{ record.RecordStart }} {{ record.RecordEnd }} - {{ record.RecordCountPublished }} - {{ record.RecordCountInDb }} + {{ record.RecordCountPublished|intcomma }} + {{ record.RecordCountInDb|intcomma }} {{ record.SyncStatus }} {% endfor %} diff --git a/monitoring/publishing/templates/gridsync_singlesite.html b/monitoring/publishing/templates/gridsync_singlesite.html index c9d37b3e..d3321946 100644 --- a/monitoring/publishing/templates/gridsync_singlesite.html +++ b/monitoring/publishing/templates/gridsync_singlesite.html @@ -1,4 +1,5 @@ {% load static %} +{% load humanize %} @@ -37,8 +38,8 @@

    APEL Synchronisation Test

    {{ record.RecordStart }} {{ record.RecordEnd }} - {{ record.RecordCountPublished }} - {{ record.RecordCountInDb }} + {{ record.RecordCountPublished|intcomma }} + {{ record.RecordCountInDb|intcomma }} {{ record.SyncStatus }} {% endfor %} diff --git a/monitoring/publishing/templates/gridsync_submithost.html b/monitoring/publishing/templates/gridsync_submithost.html index cedb8dfb..28f1dbb2 100644 --- a/monitoring/publishing/templates/gridsync_submithost.html +++ b/monitoring/publishing/templates/gridsync_submithost.html @@ -1,4 +1,5 @@ {% load static %} +{% load humanize %} @@ -36,8 +37,8 @@

    APEL Synchronisation Test

    {{ host.SubmitHost }} {{ host.RecordStart }} {{ host.RecordEnd }} - {{ host.RecordCountPublished }} - {{ host.RecordCountInDb }} + {{ host.RecordCountPublished|intcomma }} + {{ host.RecordCountInDb|intcomma }} {% endfor %} diff --git a/monitoring/settings.py b/monitoring/settings.py index e00bb9da..8f7d710e 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -70,6 +70,7 @@ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', + 'django.contrib.humanize', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', From 48b8a4b050d6deb8d50cbbefef03abf81150ad46 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 15:36:18 +0100 Subject: [PATCH 172/183] Update site base urls to use newer path() function --- monitoring/urls.py | 28 ++++++---------------------- 1 file changed, 6 insertions(+), 22 deletions(-) diff --git a/monitoring/urls.py b/monitoring/urls.py index 7d08c4f8..15801cec 100644 --- a/monitoring/urls.py +++ b/monitoring/urls.py @@ -1,26 +1,10 @@ -"""Monitoring URL Configuration. - -The `urlpatterns` list routes URLs to views. For more information please see: - https://docs.djangoproject.com/en/1.11/topics/http/urls/ -Examples: -Function views - 1. Add an import: from my_app import views - 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') -Class-based views - 1. Add an import: from other_app.views import Home - 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') -Including another URLconf - 1. Import the include() function: from django.conf.urls import url, include - 2. 
Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) -""" -from django.conf.urls import include, url +from django.urls import include, path from django.contrib import admin urlpatterns = [ - url(r'^admin/', admin.site.urls), - url(r'^availability/', include('monitoring.availability.urls')), - url(r'^publishing/', include('monitoring.publishing.urls')), - url(r'^benchmarks/', include('monitoring.benchmarks.urls')), - url(r'^api-auth/', include('rest_framework.urls', - namespace='rest_framework')), + path('admin/', admin.site.urls), + path('availability/', include('monitoring.availability.urls')), + path('publishing/', include('monitoring.publishing.urls')), + path('benchmarks/', include('monitoring.benchmarks.urls')), + path('api-auth/', include('rest_framework.urls', namespace='rest_framework')), ] From 521253126f5a58a19a822a4980792edb6244926c Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 16:00:58 +0100 Subject: [PATCH 173/183] Add basic home page --- monitoring/settings.py | 2 +- monitoring/templates/home.html | 9 +++++++++ monitoring/urls.py | 2 ++ 3 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 monitoring/templates/home.html diff --git a/monitoring/settings.py b/monitoring/settings.py index 8f7d710e..da4991a7 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -103,7 +103,7 @@ TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], + 'DIRS': ['templates',], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ diff --git a/monitoring/templates/home.html b/monitoring/templates/home.html new file mode 100644 index 00000000..39585d61 --- /dev/null +++ b/monitoring/templates/home.html @@ -0,0 +1,9 @@ + + + + +

    APEL Data Validation

    +

    Welcome. This page gives an overview of the views available in the APEL Data Validation system.

    + + + diff --git a/monitoring/urls.py b/monitoring/urls.py index 15801cec..a2c2509f 100644 --- a/monitoring/urls.py +++ b/monitoring/urls.py @@ -1,8 +1,10 @@ from django.urls import include, path from django.contrib import admin +from django.views.generic import TemplateView urlpatterns = [ path('admin/', admin.site.urls), + path('', TemplateView.as_view(template_name='home.html')), path('availability/', include('monitoring.availability.urls')), path('publishing/', include('monitoring.publishing.urls')), path('benchmarks/', include('monitoring.benchmarks.urls')), From d085961825a3e34b24ca1a7aec8ad3c469a1a205 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 17:23:28 +0100 Subject: [PATCH 174/183] Fix template dir path --- monitoring/settings.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/monitoring/settings.py b/monitoring/settings.py index da4991a7..65d0352b 100644 --- a/monitoring/settings.py +++ b/monitoring/settings.py @@ -103,7 +103,8 @@ TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': ['templates',], + # Add project-wide templates directory + 'DIRS': [os.path.join(BASE_DIR, 'monitoring', 'templates'),], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ From 27a0437aeffd02a20a97c5e79a53b7856fb447e5 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 17:57:56 +0100 Subject: [PATCH 175/183] Add names to project urls for later referencing --- monitoring/urls.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/monitoring/urls.py b/monitoring/urls.py index a2c2509f..ac77b36d 100644 --- a/monitoring/urls.py +++ b/monitoring/urls.py @@ -4,9 +4,9 @@ urlpatterns = [ path('admin/', admin.site.urls), - path('', TemplateView.as_view(template_name='home.html')), - path('availability/', include('monitoring.availability.urls')), + path('', TemplateView.as_view(template_name='home.html'), name='home'), + path('availability/', 
include('monitoring.availability.urls'), name='availability'), path('publishing/', include('monitoring.publishing.urls')), - path('benchmarks/', include('monitoring.benchmarks.urls')), + path('benchmarks/', include('monitoring.benchmarks.urls'), name='benchmarks'), path('api-auth/', include('rest_framework.urls', namespace='rest_framework')), ] From eb21d6af6663ad2ff56091cfbd8ec0e0155ed329 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 17:58:25 +0100 Subject: [PATCH 176/183] Add list of hyperlinked pages to home page --- monitoring/templates/home.html | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/monitoring/templates/home.html b/monitoring/templates/home.html index 39585d61..cdde6825 100644 --- a/monitoring/templates/home.html +++ b/monitoring/templates/home.html @@ -5,5 +5,13 @@

    APEL Data Validation

    Welcome. This page gives an overview of the views available in the APEL Data Validation system.

    + + From 0ed1f1d6bc7f503b85620fde1f40d1fe0aa5e243 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 19:01:42 +0100 Subject: [PATCH 177/183] Reorganise URL naming --- monitoring/availability/urls.py | 2 +- monitoring/benchmarks/urls.py | 5 +---- monitoring/templates/home.html | 8 ++++---- monitoring/urls.py | 4 ++-- 4 files changed, 8 insertions(+), 11 deletions(-) diff --git a/monitoring/availability/urls.py b/monitoring/availability/urls.py index 3b2d5752..15b7627b 100644 --- a/monitoring/availability/urls.py +++ b/monitoring/availability/urls.py @@ -3,5 +3,5 @@ from monitoring.availability import views urlpatterns = [ - url(r'^$', views.status), + url(r'^$', views.status, name='availability'), ] diff --git a/monitoring/benchmarks/urls.py b/monitoring/benchmarks/urls.py index a03135d4..e3b06cbb 100644 --- a/monitoring/benchmarks/urls.py +++ b/monitoring/benchmarks/urls.py @@ -1,11 +1,8 @@ from rest_framework import routers -from django.urls import path, include from monitoring.benchmarks import views router = routers.SimpleRouter() router.register('', views.BenchmarksViewSet) -urlpatterns = [ - path('', include(router.urls)), -] +urlpatterns = router.urls diff --git a/monitoring/templates/home.html b/monitoring/templates/home.html index cdde6825..99b1047e 100644 --- a/monitoring/templates/home.html +++ b/monitoring/templates/home.html @@ -7,10 +7,10 @@

    APEL Data Validation

    diff --git a/monitoring/urls.py b/monitoring/urls.py index ac77b36d..10c97b9d 100644 --- a/monitoring/urls.py +++ b/monitoring/urls.py @@ -5,8 +5,8 @@ urlpatterns = [ path('admin/', admin.site.urls), path('', TemplateView.as_view(template_name='home.html'), name='home'), - path('availability/', include('monitoring.availability.urls'), name='availability'), + path('availability/', include('monitoring.availability.urls')), path('publishing/', include('monitoring.publishing.urls')), - path('benchmarks/', include('monitoring.benchmarks.urls'), name='benchmarks'), + path('benchmarks/', include('monitoring.benchmarks.urls')), path('api-auth/', include('rest_framework.urls', namespace='rest_framework')), ] From 5f8bf5096507851eeb910aa63a090f30eed2f3d3 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 21:27:12 +0100 Subject: [PATCH 178/183] Use correct URL name --- monitoring/templates/home.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/templates/home.html b/monitoring/templates/home.html index 99b1047e..21418238 100644 --- a/monitoring/templates/home.html +++ b/monitoring/templates/home.html @@ -10,7 +10,7 @@

    APEL Data Validation

  • {% url 'cloudsite-list' %}
  • {% url 'gridsite-list' %}
  • {% url 'gridsitesync-list' %}
  • -
  • {% url 'benchmark-list' %}
  • +
  • {% url 'benchmarksbysubmithost-list' %}
  • From b2917abe95549ffbc03e012da5c7f5e40f714fee Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 21:27:44 +0100 Subject: [PATCH 179/183] Remove availability link as not for human parsing --- monitoring/templates/home.html | 1 - 1 file changed, 1 deletion(-) diff --git a/monitoring/templates/home.html b/monitoring/templates/home.html index 21418238..032956f7 100644 --- a/monitoring/templates/home.html +++ b/monitoring/templates/home.html @@ -6,7 +6,6 @@

    APEL Data Validation

    Welcome. This page gives an overview of the views available in the APEL Data Validation system.

      -
    • {% url 'availability' %}
    • {% url 'cloudsite-list' %}
    • {% url 'gridsite-list' %}
    • {% url 'gridsitesync-list' %}
    • From 11246cc3ad32468e3e62069e8760b37580c621e7 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Thu, 21 Aug 2025 21:29:25 +0100 Subject: [PATCH 180/183] Add more detail and examples --- monitoring/templates/home.html | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/monitoring/templates/home.html b/monitoring/templates/home.html index 032956f7..f2fa27af 100644 --- a/monitoring/templates/home.html +++ b/monitoring/templates/home.html @@ -5,12 +5,28 @@

      APEL Data Validation

      Welcome. This page gives an overview of the views available in the APEL Data Validation system.

      +

      EGI/WLCG Grid Accounting

      +

      EGI Cloud Accounting

      +

      + +

      Return format

      +

      + The views above return HTML by default, but can be set to return JSON by adding ?format=json at the end of the URL. + For example: {% url 'gridsitesync-detail' 'RAL-LCG2' %}?format=json +

      + From 754ef7550914e2f843e9f2d64fe2bd9a3486bf04 Mon Sep 17 00:00:00 2001 From: Adrian Coveney Date: Wed, 27 Aug 2025 16:18:19 +0100 Subject: [PATCH 181/183] Remove repitition --- monitoring/publishing/templates/gridsites.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/monitoring/publishing/templates/gridsites.html b/monitoring/publishing/templates/gridsites.html index 7b742e33..dc5426e6 100644 --- a/monitoring/publishing/templates/gridsites.html +++ b/monitoring/publishing/templates/gridsites.html @@ -12,7 +12,7 @@

      APEL Publication Test

      • Displays the last time the site published accounting data to APEL. -
      • A warning / error is raised if the site has not published accounting data for 7 / 31 days, if a site has not published data for 31 days. +
      • A warning / error is raised if the site has not published accounting data for 7 / 31 days.
      • Information about APEL: EGI Documentation
      • Data last fetched: {{ last_fetched|date:"c"|slice:":22" }}
      From 8c97355ee375365040988ca5278313a2c2657f7f Mon Sep 17 00:00:00 2001 From: Adrian Coveney <4836233+tofu-rocketry@users.noreply.github.com> Date: Thu, 28 Aug 2025 11:15:46 +0100 Subject: [PATCH 182/183] Convert remaining url() to path() (#58) url() is deprecating and is directly replaced by re_path(), but the new simpler path() function suits this use case. --- monitoring/availability/urls.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/monitoring/availability/urls.py b/monitoring/availability/urls.py index 15b7627b..077876e9 100644 --- a/monitoring/availability/urls.py +++ b/monitoring/availability/urls.py @@ -1,7 +1,7 @@ -from django.conf.urls import url +from django.urls import path from monitoring.availability import views urlpatterns = [ - url(r'^$', views.status, name='availability'), + path('', views.status, name='availability'), ] From 896bb3a73578744974e74133d0b2097fbcfc1421 Mon Sep 17 00:00:00 2001 From: garaimanoj <99975605+garaimanoj@users.noreply.github.com> Date: Wed, 3 Sep 2025 15:54:01 +0100 Subject: [PATCH 183/183] Add sorting by update time (#59) * Add sorting by update time * Add comment about ordering --- monitoring/benchmarks/views.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/monitoring/benchmarks/views.py b/monitoring/benchmarks/views.py index d45ab66c..736d6ba4 100644 --- a/monitoring/benchmarks/views.py +++ b/monitoring/benchmarks/views.py @@ -13,7 +13,9 @@ from monitoring.benchmarks.serializers import BenchmarksBySubmithostSerializer class BenchmarksViewSet(viewsets.ReadOnlyModelViewSet): - queryset = BenchmarksBySubmithost.objects.all().order_by(Lower('SiteName')) + # Lower('SiteName'): sorts sites alphabetically, case-insensitively. + # '-UpdateTime': sorts records within each site by UpdateTime in descending order (latest first). 
+ queryset = BenchmarksBySubmithost.objects.all().order_by(Lower('SiteName'), '-UpdateTime') serializer_class = BenchmarksBySubmithostSerializer template_name = 'benchmarks_by_submithost.html'