diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000..5a1d28f --- /dev/null +++ b/.dockerignore @@ -0,0 +1,6 @@ +db-data/ +docker-compose.yml +Dockerfile +.env +.git/ +.gitignore diff --git a/.env b/.env index e378748..7ad94b8 100644 --- a/.env +++ b/.env @@ -1,12 +1,11 @@ -ENGINE_TYPE=postgresql -DB_HOST=db +DB_HOST=timescaledb DB_PORT=5432 DB_NAME=postgres DB_USER=postgres DB_PASSWORD=postgres SECRET_KEY=very-secret-key -ALLOWED_HOSTS=localhost,example.com,interface +ALLOWED_HOSTS=localhost,example.com,interface,192.168.0.19 CORS_ORIGIN_ALLOW_ALL=False CORS_ORIGIN_WHITELIST=http://example.com,http://localhost,http://interface:8000 CHART_TYPE=uplot diff --git a/.github/workflows/ruff-format-check.yml b/.github/workflows/ruff-format-check.yml new file mode 100644 index 0000000..259651a --- /dev/null +++ b/.github/workflows/ruff-format-check.yml @@ -0,0 +1,31 @@ +name: Ruff Format Check + +on: + push: + paths: + - '**.py' + pull_request: + paths: + - '**.py' + +jobs: + ruff-format: + name: Check code formatting with Ruff + runs-on: ubuntu-latest + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Set up Python + uses: actions/setup-python@v4 + with: + python-version: '3.12' + + - name: Install Ruff + run: | + pip install ruff + + - name: Run ruff format check + run: | + ruff format --check . diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 9c45e98..f14634e 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,6 +16,21 @@ jobs: matrix: python-version: ['3.10', '3.11', '3.12'] + services: + timescaledb: + image: timescale/timescaledb:latest-pg16 + env: + POSTGRES_PASSWORD: postgres + # Set health checks to wait until postgres has started + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + # Maps tcp port 5432 on service container to the host + - 5432:5432 + steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} @@ -30,3 +45,9 @@ jobs: run: | python manage.py collectstatic --noinput python manage.py test + env: + DB_NAME: postgres + DB_USER: postgres + DB_PASSWORD: postgres + DB_HOST: localhost + DB_PORT: 5432 diff --git a/Dockerfile b/Dockerfile index e573653..1455503 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,7 +5,7 @@ WORKDIR /app COPY requirements.txt . -RUN apk add --no-cache --virtual .build-deps gcc musl-dev mariadb-connector-c-dev libpq-dev +RUN apk add --no-cache --virtual .build-deps gcc musl-dev libpq-dev RUN pip install --no-cache-dir --upgrade pip && \ pip install --no-cache-dir gunicorn && \ @@ -20,10 +20,11 @@ COPY --from=builder /usr/local/lib/python3.12/site-packages /usr/local/lib/pytho COPY --from=builder /usr/local/bin/gunicorn /usr/local/bin/gunicorn COPY --from=builder /app /app -RUN apk add --no-cache mariadb-connector-c libpq +RUN apk add --no-cache libpq COPY . . 
-CMD ["sh", "-c", "python manage.py makemigrations && python manage.py migrate && python manage.py collectstatic --noinput && gunicorn --bind=0.0.0.0:8000 --timeout 300 --workers=3 --threads=3 --max-requests 5 --max-requests-jitter 2 pim.wsgi:application"] + +CMD ["sh", "-c", "python manage.py migrate && python manage.py collectstatic --noinput && gunicorn --bind=0.0.0.0:8000 --timeout 300 --workers=3 --threads=3 --max-requests 20 --max-requests-jitter 5 pim.wsgi:application"] EXPOSE 8000/tcp diff --git a/README.md b/README.md index ae2517a..a68cfc9 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,9 @@ The Django application is designed to allow users to view power usage charts for MyStrom and Shelly3EM devices. The application collects the current power usage data every 60 seconds, which is then used to calculate and present power usage charts to the user. +This project uses [TimeScaleDB](https://www.timescale.com/) as database. +The database is optimized for time-series data and this project uses queries which are only compatible with TimeScaleDB. + There are two main views: the Device view and the Result view. Here's what each view looks like: #### Device View @@ -37,7 +40,6 @@ pip install -r requirements.txt Here's a breakdown of all the environment variables that are being used in the Django application: -- `ENGINE_TYPE`: Specifies the type of database engine to use, either `mysql` or `postgresql`. - `DB_NAME`: Specifies the name of the database to use. - `DB_USER`: Specifies the username to use when connecting to the database. - `DB_PASSWORD`: Specifies the password to use when connecting to the database. @@ -87,7 +89,6 @@ Static files are served with WhiteNoise. docker run \ --name mystrom-interface \ -p 8000:8000 \ - -e ENGINE_TYPE={mysql/postgresql} -e DB_NAME=db-name \ -e DB_USER=username \ -e DB_PASSWORD=password \ @@ -107,7 +108,3 @@ Make sure to replace the content in `{...}` with your variable of your choice. ```sh docker compose up ``` - -## Configurations -### Important things to know -Database configuraton in `pim/settings.py` under `DATABASES` diff --git a/docker-compose.yml b/docker-compose.yml index 7fad903..03ed3d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,19 +1,42 @@ + services: interface: container_name: interface image: ghcr.io/maexled/mystrom-django-interface:master -# build: . + # build: . 
privileged: true restart: always ports: - "80:8000" env_file: - .env + depends_on: + - timescaledb + + timescaledb: + image: timescale/timescaledb:latest-pg16 + environment: + POSTGRES_DB: postgres + POSTGRES_USER: postgres + POSTGRES_PASSWORD: postgres + volumes: + - ./db-data:/var/lib/postgresql/data + + pgadmin: + image: dpage/pgadmin4 + environment: + PGADMIN_DEFAULT_EMAIL: admin@admin.com + PGADMIN_DEFAULT_PASSWORD: admin + ports: + - "8080:80" + depends_on: + - timescaledb requester: container_name: interface-requester image: curlimages/curl:7.80.0 - command: ["sh", "-c", "while true; do sleep 60; curl -X POST http://interface:8000/shelly-api/devices/request-and-save-results >/dev/null 2>&1 & curl -X POST http://interface:8000/api/devices/request-and-save-results >/dev/null 2>&1; done"] + command: ["sh", "-c", "while true; do sleep 60; curl -fsS -X POST http://interface:8000/shelly-api/devices/request-and-save-results && curl -fsS -X POST http://interface:8000/api/devices/request-and-save-results; done"] depends_on: - interface + diff --git a/interface/admin.py b/interface/admin.py index 8c38f3f..846f6b4 100644 --- a/interface/admin.py +++ b/interface/admin.py @@ -1,3 +1 @@ -from django.contrib import admin - # Register your models here. diff --git a/interface/apps.py b/interface/apps.py index 182cf70..753a5c0 100644 --- a/interface/apps.py +++ b/interface/apps.py @@ -2,5 +2,5 @@ class MystromConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'interface' + default_auto_field = "django.db.models.BigAutoField" + name = "interface" diff --git a/interface/forms.py b/interface/forms.py index 13a5bc1..c20fc4a 100644 --- a/interface/forms.py +++ b/interface/forms.py @@ -1,32 +1,24 @@ from django import forms from mystrom_rest.models import MystromDevice from shelly3em_rest.models import Shelly3EMDevice - + + # creating a form class MystromDeviceForm(forms.ModelForm): - # create meta class class Meta: # specify model to be used model = MystromDevice - + # specify fields to be used - fields = [ - "name", - "ip", - "active" - ] + fields = ["name", "ip", "active"] + class Shelly3EMDeviceForm(forms.ModelForm): - # create meta class class Meta: # specify model to be used model = Shelly3EMDevice - + # specify fields to be used - fields = [ - "name", - "ip", - "active" - ] \ No newline at end of file + fields = ["name", "ip", "active"] diff --git a/interface/templatetags/classname.py b/interface/templatetags/classname.py index ce2fa9d..c8c92fe 100644 --- a/interface/templatetags/classname.py +++ b/interface/templatetags/classname.py @@ -2,6 +2,7 @@ register = template.Library() + @register.filter def classname(obj): - return obj.__class__.__name__ \ No newline at end of file + return obj.__class__.__name__ diff --git a/interface/tests.py b/interface/tests.py index 46c1127..8758fde 100644 --- a/interface/tests.py +++ b/interface/tests.py @@ -1,82 +1,87 @@ from django.urls import reverse from django.test import TestCase -from mystrom_rest.models import MystromDevice, MystromResult +from mystrom_rest.models import MystromDevice from bs4 import BeautifulSoup + class MystromDevicesTestCase(TestCase): def setUp(self): - MystromDevice.objects.create(name="Device 1", ip="192.168.0.205") - MystromDevice.objects.create(name="Device 2", ip="127.0.0.1") + self.device1 = MystromDevice.objects.create(name="Device 1", ip="192.168.0.205") + self.device2 = MystromDevice.objects.create(name="Device 2", ip="127.0.0.1") def test_create_device(self): - url = reverse('mystrom_devices') - 
response = self.client.post(url, {'name': 'test', 'ip': '127.0.0.1'}) + url = reverse("mystrom_devices") + response = self.client.post(url, {"name": "test", "ip": "127.0.0.1"}) self.assertContains(response, "test") self.assertContains(response, "127.0.0.1") self.assertContains(response, "test") self.assertContains(response, "127.0.0.1") self.assertEqual(len(MystromDevice.objects.all()), 3) - + def test_create_device_fail_invalid_ip(self): - url = reverse('mystrom_devices') - response = self.client.post(url, {'name': 'test', 'ip': 'Not an ip'}) - self.assertContains(response, "
Not valid IP Address
") + url = reverse("mystrom_devices") + response = self.client.post(url, {"name": "test", "ip": "Not an ip"}) + self.assertContains( + response, '
Not valid IP Address
' + ) self.assertEqual(len(MystromDevice.objects.all()), 2) def test_delete_devices(self): - url = reverse('mystrom_devices') + url = reverse("mystrom_devices") response = self.client.delete(url) self.assertNotContains(response, "") - self.assertEqual(len(MystromDevice.objects.all()), 0) - + self.assertEqual(len(MystromDevice.objects.all()), 0) + def test_get_create_device_form(self): - url = reverse('mystrom_devices') + url = reverse("mystrom_devices") response = self.client.get(url) self.assertContains(response, "Create") self.assertContains(response, "") def test_update_device(self): - url = reverse('mystrom_device', args=(1,)) - device = MystromDevice.objects.get(id=1) + url = reverse("mystrom_device", args=(self.device1.id,)) + device = MystromDevice.objects.get(id=self.device1.id) - self.assertEqual(device.name, "Device 1") - response = self.client.post(url, {'name': 'Not Device 1', 'ip': '192.168.0.205'}) + self.assertEqual(device.name, self.device1.name) + response = self.client.post( + url, {"name": "Not Device 1", "ip": "192.168.0.205"} + ) self.assertContains(response, "Not Device 1") - device = MystromDevice.objects.get(id=1) + device = MystromDevice.objects.get(id=self.device1.id) self.assertEqual(device.name, "Not Device 1") def test_update_device_fail_invalid_ip(self): - url = reverse('mystrom_device', args=(1,)) - response = self.client.post(url, {'name': 'Not Device 1', 'ip': 'Not an ip'}) - self.assertContains(response, "
Not valid IP Address
") + url = reverse("mystrom_device", args=(self.device1.id,)) + response = self.client.post(url, {"name": "Not Device 1", "ip": "Not an ip"}) + self.assertContains( + response, '
Not valid IP Address
' + ) self.assertEqual(len(MystromDevice.objects.all()), 2) def test_delete_device(self): - url = reverse('mystrom_device', args=(1,)) + url = reverse("mystrom_device", args=(self.device1.id,)) response = self.client.delete(url) self.assertContains(response, "Device 2") self.assertNotContains(response, "Device 1") self.assertEqual(len(MystromDevice.objects.all()), 1) def test_get_update_device_form(self): - url = reverse('mystrom_device', args=(1,)) + url = reverse("mystrom_device", args=(self.device1.id,)) response = self.client.get(url) self.assertContains(response, "Edit device") self.assertContains(response, "") + class MystromResultsTestCase(TestCase): def setUp(self): self.device = MystromDevice.objects.create(name="Device 1", ip="192.168.0.205") def test_get_results_page(self): - url = reverse('results') + url = reverse("results") response = self.client.get(url) - soup = BeautifulSoup(response.content, 'html.parser') - button = soup.find(id='mystrom-' + str(self.device.id)) + soup = BeautifulSoup(response.content, "html.parser") + button = soup.find(id="mystrom-" + str(self.device.id)) self.assertIsNotNone(button) - - - \ No newline at end of file diff --git a/interface/urls.py b/interface/urls.py index 40f34d9..605a8e6 100644 --- a/interface/urls.py +++ b/interface/urls.py @@ -2,12 +2,12 @@ from . import views urlpatterns = [ - path('', views.index, name='index'), - path('results', views.results, name='results'), - path('results/mystrom/', views.mystrom_results, name='mystrom_results'), - path('results/shelly/', views.shelly_results, name='shelly_results'), - path('devices/mystrom/', views.mystrom_device_info, name='mystrom_device'), - path('devices/mystrom', views.mystrom_devices, name='mystrom_devices'), - path('devices/shelly/', views.shelly_device_info, name='shelly_device'), - path('devices/shelly', views.shelly_devices, name='shelly_devices'), -] \ No newline at end of file + path("", views.index, name="index"), + path("results", views.results, name="results"), + path("results/mystrom/", views.mystrom_results, name="mystrom_results"), + path("results/shelly/", views.shelly_results, name="shelly_results"), + path("devices/mystrom/", views.mystrom_device_info, name="mystrom_device"), + path("devices/mystrom", views.mystrom_devices, name="mystrom_devices"), + path("devices/shelly/", views.shelly_device_info, name="shelly_device"), + path("devices/shelly", views.shelly_devices, name="shelly_devices"), +] diff --git a/interface/views.py b/interface/views.py index f4dff24..3640ae4 100644 --- a/interface/views.py +++ b/interface/views.py @@ -4,56 +4,83 @@ from shelly3em_rest.models import Shelly3EMDevice from .forms import MystromDeviceForm, Shelly3EMDeviceForm + def index(request): form = MystromDeviceForm() - return render(request, 'index.html', { - 'devices' : MystromDevice.objects.all(), - 'shelly_devices' : Shelly3EMDevice.objects.all(), - 'form': form - }) + return render( + request, + "index.html", + { + "devices": MystromDevice.objects.all(), + "shelly_devices": Shelly3EMDevice.objects.all(), + "form": form, + }, + ) + def results(request): - return render(request, 'results.html', { - 'devices' : MystromDevice.objects.all(), - 'shelly_devices' : Shelly3EMDevice.objects.all() - }) + return render( + request, + "results.html", + { + "devices": MystromDevice.objects.all(), + "shelly_devices": Shelly3EMDevice.objects.all(), + }, + ) + def mystrom_results(request, id): device = get_object_or_404(MystromDevice, id=id) - return render(request, 'single_result.html', { - 'device' : 
device, - }) + return render( + request, + "single_result.html", + { + "device": device, + }, + ) + def shelly_results(request, id): device = get_object_or_404(Shelly3EMDevice, id=id) - return render(request, 'single_result.html', { - 'device' : device, - }) + return render( + request, + "single_result.html", + { + "device": device, + }, + ) + def mystrom_devices(request): if request.method == "POST": form = MystromDeviceForm(request.POST) if form.is_valid(): form.save() - return render(request, 'device_table_entries.html', { - 'devices' : MystromDevice.objects.all(), - }) + return render( + request, + "device_table_entries.html", + { + "devices": MystromDevice.objects.all(), + }, + ) else: - return render(request, 'device_form_rows.html', { - 'form' : form, - }) + return render( + request, + "device_form_rows.html", + { + "form": form, + }, + ) elif request.method == "DELETE": MystromDevice.objects.all().delete() - return render(request, 'device_table_entries.html', { - 'devices' : [] - }) + return render(request, "device_table_entries.html", {"devices": []}) else: form = MystromDeviceForm() - return render(request, 'device_form.html', { - 'form': form, - 'device_type': "MystromDevice" - }) + return render( + request, "device_form.html", {"form": form, "device_type": "MystromDevice"} + ) + def mystrom_device_info(request, id): device = get_object_or_404(MystromDevice, id=id) @@ -61,48 +88,71 @@ def mystrom_device_info(request, id): form = MystromDeviceForm(request.POST, instance=device) if form.is_valid(): form.save() - return render(request, 'device_table_entries.html', { - 'devices' : MystromDevice.objects.all(), - }) + return render( + request, + "device_table_entries.html", + { + "devices": MystromDevice.objects.all(), + }, + ) else: - return render(request, 'device_form_rows.html', { - 'form' : form, - }) + return render( + request, + "device_form_rows.html", + { + "form": form, + }, + ) elif request.method == "DELETE": MystromDevice.objects.filter(id=device.id).delete() - return render(request, 'device_table_entries.html', { - 'devices' : MystromDevice.objects.all(), - }) + return render( + request, + "device_table_entries.html", + { + "devices": MystromDevice.objects.all(), + }, + ) else: form = MystromDeviceForm(instance=device) - return render(request, 'device_form.html', { - 'form': form, - 'device': device, - }) + return render( + request, + "device_form.html", + { + "form": form, + "device": device, + }, + ) + def shelly_devices(request): if request.method == "POST": form = Shelly3EMDeviceForm(request.POST) if form.is_valid(): form.save() - return render(request, 'device_table_entries.html', { - 'devices' : Shelly3EMDevice.objects.all(), - }) + return render( + request, + "device_table_entries.html", + { + "devices": Shelly3EMDevice.objects.all(), + }, + ) else: - return render(request, 'device_form_rows.html', { - 'form' : form, - }) + return render( + request, + "device_form_rows.html", + { + "form": form, + }, + ) elif request.method == "DELETE": Shelly3EMDevice.objects.all().delete() - return render(request, 'device_table_entries.html', { - 'devices' : [] - }) + return render(request, "device_table_entries.html", {"devices": []}) else: form = Shelly3EMDeviceForm() - return render(request, 'device_form.html', { - 'form': form, - 'device_type': "Shelly3EMDevice" - }) + return render( + request, "device_form.html", {"form": form, "device_type": "Shelly3EMDevice"} + ) + def shelly_device_info(request, id): device = get_object_or_404(Shelly3EMDevice, id=id) @@ -110,21 +160,37 
@@ def shelly_device_info(request, id): form = Shelly3EMDeviceForm(request.POST, instance=device) if form.is_valid(): form.save() - return render(request, 'device_table_entries.html', { - 'devices' : Shelly3EMDevice.objects.all(), - }) + return render( + request, + "device_table_entries.html", + { + "devices": Shelly3EMDevice.objects.all(), + }, + ) else: - return render(request, 'device_form_rows.html', { - 'form' : form, - }) + return render( + request, + "device_form_rows.html", + { + "form": form, + }, + ) elif request.method == "DELETE": Shelly3EMDevice.objects.filter(id=device.id).delete() - return render(request, 'device_table_entries.html', { - 'devices' : Shelly3EMDevice.objects.all(), - }) + return render( + request, + "device_table_entries.html", + { + "devices": Shelly3EMDevice.objects.all(), + }, + ) else: form = Shelly3EMDeviceForm(instance=device) - return render(request, 'device_form.html', { - 'form': form, - 'device': device, - }) \ No newline at end of file + return render( + request, + "device_form.html", + { + "form": form, + "device": device, + }, + ) diff --git a/manage.py b/manage.py index a02b17b..dd1fc6e 100755 --- a/manage.py +++ b/manage.py @@ -1,12 +1,13 @@ #!/usr/bin/env python """Django's command-line utility for administrative tasks.""" + import os import sys def main(): """Run administrative tasks.""" - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pim.settings') + os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pim.settings") try: from django.core.management import execute_from_command_line except ImportError as exc: @@ -18,5 +19,5 @@ def main(): execute_from_command_line(sys.argv) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/mystrom_rest/admin.py b/mystrom_rest/admin.py index 8c38f3f..846f6b4 100644 --- a/mystrom_rest/admin.py +++ b/mystrom_rest/admin.py @@ -1,3 +1 @@ -from django.contrib import admin - # Register your models here. 
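The migrations that follow for `mystrom_rest` (and later for `shelly3em_rest`) rebuild the result tables with raw SQL and register them as TimescaleDB hypertables via `create_hypertable`. They assume the `timescaledb` extension is already enabled in the target database; the `timescale/timescaledb` images used in `docker-compose.yml` and the CI workflow normally create it automatically. For a plain PostgreSQL server where the extension is installed but not yet enabled, a guard migration along these lines could make that assumption explicit (a hypothetical sketch, not part of this diff; the existing `0003` migration would also need to depend on it):

```python
# Hypothetical guard migration (illustrative only): ensure the TimescaleDB
# extension exists before the raw-SQL hypertable migrations run.
from django.db import migrations


class Migration(migrations.Migration):
    dependencies = [
        ("mystrom_rest", "0002_mystromdevice_active"),
    ]

    operations = [
        migrations.RunSQL(
            sql="CREATE EXTENSION IF NOT EXISTS timescaledb;",
            reverse_sql=migrations.RunSQL.noop,
        ),
    ]
```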
diff --git a/mystrom_rest/apps.py b/mystrom_rest/apps.py index e46a32e..049c840 100644 --- a/mystrom_rest/apps.py +++ b/mystrom_rest/apps.py @@ -2,5 +2,5 @@ class MystromRestConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'mystrom_rest' + default_auto_field = "django.db.models.BigAutoField" + name = "mystrom_rest" diff --git a/mystrom_rest/migrations/0001_initial.py b/mystrom_rest/migrations/0001_initial.py index a208131..870c1db 100644 --- a/mystrom_rest/migrations/0001_initial.py +++ b/mystrom_rest/migrations/0001_initial.py @@ -6,37 +6,52 @@ class Migration(migrations.Migration): - initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='MystromDevice', + name="MystromDevice", fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('name', models.CharField(max_length=16)), - ('ip', models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message='Not valid IP Address', regex='^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$')])), + ("id", models.AutoField(primary_key=True, serialize=False)), + ("name", models.CharField(max_length=16)), + ( + "ip", + models.CharField( + max_length=16, + validators=[ + django.core.validators.RegexValidator( + message="Not valid IP Address", + regex="^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", + ) + ], + ), + ), ], options={ - 'db_table': 'devices', + "db_table": "devices", }, ), migrations.CreateModel( - name='MystromResult', + name="MystromResult", fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('power', models.FloatField()), - ('ws', models.FloatField()), - ('relay', models.IntegerField()), - ('temperature', models.FloatField()), - ('date', models.DateTimeField(auto_now_add=True)), - ('device', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='mystrom_rest.mystromdevice')), + ("id", models.AutoField(primary_key=True, serialize=False)), + ("power", models.FloatField()), + ("ws", models.FloatField()), + ("relay", models.IntegerField()), + ("temperature", models.FloatField()), + ("date", models.DateTimeField(auto_now_add=True)), + ( + "device", + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="mystrom_rest.mystromdevice", + ), + ), ], options={ - 'db_table': 'results', + "db_table": "results", }, ), ] diff --git a/mystrom_rest/migrations/0002_mystromdevice_active.py b/mystrom_rest/migrations/0002_mystromdevice_active.py index 67f331d..86a0e7b 100644 --- a/mystrom_rest/migrations/0002_mystromdevice_active.py +++ b/mystrom_rest/migrations/0002_mystromdevice_active.py @@ -4,15 +4,14 @@ class Migration(migrations.Migration): - dependencies = [ - ('mystrom_rest', '0001_initial'), + ("mystrom_rest", "0001_initial"), ] operations = [ migrations.AddField( - model_name='mystromdevice', - name='active', + model_name="mystromdevice", + name="active", field=models.BooleanField(default=True), ), ] diff --git a/mystrom_rest/migrations/0003_auto_20240720_1432.py b/mystrom_rest/migrations/0003_auto_20240720_1432.py new file mode 100644 index 0000000..e4d18af --- /dev/null +++ b/mystrom_rest/migrations/0003_auto_20240720_1432.py @@ -0,0 +1,56 @@ +# Generated by Django 5.0.7 on 2024-07-20 14:32 + +from django.db import migrations + + +def forwards(apps, schema_editor): + # Create a new temporary table with the desired schema + with 
schema_editor.connection.cursor() as cursor: + cursor.execute(""" + CREATE TABLE results_temp ( + date TIMESTAMPTZ NOT NULL, + device_id INTEGER NOT NULL, + power FLOAT, + ws FLOAT, + relay INTEGER, + temperature FLOAT, + PRIMARY KEY (date, device_id) + ) + """) + + # Copy data from the old table to the new table + cursor.execute(""" + INSERT INTO results_temp (date, device_id, power, ws, relay, temperature) + SELECT date, device_id, power, ws, relay, temperature + FROM results + """) + + # Drop the old table + cursor.execute(""" + DROP TABLE results + """) + + # Rename the new table to the original table name + cursor.execute(""" + ALTER TABLE results_temp + RENAME TO results + """) + + # Create hypertable + cursor.execute(""" + SELECT create_hypertable('results', 'date', migrate_data => true) + """) + + +def backwards(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("mystrom_rest", "0002_mystromdevice_active"), + ] + + operations = [ + migrations.RunPython(forwards, backwards), + ] diff --git a/mystrom_rest/migrations/0004_alter_mystromresult_options.py b/mystrom_rest/migrations/0004_alter_mystromresult_options.py new file mode 100644 index 0000000..1bda780 --- /dev/null +++ b/mystrom_rest/migrations/0004_alter_mystromresult_options.py @@ -0,0 +1,16 @@ +# Generated by Django 5.0.7 on 2024-07-20 15:00 + +from django.db import migrations + + +class Migration(migrations.Migration): + dependencies = [ + ("mystrom_rest", "0003_auto_20240720_1432"), + ] + + operations = [ + migrations.AlterModelOptions( + name="mystromresult", + options={"managed": False}, + ), + ] diff --git a/mystrom_rest/models.py b/mystrom_rest/models.py index 7ad3c40..c654086 100644 --- a/mystrom_rest/models.py +++ b/mystrom_rest/models.py @@ -1,67 +1,102 @@ from django.db import models, transaction +from django.db.models.fields.composite import CompositePrimaryKey from django.core.validators import RegexValidator +from django.utils import timezone import requests import json -class MystromDevice(models.Model): +import logging + +logger = logging.getLogger("MyStromRest Models") + +class MystromDevice(models.Model): id = models.AutoField(primary_key=True) name = models.CharField(max_length=16) active = models.BooleanField(default=True) - ip = models.CharField(max_length=16, validators=[ + ip = models.CharField( + max_length=16, + validators=[ RegexValidator( - regex='^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$', - message='Not valid IP Address', + regex="^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", + message="Not valid IP Address", ), - ]) + ], + ) def __repr__(self): - return "" % ( - self.id, self.name, self.ip) - + return "" % (self.id, self.name, self.ip) + @transaction.atomic def get_and_save_result(self): try: - response = requests.get(f'http://{self.ip}/report') - except requests.exceptions.ConnectionError as e: - print(f'Device {self.name} with ip address {self.ip} seems to be not reachable.') + response = requests.get(f"http://{self.ip}/report") + except requests.exceptions.ConnectionError: + logger.error( + f"Device {self.name} with ip address {self.ip} seems to be not reachable." 
+ ) return - except requests.exceptions.Timeout as e: - print(f'Request to device {self.name} with ip address {self.ip} timed out.') + except requests.exceptions.Timeout: + logger.error( + f"Request to device {self.name} with ip address {self.ip} timed out." + ) return - except requests.exceptions.RequestException as e: - print(f'Request to device {self.name} with ip address {self.ip} failed.') + except requests.exceptions.RequestException: + logger.error( + f"Request to device {self.name} with ip address {self.ip} failed." + ) return try: response = json.loads(response.text) except json.decoder.JSONDecodeError: - print(f'Request to device {self.name} with ip address {self.ip} returns invalid JSON response.') + logger.error( + f"Request to device {self.name} with ip address {self.ip} returns invalid JSON response." + ) return - result = MystromResult(device=self, power=response["power"], ws=response["Ws"], relay=1 if response["relay"] else 0, temperature=response["temperature"]) + # set date to now with timestamp + result = MystromResult( + device=self, + date=timezone.now(), + power=response["power"], + ws=response["Ws"], + relay=1 if response["relay"] else 0, + temperature=response["temperature"], + ) + logger.debug(f"Saving result for device {self.name} with ip address {self.ip}.") + logger.debug(result) result.save() return result class Meta: - db_table = 'devices' + db_table = "devices" -class MystromResult(models.Model): - - id = models.AutoField(primary_key=True) +class MystromResult(models.Model): + pk = CompositePrimaryKey("device_id", "date") + date = models.DateTimeField(auto_now_add=True) device = models.ForeignKey(MystromDevice, on_delete=models.PROTECT) power = models.FloatField() ws = models.FloatField() relay = models.IntegerField() temperature = models.FloatField() - date = models.DateTimeField(auto_now_add=True) def __repr__(self): - return "" % ( - self.device_id, self.power, self.ws, self.relay, self.temperature, self.date) + return ( + "" + % ( + self.device_id, + self.power, + self.ws, + self.relay, + self.temperature, + self.date, + ) + ) class Meta: - db_table = 'results' \ No newline at end of file + db_table = "results" + managed = False diff --git a/mystrom_rest/serializers.py b/mystrom_rest/serializers.py index a1e357f..5f7847b 100644 --- a/mystrom_rest/serializers.py +++ b/mystrom_rest/serializers.py @@ -1,23 +1,20 @@ -from rest_framework import serializers +from rest_framework import serializers from .models import MystromDevice, MystromResult - - + + class MystromDeviceSerializer(serializers.ModelSerializer): - class Meta: model = MystromDevice - read_only_fields = ('id',) - fields = ('id', - 'name', - 'active', - 'ip',) + read_only_fields = ("id",) + fields = ( + "id", + "name", + "active", + "ip", + ) + class MystromResultSerializer(serializers.ModelSerializer): - class Meta: model = MystromResult - fields = ('power', - 'ws', - 'relay', - 'temperature', - 'date') + fields = ("power", "ws", "relay", "temperature", "date") diff --git a/mystrom_rest/tests.py b/mystrom_rest/tests.py index b5a71ef..5024f43 100644 --- a/mystrom_rest/tests.py +++ b/mystrom_rest/tests.py @@ -4,39 +4,48 @@ from rest_framework.test import APITestCase from .models import MystromDevice, MystromResult + class MystromDeviceTests(APITestCase): def test_create_device(self): """ Ensure we can create a new device object. 
""" - url = reverse('rest_device_index') - data = {'name': 'NewDevice', 'ip': '192.168.0.196'} - response = self.client.post(url, data, format='json') + url = reverse("rest_device_index") + data = {"name": "NewDevice", "ip": "192.168.0.196"} + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_201_CREATED) self.assertEqual(MystromDevice.objects.count(), 1) - self.assertEqual(MystromDevice.objects.get().name, 'NewDevice') - self.assertEqual(MystromDevice.objects.get().ip, '192.168.0.196') + self.assertEqual(MystromDevice.objects.get().name, "NewDevice") + self.assertEqual(MystromDevice.objects.get().ip, "192.168.0.196") def test_create_device_fail_invalid_ip(self): """ Ensure we can not create a new device object when ip is invalid. """ - url = reverse('rest_device_index') - data = {'name': 'NewDevice', 'ip': 'notanip'} - response = self.client.post(url, data, format='json') + url = reverse("rest_device_index") + data = {"name": "NewDevice", "ip": "notanip"} + response = self.client.post(url, data, format="json") self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) self.assertEqual(MystromDevice.objects.count(), 0) - + class MystromResultsTest(APITestCase): amount_of_results = 60 average_power = 0 + def setUp(self): - device = MystromDevice.objects.create(name='NewDevice', ip='localhost') + self.device = MystromDevice.objects.create(name="NewDevice", ip="localhost") for i in range(self.amount_of_results): power = 500 - result = MystromResult(device=device, power=power, ws=power, relay=1, temperature=25, date=timezone.now() + timezone.timedelta(minutes=-1 * i)) + result = MystromResult( + device=self.device, + power=power, + ws=power, + relay=1, + temperature=25, + date=timezone.now() + timezone.timedelta(minutes=-1 * i), + ) result.save() # calulcate average power @@ -50,6 +59,6 @@ def test_get_results_from_device_date_range_no_value(self): start_param = "2023-05-12T22:00:00.835Z" end_param = "2023-05-15T22:00:00.835Z" - url = f"{reverse('rest_device_results', kwargs={'id': 1})}?minimize=false&start={start_param}&end={end_param}" - response = self.client.get(url, format='json') - self.assertEqual(len(response.json().get('results')), 0) \ No newline at end of file + url = f"{reverse('rest_device_results', kwargs={'id': self.device.id})}?start={start_param}&end={end_param}" + response = self.client.get(url, format="json") + self.assertEqual(len(response.json().get("results")), 0) diff --git a/mystrom_rest/urls.py b/mystrom_rest/urls.py index 87702f0..3cae694 100644 --- a/mystrom_rest/urls.py +++ b/mystrom_rest/urls.py @@ -2,8 +2,12 @@ from . 
import views urlpatterns = [ - path('devices/', views.device_list, name='rest_device_index'), - path('devices//', views.device_detail, name='rest_device_detail'), - path('devices//results/', views.device_results, name='rest_device_results'), - path('devices/request-and-save-results', views.get_and_save_device_results, name='rest_devices_get_and_save_results'), -] \ No newline at end of file + path("devices/", views.device_list, name="rest_device_index"), + path("devices//", views.device_detail, name="rest_device_detail"), + path("devices//results/", views.device_results, name="rest_device_results"), + path( + "devices/request-and-save-results", + views.get_and_save_device_results, + name="rest_devices_get_and_save_results", + ), +] diff --git a/mystrom_rest/views.py b/mystrom_rest/views.py index 85ef2d3..4fc9046 100644 --- a/mystrom_rest/views.py +++ b/mystrom_rest/views.py @@ -1,169 +1,204 @@ from django.shortcuts import get_object_or_404 from django.http.response import JsonResponse from django.utils import timezone +from dateutil import parser +from datetime import timedelta from rest_framework.parsers import JSONParser from rest_framework import status -from django.db.models import Avg, Sum -from django.db.models.functions import TruncHour +from django.db import connection from .models import MystromResult, MystromDevice from .serializers import MystromDeviceSerializer, MystromResultSerializer from rest_framework.decorators import api_view from rest_framework.response import Response +import logging -@api_view(['GET', 'POST', 'DELETE']) +logger = logging.getLogger("MyStromRest") + + +@api_view(["GET", "POST", "DELETE"]) def device_list(request): - if request.method == 'GET': + if request.method == "GET": devices = MystromDevice.objects.all() devices_serializer = MystromDeviceSerializer(devices, many=True) return JsonResponse(devices_serializer.data, safe=False) # 'safe=False' for objects serialization - elif request.method == 'POST': + elif request.method == "POST": device_data = JSONParser().parse(request) device_serializer = MystromDeviceSerializer(data=device_data) if device_serializer.is_valid(): device_serializer.save() return JsonResponse(device_serializer.data, status=status.HTTP_201_CREATED) - return JsonResponse(device_serializer.errors, status=status.HTTP_400_BAD_REQUEST) + return JsonResponse( + device_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) - elif request.method == 'DELETE': + elif request.method == "DELETE": count = MystromDevice.objects.all().delete() - return JsonResponse({'message': '{} Devices were deleted successfully!'.format(count[0])}, status=status.HTTP_204_NO_CONTENT) + return JsonResponse( + {"message": "{} Devices were deleted successfully!".format(count[0])}, + status=status.HTTP_204_NO_CONTENT, + ) -@api_view(['GET', 'PUT', 'DELETE']) +@api_view(["GET", "PUT", "DELETE"]) def device_detail(request, id): device = get_object_or_404(MystromDevice, id=id) - if request.method == 'GET': + if request.method == "GET": device_serializer = MystromDeviceSerializer(device) return JsonResponse(device_serializer.data) - elif request.method == 'PUT': + elif request.method == "PUT": device_data = JSONParser().parse(request) device_serializer = MystromDeviceSerializer(device, data=device_data) if device_serializer.is_valid(): device_serializer.save() return JsonResponse(device_serializer.data) - return JsonResponse(device_serializer.errors, status=status.HTTP_400_BAD_REQUEST) + return JsonResponse( + device_serializer.errors, 
status=status.HTTP_400_BAD_REQUEST + ) - elif request.method == 'DELETE': + elif request.method == "DELETE": device.delete() - return JsonResponse({'message': 'Device was deleted successfully!'}, status=status.HTTP_204_NO_CONTENT) + return JsonResponse( + {"message": "Device was deleted successfully!"}, + status=status.HTTP_204_NO_CONTENT, + ) -@api_view(['GET']) +@api_view(["GET"]) def device_results(request, id): + # measure time of request + start_time = timezone.now() device = get_object_or_404(MystromDevice, id=id) - start_param = request.GET.get('start') - end_param = request.GET.get('end') + start_param = request.GET.get("start") + end_param = request.GET.get("end") start_param = request.GET.get( - 'start', timezone.now() + timezone.timedelta(days=-1)) - end_param = request.GET.get('end', timezone.now()) - - results = MystromResult.objects.filter(device_id=device, date__range=[ - start_param, end_param]).values('ws', 'power', 'relay', 'temperature', 'date').order_by('date') - - average_power = ( - results - .values('date', 'power') - .annotate(hour=TruncHour('date')) - .values('hour') - .annotate(average_power=Avg('power')) + "start", (timezone.now() - timedelta(days=1)).isoformat() ) - - total_power = 0 - - if average_power.exists(): - - # reduce power average based on if first or last hour is not complete - first_hour = average_power.first()['hour'] - first_hour_results = results.filter( - date__range=[first_hour, first_hour + timezone.timedelta(hours=1)]).values('date').order_by('date') - first_hour_percent = (first_hour_results.first()['date'] - first_hour) / timezone.timedelta(hours=1) - first_hour_power_reduction = average_power.first()['average_power'] * first_hour_percent - - - last_hour = average_power.last()['hour'] - last_hour_results = results.filter( - date__range=[last_hour, last_hour + timezone.timedelta(hours=1)]).values('date').order_by('date') - last_hour_percent = ((last_hour + timezone.timedelta(hours=1)) - last_hour_results.last()['date']) / timezone.timedelta(hours=1) - last_hour_power_reduction = average_power.last()['average_power'] * last_hour_percent - - - total_power = average_power.aggregate(Sum('average_power'))[ - 'average_power__sum'] - first_hour_power_reduction - last_hour_power_reduction - - if request.method == 'GET': - if request.GET.get('minimize', "false") == "true": - minimizedList = minimizeResultList(results) + end_param = request.GET.get("end", timezone.now().isoformat()) + + # Use dateutil parser to handle different formats + start_param = parser.parse(start_param) + end_param = parser.parse(end_param) + + # Ensure dates are timezone-aware + if start_param.tzinfo is None: + start_param = timezone.make_aware(start_param) + if end_param.tzinfo is None: + end_param = timezone.make_aware(end_param) + + logger.debug(f"Request for device {device.id} from {start_param} to {end_param}") + + # Determine interval length based on date range + interval_length = 5 if (end_param - start_param).days <= 7 else 15 + + query_results = """ + SELECT + time_bucket('{interval_length} minutes', date) AS interval, + AVG(ws) AS avg_ws, + AVG(power) AS avg_power, + AVG(temperature) AS avg_temperature, + MIN(date) AS min_date + FROM + {table_name} + WHERE + device_id = %s AND + date BETWEEN %s AND %s + GROUP BY + interval + ORDER BY + interval; + """.format(table_name=MystromResult._meta.db_table, interval_length=interval_length) + + query_total_power = """ + WITH interval_data AS ( + SELECT + date, + power + FROM + {table_name} + WHERE + device_id = %s AND + date 
BETWEEN %s AND %s + ), + hourly_totals AS ( + SELECT + time_bucket('1 hour', date) AS interval, + AVG(power) AS total_power_per_hour + FROM + interval_data + GROUP BY + interval + ), + min_max_dates AS ( + SELECT + MIN(date) AS min_date, + MAX(date) AS max_date + FROM + interval_data + ) + SELECT + SUM( + CASE + WHEN interval = time_bucket('1 hour', (SELECT min_date FROM min_max_dates)) + THEN total_power_per_hour * (1 - (EXTRACT(epoch FROM (SELECT min_date FROM min_max_dates) - date_trunc('hour', (SELECT min_date FROM min_max_dates))) / 3600.0)) + WHEN interval = time_bucket('1 hour', (SELECT max_date FROM min_max_dates)) + THEN total_power_per_hour * (EXTRACT(epoch FROM (SELECT max_date FROM min_max_dates) - date_trunc('hour', (SELECT max_date FROM min_max_dates))) / 3600.0) + ELSE total_power_per_hour + END + ) AS total_power_wh + FROM + hourly_totals; + """.format(table_name=MystromResult._meta.db_table) + + # Execute the query + with connection.cursor() as cursor: + # Execute the interval query + cursor.execute(query_results, (device.id, start_param, end_param)) + rows_15min = cursor.fetchall() + + results_15min = [ + { + "interval": interval, + "ws": ws, + "power": power, + "temperature": temperature, + "date": date, + } + for (interval, ws, power, temperature, date) in rows_15min + ] + + if request.META.get("HTTP_ACCEPT") == "text/csv": + result_data = results_15min else: - minimizedList = results - result_serializer = MystromResultSerializer(minimizedList, many=True) - - if request.META.get('HTTP_ACCEPT') == 'text/csv': - result_data = result_serializer.data - else: - result_data = {'results': result_serializer.data, - 'total_power': total_power} - - return Response(result_data) - -def minimizeResultList(results) -> list: - resultList = [] - if len(results) == 0: - return resultList - skip = 1 - if (len(results) > 10000): - skip = 20 - elif (len(results) > 5000): - skip = 10 - elif (len(results) > 1600): - skip = 5 - elif (len(results) > 500): - skip = 2 - - currentSkip = 0 - currentObj = None - for result in results.iterator(): - if currentSkip % skip == 0: - if currentObj != None: - calculateAverage(currentObj, currentSkip) - resultList.append(currentObj) - currentObj = result - currentSkip = 0 - else: - currentObj.power += result.power - currentObj.ws += result.ws - currentObj.temperature += result.temperature - currentSkip += 1 - calculateAverage(currentObj, currentSkip) - resultList.append(currentObj) - return resultList + cursor.execute(query_total_power, (device.id, start_param, end_param)) + total_power_wh = cursor.fetchone()[0] + result_data = {"results": results_15min, "total_power": total_power_wh} + end_time = timezone.now() + logger.debug("Request took: " + str(end_time - start_time)) -def calculateAverage(result, amount) -> MystromResult: - result.power /= amount - result.ws /= amount - result.temperature /= amount - return result + return Response(result_data, status=status.HTTP_200_OK) -@api_view(['POST']) +@api_view(["POST"]) def get_and_save_device_results(request): - - if request.method == 'POST': + if request.method == "POST": devices = MystromDevice.objects.filter(active=True).all() results = [] for device in devices: result = device.get_and_save_result() results.append(result) result_serializer = MystromResultSerializer(results, many=True) - return JsonResponse(result_serializer.data, safe=False, status=status.HTTP_200_OK) + return JsonResponse( + result_serializer.data, safe=False, status=status.HTTP_200_OK + ) diff --git a/pim/asgi.py b/pim/asgi.py index 
9fd2585..b820d73 100644 --- a/pim/asgi.py +++ b/pim/asgi.py @@ -11,6 +11,6 @@ from django.core.asgi import get_asgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pim.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pim.settings") application = get_asgi_application() diff --git a/pim/context_processors.py b/pim/context_processors.py index 8759890..cc90357 100644 --- a/pim/context_processors.py +++ b/pim/context_processors.py @@ -1,8 +1,9 @@ -import os +import os + def export_vars(request): data = {} - data['CHART_TYPE'] = os.getenv('CHART_TYPE', 'apexcharts') - if data['CHART_TYPE'] != 'apexcharts' and data['CHART_TYPE'] != 'uplot': - data['CHART_TYPE'] = 'apexcharts' - return data \ No newline at end of file + data["CHART_TYPE"] = os.getenv("CHART_TYPE", "apexcharts") + if data["CHART_TYPE"] != "apexcharts" and data["CHART_TYPE"] != "uplot": + data["CHART_TYPE"] = "apexcharts" + return data diff --git a/pim/settings.py b/pim/settings.py index f61fd40..bb780f1 100644 --- a/pim/settings.py +++ b/pim/settings.py @@ -13,6 +13,8 @@ from pathlib import Path import os +from django.contrib.messages import constants as messages + # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent @@ -21,68 +23,70 @@ # See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = os.getenv('SECRET_KEY', os.urandom(32)) +SECRET_KEY = os.getenv("SECRET_KEY", os.urandom(32)) # SECURITY WARNING: don't run with debug turned on in production! -DEBUG = os.getenv('DEBUG', False) == 'True' +DEBUG = os.getenv("DEBUG", False) == "True" -ALLOWED_HOSTS = os.environ.get('ALLOWED_HOSTS', 'localhost').split(',') +ALLOWED_HOSTS = os.environ.get("ALLOWED_HOSTS", "localhost,127.0.0.1").split(",") # Application definition INSTALLED_APPS = [ - 'whitenoise.runserver_nostatic', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'corsheaders', - 'rest_framework', - 'widget_tweaks', - 'interface', - 'mystrom_rest', - 'shelly3em_rest', - 'debug_toolbar', + "whitenoise.runserver_nostatic", + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "corsheaders", + "rest_framework", + "widget_tweaks", + "interface", + "mystrom_rest", + "shelly3em_rest", + "debug_toolbar", ] -from django.contrib.messages import constants as messages + MESSAGE_TAGS = { - messages.DEBUG: 'alert-secondary', - messages.INFO: 'alert-info', - messages.SUCCESS: 'alert-success', - messages.WARNING: 'alert-warning', - messages.ERROR: 'alert-danger', + messages.DEBUG: "alert-secondary", + messages.INFO: "alert-info", + messages.SUCCESS: "alert-success", + messages.WARNING: "alert-warning", + messages.ERROR: "alert-danger", } MIDDLEWARE = [ - 'django.middleware.gzip.GZipMiddleware', - 'django.middleware.security.SecurityMiddleware', - 'debug_toolbar.middleware.DebugToolbarMiddleware', - 'whitenoise.middleware.WhiteNoiseMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'corsheaders.middleware.CorsMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 
'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.gzip.GZipMiddleware", + "django.middleware.security.SecurityMiddleware", + "debug_toolbar.middleware.DebugToolbarMiddleware", + "whitenoise.middleware.WhiteNoiseMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "corsheaders.middleware.CorsMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] REST_FRAMEWORK = { - 'DEFAULT_RENDERER_CLASSES': ( - 'rest_framework.renderers.JSONRenderer', - 'rest_framework.renderers.BrowsableAPIRenderer', - 'rest_framework_csv.renderers.CSVRenderer', + "DEFAULT_RENDERER_CLASSES": ( + "rest_framework.renderers.JSONRenderer", + "rest_framework.renderers.BrowsableAPIRenderer", + "rest_framework_csv.renderers.CSVRenderer", ), } -CORS_ORIGIN_ALLOW_ALL = os.getenv('CORS_ORIGIN_ALLOW_ALL', False) == 'True' -CORS_ORIGIN_WHITELIST = os.environ.get('CORS_ORIGIN_WHITELIST', 'http://localhost:8000').split(',') +CORS_ORIGIN_ALLOW_ALL = os.getenv("CORS_ORIGIN_ALLOW_ALL", False) == "True" +CORS_ORIGIN_WHITELIST = os.environ.get( + "CORS_ORIGIN_WHITELIST", "http://localhost:8000" +).split(",") -STATIC_URL = '/static/' +STATIC_URL = "/static/" STATICFILES_DIRS = [BASE_DIR / "static"] STATIC_ROOT = BASE_DIR / "staticfiles" STORAGES = { @@ -91,60 +95,49 @@ }, } -ROOT_URLCONF = 'pim.urls' +ROOT_URLCONF = "pim.urls" TEMPLATES = [ { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'pim.context_processors.export_vars', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "pim.context_processors.export_vars", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", ], }, }, ] -WSGI_APPLICATION = 'pim.wsgi.application' +WSGI_APPLICATION = "pim.wsgi.application" if DEBUG: import socket # only if you haven't already imported this + hostname, _, ips = socket.gethostbyname_ex(socket.gethostname()) - INTERNAL_IPS = [ip[:-1] + '1' for ip in ips] + ['127.0.0.1', '10.0.2.2'] + INTERNAL_IPS = [ip[:-1] + "1" for ip in ips] + ["127.0.0.1", "10.0.2.2"] # Database # https://docs.djangoproject.com/en/4.0/ref/settings/#databases -ENGINETYPE = os.getenv('ENGINE_TYPE', 'sqlite3') -ENGINE = None; -if ENGINETYPE == 'mysql': - ENGINE = 'django.db.backends.mysql' -elif ENGINETYPE == 'postgresql': - ENGINE = 'django.db.backends.postgresql_psycopg2' - -if ENGINE: - DATABASES = { - 'default': { - 'ENGINE': ENGINE, - 'NAME': os.environ["DB_NAME"], - 'USER': os.environ["DB_USER"], - 'PASSWORD': os.environ["DB_PASSWORD"], - 'HOST': os.environ["DB_HOST"], - 'PORT': os.environ["DB_PORT"], - } - } -else: - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': BASE_DIR / 'db.sqlite3', - } +ENGINE = "django.db.backends.postgresql_psycopg2" + + +DATABASES = { + "default": { + "ENGINE": ENGINE, + "NAME": os.environ["DB_NAME"], + "USER": 
os.environ["DB_USER"], + "PASSWORD": os.environ["DB_PASSWORD"], + "HOST": os.environ["DB_HOST"], + "PORT": os.environ["DB_PORT"], } +} # Password validation @@ -152,16 +145,16 @@ AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', + "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', + "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator", }, { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator", }, ] @@ -169,9 +162,9 @@ # Internationalization # https://docs.djangoproject.com/en/4.0/topics/i18n/ -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" -TIME_ZONE = 'UTC' +TIME_ZONE = "UTC" USE_I18N = True @@ -184,4 +177,4 @@ # Default primary key field type # https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field -DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" diff --git a/pim/urls.py b/pim/urls.py index 28d174c..dd50e7c 100644 --- a/pim/urls.py +++ b/pim/urls.py @@ -13,11 +13,12 @@ 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ + from django.urls import include, path urlpatterns = [ - path('', include('interface.urls')), - path('api/', include('mystrom_rest.urls')), - path('shelly-api/', include('shelly3em_rest.urls')), - path('__debug__/', include('debug_toolbar.urls')), + path("", include("interface.urls")), + path("api/", include("mystrom_rest.urls")), + path("shelly-api/", include("shelly3em_rest.urls")), + path("__debug__/", include("debug_toolbar.urls")), ] diff --git a/pim/wsgi.py b/pim/wsgi.py index 4518a52..5da5d90 100644 --- a/pim/wsgi.py +++ b/pim/wsgi.py @@ -11,6 +11,6 @@ from django.core.wsgi import get_wsgi_application -os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pim.settings') +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pim.settings") application = get_wsgi_application() diff --git a/requirements.txt b/requirements.txt index 109d76a..76003ed 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,13 +1,13 @@ -django==5.0.7 +django==5.2.1 django-widget-tweaks==1.5.0 -djangorestframework==3.15.2 +djangorestframework==3.16.0 djangorestframework-csv==3.0.2 -django-cors-headers==4.4.0 -django-debug-toolbar==4.4.6 -whitenoise==6.6.0 -mysqlclient==2.2.4 -psycopg2==2.9.9 +django-cors-headers==4.7.0 +django-debug-toolbar==5.2.0 +whitenoise==6.9.0 +psycopg2==2.9.10 requests==2.32.3 +python-dateutil==2.9.0 ## for testing beautifulsoup4==4.12.2 diff --git a/shelly3em_rest/admin.py b/shelly3em_rest/admin.py index 8c38f3f..846f6b4 100644 --- a/shelly3em_rest/admin.py +++ b/shelly3em_rest/admin.py @@ -1,3 +1 @@ -from django.contrib import admin - # Register your models here. 
diff --git a/shelly3em_rest/apps.py b/shelly3em_rest/apps.py index 1ae0a75..2aad428 100644 --- a/shelly3em_rest/apps.py +++ b/shelly3em_rest/apps.py @@ -2,5 +2,5 @@ class Shelly3EmRestConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'shelly3em_rest' + default_auto_field = "django.db.models.BigAutoField" + name = "shelly3em_rest" diff --git a/shelly3em_rest/migrations/0001_initial.py b/shelly3em_rest/migrations/0001_initial.py index b262dae..eeca88c 100644 --- a/shelly3em_rest/migrations/0001_initial.py +++ b/shelly3em_rest/migrations/0001_initial.py @@ -6,52 +6,73 @@ class Migration(migrations.Migration): - initial = True - dependencies = [ - ] + dependencies = [] operations = [ migrations.CreateModel( - name='Shelly3EMDevice', + name="Shelly3EMDevice", fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('name', models.CharField(max_length=16)), - ('active', models.BooleanField(default=True)), - ('ip', models.CharField(max_length=16, validators=[django.core.validators.RegexValidator(message='Not valid IP Address', regex='^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$')])), + ("id", models.AutoField(primary_key=True, serialize=False)), + ("name", models.CharField(max_length=16)), + ("active", models.BooleanField(default=True)), + ( + "ip", + models.CharField( + max_length=16, + validators=[ + django.core.validators.RegexValidator( + message="Not valid IP Address", + regex="^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", + ) + ], + ), + ), ], options={ - 'db_table': 'shelly3em_devices', + "db_table": "shelly3em_devices", }, ), migrations.CreateModel( - name='Shelly3EMResult', + name="Shelly3EMResult", fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('total_power', models.FloatField()), - ('date', models.DateTimeField(auto_now_add=True)), - ('device', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='shelly3em_rest.shelly3emdevice')), + ("id", models.AutoField(primary_key=True, serialize=False)), + ("total_power", models.FloatField()), + ("date", models.DateTimeField(auto_now_add=True)), + ( + "device", + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="shelly3em_rest.shelly3emdevice", + ), + ), ], options={ - 'db_table': 'shelly3em_results', + "db_table": "shelly3em_results", }, ), migrations.CreateModel( - name='Shelly3EMEmeterResult', + name="Shelly3EMEmeterResult", fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('emeter_id', models.IntegerField()), - ('power', models.FloatField()), - ('pf', models.FloatField()), - ('current', models.FloatField()), - ('voltage', models.FloatField()), - ('total', models.FloatField()), - ('total_returned', models.FloatField()), - ('result', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='shelly3em_rest.shelly3emresult')), + ("id", models.AutoField(primary_key=True, serialize=False)), + ("emeter_id", models.IntegerField()), + ("power", models.FloatField()), + ("pf", models.FloatField()), + ("current", models.FloatField()), + ("voltage", models.FloatField()), + ("total", models.FloatField()), + ("total_returned", models.FloatField()), + ( + "result", + models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + to="shelly3em_rest.shelly3emresult", + ), + ), ], options={ - 'db_table': 'shelly3em_emeter_results', + "db_table": "shelly3em_emeter_results", }, 
), ] diff --git a/shelly3em_rest/migrations/0002_alter_shelly3ememeterresult_result.py b/shelly3em_rest/migrations/0002_alter_shelly3ememeterresult_result.py index a6c7b3c..243c9da 100644 --- a/shelly3em_rest/migrations/0002_alter_shelly3ememeterresult_result.py +++ b/shelly3em_rest/migrations/0002_alter_shelly3ememeterresult_result.py @@ -5,15 +5,18 @@ class Migration(migrations.Migration): - dependencies = [ - ('shelly3em_rest', '0001_initial'), + ("shelly3em_rest", "0001_initial"), ] operations = [ migrations.AlterField( - model_name='shelly3ememeterresult', - name='result', - field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='emeters', to='shelly3em_rest.shelly3emresult'), + model_name="shelly3ememeterresult", + name="result", + field=models.ForeignKey( + on_delete=django.db.models.deletion.PROTECT, + related_name="emeters", + to="shelly3em_rest.shelly3emresult", + ), ), ] diff --git a/shelly3em_rest/migrations/0003_auto_20240720_1610.py b/shelly3em_rest/migrations/0003_auto_20240720_1610.py new file mode 100644 index 0000000..3945b0c --- /dev/null +++ b/shelly3em_rest/migrations/0003_auto_20240720_1610.py @@ -0,0 +1,87 @@ +# Generated by Django 5.0.7 on 2024-07-20 16:10 + +from django.db import migrations + + +def forwards(apps, schema_editor): + # Create a new temporary table with the desired schema + with schema_editor.connection.cursor() as cursor: + cursor.execute(""" + CREATE TABLE shelly3em_results_temp ( + date TIMESTAMPTZ NOT NULL, + device_id INTEGER NOT NULL, + total_power FLOAT, + PRIMARY KEY (date, device_id) + ) + """) + + # Copy data from the old table to the new table + cursor.execute(""" + INSERT INTO shelly3em_results_temp (date, device_id, total_power) + SELECT date, device_id, total_power + FROM shelly3em_results + """) + + cursor.execute(""" + CREATE TABLE shelly3em_emeter_results_temp ( + date TIMESTAMPTZ NOT NULL, + device_id INTEGER NOT NULL, + emeter_id INTEGER NOT NULL, + power FLOAT, + pf FLOAT, + current FLOAT, + voltage FLOAT, + total FLOAT, + total_returned FLOAT, + PRIMARY KEY (date, device_id, emeter_id) + ) + """) + + cursor.execute(""" + INSERT INTO shelly3em_emeter_results_temp (date, device_id, emeter_id, power, pf, current, voltage, total, total_returned) + SELECT + (SELECT date FROM shelly3em_results WHERE id = old.result_id), + (SELECT device_id FROM shelly3em_results WHERE id = old.result_id), + old.emeter_id, old.power, old.pf, old.current, old.voltage, old.total, old.total_returned + FROM shelly3em_emeter_results old + """) + + # Drop the old table + cursor.execute(""" + DROP TABLE shelly3em_emeter_results + """) + + cursor.execute(""" + DROP TABLE shelly3em_results + """) + + # Rename the new table to the original table name + cursor.execute(""" + ALTER TABLE shelly3em_results_temp + RENAME TO shelly3em_results + """) + + cursor.execute(""" + ALTER TABLE shelly3em_emeter_results_temp + RENAME TO shelly3em_emeter_results + """) + + # Create hypertable + cursor.execute(""" + SELECT create_hypertable('shelly3em_results', 'date', migrate_data => true) + """) + cursor.execute(""" + SELECT create_hypertable('shelly3em_emeter_results', 'date', migrate_data => true) + """) + + +def backwards(apps, schema_editor): + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("shelly3em_rest", "0002_alter_shelly3ememeterresult_result"), + ] + + operations = [migrations.RunPython(forwards, backwards)] diff --git a/shelly3em_rest/migrations/0004_alter_shelly3ememeterresult_options_and_more.py 
b/shelly3em_rest/migrations/0004_alter_shelly3ememeterresult_options_and_more.py
new file mode 100644
index 0000000..f1f7a62
--- /dev/null
+++ b/shelly3em_rest/migrations/0004_alter_shelly3ememeterresult_options_and_more.py
@@ -0,0 +1,20 @@
+# Generated by Django 5.0.7 on 2024-07-20 16:14
+
+from django.db import migrations
+
+
+class Migration(migrations.Migration):
+    dependencies = [
+        ("shelly3em_rest", "0003_auto_20240720_1610"),
+    ]
+
+    operations = [
+        migrations.AlterModelOptions(
+            name="shelly3ememeterresult",
+            options={"managed": False},
+        ),
+        migrations.AlterModelOptions(
+            name="shelly3emresult",
+            options={"managed": False},
+        ),
+    ]
diff --git a/shelly3em_rest/models.py b/shelly3em_rest/models.py
index 7864f87..24be300 100644
--- a/shelly3em_rest/models.py
+++ b/shelly3em_rest/models.py
@@ -1,78 +1,118 @@
 from django.db import models, transaction
+from django.db.models.fields.composite import CompositePrimaryKey
 from django.core.validators import RegexValidator
+from django.utils import timezone
 import requests
 import json
 
-class Shelly3EMDevice(models.Model):
+import logging
+
+logger = logging.getLogger("Shelly3EM Models")
+
+class Shelly3EMDevice(models.Model):
     id = models.AutoField(primary_key=True)
     name = models.CharField(max_length=16)
     active = models.BooleanField(default=True)
-    ip = models.CharField(max_length=16, validators=[
+    ip = models.CharField(
+        max_length=16,
+        validators=[
             RegexValidator(
-                regex='^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$',
-                message='Not valid IP Address',
+                regex="^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$",
+                message="Not valid IP Address",
             ),
-    ])
+        ],
+    )
 
     def __repr__(self):
-        return "<Shelly3EMDevice id=%s name=%s ip=%s>" % (
-            self.id, self.name, self.ip)
+        return "<Shelly3EMDevice id=%s name=%s ip=%s>" % (self.id, self.name, self.ip)
 
     @transaction.atomic
     def get_and_save_result(self):
         try:
-            response = requests.get(f'http://{self.ip}/status')
-        except requests.exceptions.ConnectionError as e:
-            print(f'Device {self.name} with ip address {self.ip} seems to be not reachable.')
+            response = requests.get(f"http://{self.ip}/status")
+        except requests.exceptions.ConnectionError:
+            logger.error(
+                f"Device {self.name} with ip address {self.ip} seems to be not reachable."
+            )
             return
-        except requests.exceptions.Timeout as e:
-            print(f'Request to device {self.name} with ip address {self.ip} timed out.')
+        except requests.exceptions.Timeout:
+            logger.error(
+                f"Request to device {self.name} with ip address {self.ip} timed out."
+            )
             return
-        except requests.exceptions.RequestException as e:
-            print(f'Request to device {self.name} with ip address {self.ip} failed.')
+        except requests.exceptions.RequestException:
+            logger.error(
+                f"Request to device {self.name} with ip address {self.ip} failed."
+            )
             return
 
         try:
             response = json.loads(response.text)
         except json.decoder.JSONDecodeError:
-            print(f'Request to device {self.name} with ip address {self.ip} returns invalid JSON response.')
+            logger.error(
+                f"Request to device {self.name} with ip address {self.ip} returns invalid JSON response."
+            )
             return
 
-        result = Shelly3EMResult(device=self, total_power=response['total_power'])
+        result = Shelly3EMResult(
+            device=self, date=timezone.now(), total_power=response["total_power"]
+        )
+        logger.debug(f"Saving result for device {self.name} with ip address {self.ip}.")
+        logger.debug(result)
         result.save()
 
         id = 0
         for emeter in response["emeters"]:
-            Shelly3EMEmeterResult(result=result, emeter_id=id, power=emeter["power"], pf=emeter["pf"], current=emeter["current"], voltage=emeter["voltage"], total=emeter["total"], total_returned=emeter["total_returned"]).save()
+            emeter_result = Shelly3EMEmeterResult(
+                device=self,
+                date=result.date,
+                emeter_id=id,
+                power=emeter["power"],
+                pf=emeter["pf"],
+                current=emeter["current"],
+                voltage=emeter["voltage"],
+                total=emeter["total"],
+                total_returned=emeter["total_returned"],
+            )
+            emeter_result.save()
+            logger.debug(f"Saving emeter result for with emeter_id {id}.")
+            logger.debug(emeter_result)
             id += 1
 
+        logger.debug(f"Results for device {self.name} with ip address {self.ip} saved.")
+        logger.debug(result)
         return result
 
     class Meta:
-        db_table = 'shelly3em_devices'
+        db_table = "shelly3em_devices"
 
-class Shelly3EMResult(models.Model):
-
-    id = models.AutoField(primary_key=True)
+
+class Shelly3EMResult(models.Model):
+    pk = CompositePrimaryKey("device_id", "date")
+    date = models.DateTimeField(auto_now_add=True)
     device = models.ForeignKey(Shelly3EMDevice, on_delete=models.PROTECT)
     total_power = models.FloatField()
-    date = models.DateTimeField(auto_now_add=True)
 
     def __repr__(self):
-        return "<Shelly3EMResult device_id=%s total_power=%s date=%s>" % (
-            self.device_id, self.total_power, self.date)
+        return "<Shelly3EMResult device_id=%s total_power=%s date=%s>" % (
+            self.device_id,
+            self.total_power,
+            self.date,
+        )
 
     class Meta:
-        db_table = 'shelly3em_results'
+        db_table = "shelly3em_results"
+        managed = False
 
-class Shelly3EMEmeterResult(models.Model):
-    id = models.AutoField(primary_key=True)
-    result = models.ForeignKey(Shelly3EMResult, on_delete=models.PROTECT, related_name='emeters')
+
+class Shelly3EMEmeterResult(models.Model):
+    pk = CompositePrimaryKey("device_id", "date", "emeter_id")
+    date = models.DateTimeField()
+    device = models.ForeignKey(Shelly3EMDevice, on_delete=models.PROTECT)
     emeter_id = models.IntegerField()
+
     power = models.FloatField()
     pf = models.FloatField()
     current = models.FloatField()
@@ -81,8 +121,21 @@ class Shelly3EMEmeterResult(models.Model):
     total_returned = models.FloatField()
 
     def __repr__(self):
-        return "<Shelly3EMEmeterResult device_id=%s result=%s emeter_id=%s power=%s pf=%s current=%s voltage=%s total=%s total_returned=%s>" % (
-            self.device_id, self.result, self.emeter_id, self.power, self.pf, self.current, self.voltage, self.total, self.total_returned)
+        return (
+            "<Shelly3EMEmeterResult device_id=%s date=%s emeter_id=%s power=%s pf=%s current=%s voltage=%s total=%s total_returned=%s>"
+            % (
+                self.device_id,
+                self.date,
+                self.emeter_id,
+                self.power,
+                self.pf,
+                self.current,
+                self.voltage,
+                self.total,
+                self.total_returned,
+            )
+        )
 
     class Meta:
-        db_table = 'shelly3em_emeter_results'
\ No newline at end of file
+        db_table = "shelly3em_emeter_results"
+        managed = False
diff --git a/shelly3em_rest/serializers.py b/shelly3em_rest/serializers.py
index 2646960..3af5ddc 100644
--- a/shelly3em_rest/serializers.py
+++ b/shelly3em_rest/serializers.py
@@ -1,33 +1,36 @@
-from rest_framework import serializers 
+from rest_framework import serializers
 from .models import Shelly3EMDevice, Shelly3EMEmeterResult, Shelly3EMResult
-
-
+
+
 class Shelly3EMDeviceSerializer(serializers.ModelSerializer):
-
     class Meta:
         model = Shelly3EMDevice
-        read_only_fields = ('id',)
-        fields = ('id',
-                  'name',
-                  'active',
-                  'ip',)
+        read_only_fields = ("id",)
+        fields = (
+            "id",
+            "name",
+            "active",
+            "ip",
+        )
+
 class 
Shelly3EMEmeterResultSerializer(serializers.ModelSerializer): - class Meta: model = Shelly3EMEmeterResult - fields = ('emeter_id', - 'power', - 'pf', - 'current', - 'voltage', - 'total', - 'total_returned') + fields = ( + "emeter_id", + "power", + "pf", + "current", + "voltage", + "total", + "total_returned", + ) + class Shelly3EMResultSerializer(serializers.ModelSerializer): - emeters = Shelly3EMEmeterResultSerializer(many=True) + # TODO: add emeters as a nested serializer in the future + class Meta: model = Shelly3EMResult - fields = ('emeters', - 'total_power', - 'date') \ No newline at end of file + fields = ("total_power", "date") diff --git a/shelly3em_rest/tests.py b/shelly3em_rest/tests.py index 0fc6d58..4e122c6 100644 --- a/shelly3em_rest/tests.py +++ b/shelly3em_rest/tests.py @@ -5,19 +5,14 @@ from .models import Shelly3EMDevice from .serializers import Shelly3EMDeviceSerializer + class ShellyDeviceTests(TestCase): def setUp(self): self.client = APIClient() - self.url = reverse('shelly_rest_device_index') - self.device_data = { - 'name': 'Test Device', - 'ip': '192.168.0.74', - 'active': True - } + self.url = reverse("shelly_rest_device_index") + self.device_data = {"name": "Test Device", "ip": "192.168.0.74", "active": True} self.device = Shelly3EMDevice.objects.create( - name='Test Device', - ip='192.168.0.74', - active=True + name="Test Device", ip="192.168.0.74", active=True ) def test_get_device_list(self): @@ -29,4 +24,4 @@ def test_get_device_list(self): def test_create_device_invalid_data(self): response = self.client.post(self.url, {}) - self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) \ No newline at end of file + self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST) diff --git a/shelly3em_rest/urls.py b/shelly3em_rest/urls.py index 7181b3b..ed2f425 100644 --- a/shelly3em_rest/urls.py +++ b/shelly3em_rest/urls.py @@ -2,8 +2,16 @@ from . 
import views
 
 urlpatterns = [
-    path('devices/', views.device_list, name='shelly_rest_device_index'),
-    path('devices/<int:id>/', views.device_detail, name='shelly_rest_device_detail'),
-    path('devices/<int:id>/results/', views.device_results, name='shelly_rest_device_results'),
-    path('devices/request-and-save-results', views.get_and_save_device_results, name='shelly_rest_devices_get_and_save_results'),
-]
\ No newline at end of file
+    path("devices/", views.device_list, name="shelly_rest_device_index"),
+    path("devices/<int:id>/", views.device_detail, name="shelly_rest_device_detail"),
+    path(
+        "devices/<int:id>/results/",
+        views.device_results,
+        name="shelly_rest_device_results",
+    ),
+    path(
+        "devices/request-and-save-results",
+        views.get_and_save_device_results,
+        name="shelly_rest_devices_get_and_save_results",
+    ),
+]
diff --git a/shelly3em_rest/views.py b/shelly3em_rest/views.py
index 98254c0..0e40e2f 100644
--- a/shelly3em_rest/views.py
+++ b/shelly3em_rest/views.py
@@ -1,130 +1,267 @@
 from django.shortcuts import get_object_or_404
 from django.http.response import JsonResponse
 from django.utils import timezone
+from dateutil import parser
 from rest_framework.parsers import JSONParser
 from rest_framework import status
-from django.db.models import Avg, Sum, Case, When, FloatField
-from django.db.models.functions import TruncHour
+from django.db import connection
 
-from .models import Shelly3EMDevice, Shelly3EMResult
+from .models import Shelly3EMDevice, Shelly3EMResult, Shelly3EMEmeterResult
 from .serializers import Shelly3EMDeviceSerializer, Shelly3EMResultSerializer
 from rest_framework.decorators import api_view
 from rest_framework.response import Response
 
+import logging
 
-@api_view(['GET', 'POST', 'DELETE'])
+logger = logging.getLogger("Shelly3EMRest")
+
+
+@api_view(["GET", "POST", "DELETE"])
 def device_list(request):
-    if request.method == 'GET':
+    if request.method == "GET":
         devices = Shelly3EMDevice.objects.all()
 
         devices_serializer = Shelly3EMDeviceSerializer(devices, many=True)
         return JsonResponse(devices_serializer.data, safe=False)  # 'safe=False' for objects serialization
 
-    elif request.method == 'POST':
+    elif request.method == "POST":
         device_data = JSONParser().parse(request)
         device_serializer = Shelly3EMDeviceSerializer(data=device_data)
         if device_serializer.is_valid():
             device_serializer.save()
             return JsonResponse(device_serializer.data, status=status.HTTP_201_CREATED)
-        return JsonResponse(device_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+        return JsonResponse(
+            device_serializer.errors, status=status.HTTP_400_BAD_REQUEST
+        )
 
-    elif request.method == 'DELETE':
+    elif request.method == "DELETE":
         count = Shelly3EMDevice.objects.all().delete()
-        return JsonResponse({'message': '{} Devices were deleted successfully!'.format(count[0])}, status=status.HTTP_204_NO_CONTENT)
+        return JsonResponse(
+            {"message": "{} Devices were deleted successfully!".format(count[0])},
+            status=status.HTTP_204_NO_CONTENT,
+        )
 
 
-@api_view(['GET', 'PUT', 'DELETE'])
+@api_view(["GET", "PUT", "DELETE"])
 def device_detail(request, id):
     device = get_object_or_404(Shelly3EMDevice, id=id)
 
-    if request.method == 'GET':
+    if request.method == "GET":
         device_serializer = Shelly3EMDeviceSerializer(device)
         return JsonResponse(device_serializer.data)
 
-    elif request.method == 'PUT':
+    elif request.method == "PUT":
         device_data = JSONParser().parse(request)
         device_serializer = Shelly3EMDeviceSerializer(device, data=device_data)
         if device_serializer.is_valid():
             device_serializer.save()
             return JsonResponse(device_serializer.data)
- return JsonResponse(device_serializer.errors, status=status.HTTP_400_BAD_REQUEST) + return JsonResponse( + device_serializer.errors, status=status.HTTP_400_BAD_REQUEST + ) - elif request.method == 'DELETE': + elif request.method == "DELETE": device.delete() - return JsonResponse({'message': 'Device was deleted successfully!'}, status=status.HTTP_204_NO_CONTENT) + return JsonResponse( + {"message": "Device was deleted successfully!"}, + status=status.HTTP_204_NO_CONTENT, + ) -@api_view(['GET']) +@api_view(["GET"]) def device_results(request, id): device = get_object_or_404(Shelly3EMDevice, id=id) - start_param = request.GET.get( - 'start', timezone.now() + timezone.timedelta(days=-1)) - end_param = request.GET.get('end', timezone.now()) - - results = Shelly3EMResult.objects.filter( - device_id=device, date__range=[start_param, end_param]).order_by('date').prefetch_related('emeters') - - average_power = ( - results - .annotate(hour=TruncHour('date')) - .values('hour') - .annotate(average_power=Avg('total_power')) - .annotate(average_power_returned=Avg(Case( - When(total_power__gt=0, then=0), - default='total_power', - output_field=FloatField(), - ))) + start_param = request.GET.get("start") + end_param = request.GET.get("end") + + # Use dateutil parser to handle different formats + start_param = parser.parse(start_param) + end_param = parser.parse(end_param) + + # Ensure dates are timezone-aware + if start_param.tzinfo is None: + start_param = timezone.make_aware(start_param) + if end_param.tzinfo is None: + end_param = timezone.make_aware(end_param) + + logger.debug(f"Request for device {device.id} from {start_param} to {end_param}") + + # Determine interval length based on date range + interval_length = 5 if (end_param - start_param).days <= 7 else 15 + + query_results = """ + SELECT + time_bucket('{interval_length} minutes', date) AS interval, + AVG(total_power) AS avg_total_power, + MIN(date) AS min_date + FROM + {table_name} + WHERE + device_id = %s AND + date BETWEEN %s AND %s + GROUP BY + interval + ORDER BY + interval; + """.format( + table_name=Shelly3EMResult._meta.db_table, interval_length=interval_length ) - total_power = 0 - total_returned_power = 0 - - if average_power.exists(): + query_emeter_results = """ + SELECT + time_bucket('{interval_length} minutes', r.date) AS interval, + er.emeter_id, + AVG(er.power) AS avg_power, + AVG(er.pf) AS avg_pf, + AVG(er.current) AS avg_current, + AVG(er.voltage) AS avg_voltage, + AVG(er.total) AS avg_total, + AVG(er.total_returned) AS avg_total_returned + FROM + {result_table_name} AS r + JOIN + {emeter_result_table_name} AS er + ON + r.device_id = er.device_id AND r.date = er.date + WHERE + r.device_id = %s AND + r.date BETWEEN %s AND %s + GROUP BY + interval, er.emeter_id + ORDER BY + interval, er.emeter_id; + """.format( + result_table_name=Shelly3EMResult._meta.db_table, + emeter_result_table_name=Shelly3EMEmeterResult._meta.db_table, + interval_length=interval_length, + ) - # reduce power average based on if first or last hour is not complete - first_hour = average_power.first()['hour'] - first_hour_results = results.filter( - date__range=[first_hour, first_hour + timezone.timedelta(hours=1)]).values('date').order_by('date') - first_hour_percent = (first_hour_results.first()['date'] - first_hour) / timezone.timedelta(hours=1) - first_hour_power_reduction = average_power.first()['average_power'] * first_hour_percent - first_hour_power_returned_reduction = average_power.first()['average_power_returned'] * first_hour_percent + 
query_total_power = """ + WITH interval_data AS ( + SELECT + date, + total_power, + LEAST(total_power, 0) AS total_power_returned + FROM + {table_name} + WHERE + device_id = %s AND + date BETWEEN %s AND %s + ), + hourly_totals AS ( + SELECT + time_bucket('1 hour', date) AS interval, + AVG(total_power) AS total_power_per_hour, + AVG(total_power_returned) AS total_power_returned_per_hour + FROM + interval_data + GROUP BY + interval + ), + min_max_dates AS ( + SELECT + MIN(date) AS min_date, + MAX(date) AS max_date + FROM + interval_data + ) + SELECT + SUM( + CASE + WHEN interval = time_bucket('1 hour', (SELECT min_date FROM min_max_dates)) + THEN total_power_per_hour * (1 - (EXTRACT(epoch FROM (SELECT min_date FROM min_max_dates) - date_trunc('hour', (SELECT min_date FROM min_max_dates))) / 3600.0)) + WHEN interval = time_bucket('1 hour', (SELECT max_date FROM min_max_dates)) + THEN total_power_per_hour * (EXTRACT(epoch FROM (SELECT max_date FROM min_max_dates) - date_trunc('hour', (SELECT max_date FROM min_max_dates))) / 3600.0) + ELSE total_power_per_hour + END + ) AS total_power_wh, + SUM( + CASE + WHEN interval = time_bucket('1 hour', (SELECT min_date FROM min_max_dates)) + THEN total_power_returned_per_hour * (1 - (EXTRACT(epoch FROM (SELECT min_date FROM min_max_dates) - date_trunc('hour', (SELECT min_date FROM min_max_dates))) / 3600.0)) + WHEN interval = time_bucket('1 hour', (SELECT max_date FROM min_max_dates)) + THEN total_power_returned_per_hour * (EXTRACT(epoch FROM (SELECT max_date FROM min_max_dates) - date_trunc('hour', (SELECT max_date FROM min_max_dates))) / 3600.0) + ELSE total_power_returned_per_hour + END + ) AS total_power_returned_wh + FROM + hourly_totals; + """.format(table_name=Shelly3EMResult._meta.db_table) + with connection.cursor() as cursor: + params = [device.id, start_param, end_param] + cursor.execute(query_results, params) + rows_15min = cursor.fetchall() - last_hour = average_power.last()['hour'] - last_hour_results = results.filter( - date__range=[last_hour, last_hour + timezone.timedelta(hours=1)]).values('date').order_by('date') - last_hour_percent = ((last_hour + timezone.timedelta(hours=1)) - last_hour_results.last()['date']) / timezone.timedelta(hours=1) - last_hour_power_reduction = average_power.last()['average_power'] * last_hour_percent - last_hour_power_returned_reduction = average_power.last()['average_power_returned'] * last_hour_percent + primary_results = [ + { + "interval": interval, + "total_power": total_power, + "date": date, + } + for (interval, total_power, date) in rows_15min + ] + # Create a dictionary to store emeter results by interval + emeter_results_by_interval = {} - total_power = average_power.aggregate(Sum('average_power'))[ - 'average_power__sum'] - first_hour_power_reduction - last_hour_power_reduction - total_returned_power = average_power.aggregate(Sum('average_power_returned'))[ - 'average_power_returned__sum'] - first_hour_power_returned_reduction - last_hour_power_returned_reduction + cursor.execute(query_emeter_results, params) + emeter_rows = cursor.fetchall() - if request.method == 'GET': - result_serializer = Shelly3EMResultSerializer(results, many=True) - - if request.META.get('HTTP_ACCEPT') == 'text/csv': - result_data = result_serializer.data - else: - result_data = {'results': result_serializer.data, - 'total_power': total_power, - 'total_returned_power': total_returned_power, } + for row in emeter_rows: + ( + interval, + emeter_id, + avg_power, + avg_pf, + avg_current, + avg_voltage, + avg_total, + 
avg_total_returned, + ) = row + if interval not in emeter_results_by_interval: + emeter_results_by_interval[interval] = [] + emeter_results_by_interval[interval].append( + { + "emeter_id": emeter_id, + "power": avg_power, + "pf": avg_pf, + "current": avg_current, + "voltage": avg_voltage, + "total": avg_total, + "total_returned": avg_total_returned, + } + ) + + # Combine primary results with emeter results + results_15min = [] + for result in primary_results: + interval = result["interval"] + result["emeters"] = emeter_results_by_interval.get(interval, []) + results_15min.append(result) + cursor.execute(query_total_power, (device.id, start_param, end_param)) + total_power, total_power_returned = cursor.fetchone() + + if request.META.get("HTTP_ACCEPT") == "text/csv": + result_data = results_15min + else: + result_data = { + "results": results_15min, + "total_power": total_power, + "total_returned_power": total_power_returned, + } return Response(result_data) -@api_view(['POST']) +@api_view(["POST"]) def get_and_save_device_results(request): - - if request.method == 'POST': + if request.method == "POST": devices = Shelly3EMDevice.objects.filter(active=True).all() results = [] for device in devices: @@ -132,4 +269,6 @@ def get_and_save_device_results(request): results.append(result) result_serializer = Shelly3EMResultSerializer(results, many=True) - return JsonResponse(result_serializer.data, safe=False, status=status.HTTP_200_OK) + return JsonResponse( + result_serializer.data, safe=False, status=status.HTTP_200_OK + )
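Reviewer note: the sketch below is a quick manual smoke test for the reworked results endpoint, not part of the diff. It assumes a local server on port 8000, that the shelly3em_rest URLs are mounted under a /shelly3em/ prefix (hypothetical; adjust to the project's root urls.py), and that a device with id 1 already has stored results. Note that start and end are now parsed with dateutil and no longer fall back to the last 24 hours, so both query parameters must be supplied.

# Smoke test for the TimescaleDB-backed device_results view (sketch under the assumptions above).
import requests
from datetime import datetime, timedelta, timezone

BASE_URL = "http://localhost:8000/shelly3em"  # hypothetical prefix, adjust to your URL conf
DEVICE_ID = 1  # any existing Shelly3EMDevice id

end = datetime.now(timezone.utc)
start = end - timedelta(days=1)  # ranges of 7 days or less are bucketed into 5-minute intervals

response = requests.get(
    f"{BASE_URL}/devices/{DEVICE_ID}/results/",
    params={"start": start.isoformat(), "end": end.isoformat()},  # both required now
)
response.raise_for_status()
payload = response.json()

# Top-level totals come from the hourly time_bucket query (energy over the range,
# with the partial first and last hours scaled down).
print(payload["total_power"], payload["total_returned_power"])

# Each bucket carries the averaged total_power plus per-phase emeter averages.
for bucket in payload["results"]:
    print(bucket["interval"], bucket["total_power"], len(bucket["emeters"]))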