diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5ef1708a2e3c6907a73f7ed0c20b52ea805c69bb..c3cb9bae4fd4e9c05ac9294f339de2c18e0a4a3c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -4,6 +4,7 @@ default:
 
 stages:
   - default_validate
+  - lint
   - build
   - test
   - deliver
@@ -13,6 +14,18 @@ include:
     file: 'gitlab-ci-ansible.yml'
     ref: main
 
+lint-job:
+  stage: lint
+  before_script:
+    - python3 -m venv "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/venv"
+    - source "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/venv/bin/activate"
+    - pip install pylint
+  script:
+    - pylint "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/netbox_rps_plugin" "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/tests"
+  after_script:
+    - deactivate
+    - rm -rf "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/venv"
+
 build-job:       # This job runs in the build stage, which runs first.
   stage: build
   script:
@@ -23,13 +36,14 @@ run-test:
   before_script:
     - chmod 600 $ANSIBLE_PRIVATE_KEY_FILE
     - env ANSIBLE_HOST_KEY_CHECKING=false ANSIBLE_PRIVATE_KEY_FILE="$ANSIBLE_PRIVATE_KEY_FILE" ansible-playbook -i "$TESTING_HOSTS" -u debian ansible/deploy_on_test.yml
-  script:
     - python3 -m venv "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/venv"
     - source "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/venv/bin/activate"
     - pip install -r "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/tests/requirements.e2e.txt"
+  script:
     - env HOST="$HOST" PORT="$PORT" API_KEY="$API_KEY" python3 -m unittest discover -b "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/tests/"
   after_script:
     - env ANSIBLE_HOST_KEY_CHECKING=false ANSIBLE_PRIVATE_KEY_FILE="$ANSIBLE_PRIVATE_KEY_FILE" ansible-playbook -i "$TESTING_HOSTS" -u debian ansible/halt_test.yml
+    - deactivate
     - rm -rf "$CI_PROJECT_DIR/plugins/netbox-rps-plugin/venv"
 
 deliver-latest-job:
diff --git a/ansible/deploy_on_test.yml b/ansible/deploy_on_test.yml
index 0144b1787a9ce87601d1f45d9d6cb9e8f961b9c9..4f6e4dca68a6700017eb4b5865145a07d55b2f08 100644
--- a/ansible/deploy_on_test.yml
+++ b/ansible/deploy_on_test.yml
@@ -32,3 +32,7 @@
         project_src: "/home/debian/netbox/{{ lookup('ansible.builtin.env','CI_PIPELINE_ID') }}/"
         state: present
         pull: true
+
+    - name: Pause for 10 seconds
+      ansible.builtin.pause:
+        seconds: 10
diff --git a/docker-compose.test.yml b/docker-compose.test.yml
index bb90b1f916e8332847d148c28496fbc7de23f7f3..20d07c3bc438b8dbadc4ba8fc0cf991b9169f6c4 100644
--- a/docker-compose.test.yml
+++ b/docker-compose.test.yml
@@ -8,13 +8,49 @@ services:
       start_period: 2s
       retries: 30
     env_file: env/netbox.env
+    logging:
+      driver: "syslog"
+      options:
+        syslog-address: "tcp://gra1-63cfda8371ef06f3c6bf175c.gra1.logs.ovh.com:6514"
+        syslog-format: "rfc5424"
+        tag: "netbox"
   netbox-worker:
     env_file: env/netbox.env
+    logging:
+      driver: "syslog"
+      options:
+        syslog-address: "tcp://gra1-63cfda8371ef06f3c6bf175c.gra1.logs.ovh.com:6514"
+        syslog-format: "rfc5424"
+        tag: "netbox-worker"
   netbox-housekeeping:
     env_file: env/netbox.env
+    logging:
+      driver: "syslog"
+      options:
+        syslog-address: "tcp://gra1-63cfda8371ef06f3c6bf175c.gra1.logs.ovh.com:6514"
+        syslog-format: "rfc5424"
+        tag: "netbox-housekeeping"
   postgres:
     env_file: env/postgres.env
+    logging:
+      driver: "syslog"
+      options:
+        syslog-address: "tcp://gra1-63cfda8371ef06f3c6bf175c.gra1.logs.ovh.com:6514"
+        syslog-format: "rfc5424"
+        tag: "netbox-postgres"
   redis:
     env_file: env/redis.env
+    logging:
+      driver: "syslog"
+      options:
+        syslog-address: "tcp://gra1-63cfda8371ef06f3c6bf175c.gra1.logs.ovh.com:6514"
+        syslog-format: "rfc5424"
+        tag: "netbox-redis"
   redis-cache:
     env_file: env/redis-cache.env
+    logging:
+      driver: "syslog"
+      options:
+        syslog-address: "tcp://gra1-63cfda8371ef06f3c6bf175c.gra1.logs.ovh.com:6514"
+        syslog-format: "rfc5424"
+        tag: "netbox-cache"
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/__init__.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/__init__.py
index 192f3b17d3e128532b30ed5ab3db1286462a23e5..dccded0ec00d7a95f1a584faa5b7a84c304cf28f 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/__init__.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/__init__.py
@@ -1,14 +1,19 @@
+"""Netbox Plugin Configuration"""
+
+# pylint: disable=E0401
 from extras.plugins import PluginConfig
 
 
 class NetBoxRpsConfig(PluginConfig):
-    name = 'netbox_rps_plugin'
-    verbose_name = 'NetBox RPS'
-    description = 'A Netbox plugin to add RPS resources'
-    version = '0.10.1'
+    """Netbox Plugin Configuration class"""
+
+    name = "netbox_rps_plugin"
+    verbose_name = "NetBox RPS"
+    description = "A Netbox plugin to add RPS resources"
+    version = "0.10.1"
     author = "Vincent Simonin"
     author_email = "vincent.simonin@ext.ec.europa.eu"
-    base_url = 'rps'
-
+    base_url = "rps"
 
+# pylint: disable=C0103
 config = NetBoxRpsConfig
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/api/serializers.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/api/serializers.py
index 671926aa11d998db698c447a75c69aefe4b8a566..f7cb5e0908109f9f0a93af86c60110278b7db683 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/api/serializers.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/api/serializers.py
@@ -1,9 +1,13 @@
+"""API Serializer definitions"""
+
 from rest_framework import serializers
-from ..models import Mapping, HttpHeader, SamlConfig
 from netbox.api.serializers import NetBoxModelSerializer, WritableNestedSerializer
+from ..models import Mapping, HttpHeader, SamlConfig
 
 
 class NestedMappingSerializer(WritableNestedSerializer):
+    """Nested Mapping Serializer class"""
+
     url = serializers.HyperlinkedIdentityField(
         view_name="plugins-api:netbox_rps_plugin-api:mapping-detail"
     )
@@ -14,6 +18,8 @@ class NestedMappingSerializer(WritableNestedSerializer):
 
 
 class NestedSamlConfigSerializer(WritableNestedSerializer):
+    """Nested SAML Config Serializer class"""
+
     url = url = serializers.HyperlinkedIdentityField(
         view_name="plugins-api:netbox_rps_plugin-api:samlconfig-detail"
     )
@@ -30,6 +36,8 @@ class NestedSamlConfigSerializer(WritableNestedSerializer):
 
 
 class SamlConfigSerializer(NetBoxModelSerializer):
+    """SAML Config Serializer class"""
+
     url = serializers.HyperlinkedIdentityField(
         view_name="plugins-api:netbox_rps_plugin-api:samlconfig-detail"
     )
@@ -53,6 +61,8 @@ class SamlConfigSerializer(NetBoxModelSerializer):
 
 
 class HttpHeaderSerializer(NetBoxModelSerializer):
+    """HTTP Header Serializer class"""
+
     url = serializers.HyperlinkedIdentityField(
         view_name="plugins-api:netbox_rps_plugin-api:httpheader-detail"
     )
@@ -76,6 +86,8 @@ class HttpHeaderSerializer(NetBoxModelSerializer):
 
 
 class NestedHttpHeaderSerializer(WritableNestedSerializer):
+    """Nested HTTP Header Serializer class"""
+
     url = serializers.HyperlinkedIdentityField(
         view_name="plugins-api:netbox_rps_plugin-api:httpheader-detail"
     )
@@ -86,6 +98,8 @@ class NestedHttpHeaderSerializer(WritableNestedSerializer):
 
 
 class MappingSerializer(NetBoxModelSerializer):
+    """Mapping Serializer class"""
+
     url = serializers.HyperlinkedIdentityField(
         view_name="plugins-api:netbox_rps_plugin-api:mapping-detail"
     )
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/api/urls.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/api/urls.py
index 73ce7fd5371ba866f1689ce90a2232fee61077e0..ff74e95d27da37d8648e1df1c0e724d71c7081de 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/api/urls.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/api/urls.py
@@ -1,7 +1,9 @@
+"""API URLs definition"""
+
 from netbox.api.routers import NetBoxRouter
 from . import views
 
-app_name = 'netbox_rps_plugin'
+APP_NAME = 'netbox_rps_plugin'
 
 router = NetBoxRouter()
 router.register('mapping', views.MappingViewSet)
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/api/views.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/api/views.py
index 0f701135486dce760fb67b69c0c24a28096bff0f..cff1d9585fe9513f7fe155c0065d20a1164ca236 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/api/views.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/api/views.py
@@ -1,10 +1,13 @@
+"""API views definitions"""
+
 from netbox.api.viewsets import NetBoxModelViewSet
 from .. import filtersets, models
-from utilities.utils import count_related
 from .serializers import MappingSerializer, HttpHeaderSerializer, SamlConfigSerializer
 
 
 class MappingViewSet(NetBoxModelViewSet):
+    """Mapping view set class"""
+
     queryset = models.Mapping.objects.prefetch_related("http_headers", "tags").all()
     serializer_class = MappingSerializer
     filterset_class = filtersets.MappingFilterSet
@@ -12,12 +15,16 @@ class MappingViewSet(NetBoxModelViewSet):
 
 
 class HttpHeaderViewSet(NetBoxModelViewSet):
+    """HTTP Header view set class"""
+
     queryset = models.HttpHeader.objects.prefetch_related("mapping", "tags").all()
     serializer_class = HttpHeaderSerializer
     http_method_names = ["get", "post", "patch", "delete"]
 
 
 class SamlConfigViewSet(NetBoxModelViewSet):
+    """SAML config view set class"""
+
     queryset = models.SamlConfig.objects.prefetch_related("mapping", "tags").all()
     serializer_class = SamlConfigSerializer
     http_method_names = ["get", "post", "patch", "delete"]
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/filtersets.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/filtersets.py
index 40adf27860f1d2cd79a23554686ad207c935723b..da2580d153c9e831f859d83b66091a20e1ad9126 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/filtersets.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/filtersets.py
@@ -1,35 +1,47 @@
+"""Filtersets definitions"""
+
 from netbox.filtersets import NetBoxModelFilterSet
-from .models import Mapping, HttpHeader, AuthenticationChoices
 from django.db.models import Q
-import django_filters
+from .models import Mapping, HttpHeader
 
 
 class MappingFilterSet(NetBoxModelFilterSet):
+    """Mapping filterset definition class"""
 
     class Meta:
         model = Mapping
-        fields = ('id', 'authentication', 'source', 'target', 'Comment', 'webdav', 'testingpage')
+        fields = (
+            "id",
+            "authentication",
+            "source",
+            "target",
+            "Comment",
+            "webdav",
+            "testingpage",
+        )
 
+    # pylint: disable=W0613
     def search(self, queryset, name, value):
+        """override"""
         if not value.strip():
             return queryset
         return queryset.filter(
-            Q(source__icontains=value) |
-            Q(target__icontains=value) |
-            Q(Comment__icontains=value)
+            Q(source__icontains=value)
+            | Q(target__icontains=value)
+            | Q(Comment__icontains=value)
         )
 
 
 class HttpHeaderFilterSet(NetBoxModelFilterSet):
+    """HTTP Header filterset definition class"""
 
     class Meta:
         model = HttpHeader
-        fields = ('id', 'name', 'value', 'apply_to', 'mapping')
+        fields = ("id", "name", "value", "apply_to", "mapping")
 
+    # pylint: disable=W0613
     def search(self, queryset, name, value):
+        """override"""
         if not value.strip():
             return queryset
-        return queryset.filter(
-            Q(name__icontains=value) |
-            Q(value__icontains=value)
-        )
+        return queryset.filter(Q(name__icontains=value) | Q(value__icontains=value))
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/forms.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/forms.py
index 13b94530e6cd0d38696af2bdb874c8e587963b40..f132a3b061a99c7db55b977added1fa338b03bba 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/forms.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/forms.py
@@ -1,3 +1,5 @@
+"""Forms definitions"""
+
 from django import forms
 from django.utils.translation import gettext as _
 from netbox.forms import (
@@ -5,12 +7,20 @@ from netbox.forms import (
     NetBoxModelFilterSetForm,
     NetBoxModelImportForm,
 )
-from .models import Mapping, AuthenticationChoices, HttpHeader, ApplyToChoices, SamlConfig
 from utilities.forms.fields import DynamicModelMultipleChoiceField, TagFilterField
 from utilities.forms import BOOLEAN_WITH_BLANK_CHOICES, add_blank_choice
+from .models import (
+    Mapping,
+    AuthenticationChoices,
+    HttpHeader,
+    ApplyToChoices,
+    SamlConfig,
+)
 
 
 class MappingForm(NetBoxModelForm):
+    """Mapping form definition class"""
+
     class Meta:
         model = Mapping
         fields = (
@@ -30,6 +40,8 @@ class MappingForm(NetBoxModelForm):
 
 
 class MappingFilterForm(NetBoxModelFilterSetForm):
+    """Mapping filter form definition class"""
+
     model = Mapping
     source = forms.CharField(
         max_length=120, min_length=1, required=False, label="Source URL"
@@ -54,12 +66,16 @@ class MappingFilterForm(NetBoxModelFilterSetForm):
 
 
 class MappingImportForm(NetBoxModelImportForm):
+    """Mapping importation form definition class"""
+
     class Meta:
         model = Mapping
         fields = ("source", "target", "authentication", "testingpage", "webdav")
 
 
 class HttpHeaderForm(NetBoxModelForm):
+    """HTTP header form definition class"""
+
     class Meta:
         model = HttpHeader
         fields = ("mapping", "name", "value", "apply_to")
@@ -72,6 +88,8 @@ class HttpHeaderForm(NetBoxModelForm):
 
 
 class HttpHeaderFilterForm(NetBoxModelFilterSetForm):
+    """HTTP header filter form definition class"""
+
     model = HttpHeader
     name = forms.CharField(
         max_length=120, min_length=1, required=False, label="Header name"
@@ -89,6 +107,8 @@ class HttpHeaderFilterForm(NetBoxModelFilterSetForm):
 
 
 class SamlConfigForm(NetBoxModelForm):
+    """SAML config form definition class"""
+
     class Meta:
         model = SamlConfig
         fields = ("mapping", "acs_url", "logout_url", "force_nauth")
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0001_initial.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0001_initial.py
index 0343a53ec93f52fa6247f2576ecf6c839ce898ee..9d4c884da5f3c6d6abe6f4a218d582cf5e86c632 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0001_initial.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0001_initial.py
@@ -1,40 +1,56 @@
-# Generated by Django 4.1.5 on 2023-02-13 13:42
+"""Migration File"""
+# pylint: disable=C0103
 
-import django.contrib.postgres.fields
 from django.db import migrations, models
-import django.db.models.deletion
 import taggit.managers
 import utilities.json
 
 
 class Migration(migrations.Migration):
+    """Migration Class"""
 
     initial = True
 
     dependencies = [
-        ('extras', '0084_staging'),
-        ('ipam', '0063_standardize_description_comments'),
+        ("extras", "0084_staging"),
+        ("ipam", "0063_standardize_description_comments"),
     ]
 
     operations = [
         migrations.CreateModel(
-            name='Mapping',
+            name="Mapping",
             fields=[
-                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False)),
-                ('created', models.DateTimeField(auto_now_add=True, null=True)),
-                ('last_updated', models.DateTimeField(auto_now=True, null=True)),
-                ('custom_field_data', models.JSONField(blank=True, default=dict, encoder=utilities.json.CustomFieldJSONEncoder)),
-                ('source', models.CharField(blank=True, max_length=120)),
-                ('target', models.CharField(blank=True, max_length=120)),
-                ('authentication', models.CharField(max_length=30)),
-                ('testingpage', models.CharField(max_length=120)),
-                ('webdav', models.BooleanField(default=False)),
-                ('Comment', models.CharField(blank=True, max_length=500)),
-                ('tags', taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag')),
+                (
+                    "id",
+                    models.BigAutoField(
+                        auto_created=True, primary_key=True, serialize=False
+                    ),
+                ),
+                ("created", models.DateTimeField(auto_now_add=True, null=True)),
+                ("last_updated", models.DateTimeField(auto_now=True, null=True)),
+                (
+                    "custom_field_data",
+                    models.JSONField(
+                        blank=True,
+                        default=dict,
+                        encoder=utilities.json.CustomFieldJSONEncoder,
+                    ),
+                ),
+                ("source", models.CharField(blank=True, max_length=120)),
+                ("target", models.CharField(blank=True, max_length=120)),
+                ("authentication", models.CharField(max_length=30)),
+                ("testingpage", models.CharField(max_length=120)),
+                ("webdav", models.BooleanField(default=False)),
+                ("Comment", models.CharField(blank=True, max_length=500)),
+                (
+                    "tags",
+                    taggit.managers.TaggableManager(
+                        through="extras.TaggedItem", to="extras.Tag"
+                    ),
+                ),
             ],
             options={
-                'ordering': ('source'),
+                "ordering": ("source"),
             },
         ),
     ]
-
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0002_http_header.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0002_http_header.py
index 16da86e3d5f9b77866ef1b319ef463dffeb587d4..1156b28d612569b0d9ad2327dc0921b1aa5f8c6d 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0002_http_header.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0002_http_header.py
@@ -1,33 +1,61 @@
+"""Migration File"""
+# pylint: disable=C0103
+
 import django.contrib.postgres.fields
 from django.db import migrations, models
 import django.db.models.deletion
 import taggit.managers
 import utilities.json
 
+
 class Migration(migrations.Migration):
+    """Migration Class"""
 
     initial = True
 
-    dependencies = [
-        ('netbox_rps_plugin', '0001_initial')
-    ]
+    dependencies = [("netbox_rps_plugin", "0001_initial")]
 
     operations = [
         migrations.CreateModel(
-            name='HttpHeader',
+            name="HttpHeader",
+            # pylint: disable=R0801
             fields=[
-                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False)),
-                ('created', models.DateTimeField(auto_now_add=True, null=True)),
-                ('last_updated', models.DateTimeField(auto_now=True, null=True)),
-                ('custom_field_data', models.JSONField(blank=True, default=dict, encoder=utilities.json.CustomFieldJSONEncoder)),
-                ('name', models.CharField(blank=True, max_length=120)),
-                ('value', models.CharField(blank=True, max_length=120)),
-                ('apply_to', models.CharField(max_length=30)),
-                ('mapping', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='http_headers', to='netbox_rps_plugin.mapping')),
-                ('tags', taggit.managers.TaggableManager(through='extras.TaggedItem', to='extras.Tag')),
+                (
+                    "id",
+                    models.BigAutoField(
+                        auto_created=True, primary_key=True, serialize=False
+                    ),
+                ),
+                ("created", models.DateTimeField(auto_now_add=True, null=True)),
+                ("last_updated", models.DateTimeField(auto_now=True, null=True)),
+                (
+                    "custom_field_data",
+                    models.JSONField(
+                        blank=True,
+                        default=dict,
+                        encoder=utilities.json.CustomFieldJSONEncoder,
+                    ),
+                ),
+                ("name", models.CharField(blank=True, max_length=120)),
+                ("value", models.CharField(blank=True, max_length=120)),
+                ("apply_to", models.CharField(max_length=30)),
+                (
+                    "mapping",
+                    models.ForeignKey(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="http_headers",
+                        to="netbox_rps_plugin.mapping",
+                    ),
+                ),
+                (
+                    "tags",
+                    taggit.managers.TaggableManager(
+                        through="extras.TaggedItem", to="extras.Tag"
+                    ),
+                ),
             ],
             options={
-                'ordering': ('name'),
+                "ordering": ("name"),
             },
         ),
     ]
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0003_http_header_unique_constraint.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0003_http_header_unique_constraint.py
index 9f2a1063261cdedaa2da0b790f3faf3b70ab355e..dc9e07f78289403324dfdd5653b14c1b4de2bb35 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0003_http_header_unique_constraint.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0003_http_header_unique_constraint.py
@@ -1,21 +1,25 @@
+"""Migration File"""
+# pylint: disable=C0103
+
 from django.db import migrations, models
-import django.db.models.deletion
 
 
 class Migration(migrations.Migration):
+    """Migration Class"""
 
-    dependencies = [
-        ('netbox_rps_plugin', '0002_http_header')
-    ]
+    dependencies = [("netbox_rps_plugin", "0002_http_header")]
 
     operations = [
         migrations.AlterField(
-            model_name='httpheader',
-            name='value',
+            model_name="httpheader",
+            name="value",
             field=models.CharField(max_length=256, null=True, blank=True),
         ),
         migrations.AddConstraint(
-            model_name='httpheader',
-            constraint=models.UniqueConstraint(fields=('mapping', 'name', 'apply_to'), name='netbox_rps_plugin_httpheader_unique_mapping_name_apply_to'),
+            model_name="httpheader",
+            constraint=models.UniqueConstraint(
+                fields=("mapping", "name", "apply_to"),
+                name="netbox_rps_plugin_httpheader_unique_mapping_name_apply_to",
+            ),
         ),
     ]
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0004_testingpage_nullable.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0004_testingpage_nullable.py
index cbc5e31e7eb930d27432263a51cde9dad9b3377e..bee4289c844d2586f2584331ab5edfafdb90ee07 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0004_testingpage_nullable.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0004_testingpage_nullable.py
@@ -1,17 +1,18 @@
+"""Migration File"""
+# pylint: disable=C0103
+
 from django.db import migrations, models
-import django.db.models.deletion
 
 
 class Migration(migrations.Migration):
+    """Migration Class"""
 
-    dependencies = [
-        ('netbox_rps_plugin', '0003_http_header_unique_constraint')
-    ]
+    dependencies = [("netbox_rps_plugin", "0003_http_header_unique_constraint")]
 
     operations = [
         migrations.AlterField(
-            model_name='mapping',
-            name='testingpage',
+            model_name="mapping",
+            name="testingpage",
             field=models.CharField(max_length=120, null=True, blank=True),
         ),
     ]
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0005_source_mapping_unique_constraint.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0005_source_mapping_unique_constraint.py
index ece218151a926915f2886e1c057a1823c61dbc27..3dd2e2274491f2c344d050e04dde7ce56afc580d 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0005_source_mapping_unique_constraint.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0005_source_mapping_unique_constraint.py
@@ -1,17 +1,20 @@
+"""Migration File"""
+# pylint: disable=C0103
+
 from django.db import migrations, models
-import django.db.models.deletion
 
 
 class Migration(migrations.Migration):
+    """Migration Class"""
 
-    dependencies = [
-        ('netbox_rps_plugin', '0004_testingpage_nullable')
-    ]
+    dependencies = [("netbox_rps_plugin", "0004_testingpage_nullable")]
 
     operations = [
         migrations.AlterField(
-            model_name='mapping',
-            name='source',
-            field=models.CharField(null=False, blank=False, max_length=120, unique=True),
+            model_name="mapping",
+            name="source",
+            field=models.CharField(
+                null=False, blank=False, max_length=120, unique=True
+            ),
         ),
     ]
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0006_url_max_length.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0006_url_max_length.py
index 3cc70a9983ccd58f580ed4b43f000e819ed0ebb7..63f01878886d263658df7991fdc87b279fc2d5f6 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0006_url_max_length.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0006_url_max_length.py
@@ -1,27 +1,30 @@
+"""Migration File"""
+# pylint: disable=C0103
+
 from django.db import migrations, models
-import django.db.models.deletion
 
 
 class Migration(migrations.Migration):
+    """Migration Class"""
 
-    dependencies = [
-        ('netbox_rps_plugin', '0005_source_mapping_unique_constraint')
-    ]
+    dependencies = [("netbox_rps_plugin", "0005_source_mapping_unique_constraint")]
 
     operations = [
         migrations.AlterField(
-            model_name='mapping',
-            name='source',
-            field=models.CharField(null=False, blank=False, max_length=2000, unique=True),
+            model_name="mapping",
+            name="source",
+            field=models.CharField(
+                null=False, blank=False, max_length=2000, unique=True
+            ),
         ),
         migrations.AlterField(
-            model_name='mapping',
-            name='target',
+            model_name="mapping",
+            name="target",
             field=models.CharField(null=False, blank=False, max_length=2000),
         ),
         migrations.AlterField(
-            model_name='mapping',
-            name='testingpage',
+            model_name="mapping",
+            name="testingpage",
             field=models.CharField(null=True, blank=True, max_length=2000),
         ),
     ]
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0007_saml_config.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0007_saml_config.py
index 4414f852dc0c3a712f434644fcc177acfa60f23d..4ba521596893831304840c302178d5202d4c688b 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0007_saml_config.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/migrations/0007_saml_config.py
@@ -1,3 +1,6 @@
+"""Migration File"""
+# pylint: disable=C0103
+
 from django.db import migrations, models
 import utilities.json
 import taggit.managers
@@ -5,11 +8,14 @@ import django.db.models.deletion
 
 
 class Migration(migrations.Migration):
+    """Migration Class"""
+
     dependencies = [("netbox_rps_plugin", "0006_url_max_length")]
 
     operations = [
         migrations.CreateModel(
             name="SamlConfig",
+            # pylint: disable=R0801
             fields=[
                 (
                     "id",
@@ -39,12 +45,14 @@ class Migration(migrations.Migration):
                     models.CharField(blank=False, null=False, max_length=2000),
                 ),
                 ("force_nauth", models.BooleanField(default=False)),
-                ('mapping', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='saml_config', to='netbox_rps_plugin.mapping')),
+                (
+                    "mapping",
+                    models.OneToOneField(
+                        on_delete=django.db.models.deletion.CASCADE,
+                        related_name="saml_config",
+                        to="netbox_rps_plugin.mapping",
+                    ),
+                ),
             ],
         ),
-        #migrations.AddField(
-        #    model_name="mapping",
-        #    name="saml_config",
-        #    field=models.OneToOneField(on_delete=models.SET_NULL, related_name="mapping", to="netbox_rps_plugin.samlconfig", null=True, blank=True),
-        #),
     ]
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/models.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/models.py
index c09a0bbd2fa3a7ed4602e454e23e567c4df90c01..908f07c5cf5ea1c6b75d88e6a3bdac5b41a94754 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/models.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/models.py
@@ -1,14 +1,17 @@
-from django.contrib.postgres.fields import ArrayField
+"""Models definitions"""
+
 from django.db import models
-from netbox.models import NetBoxModel
-from utilities.choices import ChoiceSet
 from django.urls import reverse
 from django.core.validators import URLValidator
+from netbox.models import NetBoxModel
+from utilities.choices import ChoiceSet
 
 URL_MAX_SIZE = 2000
 
 
 class AuthenticationChoices(ChoiceSet):
+    """Authentication choices definition class"""
+
     key = "Mapping.authentication"
 
     DEFAULT_VALUE = "none"
@@ -21,6 +24,8 @@ class AuthenticationChoices(ChoiceSet):
 
 
 class ApplyToChoices(ChoiceSet):
+    """Apply to choices definition class"""
+
     key = "HttpHeader.applyTo"
 
     DEFAULT_VALUE = "request"
@@ -32,6 +37,8 @@ class ApplyToChoices(ChoiceSet):
 
 
 class Mapping(NetBoxModel):
+    """Mapping definition class"""
+
     source = models.CharField(
         max_length=URL_MAX_SIZE,
         blank=False,
@@ -70,10 +77,13 @@ class Mapping(NetBoxModel):
         return f"{self.source}"
 
     def get_absolute_url(self):
+        """override"""
         return reverse("plugins:netbox_rps_plugin:mapping", args=[self.pk])
 
 
 class SamlConfig(NetBoxModel):
+    """SAML config definition class"""
+
     acs_url = models.CharField(
         max_length=URL_MAX_SIZE,
         blank=False,
@@ -99,6 +109,8 @@ class SamlConfig(NetBoxModel):
 
 
 class HttpHeader(NetBoxModel):
+    """HTTP Header definition class"""
+
     mapping = models.ForeignKey(
         Mapping, on_delete=models.CASCADE, related_name="http_headers"
     )
@@ -119,7 +131,8 @@ class HttpHeader(NetBoxModel):
         ordering = ["name"]
 
     def __str__(self):
-        return self.name
+        return f"{self.name}"
 
     def get_absolute_url(self):
+        """override"""
         return reverse("plugins:netbox_rps_plugin:httpheader", args=[self.pk])
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/navigation.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/navigation.py
index f6aa3b25a166180731887205dd782dd88364619b..74f0c15f85dcf40f888fac3bf6e722bb224cd6c6 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/navigation.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/navigation.py
@@ -1,34 +1,34 @@
+"""Navigation Menu definitions"""
+
 from extras.plugins import PluginMenuButton, PluginMenuItem, PluginMenu
 from utilities.choices import ButtonColorChoices
 
-mapping_butons = [
+mapping_buttons = [
     PluginMenuButton(
-        link='plugins:netbox_rps_plugin:mapping_add',
-        title='Add',
-        icon_class='mdi mdi-plus-thick',
-        color=ButtonColorChoices.GREEN
+        link="plugins:netbox_rps_plugin:mapping_add",
+        title="Add",
+        icon_class="mdi mdi-plus-thick",
+        color=ButtonColorChoices.GREEN,
     ),
     PluginMenuButton(
-        link='plugins:netbox_rps_plugin:mapping_add',
-        title='Import',
-        icon_class='mdi mdi-upload',
-        color=ButtonColorChoices.CYAN
+        link="plugins:netbox_rps_plugin:mapping_add",
+        title="Import",
+        icon_class="mdi mdi-upload",
+        color=ButtonColorChoices.CYAN,
     ),
 ]
 
-mappingItem = PluginMenuItem(
-        link='plugins:netbox_rps_plugin:mapping_list',
-        link_text='Mappings',
-        buttons=mapping_butons,
-        permissions=['netbox_rps_plugin.view_mapping'],
+mappingItem = [
+    PluginMenuItem(
+        link="plugins:netbox_rps_plugin:mapping_list",
+        link_text="Mappings",
+        buttons=mapping_buttons,
+        permissions=["netbox_rps_plugin.view_mapping"],
     ),
+]
 
-menu = (
-    PluginMenu(
-        label='Mappings',
-        groups=(
-            ('MAPPINGS', mappingItem),
-        ),
-        icon_class='mdi mdi-graph-outline'
-    )
+menu = PluginMenu(
+    label="Mappings",
+    groups=(("MAPPINGS", mappingItem),),
+    icon_class="mdi mdi-graph-outline",
 )
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/search.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/search.py
index 0f35de95557abb23dab8c285b38ef64f10e1fa9e..feaa5f348c0906480ceeb57461b3110c089c84a0 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/search.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/search.py
@@ -1,9 +1,13 @@
+"""Search definitions"""
+
 from netbox.search import SearchIndex, register_search
 from .models import Mapping, HttpHeader
 
 
 @register_search
 class MappingIndex(SearchIndex):
+    """Mapping search definition class"""
+
     model = Mapping
     fields = (
         ('source', 120),
@@ -14,6 +18,8 @@ class MappingIndex(SearchIndex):
 
 @register_search
 class HttpHeaderIndex(SearchIndex):
+    """Mapping search definition class"""
+
     model = HttpHeader
     fields = (
         ('name', 120),
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/tables.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/tables.py
index f7ff0b933d2bdd7c9cf3ab5670b3fd98fc78a011..ecce3656b47b869603fb63785087c7cfed838153 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/tables.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/tables.py
@@ -1,38 +1,63 @@
-import django_tables2 as tables
+"""Tables definitions"""
 
+import django_tables2 as tables
 from netbox.tables import NetBoxTable, ChoiceFieldColumn, columns
 from .models import Mapping, HttpHeader
 
 
 class MappingTable(NetBoxTable):
+    """Mapping Table definition class"""
+
     authentication = ChoiceFieldColumn()
     source = tables.Column(linkify=True)
     httpheader_count = columns.LinkedCountColumn(
-        viewname='plugins:netbox_rps_plugin:httpheader_list',
-        url_params={'mapping_id': 'pk'},
-        verbose_name='HTTP Headers count'
+        viewname="plugins:netbox_rps_plugin:httpheader_list",
+        url_params={"mapping_id": "pk"},
+        verbose_name="HTTP Headers count",
     )
     tags = columns.TagColumn()
 
     class Meta(NetBoxTable.Meta):
         model = Mapping
         fields = (
-            'pk', 'id', 'source', 'target', 'authentication', 'testingpage', 'webdav', 'Comment', 'httpheader_count', 'tags', 'created', 'last_updated',
+            "pk",
+            "id",
+            "source",
+            "target",
+            "authentication",
+            "testingpage",
+            "webdav",
+            "Comment",
+            "httpheader_count",
+            "tags",
+            "created",
+            "last_updated",
         )
         default_columns = (
-            'source', 'target', 'authentication', 'webdav', 'httpheader_count'
+            "source",
+            "target",
+            "authentication",
+            "webdav",
+            "httpheader_count",
         )
 
+
 class HttpHeaderTable(NetBoxTable):
-    name = tables.Column(
-        linkify=True
-    )
+    """HTTP header Table definition class"""
+
+    name = tables.Column(linkify=True)
 
     class Meta(NetBoxTable.Meta):
         model = HttpHeader
         fields = (
-            'pk', 'id', 'mapping', 'name', 'value', 'apply_to', 'tags', 'created', 'last_updated',
-        )
-        default_columns = (
-            'mapping', 'name', 'value', 'apply_to'
+            "pk",
+            "id",
+            "mapping",
+            "name",
+            "value",
+            "apply_to",
+            "tags",
+            "created",
+            "last_updated",
         )
+        default_columns = ("mapping", "name", "value", "apply_to")
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/urls.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/urls.py
index 7f19cb5bbba0b58f6d6120b0bcba067453d96a42..439c2af4a7ad33851d87d20b5027f1d53312f3a8 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/urls.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/urls.py
@@ -1,3 +1,5 @@
+"""URL definitions"""
+
 from django.urls import path
 from netbox_rps_plugin import views, models
 from netbox.views.generic import ObjectChangeLogView, ObjectJournalView
diff --git a/plugins/netbox-rps-plugin/netbox_rps_plugin/views.py b/plugins/netbox-rps-plugin/netbox_rps_plugin/views.py
index b2e3b4b69cde9d63ad5b99a0dbc662cab603bea5..244e0792c4cbebe9c1d44c25726c80b26d63784b 100644
--- a/plugins/netbox-rps-plugin/netbox_rps_plugin/views.py
+++ b/plugins/netbox-rps-plugin/netbox_rps_plugin/views.py
@@ -1,11 +1,15 @@
+"""Model views definitions"""
+
 from netbox.views import generic
 from utilities.utils import count_related
-from netbox_rps_plugin import forms, tables, filtersets, models
 from utilities.views import ViewTab, register_model_view
+from netbox_rps_plugin import forms, tables, filtersets, models
 from django.utils.translation import gettext as _
 
 
 class MappingView(generic.ObjectView):
+    """Mapping view definition"""
+
     queryset = (
         models.Mapping.objects.all()
         .prefetch_related("http_headers")
@@ -14,6 +18,8 @@ class MappingView(generic.ObjectView):
 
 
 class MappingListView(generic.ObjectListView):
+    """Mapping list view definition"""
+
     queryset = models.Mapping.objects.annotate(
         httpheader_count=count_related(models.HttpHeader, "mapping")
     )
@@ -23,20 +29,28 @@ class MappingListView(generic.ObjectListView):
 
 
 class MappingEditView(generic.ObjectEditView):
+    """Mapping edition view definition"""
+
     queryset = models.Mapping.objects.all()
     form = forms.MappingForm
 
 
 class MappingBulkImportView(generic.BulkImportView):
+    """Mapping bulk import view definition"""
+
     queryset = models.Mapping.objects.all()
     model_form = forms.MappingImportForm
 
 
 class MappingDeleteView(generic.ObjectDeleteView):
+    """Mapping delete view definition"""
+
     queryset = models.Mapping.objects.all()
 
 
 class MappingBulkDeleteView(generic.BulkDeleteView):
+    """Mapping bulk delete view definition"""
+
     queryset = models.Mapping.objects.all()
     filterset = filtersets.MappingFilterSet
     table = tables.MappingTable
@@ -44,6 +58,8 @@ class MappingBulkDeleteView(generic.BulkDeleteView):
 
 @register_model_view(models.Mapping, "httpheader")
 class MappingHttpHeadersView(generic.ObjectChildrenView):
+    """Mapping HTTP Header view definition"""
+
     queryset = models.Mapping.objects.all().prefetch_related("http_headers")
     child_model = models.HttpHeader
     table = tables.HttpHeaderTable
@@ -56,12 +72,16 @@ class MappingHttpHeadersView(generic.ObjectChildrenView):
         hide_if_empty=False,
     )
 
+    # pylint: disable=W0613
     def get_children(self, request, parent):
+        """override"""
         return parent.http_headers
 
 
 @register_model_view(models.Mapping, "samlconfig")
 class MappingSamlConfigView(generic.ObjectView):
+    """Mapping SAML Config view definition"""
+
     base_template = "netbox_rps_plugin/mapping.html"
     queryset = models.Mapping.objects.all().prefetch_related("saml_config")
     template_name = "netbox_rps_plugin/saml_config.html"
@@ -69,15 +89,19 @@ class MappingSamlConfigView(generic.ObjectView):
     tab = ViewTab(
         label=_("SAML Configuration"),
         hide_if_empty=True,
-        badge=lambda obj: 1 if hasattr(obj, 'saml_config') else 0,
+        badge=lambda obj: 1 if hasattr(obj, "saml_config") else 0,
     )
 
 
 class HttpHeaderView(generic.ObjectView):
+    """HTTP Header view definition"""
+
     queryset = models.HttpHeader.objects.all()
 
 
 class HttpHeaderListView(generic.ObjectListView):
+    """HTTP Header list view definition"""
+
     queryset = models.HttpHeader.objects.all()
     table = tables.HttpHeaderTable
     filterset = filtersets.HttpHeaderFilterSet
@@ -85,24 +109,34 @@ class HttpHeaderListView(generic.ObjectListView):
 
 
 class HttpHeaderEditView(generic.ObjectEditView):
+    """HTTP Header edition view definition"""
+
     queryset = models.HttpHeader.objects.all()
     form = forms.HttpHeaderForm
 
 
 class HttpHeaderDeleteView(generic.ObjectDeleteView):
+    """HTTP Header delete view definition"""
+
     queryset = models.HttpHeader.objects.all()
 
 
 class HttpHeaderBulkDeleteView(generic.BulkDeleteView):
+    """HTTP Header bulk delete view definition"""
+
     queryset = models.HttpHeader.objects.all()
     filterset = filtersets.HttpHeaderFilterSet
     table = tables.HttpHeaderTable
 
 
 class SamlConfigEditView(generic.ObjectEditView):
+    """HTTP SAML config edition view definition"""
+
     queryset = models.SamlConfig.objects.all()
     form = forms.SamlConfigForm
 
 
 class SamlConfigDeleteView(generic.ObjectDeleteView):
+    """HTTP SAML config delete view definition"""
+
     queryset = models.SamlConfig.objects.all()
diff --git a/plugins/netbox-rps-plugin/tests/e2e/base.py b/plugins/netbox-rps-plugin/tests/e2e/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..98319076990078d2ae00410758ec0737e9222e2a
--- /dev/null
+++ b/plugins/netbox-rps-plugin/tests/e2e/base.py
@@ -0,0 +1,25 @@
+"""Base TestCase Class for Mapping"""
+
+import os
+import unittest
+import requests
+
+HOST = os.getenv("HOST", default="localhost")
+PORT = os.getenv("PORT", default="8080")
+API_KEY = os.getenv("API_KEY", "only4testingpurpose")
+
+
+class Base(unittest.TestCase):
+    """Base TestCase Class for Mapping"""
+
+    mapping_id = None
+
+    def tearDown(self) -> None:
+        """Teardown function"""
+
+        requests.delete(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
+            json=[{"id": self.mapping_id}],
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
+        )
diff --git a/plugins/netbox-rps-plugin/tests/e2e/test_mapping_creation.py b/plugins/netbox-rps-plugin/tests/e2e/test_mapping_creation.py
index 403a7f8d52b488937f17282aed4aa8f403ff3914..ff6b2a1e10829c30122f7d78a2419b0ee2329716 100644
--- a/plugins/netbox-rps-plugin/tests/e2e/test_mapping_creation.py
+++ b/plugins/netbox-rps-plugin/tests/e2e/test_mapping_creation.py
@@ -1,45 +1,39 @@
+"""Test case for Mapping creation"""
+
 import unittest
-import requests
 import json
 import os
+import requests
+from .base import Base
+
 
+HOST = os.getenv("HOST", default="localhost")
+PORT = os.getenv("PORT", default="8080")
+API_KEY = os.getenv("API_KEY", "only4testingpurpose")
 
-HOST = os.getenv('HOST', default='localhost')
-PORT = os.getenv('PORT', default='8080')
-API_KEY = os.getenv('API_KEY', 'only4testingpurpose')
 
-class TestMappingCreation(unittest.TestCase):
-    mappingId=None
+class TestMappingCreation(Base):
+    """Test case for Mapping creation class"""
 
-    def test_that_mapping_is_created(self):
-        r = requests.post(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
+    def test_that_mapping_is_created(self) -> None:
+        """Test that mapping is created"""
+
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
-                "source": "https://truc6.com/api",
-                "target": "http://10.10.10.10:1886/api",
+                "source": "https://truc00.com/api",
+                "target": "http://10.10.10.10:1800/api",
                 "authentication": "none",
-                "testingpage": None
+                "testingpage": None,
             },
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
-
-        self.mappingId = json.loads(r.content)['id'];
+        self.assertEqual(response.status_code, 201)
 
-    def tearDown(self) -> None:
-        requests.delete(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
-            json=[{
-                "id": self.mappingId
-            }],
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
-        )
+        self.mapping_id = json.loads(response.content)["id"]
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/plugins/netbox-rps-plugin/tests/e2e/test_mapping_unique.py b/plugins/netbox-rps-plugin/tests/e2e/test_mapping_unique.py
index 56076b1d4ae491cd3019f1a9d0f67e666a601718..8121fac3c28ad66332a1f4fd378cbb034c4b44f2 100644
--- a/plugins/netbox-rps-plugin/tests/e2e/test_mapping_unique.py
+++ b/plugins/netbox-rps-plugin/tests/e2e/test_mapping_unique.py
@@ -1,62 +1,55 @@
+"""Test case for Mapping is unique"""
+
 import unittest
-import requests
 import json
 import os
+import requests
+from .base import Base
 
+HOST = os.getenv("HOST", default="localhost")
+PORT = os.getenv("PORT", default="8080")
+API_KEY = os.getenv("API_KEY", "only4testingpurpose")
 
-HOST = os.getenv('HOST', default='localhost')
-PORT = os.getenv('PORT', default='8080')
-API_KEY = os.getenv('API_KEY', 'only4testingpurpose')
 
+class TestMappingUnique(Base):
+    """Test case for Mapping is unique class"""
 
-class TestMappingUnique(unittest.TestCase):
-    mappingId=None
+    def test_that_mapping_is_unique(self) -> None:
+        """Test that mapping is unique"""
 
-    def test_that_mapping_is_unique(self):
-        r = requests.post(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
-                "source": "https://truc7.com/api",
-                "target": "http://10.10.10.10:1886/api",
+                "source": "https://truc8.com/api",
+                "target": "http://10.10.10.10:1888/api",
                 "authentication": "none",
-                "testingpage": None
+                "testingpage": None,
             },
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        self.mappingId = json.loads(r.content)['id'];
+        self.mapping_id = json.loads(response.content)["id"]
 
-        r = requests.post(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
-                "source": "https://truc7.com/api",
-                "target": "http://10.10.10.10:1886/api",
+                "source": "https://truc8.com/api",
+                "target": "http://10.10.10.10:1888/api",
                 "authentication": "none",
-                "testingpage": None
+                "testingpage": None,
             },
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"source":["mapping with this Source already exists."]}')
-
-    def tearDown(self) -> None:
-        requests.delete(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
-            json=[{
-                "id": self.mappingId
-            }],
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(
+            response.content, b'{"source":["mapping with this Source already exists."]}'
         )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/plugins/netbox-rps-plugin/tests/e2e/test_saml_config.py b/plugins/netbox-rps-plugin/tests/e2e/test_saml_config.py
index a8783d6020bfb6ded0f34d023cf406286013521a..7c632d1e560e9d081b1d64af78db7818af4ee06f 100644
--- a/plugins/netbox-rps-plugin/tests/e2e/test_saml_config.py
+++ b/plugins/netbox-rps-plugin/tests/e2e/test_saml_config.py
@@ -1,315 +1,335 @@
+"""Test case for Mapping SAML configuration"""
+
 import json
 import unittest
-import requests
 import os
+import requests
+from .base import Base
+
 
 HOST = os.getenv("HOST", default="localhost")
 PORT = os.getenv("PORT", default="8080")
 API_KEY = os.getenv("API_KEY", "only4testingpurpose")
 
 
-class TestSamlConfig(unittest.TestCase):
-    mappingId = []
+class TestSamlConfig(Base):
+    """Test case for Mapping SAML configuration class"""
 
-    def test_that_saml_config_is_created(self):
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/mapping/".format(HOST, PORT),
+    def test_that_saml_config_is_created(self) -> None:
+        """Test that SAML config is created"""
+
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
                 "source": "https://truc7.com/api",
                 "target": "http://10.10.10.10:1886/api",
                 "authentication": "none",
                 "testingpage": None,
             },
-            headers={"Authorization": "Token {}".format(API_KEY)},
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        self.mappingId = json.loads(r.content)["id"]
+        self.mapping_id = json.loads(response.content)["id"]
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://localhost",
                 "logout_url": "http://localhost",
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
+
+        saml_config_id = json.loads(response.content)["id"]
 
-        samlConfigId = json.loads(r.content)["id"]
+        response = requests.get(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/{self.mapping_id}/",
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
+        )
 
-        r = requests.get(
-            url="http://{}:{}/api/plugins/rps/mapping/{}/".format(
-                HOST, PORT, self.mappingId
-            ),
-            headers={"Authorization": "Token {}".format(API_KEY)},
+        self.assertIsNotNone(json.loads(response.content)["saml_config"])
+        self.assertEqual(
+            json.loads(response.content)["saml_config"]["id"], saml_config_id
         )
 
-        self.assertIsNotNone(json.loads(r.content)["saml_config"])
-        self.assertEqual(json.loads(r.content)["saml_config"]["id"], samlConfigId)
+    def test_that_saml_config_is_cascade_deleted(self) -> None:
+        """Test that SAML config is cascade deleted"""
 
-    def test_that_saml_config_is_cascade_deleted(self):
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/mapping/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
                 "source": "https://truc7.com/api",
                 "target": "http://10.10.10.10:1886/api",
                 "authentication": "none",
                 "testingpage": None,
             },
-            headers={"Authorization": "Token {}".format(API_KEY)},
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        mappingId = json.loads(r.content)["id"]
+        mapping_id = json.loads(response.content)["id"]
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://localhost",
                 "logout_url": "http://localhost",
                 "force_nauth": False,
-                "mapping": mappingId,
+                "mapping": mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        samlConfigId = json.loads(r.content)["id"]
+        saml_config_id = json.loads(response.content)["id"]
 
         requests.delete(
-            url="http://{}:{}/api/plugins/rps/mapping/".format(HOST, PORT),
-            json=[{"id": mappingId}],
-            headers={"Authorization": "Token {}".format(API_KEY)},
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
+            json=[{"id": mapping_id}],
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        r = requests.get(
-            url="http://{}:{}/api/plugins/rps/saml_config/{}/".format(
-                HOST, PORT, samlConfigId
-            ),
+        response = requests.get(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/{saml_config_id}/",
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 404)
+        self.assertEqual(response.status_code, 404)
+
+    def test_that_patch_works(self) -> None:
+        """Test that PATCH HTTP request works"""
 
-    def test_that_patch_works(self):
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/mapping/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/".format(HOST, PORT),
             json={
                 "source": "https://truc7.com/api",
                 "target": "http://10.10.10.10:1886/api",
                 "authentication": "none",
                 "testingpage": None,
             },
-            headers={"Authorization": "Token {}".format(API_KEY)},
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        self.mappingId = json.loads(r.content)["id"]
+        self.mapping_id = json.loads(response.content)["id"]
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://localhost",
                 "logout_url": "http://localhost",
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        samlConfigId = json.loads(r.content)["id"]
+        saml_config_id = json.loads(response.content)["id"]
 
-        r = requests.patch(
-            url="http://{}:{}/api/plugins/rps/saml_config/{}/".format(
-                HOST, PORT, samlConfigId
-            ),
+        response = requests.patch(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/{saml_config_id}/",
             json={"acs_url": "http://anotherhost.com:8080"},
-            headers={"Authorization": "Token {}".format(API_KEY)},
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 200)
+        self.assertEqual(response.status_code, 200)
 
-        r = requests.get(
-            url="http://{}:{}/api/plugins/rps/saml_config/{}/".format(
-                HOST, PORT, samlConfigId
-            ),
-            headers={"Authorization": "Token {}".format(API_KEY)},
+        response = requests.get(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/{saml_config_id}/",
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 200)
+        self.assertEqual(response.status_code, 200)
         self.assertEqual(
-            json.loads(r.content)["acs_url"], "http://anotherhost.com:8080"
+            json.loads(response.content)["acs_url"], "http://anotherhost.com:8080"
         )
 
-    def test_that_acs_url_is_an_url(self):
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/mapping/".format(HOST, PORT),
+    def test_that_acs_url_is_an_url(self) -> None:
+        """Test that ACS URL is an URL"""
+
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
                 "source": "https://truc7.com/api",
                 "target": "http://10.10.10.10:1886/api",
                 "authentication": "none",
                 "testingpage": None,
             },
-            headers={"Authorization": "Token {}".format(API_KEY)},
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        self.mappingId = json.loads(r.content)["id"]
+        self.mapping_id = json.loads(response.content)["id"]
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://thisisanurl",
                 "logout_url": "http://localhost",
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"acs_url":["It must be a url"]}')
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(response.content, b'{"acs_url":["It must be a url"]}')
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "verynotanurl",
                 "logout_url": "http://localhost",
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"acs_url":["It must be a url"]}')
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(response.content, b'{"acs_url":["It must be a url"]}')
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://localhost/api" + ("i" * 1981),
                 "logout_url": "http://localhost",
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
+        self.assertEqual(response.status_code, 400)
         self.assertEqual(
-            r.content,
+            response.content,
             b'{"acs_url":["Ensure this field has no more than 2000 characters."]}',
         )
 
-    def test_that_logout_url_is_an_url(self):
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/mapping/".format(HOST, PORT),
+    def test_that_logout_url_is_an_url(self) -> None:
+        """Test that logout URL is an URL"""
+
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
                 "source": "https://truc7.com/api",
                 "target": "http://10.10.10.10:1886/api",
                 "authentication": "none",
                 "testingpage": None,
             },
-            headers={"Authorization": "Token {}".format(API_KEY)},
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 201)
+        self.assertEqual(response.status_code, 201)
 
-        self.mappingId = json.loads(r.content)["id"]
+        self.mapping_id = json.loads(response.content)["id"]
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://localhost",
                 "logout_url": "http://thisisanurl",
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"logout_url":["It must be a url"]}')
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(response.content, b'{"logout_url":["It must be a url"]}')
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://localhost",
                 "logout_url": "verynotanurl",
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"logout_url":["It must be a url"]}')
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(response.content, b'{"logout_url":["It must be a url"]}')
 
-        r = requests.post(
-            url="http://{}:{}/api/plugins/rps/saml_config/".format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/saml_config/",
             json={
                 "acs_url": "http://localhost/api",
                 "logout_url": "http://localhost/api" + ("i" * 1981),
                 "force_nauth": False,
-                "mapping": self.mappingId,
+                "mapping": self.mapping_id,
             },
             headers={
-                "Authorization": "Token {}".format(API_KEY),
+                "Authorization": f"Token {API_KEY}",
                 "accept": "application/json",
             },
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
+        self.assertEqual(response.status_code, 400)
         self.assertEqual(
-            r.content,
+            response.content,
             b'{"logout_url":["Ensure this field has no more than 2000 characters."]}',
         )
 
-    def tearDown(self) -> None:
-        requests.delete(
-            url="http://{}:{}/api/plugins/rps/mapping/".format(HOST, PORT),
-            json=[{"id": self.mappingId}],
-            headers={"Authorization": "Token {}".format(API_KEY)},
-        )
-
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/plugins/netbox-rps-plugin/tests/e2e/test_unauthenticated.py b/plugins/netbox-rps-plugin/tests/e2e/test_unauthenticated.py
index a80fd2a85fdee62eccacde23d591ea5bba2a1c10..3da35c36324be40779cf66a2324cc791ce152fdd 100644
--- a/plugins/netbox-rps-plugin/tests/e2e/test_unauthenticated.py
+++ b/plugins/netbox-rps-plugin/tests/e2e/test_unauthenticated.py
@@ -1,35 +1,54 @@
+"""Test case for Mapping authentication"""
+
 import unittest
-import requests
 import os
+import requests
 
 
-HOST = os.getenv('HOST', default='localhost')
-PORT = os.getenv('PORT', default='8080')
-API_KEY = os.getenv('API_KEY', 'only4testingpurpose')
+HOST = os.getenv("HOST", default="localhost")
+PORT = os.getenv("PORT", default="8080")
+API_KEY = os.getenv("API_KEY", "only4testingpurpose")
 
 
 class TestUnauthenticatedMappings(unittest.TestCase):
+    """Test case for Mapping authentication class"""
+
+    def test_mappings_get_unauthenticated(self) -> None:
+        """Test that mappings GET authentication"""
+
+        response = requests.get(
+            f"http://{HOST}:{PORT}/api/plugins/rps/mapping/", timeout=5
+        )
+
+        self.assertEqual(response.status_code, 403)
+
+    def test_mappings_post_unauthenticated(self) -> None:
+        """Test that mappings POST authentication"""
 
-    def test_mappings_get_unauthenticated(self):
-        r = requests.get('http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT))
+        response = requests.post(
+            f"http://{HOST}:{PORT}/api/plugins/rps/mapping/", timeout=5
+        )
 
-        self.assertEqual(r.status_code, 403)
+        self.assertEqual(response.status_code, 403)
 
-    def test_mappings_post_unauthenticated(self):
-        r = requests.post('http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT))
+    def test_mappings_patch_unauthenticated(self) -> None:
+        """Test that mappings PATCH authentication"""
 
-        self.assertEqual(r.status_code, 403)
+        response = requests.patch(
+            f"http://{HOST}:{PORT}/api/plugins/rps/mapping/", timeout=5
+        )
 
-    def test_mappings_patch_unauthenticated(self):
-        r = requests.patch('http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT))
+        self.assertEqual(response.status_code, 403)
 
-        self.assertEqual(r.status_code, 403)
+    def test_mappings_delete_unauthenticated(self) -> None:
+        """Test that mappings DELETE authentication"""
 
-    def test_mappings_delete_unauthenticated(self):
-        r = requests.delete('http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT))
+        response = requests.delete(
+            f"http://{HOST}:{PORT}/api/plugins/rps/mapping/", timeout=5
+        )
 
-        self.assertEqual(r.status_code, 403)
+        self.assertEqual(response.status_code, 403)
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/plugins/netbox-rps-plugin/tests/e2e/test_url_max_length.py b/plugins/netbox-rps-plugin/tests/e2e/test_url_max_length.py
index dffdc493224a560506e6a4e2bb16d8380ed8c7de..588b65517ebddae530fafb83ee0295ac2ea0681f 100644
--- a/plugins/netbox-rps-plugin/tests/e2e/test_url_max_length.py
+++ b/plugins/netbox-rps-plugin/tests/e2e/test_url_max_length.py
@@ -1,66 +1,82 @@
+"""Test case for Mapping URL max length """
+
 import unittest
-import requests
 import os
+import requests
+from .base import Base
 
 
-HOST = os.getenv('HOST', default='localhost')
-PORT = os.getenv('PORT', default='8080')
-API_KEY = os.getenv('API_KEY', 'only4testingpurpose')
+HOST = os.getenv("HOST", default="localhost")
+PORT = os.getenv("PORT", default="8080")
+API_KEY = os.getenv("API_KEY", "only4testingpurpose")
 
 
-class TestMappingCreation(unittest.TestCase):
+class TestMappingUrlMaxLength(Base):
+    """Test case for Mapping URL max length class"""
 
-    def test_that_source_url_has_max_length(self):
-        r = requests.post(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
+    def test_that_source_url_has_max_length(self) -> None:
+        """Test that source URL has max length"""
+
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
-                "source": "https://truc.com/api" + ('i' * 1981),
+                "source": "https://truc.com/api" + ("i" * 1981),
                 "target": "http://10.10.10.10:1886/api",
                 "authentication": "none",
-                "testingpage": None
+                "testingpage": None,
             },
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"source":["Ensure this field has no more than 2000 characters."]}')
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(
+            response.content,
+            b'{"source":["Ensure this field has no more than 2000 characters."]}',
+        )
 
-    def test_that_target_url_has_max_length(self):
-        r = requests.post(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
+    def test_that_target_url_has_max_length(self) -> None:
+        """Test that target URL has max length"""
+
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
                 "source": "https://truc.com/api",
-                "target": "http://10.10.10.10:1886/api" + ('i' * 1974),
+                "target": "http://10.10.10.10:1886/api" + ("i" * 1974),
                 "authentication": "none",
-                "testingpage": None
+                "testingpage": None,
             },
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
+        )
+
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(
+            response.content,
+            b'{"target":["Ensure this field has no more than 2000 characters."]}',
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"target":["Ensure this field has no more than 2000 characters."]}')
+    def test_that_testingpage_url_has_max_length(self) -> None:
+        """Test that testing page URL has max length"""
 
-    def test_that_testingpage_url_has_max_length(self):
-        r = requests.post(
-            url='http://{}:{}/api/plugins/rps/mapping/'.format(HOST, PORT),
+        response = requests.post(
+            url=f"http://{HOST}:{PORT}/api/plugins/rps/mapping/",
             json={
                 "source": "https://truc.com/api",
                 "target": "http://10.10.10.10:1886/api",
                 "authentication": "none",
-                "testingpage": "https://truc.com/api" + ('i' * 1981),
+                "testingpage": "https://truc.com/api" + ("i" * 1981),
             },
-            headers={
-                "Authorization": 'Token {}'.format(API_KEY)
-            }
+            headers={"Authorization": f"Token {API_KEY}"},
+            timeout=5,
         )
 
-        self.assertEqual(r.status_code, 400)
-        self.assertEqual(r.content, b'{"testingpage":["Ensure this field has no more than 2000 characters."]}')
+        self.assertEqual(response.status_code, 400)
+        self.assertEqual(
+            response.content,
+            b'{"testingpage":["Ensure this field has no more than 2000 characters."]}',
+        )
 
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000000000000000000000000000000000000..10114237c83e982bf457d437aaac8bd1065fab5b
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,536 @@
+[tool.pylint.main]
+# Analyse import fallback blocks. This can be used to support both Python 2 and 3
+# compatible code, which means that the block might have code that exists only in
+# one or another interpreter, leading to false positives when analysed.
+# analyse-fallback-blocks =
+
+# Clear in-memory caches upon conclusion of linting. Useful if running pylint in
+# a server-like mode.
+# clear-cache-post-run =
+
+# Always return a 0 (non-error) status code, even if lint errors are found. This
+# is primarily useful in continuous integration scripts.
+# exit-zero =
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+# extension-pkg-allow-list =
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
+# for backward compatibility.)
+# extension-pkg-whitelist =
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+# fail-on =
+
+# Specify a score threshold under which the program will exit with error.
+fail-under = 10
+
+# Interpret the stdin as a python script, whose filename needs to be passed as
+# the module_or_package argument.
+# from-stdin =
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore = ["CVS", "venv"]
+
+# Add files or directories matching the regular expressions patterns to the
+# ignore-list. The regex matches against paths and can be in Posix or Windows
+# format. Because '\\' represents the directory delimiter on Windows systems, it
+# can't be used as an escape character.
+# ignore-paths =
+
+# Files or directories matching the regular expression patterns are skipped. The
+# regex matches against base names, not paths. The default value ignores Emacs
+# file locks
+ignore-patterns = ["^\\.#"]
+
+# List of module names for which member attributes should not be checked (useful
+# for modules/projects where namespaces are manipulated during runtime and thus
+# existing member attributes cannot be deduced by static analysis). It supports
+# qualified module names, as well as Unix pattern matching.
+# ignored-modules =
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+# init-hook =
+
+# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
+# number of processors available to use, and will cap the count on Windows to
+# avoid hangs.
+jobs = 1
+
+# Control the amount of potential inferred values when inferring a single object.
+# This can help the performance when dealing with large functions or complex,
+# nested conditions.
+limit-inference-results = 100
+
+# List of plugins (as comma separated values of python module names) to load,
+# usually to register additional checkers.
+# load-plugins =
+
+# Pickle collected data for later comparisons.
+persistent = true
+
+# Minimum Python version to use for version dependent checks. Will default to the
+# version used to run pylint.
+py-version = "3.11"
+
+# Discover python modules and packages in the file system subtree.
+# recursive =
+
+# Add paths to the list of the source roots. Supports globbing patterns. The
+# source root is an absolute path or a path relative to the current working
+# directory used to determine a package namespace for modules located under the
+# source root.
+# source-roots =
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages.
+suggestion-mode = true
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+# unsafe-load-any-extension =
+
+[tool.pylint.basic]
+# Naming style matching correct argument names.
+argument-naming-style = "snake_case"
+
+# Regular expression matching correct argument names. Overrides argument-naming-
+# style. If left empty, argument names will be checked with the set naming style.
+# argument-rgx =
+
+# Naming style matching correct attribute names.
+attr-naming-style = "snake_case"
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style. If left empty, attribute names will be checked with the set naming
+# style.
+# attr-rgx =
+
+# Bad variable names which should always be refused, separated by a comma.
+bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"]
+
+# Bad variable names regexes, separated by a comma. If names match any regex,
+# they will always be refused
+# bad-names-rgxs =
+
+# Naming style matching correct class attribute names.
+class-attribute-naming-style = "any"
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style. If left empty, class attribute names will be checked
+# with the set naming style.
+# class-attribute-rgx =
+
+# Naming style matching correct class constant names.
+class-const-naming-style = "UPPER_CASE"
+
+# Regular expression matching correct class constant names. Overrides class-
+# const-naming-style. If left empty, class constant names will be checked with
+# the set naming style.
+# class-const-rgx =
+
+# Naming style matching correct class names.
+class-naming-style = "PascalCase"
+
+# Regular expression matching correct class names. Overrides class-naming-style.
+# If left empty, class names will be checked with the set naming style.
+# class-rgx =
+
+# Naming style matching correct constant names.
+const-naming-style = "UPPER_CASE"
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style. If left empty, constant names will be checked with the set naming style.
+# const-rgx =
+
+# Minimum line length for functions/classes that require docstrings, shorter ones
+# are exempt.
+docstring-min-length = -1
+
+# Naming style matching correct function names.
+function-naming-style = "snake_case"
+
+# Regular expression matching correct function names. Overrides function-naming-
+# style. If left empty, function names will be checked with the set naming style.
+# function-rgx =
+
+# Good variable names which should always be accepted, separated by a comma.
+good-names = ["i", "j", "k", "ex", "Run", "_"]
+
+# Good variable names regexes, separated by a comma. If names match any regex,
+# they will always be accepted
+# good-names-rgxs =
+
+# Include a hint for the correct naming format with invalid-name.
+# include-naming-hint =
+
+# Naming style matching correct inline iteration names.
+inlinevar-naming-style = "any"
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style. If left empty, inline iteration names will be checked
+# with the set naming style.
+# inlinevar-rgx =
+
+# Naming style matching correct method names.
+method-naming-style = "snake_case"
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style. If left empty, method names will be checked with the set naming style.
+# method-rgx =
+
+# Naming style matching correct module names.
+module-naming-style = "snake_case"
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style. If left empty, module names will be checked with the set naming style.
+# module-rgx =
+
+# Colon-delimited sets of names that determine each other's naming style when the
+# name regexes allow several styles.
+# name-group =
+
+# Regular expression which should only match function or class names that do not
+# require a docstring.
+no-docstring-rgx = "^_|Meta"
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties. These
+# decorators are taken in consideration only for invalid-name.
+property-classes = ["abc.abstractproperty"]
+
+# Regular expression matching correct type alias names. If left empty, type alias
+# names will be checked with the set naming style.
+# typealias-rgx =
+
+# Regular expression matching correct type variable names. If left empty, type
+# variable names will be checked with the set naming style.
+# typevar-rgx =
+
+# Naming style matching correct variable names.
+variable-naming-style = "snake_case"
+
+# Regular expression matching correct variable names. Overrides variable-naming-
+# style. If left empty, variable names will be checked with the set naming style.
+# variable-rgx =
+
+[tool.pylint.classes]
+# Warn about protected attribute access inside special methods
+# check-protected-access-in-special-methods =
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods = ["__init__", "__new__", "setUp", "asyncSetUp", "__post_init__"]
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make", "os._exit"]
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg = ["cls"]
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg = ["mcs"]
+
+[tool.pylint.design]
+# List of regular expressions of class ancestor names to ignore when counting
+# public methods (see R0903)
+# exclude-too-few-public-methods =
+
+# List of qualified class names to ignore when counting class parents (see R0901)
+# ignored-parents =
+
+# Maximum number of arguments for function / method.
+max-args = 5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes = 7
+
+# Maximum number of boolean expressions in an if statement (see R0916).
+max-bool-expr = 5
+
+# Maximum number of branch for function / method body.
+max-branches = 12
+
+# Maximum number of locals for function / method body.
+max-locals = 15
+
+# Maximum number of parents for a class (see R0901).
+max-parents = 7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods = 20
+
+# Maximum number of return / yield for function / method body.
+max-returns = 6
+
+# Maximum number of statements in function / method body.
+max-statements = 50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods = 0
+
+[tool.pylint.exceptions]
+# Exceptions that will emit a warning when caught.
+overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]
+
+[tool.pylint.format]
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+# expected-line-ending-format =
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines = "^\\s*(# )?<?https?://\\S+>?$"
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren = 4
+
+# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
+# tab).
+indent-string = "    "
+
+# Maximum number of characters on a single line.
+max-line-length = 120
+
+# Maximum number of lines in a module.
+max-module-lines = 1000
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+# single-line-class-stmt =
+
+# Allow the body of an if to be on the same line as the test if there is no else.
+# single-line-if-stmt =
+
+[tool.pylint.imports]
+# List of modules that can be imported at any level, not just the top level one.
+# allow-any-import-level =
+
+# Allow explicit reexports by alias from a package __init__.
+# allow-reexport-from-package =
+
+# Allow wildcard imports from modules that define __all__.
+# allow-wildcard-with-all =
+
+# Deprecated modules which should not be used, separated by a comma.
+# deprecated-modules =
+
+# Output a graph (.gv or any supported image format) of external dependencies to
+# the given file (report RP0402 must not be disabled).
+# ext-import-graph =
+
+# Output a graph (.gv or any supported image format) of all (i.e. internal and
+# external) dependencies to the given file (report RP0402 must not be disabled).
+# import-graph =
+
+# Output a graph (.gv or any supported image format) of internal dependencies to
+# the given file (report RP0402 must not be disabled).
+# int-import-graph =
+
+# Force import order to recognize a module as part of the standard compatibility
+# libraries.
+# known-standard-library =
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party = ["enchant"]
+
+# Couples of modules and preferred modules, separated by a comma.
+# preferred-modules =
+
+[tool.pylint.logging]
+# The type of string formatting that logging methods do. `old` means using %
+# formatting, `new` is for `{}` formatting.
+logging-format-style = "old"
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format.
+logging-modules = ["logging"]
+
+[tool.pylint."messages control"]
+# Only show warnings with the listed confidence levels. Leave empty to show all.
+# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED.
+confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"]
+
+# Disable the message, report, category or checker with the given id(s). You can
+# either give multiple identifiers separated by comma (,) or put this option
+# multiple times (only on the command line, not in the configuration file where
+# it should appear only once). You can also use "--disable=all" to disable
+# everything first and then re-enable specific checks. For example, if you want
+# to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use "--disable=all --enable=classes
+# --disable=W".
+disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "useless-suppression", "deprecated-pragma", "use-symbolic-message-instead", "E0401"]
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where it
+# should appear only once). See also the "--disable" option for examples.
+enable = ["c-extension-no-member"]
+
+[tool.pylint.method_args]
+# List of qualified names (i.e., library.method) which require a timeout
+# parameter e.g. 'requests.api.get,requests.api.post'
+timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"]
+
+[tool.pylint.miscellaneous]
+# List of note tags to take in consideration, separated by a comma.
+notes = ["FIXME", "XXX", "TODO"]
+
+# Regular expression of note tags to take in consideration.
+# notes-rgx =
+
+[tool.pylint.refactoring]
+# Maximum number of nested blocks for function / method body
+max-nested-blocks = 5
+
+# Complete name of functions that never returns. When checking for inconsistent-
+# return-statements if a never returning function is called then it will be
+# considered as an explicit return statement and no message will be printed.
+never-returning-functions = ["sys.exit", "argparse.parse_error"]
+
+[tool.pylint.reports]
+# Python expression which should return a score less than or equal to 10. You
+# have access to the variables 'fatal', 'error', 'warning', 'refactor',
+# 'convention', and 'info' which contain the number of messages in each category,
+# as well as 'statement' which is the total number of statements analyzed. This
+# score is used by the global evaluation report (RP0004).
+evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))"
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details.
+# msg-template =
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio). You can also give a reporter class, e.g.
+# mypackage.mymodule.MyReporterClass.
+# output-format =
+
+# Tells whether to display a full report or only the messages.
+# reports =
+
+# Activate the evaluation score.
+score = true
+
+[tool.pylint.similarities]
+# Comments are removed from the similarity computation
+ignore-comments = true
+
+# Docstrings are removed from the similarity computation
+ignore-docstrings = true
+
+# Imports are removed from the similarity computation
+ignore-imports = true
+
+# Signatures are removed from the similarity computation
+ignore-signatures = true
+
+# Minimum lines number of a similarity.
+min-similarity-lines = 10
+
+[tool.pylint.spelling]
+# Limits count of emitted suggestions for spelling mistakes.
+max-spelling-suggestions = 4
+
+# Spelling dictionary name. No available dictionaries : You need to install both
+# the python package and the system dependency for enchant to work..
+# spelling-dict =
+
+# List of comma separated words that should be considered directives if they
+# appear at the beginning of a comment and should not be checked.
+spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:"
+
+# List of comma separated words that should not be checked.
+# spelling-ignore-words =
+
+# A path to a file that contains the private dictionary; one word per line.
+# spelling-private-dict-file =
+
+# Tells whether to store unknown words to the private dictionary (see the
+# --spelling-private-dict-file option) instead of raising a message.
+# spelling-store-unknown-words =
+
+[tool.pylint.typecheck]
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators = ["contextlib.contextmanager"]
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+# generated-members =
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# class is considered mixin if its name matches the mixin-class-rgx option.
+# Tells whether to warn about missing members when the owner of the attribute is
+# inferred to be None.
+ignore-none = true
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference can
+# return multiple potential results while evaluating a Python object, but some
+# branches might not be evaluated, which results in partial inference. In that
+# case, it might be useful to still emit no-member and other checks for the rest
+# of the inferred objects.
+ignore-on-opaque-inference = true
+
+# List of symbolic message names to ignore for Mixin members.
+ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"]
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes = ["optparse.Values", "thread._local", "_thread._local", "argparse.Namespace"]
+
+# Show a hint with possible names when a member name was not found. The aspect of
+# finding the hint is based on edit distance.
+missing-member-hint = true
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance = 1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices = 1
+
+# Regex pattern to define which classes are considered mixins.
+mixin-class-rgx = ".*[Mm]ixin"
+
+# List of decorators that change the signature of a decorated function.
+# signature-mutators =
+
+[tool.pylint.variables]
+# List of additional names supposed to be defined in builtins. Remember that you
+# should avoid defining new builtins when possible.
+# additional-builtins =
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables = true
+
+# List of names allowed to shadow builtins
+# allowed-redefined-builtins =
+
+# List of strings which can identify a callback function by name. A callback name
+# must start or end with one of those strings.
+callbacks = ["cb_", "_cb"]
+
+# A regular expression matching the name of dummy variables (i.e. expected to not
+# be used).
+dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_"
+
+# Argument names that match this expression will be ignored.
+ignored-argument-names = "_.*|^ignored_|^unused_"
+
+# Tells whether we should check for unused import in __init__ files.
+# init-import =
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"]