diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
index e91a2a0b65..8fd2adfce7 100644
--- a/.github/workflows/deploy.yml
+++ b/.github/workflows/deploy.yml
@@ -57,13 +57,9 @@ jobs:
         run: bash ./package_antares_web.sh
         working-directory: scripts
 
-      - name: 📜 Install changelog requirements
-        run: npm install -g auto-changelog
-
-      - name: 📜️ Generate changelog file
+      - name: 📜️ Copy changelog file
         run: |
-          auto-changelog -l false --hide-empty-releases
-          mv CHANGELOG.md dist/package/CHANGELOG.md
+          cp docs/CHANGELOG.md dist/package/CHANGELOG.md
 
       - name: 📦 Archive Antares Desktop for Windows
         if: matrix.os == 'windows-latest'
diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py
index 50220da54a..c2106ef2f3 100644
--- a/antarest/study/business/binding_constraint_management.py
+++ b/antarest/study/business/binding_constraint_management.py
@@ -24,6 +24,9 @@
 from antarest.study.business.utils import execute_or_add_commands
 from antarest.study.model import Study
 from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_GROUP,
+    DEFAULT_OPERATOR,
+    DEFAULT_TIMESTEP,
     BindingConstraintFrequency,
     BindingConstraintOperator,
 )
@@ -43,7 +46,6 @@
     default_bc_weekly_daily as default_bc_weekly_daily_86,
 )
 from antarest.study.storage.variantstudy.model.command.create_binding_constraint import (
-    DEFAULT_GROUP,
     EXPECTED_MATRIX_SHAPES,
     BindingConstraintMatrices,
     BindingConstraintPropertiesBase,
@@ -470,8 +472,8 @@ def constraint_model_adapter(constraint: t.Mapping[str, t.Any], version: int) ->
             "id": constraint["id"],
             "name": constraint["name"],
             "enabled": constraint.get("enabled", True),
-            "time_step": constraint.get("type", BindingConstraintFrequency.HOURLY),
-            "operator": constraint.get("operator", BindingConstraintOperator.EQUAL),
+            "time_step": constraint.get("type", DEFAULT_TIMESTEP),
+            "operator": constraint.get("operator", DEFAULT_OPERATOR),
             "comments": constraint.get("comments", ""),
             "terms": constraint.get("terms", []),
         }
@@ -694,8 +696,7 @@ def create_binding_constraint(
         if bc_id in {bc.id for bc in self.get_binding_constraints(study)}:
             raise DuplicateConstraintName(f"A binding constraint with the same name already exists: {bc_id}.")
 
-        # TODO: the default operator should be fixed somewhere so this condition can be consistent
-        check_attributes_coherence(data, version, data.operator or BindingConstraintOperator.EQUAL)
+        check_attributes_coherence(data, version, data.operator or DEFAULT_OPERATOR)
 
         new_constraint = {"name": data.name, **json.loads(data.json(exclude={"terms", "name"}, exclude_none=True))}
         args = {
@@ -709,7 +710,7 @@ def create_binding_constraint(
 
         # Validates the matrices. Needed when the study is a variant because we only append the command to the list
         if isinstance(study, VariantStudy):
-            time_step = data.time_step or BindingConstraintFrequency.HOURLY
+            time_step = data.time_step or DEFAULT_TIMESTEP
             command.validates_and_fills_matrices(
                 time_step=time_step, specific_matrices=None, version=version, create=True
             )
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py
index 11749cf456..c81d66ed5c 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py
@@ -2,6 +2,8 @@
 Object model used to read and update binding constraint configuration.
 """
 
+import typing as t
+
 from antarest.study.business.enum_ignore_case import EnumIgnoreCase
 
 
@@ -35,3 +37,17 @@ class BindingConstraintOperator(EnumIgnoreCase):
     GREATER = "greater"
     BOTH = "both"
     EQUAL = "equal"
+
+
+OPERATOR_MATRICES_MAP: t.Dict[BindingConstraintOperator, t.List[str]] = {
+    BindingConstraintOperator.EQUAL: ["eq"],
+    BindingConstraintOperator.GREATER: ["gt"],
+    BindingConstraintOperator.LESS: ["lt"],
+    BindingConstraintOperator.BOTH: ["lt", "gt"],
+}
+
+
+DEFAULT_GROUP = "default"
+"""Default group for binding constraints (since v8.7)."""
+DEFAULT_OPERATOR = BindingConstraintOperator.EQUAL
+DEFAULT_TIMESTEP = BindingConstraintFrequency.HOURLY
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/files.py b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
index b74b0f2daf..b8140b8c87 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/files.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/files.py
@@ -10,14 +10,18 @@
 
 from antarest.core.model import JSON
 from antarest.study.storage.rawstudy.ini_reader import IniReader
-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_GROUP,
+    DEFAULT_OPERATOR,
+    DEFAULT_TIMESTEP,
+    BindingConstraintFrequency,
+)
 from antarest.study.storage.rawstudy.model.filesystem.config.exceptions import (
     SimulationParsingError,
     XpansionParsingError,
 )
 from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import extract_filtering
 from antarest.study.storage.rawstudy.model.filesystem.config.model import (
-    DEFAULT_GROUP,
     Area,
     BindingConstraintDTO,
     DistrictSet,
@@ -212,11 +216,14 @@ def _parse_bindings(root: Path) -> t.List[BindingConstraintDTO]:
         # contains a set of strings in the following format: "area.cluster"
         cluster_set = set()
         # Default value for time_step
-        time_step = BindingConstraintFrequency.HOURLY
+        time_step = bind.get("type", DEFAULT_TIMESTEP)
+        # Default value for operator
+        operator = bind.get("operator", DEFAULT_OPERATOR)
+        # Default value for group
+        group = bind.get("group", DEFAULT_GROUP)
+        # Build areas and clusters based on terms
         for key in bind:
-            if key == "type":
-                time_step = BindingConstraintFrequency(bind[key])
-            elif "%" in key:
+            if "%" in key:
                 areas = key.split("%", 1)
                 area_set.add(areas[0])
                 area_set.add(areas[1])
@@ -224,13 +231,8 @@ def _parse_bindings(root: Path) -> t.List[BindingConstraintDTO]:
                 cluster_set.add(key)
                 area_set.add(key.split(".", 1)[0])
 
-        group = bind.get("group", DEFAULT_GROUP)
         bc = BindingConstraintDTO(
-            id=bind["id"],
-            areas=area_set,
-            clusters=cluster_set,
-            time_step=time_step,
-            group=group,
+            id=bind["id"], areas=area_set, clusters=cluster_set, time_step=time_step, operator=operator, group=group
         )
         output_list.append(bc)
 
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/model.py b/antarest/study/storage/rawstudy/model/filesystem/config/model.py
index 612c50e786..46e02bb051 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/config/model.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/model.py
@@ -7,15 +7,18 @@
 from antarest.core.utils.utils import DTO
 from antarest.study.business.enum_ignore_case import EnumIgnoreCase
 
-from .binding_constraint import BindingConstraintFrequency
+from .binding_constraint import (
+    DEFAULT_GROUP,
+    DEFAULT_OPERATOR,
+    DEFAULT_TIMESTEP,
+    BindingConstraintFrequency,
+    BindingConstraintOperator,
+)
 from .field_validators import extract_filtering
 from .renewable import RenewableConfigType
 from .st_storage import STStorageConfigType
 from .thermal import ThermalConfigType
 
-DEFAULT_GROUP = "default"
-"""Default group for binding constraints (since v8.7)."""
-
 
 class EnrModelling(EnumIgnoreCase):
     """
@@ -121,15 +124,18 @@ class BindingConstraintDTO(BaseModel):
 
     Attributes:
         id: The ID of the binding constraint.
-        group: The group for the scenario of BC (optional, required since v8.7).
         areas: List of area IDs on which the BC applies (links or clusters).
         clusters: List of thermal cluster IDs on which the BC applies (format: "area.cluster").
+        time_step: The frequency of the BC (hourly, daily or weekly).
+        operator: The operator of the BC (equal, less, greater or both).
+        group: The group for the scenario of BC (optional, required since v8.7).
     """
 
     id: str
     areas: t.Set[str]
     clusters: t.Set[str]
-    time_step: BindingConstraintFrequency
+    time_step: BindingConstraintFrequency = DEFAULT_TIMESTEP
+    operator: BindingConstraintOperator = DEFAULT_OPERATOR
     # since v8.7
     group: str = DEFAULT_GROUP
 
diff --git a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py
index 7e47affbc9..c27b3b647f 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py
@@ -1,4 +1,3 @@
-import shutil
 import typing as t
 from abc import ABC, abstractmethod
 from dataclasses import dataclass
@@ -6,7 +5,6 @@
 from pathlib import Path
 from zipfile import ZipFile
 
-from antarest.core.exceptions import ChildNotFoundError
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
 from antarest.study.storage.rawstudy.model.filesystem.inode import G, INode, S, V
@@ -109,22 +107,6 @@ def delete(self, url: t.Optional[t.List[str]] = None) -> None:
         elif self.config.path.exists():
             self.config.path.unlink()
 
-    def _infer_path(self) -> Path:
-        if self.get_link_path().exists():
-            return self.get_link_path()
-        elif self.config.path.exists():
-            return self.config.path
-        else:
-            raise ChildNotFoundError(
-                f"Neither link file {self.get_link_path} nor matrix file {self.config.path} exists"
-            )
-
-    def _infer_target_path(self, is_link: bool) -> Path:
-        if is_link:
-            return self.get_link_path()
-        else:
-            return self.config.path
-
     def get_link_path(self) -> Path:
         path = self.config.path.parent / (self.config.path.name + ".link")
         return path
@@ -144,16 +126,6 @@ def save(self, data: t.Union[str, bytes, S], url: t.Optional[t.List[str]] = None
             self.get_link_path().unlink()
         return None
 
-    def rename_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None:
-        target_path = target._infer_target_path(self.get_link_path().exists())
-        target_path.unlink(missing_ok=True)
-        self._infer_path().rename(target_path)
-
-    def copy_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None:
-        target_path = target._infer_target_path(self.get_link_path().exists())
-        target_path.unlink(missing_ok=True)
-        shutil.copy(self._infer_path(), target_path)
-
     def get_lazy_content(
         self,
         url: t.Optional[t.List[str]] = None,
diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
index a68b0f521e..affaedc23e 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py
@@ -1,4 +1,5 @@
 import logging
+import shutil
 from pathlib import Path
 from typing import Any, List, Optional, Union, cast
 
@@ -73,11 +74,11 @@ def parse(
                     raise ChildNotFoundError(f"File '{relpath}' not found in the study '{study_id}'") from e
 
             stopwatch.log_elapsed(lambda x: logger.info(f"Matrix parsed in {x}s"))
-            matrix.dropna(how="any", axis=1, inplace=True)
+            final_matrix = matrix.dropna(how="any", axis=1)
             if return_dataframe:
-                return matrix
+                return final_matrix
 
-            data = cast(JSON, matrix.to_dict(orient="split"))
+            data = cast(JSON, final_matrix.to_dict(orient="split"))
             stopwatch.log_elapsed(lambda x: logger.info(f"Matrix to dict in {x}s"))
 
             return data
@@ -100,3 +101,20 @@ def check_errors(
         if self.nb_columns and len(data) != self.nb_columns:
             errors.append(f"{self.config.path}: Data was wrong size. expected {self.nb_columns} get {len(data)}")
         return errors
+
+    def _infer_path(self) -> Path:
+        if self.get_link_path().exists():
+            return self.get_link_path()
+        elif self.config.path.exists():
+            return self.config.path
+        raise ChildNotFoundError(f"Neither link file {self.get_link_path()} nor matrix file {self.config.path} exists")
+
+    def rename_file(self, target: str) -> None:
+        target_path = self.config.path.parent.joinpath(f"{target}{''.join(self._infer_path().suffixes)}")
+        target_path.unlink(missing_ok=True)
+        self._infer_path().rename(target_path)
+
+    def copy_file(self, target: str) -> None:
+        target_path = self.config.path.parent.joinpath(f"{target}{''.join(self._infer_path().suffixes)}")
+        target_path.unlink(missing_ok=True)
+        shutil.copy(self._infer_path(), target_path)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/bindingconstraints/bindingcontraints.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/bindingconstraints/bindingcontraints.py
index 8ef625200c..51426d7e9e 100644
--- a/antarest/study/storage/rawstudy/model/filesystem/root/input/bindingconstraints/bindingcontraints.py
+++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/bindingconstraints/bindingcontraints.py
@@ -1,4 +1,7 @@
-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    OPERATOR_MATRICES_MAP,
+    BindingConstraintFrequency,
+)
 from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode
 from antarest.study.storage.rawstudy.model.filesystem.inode import TREE
 from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
@@ -52,7 +55,8 @@ def build(self) -> TREE:
             }
             children = {}
             for binding in self.config.bindings:
-                for term in ["lt", "gt", "eq"]:
+                terms = OPERATOR_MATRICES_MAP[binding.operator]
+                for term in terms:
                     matrix_id = f"{binding.id}_{term}"
                     children[matrix_id] = InputSeriesMatrix(
                         self.context,
diff --git a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py
index f236cfbca3..8d7464c5aa 100644
--- a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py
@@ -1,6 +1,10 @@
 import typing as t
 
-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_TIMESTEP,
+    BindingConstraintFrequency,
+    BindingConstraintOperator,
+)
 from antarest.study.storage.rawstudy.model.filesystem.config.model import BindingConstraintDTO, FileStudyTreeConfig
 
 
@@ -8,16 +12,14 @@ def parse_bindings_coeffs_and_save_into_config(
     bd_id: str,
     study_data_config: FileStudyTreeConfig,
     coeffs: t.Mapping[str, t.Union[t.Literal["hourly", "daily", "weekly"], t.Sequence[float]]],
+    operator: BindingConstraintOperator,
+    time_step: BindingConstraintFrequency,
     group: str,
 ) -> None:
     if bd_id not in [bind.id for bind in study_data_config.bindings]:
         areas_set = set()
         clusters_set = set()
-        # Default time_step value
-        time_step = BindingConstraintFrequency.HOURLY
         for k, v in coeffs.items():
-            if k == "type":
-                time_step = BindingConstraintFrequency(v)
             if "%" in k:
                 areas_set |= set(k.split("%"))
             elif "." in k:
@@ -28,6 +30,7 @@ def parse_bindings_coeffs_and_save_into_config(
             group=group,
             areas=areas_set,
             clusters=clusters_set,
+            operator=operator,
             time_step=time_step,
         )
         study_data_config.bindings.append(bc)
diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
index 0e34b5f867..2925e67c3d 100644
--- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py
@@ -9,6 +9,9 @@
 from antarest.matrixstore.model import MatrixData
 from antarest.study.business.all_optional_meta import AllOptionalMetaclass
 from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_GROUP,
+    DEFAULT_OPERATOR,
+    DEFAULT_TIMESTEP,
     BindingConstraintFrequency,
     BindingConstraintOperator,
 )
@@ -24,8 +27,6 @@
 from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand
 from antarest.study.storage.variantstudy.model.model import CommandDTO
 
-DEFAULT_GROUP = "default"
-
 MatrixType = t.List[t.List[MatrixData]]
 
 EXPECTED_MATRIX_SHAPES = {
@@ -80,8 +81,8 @@ def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixTyp
 
 class BindingConstraintPropertiesBase(BaseModel, extra=Extra.forbid, allow_population_by_field_name=True):
     enabled: bool = True
-    time_step: BindingConstraintFrequency = Field(BindingConstraintFrequency.HOURLY, alias="type")
-    operator: BindingConstraintOperator = BindingConstraintOperator.EQUAL
+    time_step: BindingConstraintFrequency = Field(DEFAULT_TIMESTEP, alias="type")
+    operator: BindingConstraintOperator = DEFAULT_OPERATOR
     comments: str = ""
 
     @classmethod
@@ -337,18 +338,21 @@ def apply_binding_constraint(
                     [str(coeff_val) for coeff_val in self.coeffs[link_or_cluster]]
                 )
 
-        group = self.group or DEFAULT_GROUP
-        parse_bindings_coeffs_and_save_into_config(
-            bd_id,
-            study_data.config,
-            self.coeffs or {},
-            group=group,
-        )
         study_data.tree.save(
             binding_constraints,
             ["input", "bindingconstraints", "bindingconstraints"],
         )
 
+        existing_constraint = binding_constraints[new_key]
+        current_operator = self.operator or BindingConstraintOperator(
+            existing_constraint.get("operator", DEFAULT_OPERATOR)
+        )
+        group = self.group or existing_constraint.get("group", DEFAULT_GROUP)
+        time_step = self.time_step or BindingConstraintFrequency(existing_constraint.get("type", DEFAULT_TIMESTEP))
+        parse_bindings_coeffs_and_save_into_config(
+            bd_id, study_data.config, self.coeffs or {}, operator=current_operator, time_step=time_step, group=group
+        )
+
         if version >= 870:
             # When all BC of a given group are removed, the group should be removed from the scenario builder
             old_groups = old_groups or set()
@@ -369,14 +373,13 @@ def apply_binding_constraint(
             BindingConstraintOperator.BOTH: [(self.less_term_matrix, "lt"), (self.greater_term_matrix, "gt")],
         }
 
-        current_operator = self.operator or BindingConstraintOperator(binding_constraints[new_key]["operator"])
-
         for matrix_term, matrix_alias in operator_matrices_map[current_operator]:
             if matrix_term:
                 if not isinstance(matrix_term, str):  # pragma: no cover
                     raise TypeError(repr(matrix_term))
                 if version >= 870:
-                    study_data.tree.save(matrix_term, ["input", "bindingconstraints", f"{bd_id}_{matrix_alias}"])
+                    matrix_id = f"{bd_id}_{matrix_alias}"
+                    study_data.tree.save(matrix_term, ["input", "bindingconstraints", matrix_id])
         return CommandOutput(status=True)
 
 
@@ -394,10 +397,14 @@ class CreateBindingConstraint(AbstractBindingConstraintCommand):
     def _apply_config(self, study_data_config: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]:
         bd_id = transform_name_to_id(self.name)
         group = self.group or DEFAULT_GROUP
+        operator = self.operator or DEFAULT_OPERATOR
+        time_step = self.time_step or DEFAULT_TIMESTEP
         parse_bindings_coeffs_and_save_into_config(
             bd_id,
             study_data_config,
             self.coeffs or {},
+            operator=operator,
+            time_step=time_step,
             group=group,
         )
         return CommandOutput(status=True), {}
diff --git a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py
index 33805f6c73..d2bd4de9ea 100644
--- a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py
@@ -1,13 +1,11 @@
 from typing import Any, Dict, List, Tuple
 
 from antarest.core.model import JSON
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import DEFAULT_GROUP
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
 from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput
-from antarest.study.storage.variantstudy.model.command.create_binding_constraint import (
-    DEFAULT_GROUP,
-    remove_bc_from_scenario_builder,
-)
+from antarest.study.storage.variantstudy.model.command.create_binding_constraint import remove_bc_from_scenario_builder
 from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand
 from antarest.study.storage.variantstudy.model.model import CommandDTO
 
@@ -50,8 +48,11 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
         if study_data.config.version < 870:
             study_data.tree.delete(["input", "bindingconstraints", self.id])
         else:
+            existing_files = study_data.tree.get(["input", "bindingconstraints"], depth=1)
             for term in ["lt", "gt", "eq"]:
-                study_data.tree.delete(["input", "bindingconstraints", f"{self.id}_{term}"])
+                matrix_id = f"{self.id}_{term}"
+                if matrix_id in existing_files:
+                    study_data.tree.delete(["input", "bindingconstraints", matrix_id])
 
             # When all BC of a given group are removed, the group should be removed from the scenario builder
             old_groups = {bd.get("group", DEFAULT_GROUP).lower() for bd in binding_constraints.values()}
diff --git a/antarest/study/storage/variantstudy/model/command/remove_cluster.py b/antarest/study/storage/variantstudy/model/command/remove_cluster.py
index 8f959a2ef1..a9460f4025 100644
--- a/antarest/study/storage/variantstudy/model/command/remove_cluster.py
+++ b/antarest/study/storage/variantstudy/model/command/remove_cluster.py
@@ -193,9 +193,11 @@ def _remove_cluster_from_binding_constraints(self, study_data: FileStudy) -> Non
 
         matrix_suffixes = ["_lt", "_gt", "_eq"] if study_data.config.version >= 870 else [""]
 
+        existing_files = study_data.tree.get(["input", "bindingconstraints"], depth=1)
         for bc_index, bc in bc_to_remove.items():
             for suffix in matrix_suffixes:
-                # noinspection SpellCheckingInspection
-                study_data.tree.delete(["input", "bindingconstraints", f"{bc['id']}{suffix}"])
+                matrix_id = f"{bc['id']}{suffix}"
+                if matrix_id in existing_files:
+                    study_data.tree.delete(["input", "bindingconstraints", matrix_id])
 
         study_data.tree.save(binding_constraints, url)
diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
index 6c1d9bafae..808aaaecc8 100644
--- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
+++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py
@@ -2,17 +2,17 @@
 import typing as t
 
 from antarest.core.model import JSON
-from antarest.matrixstore.model import MatrixData
 from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
+    DEFAULT_GROUP,
+    OPERATOR_MATRICES_MAP,
     BindingConstraintFrequency,
     BindingConstraintOperator,
 )
-from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
+from antarest.study.storage.rawstudy.model.filesystem.config.model import BindingConstraintDTO, FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy
-from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode
+from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix
 from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput
 from antarest.study.storage.variantstudy.model.command.create_binding_constraint import (
-    DEFAULT_GROUP,
     AbstractBindingConstraintCommand,
     TermMatrices,
     create_binding_constraint_config,
@@ -20,27 +20,21 @@
 from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand
 from antarest.study.storage.variantstudy.model.model import CommandDTO
 
-MatrixType = t.List[t.List[MatrixData]]
-
-ALIAS_OPERATOR_MAP = {
-    BindingConstraintOperator.EQUAL: "eq",
-    BindingConstraintOperator.LESS: "lt",
-    BindingConstraintOperator.GREATER: "gt",
-}
-
 
 def _update_matrices_names(
     file_study: FileStudy,
-    binding_constraint_id: str,
+    bc_id: str,
     existing_operator: BindingConstraintOperator,
     new_operator: BindingConstraintOperator,
 ) -> None:
     """
     Update the matrix file name according to the new operator.
+    Due to legacy matrix generation, the new matrix file may already exist;
+    if it does, it is removed before the existing matrix file is renamed.
 
     Args:
         file_study: the file study
-        binding_constraint_id: the binding constraint ID
+        bc_id: the binding constraint ID
         existing_operator: the existing operator
         new_operator: the new operator
 
@@ -48,17 +42,6 @@ def _update_matrices_names(
         NotImplementedError: if the case is not handled
     """
 
-    parent_folder_node = file_study.tree.get_node(["input", "bindingconstraints"])
-    matrix_lt = parent_folder_node.get_node([f"{binding_constraint_id}_lt"])
-    assert isinstance(matrix_lt, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_lt)}"
-    matrix_eq = parent_folder_node.get_node([f"{binding_constraint_id}_eq"])
-    assert isinstance(matrix_eq, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_eq)}"
-    matrix_gt = parent_folder_node.get_node([f"{binding_constraint_id}_gt"])
-    assert isinstance(matrix_gt, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_gt)}"
-
-    # Due to legacy matrices generation, we need to check if the new matrix file already exists
-    #  and if it does, we need to first remove it before renaming the existing matrix file
-
     handled_operators = [
         BindingConstraintOperator.EQUAL,
         BindingConstraintOperator.LESS,
@@ -72,37 +55,31 @@ def _update_matrices_names(
         )
     elif existing_operator == new_operator:
         return  # nothing to do
-    elif existing_operator != BindingConstraintOperator.BOTH and new_operator != BindingConstraintOperator.BOTH:
-        matrix_node = parent_folder_node.get_node([f"{binding_constraint_id}_{ALIAS_OPERATOR_MAP[existing_operator]}"])
-        assert isinstance(
-            matrix_node, LazyNode
-        ), f"Node type not handled yet: LazyNode expected, got {type(matrix_node)}"
-        new_matrix_node = parent_folder_node.get_node([f"{binding_constraint_id}_{ALIAS_OPERATOR_MAP[new_operator]}"])
-        assert isinstance(
-            new_matrix_node, LazyNode
-        ), f"Node type not handled yet: LazyNode expected, got {type(new_matrix_node)}"
-        matrix_node.rename_file(new_matrix_node)
+
+    parent_folder_node = file_study.tree.get_node(["input", "bindingconstraints"])
+    error_msg = "Unhandled node type, expected InputSeriesMatrix, got "
+    if existing_operator != BindingConstraintOperator.BOTH and new_operator != BindingConstraintOperator.BOTH:
+        current_node = parent_folder_node.get_node([f"{bc_id}_{OPERATOR_MATRICES_MAP[existing_operator][0]}"])
+        assert isinstance(current_node, InputSeriesMatrix), f"{error_msg}{type(current_node)}"
+        current_node.rename_file(f"{bc_id}_{OPERATOR_MATRICES_MAP[new_operator][0]}")
     elif new_operator == BindingConstraintOperator.BOTH:
+        current_node = parent_folder_node.get_node([f"{bc_id}_{OPERATOR_MATRICES_MAP[existing_operator][0]}"])
+        assert isinstance(current_node, InputSeriesMatrix), f"{error_msg}{type(current_node)}"
         if existing_operator == BindingConstraintOperator.EQUAL:
-            matrix_eq.rename_file(matrix_lt)
-            matrix_gt.delete()
-            # copy the matrix lt to gt
-            matrix_lt.copy_file(matrix_gt)
-        elif existing_operator == BindingConstraintOperator.LESS:
-            matrix_gt.delete()
-            matrix_lt.copy_file(matrix_gt)
+            current_node.copy_file(f"{bc_id}_gt")
+            current_node.rename_file(f"{bc_id}_lt")
         else:
-            matrix_lt.delete()
-            matrix_gt.copy_file(matrix_lt)
+            term = "lt" if existing_operator == BindingConstraintOperator.GREATER else "gt"
+            current_node.copy_file(f"{bc_id}_{term}")
     else:
-        if new_operator == BindingConstraintOperator.EQUAL:
-            # we may retrieve the mean of the two matrices, but here we just copy the lt matrix
-            matrix_lt.rename_file(matrix_eq)
-            matrix_gt.delete()
-        elif new_operator == BindingConstraintOperator.LESS:
-            matrix_gt.delete()
+        current_node = parent_folder_node.get_node([f"{bc_id}_lt"])
+        assert isinstance(current_node, InputSeriesMatrix), f"{error_msg}{type(current_node)}"
+        if new_operator == BindingConstraintOperator.GREATER:
+            current_node.delete()
         else:
-            matrix_lt.delete()
+            parent_folder_node.get_node([f"{bc_id}_gt"]).delete()
+            if new_operator == BindingConstraintOperator.EQUAL:
+                current_node.rename_file(f"{bc_id}_eq")
 
 
 class UpdateBindingConstraint(AbstractBindingConstraintCommand):
@@ -123,6 +100,31 @@ class UpdateBindingConstraint(AbstractBindingConstraintCommand):
     id: str
 
     def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]:
+        index = next(i for i, bc in enumerate(study_data.bindings) if bc.id == self.id)
+        existing_constraint = study_data.bindings[index]
+        areas_set = existing_constraint.areas
+        clusters_set = existing_constraint.clusters
+        if self.coeffs:
+            areas_set = set()
+            clusters_set = set()
+            for j in self.coeffs.keys():
+                if "%" in j:
+                    areas_set |= set(j.split("%"))
+                elif "." in j:
+                    clusters_set.add(j)
+                    areas_set.add(j.split(".")[0])
+        group = self.group or existing_constraint.group
+        operator = self.operator or existing_constraint.operator
+        time_step = self.time_step or existing_constraint.time_step
+        new_constraint = BindingConstraintDTO(
+            id=self.id,
+            group=group,
+            areas=areas_set,
+            clusters=clusters_set,
+            operator=operator,
+            time_step=time_step,
+        )
+        study_data.bindings[index] = new_constraint
         return CommandOutput(status=True), {}
 
     def _find_binding_config(self, binding_constraints: t.Mapping[str, JSON]) -> t.Optional[t.Tuple[str, JSON]]:
@@ -154,9 +156,11 @@ def _apply(self, study_data: FileStudy) -> CommandOutput:
         # rename matrices if the operator has changed for version >= 870
         if self.operator and study_data.config.version >= 870:
             existing_operator = BindingConstraintOperator(actual_cfg.get("operator"))
-            new_operator = BindingConstraintOperator(self.operator)
+            new_operator = self.operator
             _update_matrices_names(study_data, self.id, existing_operator, new_operator)
 
+        self._apply_config(study_data.config)
+
         updated_matrices = [
             term for term in [m.value for m in TermMatrices] if hasattr(self, term) and getattr(self, term)
         ]
diff --git a/requirements-dev.txt b/requirements-dev.txt
index e7ff79c736..5ea6126c6c 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -11,6 +11,7 @@ pyinstaller-hooks-contrib==2024.6
 # of the corresponding implementation libraries used in production (in `requirements.txt`).
 
 pandas-stubs~=1.4.0
+types-paramiko~=3.4.0
 types-psycopg2~=2.9.4
 types-redis~=4.1.2
 types-requests~=2.27.1
diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py
index aba3d397ac..79b5d5cbd0 100644
--- a/tests/integration/study_data_blueprint/test_binding_constraints.py
+++ b/tests/integration/study_data_blueprint/test_binding_constraints.py
@@ -604,6 +604,16 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
                 else:
                     assert data == np.zeros((matrix_lt3.shape[0], 1)).tolist()
 
+        # Checks that we only see existing matrices inside the Debug View
+        res = client.get(f"/v1/studies/{study_id}/raw", params={"path": "/input/bindingconstraints", "depth": 1})
+        assert res.status_code in {200, 201}
+        assert res.json() == {
+            f"{bc_id_wo_group}_lt": {},
+            f"{bc_id_w_group}_gt": {},
+            f"{bc_id_w_matrix}_eq": {},
+            "bindingconstraints": {},
+        }
+
         # =============================
         # CONSTRAINT TERM MANAGEMENT
         # =============================
diff --git a/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py
index b6ac49fce1..1bc9df122a 100644
--- a/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py
+++ b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py
@@ -1,3 +1,5 @@
+import itertools
+import shutil
 import textwrap
 import typing as t
 from pathlib import Path
@@ -92,3 +94,71 @@ def test_save(self, my_study_config: FileStudyTreeConfig) -> None:
             """
         )
         assert actual == expected
+
+
+class TestCopyAndRenameFile:
+    node: InputSeriesMatrix
+    fake_node: InputSeriesMatrix
+    target: str
+    file: Path
+    link: Path
+    modified_file: Path
+    modified_link: Path
+
+    def _set_up(self, tmp_path: Path) -> None:
+        self.file = tmp_path / "my-study" / "lazy.txt"
+        self.file.parent.mkdir()
+
+        self.link = self.file.parent / f"{self.file.name}.link"
+        self.link.write_text("Link: Mock File Content")
+
+        config = FileStudyTreeConfig(study_path=self.file.parent, path=self.file, version=-1, study_id="")
+        context = ContextServer(matrix=Mock(), resolver=Mock())
+        self.node = InputSeriesMatrix(context=context, config=config)
+
+        self.modified_file = self.file.parent / "lazy_modified.txt"
+        self.modified_link = self.file.parent / f"{self.modified_file.name}.link"
+        config2 = FileStudyTreeConfig(study_path=self.file.parent, path=self.modified_file, version=-1, study_id="")
+        self.fake_node = InputSeriesMatrix(context=Mock(), config=config2)
+        self.target = self.modified_file.stem
+
+    def _checks_behavior(self, rename: bool, target_is_link: bool):
+        # Asserts `_infer_path` fails if there's no file
+        with pytest.raises(ChildNotFoundError):
+            self.fake_node._infer_path()
+
+        # Checks `copy_file`, `rename_file` methods
+        if target_is_link:
+            assert not self.modified_link.exists()
+            assert self.link.exists()
+            assert not self.file.exists()
+            assert not self.modified_file.exists()
+
+            self.node.rename_file(self.target) if rename else self.node.copy_file(self.target)
+
+            assert not self.link.exists() if rename else self.link.exists()
+            assert self.modified_link.exists()
+            assert not self.file.exists()
+            assert not self.modified_file.exists()
+            assert self.modified_link.read_text() == "Link: Mock File Content"
+
+        else:
+            content = b"No Link: Mock File Content"
+            self.node.save(content)
+            assert self.file.read_text() == content.decode("utf-8")
+            assert not self.link.exists()
+            assert not self.modified_file.exists()
+
+            self.node.rename_file(self.target) if rename else self.node.copy_file(self.target)
+
+            assert not self.link.exists()
+            assert not self.file.exists() if rename else self.file.exists()
+            assert self.modified_file.exists()
+            assert not self.modified_link.exists()
+            assert self.modified_file.read_text() == content.decode("utf-8")
+
+    def test_copy_and_rename_file(self, tmp_path: Path):
+        for rename, target_is_link in itertools.product([True, False], repeat=2):
+            self._set_up(tmp_path)
+            self._checks_behavior(rename, target_is_link)
+            shutil.rmtree(tmp_path / "my-study")
diff --git a/tests/storage/repository/filesystem/test_lazy_node.py b/tests/storage/repository/filesystem/test_lazy_node.py
index a2c72415f5..f899d32fa3 100644
--- a/tests/storage/repository/filesystem/test_lazy_node.py
+++ b/tests/storage/repository/filesystem/test_lazy_node.py
@@ -2,8 +2,6 @@
 from typing import List, Optional
 from unittest.mock import Mock
 
-import pytest
-
 from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
 from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer
 from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode
@@ -140,113 +138,3 @@ def test_save_txt(tmp_path: Path):
     assert file.read_text() == content
     assert not link.exists()
     resolver.resolve.assert_called_once_with(content)
-
-
-@pytest.mark.parametrize("target_is_link", [True, False])
-def test_rename_file(tmp_path: Path, target_is_link: bool):
-    file = tmp_path / "my-study/lazy.txt"
-    file.parent.mkdir()
-
-    link = file.parent / f"{file.name}.link"
-    link.write_text("Link: Mock File Content")
-
-    resolver = Mock()
-    resolver.resolve.return_value = None
-
-    resolver2 = Mock()
-    resolver2.resolve.return_value = None
-
-    config = FileStudyTreeConfig(study_path=file, path=file, version=-1, study_id="")
-    context = ContextServer(matrix=Mock(), resolver=resolver)
-    node = MockLazyNode(context=context, config=config)
-
-    renaming_file = file.parent / "lazy_rename.txt"
-    renaming_link = file.parent / f"{renaming_file.name}.link"
-    config2 = FileStudyTreeConfig(study_path=renaming_file, path=renaming_file, version=-1, study_id="")
-    context2 = ContextServer(matrix=Mock(), resolver=resolver2)
-    target = MockLazyNode(context=context2, config=config2)
-
-    if target_is_link:
-        assert not renaming_link.exists()
-        assert link.exists()
-        assert not file.exists()
-        assert not renaming_file.exists()
-
-        node.rename_file(target)
-
-        assert not link.exists()
-        assert renaming_link.exists()
-        assert not file.exists()
-        assert not renaming_file.exists()
-        assert renaming_link.read_text() == "Link: Mock File Content"
-
-    else:
-        content = "No Link: Mock File Content"
-        node.save(content)
-        assert file.read_text() == content
-        assert not link.exists()
-        assert not renaming_file.exists()
-        resolver.resolve.assert_called_once_with(content)
-
-        node.rename_file(target)
-
-        assert not link.exists()
-        assert not file.exists()
-        assert renaming_file.exists()
-        assert not renaming_link.exists()
-        assert renaming_file.read_text() == "No Link: Mock File Content"
-
-
-@pytest.mark.parametrize("target_is_link", [True, False])
-def test_copy_file(tmp_path: Path, target_is_link: bool):
-    file = tmp_path / "my-study/lazy.txt"
-    file.parent.mkdir()
-
-    link = file.parent / f"{file.name}.link"
-    link.write_text("Link: Mock File Content")
-
-    resolver = Mock()
-    resolver.resolve.return_value = None
-
-    resolver2 = Mock()
-    resolver2.resolve.return_value = None
-
-    config = FileStudyTreeConfig(study_path=file, path=file, version=-1, study_id="")
-    context = ContextServer(matrix=Mock(), resolver=resolver)
-    node = MockLazyNode(context=context, config=config)
-
-    copied_file = file.parent / "lazy_copy.txt"
-    copied_link = file.parent / f"{copied_file.name}.link"
-    config2 = FileStudyTreeConfig(study_path=copied_file, path=copied_file, version=-1, study_id="")
-    context2 = ContextServer(matrix=Mock(), resolver=resolver2)
-    target = MockLazyNode(context=context2, config=config2)
-
-    if target_is_link:
-        assert not copied_link.exists()
-        assert link.exists()
-        assert not file.exists()
-        assert not copied_file.exists()
-
-        node.copy_file(target)
-
-        assert link.exists()
-        assert copied_link.exists()
-        assert not file.exists()
-        assert not copied_file.exists()
-        assert copied_link.read_text() == "Link: Mock File Content"
-
-    else:
-        content = "No Link: Mock File Content"
-        node.save(content)
-        assert file.read_text() == content
-        assert not link.exists()
-        assert not copied_file.exists()
-        resolver.resolve.assert_called_once_with(content)
-
-        node.copy_file(target)
-
-        assert not link.exists()
-        assert file.exists()
-        assert copied_file.exists()
-        assert not copied_link.exists()
-        assert copied_file.read_text() == "No Link: Mock File Content"
diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py
index f2dc3ccaf5..d33af4f5d5 100644
--- a/tests/variantstudy/model/command/test_manage_binding_constraints.py
+++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py
@@ -589,7 +589,7 @@ def test__update_matrices_names(
     # update matrices names
     _update_matrices_names(
         file_study=empty_study,
-        binding_constraint_id="bd_rename_matrices",
+        bc_id="bd_rename_matrices",
         existing_operator=existing_operator,
         new_operator=new_operator,
     )