diff --git a/base/handlers/__init__.py b/base/handlers/__init__.py
new file mode 100644
index 000000000..fdae05bc9
--- /dev/null
+++ b/base/handlers/__init__.py
@@ -0,0 +1,4 @@
+from .ajaxHomeHandler import handler as home_handler
+from .ajaxCounterfactualsHandler import handler as counterfactuals_handler
+from .ajaxChartsHandler import handler as charts_handler
+from .ajaxTrainHandler import handler as train_handler
diff --git a/base/handlers/__pycache__/__init__.cpython-310.pyc b/base/handlers/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 000000000..830ecc83b
Binary files /dev/null and b/base/handlers/__pycache__/__init__.cpython-310.pyc differ
diff --git a/base/handlers/__pycache__/ajaxChartsHandler.cpython-310.pyc b/base/handlers/__pycache__/ajaxChartsHandler.cpython-310.pyc
new file mode 100644
index 000000000..08c8a3de6
Binary files /dev/null and b/base/handlers/__pycache__/ajaxChartsHandler.cpython-310.pyc differ
diff --git a/base/handlers/__pycache__/ajaxCounterfactualsHandler.cpython-310.pyc b/base/handlers/__pycache__/ajaxCounterfactualsHandler.cpython-310.pyc
new file mode 100644
index 000000000..b8666251e
Binary files /dev/null and b/base/handlers/__pycache__/ajaxCounterfactualsHandler.cpython-310.pyc differ
diff --git a/base/handlers/__pycache__/ajaxHomeHandler.cpython-310.pyc b/base/handlers/__pycache__/ajaxHomeHandler.cpython-310.pyc
new file mode 100644
index 000000000..d0d39195b
Binary files /dev/null and b/base/handlers/__pycache__/ajaxHomeHandler.cpython-310.pyc differ
diff --git a/base/handlers/__pycache__/ajaxTrainHandler.cpython-310.pyc b/base/handlers/__pycache__/ajaxTrainHandler.cpython-310.pyc
new file mode 100644
index 000000000..3f9575943
Binary files /dev/null and b/base/handlers/__pycache__/ajaxTrainHandler.cpython-310.pyc differ
diff --git a/base/handlers/ajaxChartsHandler.py b/base/handlers/ajaxChartsHandler.py
new file mode 100644
index 000000000..df690c95c
--- /dev/null
+++ b/base/handlers/ajaxChartsHandler.py
@@ -0,0 +1,205 @@
+import os
+import json
+
+import joblib
+import pandas as pd
+from django.shortcuts import HttpResponse
+from dict_and_html import *
+
+import base.pipeline as pipeline
+from .. import methods
+from ..methods import PIPELINE_PATH
+
+
+def handler(action, request):
+    """Dispatch AJAX chart requests and return a JSON HttpResponse.
+
+    Supported actions:
+      * "pre_trained": load a pre-trained model's saved plots and
+        metadata and return them as HTML fragments.
+      * "delete_pre_trained": remove a trained model (plus the cached
+        preprocessed CSV) and report the models that remain.
+    """
+    status = 200
+    # Default so json.dumps below never sees an unbound name when a branch
+    # falls through (e.g. no "url" in POST, or an unknown dataset type).
+    context = {}
+    if action == "pre_trained":
+        # Load a pre-trained model's cached artefacts.
+        pre_trained_model_name = request.POST.get("pre_trained")
+        request.session["model_name"] = pre_trained_model_name
+        # dataframe name
+        df_name = request.session.get("df_name")
+
+        if df_name == "upload":
+            df_name = request.session.get("df_name_upload_base_name")
+
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + pre_trained_model_name
+        )
+
+        # get the type of the file
+        datasets_types_PipelineJSON_path = os.path.join(
+            PIPELINE_PATH + "/dataset_types_pipeline.json"
+        )
+        datasets_types_pipeline = pipeline.PipelineJSON(
+            datasets_types_PipelineJSON_path
+        )
+        dataset_type = datasets_types_pipeline.read_from_json([df_name])
+
+        if type(dataset_type) is list:
+            dataset_type = dataset_type[0]
+
+        if "url" in request.POST:
+            url = request.POST.get("url")
+
+            if url == "counterfactuals":
+                # Counterfactuals page needs only the t-SNE figure.
+                tsne = joblib.load(model_name_path + "/tsne.sav")
+
+                # Restyle the stored figure: modern, minimalist look.
+                tsne.update_layout(
+                    legend=dict(
+                        x=0.9,
+                        y=0.95,
+                        xanchor="right",
+                        yanchor="top",
+                        bgcolor="rgba(255,255,255,0.8)",
+                        bordercolor="rgba(0,0,0,0.1)",
+                        borderwidth=1,
+                        font=dict(size=12, color="#444"),
+                    ),
+                    margin=dict(l=10, r=10, t=30, b=10),
+                    xaxis=dict(
+                        title_text="",
+                        tickfont=dict(size=10, color="#aaa"),
+                        showline=True,
+                        linecolor="rgba(0,0,0,0.2)",
+                        zeroline=False,
+                        showgrid=False,
+                        ticks="outside",
+                        ticklen=3,
+                    ),
+                    yaxis=dict(
+                        title_text="",
+                        tickfont=dict(size=10, color="#aaa"),
+                        showline=True,
+                        linecolor="rgba(0,0,0,0.2)",
+                        zeroline=False,
+                        showgrid=False,
+                        ticks="outside",
+                        ticklen=3,
+                    ),
+                    plot_bgcolor="#fafafa",
+                    paper_bgcolor="#ffffff",
+                    title=dict(
+                        text="t-SNE Visualization of Data",
+                        font=dict(
+                            size=16, color="#222", family="Helvetica, Arial, sans-serif"
+                        ),
+                        x=0.5,
+                        xanchor="center",
+                        yanchor="top",
+                        pad=dict(t=15),
+                    ),
+                )
+
+                # Hover styling for a smoother user experience.
+                tsne.update_traces(
+                    hoverinfo="text+name",
+                    hoverlabel=dict(bgcolor="white", font_size=12, font_family="Arial"),
+                )
+
+                context = {
+                    "tsne": tsne.to_html(),
+                }
+            else:
+                # Charts page: load the saved evaluation figures.
+                pca = joblib.load(model_name_path + "/pca.sav")
+                classification_report = joblib.load(
+                    model_name_path + "/classification_report.sav"
+                )
+
+                # Per-dataset pipeline metadata for this classifier.
+                json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
+                jsonFile = pipeline.PipelineJSON(json_path)
+
+                classifier_data = jsonFile.read_from_json(
+                    ["classifier", pre_trained_model_name]
+                )
+                classifier_data_flattened = methods.flatten_dict(classifier_data)
+                classifier_data_df = pd.DataFrame([classifier_data_flattened])
+
+                if dataset_type == "tabular":
+                    feature_importance = joblib.load(
+                        model_name_path + "/feature_importance.sav"
+                    )
+                    context = {
+                        "dataset_type": dataset_type,
+                        "pca": pca.to_html(),
+                        "class_report": classification_report.to_html(),
+                        "feature_importance": feature_importance.to_html(),
+                        "classifier_data": classifier_data_df.to_html(),
+                    }
+                elif dataset_type == "timeseries":
+                    tsne = joblib.load(model_name_path + "/tsne.sav")
+                    context = {
+                        "dataset_type": dataset_type,
+                        "pca": pca.to_html(),
+                        "class_report": classification_report.to_html(),
+                        "tsne": tsne.to_html(),
+                        "classifier_data": classifier_data_df.to_html(),
+                    }
+    elif action == "delete_pre_trained":
+        df_name = request.session["df_name"]
+        model_name = request.POST.get("model_name")
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
+        )
+
+        excel_file_name_preprocessed_path = os.path.join(
+            PIPELINE_PATH,
+            f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv",
+        )
+        try:
+            # Remove the cached preprocessed CSV alongside the model.
+            if os.path.exists(excel_file_name_preprocessed_path):
+                os.remove(excel_file_name_preprocessed_path)
+            else:
+                print(f"File '{excel_file_name_preprocessed_path}' does not exist.")
+        except Exception as e:
+            print(f"An error occurred while deleting the file: {e}")
+
+        json_path = os.path.join(PIPELINE_PATH + f"{df_name}" + "/pipeline.json")
+        jsonFile = pipeline.PipelineJSON(json_path)
+        jsonFile.delete_key(["classifier", model_name])
+
+        methods.remove_dir_and_empty_parent(model_name_path)
+
+        if not jsonFile.key_exists("classifier"):
+            # No trained models left; report whether the dataset
+            # directory itself still exists.
+            df_dir = os.path.join(PIPELINE_PATH + f"{df_name}")
+            if not os.path.exists(df_dir):
+                df_name = None
+
+            context = {
+                "df_name": df_name,
+                "available_pretrained_models_info": [],
+            }
+        else:
+            # Models remain: list them from the "classifier" section.
+            available_pretrained_models = jsonFile.read_from_json(
+                ["classifier"]
+            ).keys()
+
+            available_pretrained_models_info = (
+                methods.create_tuple_of_models_text_value(
+                    available_pretrained_models
+                )
+            )
+            context = {
+                "df_name": df_name,
+                "available_pretrained_models_info": available_pretrained_models_info,
+            }
+    return HttpResponse(json.dumps(context), status=status)
diff --git a/base/handlers/ajaxCounterfactualsHandler.py b/base/handlers/ajaxCounterfactualsHandler.py
new file mode 100644
index 000000000..8ce97890d
--- /dev/null
+++ b/base/handlers/ajaxCounterfactualsHandler.py
@@ -0,0 +1,768 @@
+import base.pipeline as pipeline
+import pickle, os
+import pandas as pd
+import json
+from sklearn.preprocessing import LabelEncoder
+import joblib
+from dict_and_html import *
+from .. import methods
+from ..methods import PIPELINE_PATH
+import math
+import numpy as np
+from .. glacier.src.glacier_compute_counterfactuals import gc_compute_counterfactuals
+import base.pipeline as pipeline
+import concurrent.futures
+import json
+from django.shortcuts import HttpResponse
+
+def handler(action, request):
+    status = 200
+    if action == "reset_graph":
+        model_name = request.session.get("model_name")
+        # dataframe name
+        excel_file_name = request.session.get("df_name")
+        # save the plots for future use
+        # folder path: pipelines/<dataset name>/trained_models/<model_name>/
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{excel_file_name}" + "/trained_models/" + model_name
+        )
+
+        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
+
+        tsne = joblib.load(model_name_dir_path + "/tsne.sav")
+        context = {"fig": tsne.to_html()}
+    elif action == "pre_trained":
+        # load pre trained models
+        pre_trained_model_name = request.POST.get("pre_trained")
+        request.session["model_name"] = pre_trained_model_name
+        # dataframe name
+        df_name = request.session.get("df_name")
+
+        if df_name == "upload":
+            df_name = request.session.get("df_name_upload_base_name")
+
+        model_name_path = os.path.join(
+        PIPELINE_PATH + f"{df_name}" + "/trained_models/" + pre_trained_model_name
+        )
+
+        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
+
+        # get the type of the file
+        datasets_types_PipelineJSON_path = os.path.join(
+        PIPELINE_PATH + "/dataset_types_pipeline.json"
+        )
+        datasets_types_pipeline = pipeline.PipelineJSON(
+        datasets_types_PipelineJSON_path
+        )
+        dataset_type = datasets_types_pipeline.read_from_json([df_name])
+
+        if type(dataset_type) is list:
+            dataset_type = dataset_type[0]
+
+        if "url" in request.POST:
+            url = request.POST.get("url")
+            
+            if url == "counterfactuals":
+                # only TSNE
+                tsne = joblib.load(model_name_path + "/tsne.sav")
+
+                # Assuming you already have your fig object created, you can update it like this:
+                # Improved and modern t-SNE visualization
+                tsne.update_layout(
+                    # Modern Legend Design
+                    legend=dict(
+                        x=0.9,
+                        y=0.95,
+                        xanchor="right",
+                        yanchor="top",
+                        bgcolor="rgba(255,255,255,0.8)",  # Light semi-transparent white background
+                        bordercolor="rgba(0,0,0,0.1)",  # Light border for contrast
+                        borderwidth=1,
+                        font=dict(size=12, color="#444"),  # Subtle grey for legend text
+                    ),
+                    # Tight Margins to Focus on the Plot
+                    margin=dict(
+                        l=10, r=10, t=30, b=10
+                    ),  # Very slim margins for a modern look
+                    # Axis Design: Minimalist and Clean
+                    xaxis=dict(
+                        title_text="",  # No axis labels for a clean design
+                        tickfont=dict(
+                            size=10, color="#aaa"
+                        ),  # Light grey for tick labels
+                        showline=True,
+                        linecolor="rgba(0,0,0,0.2)",  # Subtle line color for axis lines
+                        zeroline=False,  # No zero line for a sleek look
+                        showgrid=False,  # Hide grid lines for a minimal appearance
+                        ticks="outside",  # Small ticks outside the axis
+                        ticklen=3,  # Short tick marks for subtlety
+                    ),
+                    yaxis=dict(
+                        title_text="",  # No axis labels
+                        tickfont=dict(size=10, color="#aaa"),
+                        showline=True,
+                        linecolor="rgba(0,0,0,0.2)",
+                        zeroline=False,
+                        showgrid=False,
+                        ticks="outside",
+                        ticklen=3,
+                    ),
+                    # Sleek Background
+                    plot_bgcolor="#fafafa",  # Very light grey background for a smooth finish
+                    paper_bgcolor="#ffffff",  # Pure white paper background
+                    # Modern Title with Elegant Style
+                    title=dict(
+                        text="t-SNE Visualization of Data",
+                        font=dict(
+                            size=16, color="#222", family="Helvetica, Arial, sans-serif"
+                        ),  # Classy font style
+                        x=0.5,
+                        xanchor="center",
+                        yanchor="top",
+                        pad=dict(t=15),  # Padding to separate the title from the plot
+                    ),
+                )
+
+                # Add hover effects for a smooth user experience
+                tsne.update_traces(
+                    hoverinfo="text+name",
+                    hoverlabel=dict(bgcolor="white", font_size=12, font_family="Arial"),
+                )
+
+                context = {
+                    "tsne": tsne.to_html(),
+                }
+            else:
+                # load plots
+                pca = joblib.load(model_name_path + "/pca.sav")
+                classification_report = joblib.load(
+                    model_name_path + "/classification_report.sav"
+                )
+                # tsne = joblib.load(model_name_path + "/tsne.sav")
+
+                # pipeline path
+                json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
+                jsonFile = pipeline.PipelineJSON(json_path)
+
+                # load pipeline data
+                # jsonFile = open(json_path, "r")
+                # pipeline_data = json.load(jsonFile)  # data becomes a dictionary
+                # classifier_data = pipeline_data["classifier"][pre_trained_model_name]
+
+                classifier_data = jsonFile.read_from_json(
+                    ["classifier", pre_trained_model_name]
+                )
+                classifier_data_flattened = methods.flatten_dict(classifier_data)
+                classifier_data_df = pd.DataFrame([classifier_data_flattened])
+
+                if dataset_type == "tabular":
+                    feature_importance = joblib.load(
+                        model_name_path + "/feature_importance.sav"
+                    )
+                    context = {
+                        "dataset_type": dataset_type,
+                        "pca": pca.to_html(),
+                        "class_report": classification_report.to_html(),
+                        "feature_importance": feature_importance.to_html(),
+                        "classifier_data": classifier_data_df.to_html(),
+                    }
+                elif dataset_type == "timeseries":
+                    tsne = joblib.load(model_name_path + "/tsne.sav")
+                    context = {
+                        "dataset_type": dataset_type,
+                        "pca": pca.to_html(),
+                        "class_report": classification_report.to_html(),
+                        "tsne": tsne.to_html(),
+                        "classifier_data": classifier_data_df.to_html(),
+                    }
+    elif action == "click_graph":
+        # get df used name
+        df_name = request.session.get("df_name")
+        if df_name == "upload": 
+            df_name = request.session.get("df_name_upload_base_name")
+        # get model_name
+        model_name = request.POST.get("model_name")
+
+        # preprocessed_path
+        excel_file_name_preprocessed_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv"
+        )
+
+        excel_file_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
+        )
+
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
+        )
+
+        # pipeline path
+        json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
+
+        # load pipeline data
+        # jsonFile = open(json_path, "r")
+        # pipeline_data = PipelineJSON.load(jsonFile)  # data becomes a dictionary
+        # class_label = pipeline_data["classifier"][model_name]["class_label"]
+        jsonFile = pipeline.PipelineJSON(json_path)
+        class_label = jsonFile.read_from_json(
+            ["classifier", model_name, "class_label"]
+        )
+
+        df = pd.read_csv(excel_file_name_path)
+
+        # Load your saved feature importance from a .sav file
+        feature_importance_df = pd.read_csv(
+            model_name_path + "/feature_importance_df.csv"
+        )
+        # sorted_df = feature_importance_df.sort_values(by="importance", ascending=False)
+
+        # x and y coordinates of the clicked point in tsne
+        x_coord = request.POST["x"]
+        y_coord = request.POST["y"]
+
+        # tsne_projections
+        tsne_projections_path = os.path.join(
+            PIPELINE_PATH
+            + f"{df_name}/"
+            + f"trained_models/{model_name}"
+            + "/tsne_projections.json",
+        )
+
+        # tsne projections of all points (saved during generation of tsne)
+        projections = pd.read_json(tsne_projections_path)
+        projections = projections.values.tolist()
+
+        # projections array is a list of pairs with the (x, y)
+        # [ [], [], [] ... ]
+        # coordinates for a point in tsne. These are actual absolute
+        # coordinates and not SVG.
+        # find the pair of the projection with x and y coordinates matching that of
+        # clicked point coordinates
+        for clicked_id, item in enumerate(projections):
+            if math.isclose(item[0], float(x_coord)) and math.isclose(
+                item[1], float(y_coord)
+            ):
+                break
+
+        # save clicked point projections
+        request.session["clicked_point"] = item
+        # get clicked point row
+        row = df.iloc[[int(clicked_id)]]
+        request.session["cfrow_id"] = clicked_id
+        request.session["cfrow_og"] = row.to_html()
+        context = {
+            "row": row.to_html(index=False),
+            "feature_importance_dict": feature_importance_df.to_dict(orient="records"),
+        }
+    elif action == "cf":
+        # dataframe name
+        df_name = request.session.get("df_name")
+        if df_name == "upload":
+            df_name = request.session.get("df_name_upload_base_name")
+
+        # preprocessed_path
+        excel_file_name_preprocessed_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv"
+        )
+
+        excel_file_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
+        )
+        # which model is being used during that session
+        model_name = request.POST.get("model_name")
+        # path of used model
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}/" + "trained_models/" + f"{model_name}/"
+        )
+        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
+
+        # read preprocessed data
+        if os.path.exists(excel_file_name_preprocessed_path):
+            df = pd.read_csv(excel_file_name_preprocessed_path)
+        else:
+            df = pd.read_csv(excel_file_name_path)
+
+        datasets_types_PipelineJSON_path = os.path.join(
+            PIPELINE_PATH + "/dataset_types_pipeline.json"
+        )
+        datasets_types_pipeline = pipeline.PipelineJSON(
+            datasets_types_PipelineJSON_path
+        )
+        dataset_type = datasets_types_pipeline.read_from_json([df_name])
+
+        if type(dataset_type) is list:
+            dataset_type = dataset_type[0]
+
+        df_id = request.session.get("cfrow_id")
+        if dataset_type == "tabular":
+
+            # get row
+            features_to_vary = json.loads(request.POST.get("features_to_vary"))
+
+            row = df.iloc[[int(df_id)]]
+
+            # not preprocessed
+            notpre_df = pd.read_csv(excel_file_name_path)
+            notpre_row = notpre_df.iloc[[int(df_id)]]
+
+            # if feature_to_vary has a categorical column then I cannot just
+            # pass that to dice since the trained model does not contain the
+            # categorical column but the one-hot-encoded sub-columns
+            features_to_vary = methods.update_column_list_with_one_hot_columns(
+                notpre_df, df, features_to_vary
+            )
+
+            # pipeline path
+            json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
+
+            # load pipeline data
+            jsonFile = pipeline.PipelineJSON(json_path)
+            class_label = jsonFile.read_from_json(
+                ["classifier", model_name, "class_label"]
+            )  # data becomes a dictionary
+
+            # number of counterfactuals
+            # (TBD) input field value as parameter
+            # in ajax
+            num_counterfactuals = 5
+            le = LabelEncoder()
+            notpre_df[class_label] = le.fit_transform(notpre_df[class_label])
+
+            continuous_features = methods.get_continuous_features(df)
+            non_continuous_features = methods.get_non_continuous_features(df)
+
+            # load used classifier
+            clf = joblib.load(model_name_path + model_name + ".sav")
+
+            try:
+                # Set up the executor to run the function in a separate thread
+                with concurrent.futures.ThreadPoolExecutor() as executor:
+                    # Submit the function to the executor
+                    future = executor.submit(
+                        methods.counterfactuals,
+                        row,
+                        clf,
+                        df,
+                        class_label,
+                        continuous_features,
+                        num_counterfactuals,
+                        features_to_vary,
+                    )
+                    # Wait for the result with a timeout of 10 seconds
+                    counterfactuals = future.result(timeout=10)
+                    print("Counterfactuals computed successfully!")
+            except concurrent.futures.TimeoutError:
+                message = (
+                    "It seems like it took more than expected. Refresh and try again..."
+                )
+                context = {"message": message}
+
+            if counterfactuals:
+                cf_df = counterfactuals[0].final_cfs_df
+                counterfactuals[0].final_cfs_df.to_csv(
+                    model_name_path + "counterfactuals.csv", index=False
+                )
+
+                # get coordinates of the clicked point (saved during 'click' event)
+                clicked_point = request.session.get("clicked_point")
+                clicked_point_df = pd.DataFrame(
+                    {
+                        "0": clicked_point[0],
+                        "1": clicked_point[1],
+                        f"{class_label}": row[class_label].astype(str),
+                    }
+                )
+
+                # tSNE
+                cf_df = pd.read_csv(model_name_path + "counterfactuals.csv")
+                model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
+                tsne_path_to_augment = model_name_path + "tsne.sav"
+
+                tsne = methods.generateAugmentedTSNE(
+                    df,
+                    cf_df,
+                    num_counterfactuals,
+                    clicked_point_df,
+                    tsne_path_to_augment,
+                    class_label,
+                )
+
+                tsne.update_layout(
+                    # Modern Legend Design
+                    legend=dict(
+                        x=0.85,
+                        y=0.95,
+                        xanchor="right",
+                        yanchor="top",
+                        bgcolor="rgba(0,0,0,0.05)",  # Transparent black background for a sleek look
+                        bordercolor="rgba(0,0,0,0.1)",  # Soft border for separation
+                        borderwidth=1,
+                        font=dict(
+                            size=12, color="#333"
+                        ),  # Modern grey font color for text
+                    ),
+                    # Tight Margins for a Focused Plot Area
+                    margin=dict(
+                        l=20, r=20, t=40, b=40
+                    ),  # Reduced margins for a cleaner look
+                    # Axis Titles and Labels: Minimalist Design
+                    xaxis=dict(
+                        title_font=dict(
+                            size=14, color="#555"
+                        ),  # Medium grey color for axis title
+                        tickfont=dict(
+                            size=11, color="#777"
+                        ),  # Light grey color for tick labels
+                        showline=True,
+                        linecolor="rgba(0,0,0,0.15)",  # Subtle line color for axis lines
+                        zeroline=False,  # Hide the zero line for a cleaner design
+                        showgrid=False,  # No grid lines for a modern look
+                    ),
+                    yaxis=dict(
+                        title_font=dict(size=14, color="#555"),
+                        tickfont=dict(size=11, color="#777"),
+                        showline=True,
+                        linecolor="rgba(0,0,0,0.15)",
+                        zeroline=False,
+                        showgrid=False,
+                    ),
+                    # Sleek Background Design
+                    plot_bgcolor="white",  # Crisp white background for a modern touch
+                    paper_bgcolor="white",  # Ensure the entire background is uniform
+                    # Title: Modern Font and Centered
+                    title=dict(
+                        text="t-SNE Visualization of Data",
+                        font=dict(
+                            size=18, color="#333", family="Arial, sans-serif"
+                        ),  # Modern font style
+                        x=0.5,
+                        xanchor="center",
+                        yanchor="top",
+                        pad=dict(t=10),  # Padding to give the title breathing space
+                    ),
+                )
+
+                pickle.dump(tsne, open(model_name_path + "tsne_cfs.sav", "wb"))
+
+                context = {
+                    "dataset_type": dataset_type,
+                    "model_name": model_name,
+                    "tsne": tsne.to_html(),
+                    "num_counterfactuals": num_counterfactuals,
+                    "default_counterfactual": "1",
+                    "clicked_point": notpre_row.to_html(),
+                    "counterfactual": cf_df.iloc[[1]].to_html(),
+                }
+
+            else:
+                context = {
+                    "dataset_type": dataset_type,
+                    "model_name": model_name,
+                    "message": "Please try again with different features.",
+                }
+        elif dataset_type == "timeseries":
+            model_name = request.POST["model_name"]
+            model_name_path = os.path.join(
+                PIPELINE_PATH + f"{df_name}/" + "trained_models/" + f"{model_name}/"
+            )
+            path = model_name_path
+            if model_name == "glacier":
+                constraint = request.POST["constraint"]
+                path = os.path.join(
+                    PIPELINE_PATH
+                    + f"{df_name}/"
+                    + "trained_models/"
+                    + f"{model_name}/"
+                    + f"{constraint}/"
+                )
+
+            X_test_path = os.path.join(model_name_path + "X_test.csv")
+            y_test_path = os.path.join(model_name_path + "y_test.npy")
+            y_pred_path = os.path.join(path + "y_pred.npy")
+            X_cf_path = os.path.join(path + "X_cf.npy")
+            cf_pred_path = os.path.join(path + "cf_pred.npy")
+
+            X_test = pd.read_csv(X_test_path)
+            y_test = np.load(y_test_path)
+            y_pred = np.load(y_pred_path)
+            X_cf = np.load(X_cf_path)
+            cf_pred = np.load(cf_pred_path)
+
+            if model_name != "glacier":
+                scaler = joblib.load(model_name_path + "/min_max_scaler.sav")
+                X_test = pd.DataFrame(scaler.inverse_transform(X_test))
+                X_cf = scaler.inverse_transform(X_cf)
+
+            fig = methods.ecg_plot_counterfactuals(
+                int(df_id), X_test, y_test, y_pred, X_cf, cf_pred
+            )
+
+            context = {
+                "df_name": df_name,
+                "fig": fig.to_html(),
+                "dataset_type": dataset_type,
+            }
+    elif action == "compute_cf":
+        model_name = request.POST.get("model_name")
+        if model_name == "glacier":
+            constraint_type = request.POST.get("constraint")
+            w_value = request.POST.get("w_value")
+            df_name = request.session.get("df_name")
+
+            model_name_path = os.path.join(
+                PIPELINE_PATH + f"{df_name}/" + "trained_models/" + f"{model_name}/"
+            )
+            model_name_path_constraint = model_name_path + f"{constraint_type}/"
+            if not os.path.exists(model_name_path_constraint):
+                os.makedirs(model_name_path_constraint)
+
+            # https://github.com/wildboar-foundation/wildboar/blob/master/docs/guide/explain/counterfactuals.rst#id27
+            classifier = joblib.load(model_name_path + "/classifier.sav")
+
+            # pipeline path
+            json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
+            # load pipeline data
+            jsonFile = pipeline.PipelineJSON(json_path)
+            autoencoder = jsonFile.read_from_json(
+                ["classifier", model_name, "autoencoder"]
+            )
+
+            experiment_dict = {"constraint": constraint_type, "w_value": w_value}
+
+            # if "experiments" in pipeline_data["classifier"][model_name]:
+            #     # if there exists key with value "experiments"
+            #     keys = pipeline_data["classifier"][model_name]["experiments"].keys()
+            #     last_key_int = int(list(keys)[-1])
+            #     last_key_int_incr_str = str(last_key_int + 1)
+            # else:
+            #     last_key_int_incr_str = "0"
+            #     experiment_key_dict = {"experiments": {last_key_int_incr_str: {}}}
+            #     pipeline_data["classifier"][model_name].update(experiment_key_dict)
+
+            # outter_dict = {last_key_int_incr_str: experiment_dict}
+            # pipeline_data["classifier"][model_name]["experiments"].update(outter_dict)
+
+            if jsonFile.key_exists("experiments"):
+                keys = jsonFile.read_from_json(
+                    ["classifier", model_name, "experiments"]
+                ).keys()
+                last_key_int = int(list(keys)[-1])
+                last_key_int_incr_str = str(last_key_int + 1)
+            else:
+                last_key_int_incr_str = "0"
+                experiment_key_dict = {"experiments": {last_key_int_incr_str: {}}}
+                jsonFile.update_json(
+                    ["classifier", model_name], experiment_key_dict
+                )
+
+            outter_dict = {last_key_int_incr_str: experiment_dict}
+            jsonFile.update_json(
+                ["classifier", model_name, "experiments"], outter_dict
+            )
+
+            if autoencoder == "Yes":
+                autoencoder = joblib.load(model_name_path + "/autoencoder.sav")
+            else:
+                autoencoder = None
+
+            gc_compute_counterfactuals(
+                model_name_path,
+                model_name_path_constraint,
+                constraint_type,
+                [0.0001],
+                float(w_value),
+                0.5,
+                classifier,
+                autoencoder,
+            )
+            path = model_name_path_constraint
+            context = {"experiment_dict": experiment_dict}
+    elif action == "counterfactual_select":
+
+        # if <select> element is used, and a specific counterfactual
+        # is inquired to be demonstrated:
+        df_name = request.session.get("df_name")
+        df_name = request.session.get("df_name")
+        if df_name == "upload":
+            df_name = request.session.get("df_name_upload_base_name")
+
+        model_name = request.session.get("model_name")
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
+        )
+
+        excel_file_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
+        )
+
+        # pipeline path
+        json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
+        # load pipeline data
+        jsonFile = pipeline.PipelineJSON(json_path)
+
+        class_label = jsonFile.read_from_json(
+            ["classifier", model_name, "class_label"]
+        )
+
+        # decode counterfactual to original values
+        preprocessing_list = jsonFile.read_from_json(
+            ["classifier", model_name, "preprocessing"]
+        )
+
+        df = pd.read_csv(excel_file_name_path)
+        cf_df = pd.read_csv(model_name_path + "/counterfactuals.csv")
+        cf_id = request.POST["cf_id"]
+        row = cf_df.iloc[[int(cf_id)]]
+
+        if "id" in df.columns:
+            df = df.drop("id", axis=1)
+
+        dec_row = methods.decode_cf(
+            df, row, class_label, model_name_path, preprocessing_list
+        )
+
+        fig = joblib.load(model_name_path + "/tsne_cfs.sav")
+
+        # tsne stores data for each class in different data[]
+        # index.
+        # data[0] is class A
+        # data[1] is class B
+        # ...
+        # data[n-2] is counterfactuals
+        # data[n-1] is clicked point
+
+        fig_data_array_length = len(fig.data)
+        for i in range(fig_data_array_length - 2):
+            fig.data[i].update(
+                opacity=0.3,
+            )
+
+        # last one, data[n-1], contains clicked point
+        l = fig.data[fig_data_array_length - 1]
+        clicked_id = -1
+        for clicked_id, item in enumerate(list(zip(l.x, l.y))):
+            if math.isclose(
+                item[0], request.session.get("clicked_point")[0]
+            ) and math.isclose(item[1], request.session.get("clicked_point")[1]):
+                break
+
+        # data[n-2] contains counterfactuals
+        fig.data[fig_data_array_length - 2].update(
+            selectedpoints=[int(cf_id)],
+            unselected=dict(
+                marker=dict(
+                    opacity=0.3,
+                )
+            ),
+        )
+
+        fig.data[fig_data_array_length - 1].update(
+            selectedpoints=[clicked_id],
+            unselected=dict(
+                marker=dict(
+                    opacity=0.3,
+                )
+            ),
+        )
+
+        if "id" in df.columns:
+            df = df.drop("id", axis=1)
+
+        # order the columns
+        dec_row = dec_row[df.columns]
+        clicked_point_row_id = request.session.get("cfrow_id")
+
+        # return only the differences
+        dec_row = dec_row.reset_index(drop=True)
+        df2 = df.iloc[[int(clicked_point_row_id)]].reset_index(drop=True)
+        difference = dec_row.loc[
+            :,
+            [
+                methods.compare_values(dec_row[col].iloc[0], df2[col].iloc[0])
+                for col in dec_row.columns
+            ],
+        ]
+
+        merged_df = pd.concat([df2[difference.columns], difference], ignore_index=True)
+
+        context = {
+            "row": merged_df.to_html(index=False),
+            "fig": fig.to_html(),
+        }
+    elif action == "class_label_selection":
+
+        df_name = request.session.get("df_name")
+
+        if df_name == "upload":
+            df_name = request.session["df_name_upload_base_name"]
+
+        datasets_types_PipelineJSON_path = os.path.join(
+            PIPELINE_PATH + "/dataset_types_pipeline.json"
+        )
+
+        dataset_type_json = pipeline.PipelineJSON(datasets_types_PipelineJSON_path)
+
+        dataset_type = dataset_type_json.read_from_json([df_name])
+        
+        if isinstance(dataset_type, list):
+            dataset_type = dataset_type[0]
+            
+        # preprocessed_path
+        excel_file_name_preprocessed_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv"
+        )
+
+        excel_file_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
+        )
+
+        # which model is being used during that session
+        model_name = request.POST.get("model_name")
+
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
+        )
+        
+        X_test_path = os.path.join(
+            PIPELINE_PATH
+            + f"{df_name}"
+            + "/trained_models"
+            + f"/{model_name}"
+            + "/X_test.csv"
+        )
+        y_test_path = os.path.join(
+            PIPELINE_PATH
+            + f"{df_name}"
+            + "/trained_models"
+            + f"/{model_name}"
+            + "/y_test.npy"
+        )
+
+        X_test = pd.read_csv(X_test_path)
+        y_test = np.load(y_test_path)
+
+        if model_name != "glacier":
+            scaler = joblib.load(model_name_path + "/min_max_scaler.sav")
+            X_test = pd.DataFrame(scaler.inverse_transform(X_test))
+                
+        if dataset_type == "timeseries":
+            class_label = request.POST.get("class_label")
+            cfrow_id = request.POST.get("cfrow_id")
+
+            class_label = (
+                int(class_label)
+                if class_label.isdigit()
+                else (
+                    float(class_label)
+                    if class_label.replace(".", "", 1).isdigit()
+                    else class_label
+                )
+            )
+
+            fig, index = methods.get_ecg_entry(
+                X_test, y_test, int(cfrow_id), class_label
+            )
+            request.session["cfrow_id"] = index
+            request.session["class_label"] = class_label
+            context = {"fig": fig.to_html(), "dataset_type": dataset_type}
+    return HttpResponse(json.dumps(context), status=status)
\ No newline at end of file
diff --git a/base/handlers/ajaxHomeHandler.py b/base/handlers/ajaxHomeHandler.py
new file mode 100644
index 000000000..adcab037a
--- /dev/null
+++ b/base/handlers/ajaxHomeHandler.py
@@ -0,0 +1,437 @@
+import base.pipeline as pipeline
+import os
+from dict_and_html import *
+from .. import methods
+from ..methods import PIPELINE_PATH
+from django.core.files.storage import FileSystemStorage
+import random
+import base.pipeline as pipeline
+import shutil
+import json
+from django.shortcuts import HttpResponse
+
def handler(action, request):
    """Dispatch home-page AJAX actions and return a JSON ``HttpResponse``.

    Parameters
    ----------
    action : str
        Name of the AJAX action, e.g. ``"upload_dataset"``, ``"dataset"``,
        ``"timeseries-dataset"`` or ``"stat"``.
    request : django.http.HttpRequest
        The incoming request. ``POST``, ``FILES`` and the session are read,
        and the session is updated as a side effect (active dataset name,
        cached figures, etc.).

    Returns
    -------
    django.http.HttpResponse
        The JSON-serialized context for the requested action.
    """
    status = 200
    # Fallback so an unrecognized action still returns valid (empty) JSON
    # instead of raising NameError at the final json.dumps call.
    context = {}
    if action == "upload_dataset":
        # Save an uploaded CSV under PIPELINE_PATH/<base_name>/ and register
        # its dataset type in dataset_types_pipeline.json.
        uploaded_file = request.FILES["excel_file"]  # Get the file from request.FILES
        dataset_type = request.POST.get("dataset_type")

        # Remember that the active dataset came from an upload.
        request.session["df_name"] = "upload"
        name = uploaded_file.name

        # Split the name and extension
        base_name, extension = os.path.splitext(name)
        request.session["df_name_upload_base_name"] = base_name
        request.session["df_name_upload_extension"] = extension

        df_name = base_name

        df_name_path = os.path.join(
            PIPELINE_PATH + f"{base_name}",
        )
        if not os.path.exists(df_name_path):
            os.makedirs(df_name_path)

        # Store the file inside the dataset's own directory.
        fs = FileSystemStorage(location=df_name_path)
        fs.save(uploaded_file.name, uploaded_file)

        request.session["excel_file_name"] = df_name_path

        excel_file_name_path = os.path.join(PIPELINE_PATH + f"{base_name}" + "/" + name)

        df = methods.get_dataframe(excel_file_name_path)

        # Update the dataset-types registry and collect all uploaded datasets.
        datasets_types_PipelineJSON_path = os.path.join(
            PIPELINE_PATH + "dataset_types_pipeline.json"
        )
        jsonFile = pipeline.PipelineJSON(datasets_types_PipelineJSON_path)

        jsonFile.append_to_json({df_name: [dataset_type, "uploaded"]})
        dataset_type = jsonFile.read_from_json([df_name])[0]
        uploaded_files = jsonFile.get_keys_with_value("uploaded")

        # Column names containing spaces break later lookups; normalise them
        # and rewrite the CSV on disk.
        if df.columns.str.contains(" ").any():
            df.columns = df.columns.str.replace(" ", "_")
            os.remove(excel_file_name_path)
            df.to_csv(excel_file_name_path, index=None)
            df = methods.get_dataframe(excel_file_name_path)

        # An "id" column is an artifact, not a feature; drop it persistently.
        if "id" in df.columns:
            df.drop(["id"], axis=1, inplace=True)
            df.to_csv(excel_file_name_path, index=False)

        context = {"dataset_type": dataset_type, "df_name": df_name}
        context.update({"uploaded_files": uploaded_files})

        if dataset_type == "timeseries":
            # Assumes the last column holds the class label — TODO confirm
            # against the expected timeseries CSV layout.
            target_labels = list(df.iloc[:, -1].unique())
            context.update({"target_labels": target_labels})

        request.session["context"] = context
    elif action == "delete_uploaded_file":
        # Remove an uploaded dataset from disk and from the types registry.
        dataset_name = request.POST.get("dataset_name")
        dataset_path = os.path.join(PIPELINE_PATH + f"/{dataset_name}")

        # pipeline path
        datasets_types_pipeline_path = os.path.join(
            PIPELINE_PATH + "/dataset_types_pipeline.json"
        )
        # load pipeline data and drop the entry for this dataset
        datasets_types_pipeline = pipeline.PipelineJSON(datasets_types_pipeline_path)
        datasets_types_pipeline.delete_key([dataset_name])

        request.FILES["excel_file"] = None
        request.session["df_name"] = None

        # check if there exist uploaded files
        uploaded_files = datasets_types_pipeline.get_keys_with_value("uploaded")
        if uploaded_files == []:
            uploaded_files = None

        # Best-effort removal of the dataset directory; a missing directory
        # is not fatal for the response.
        try:
            shutil.rmtree(dataset_path)
        except Exception as error:
            print(error)

        context = {"uploaded_files": uploaded_files}
    elif action == "dataset" or action == "uploaded_datasets":

        # action to add dataset when from radio button click
        name = request.POST.get("df_name")
        request.session["df_name"] = name

        if action == "dataset" and name == "upload":
            # User picked the "upload" radio button; the actual file comes in
            # a follow-up "upload_dataset" request.
            request.session["upload"] = 1
            context = {"upload": 1}
        else:

            if name == "timeseries":
                name = request.session.get("df_name")

            excel_file_name_path = os.path.join(
                PIPELINE_PATH + f"{name}" + "/" + name + ".csv",
            )

            datasets_types_PipelineJSON_path = os.path.join(
                PIPELINE_PATH + "/dataset_types_pipeline.json"
            )
            datasets_types_PipelineJSON = pipeline.PipelineJSON(
                datasets_types_PipelineJSON_path
            )
            dataset_type = datasets_types_PipelineJSON.read_from_json([name])
            uploaded_files = datasets_types_PipelineJSON.get_keys_with_value(
                "uploaded"
            )

            # Uploaded datasets store [type, "uploaded"]; unwrap to the type.
            if request.POST.get("df_name") == "upload" or action == "uploaded_datasets":
                if isinstance(dataset_type, list):
                    dataset_type = dataset_type[0]

            if request.POST.get("df_name") != "upload" or action == "uploaded_datasets":
                if os.path.exists(excel_file_name_path):
                    df = methods.get_dataframe(excel_file_name_path)
                    df.columns = df.columns.str.replace(" ", "_")
                    request.session["excel_file_name"] = excel_file_name_path

                    # Create the per-dataset pipeline.json on first access.
                    json_path = os.path.join(
                        PIPELINE_PATH + f"{name}" + "/pipeline.json"
                    )
                    if not os.path.exists(json_path):
                        PipelineJSON = pipeline.PipelineJSON(json_path)
                        PipelineJSON.append_to_json({"name": name})

                    if "tabular" == dataset_type:

                        if "id" in df.columns:
                            df.drop(["id"], axis=1, inplace=True)
                            df.to_csv(excel_file_name_path, index=False)

                        # tabular datasets: pick two default features for the
                        # initial scatter plot and a random candidate label
                        features = df.columns
                        feature1 = df.columns[3]
                        feature2 = df.columns[2]
                        label = ""

                        labels = list(
                            df.select_dtypes(include=["object", "category"]).columns
                        )
                        # Find binary columns (columns with only two unique values, including numerics)
                        binary_columns = [
                            col for col in df.columns if df[col].nunique() == 2
                        ]

                        # Combine categorical and binary columns into one list
                        labels = list(set(labels + binary_columns))
                        label = random.choice(labels)
                        fig = methods.stats(
                            excel_file_name_path,
                            dataset_type,
                            feature1=feature1,
                            feature2=feature2,
                            label=label,
                        )

                        # Cache the tabular view in the session for reloads.
                        request.session["data_to_display"] = df[:10].to_html()
                        request.session["features"] = list(features)
                        request.session["feature1"] = feature1
                        request.session["feature2"] = feature2
                        request.session["labels"] = list(labels)
                        request.session["curlabel"] = label
                        request.session["fig"] = fig

                        context = {
                            "dataset_type": dataset_type,
                            "data_to_display": df[:10].to_html(),
                            "fig": fig,
                            "features": list(features),  # error if not a list
                            "feature1": feature1,
                            "feature2": feature2,
                            "labels": list(labels),
                            "curlabel": label,
                            "uploaded_files": list(uploaded_files),
                        }
                    elif dataset_type == "timeseries":

                        json_path = os.path.join(
                            PIPELINE_PATH, f"{name}" + "/pipeline.json"
                        )
                        jsonFile = pipeline.PipelineJSON(json_path)

                        # positive/negative class labels recorded earlier
                        pos = jsonFile.read_from_json(["pos"])
                        neg = jsonFile.read_from_json(["neg"])

                        fig, fig1 = methods.stats(
                            excel_file_name_path,
                            dataset_type,
                            int(pos),
                            int(neg),
                            None,
                            None,
                            None,
                            name=name,
                        )
                        # timeseries
                        request.session["fig"] = fig
                        request.session["fig1"] = fig1
                        context = {
                            "fig": fig,
                            "fig1": fig1,
                            "dataset_type": dataset_type,
                        }
                else:
                    context = {"uploaded_files": list(uploaded_files)}
            else:
                context = {}

            if (
                action == "uploaded_datasets"
                and "upload" in request.session
                and request.session["upload"] == 1
            ):
                request.session["upload"] = 1
                context.update({"upload": 1, "df_name": name})
            else:
                request.session["upload"] = 0
    elif action == "dataset_charts":
        # Only record which dataset the charts page should use.
        df_name = request.POST.get("df_name")
        request.session["df_name"] = df_name
        context = {}
    elif action == "select_class_labels_for_uploaded_timeseries":
        # Persist the user-chosen positive/negative class labels for an
        # uploaded timeseries dataset into its pipeline.json.
        name = request.session["df_name"]

        if name == "upload":
            name = request.session["df_name_upload_base_name"]

        pos = request.POST.get("positive_label")
        neg = request.POST.get("negative_label")

        json_path = os.path.join(PIPELINE_PATH, f"{name}" + "/pipeline.json")
        jsonFile = pipeline.PipelineJSON(json_path)

        jsonFile.append_to_json({"name": name})
        jsonFile.append_to_json({"pos": pos})
        jsonFile.append_to_json({"neg": neg})

        context = {}
    elif action == "timeseries-dataset":

        # action to add dataset when from radio button click
        name = request.POST.get("timeseries_dataset")

        # add name of used dataframe in session for future use
        request.session["df_name"] = name
        excel_file_name_path = os.path.join(
            PIPELINE_PATH + f"{name}" + "/" + name + ".csv",
        )
        datasets_types_PipelineJSON_path = os.path.join(
            PIPELINE_PATH + "/dataset_types_pipeline.json"
        )
        datasets_types_PipelineJSON = pipeline.PipelineJSON(
            datasets_types_PipelineJSON_path
        )
        if os.path.exists(excel_file_name_path):

            dataset_type = datasets_types_PipelineJSON.read_from_json([name])

            df = methods.get_dataframe(excel_file_name_path)
            df.columns = df.columns.str.replace(" ", "_")
            request.session["excel_file_name"] = excel_file_name_path

            # find the available pre trained datasets
            # check the pipeline file
            json_path = os.path.join(PIPELINE_PATH, f"{name}" + "/pipeline.json")
            jsonFile = pipeline.PipelineJSON(json_path)

            preprocessing_info = {"name": name}
            dataset_camel = methods.convert_to_camel_case(name)
            if "Ecg" in dataset_camel:
                dataset_camel = dataset_camel.replace("Ecg", "ECG")
            experiment = methods.fetch_line_by_dataset(
                PIPELINE_PATH + "/glacier_experiments.txt",
                dataset_camel,
            )
            if experiment is None:
                # BUGFIX: previously fell through and raised NameError on
                # the unset `stripped_arguments`; answer with an empty
                # context instead when no experiment line is found.
                context = {}
            else:
                stripped_arguments = methods.extract_arguments_from_line(experiment)

                # Argument positions 1 and 2 of the experiment line hold the
                # positive and negative class labels respectively.
                indices_to_keys = {
                    1: "pos",
                    2: "neg",
                }

                # Create a dictionary by fetching items from the list at the
                # specified indices
                inner_dict = {
                    key: stripped_arguments[index]
                    for index, key in indices_to_keys.items()
                }
                preprocessing_info.update(inner_dict)
                jsonFile.append_to_json(preprocessing_info)

                pos = inner_dict["pos"]
                neg = inner_dict["neg"]
                fig, fig1 = methods.stats(
                    excel_file_name_path, dataset_type, int(pos), int(neg), name=name
                )
                # timeseries
                request.session["fig"] = fig
                request.session["fig1"] = fig1
                context = {"fig": fig, "fig1": fig1, "dataset_type": dataset_type}
        else:
            context = {}
    elif action == "stat":
        # Recompute the statistics figure for the active dataset with the
        # feature/label selection posted by the client.
        name = request.session.get("df_name")
        datasets_types_PipelineJSON_path = os.path.join(
            PIPELINE_PATH + "/dataset_types_pipeline.json"
        )
        jsonFile = pipeline.PipelineJSON(datasets_types_PipelineJSON_path)
        dataset_type = jsonFile.read_from_json([name])

        if isinstance(dataset_type, list):
            dataset_type = dataset_type[0]

        file_path = os.path.join(
            PIPELINE_PATH + f"{name}" + "/" + name + ".csv",
        )
        # feature1 is required either way; feature2/label only for tabular.
        feature1 = request.POST.get("feature1")
        if dataset_type == "tabular":
            feature2 = request.POST.get("feature2")
            label = request.POST.get("label")
        else:
            feature2 = []
            label = []

        fig = methods.stats(
            file_path,
            dataset_type,
            None,
            None,
            feature1=feature1,
            feature2=feature2,
            label=label,
        )
        context = {
            "fig": fig,
        }
    return HttpResponse(json.dumps(context), status=status)
\ No newline at end of file
diff --git a/base/handlers/ajaxTrainHandler.py b/base/handlers/ajaxTrainHandler.py
new file mode 100644
index 000000000..38dc1d782
--- /dev/null
+++ b/base/handlers/ajaxTrainHandler.py
@@ -0,0 +1,464 @@
+import base.pipeline as pipeline
+import pickle, os
+import pandas as pd
+import json
+from sklearn.preprocessing import LabelEncoder
+from dict_and_html import *
+from .. import methods
+from ..methods import PIPELINE_PATH
+import numpy as np
+from collections import defaultdict
+import base.pipeline as pipeline
+import json
+from django.shortcuts import HttpResponse
+
+def handler(action, request):
+    """Handle training-related AJAX actions.
+
+    Args:
+        action: one of "train", "delete_pre_trained" or "discard_model".
+        request: the Django request; POST parameters and session keys
+            ("df_name", "model_name", ...) are read and updated.
+
+    Returns:
+        HttpResponse whose body is the JSON-encoded context dict;
+        status is 200 on success and 400 on a training failure.
+    """
+    status = 200
+    if action == "train":
+        # train a new model
+        # parameters sent via ajax
+        model_name = request.POST.get("model_name")
+        df_name = request.session.get("df_name")
+
+        # dataframe name
+        if df_name == "upload":
+            df_name = request.session.get("df_name_upload_base_name")
+
+        request.session["model_name"] = model_name
+        test_set_ratio = ""
+        if "test_set_ratio" in request.POST:
+            test_set_ratio = request.POST.get("test_set_ratio")
+
+        # dataset-type registry shared by the other ajax handlers
+        datasets_types_PipelineJSON_path = os.path.join(
+            PIPELINE_PATH + "/dataset_types_pipeline.json"
+        )
+        jsonFile = pipeline.PipelineJSON(datasets_types_PipelineJSON_path)
+        dataset_type = jsonFile.read_from_json([df_name])
+
+        if type(dataset_type) is list:
+            dataset_type = dataset_type[0]
+
+        # NOTE(review): array_preprocessing is referenced below even when
+        # this key is absent from POST (tabular / non-glacier timeseries
+        # branches) — confirm the client always sends it there, otherwise
+        # building preprocessing_info raises NameError.
+        if "array_preprocessing" in request.POST:
+            array_preprocessing = request.POST.get("array_preprocessing")
+
+        if dataset_type == "tabular":
+            class_label = request.POST.get("class_label")
+            preprocessing_info = {
+                "preprocessing": array_preprocessing,
+                "test_set_ratio": test_set_ratio,
+                "explainability": {"technique": "dice"},
+                "class_label": class_label,
+            }
+        elif dataset_type == "timeseries":
+            if model_name != "glacier":
+                preprocessing_info = {
+                    "preprocessing": array_preprocessing,
+                    "test_set_ratio": test_set_ratio,
+                    "explainability": {"technique": model_name},
+                }
+            else:
+                # Path to the Bash script
+                autoencoder = request.POST.get("autoencoder")
+                preprocessing_info = {
+                    "autoencoder": autoencoder,
+                    "explainability": {"technique": model_name},
+                }
+
+        # absolute excel_file_name_path
+        excel_file_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
+        )
+
+        # load paths
+        # absolute excel_file_preprocessed_path
+        excel_file_name_preprocessed_path = os.path.join(
+            PIPELINE_PATH,
+            f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv",
+        )
+
+        json_path = os.path.join(PIPELINE_PATH + f"{df_name}" + "/pipeline.json")
+        jsonFile = pipeline.PipelineJSON(json_path)
+        # save the plots for future use
+        # folder path: pipelines/<dataset name>/trained_models/<model_name>/
+
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
+        )
+
+        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
+
+        if os.path.exists(excel_file_name_preprocessed_path) == True:
+            # if preprocessed_file exists
+            # delete it and do preprocessing again
+            # maybe should optimize it for cases
+            # where the preprocessing is the same with
+            # the one applied on the existing file
+            os.remove(excel_file_name_preprocessed_path)
+
+        # generate filename
+        # NOTE(review): idx indexes the first "." in the full *path*, but is
+        # then used to slice df_name — verify this produces the intended
+        # "<name>_preprocessed.csv" value in all cases.
+        idx = excel_file_name_path.index(".")
+        excel_file_name_preprocessed = (
+            df_name[:idx] + "_preprocessed" + excel_file_name_path[idx:]
+        )
+
+        # save file for preprocessing
+        preprocess_df = pd.read_csv(excel_file_name_path)
+        request.session["excel_file_name_preprocessed"] = excel_file_name_preprocessed
+
+        # make the dir
+        if not os.path.exists(model_name_path):
+            os.makedirs(model_name_path)
+
+        try:
+            if dataset_type == "tabular":
+                # encode the class label to integer codes for training
+                le = LabelEncoder()
+                preprocess_df[class_label] = le.fit_transform(
+                    preprocess_df[class_label]
+                )
+
+                if "array_preprocessing" in request.POST:
+                    preprocess_df = methods.preprocess(
+                        preprocess_df,
+                        array_preprocessing,
+                        excel_file_name_path,
+                        dataset_type,
+                        model_name_path,
+                        class_label,
+                    )
+            elif dataset_type == "timeseries":
+
+                # remap the dataset's positive/negative labels onto 1/0
+                pos = jsonFile.read_from_json(["pos"])
+                neg = jsonFile.read_from_json(["neg"])
+                pos_label, neg_label = 1, 0
+
+                if pos != pos_label:
+                    preprocess_df.iloc[:, -1] = preprocess_df.iloc[:, -1].apply(
+                        lambda x: pos_label if x == int(pos) else x
+                    )
+                if neg != neg_label:
+                    preprocess_df.iloc[:, -1] = preprocess_df.iloc[:, -1].apply(
+                        lambda x: neg_label if x == int(neg) else x
+                    )
+                if "array_preprocessing" in request.POST:
+                    preprocess_df = methods.preprocess(
+                        preprocess_df,
+                        array_preprocessing,
+                        excel_file_name_path,
+                        dataset_type,
+                        model_name_path,
+                    )
+
+            pca = methods.generatePCA(preprocess_df)
+
+            # TSNE
+            if dataset_type == "tabular":
+                tsne, projections = methods.generateTSNE(
+                    preprocess_df, dataset_type, class_label
+                )
+            else:
+                tsne, projections = methods.generateTSNE(preprocess_df, dataset_type)
+
+            if dataset_type == "tabular":
+                # training
+                feature_importance, classification_report, importance_dict = (
+                    methods.training(
+                        preprocess_df,
+                        model_name,
+                        float(test_set_ratio),
+                        class_label,
+                        dataset_type,
+                        df_name,
+                        model_name_path,
+                    )
+                )
+
+                # feature importance on the original categorical columns (if they exist)
+                df = pd.read_csv(excel_file_name_path)
+                df = df.drop(class_label, axis=1)
+
+                # Initialize a dictionary to hold aggregated feature importances
+                categorical_columns = methods.get_categorical_features(df)
+
+                if categorical_columns != []:
+                    aggregated_importance = {}
+                    encoded_columns = methods.update_column_list_with_one_hot_columns(
+                        df, preprocess_df, df.columns
+                    )
+
+                    # map each one-hot encoded column back to its original feature
+                    feature_mapping = defaultdict(list)
+                    for col in encoded_columns:
+                        for original_col in categorical_columns:
+                            if col.startswith(original_col + "_"):
+                                feature_mapping[original_col].append(col)
+                                break
+                        else:
+                            feature_mapping[col].append(
+                                col
+                            )  # If no match, map to itself
+
+                    # Aggregate the feature importances
+                    # (note: the loop target shadows encoded_columns from above)
+                    for original_feature, encoded_columns in feature_mapping.items():
+                        if encoded_columns:  # Check if encoded_columns is not empty
+                            if original_feature not in encoded_columns:
+                                aggregated_importance[original_feature] = np.sum(
+                                    [
+                                        importance_dict.get(col, 0)
+                                        for col in encoded_columns
+                                    ]
+                                )
+                            else:
+                                aggregated_importance[original_feature] = (
+                                    importance_dict.get(original_feature, 0)
+                                )
+
+                    importance_df = pd.DataFrame(
+                        {
+                            "feature": list(aggregated_importance.keys()),
+                            "importance": list(aggregated_importance.values()),
+                        }
+                    )
+
+                    importance_df.to_csv(
+                        model_name_path + "/feature_importance_df.csv", index=None
+                    )
+                else:
+                    # if no categorical columns
+                    # Combine feature names with their respective importance values
+                    feature_importance_df = pd.DataFrame(
+                        {
+                            "feature": importance_dict.keys(),
+                            "importance": importance_dict.values(),
+                        }
+                    )
+
+                    feature_importance_df.to_csv(
+                        model_name_path + "/feature_importance_df.csv", index=None
+                    )
+
+                # save some files
+                pickle.dump(
+                    classification_report,
+                    open(model_name_path + "/classification_report.sav", "wb"),
+                )
+                pickle.dump(
+                    feature_importance,
+                    open(model_name_path + "/feature_importance.sav", "wb"),
+                )
+                pickle.dump(le, open(model_name_path + "/label_encoder.sav", "wb"))
+
+                context = {
+                    "dataset_type": dataset_type,
+                    "pca": pca.to_html(),
+                    "class_report": classification_report.to_html(),
+                    "feature_importance": feature_importance.to_html(),
+                }
+            elif dataset_type == "timeseries":
+
+                path = model_name_path
+                # glacier experiment lines are keyed by CamelCase dataset name
+                dataset_camel = methods.convert_to_camel_case(df_name)
+                if "Ecg" in dataset_camel:
+                    dataset_camel = dataset_camel.replace("Ecg", "ECG")
+
+                experiment = methods.fetch_line_by_dataset(
+                    PIPELINE_PATH + "/glacier_experiments.txt",
+                    dataset_camel,
+                )
+
+                # NOTE(review): stripped_arguments stays unbound when no
+                # experiment line matches; the glacier call below would then
+                # raise NameError — confirm a match is guaranteed.
+                if experiment is not None:
+                    stripped_arguments = methods.extract_arguments_from_line(experiment)
+
+                if model_name == "glacier":
+                    classification_report = methods.training(
+                        preprocess_df,
+                        model_name,
+                        float(test_set_ratio) if test_set_ratio != "" else 0,
+                        "",
+                        dataset_type,
+                        df_name,
+                        path,
+                        autoencoder,
+                        stripped_arguments,
+                    )
+                else:
+                    classification_report = methods.training(
+                        preprocess_df,
+                        model_name,
+                        float(test_set_ratio) if test_set_ratio != "" else 0,
+                        "",
+                        dataset_type,
+                        df_name,
+                        path,
+                    )
+
+                pickle.dump(
+                    classification_report,
+                    open(path + "/classification_report.sav", "wb"),
+                )
+                
+                context = {
+                    "dataset_type": dataset_type,
+                    "pca": pca.to_html(),
+                    "tsne": tsne.to_html(),
+                    "class_report": classification_report.to_html(),
+                }
+
+            # save the plots
+            pickle.dump(tsne, open(model_name_path + "/tsne.sav", "wb"))
+            pickle.dump(pca, open(model_name_path + "/pca.sav", "wb"))
+
+            # save projections file for future use
+            with open(model_name_path + "/tsne_projections.json", "w") as f:
+                json.dump(projections.tolist(), f, indent=2)
+
+            # record this classifier's settings in the pipeline JSON
+            if jsonFile.key_exists("classifier"):
+                temp_json = {model_name: preprocessing_info}
+                jsonFile.update_json(["classifier"], temp_json)
+            else:
+                temp_jason = {
+                    "preprocessed_name": df_name + "_preprocessed.csv",
+                    "classifier": {model_name: preprocessing_info},
+                }
+                jsonFile.append_to_json(temp_jason)
+                        
+            classifier_data = jsonFile.read_from_json(["classifier", model_name])
+            classifier_data_html = dict_and_html(classifier_data)
+            context.update({"classifier_data": classifier_data_html})
+            preprocess_df.to_csv(excel_file_name_preprocessed_path, index=False)
+            status = 200
+
+        # on any failure: drop the partially-created model dir and report 400
+        except FileNotFoundError as e:
+            methods.remove_dir_and_empty_parent(model_name_path)
+            context = methods.format_error_context(
+                e, "File error. Please check if all required files are available."
+            )
+            status = 400
+
+        except PermissionError as e:
+            methods.remove_dir_and_empty_parent(model_name_path)
+            context = methods.format_error_context(
+                e, "Permission error. Ensure appropriate file permissions."
+            )
+            status = 400
+
+        except KeyError as e: 
+            methods.remove_dir_and_empty_parent(model_name_path)
+            context = methods.format_error_context(
+                e, f"Key error. Missing expected key {str(e)}. Verify dataset and configuration settings."
+            )
+            status = 400
+
+        except ValueError as e:
+            methods.remove_dir_and_empty_parent(model_name_path)
+            context = methods.format_error_context(
+                e, "Data error. Please verify the data format and preprocessing steps."
+            )
+            status = 400
+
+        except TypeError as e:
+            methods.remove_dir_and_empty_parent(model_name_path)
+            context = methods.format_error_context(
+                e, "Type error. Check for data type compatibility in operations."
+            )
+            status = 400
+
+        except Exception as e:
+            methods.remove_dir_and_empty_parent(model_name_path)
+            context = methods.format_error_context(
+                e, "An unexpected error occurred. Please review the code and data."
+            )
+            status = 400
+    elif action == "delete_pre_trained":
+        
+        # remove a previously trained model: its artifacts dir, the
+        # preprocessed CSV, and its entry in pipeline.json
+        df_name = request.session["df_name"]
+        model_name = request.POST.get("model_name")
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
+        )
+        
+        print(model_name_path)
+        
+        excel_file_name_preprocessed_path = os.path.join(
+            PIPELINE_PATH,
+            f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv",
+        )
+        try:
+            # Check if the file exists
+            if os.path.exists(excel_file_name_preprocessed_path):
+                # Delete the file
+                os.remove(excel_file_name_preprocessed_path)
+                # print(f"File '{excel_file_name_preprocessed_path}' has been deleted successfully.")
+            else:
+                print(f"File '{excel_file_name_preprocessed_path}' does not exist.")
+        except Exception as e:
+            print(f"An error occurred while deleting the file: {e}")
+            
+        json_path = os.path.join(PIPELINE_PATH + f"{df_name}" + "/pipeline.json")
+        jsonFile = pipeline.PipelineJSON(json_path)
+        jsonFile.delete_key(["classifier", model_name])
+        
+        methods.remove_dir_and_empty_parent(model_name_path)
+        # load paths
+        # absolute excel_file_preprocessed_path
+
+        if not jsonFile.key_exists("classifier"):
+            # pre trained models do not exist
+            # check if dataset directory exists
+            df_dir = os.path.join(PIPELINE_PATH + f"{df_name}")
+            if not os.path.exists(df_dir):
+                df_name = None
+
+            context = {
+                "df_name": df_name,
+                "available_pretrained_models_info": [],
+            }
+        else:
+            # if it exists
+            # check the section of "classifiers"
+            # folder path
+            available_pretrained_models = jsonFile.read_from_json(
+                ["classifier"]
+            ).keys()
+
+            available_pretrained_models_info = (
+                methods.create_tuple_of_models_text_value(
+                    available_pretrained_models
+                )
+            )
+            context = {
+                "df_name": df_name,
+                "available_pretrained_models_info": available_pretrained_models_info,
+            }
+    elif action == "discard_model":
+        # drop the model just trained in this session without keeping artifacts
+        name = request.session["df_name"]
+        model_name = request.session["model_name"]
+        model_name_path = os.path.join(
+            PIPELINE_PATH + f"{name}" + "/trained_models/" + model_name
+        )
+        # should delete model folder
+        # should delete classifier from json
+        # should delete preprocessed path too
+        methods.remove_dir_and_empty_parent(model_name_path)
+        # load paths
+        # absolute excel_file_preprocessed_path
+        excel_file_name_preprocessed_path = os.path.join(
+            PIPELINE_PATH,
+            f"{name}" + "/" + name + "_preprocessed" + ".csv",
+        )
+        try:
+            # Check if the file exists
+            if os.path.exists(excel_file_name_preprocessed_path):
+                # Delete the file
+                os.remove(excel_file_name_preprocessed_path)
+                # print(f"File '{excel_file_name_preprocessed_path}' has been deleted successfully.")
+            else:
+                print(f"File '{excel_file_name_preprocessed_path}' does not exist.")
+        except Exception as e:
+            print(f"An error occurred while deleting the file: {e}")
+            
+        json_path = os.path.join(PIPELINE_PATH + f"{name}" + "/pipeline.json")
+        jsonFile = pipeline.PipelineJSON(json_path)
+        jsonFile.delete_key(["classifier",model_name])
+        
+        context = {}
+        
+    return HttpResponse(json.dumps(context), status=status)
\ No newline at end of file
diff --git a/base/static/css/sb-admin-2.css b/base/static/css/sb-admin-2.css
index 440295341..119f169b8 100755
--- a/base/static/css/sb-admin-2.css
+++ b/base/static/css/sb-admin-2.css
@@ -4884,30 +4884,6 @@ input[type="button"].btn-block {
   max-height: 100%;
 }
 
-.loader {
-  width: 48px;
-  height: 48px;
-  border: 5px solid #fff;
-  border-bottom-color: #ff3d00;
-  border-radius: 50%;
-  display: inline-block;
-  box-sizing: border-box;
-  animation: rotation 1s linear infinite;
-}
-
-.loader_small {
-  width: 22px;
-  height: 22px;
-  border: 3px solid #fff;
-  margin-left: 5px;
-  border-bottom-color: #ff3d00;
-  border-radius: 50%;
-  display: inline-block;
-  box-sizing: border-box;
-  animation: rotation 1s linear infinite;
-  position: fixed;
-}
-
 @keyframes rotation {
   0% {
     transform: rotate(0deg);
@@ -12097,22 +12073,6 @@ button.btn-primary:hover {
   margin-top: 30px;
 }
 
-/* Loader styling */
-.loader {
-  border: 4px solid #f3f3f3;
-  border-top: 4px solid #007bff;
-  border-radius: 50%;
-  width: 25px;
-  height: 25px;
-  animation: spin 1s linear infinite;
-}
-
-/* Keyframes for loader spin */
-@keyframes spin {
-  0% { transform: rotate(0deg); }
-  100% { transform: rotate(360deg); }
-}
-
 /* Preprocessing checkboxes styling */
 .form-check-inline .form-check-label {
   margin-left: 5px;
@@ -12529,7 +12489,7 @@ h6 {
   animation: fadeIn 0.8s ease forwards;
 }
 
-/* Loader Style */
+/* Existing Loader Spinner */
 .loader {
   display: inline-block;
   width: 1.5rem;
@@ -12541,10 +12501,45 @@ h6 {
   margin-left: 8px;
 }
 
+/* Keyframes for spinner animation */
 @keyframes spin {
-  to { transform: rotate(360deg); }
+  to {
+    transform: rotate(360deg);
+  }
 }
 
+/* Loader Overlay */
+.loader-overlay {
+  position: absolute;
+  top: 0;
+  left: 0;
+  width: 100%;
+  height: 100%;
+  background: rgba(255, 255, 255, 0.8); /* Semi-transparent white background */
+  display: flex;
+  justify-content: center;
+  align-items: center;
+  z-index: 10; /* Ensure it overlays the content */
+}
+
+/* Spinner Loader */
+.spinner-border {
+  width: 3rem;
+  height: 3rem;
+  border: 4px solid rgba(0, 0, 0, 0.1);
+  border-top-color: #007bff; /* Customize color */
+  border-radius: 50%;
+  animation: spin 0.6s linear infinite;
+}
+
+/* NOTE(review): duplicate of the "spin" keyframes defined earlier in this file — safe to deduplicate */
+@keyframes spin {
+  to {
+      transform: rotate(360deg);
+  }
+}
+
+
 /* Enhanced style for the modal trigger button */
 .info-button {
   background: none;
@@ -12586,4 +12581,401 @@ table th, .sticky-top-table table td {
 /* Hover effect for rows */
 .sticky-top-table table tbody tr:hover {
   background-color: #eaf1f8; /* Soft highlight on hover */
-}
\ No newline at end of file
+}
+
+/* Modal Styling */
+#deleteFileModal .modal-content {
+  border-radius: 4px;
+  padding: 0;
+  box-shadow: 0 2px 8px rgba(0, 0, 0, 0.1);
+}
+#deleteFileModal .modal-header {
+  padding: 0.5rem 1rem;
+  border-bottom: none;
+}
+#deleteFileModal .modal-title {
+  font-size: 1rem;
+  color: #d9534f;
+}
+#deleteFileModal .modal-body {
+  font-size: 0.9rem;
+  color: #444;
+}
+
+/* Custom Buttons */
+.custom-btn-secondary,
+.custom-btn-danger {
+  font-size: 0.85rem;
+  padding: 0.4rem 1rem;
+  border-radius: 2px;
+  cursor: pointer;
+  transition: background-color 0.2s;
+}
+
+.custom-btn-secondary {
+  color: #555;
+  background-color: #f8f9fa;
+  border: 1px solid #ddd;
+}
+
+.custom-btn-secondary:hover {
+  background-color: #e2e6ea;
+}
+
+.custom-btn-danger {
+  color: #fff;
+  background-color: #d9534f;
+  border: 1px solid transparent;
+}
+
+.custom-btn-danger:hover {
+  background-color: #c9302c;
+}
+
+/* Delete icon next to file names */
+.delete-file-icon {
+  font-size: 1.2rem;
+  color: #bbb;
+  cursor: pointer;
+  transition: color 0.2s;
+}
+.delete-file-icon:hover {
+  color: #d9534f;
+}
+
+.custom-alert {
+  display: flex;
+  align-items: center;
+  padding: 5px 10px;
+  border-radius: 8px;
+  background-color: #eafaf1;
+  color: #28a745;
+  font-size: 14px;
+  max-width: 250px;
+  opacity: 0;
+  transform: translateY(-10px);
+  transition: opacity 0.4s ease, transform 0.4s ease;
+}
+
+.custom-alert.show {
+  opacity: 1;
+  transform: translateY(0);
+}
+
+.loader i {
+  font-size: 1.2em;
+  color: #007bff;
+}
+
+.card-header h6 {
+  font-size: 1rem;
+  font-weight: 600;
+  margin-right: auto;
+}
+
+.card-footer {
+  font-size: 0.85rem;
+  color: #6c757d;
+}
+
+/* Add to your CSS file */
+.blur-effect {
+  transition: filter 0.3s ease, opacity 0.3s ease;
+}
+
+/* Ensure the modal respects the maximum height */
+#modelAnalysisModal .modal-content {
+  max-height: 80vh; /* Adjust the maximum height as needed */
+  overflow-y: auto; /* Add vertical scrolling when content exceeds height */
+}
+
+/* Style for the modal body */
+#modelAnalysisModal .modal-body {
+  padding: 20px; /* Add some padding for better readability */
+}
+
+/* Optional: Keep the tabs navigation fixed at the top inside the modal */
+#modelAnalysisModal .nav-tabs {
+  position: sticky;
+  top: 0;
+  z-index: 1020;
+  background-color: #f8f9fa; /* Match with modal header background */
+  border-bottom: 1px solid #dee2e6;
+}
+
+/* Optional: Add smooth scrolling */
+#modelAnalysisModal .modal-content::-webkit-scrollbar {
+  width: 8px;
+}
+
+#modelAnalysisModal .modal-content::-webkit-scrollbar-thumb {
+  background-color: #6c757d; /* Darker thumb for scrollbar */
+  border-radius: 4px;
+}
+
+#modelAnalysisModal .modal-content::-webkit-scrollbar-track {
+  background-color: #f8f9fa; /* Light track for scrollbar */
+}
+
+/* Make the modal footer fixed to the bottom of the modal */
+#modelAnalysisModal .modal-footer {
+    position: sticky; /* Keep it at the bottom of the modal body */
+    bottom: 0;
+    z-index: 1050; /* Ensure it appears above the modal body content */
+    background-color: #fff; /* Match the modal's background color */
+    border-top: 1px solid #dee2e6; /* Optional: Add a top border */
+    box-shadow: 0 -1px 5px rgba(0, 0, 0, 0.1); /* Optional: Add subtle shadow */
+}
+
+/* Adjust the modal body to account for the footer's height */
+#modelAnalysisModal .modal-body {
+    max-height: calc(80vh - 60px); /* Subtract the approximate footer height */
+    overflow-y: auto; /* Enable scrolling if content exceeds height */
+}
+
+        /* Minimal animations and transitions */
+        .fade-in {
+          opacity: 0;
+          transform: translateY(20px);
+          transition: all 0.5s ease-in-out;
+      }
+
+      .fade-in.visible {
+          opacity: 1;
+          transform: translateY(0);
+      }
+
+      /* Button hover effect */
+      .btn-outline-primary {
+          border: 2px solid #007bff;
+          color: #007bff;
+          background: none;
+          transition: all 0.3s ease-in-out;
+      }
+
+      .btn-outline-primary:hover {
+          background: #007bff;
+          color: #fff;
+          transform: scale(1.05);
+      }
+
+      /* Card hover effect */
+      .feature-card {
+          transition: transform 0.3s ease-in-out, box-shadow 0.3s ease-in-out;
+      }
+
+      .feature-card:hover {
+          transform: translateY(-5px);
+          box-shadow: 0 8px 16px rgba(0, 0, 0, 0.1);
+      }
+
+      /* Typography tweaks */
+      h1, h2, h3 {
+          font-weight: 600;
+      }
+
+      p {
+          font-size: 1rem;
+          line-height: 1.6;
+      }
+
+      .separator {
+          height: 2px;
+          background-color: #ddd;
+          width: 100px;
+          margin: 20px auto;
+      }
+
+      .fade-in {
+        animation: fadeIn 1s ease-in-out;
+    }
+    
+    .btn-primary {
+        transition: background-color 0.3s ease, transform 0.2s ease;
+    }
+    
+    .btn-primary:hover {
+        background-color: #0056b3;
+        transform: scale(1.05);
+    }
+    
+    @keyframes fadeIn {
+        from {
+            opacity: 0;
+            transform: translateY(20px);
+        }
+        to {
+            opacity: 1;
+            transform: translateY(0);
+        }
+    }
+    
+    .carousel-control-prev-icon, .carousel-control-next-icon {
+        width: 3rem;
+        height: 3rem;
+    }
+    
+    .carousel-indicators li {
+        width: 1rem;
+        height: 1rem;
+        margin: 0 0.5rem;
+    }
+    
+    #backToTop {
+        position: fixed;
+        bottom: 20px;
+        right: 20px;
+        display: none;
+        z-index: 1000;
+        box-shadow: 0 2px 5px rgba(0, 0, 0, 0.2);
+    }
+    
+    #backToTop:hover {
+        background-color: #007bff;
+        color: white;
+    }
+    
+    body.dark-mode {
+        background-color: #121212;
+        color: #ffffff;
+    }
+    
+    .dark-mode .bg-light {
+        background-color: #2a2a2a;
+    }
+    
+    .dark-mode .text-dark {
+        color: #ffffff;
+    }
+    
+    .dark-mode .btn-primary {
+        background-color: #0056b3;
+        border-color: #0056b3;
+    }
+/* Background Enhancements */
+#home_intro {
+  overflow: hidden;
+  position: relative;
+  background: linear-gradient(145deg, #f3f4f6, #ffffff);
+}
+
+#home_intro .background-shape {
+  position: absolute;
+  width: 180px; /* Reduced size */
+  height: 180px; /* Reduced size */
+  background: rgba(0, 123, 255, 0.2);
+  border-radius: 50%;
+  filter: blur(60px);
+  z-index: 0;
+  animation: float 5s ease-in-out infinite;
+}
+
+#home_intro .background-shape.shape-1 {
+  top: -40px;
+  left: -40px;
+}
+
+#home_intro .background-shape.shape-2 {
+  bottom: -40px;
+  right: -40px;
+  animation-delay: 2s;
+}
+
+/* Keyframe Animation for Background Shapes */
+@keyframes float {
+  0%, 100% {
+      transform: translateY(0);
+  }
+  50% {
+      transform: translateY(15px);
+  }
+}
+
+/* Logo Styling */
+#home_intro .logos .logo {
+  max-height: 60px; /* Smaller logo size */
+  filter: drop-shadow(0 3px 5px rgba(0, 0, 0, 0.1));
+  transition: transform 0.3s ease, filter 0.3s ease;
+}
+
+#home_intro .logos .logo:hover {
+  transform: scale(1.1);
+  filter: drop-shadow(0 5px 7px rgba(0, 0, 0, 0.2));
+}
+
+/* Animation for Fading in */
+.fade-in {
+  animation: fadeIn 1s ease-in-out;
+}
+
+@keyframes fadeIn {
+  from {
+      opacity: 0;
+      transform: translateY(20px);
+  }
+  to {
+      opacity: 1;
+      transform: translateY(0);
+  }
+}
+
+/* Responsive Styling */
+@media (max-width: 768px) {
+  #home_intro .logos {
+      flex-wrap: wrap;
+  }
+
+  #home_intro .logos .logo {
+      margin-bottom: 8px; /* Reduced spacing */
+  }
+}
+
+/* Overall Styling */
+.collapse {
+  padding: 20px;
+  line-height: 1.6;
+  font-size: 16px;
+}
+
+.collapse h4 {
+  font-weight: 600;
+  text-align: center;
+  margin-bottom: 20px;
+}
+
+.collapse ul {
+  padding: 0;
+  margin: 20px 0;
+  list-style: none;
+}
+
+.collapse ul li {
+  display: inline-block;
+  margin: 0 15px;
+  font-size: 16px;
+  font-weight: 500;
+  color: #495057;
+}
+
+.collapse ul li i {
+  font-size: 20px;
+  vertical-align: middle;
+}
+
+.collapse p {
+  text-align: justify;
+  margin: 10px 0;
+}
+
+.collapse a.btn {
+  font-size: 14px;
+  padding: 10px 20px;
+  border: 1px solid #007bff;
+  color: #007bff;
+  transition: background-color 0.3s ease, color 0.3s ease;
+}
+
+.collapse a.btn:hover {
+  background-color: #007bff;
+  color: white;
+}
diff --git a/base/static/img/digital_features.png b/base/static/img/digital_features.png
new file mode 100644
index 000000000..a7cdcbb84
Binary files /dev/null and b/base/static/img/digital_features.png differ
diff --git a/base/static/img/su_logo.png b/base/static/img/su_logo.png
new file mode 100644
index 000000000..c433f410a
Binary files /dev/null and b/base/static/img/su_logo.png differ
diff --git a/base/static/img/undraw_posting_photo.svg b/base/static/img/undraw_posting_photo.svg
deleted file mode 100755
index fc0d549c1..000000000
--- a/base/static/img/undraw_posting_photo.svg
+++ /dev/null
@@ -1 +0,0 @@
-<svg id="fe93ff64-a18b-49f4-bb52-e425cf20d0d6" data-name="Layer 1" xmlns="http://www.w3.org/2000/svg" width="1050" height="594.02" viewBox="0 0 1050 594.02"><title>posting photo</title><ellipse cx="525" cy="561.02" rx="525" ry="33" fill="#4e73df" opacity="0.1"/><polygon points="497.09 549.99 318.9 547.71 319.43 543.14 328.04 467.75 484.53 467.75 496.04 543.14 496.92 548.85 497.09 549.99" fill="#d0d2d5"/><polygon points="496.92 548.85 408 548.85 318.9 547.71 319.43 543.14 496.04 543.14 496.92 548.85" opacity="0.1"/><rect x="289.2" y="544.28" width="236.45" height="5.71" fill="#d0d2d5"/><path d="M826.24,167.93A14.87,14.87,0,0,0,811.44,153H151.12a14.87,14.87,0,0,0-14.8,14.94V568.2H826.24Z" transform="translate(-75 -152.99)" fill="#3f3d56"/><path d="M136.32,564.2v46.88a14.8,14.8,0,0,0,14.8,14.8H811.44a14.8,14.8,0,0,0,14.8-14.8V564.2Z" transform="translate(-75 -152.99)" fill="#d0d2d5"/><rect x="89.88" y="25.13" width="636.23" height="359.81" fill="#fff"/><path d="M484.71,608.09a15.43,15.43,0,0,0,12.13-5.88v0a16.06,16.06,0,0,0,1.2-1.76L489.57,599l9.15.07a15.44,15.44,0,0,0,.29-12.22l-12.27,6.36,11.32-8.32a15.42,15.42,0,1,0-25.47,17.26v0A15.43,15.43,0,0,0,484.71,608.09Z" transform="translate(-75 -152.99)" fill="#4e73df"/><polygon points="425.13 472.89 496.22 544.28 485.31 472.89 425.13 472.89" opacity="0.1"/><path 
d="M709.94,364.1a1.48,1.48,0,0,0,0-.21,55.29,55.29,0,0,0-2.66-14.57c-.09-.27-.17-.54-.27-.8a55.77,55.77,0,0,0-21.32-28,55.47,55.47,0,0,0-72.69,9A78.52,78.52,0,0,0,608.57,314a248.45,248.45,0,0,1-44,1.64,177.65,177.65,0,0,0,27.91,10.14l-.34,1.27a178.73,178.73,0,0,1-31.19-11.67l-3-1.46,3.36.22a249.73,249.73,0,0,0,46.82-1.35,79.17,79.17,0,0,0-13.8-21.9c-25.18-2.54-50.17-7.82-73.48-18.3l.54-1.19c22.7,10.2,47,15.45,71.61,18a78.63,78.63,0,0,0-125,13.28A108.05,108.05,0,0,0,441.16,242a251.7,251.7,0,0,1-41.45,12.56,250.58,250.58,0,0,1-64.81,5.14,177.9,177.9,0,0,0,27.9,10.14l-.34,1.26a179,179,0,0,1-31.19-11.66l-3-1.47,3.35.22A248.9,248.9,0,0,0,440.24,241c-1.29-1.42-2.63-2.81-4-4.17-43.06.87-89.95.45-132.4-15A108.28,108.28,0,0,0,252.44,314c0,20.32,5.58,48.27,15.3,76.31A325.56,325.56,0,0,0,283,427.06c3,6,6.12,11.9,9.44,17.52h0a198.58,198.58,0,0,0,13.16,19.71c.86,1.13,1.73,2.24,2.6,3.32a120.36,120.36,0,0,0,16.42,17h0q1.82,1.52,3.67,2.9A69.49,69.49,0,0,0,338.82,494a48.34,48.34,0,0,0,19.81,5.38c.55,0,1.09,0,1.64,0v.23h294v-.23h.22a14.74,14.74,0,0,0,5-.88c10.4-3.69,20-18.5,27.93-37.21,1.76-4.15,3.44-8.49,5-12.95,1.41-3.93,2.75-8,4-12a371.64,371.64,0,0,0,9.25-36.12c2.8-13.88,4.35-26,4.35-33.68C710,365.76,710,364.93,709.94,364.1Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M434.91,235.5a107.89,107.89,0,0,0-129.62-14.61C346.83,235.77,392.68,236.32,434.91,235.5Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path 
d="M675.72,477.05l-45.37-78.59a1.45,1.45,0,0,0-2.51,0l-45.37,78.59a1.45,1.45,0,0,0,1.25,2.17h13.54a1.46,1.46,0,0,1,1.45,1.44v16.79a1.44,1.44,0,0,0,1.44,1.45h17.66a1.45,1.45,0,0,0,1.45-1.45v-8.1a1.44,1.44,0,0,1,1.44-1.45h16.79a1.45,1.45,0,0,1,1.45,1.45v8.1a1.45,1.45,0,0,0,1.45,1.45H658a1.44,1.44,0,0,0,1.37-1,1.34,1.34,0,0,0,.08-.46V480.66a1.45,1.45,0,0,1,1.45-1.44h13.53A1.45,1.45,0,0,0,675.72,477.05Zm-63.26,8.69a1.4,1.4,0,0,1-1,.43h-6.37a1.45,1.45,0,0,1-1.45-1.45,1.47,1.47,0,0,1,1.45-1.45h6.37a1.45,1.45,0,0,1,1.45,1.45A1.4,1.4,0,0,1,612.46,485.74Zm41.68,0a1.4,1.4,0,0,1-1,.43h-6.37a1.45,1.45,0,0,1-1-2.47,1.4,1.4,0,0,1,1-.43h6.37a1.45,1.45,0,0,1,1.45,1.45A1.4,1.4,0,0,1,654.14,485.74Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M395.28,477.74l-45.37-78.59a1.45,1.45,0,0,0-2.5,0L308.21,467A119.89,119.89,0,0,0,324.63,484H331a1.45,1.45,0,0,1,1.45,1.45,1.47,1.47,0,0,1-1.45,1.45h-2.69a70.22,70.22,0,0,0,10.51,6.53V490a1.44,1.44,0,0,1,1.45-1.44h16.79A1.44,1.44,0,0,1,358.5,490v8.1a1.51,1.51,0,0,0,.13.61,1.44,1.44,0,0,0,1.32.84h17.66a1.41,1.41,0,0,0,1.15-.58,1.44,1.44,0,0,0,.29-.87V481.36a1.45,1.45,0,0,1,1.45-1.45H394A1.44,1.44,0,0,0,395.28,477.74Zm-30,6.65a1.43,1.43,0,0,1,1-.43h6.36a1.45,1.45,0,0,1,1.45,1.45,1.45,1.45,0,0,1-1.45,1.45h-6.36a1.45,1.45,0,0,1-1.45-1.45A1.44,1.44,0,0,1,365.29,484.39Zm-28.5-29.08a1.44,1.44,0,0,1-1.44-1.45v-11a1.44,1.44,0,0,1,1.44-1.45h23.74a1.45,1.45,0,0,1,1.44,1.45v11a1.45,1.45,0,0,1-1.44,1.45Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path 
d="M574.63,336.57H518.48V324.42a2.9,2.9,0,0,0-2.9-2.9H499.09V308.58a1.82,1.82,0,0,0-1.83-1.82H476a1.83,1.83,0,0,0-1.83,1.82v12.94H454.81a2.9,2.9,0,0,0-2.9,2.9v12.14H398.37a.58.58,0,0,0-.59.59v161.2a.58.58,0,0,0,.59.59H434a.58.58,0,0,0,.59-.59V476.68a.6.6,0,0,1,.59-.6h15a.6.6,0,0,1,.59.6v21.68a.58.58,0,0,0,.59.59h70.32a.59.59,0,0,0,.59-.59V476.68a.59.59,0,0,1,.58-.6h15a.59.59,0,0,1,.59.6v21.68a.59.59,0,0,0,.59.59h35.59a.59.59,0,0,0,.59-.59V337.16A.59.59,0,0,0,574.63,336.57Zm-146.46,132a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.59.59,0,0,1,.59-.6h22a.59.59,0,0,1,.59.6Zm0-19.39a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.51a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.4a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm0-19.39a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm0-19.39a.59.59,0,0,1-.59.59h-22A.59.59,0,0,1,405,391v-7.51a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.4a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm0-19.39a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm52.1,116.36a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.59.59,0,0,1,.59-.6h22a.59.59,0,0,1,.59.6Zm0-19.39a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.51a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.4a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm0-19.39a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm0-19.39a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.51a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.4a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm0-19.39a.58.58,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.58.58,0,0,1,.59.59Zm35.61,116.36a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.5a.6.6,0,0,1,.5
9-.6h22a.6.6,0,0,1,.59.6Zm0-19.39a.6.6,0,0,1-.59.59h-22a.6.6,0,0,1-.59-.59v-7.51a.6.6,0,0,1,.59-.59h22a.6.6,0,0,1,.59.59Zm0-19.4a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.5a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.39a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.5a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.39a.6.6,0,0,1-.59.59h-22a.6.6,0,0,1-.59-.59v-7.51a.6.6,0,0,1,.59-.59h22a.6.6,0,0,1,.59.59Zm0-19.4a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.5a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.39a.59.59,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.5a.59.59,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59ZM568,468.55a.59.59,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.59.59,0,0,1,.59-.6h22a.6.6,0,0,1,.59.6Zm0-19.39a.6.6,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.51a.59.59,0,0,1,.59-.59h22a.6.6,0,0,1,.59.59Zm0-19.4a.59.59,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.39a.59.59,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59ZM568,391a.6.6,0,0,1-.59.59h-22a.59.59,0,0,1-.59-.59v-7.51a.59.59,0,0,1,.59-.59h22a.6.6,0,0,1,.59.59Zm0-19.4a.59.59,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Zm0-19.39a.59.59,0,0,1-.59.59h-22a.58.58,0,0,1-.59-.59v-7.5a.58.58,0,0,1,.59-.59h22a.59.59,0,0,1,.59.59Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M414.7,292.87a12.6,12.6,0,0,0-7.33.8,10.79,10.79,0,0,1-8.81,0,12.37,12.37,0,0,0-10.36.2,6.33,6.33,0,0,1-3,.75c-4.2,0-7.7-4.23-8.42-9.81a8.11,8.11,0,0,0,2.09-2.27c2.47-4,6.28-6.51,10.56-6.51s8.06,2.51,10.52,6.44a8.1,8.1,0,0,0,7,3.83h.11C410.4,286.28,413.3,289,414.7,292.87Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path 
d="M427.5,275.35l-6.79,4.31,4.12-7.5a6.73,6.73,0,0,0-4.1-1.46h-.11a8.22,8.22,0,0,1-1.41-.1l-2.3,1.45,1-1.79a8.19,8.19,0,0,1-4-3.05l-4.12,2.61,2.6-4.73a12.05,12.05,0,0,0-9.22-4.67c-4.29,0-8.1,2.55-10.57,6.52a7.87,7.87,0,0,1-7,3.76h-.23c-4.72,0-8.56,5.36-8.56,12s3.84,12,8.56,12a6.48,6.48,0,0,0,3-.74,12.3,12.3,0,0,1,10.36-.2,10.9,10.9,0,0,0,8.81,0,12.35,12.35,0,0,1,10.27.19,6.31,6.31,0,0,0,3,.73c4.72,0,8.56-5.36,8.56-12A15.22,15.22,0,0,0,427.5,275.35Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><rect x="505.46" y="102.97" width="371.54" height="447.42" rx="19.8" fill="#3f3d56"/><rect x="522" y="148.11" width="336" height="357.15" fill="#fff"/><circle cx="691.23" cy="528.8" r="13.08" fill="#fff"/><path d="M766.23,288.17a6,6,0,1,1,6-6A6,6,0,0,1,766.23,288.17Z" transform="translate(-75 -152.99)" fill="#fff"/><path d="M766.23,276.58a5.55,5.55,0,1,1-5.54,5.55,5.55,5.55,0,0,1,5.54-5.55m0-1a6.55,6.55,0,1,0,6.54,6.55,6.54,6.54,0,0,0-6.54-6.55Z" transform="translate(-75 -152.99)" fill="#fff"/><path 
d="M899.2,486.3s0-.08,0-.12a32.12,32.12,0,0,0-1.55-8.47l-.15-.46A32.51,32.51,0,0,0,885.09,461a32.23,32.23,0,0,0-42.25,5.22,47.14,47.14,0,0,0-2.57-9,144.23,144.23,0,0,1-25.59,1A102.72,102.72,0,0,0,830.9,464l-.2.74A103.56,103.56,0,0,1,812.57,458l-1.76-.85,2,.13a144.61,144.61,0,0,0,27.22-.78,46.08,46.08,0,0,0-8-12.73c-14.64-1.48-29.17-4.55-42.72-10.64l.31-.7c13.2,5.94,27.35,9,41.63,10.49a45.71,45.71,0,0,0-72.66,7.72A62.74,62.74,0,0,0,743,415.31a147.66,147.66,0,0,1-24.1,7.3,145.91,145.91,0,0,1-37.68,3,102.72,102.72,0,0,0,16.22,5.89l-.2.73a102.73,102.73,0,0,1-18.13-6.78l-1.76-.85,2,.13a144.71,144.71,0,0,0,63.16-10c-.75-.83-1.53-1.63-2.33-2.42-25,.5-52.29.26-77-8.73a62.93,62.93,0,0,0-29.89,53.62c0,11.81,3.25,28.06,8.9,44.36A187.93,187.93,0,0,0,651,522.91c1.72,3.51,3.55,6.92,5.48,10.18h0a117,117,0,0,0,7.65,11.46c.5.65,1,1.3,1.52,1.93a69.63,69.63,0,0,0,9.54,9.87h0c.71.59,1.42,1.15,2.14,1.68a40.31,40.31,0,0,0,6.11,3.81A28,28,0,0,0,695,565c.31,0,.63,0,.95,0v.13H866.81V565h.12a8.76,8.76,0,0,0,2.89-.51c6-2.15,11.61-10.76,16.23-21.64,1-2.41,2-4.93,2.94-7.52.82-2.29,1.59-4.63,2.33-7a215.07,215.07,0,0,0,5.38-21,110.27,110.27,0,0,0,2.53-19.58C899.23,487.27,899.22,486.79,899.2,486.3Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M739.31,411.54A62.72,62.72,0,0,0,664,403.05C688.11,411.7,714.76,412,739.31,411.54Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M879.3,552l-26.37-45.69a.84.84,0,0,0-1.46,0L825.09,552a.84.84,0,0,0,.73,1.26h7.87a.85.85,0,0,1,.84.84v9.76a.85.85,0,0,0,.84.84h10.27a.85.85,0,0,0,.84-.84v-4.71a.83.83,0,0,1,.84-.84h9.76a.84.84,0,0,1,.84.84v4.71a.85.85,0,0,0,.84.84H869a.84.84,0,0,0,.8-.57.86.86,0,0,0,0-.27v-9.76a.84.84,0,0,1,.84-.84h7.87A.84.84,0,0,0,879.3,552Zm-36.77,5a.86.86,0,0,1-.6.25h-3.7a.85.85,0,0,1-.84-.84.81.81,0,0,1,.25-.6.84.84,0,0,1,.59-.25h3.7a.85.85,0,0,1,.85.85A.84.84,0,0,1,842.53,557Zm24.23,0a.86.86,0,0,1-.6.25h-3.7a.85.85,0,0,1,0-1.69h3.7a.85.85,0,0,1,.85.85A.84.84,0,0,1,866.76,557Z" 
transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M716.27,552.37,689.9,506.68a.84.84,0,0,0-1.46,0l-22.78,39.46A69.45,69.45,0,0,0,675.2,556h3.7a.84.84,0,0,1,.6,1.43.81.81,0,0,1-.6.25h-1.56a41,41,0,0,0,6.11,3.8v-2a.84.84,0,0,1,.84-.84h9.76a.85.85,0,0,1,.84.84v4.71a.78.78,0,0,0,.08.35.83.83,0,0,0,.76.49H706a.84.84,0,0,0,.67-.34.86.86,0,0,0,.17-.5v-9.76a.84.84,0,0,1,.84-.84h7.87A.84.84,0,0,0,716.27,552.37Zm-17.43,3.86a.83.83,0,0,1,.59-.24h3.71a.83.83,0,0,1,.59,1.43.8.8,0,0,1-.59.25h-3.71a.85.85,0,0,1-.84-.84A.86.86,0,0,1,698.84,556.23Zm-16.57-16.9a.84.84,0,0,1-.84-.85v-6.39a.84.84,0,0,1,.84-.84h13.8a.85.85,0,0,1,.84.84v6.39a.85.85,0,0,1-.84.85Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M820.53,470.3H787.89v-7.05a1.69,1.69,0,0,0-1.69-1.69h-9.58V454a1.06,1.06,0,0,0-1.06-1.06H763.21a1.05,1.05,0,0,0-1.06,1.06v7.52H750.88a1.69,1.69,0,0,0-1.69,1.69v7.05H718.07a.35.35,0,0,0-.35.34v93.72a.35.35,0,0,0,.35.34h20.68a.34.34,0,0,0,.34-.34V551.75a.35.35,0,0,1,.35-.34h8.73a.35.35,0,0,1,.35.34v12.61a.34.34,0,0,0,.34.34h40.88a.34.34,0,0,0,.34-.34V551.75a.34.34,0,0,1,.34-.34h8.74a.34.34,0,0,1,.34.34v12.61a.35.35,0,0,0,.35.34h20.68a.34.34,0,0,0,.34-.34V470.64A.34.34,0,0,0,820.53,470.3ZM735.39,547a.34.34,0,0,1-.34.34H722.27a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H722.27a.34.34,0,0,1-.34-.34v-4.36a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H722.27a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.35.35,0,0,1-.34.35H722.27a.35.35,0,0,1-.34-.35v-4.36a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H722.27a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H722.27a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H722.27a.34.34,0,0,1-.34-.34V475a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,
1,.34.34ZM765.68,547a.34.34,0,0,1-.34.34H752.56a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H752.56a.34.34,0,0,1-.34-.34v-4.36a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H752.56a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.35.35,0,0,1-.34.35H752.56a.35.35,0,0,1-.34-.35v-4.36a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H752.56a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H752.56a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H752.56a.34.34,0,0,1-.34-.34V475a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34ZM786.38,547a.34.34,0,0,1-.34.34H773.26a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34H786a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H773.26a.34.34,0,0,1-.34-.34v-4.36a.34.34,0,0,1,.34-.34H786a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H773.26a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34H786a.34.34,0,0,1,.34.34Zm0-11.28a.35.35,0,0,1-.34.35H773.26a.35.35,0,0,1-.34-.35v-4.36a.34.34,0,0,1,.34-.34H786a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H773.26a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34H786a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H773.26a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34H786a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H773.26a.34.34,0,0,1-.34-.34V475a.34.34,0,0,1,.34-.34H786a.34.34,0,0,1,.34.34ZM816.67,547a.35.35,0,0,1-.34.34H803.55a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H803.55a.34.34,0,0,1-.34-.34v-4.36a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,1-.34.34H803.55a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.35.35,0,0,1,.34.34Zm0-11.28a.35.35,0,0,1-.34.35H803.55a.35.35,0,0,1-.34-.35v-4.36a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.27a.34.34,0,0,
1-.34.34H803.55a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.35.35,0,0,1,.34.34Zm0-11.27a.35.35,0,0,1-.34.34H803.55a.34.34,0,0,1-.34-.34v-4.37a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Zm0-11.28a.34.34,0,0,1-.34.34H803.55a.34.34,0,0,1-.34-.34V475a.34.34,0,0,1,.34-.34h12.78a.34.34,0,0,1,.34.34Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M727.56,444.9a7.39,7.39,0,0,0-4.26.46,6.34,6.34,0,0,1-2.55.54,6.24,6.24,0,0,1-2.57-.55,7.18,7.18,0,0,0-6,.12,3.72,3.72,0,0,1-1.73.43c-2.44,0-4.47-2.46-4.89-5.7a4.82,4.82,0,0,0,1.22-1.32,6.86,6.86,0,0,1,12.25,0,4.68,4.68,0,0,0,4,2.23h.07C725.06,441.07,726.74,442.62,727.56,444.9Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M735,434.71l-4,2.51,2.4-4.36a3.92,3.92,0,0,0-2.39-.85H731a5.55,5.55,0,0,1-.82-.06l-1.34.84.58-1a4.72,4.72,0,0,1-2.34-1.77l-2.4,1.51,1.52-2.75a7,7,0,0,0-5.37-2.71,7.35,7.35,0,0,0-6.14,3.79,4.6,4.6,0,0,1-4.06,2.19h-.13c-2.75,0-5,3.11-5,7s2.23,7,5,7a3.73,3.73,0,0,0,1.73-.44,7.18,7.18,0,0,1,6-.11,6.41,6.41,0,0,0,2.57.55,6.34,6.34,0,0,0,2.55-.54,7.19,7.19,0,0,1,6,.11,3.64,3.64,0,0,0,1.71.43c2.75,0,5-3.12,5-7A8.86,8.86,0,0,0,735,434.71Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><rect x="988.64" y="269.17" width="5.36" height="44.17" rx="2.29" fill="#3f3d56"/><rect x="810.55" y="234.57" width="3.01" height="14.54" rx="1.5" fill="#3f3d56"/><rect x="810.44" y="261.19" width="3.39" height="25.31" rx="1.69" fill="#3f3d56"/><rect x="810.49" y="295.35" width="3.23" height="25.53" rx="1.61" fill="#3f3d56"/><rect x="812.25" y="186.51" width="179.29" height="364.37" rx="18.54" fill="#3f3d56"/><rect x="884.6" y="197.39" width="25.04" height="5.08" rx="2.54" fill="#e6e8ec"/><circle cx="916.3" cy="199.94" r="2.88" fill="#e6e8ec"/><path 
d="M1041.22,349H1020.7v2.47A11.73,11.73,0,0,1,1009,363.19H943.54a11.73,11.73,0,0,1-11.73-11.73V349H912.56a14.25,14.25,0,0,0-14.24,14.24V680.14a14.24,14.24,0,0,0,14.24,14.24h128.66a14.23,14.23,0,0,0,14.24-14.24V363.23A14.24,14.24,0,0,0,1041.22,349Z" transform="translate(-75 -152.99)" fill="#fff"/><path d="M1037.2,524.68v0a14.33,14.33,0,0,0-.7-3.83,1.72,1.72,0,0,0-.07-.21,14.56,14.56,0,0,0-24.65-5,20.46,20.46,0,0,0-1.16-4.07,65.66,65.66,0,0,1-11.55.43,47.79,47.79,0,0,0,7.32,2.66l-.09.33a46.82,46.82,0,0,1-8.18-3.06l-.79-.39.88.06a65.38,65.38,0,0,0,12.28-.35,20.79,20.79,0,0,0-3.62-5.75,61.91,61.91,0,0,1-19.27-4.79l.14-.32a60.89,60.89,0,0,0,18.78,4.73,20.63,20.63,0,0,0-32.78,3.49,28.35,28.35,0,0,0-7-15.94,66.32,66.32,0,0,1-27.87,4.64,46.76,46.76,0,0,0,7.32,2.66l-.09.33a46.82,46.82,0,0,1-8.18-3.06l-.79-.38.88.06a65.26,65.26,0,0,0,28.49-4.52c-.34-.37-.69-.73-1.05-1.09-11.29.23-23.59.12-34.72-3.94a28.4,28.4,0,0,0-13.48,24.19c0,5.33,1.46,12.66,4,20a83.69,83.69,0,0,0,4,9.64q1.17,2.38,2.47,4.6h0a54,54,0,0,0,3.45,5.17l.69.87a32,32,0,0,0,4.3,4.45h0c.32.27.64.52,1,.76a18.94,18.94,0,0,0,2.75,1.72,12.92,12.92,0,0,0,5.2,1.41h.43v.05h77.09v-.05h.06a3.92,3.92,0,0,0,1.3-.23c2.73-1,5.24-4.85,7.32-9.76.47-1.09.91-2.23,1.33-3.4s.72-2.08,1.05-3.15c1-3.2,1.82-6.49,2.43-9.47a50.32,50.32,0,0,0,1.14-8.83C1037.22,525.12,1037.21,524.9,1037.2,524.68Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M965.07,491a28.3,28.3,0,0,0-34-3.83C942,491,954,491.17,965.07,491Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path 
d="M1028.23,554.3l-11.9-20.61a.38.38,0,0,0-.66,0l-11.9,20.61a.38.38,0,0,0,.33.57h3.55a.38.38,0,0,1,.38.38v4.4a.38.38,0,0,0,.38.38H1013a.38.38,0,0,0,.38-.38v-2.12a.38.38,0,0,1,.38-.38h4.4a.38.38,0,0,1,.38.38v2.12a.38.38,0,0,0,.38.38h4.63a.38.38,0,0,0,.36-.26.37.37,0,0,0,0-.12v-4.4a.38.38,0,0,1,.38-.38h3.55A.38.38,0,0,0,1028.23,554.3Zm-16.59,2.28a.39.39,0,0,1-.27.11h-1.67a.38.38,0,0,1-.38-.38.35.35,0,0,1,.11-.26.4.4,0,0,1,.27-.12h1.67a.38.38,0,0,1,.38.38A.37.37,0,0,1,1011.64,556.58Zm10.93,0a.37.37,0,0,1-.27.11h-1.67a.38.38,0,0,1-.38-.38.35.35,0,0,1,.11-.26.4.4,0,0,1,.27-.12h1.67a.38.38,0,0,1,.38.38A.37.37,0,0,1,1022.57,556.58Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M954.68,554.48l-11.9-20.61a.38.38,0,0,0-.66,0l-10.27,17.8a32,32,0,0,0,4.3,4.45h1.67a.38.38,0,1,1,0,.75h-.7a18.94,18.94,0,0,0,2.75,1.72v-.88a.38.38,0,0,1,.38-.38h4.41a.38.38,0,0,1,.37.38v2.12a.39.39,0,0,0,.38.38h4.64a.38.38,0,0,0,.3-.15.35.35,0,0,0,.07-.23v-4.4a.38.38,0,0,1,.38-.38h3.55A.38.38,0,0,0,954.68,554.48Zm-7.86,1.75a.35.35,0,0,1,.26-.11h1.67a.38.38,0,1,1,0,.75h-1.67a.38.38,0,0,1-.38-.38A.37.37,0,0,1,946.82,556.23Zm-7.48-7.63a.38.38,0,0,1-.38-.38v-2.88a.38.38,0,0,1,.38-.38h6.23a.38.38,0,0,1,.38.38v2.88a.38.38,0,0,1-.38.38Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path 
d="M1001.72,517.46H987v-3.19a.76.76,0,0,0-.76-.76H981.9v-3.39a.48.48,0,0,0-.47-.48h-5.57a.48.48,0,0,0-.48.48v3.39h-5.09a.76.76,0,0,0-.76.76v3.19h-14a.15.15,0,0,0-.15.15v42.28a.16.16,0,0,0,.15.16h9.33a.16.16,0,0,0,.16-.16V554.2a.15.15,0,0,1,.15-.15h3.94a.16.16,0,0,1,.16.15v5.69a.16.16,0,0,0,.15.16h18.44a.16.16,0,0,0,.16-.16V554.2a.15.15,0,0,1,.15-.15h3.94a.16.16,0,0,1,.16.15v5.69a.16.16,0,0,0,.15.16h9.34a.16.16,0,0,0,.15-.16V517.61A.15.15,0,0,0,1001.72,517.46Zm-38.41,34.61a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.08a.16.16,0,0,1-.16.15h-5.76a.16.16,0,0,1-.16-.15v-2a.16.16,0,0,1,.16-.16h5.76a.16.16,0,0,1,.16.16Zm0-5.09a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.09a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,0,1,.16-.16h5.76a.16.16,0,0,1,.16.16Zm0-5.08a.16.16,0,0,1-.16.15h-5.76a.16.16,0,0,1-.16-.15v-2a.16.16,0,0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.09a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.08a.16.16,0,0,1-.16.15h-5.76a.16.16,0,0,1-.16-.15v-2a.16.16,0,0,1,.16-.16h5.76a.16.16,0,0,1,.16.16ZM977,552.07a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.08a.15.15,0,0,1-.15.15h-5.77a.15.15,0,0,1-.15-.15v-2a.16.16,0,0,1,.15-.16h5.77a.16.16,0,0,1,.15.16Zm0-5.09a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.09a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.16.16,0,0,1,.15-.16h5.77a.16.16,0,0,1,.15.16Zm0-5.08a.15.15,0,0,1-.15.15h-5.77a.15.15,0,0,1-.15-.15v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.09a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.08a.15.15,0,0,1-.15.15h-5.77a.15.15,0,0,1-.15-.15v-2a.16.16,0,0,1,.15-.16h5.77a.16.16,0,0,1,.15.16Zm9.34,30.51a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,
0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.08a.16.16,0,0,1-.16.15h-5.76a.16.16,0,0,1-.16-.15v-2a.16.16,0,0,1,.16-.16h5.76a.16.16,0,0,1,.16.16Zm0-5.09a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.09a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,0,1,.16-.16h5.76a.16.16,0,0,1,.16.16Zm0-5.08a.16.16,0,0,1-.16.15h-5.76a.16.16,0,0,1-.16-.15v-2a.16.16,0,0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.09a.16.16,0,0,1-.16.16h-5.76a.16.16,0,0,1-.16-.16v-2a.16.16,0,0,1,.16-.15h5.76a.16.16,0,0,1,.16.15Zm0-5.08a.16.16,0,0,1-.16.15h-5.76a.16.16,0,0,1-.16-.15v-2a.16.16,0,0,1,.16-.16h5.76a.16.16,0,0,1,.16.16ZM1000,552.07a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.08a.15.15,0,0,1-.15.15h-5.77a.15.15,0,0,1-.15-.15v-2a.16.16,0,0,1,.15-.16h5.77a.16.16,0,0,1,.15.16Zm0-5.09a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.09a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.16.16,0,0,1,.15-.16h5.77a.16.16,0,0,1,.15.16Zm0-5.08a.15.15,0,0,1-.15.15h-5.77a.15.15,0,0,1-.15-.15v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.09a.16.16,0,0,1-.15.16h-5.77a.16.16,0,0,1-.15-.16v-2a.15.15,0,0,1,.15-.15h5.77a.15.15,0,0,1,.15.15Zm0-5.08a.15.15,0,0,1-.15.15h-5.77a.15.15,0,0,1-.15-.15v-2a.16.16,0,0,1,.15-.16h5.77a.16.16,0,0,1,.15.16Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M959.77,506a3.33,3.33,0,0,0-1.92.21,2.82,2.82,0,0,1-1.15.24,2.72,2.72,0,0,1-1.16-.25,3.27,3.27,0,0,0-2.72.06,1.73,1.73,0,0,1-.78.19c-1.1,0-2-1.11-2.21-2.57a2.06,2.06,0,0,0,.55-.59,3.1,3.1,0,0,1,5.53,0,2.11,2.11,0,0,0,1.83,1h0A2.28,2.28,0,0,1,959.77,506Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path 
d="M963.13,501.41l-1.78,1.12,1.08-2a1.75,1.75,0,0,0-1.08-.39h0a1.51,1.51,0,0,1-.37,0l-.61.38.26-.47a2.19,2.19,0,0,1-1.05-.8l-1.08.68.68-1.24a3.16,3.16,0,0,0-2.42-1.22A3.3,3.3,0,0,0,954,499.2a2.09,2.09,0,0,1-1.83,1h-.06c-1.24,0-2.25,1.41-2.25,3.14s1,3.14,2.25,3.14a1.64,1.64,0,0,0,.78-.19,3.23,3.23,0,0,1,2.72,0,2.9,2.9,0,0,0,2.31,0,3.24,3.24,0,0,1,2.69,0,1.6,1.6,0,0,0,.77.19c1.24,0,2.25-1.4,2.25-3.14A4,4,0,0,0,963.13,501.41Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M174.65,693.59a37,37,0,0,1-.8,7.76c-.1.48-.21.95-.32,1.41-2.84,11.39-10.85,19.72-20.41,20.25-.32,0-.64,0-1,0-10.11,0-18.66-8.72-21.48-20.73-.08-.32-.15-.64-.22-1a37,37,0,0,1-.8-7.76c0-16.27,10.07-29.45,22.5-29.45S174.65,677.32,174.65,693.59Z" transform="translate(-75 -152.99)" fill="#3f3d56"/><path d="M174.65,693.59a37,37,0,0,1-.8,7.76c-.1.48-.21.95-.32,1.41-.34,0-.67,0-1,0a45.76,45.76,0,0,1-7.36-1,44.92,44.92,0,0,1-6.56,1.5,45.87,45.87,0,0,1-5.14.48l-1.74,0a46.41,46.41,0,0,1-6.16-.41,45.17,45.17,0,0,1-9.67-2.4,45.56,45.56,0,0,1-5.22,1.4c-.08-.32-.15-.64-.22-1a37,37,0,0,1-.8-7.76c0-16.27,10.07-29.45,22.5-29.45S174.65,677.32,174.65,693.59Z" transform="translate(-75 -152.99)" opacity="0.1"/><path d="M222.65,638.72a45.6,45.6,0,0,0-4.9-20.61l-26.46,8.23,23.23-13.65a45.71,45.71,0,0,0-34.36-19.59,45.65,45.65,0,0,0-3.57-4.72l-38,11.83,31.17-18.33a45.73,45.73,0,0,0-72,24.39l32.55,37.47L95,618.2a45.74,45.74,0,0,0,40.93,80.7,45.92,45.92,0,0,0,29.28.81,45.74,45.74,0,0,0,55.62-44.66c0-1,0-2-.1-3A45.74,45.74,0,0,0,222.65,638.72Z" transform="translate(-75 -152.99)" fill="#4e73df"/><path d="M221.86,647.2a122.14,122.14,0,0,0-42.34-.54c-15.89,2.63-32.13,8.42-47.67,4.19-9.12-2.48-17-8.22-25.91-11.41a49.18,49.18,0,0,0-26.75-1.6,45.76,45.76,0,0,0,56.69,61.06,45.92,45.92,0,0,0,29.28.81,45.74,45.74,0,0,0,55.62-44.66c0-1,0-2-.1-3A46,46,0,0,0,221.86,647.2Z" transform="translate(-75 -152.99)" opacity="0.1"/><path 
d="M568.71,359.13l-19-.84c-4.35-.19-9.31-.13-12.21,3.11-3.09,3.45-2.28,8.87-.46,13.14s4.48,8.36,4.62,13c.21,7.3-5.78,13.08-11.21,18s-11.23,11-10.44,18.27c.62,5.67,5.14,10.05,9.6,13.59a128.25,128.25,0,0,0,21.85,14c4.43,2.24,9.15,4.26,14.12,4.31,4.36.05,8.58-1.42,12.68-2.9,7.13-2.56,14.45-5.33,19.86-10.62a39.92,39.92,0,0,0,6.9-9.54c9.35-17,11.84-38.26,4.24-56.13a31.55,31.55,0,0,0-7-10.66c-8.1-7.67-20.46-8-31.62-7.91a12.39,12.39,0,0,0-5.92,1.05c-1.77,1-3,3.23-2.18,5.08" transform="translate(-75 -152.99)" fill="#393859"/><path d="M544.39,594.37a411.28,411.28,0,0,0,2.24,60.27c.26,2.29.53,4.58,1,6.84,1.28,7,3.9,13.71,5.95,20.54s3.57,14,2.56,21.07q6.6-.69,13.22-1.13l.06-3.4c.12-6.52-2.44-12.88-1.79-19.37.62-6.14,2.6-12.14,2.51-18.31-.09-5.83-2-11.45-2.91-17.21a97.73,97.73,0,0,1-.82-11.6,146.49,146.49,0,0,1,.1-16c.84-10.82,4.06-21.6,2.57-32.35C560.92,587.48,552.5,590.65,544.39,594.37Z" transform="translate(-75 -152.99)" fill="#a0616a"/><path d="M564.36,726.39a3,3,0,0,0,1.47-.24A3.2,3.2,0,0,0,567,724a26.23,26.23,0,0,1,3.36-7.55c1.13-1.73,2.51-3.49,2.53-5.55,0-3.33-3.48-5.66-4.21-8.91a14.38,14.38,0,0,0-.62-3.08c-.82-1.75-3-2.35-4.88-2.74-2.56-.54-5.87-.76-7.14,1.52-1.08,1.94.12,4.3.28,6.51.2,3-1.58,5.76-3.5,8.07-2.25,2.72-8.16,9.23-4.56,13,1.35,1.42,4.29.94,6,1Z" transform="translate(-75 -152.99)" fill="#3f3d56"/><path d="M603,594.37a409.91,409.91,0,0,1-2.25,60.27c-.25,2.29-.52,4.58-.94,6.84-1.28,7-3.9,13.71-6,20.54s-3.56,14-2.55,21.07q-6.6-.69-13.22-1.13l-.06-3.4c-.12-6.52,2.43-12.88,1.78-19.37-.61-6.14-2.59-12.14-2.5-18.31.09-5.83,2-11.45,2.9-17.21a95.67,95.67,0,0,0,.83-11.6,146.49,146.49,0,0,0-.1-16c-.84-10.82-4.07-21.6-2.57-32.35C586.51,587.48,594.92,590.65,603,594.37Z" transform="translate(-75 -152.99)" fill="#a0616a"/><path 
d="M583.06,726.39a3,3,0,0,1-1.46-.24,3.2,3.2,0,0,1-1.21-2.15,26.23,26.23,0,0,0-3.36-7.55c-1.13-1.73-2.52-3.49-2.53-5.55,0-3.33,3.48-5.66,4.2-8.91a14.39,14.39,0,0,1,.63-3.08c.82-1.75,3-2.35,4.88-2.74,2.56-.54,5.86-.76,7.14,1.52,1.08,1.94-.13,4.3-.28,6.51-.21,3,1.58,5.76,3.5,8.07,2.25,2.72,8.16,9.23,4.56,13-1.35,1.42-4.29.94-6,1Z" transform="translate(-75 -152.99)" fill="#3f3d56"/><circle cx="492.31" cy="243.59" r="20.49" fill="#a0616a"/><path d="M555.7,415.05A7,7,0,0,1,556,418a5.74,5.74,0,0,1-1.72,2.66,30.58,30.58,0,0,1-10.15,6.77c-.4,1.29.35,2.62,1.08,3.74,2.46,3.79,5.06,7.47,7.67,11.16a74.72,74.72,0,0,0,17.43,18.77l6-7.54a48.75,48.75,0,0,0,3.48-4.73,47.18,47.18,0,0,0,3.12-6.39l7.33-17c-4.41.89-8.89-1.14-12.66-3.61a4.94,4.94,0,0,1-1.84-1.76,5,5,0,0,1-.4-2.18q-.18-5.3-.15-10.59a136.14,136.14,0,0,0-16-1.25c-2.08,0-3.48-.44-4,1.76S555.25,412.92,555.7,415.05Z" transform="translate(-75 -152.99)" fill="#a0616a"/><path d="M564.8,453.61a78.77,78.77,0,0,0-12-16,10.8,10.8,0,0,1-2.51-3.29,10,10,0,0,1-.56-3.18l-.36-5.29a1.23,1.23,0,0,0-.27-.82,1.19,1.19,0,0,0-.81-.23,6.77,6.77,0,0,0-3.71.54,10.4,10.4,0,0,0-2,1.84c-2.29,2.35-5.51,3.46-8.53,4.73a68.63,68.63,0,0,0-6.58,3.17c-1.69.93-3.43,2-4.19,3.8a9.39,9.39,0,0,0-.54,3.58L522,485a2.25,2.25,0,0,1-1.68,2.51,6.1,6.1,0,0,0-2.32,1.77,2.4,2.4,0,0,0-.09,2.75c.3.39.76.63,1.07,1,1,1.19.13,2.91-.54,4.29a9.32,9.32,0,0,0-1,5.81,5,5,0,0,0,3.92,4c.79-2.55-1-5.44,0-7.91a5.67,5.67,0,0,1,3.42-2.85,12.29,12.29,0,0,1,6.24-.68l1.2,4.37c4.31,15.74,1.32,32.65-2.61,48.49-.91,3.7-2.06,7.34-2.74,11.09-1,5.63-1,11.38-1,17.1l0,15.64c0,1.9.12,4.05,1.57,5.28s4.12,1.14,5.21,2.84c.63,1,.51,2.29,1.08,3.31a4.38,4.38,0,0,0,1.7,1.55,11.44,11.44,0,0,0,14.91-3.66c1.08-1.71,1.73-3.78,3.33-5,1.83-1.43,4.37-1.39,6.68-1.28l13.62.65a28.19,28.19,0,0,1,6.92.91,53.46,53.46,0,0,1,5.88,2.54,30.29,30.29,0,0,0,13.77,2.5,8,8,0,0,0,3.72-.9c1.3-.78,2.2-2.2,3.63-2.73,1.61-.6,3.68,0,4.93-1.17a4.47,4.47,0,0,0,1-3.17l.57-13.37c.6-13.79,1.13-28-3.42-41-1.11-3.17-2.51-6.24-3.36-9.49A93.
86,93.86,0,0,1,606,520c-1.41-9.51-4.62-18.87-4-28.46,0-.73.64-.9,1.36-1,2.54-.25,5.31.22,7.12,2,1.61,1.6,2.18,4,2.36,6.23a7,7,0,0,1-.86,4.68l3.76-2.76c1.2-.88,2.54-2.09,2.3-3.56-.71-4.37-.47-9.26-1-13.65a199.36,199.36,0,0,0-3.59-22.16A58.25,58.25,0,0,1,612,455c-.36-2.92-.16-5.87-.35-8.81a33.18,33.18,0,0,0-3.14-12,12.26,12.26,0,0,0-2-3.18,13.94,13.94,0,0,0-3.88-2.68A79.93,79.93,0,0,0,583,421.2c-1.35-.29-2.9-.51-4,.33-1.4,1.08-1.24,3.25-.62,4.91s1.57,3.33,1.3,5.08-1.76,3.13-3,4.52c-3.17,3.71-4.61,8.71-8,12.25C567.23,449.92,565.13,451.37,564.8,453.61Z" transform="translate(-75 -152.99)" fill="#ff6f61"/><path d="M552.07,386.46c1.73,1.44,3.18,3.44,5.38,3.91,5.3,1.12,9.44-7.45,14.54-5.62,1.86.67,3,2.52,4.43,3.82a8.77,8.77,0,0,0,12-.87c2.68-3.13,2.71-7.65,2.61-11.77a8.85,8.85,0,0,0-.89-4.42,7.12,7.12,0,0,0-1.77-1.86c-6.05-4.73-14.22-5.41-21.88-5.9a16,16,0,0,0-5,.21c-2.28.58-4.16,2.12-6,3.62-3.44,2.86-11.81,8.3-12.86,12.91S548.67,383.64,552.07,386.46Z" transform="translate(-75 -152.99)" fill="#393859"/><rect x="453.12" y="295.07" width="74.28" height="80.43" rx="2.61" fill="#4e73df"/><rect x="459.52" y="302.01" width="61.48" height="48.65" rx="2.61" fill="#fff"/><path 
d="M588,480.46v0a5.8,5.8,0,0,0-.27-1.46l0-.07a5.53,5.53,0,0,0-9.36-1.9,7.35,7.35,0,0,0-.44-1.54,24.69,24.69,0,0,1-4.38.16,16.21,16.21,0,0,0,2.78,1l0,.13a18.07,18.07,0,0,1-3.1-1.16l-.3-.15.33,0a24.17,24.17,0,0,0,4.66-.13,7.72,7.72,0,0,0-1.37-2.18,23.49,23.49,0,0,1-7.32-1.82l.06-.12a23.14,23.14,0,0,0,7.13,1.79,7.83,7.83,0,0,0-12.45,1.33,10.74,10.74,0,0,0-2.67-6.05,25.14,25.14,0,0,1-10.58,1.76,18.48,18.48,0,0,0,2.78,1l0,.12a18.07,18.07,0,0,1-3.1-1.16l-.3-.14.33,0a24.79,24.79,0,0,0,10.82-1.72c-.13-.14-.26-.28-.4-.41-4.29.09-8.95,0-13.18-1.5a10.8,10.8,0,0,0-5.12,9.19,25.33,25.33,0,0,0,1.52,7.6,32.44,32.44,0,0,0,1.52,3.66c.3.6.61,1.18.94,1.74h0a20.87,20.87,0,0,0,1.31,2l.26.33a11.48,11.48,0,0,0,1.64,1.69h0a4.57,4.57,0,0,0,.36.29,7.58,7.58,0,0,0,1.05.66,4.87,4.87,0,0,0,2,.53h.17v0h29.27v0h0a1.62,1.62,0,0,0,.49-.08c1-.37,2-1.85,2.79-3.71.17-.41.34-.84.5-1.29s.27-.79.4-1.19c.38-1.22.69-2.47.92-3.6a18.77,18.77,0,0,0,.43-3.35Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.3"/><path d="M560.65,467.65a10.72,10.72,0,0,0-12.91-1.45C551.88,467.68,556.44,467.73,560.65,467.65Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.3"/><path d="M584.63,491.7l-4.52-7.82a.14.14,0,0,0-.25,0l-4.52,7.82a.15.15,0,0,0,.13.22h1.35a.14.14,0,0,1,.14.14v1.68a.14.14,0,0,0,.14.14h1.76a.15.15,0,0,0,.15-.14v-.81a.15.15,0,0,1,.14-.15h1.67a.15.15,0,0,1,.15.15v.81a.14.14,0,0,0,.14.14h1.76a.13.13,0,0,0,.13-.1.06.06,0,0,0,0,0v-1.68a.15.15,0,0,1,.15-.14h1.34A.15.15,0,0,0,584.63,491.7Zm-6.3.87a.13.13,0,0,1-.1,0h-.64a.14.14,0,0,1-.14-.14.15.15,0,0,1,0-.1.14.14,0,0,1,.1-.05h.64a.15.15,0,0,1,.14.15A.13.13,0,0,1,578.33,492.57Zm4.15,0a.13.13,0,0,1-.1,0h-.64a.14.14,0,0,1-.14-.14.15.15,0,0,1,0-.1.14.14,0,0,1,.1-.05h.64a.15.15,0,0,1,.14.15A.13.13,0,0,1,582.48,492.57Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.3"/><path 
d="M556.7,491.77,552.19,484a.14.14,0,0,0-.25,0L548,490.71a11.91,11.91,0,0,0,1.64,1.68h.63a.15.15,0,0,1,.15.15.14.14,0,0,1-.05.1.15.15,0,0,1-.1,0H550a8.55,8.55,0,0,0,1.05.65V493a.15.15,0,0,1,.14-.15h1.68a.15.15,0,0,1,.14.15v.8a.35.35,0,0,0,0,.06.17.17,0,0,0,.13.09h1.76a.14.14,0,0,0,.12-.06.12.12,0,0,0,0-.09v-1.67a.14.14,0,0,1,.14-.14h1.35A.14.14,0,0,0,556.7,491.77Zm-3,.66a.15.15,0,0,1,.1,0h.63a.15.15,0,0,1,.15.15.14.14,0,0,1-.05.1.15.15,0,0,1-.1,0h-.63a.15.15,0,0,1-.15-.14A.16.16,0,0,1,553.72,492.43Zm-2.84-2.89a.15.15,0,0,1-.15-.15V488.3a.15.15,0,0,1,.15-.14h2.36a.15.15,0,0,1,.15.14v1.09a.15.15,0,0,1-.15.15Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.3"/><path d="M574.56,477.71H569v-1.2a.29.29,0,0,0-.29-.29H567v-1.29a.18.18,0,0,0-.18-.18h-2.12a.18.18,0,0,0-.18.18v1.29h-1.93a.29.29,0,0,0-.29.29v1.2H557a.06.06,0,0,0-.06.06v16.06a.06.06,0,0,0,.06.05h3.54s.06,0,.06-.05v-2.16a.06.06,0,0,1,.06-.06h1.5a.06.06,0,0,1,.06.06v2.16s0,.05.06.05h7a.06.06,0,0,0,.06-.05v-2.16s0-.06,0-.06h1.5a.06.06,0,0,1,.06.06v2.16a.06.06,0,0,0,.06.05h3.54a.06.06,0,0,0,.06-.05V477.77A.06.06,0,0,0,574.56,477.71ZM560,490.86a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.05h-2.19a.06.06,0,0,1-.06-.05v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.94a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.74a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.74a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.05h2.19a.06.06,0,0,1,.06.05Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm5.19,11.59a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2
.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.05h-2.19a.06.06,0,0,1-.06-.05v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.94a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.74a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.74a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.05h2.19a.06.06,0,0,1,.06.05Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm3.54,11.59a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.05-.06v-.75a.06.06,0,0,1,.05-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.05-.06v-.75a.06.06,0,0,1,.05-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93s0,.05-.06.05h-2.19a0,0,0,0,1-.05-.05v-.75a.06.06,0,0,1,.05-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.94a.05.05,0,0,1-.06.06h-2.19s-.05,0-.05-.06v-.74s0-.06.05-.06h2.19a.05.05,0,0,1,.06.06Zm0-1.93a.05.05,0,0,1-.06.06h-2.19s-.05,0-.05-.06v-.74s0-.06.05-.06h2.19a.05.05,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.05-.06v-.75a0,0,0,0,1,.05-.05h2.19s.06,0,.06.05Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.05-.06v-.75a.06.06,0,0,1,.05-.06h2.19a.06.06,0,0,1,.06.06Zm5.19,11.59a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.05h-2.19a.06.06,0,0,1-.06-.05v-.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.94a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.74a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.74a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-.75a.06.06,0,0,1,.06-.05h2.19a.06.06,0,0,1,.06.05Zm0-1.93a.06.06,0,0,1-.06.06h-2.19a.06.06,0,0,1-.06-.06v-
.75a.06.06,0,0,1,.06-.06h2.19a.06.06,0,0,1,.06.06Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.3"/><path d="M558.64,473.36a1.38,1.38,0,0,0-.73.08,1,1,0,0,1-.88,0,1.23,1.23,0,0,0-1,0,.61.61,0,0,1-.3.08c-.42,0-.77-.43-.84-1a.77.77,0,0,0,.21-.23,1.28,1.28,0,0,1,1-.65,1.27,1.27,0,0,1,1,.65.81.81,0,0,0,.69.38h0A.87.87,0,0,1,558.64,473.36Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><path d="M559.91,471.62l-.67.43.41-.75a.7.7,0,0,0-.41-.14h-.15l-.23.14.09-.18a.79.79,0,0,1-.4-.3l-.41.26.26-.47a1.2,1.2,0,0,0-.92-.47,1.28,1.28,0,0,0-1.05.65.79.79,0,0,1-.69.38h0c-.47,0-.85.53-.85,1.19s.38,1.19.85,1.19a.73.73,0,0,0,.3-.07,1.2,1.2,0,0,1,1,0,1.12,1.12,0,0,0,.44.09,1.08,1.08,0,0,0,.44-.09,1.21,1.21,0,0,1,1,0,.73.73,0,0,0,.3.07c.47,0,.85-.53.85-1.19A1.52,1.52,0,0,0,559.91,471.62Z" transform="translate(-75 -152.99)" fill="#4e73df" opacity="0.1"/><ellipse cx="450.81" cy="350.66" rx="6.15" ry="10.25" fill="#a0616a"/><ellipse cx="531.76" cy="345.53" rx="6.15" ry="10.25" fill="#a0616a"/></svg>
\ No newline at end of file
diff --git a/base/static/img/undraw_profile.svg b/base/static/img/undraw_profile.svg
deleted file mode 100755
index 980234178..000000000
--- a/base/static/img/undraw_profile.svg
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 25.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
-	 viewBox="0 0 108.3 108.3" style="enable-background:new 0 0 108.3 108.3;" xml:space="preserve">
-<style type="text/css">
-	.st0{fill:#E6E6E6;}
-	.st1{fill:#FFB8B8;}
-	.st2{fill:#575A89;}
-	.st3{fill:#2F2E41;}
-</style>
-<g id="Group_45" transform="translate(-191 -152.079)">
-	<g id="Group_30" transform="translate(282.246 224.353)">
-		<path id="Path_944" class="st0" d="M17.1-18.1c0,10.5-3,20.8-8.8,29.6c-1.2,1.9-2.5,3.6-4,5.3c-3.4,4-7.3,7.4-11.6,10.3
-			c-1.2,0.8-2.4,1.5-3.6,2.2c-6.5,3.6-13.7,5.8-21,6.5c-1.7,0.2-3.4,0.2-5.1,0.2c-4.7,0-9.4-0.6-14-1.8c-2.6-0.7-5.1-1.6-7.6-2.6
-			c-1.3-0.5-2.5-1.1-3.7-1.8c-2.9-1.5-5.6-3.3-8.2-5.3c-1.2-0.9-2.3-1.9-3.4-2.9C-95.8,1.3-97.1-33-76.8-54.9s54.6-23.3,76.5-2.9
-			C10.8-47.6,17.1-33.2,17.1-18.1L17.1-18.1z"/>
-		<path id="Path_945" class="st1" d="M-50.2-13.2c0,0,4.9,13.7,1.1,21.4s6,16.4,6,16.4s25.8-13.1,22.5-19.7s-8.8-15.3-7.7-20.8
-			L-50.2-13.2z"/>
-		<ellipse id="Ellipse_185" class="st1" cx="-40.6" cy="-25.5" rx="17.5" ry="17.5"/>
-		<path id="Path_946" class="st2" d="M-51.1,34.2c-2.6-0.7-5.1-1.6-7.6-2.6l0.5-13.3l4.9-11c1.1,0.9,2.3,1.6,3.5,2.3
-			c0.3,0.2,0.6,0.3,0.9,0.5c4.6,2.2,12.2,4.2,19.5-1.3c2.7-2.1,5-4.7,6.7-7.6L-8.8,9l0.7,8.4l0.8,9.8c-1.2,0.8-2.4,1.5-3.6,2.2
-			c-6.5,3.6-13.7,5.8-21,6.5c-1.7,0.2-3.4,0.2-5.1,0.2C-41.8,36.1-46.5,35.4-51.1,34.2z"/>
-		<path id="Path_947" class="st2" d="M-47.7-0.9L-47.7-0.9l-0.7,7.2l-0.4,3.8l-0.5,5.6l-1.8,18.5c-2.6-0.7-5.1-1.6-7.6-2.6
-			c-1.3-0.5-2.5-1.1-3.7-1.8c-2.9-1.5-5.6-3.3-8.2-5.3l-1.9-9l0.1-0.1L-47.7-0.9z"/>
-		<path id="Path_948" class="st2" d="M-10.9,29.3c-6.5,3.6-13.7,5.8-21,6.5c0.4-6.7,1-13.1,1.6-18.8c0.3-2.9,0.7-5.7,1.1-8.2
-			c1.2-8,2.5-13.5,3.4-14.2l6.1,4L4.9,7.3l-0.5,9.5c-3.4,4-7.3,7.4-11.6,10.3C-8.5,27.9-9.7,28.7-10.9,29.3z"/>
-		<path id="Path_949" class="st2" d="M-70.5,24.6c-1.2-0.9-2.3-1.9-3.4-2.9l0.9-6.1l0.7-0.1l3.1-0.4l6.8,14.8
-			C-65.2,28.3-67.9,26.6-70.5,24.6L-70.5,24.6z"/>
-		<path id="Path_950" class="st2" d="M8.3,11.5c-1.2,1.9-2.5,3.6-4,5.3c-3.4,4-7.3,7.4-11.6,10.3c-1.2,0.8-2.4,1.5-3.6,2.2l-0.6-2.8
-			l3.5-9.1l4.2-11.1l8.8,1.1C6.1,8.7,7.2,10.1,8.3,11.5z"/>
-		<path id="Path_951" class="st3" d="M-23.9-41.4c-2.7-4.3-6.8-7.5-11.6-8.9l-3.6,2.9l1.4-3.3c-1.2-0.2-2.3-0.2-3.5-0.2l-3.2,4.1
-			l1.3-4c-5.6,0.7-10.7,3.7-14,8.3c-4.1,5.9-4.8,14.1-0.8,20c1.1-3.4,2.4-6.6,3.5-9.9c0.9,0.1,1.7,0.1,2.6,0l1.3-3.1l0.4,3
-			c4.2-0.4,10.3-1.2,14.3-1.9l-0.4-2.3l2.3,1.9c1.2-0.3,1.9-0.5,1.9-0.7c2.9,4.7,5.8,7.7,8.8,12.5C-22.1-29.8-20.2-35.3-23.9-41.4z"
-			/>
-		<ellipse id="Ellipse_186" class="st1" cx="-24.9" cy="-26.1" rx="1.2" ry="2.4"/>
-	</g>
-</g>
-</svg>
diff --git a/base/static/img/undraw_profile_1.svg b/base/static/img/undraw_profile_1.svg
deleted file mode 100755
index fcc91c706..000000000
--- a/base/static/img/undraw_profile_1.svg
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 25.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<svg version="1.1" id="_x38_8ce59e9-c4b8-4d1d-9d7a-ce0190159aa8"
-	 xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 231.8 231.8"
-	 style="enable-background:new 0 0 231.8 231.8;" xml:space="preserve">
-<style type="text/css">
-	.st0{opacity:0.5;}
-	.st1{fill:url(#SVGID_1_);}
-	.st2{fill:#F5F5F5;}
-	.st3{fill:#333333;}
-	.st4{fill:#4E73DF;}
-	.st5{opacity:0.1;enable-background:new    ;}
-	.st6{fill:#BE7C5E;}
-</style>
-<g class="st0">
-	
-		<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="115.89" y1="525.2" x2="115.89" y2="756.98" gradientTransform="matrix(1 0 0 -1 0 756.98)">
-		<stop  offset="0" style="stop-color:#808080;stop-opacity:0.25"/>
-		<stop  offset="0.54" style="stop-color:#808080;stop-opacity:0.12"/>
-		<stop  offset="1" style="stop-color:#808080;stop-opacity:0.1"/>
-	</linearGradient>
-	<circle class="st1" cx="115.9" cy="115.9" r="115.9"/>
-</g>
-<circle class="st2" cx="115.9" cy="115.3" r="113.4"/>
-<path class="st3" d="M71.6,116.3c0,0-12.9,63.4-19.9,59.8c0,0,67.7,58.5,127.5,0c0,0-10.5-44.6-25.7-59.8H71.6z"/>
-<path class="st4" d="M116.2,229c22.2,0,43.9-6.5,62.4-18.7c-4.2-22.8-20.1-24.1-20.1-24.1H70.8c0,0-15,1.2-19.7,22.2
-	C70.1,221.9,92.9,229.1,116.2,229z"/>
-<circle class="st3" cx="115" cy="112.8" r="50.3"/>
-<path class="st5" d="M97.3,158.4h35.1l0,0v28.1c0,9.7-7.8,17.5-17.5,17.5l0,0c-9.7,0-17.5-7.9-17.5-17.5L97.3,158.4L97.3,158.4z"/>
-<path class="st6" d="M100.7,157.1h28.4c1.9,0,3.4,1.5,3.4,3.3v0v24.7c0,9.7-7.8,17.5-17.5,17.5l0,0c-9.7,0-17.5-7.9-17.5-17.5v0
-	v-24.7C97.4,158.6,98.9,157.1,100.7,157.1z"/>
-<path class="st5" d="M97.4,171.6c11.3,4.2,23.8,4.3,35.1,0.1v-4.3H97.4V171.6z"/>
-<circle class="st6" cx="115" cy="123.7" r="50.3"/>
-<path class="st3" d="M66.9,104.6h95.9c0,0-8.2-38.7-44.4-36.2S66.9,104.6,66.9,104.6z"/>
-<ellipse class="st6" cx="65.8" cy="121.5" rx="4.7" ry="8.8"/>
-<ellipse class="st6" cx="164" cy="121.5" rx="4.7" ry="8.8"/>
-<path class="st5" d="M66.9,105.9h95.9c0,0-8.2-38.7-44.4-36.2S66.9,105.9,66.9,105.9z"/>
-</svg>
diff --git a/base/static/img/undraw_profile_2.svg b/base/static/img/undraw_profile_2.svg
deleted file mode 100755
index 488d1bd67..000000000
--- a/base/static/img/undraw_profile_2.svg
+++ /dev/null
@@ -1,44 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 25.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<svg version="1.1" id="_x38_8ce59e9-c4b8-4d1d-9d7a-ce0190159aa8"
-	 xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 231.8 231.8"
-	 style="enable-background:new 0 0 231.8 231.8;" xml:space="preserve">
-<style type="text/css">
-	.st0{opacity:0.5;}
-	.st1{fill:url(#SVGID_1_);}
-	.st2{fill:#F5F5F5;}
-	.st3{fill:#4E73DF;}
-	.st4{fill:#72351C;}
-	.st5{opacity:0.1;enable-background:new    ;}
-	.st6{fill:#FDA57D;}
-</style>
-<g class="st0">
-	
-		<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="115.89" y1="526.22" x2="115.89" y2="758" gradientTransform="matrix(1 0 0 -1 0 758)">
-		<stop  offset="0" style="stop-color:#808080;stop-opacity:0.25"/>
-		<stop  offset="0.54" style="stop-color:#808080;stop-opacity:0.12"/>
-		<stop  offset="1" style="stop-color:#808080;stop-opacity:0.1"/>
-	</linearGradient>
-	<circle class="st1" cx="115.9" cy="115.9" r="115.9"/>
-</g>
-<circle class="st2" cx="116.1" cy="115.1" r="113.4"/>
-<path class="st3" d="M116.2,229c22.2,0,43.9-6.5,62.4-18.7c-4.2-22.9-20.1-24.2-20.1-24.2H70.8c0,0-15,1.2-19.7,22.2
-	C70.1,221.9,92.9,229.1,116.2,229z"/>
-<circle class="st4" cx="115" cy="112.8" r="54.8"/>
-<path class="st5" d="M97.3,158.4h35.1l0,0v28.1c0,9.7-7.8,17.6-17.5,17.6c0,0,0,0,0,0l0,0c-9.7,0-17.5-7.9-17.5-17.5L97.3,158.4
-	L97.3,158.4z"/>
-<path class="st6" d="M100.7,157.1h28.4c1.9,0,3.3,1.5,3.3,3.4v24.7c0,9.7-7.9,17.5-17.5,17.5l0,0c-9.7,0-17.5-7.9-17.5-17.5v-24.7
-	C97.3,158.6,98.8,157.1,100.7,157.1L100.7,157.1z"/>
-<path class="st5" d="M97.4,171.6c11.3,4.2,23.8,4.3,35.1,0.1v-4.3H97.4V171.6z"/>
-<circle class="st6" cx="115" cy="123.7" r="50.3"/>
-<path class="st5" d="M79.2,77.9c0,0,21.2,43,81,18l-13.9-21.8l-24.7-8.9L79.2,77.9z"/>
-<path class="st4" d="M79.2,77.3c0,0,21.2,43,81,18l-13.9-21.8l-24.7-8.9L79.2,77.3z"/>
-<path class="st4" d="M79,74.4c1.4-4.4,3.9-8.4,7.2-11.7c9.9-9.8,26.1-11.8,34.4-23c1.8,3.1,0.7,7.1-2.4,8.9
-	c-0.2,0.1-0.4,0.2-0.6,0.3c8-0.1,17.2-0.8,21.7-7.3c2.3,5.3,1.3,11.4-2.5,15.7c7.1,0.3,14.6,5.1,15.1,12.2c0.3,4.7-2.6,9.1-6.5,11.9
-	s-8.5,3.9-13.1,4.9C118.8,89.2,70.3,101.6,79,74.4z"/>
-<path class="st4" d="M165.3,124.1H164L138,147.2c-25-11.7-43.3,0-43.3,0l-27.2-22.1l-2.7,0.3c0.8,27.8,23.9,49.6,51.7,48.9
-	C143.6,173.5,165.3,151.3,165.3,124.1L165.3,124.1z M115,156.1c-9.8,0-17.7-2-17.7-4.4s7.9-4.4,17.7-4.4s17.7,2,17.7,4.4
-	S124.7,156.1,115,156.1L115,156.1z"/>
-<ellipse class="st6" cx="64.7" cy="123.6" rx="4.7" ry="8.8"/>
-<ellipse class="st6" cx="165.3" cy="123.6" rx="4.7" ry="8.8"/>
-</svg>
diff --git a/base/static/img/undraw_profile_3.svg b/base/static/img/undraw_profile_3.svg
deleted file mode 100755
index eecb335ba..000000000
--- a/base/static/img/undraw_profile_3.svg
+++ /dev/null
@@ -1,47 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 25.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<svg version="1.1" id="_x38_8ce59e9-c4b8-4d1d-9d7a-ce0190159aa8"
-	 xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 231.8 231.8"
-	 style="enable-background:new 0 0 231.8 231.8;" xml:space="preserve">
-<style type="text/css">
-	.st0{opacity:0.5;}
-	.st1{fill:url(#SVGID_1_);}
-	.st2{fill:#F5F5F5;}
-	.st3{fill:#4E73DF;}
-	.st4{fill:#F55F44;}
-	.st5{opacity:0.1;enable-background:new    ;}
-	.st6{fill:#FDA57D;}
-	.st7{fill:#333333;}
-</style>
-<g class="st0">
-	
-		<linearGradient id="SVGID_1_" gradientUnits="userSpaceOnUse" x1="115.89" y1="9.36" x2="115.89" y2="241.14" gradientTransform="matrix(1 0 0 -1 0 241.14)">
-		<stop  offset="0" style="stop-color:#808080;stop-opacity:0.25"/>
-		<stop  offset="0.54" style="stop-color:#808080;stop-opacity:0.12"/>
-		<stop  offset="1" style="stop-color:#808080;stop-opacity:0.1"/>
-	</linearGradient>
-	<circle class="st1" cx="115.9" cy="115.9" r="115.9"/>
-</g>
-<circle class="st2" cx="116.1" cy="115.1" r="113.4"/>
-<path class="st3" d="M116.2,229c22.2,0,43.8-6.5,62.3-18.7c-4.2-22.8-20.1-24.2-20.1-24.2H70.8c0,0-15,1.2-19.7,22.2
-	C70.1,221.9,92.9,229.1,116.2,229z"/>
-<circle class="st4" cx="115" cy="112.8" r="54.8"/>
-<path class="st5" d="M97.3,158.4h35.1l0,0v28.1c0,9.7-7.9,17.5-17.5,17.5l0,0l0,0c-9.7,0-17.5-7.9-17.5-17.5l0,0L97.3,158.4
-	L97.3,158.4z"/>
-<path class="st6" d="M100.7,157.1h28.4c1.9,0,3.4,1.5,3.4,3.4l0,0v24.7c0,9.7-7.9,17.5-17.5,17.5l0,0l0,0c-9.7,0-17.5-7.9-17.5-17.5
-	l0,0v-24.7C97.4,158.6,98.8,157.1,100.7,157.1L100.7,157.1L100.7,157.1z"/>
-<path class="st5" d="M97.4,171.6c11.3,4.2,23.8,4.3,35.1,0.1v-4.3H97.4V171.6z"/>
-<circle class="st6" cx="115" cy="123.7" r="50.3"/>
-<circle class="st4" cx="114.9" cy="57.1" r="20.2"/>
-<circle class="st4" cx="114.9" cy="37.1" r="13.3"/>
-<path class="st4" d="M106.2,68.2c-9.9-4.4-14.5-15.8-10.5-25.9c-0.1,0.3-0.3,0.6-0.4,0.9c-4.6,10.2,0,22.2,10.2,26.8
-	s22.2,0,26.8-10.2c0.1-0.3,0.2-0.6,0.4-0.9C127.6,68.5,116,72.6,106.2,68.2z"/>
-<path class="st5" d="M79.2,77.9c0,0,21.2,43,81,18l-13.9-21.8l-24.7-8.9L79.2,77.9z"/>
-<path class="st4" d="M79.2,77.3c0,0,21.2,43,81,18l-13.9-21.8l-24.7-8.9L79.2,77.3z"/>
-<path class="st7" d="M95.5,61.6c13-1,26.1-1,39.2,0C134.7,61.6,105.8,64.3,95.5,61.6z"/>
-<path class="st4" d="M118,23c-1,0-2,0-3,0.2h0.8c7.3,0.2,13.1,6.4,12.8,13.7c-0.2,6.2-4.7,11.5-10.8,12.6
-	c7.3,0.1,13.3-5.8,13.4-13.2C131.2,29.1,125.3,23.1,118,23L118,23z"/>
-<ellipse class="st6" cx="64.7" cy="123.6" rx="4.7" ry="8.8"/>
-<ellipse class="st6" cx="165.3" cy="123.6" rx="4.7" ry="8.8"/>
-<polygon class="st4" points="76,78.6 85.8,73.5 88,81.6 82,85.7 "/>
-</svg>
diff --git a/base/static/img/undraw_rocket.svg b/base/static/img/undraw_rocket.svg
deleted file mode 100755
index 45426141b..000000000
--- a/base/static/img/undraw_rocket.svg
+++ /dev/null
@@ -1,39 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!-- Generator: Adobe Illustrator 25.0.0, SVG Export Plug-In . SVG Version: 6.00 Build 0)  -->
-<svg version="1.1" id="b759170a-51c3-4e2f-999d-77dec9fd6d11"
-	 xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px" viewBox="0 0 650.9 610.5"
-	 style="enable-background:new 0 0 650.9 610.5;" xml:space="preserve">
-<style type="text/css">
-	.st0{fill:#AFC0E0;}
-	.st1{opacity:0.2;fill:#FFFFFF;enable-background:new    ;}
-	.st2{opacity:0.1;enable-background:new    ;}
-	.st3{fill:#E3E8F4;}
-	.st4{fill:#4E73DF;}
-</style>
-<path class="st0" d="M174,321c-2-1.6-4.2-3-6.6-4.2c-51.8-26.2-157,67.8-157,67.8L0,372.7c0,0,42.1-43.8,92.4-117.3
-	c45.2-66.1,150.7-51.8,171.4-48.3c2.3,0.4,3.6,0.7,3.6,0.7C298.7,288.3,174,321,174,321z"/>
-<path class="st1" d="M269.4,213.9c-0.6-2-1.3-4-2-6c0,0-1.2-0.2-3.6-0.7c-20.7-3.5-126.2-17.8-171.4,48.3C42.1,329,0,372.7,0,372.7
-	l5.9,6.7c0,0,42.1-43.8,92.4-117.3C143.3,196.3,248,210.2,269.4,213.9z"/>
-<path class="st0" d="M337.7,533.4c-79.2,40.8-127.8,77.1-127.8,77.1l-10.5-11.9c0,0,111.1-96.8,85.3-150.9c-0.5-1.2-1.2-2.3-1.9-3.4
-	c0,0,47.9-119.6,123.9-78.5c0,0,0.1,1,0.2,2.9C407.8,387.8,409.7,496.3,337.7,533.4z"/>
-<path class="st2" d="M174,321c-2-1.6-4.2-3-6.6-4.2c29.3-38.9,61.5-75.5,96.3-109.7c2.3,0.4,3.6,0.7,3.6,0.7
-	C298.7,288.3,174,321,174,321z"/>
-<path class="st2" d="M406.9,368.6c-38.6,29.6-79.4,56.1-122.3,79.1c-0.5-1.2-1.2-2.3-1.9-3.4c0,0,47.9-119.6,123.9-78.5
-	C406.7,365.7,406.8,366.7,406.9,368.6z"/>
-<path class="st3" d="M263.6,455.5c-20.3,10.4-41.6,20.5-64,30.2c-33.6,14.6-51.5-2.2-80.7-91.5c0,0,12.5-22.5,37.2-57
-	c54.3-75.8,167.5-209.1,336.1-286.7C542.7,27.1,596.1,10.1,650.9,0c0,0-9.1,68.8-62,160.1S439.1,365.3,263.6,455.5z"/>
-<circle class="st0" cx="435.6" cy="199.7" r="71.6"/>
-<path class="st4" d="M469.2,237.9c-21,18.6-53.1,16.6-71.7-4.5c-7.8-8.8-12.2-20-12.7-31.8c-0.2-4.7,0.3-9.4,1.4-14
-	c0.5-2,1.1-4.1,1.9-6c2.9-7.7,7.7-14.5,13.8-19.9c0.3-0.3,0.6-0.5,0.9-0.8c17.1-14.4,41.5-15.9,60.3-3.8c3.5,2.3,6.7,4.9,9.5,7.9
-	l1,1.1C492.2,187.2,490.2,219.3,469.2,237.9C469.2,237.8,469.2,237.9,469.2,237.9z"/>
-<path class="st0" d="M588.9,160.1c-83-35.2-96.8-109.6-96.8-109.6C542.7,27,596.1,10.1,650.9,0C650.9,0,641.8,68.8,588.9,160.1z"/>
-<path class="st0" d="M263.6,455.5c-13.7,7.1-27.9,13.9-42.6,20.7c-7,3.2-14.1,6.4-21.4,9.5c-10.9,4.7-51.5-2.2-80.7-91.5
-	c0,0,4.1-7.3,12.1-20c6.1-9.6,14.5-22.2,25.1-37c0,0,11,33.2,41.1,67.3C215.8,425.7,238.4,443,263.6,455.5z"/>
-<path class="st3" d="M221,476.2c-7,3.2-14.1,6.4-21.4,9.5c-10.9,4.7-51.5-2.2-80.7-91.5c0,0,4.1-7.3,12.1-20
-	C131,374.2,170.2,456.9,221,476.2z"/>
-<path class="st1" d="M463.2,157l-0.1,0l-60.1,3.9c-0.3,0.3-0.6,0.5-0.9,0.8c-6.2,5.4-10.9,12.3-13.8,19.9l84.5-16.6L463.2,157z"/>
-<path class="st1" d="M438.8,194.3l-53.9,7.3c-0.2-4.7,0.3-9.4,1.4-14l52.8,1.4L438.8,194.3z"/>
-<path class="st1" d="M131.7,408.7c0,0,12.5-22.5,37.2-57C223.2,276,336.4,142.7,504.9,65c45.6-21.1,93.3-36.9,142.5-47.3
-	C650.1,6.4,650.9,0,650.9,0c-54.8,10.1-108.2,27-158.7,50.5c-168.6,77.7-281.8,211-336.1,286.7c-24.7,34.4-37.2,57-37.2,57
-	c11.5,35.3,26.6,57,40.5,70.3C149.4,451.4,139.7,433.3,131.7,408.7z"/>
-</svg>
diff --git a/base/static/js/click_reset.js b/base/static/js/click_reset.js
deleted file mode 100755
index d46a1afca..000000000
--- a/base/static/js/click_reset.js
+++ /dev/null
@@ -1,29 +0,0 @@
-$(document).ready(function () {
-    //Highlight clicked row
-    document.getElementById('reset_div').addEventListener('click', function () {
-        // on click reset the graph
-        // if reset button exists, hide it
-        var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
-        $("#run_counterfactual_loader").show()
-        $("#reset").remove()
-        $.ajax({
-            method: 'POST',
-            url: '',
-            headers: { 'X-CSRFToken': csrftoken },
-            data: {'action': "reset_graph" },
-            success: function (ret) {
-                $("#run_counterfactual_loader").hide()
-                if ($("#og_cf_row") && $("#og_cf_headers")) {
-                    $("#og_cf_row").hide()
-                    $("#og_cf_headers").hide()
-                }
-                ret = JSON.parse(ret)
-                fig = ret["fig"]
-                document.getElementById("tsne").innerHTML = "";
-                $("#tsne").append(fig)
-            },
-            error: function (ret) {
-            }
-        });
-    });
-});
diff --git a/base/static/js/counterfactuals.js b/base/static/js/counterfactuals.js
index 21a372534..e940a73df 100755
--- a/base/static/js/counterfactuals.js
+++ b/base/static/js/counterfactuals.js
@@ -308,6 +308,7 @@ $(document).ready(function () {
                 });
                 data_to_pass = { 'action': "cf", "features_to_vary": JSON.stringify(features_to_vary), "model_name": model_name }
             }
+            
             // hide button and original point row
             // replace with loader
             $("#cfbtn_loader").show()
diff --git a/base/static/js/home.js b/base/static/js/home.js
new file mode 100755
index 000000000..01b18224b
--- /dev/null
+++ b/base/static/js/home.js
@@ -0,0 +1,613 @@
+import { create_dataframe, create_selection, create_uploaded_file_radio, showSuccessMessage, showLoader, clearPreviousContent, resetContainers } from './methods.js';
+
+$(document).ready(function () {
+
+    // Add visibility to fade-in elements on scroll
+    const fadeElements = document.querySelectorAll('.fade-in');
+
+    const elementInView = (el, percentageScroll = 100) => {
+        const elementTop = el.getBoundingClientRect().top;
+        return elementTop <= (window.innerHeight || document.documentElement.clientHeight) * (percentageScroll / 100);
+    };
+
+    const displayFadeElement = (element) => {
+        element.classList.add('visible');
+    };
+
+    const handleFadeAnimation = () => {
+        fadeElements.forEach((el) => {
+            if (elementInView(el)) {
+                displayFadeElement(el);
+            }
+        });
+    };
+
+    window.addEventListener('scroll', () => {
+        handleFadeAnimation();
+    });
+
+    // Initial check for elements already in view
+    handleFadeAnimation();
+
+    function fetchDatasetData(df_name) {
+        const csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+        showLoader(true);
+
+        $.ajax({
+            method: 'POST',
+            url: '',
+            headers: { 'X-CSRFToken': csrftoken },
+            data: { 'action': "dataset", 'df_name': df_name },
+            success: function (values) {
+                showLoader(false);
+                handleSuccessResponse(values);
+                $("#new_or_load").show();
+            },
+            error: function (ret) {
+                console.error("Failed to fetch dataset:", ret);
+            }
+        });
+    }
+
+    function handleSuccessResponse(values) {
+        if (!values) return;
+
+        clearPreviousContent();
+        const ret = JSON.parse(values);
+        const datasetType = ret["dataset_type"];
+
+        if (datasetType === "tabular") {
+            setupTabularDataset(ret);
+        } else if (datasetType === "timeseries") {
+            setupTimeseriesDataset(ret);
+        }
+    }
+
+    function setupTabularDataset(ret) {
+        const { data_to_display: df, fig, features, feature1, feature2, labels, curlabel } = ret;
+
+        const selection1 = create_selection(features, "feature1", null, feature1);
+        const selection2 = create_selection(features, "feature2", null, feature2);
+        const selection3 = create_selection(labels, "label", null, curlabel);
+
+        const tb = create_dataframe(df, "df_container");
+
+        $("#model_container, #df, #df_stats").fadeIn(200);
+        $("#df_div").append(tb);
+        $("#selection").append(selection1, selection2, selection3);
+
+        const figDiv = $("<div>", { id: 'stats_container', class: "plotly_fig" }).html(fig);
+        $("#stats_div").append(figDiv);
+    }
+
+    function setupTimeseriesDataset(ret) {
+        const { fig, fig1 } = ret;
+
+        const figDiv = $("<div>", { id: 'ts_confidence_container', class: "plotly_fig" }).html(fig);
+        const figDiv1 = $("<div>", { id: 'ts_stats_container', class: "plotly_fig" }).html(fig1);
+
+        $("#ts_stats, #ts_confidence").fadeIn(200);
+        $("#ts_stats_div").append(figDiv);
+        $("#ts_confidence_div").append(figDiv1);
+    }
+
+    $('.btn-dataset').click(function (e) {
+        const df_name = $(this).is('#upload') ? "upload" : $(this).attr('id');
+        $("#new_or_load_cached").hide();
+        resetContainers();
+        $("#upload_col").toggle(df_name === "upload");
+        $("#timeseries-datasets").toggle(df_name === "timeseries");
+
+        $(this).toggleClass("active").siblings().removeClass("active");
+        $(this).addClass("active");
+
+        const timeseries_dataset = df_name === "timeseries" ? $("input:radio[name=timeseries_dataset]:checked").val() : "";
+        if (timeseries_dataset || (df_name !== "timeseries")) {
+            fetchDatasetData(timeseries_dataset || df_name);
+        }
+    });
+});
+
+document.getElementById("viewModelsButton").addEventListener("click", function () {
+    // Prompt or redirect the user to the pre-trained models section
+    window.location.href = "/charts.html"; // Replace with the actual URL
+});
+
+$(document).ready(function () {
+    $('#timeseries-datasets').change(function () {
+        if ($("input[name=timeseries_dataset]:checked").length > 0) {
+
+            var timeseries_dataset = $("input:radio[name=timeseries_dataset]:checked").val();
+
+            $("#df_container").hide();
+            $("#stats_container").hide();
+            $("#figs").hide();
+
+            $("#ts_confidence_cached").hide()
+            $("#ts_stats_cached").hide()
+
+            $("#ts_confidence").hide()
+            $("#ts_stats").hide()
+            var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+
+            $("#loader_ds").show();
+            $("#loader_stats").show();
+
+            $("#new_or_load").hide();
+            $("#new_or_load_cached").hide();
+
+            $.ajax({
+                method: 'POST',
+                url: '',
+                headers: { 'X-CSRFToken': csrftoken, },
+                data: { 'action': "timeseries-dataset", 'timeseries_dataset': timeseries_dataset },
+                success: function (values) {
+                    $("#loader_ds").hide();
+                    $("#loader_stats").hide();
+                    // fetch data
+                    // remove data if already displayed
+                    if (document.getElementById("df_container")) {
+                        $("#pretrained_radio").remove();
+                        $("#df_container").remove();
+                        $("#stats_container").remove();
+                        $("#feature1").remove();
+                        $("#feature2").remove();
+                        $("#label").remove();
+                    }
+
+                    $("#new_or_load").show();
+
+                    if (document.getElementById("ts_confidence_container")) {
+                        $("#ts_confidence_container").remove();
+                        $("#ts_stats_container").remove();
+                    }
+
+                    var ret = JSON.parse(values)
+                    var dataset_type = ret["dataset_type"]
+
+                    if (values) {
+                        // timeseries
+                        // var feature = ret["feature"]
+                        var fig = ret["fig"]
+                        var fig1 = ret["fig1"]
+
+                        var iDiv = document.createElement('div');
+                        iDiv.id = 'ts_confidence_container';
+                        iDiv.innerHTML = fig;
+                        iDiv.setAttribute("class", "plotly_fig")
+
+                        var iDiv1 = document.createElement('div');
+                        iDiv1.id = 'ts_stats_container';
+                        iDiv1.innerHTML = fig1;
+                        iDiv1.setAttribute("class", "plotly_fig")
+
+                        $("#ts_stats").show();
+                        $("#ts_confidence").show();
+
+                        $("#ts_stats_div").append(iDiv);
+                        $("#ts_confidence_div").append(iDiv1);
+                    }
+                },
+                error: function (ret) {
+                    console.log("All bad")
+                }
+
+            });
+        }
+    })
+});
+
+// $(document).ready(function () {
+//     $('#radio_buttons').change(function () {
+//         if ($("input[name=uploaded_file]:checked").length > 0) {
+//             var uploaded_dataset = $("input:radio[name=uploaded_file]:checked").val();
+
+//             if (document.getElementById("df_container")) {
+//                 $("#df").hide();
+//                 $("#df_stats").hide();
+//             }
+
+//             if (document.getElementById("df_cached")) {
+//                 $("#df_cached").hide()
+//                 $("#df_stats_cached").hide()
+//             }
+
+//             if (document.getElementById("ts_confidence")) {
+//                 $("#ts_confidence").hide()
+//                 $("#ts_stats").hide()
+//             }
+
+//             if (document.getElementById("ts_confidence_cached")) {
+//                 $("#ts_confidence_cached").hide()
+//                 $("#ts_stats_cached").hide()
+//             }
+
+//             $("#new_or_load").hide()
+//             var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+
+//             $("#loader_ds").show();
+//             $("#loader_stats").show();
+
+//             $.ajax({
+//                 method: 'POST',
+//                 url: '',
+//                 headers: { 'X-CSRFToken': csrftoken, },
+//                 data: { 'action': "uploaded_datasets", 'df_name': uploaded_dataset },
+//                 success: function (values) {
+//                     $("#loader_ds").hide();
+//                     $("#loader_stats").hide();
+//                     $("#new_or_load").show()
+//                     // fetch data
+//                     // remove data if already displayed
+//                     if (document.getElementById("df_container")) {
+//                         $("#pretrained_radio").remove();
+//                         $("#df_container").remove();
+//                         $("#stats_container").remove();
+//                         $("#feature1").remove();
+//                         $("#feature2").remove();
+//                         $("#label").remove();
+//                     }
+
+//                     if (document.getElementById("df_cached")) {
+//                         $("#df_cached").remove()
+//                         $("#df_stats_cached").remove()
+//                         $("#ts_confidence_cached").remove()
+//                         $("#ts_stats_cached").remove()
+//                     }
+
+//                     if (document.getElementById("ts_confidence_container")) {
+//                         $("#ts_confidence_container").remove();
+//                         $("#ts_stats_container").remove();
+//                     }
+
+//                     var ret = JSON.parse(values)
+//                     var dataset_type = ret["dataset_type"]
+
+//                     if (values) {
+//                         if (dataset_type == "tabular") {
+//                             var df = ret["data_to_display"]
+//                             var fig = ret["fig"]
+//                             var features = ret["features"]
+//                             var feature1 = ret["feature1"]
+//                             var feature2 = ret["feature2"]
+
+//                             // cur labels
+//                             var labels = ret["labels"]
+//                             var curlabel = ret["curlabel"]
+
+//                             var selection1 = create_selection(features, "feature1", null, feature1)
+//                             var selection2 = create_selection(features, "feature2", null, feature2)
+//                             var selection3 = create_selection(labels, "label", null, curlabel)
+
+//                             // create table
+//                             var tb = create_dataframe(df, "df_container")
+
+//                             $("#model_container").show()
+//                             $("#df").show();
+//                             $("#df_stats").show();
+
+//                             // append new data
+//                             $("#df_div").append(tb);
+//                             $("#selection").append(selection1);
+//                             $("#selection").append(selection2);
+//                             $("#selection").append(selection3);
+
+//                             // append fig
+//                             var iDiv = document.createElement('div');
+//                             iDiv.id = 'stats_container';
+//                             iDiv.innerHTML = fig;
+//                             iDiv.setAttribute("class", "plotly_fig")
+
+//                             $("#stats_div").append(iDiv);
+//                         } else if (dataset_type == "timeseries") {
+
+//                             // timeseries
+//                             // var feature = ret["feature"]
+//                             var fig = ret["fig"]
+//                             var fig1 = ret["fig1"]
+
+//                             var iDiv = document.createElement('div');
+//                             iDiv.id = 'ts_confidence_container';
+//                             iDiv.innerHTML = fig;
+//                             iDiv.setAttribute("class", "plotly_fig")
+
+//                             var iDiv1 = document.createElement('div');
+//                             iDiv1.id = 'ts_stats_container';
+//                             iDiv1.innerHTML = fig1;
+//                             iDiv1.setAttribute("class", "plotly_fig")
+
+//                             $("#ts_stats").show();
+//                             $("#ts_confidence").show();
+
+//                             $("#ts_stats_div").append(iDiv);
+//                             $("#ts_confidence_div").append(iDiv1);
+
+//                         }
+//                     }
+//                 },
+//                 error: function (ret) {
+//                 }
+//             });
+//         }
+//     })
+// });
+
+// $('#upload_btn').click(function (event) {
+//     event.preventDefault(); // Prevent default form submission
+
+//     var datasetType = $('input[name="dataset_type"]:checked').val();
+//     var fileInput = $('#doc')[0].files[0];
+//     var csrfToken = $('input[name="csrfmiddlewaretoken"]').val();
+
+//     if (!datasetType || !fileInput) {
+//         alert('Please select a dataset type and choose a file to upload.');
+//         return;
+//     }
+
+//     // Use FormData to handle file upload
+//     var formData = new FormData();
+//     formData.append('action', 'upload_dataset');
+//     formData.append('dataset_type', datasetType);
+//     formData.append('excel_file', fileInput);
+//     formData.append('csrfmiddlewaretoken', csrfToken);
+
+//     $("#cfbtn_loader").show();
+
+//     $.ajax({
+//         url: '', // Replace with your Django view URL for uploading
+//         type: 'POST',
+//         data: formData,
+//         processData: false, // Prevent jQuery from processing data
+//         contentType: false, // Prevent jQuery from setting content type
+//         success: function (response) {
+//             try {
+//                 var ret = JSON.parse(response);
+//                 var df_name = ret["df_name"];
+//                 var uploaded_files = ret["uploaded_files"];
+//                 var counter = uploaded_files.length - 1;
+
+//                 // Add uploaded file to the list
+//                 alert("here")
+//                 $("#radio_buttons").append(create_uploaded_file_radio(df_name, counter));
+
+//                 // Check if target_labels exist in the response
+//                 if (ret["target_labels"]) {
+//                     populateLabelModal(ret["target_labels"]); // Populate the modal for label selection
+//                 }
+
+//                 showSuccessMessage();
+//             } catch (error) {
+//                 console.error("Error processing response:", error);
+//                 alert("An error occurred while processing the upload response.");
+//             } finally {
+//                 $("#cfbtn_loader").hide();
+//             }
+//         },
+//         error: function (xhr, status, error) {
+//             console.error("Error uploading:", status, error);
+//             alert("An error occurred during upload. Please try again.");
+//             $("#cfbtn_loader").hide();
+//         }
+//     });
+// });
+
+// function populateLabelModal(targetLabels) {
+//     const positiveDropdown = $("#positive-label");
+//     const negativeDropdown = $("#negative-label");
+//     const errorContainer = $("#selection-error");
+
+//     // Populate dropdowns
+//     updateDropdownOptions(positiveDropdown, targetLabels, null);
+//     updateDropdownOptions(negativeDropdown, targetLabels, null);
+
+//     // Reset error message
+//     errorContainer.addClass("d-none").text("");
+
+//     // Open the modal
+//     $("#labelSelectionModal").modal({
+//         backdrop: 'static', // Prevent closing when clicking outside
+//         keyboard: false     // Prevent closing with "Escape"
+//     });
+
+//     let selectedPositive = null;
+//     let selectedNegative = null;
+
+//     // Handle changes in positive dropdown
+//     positiveDropdown.off("change").on("change", function () {
+//         selectedPositive = $(this).val();
+//         updateDropdownOptions(negativeDropdown, targetLabels, selectedPositive, selectedNegative);
+//         validateSelection(selectedPositive, selectedNegative);
+//     });
+
+//     // Handle changes in negative dropdown
+//     negativeDropdown.off("change").on("change", function () {
+//         selectedNegative = $(this).val();
+//         updateDropdownOptions(positiveDropdown, targetLabels, selectedNegative, selectedPositive);
+//         validateSelection(selectedPositive, selectedNegative);
+//     });
+
+//     $("#save-label-choices").click(function (event) {
+//         if (validateSelection(selectedPositive, selectedNegative, true)) {
+//             var csrfToken = $('input[name="csrfmiddlewaretoken"]').val();
+//             var formData = new FormData();
+
+//             formData.append('action', 'select_class_labels_for_uploaded_timeseries');
+//             formData.append('positive_label', selectedPositive);
+//             formData.append('negative_label', selectedNegative);
+//             formData.append('csrfToken', csrfToken);
+//             // Show loader
+//             $("#loader_ds").removeClass("d-none");
+
+//             // Disable the Save button to prevent duplicate submissions
+//             $("#save-label-choices").prop("disabled", true);
+
+//             $.ajax({
+//                 url: '', // Replace with your Django view URL for uploading
+//                 type: 'POST',
+//                 headers: { 'X-CSRFToken': csrfToken, },
+//                 data: formData,
+//                 processData: false, // Prevent jQuery from processing data
+//                 contentType: false, // Prevent jQuery from setting content type
+//                 success: function (response) {
+//                     console.log('Labels saved successfully:', response);
+
+//                     // Hide loader
+//                     $("#loader_ds").addClass("d-none");
+
+//                     // Enable the Save button
+//                     $("#save-label-choices").prop("disabled", false);
+
+//                     // Close the modal
+//                     $("#labelSelectionModal").modal("hide");
+
+//                     // Optionally update the UI with the response
+//                 },
+//                 error: function (xhr) {
+//                     const errorContainer = $("#selection-error");
+//                     const errorMessage = xhr.responseJSON?.message || 'An error occurred while saving labels.';
+//                     errorContainer.html(`<i class="fas fa-exclamation-triangle"></i> ${errorMessage}`)
+//                         .removeClass("d-none");
+
+//                     // Hide loader
+//                     $("#loader_ds").addClass("d-none");
+
+//                     // Enable the Save button
+//                     $("#save-label-choices").prop("disabled", false);
+//                 }
+//             });
+//         }
+//     });
+
+
+//     /**
+//      * Helper function to retrieve CSRF token.
+//      * Assumes the CSRF token is stored in a cookie named 'csrftoken'.
+//      * @returns {string} - CSRF token value.
+//      */
+//     function getCSRFToken() {
+//         const name = 'csrftoken';
+//         const cookies = document.cookie.split(';');
+//         for (let cookie of cookies) {
+//             cookie = cookie.trim();
+//             if (cookie.startsWith(name + '=')) {
+//                 return cookie.substring(name.length + 1);
+//             }
+//         }
+//         return '';
+//     }
+// }
+
+// /**
+//  * Update dropdown options dynamically, excluding the currently selected value in the other dropdown.
+//  * @param {jQuery} dropdown - The dropdown to update.
+//  * @param {Array} options - The list of options to populate.
+//  * @param {string|null} exclude - The value to exclude from the dropdown options.
+//  * @param {string|null} currentValue - The current value of the dropdown being updated.
+//  */
+// function updateDropdownOptions(dropdown, options, exclude, currentValue = null) {
+//     dropdown.empty(); // Clear existing options
+
+//     // Add default placeholder
+//     dropdown.append('<option value="" disabled>Select a label</option>');
+
+//     // Repopulate options, excluding the selected value from the other dropdown
+//     options.forEach(option => {
+//         if (option !== exclude) {
+//             dropdown.append(
+//                 `<option value="${option}" ${option === currentValue ? "selected" : ""}>${option}</option>`
+//             );
+//         }
+//     });
+
+//     // Reset dropdown if the current value is no longer valid
+//     if (exclude === currentValue) {
+//         dropdown.val("");
+//     }
+// }
+
+// /**
+//  * Validate the selected positive and negative labels.
+//  * @param {string|null} positive - The selected positive label.
+//  * @param {string|null} negative - The selected negative label.
+//  * @param {boolean} showError - Whether to show an error message on failure.
+//  * @returns {boolean} - Returns true if the selection is valid, otherwise false.
+//  */
+// function validateSelection(positive, negative, showError = false) {
+//     const errorContainer = $("#selection-error");
+
+//     if (!positive || !negative) {
+//         if (showError) {
+//             errorContainer.text("You must select both a positive and a negative label!").removeClass("d-none");
+//         }
+//         return false;
+//     }
+
+//     if (positive === negative) {
+//         if (showError) {
+//             errorContainer.text("Positive and Negative labels must be different!").removeClass("d-none");
+//         }
+//         return false;
+//     }
+
+//     // Clear error if valid
+//     errorContainer.addClass("d-none").text("");
+//     return true;
+// }
+
+document.getElementById("selection").addEventListener("change", function (e) {
+    var feature1 = document.getElementById("feature1").value
+    var feature2 = document.getElementById("feature2").value
+    var label = document.getElementById("label").value
+    var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+    $("#stats_container").remove()
+    $('#loader_stats').show()
+    $.ajax({
+        method: 'POST',
+        url: '',
+        headers: { 'X-CSRFToken': csrftoken, },
+        data: { 'action': "stat", 'feature1': feature1, 'feature2': feature2, 'label': label },
+        success: function (ret) {
+            $('#loader_stats').hide()
+            var ret = JSON.parse(ret)
+            var fig = ret["fig"]
+            var iDiv = document.createElement('div');
+            iDiv.id = 'stats_container';
+            iDiv.insertAdjacentHTML('beforeend', fig);
+            $("#stats_div").append(iDiv);
+
+        },
+        error: function (ret) {
+        }
+    });
+});
+
+if (document.getElementById("selection_cached")) {
+    document.getElementById("selection_cached").addEventListener("change", function (e) {
+
+        var feature1 = document.getElementById("feature1_cached").value
+        var feature2 = document.getElementById("feature2_cached").value
+        var label = document.getElementById("label_cached").value
+        var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+        $("#stats_container_cached").html("")
+        $('#loader_stats_cached').show()
+        $.ajax({
+            method: 'POST',
+            url: '',
+            headers: { 'X-CSRFToken': csrftoken, },
+            data: { 'action': "stat", 'feature1': feature1, 'feature2': feature2, 'label': label },
+            success: function (ret) {
+                $('#loader_stats_cached').hide()
+                var ret = JSON.parse(ret)
+                var fig = ret["fig"]
+                var iDiv = document.createElement('div');
+                iDiv.id = 'stats_container_cached';
+                iDiv.insertAdjacentHTML('beforeend', fig);
+                $("#stats_container_cached").html(fig)
+                // $("#stats_container_cached").append(iDiv);
+
+            },
+            error: function (ret) {
+            }
+        });
+    });
+}
\ No newline at end of file
diff --git a/base/static/js/import.js b/base/static/js/import.js
deleted file mode 100755
index 100d09d14..000000000
--- a/base/static/js/import.js
+++ /dev/null
@@ -1,21 +0,0 @@
-function enterdata() {
-    //Highlight clicked row
-    document.getElementById('upload_btn').addEventListener('click', function () {
-        var data1 = new FormData()
-        var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
-        data1.append('excel_file', $('#doc')[0].files[0])
-        console.log(data1)
-        $.ajax({
-            method: 'POST',
-            url: '',
-            headers: { 'X-CSRFToken': csrftoken, },
-            data: { 'action': "upload_csv", 'data': data1 },
-            success: function (ret) {
-                console.log(ret)
-            },
-            error: function (ret) {
-                
-            }
-        });
-    });
-};
diff --git a/base/static/js/main.js b/base/static/js/main.js
index d943d03a9..248eff38c 100644
--- a/base/static/js/main.js
+++ b/base/static/js/main.js
@@ -73,20 +73,48 @@ document.addEventListener("DOMContentLoaded", function () {
 if (document.getElementById("backToDatasetButton")) {
     document.getElementById("backToDatasetButton").addEventListener("click", function () {
         // Redirect to the dataset selection section
-        window.location.href = "/#dataset_selection"; // Replace with the actual URL
+        window.location.href = "/#dataset_selection";
     });
 }
 
 if (document.getElementById("viewCounterfactualsButton")) {
     document.getElementById("viewCounterfactualsButton").addEventListener("click", function () {
         // Redirect to the counterfactuals view section
-        window.location.href = "/counterfactuals.html"; // Replace with the actual URL
+        window.location.href = "/counterfactuals.html";
     });
 }
 
 if (document.getElementById("viewPreTrainedButton")) {
     document.getElementById("viewPreTrainedButton").addEventListener("click", function () {
-        // Redirect to the counterfactuals view section
-        window.location.href = "/charts.html"; // Replace with the actual URL
+        // Redirect to the pre trained view section
+        window.location.href = "/charts.html"; 
     });
-}
\ No newline at end of file
+}
+
+// JavaScript to handle delete functionality
+document.addEventListener("DOMContentLoaded", function () {
+
+    document.getElementById("radio_buttons").addEventListener("click", function (event) {        
+        // Identify if the click originated from the button or its child span
+        let targetButton = event.target.closest(".delete-file-icon");
+        console.log(targetButton)
+        // Only proceed if a delete-file-icon button was clicked
+        if (targetButton) {
+            // Get the filename from the data-file attribute
+            const fileName = targetButton.getAttribute("data-file");
+
+            const fileNameValue = targetButton.getAttribute("data-file-value");
+
+            // Set the file name in the modal for display
+            document.getElementById("fileToDeleteName").innerText = fileName;
+
+            // Set the filename in the confirm button for reference during deletion
+            document.getElementById("confirmDeleteButton").setAttribute("data-file", fileName);
+
+            document.getElementById("confirmDeleteButton").setAttribute("data-file-value", fileNameValue);
+
+            // Show the delete confirmation modal
+            $('#deleteFileModal').modal('show');
+        }
+    });
+});
\ No newline at end of file
diff --git a/base/static/js/methods.js b/base/static/js/methods.js
index ec8db56cc..557bc1c16 100755
--- a/base/static/js/methods.js
+++ b/base/static/js/methods.js
@@ -81,10 +81,10 @@ function transpose_table(tableHtml) {
         for (let colIndex = 0; colIndex < colNames.length; colIndex++) {
             transposedHtml += `<tr><td>${colNames[colIndex]}</td>`;
             transposedHtml += `<td>${rows[colIndex]}</td>`;
-            transposedHtml += `<td>${rows[colIndex + colNames.length ]}</td>`;
+            transposedHtml += `<td>${rows[colIndex + colNames.length]}</td>`;
             transposedHtml += '</tr>';
         }
-    }else{
+    } else {
         for (let colIndex = 0; colIndex < colNames.length; colIndex++) {
             transposedHtml += `<tr><td>${colNames[colIndex]}</td>`;
             transposedHtml += `<td>${rows[colIndex]}</td>`;
@@ -98,6 +98,108 @@ function transpose_table(tableHtml) {
     return transposedHtml;
 }
 
+function create_uploaded_file_radio(name, counter) {
+    var formCheckDiv = document.createElement("div");
+    formCheckDiv.className = "form-check mb-1 d-flex align-items-center";
+
+    // Create the radio input element
+    var radioInput = document.createElement("input");
+    radioInput.className = "form-check-input mr-2";
+    radioInput.type = "radio";
+    radioInput.name = "uploaded_file";
+    radioInput.id = "file_" + counter;
+    radioInput.value = name;
+    radioInput.required = true;
+
+    // Create the label element
+    var label = document.createElement("label");
+    label.className = "form-check-label mr-auto";
+    label.htmlFor = "file_" + counter;
+    label.innerText = name;
+
+    // Create the delete button
+    var deleteButton = document.createElement("button");
+    deleteButton.className = "delete-file-icon p-0 ml-2 text-muted close";
+    deleteButton.type = "button";
+    deleteButton.dataset.file = name;
+    deleteButton.setAttribute("aria-label", "Delete " + name);
+
+    // Create the '×' span inside the delete button
+    var deleteIcon = document.createElement("span");
+    deleteIcon.setAttribute("aria-hidden", "true");
+    deleteIcon.innerHTML = "&times;";
+
+    // Append the delete icon to the delete button
+    deleteButton.appendChild(deleteIcon);
+
+    // Append the radio input, label, and delete button to the container div
+    formCheckDiv.appendChild(radioInput);
+    formCheckDiv.appendChild(label);
+    formCheckDiv.appendChild(deleteButton);
+
+    return formCheckDiv
+}
+
+
+function showSuccessMessage() {
+    const successMessage = document.getElementById("success-message");
+    successMessage.classList.remove("d-none");
+
+    // Add a slight delay to trigger the transition
+    setTimeout(() => successMessage.classList.add("show"), 10);
+
+    // Automatically hide the message after a few seconds
+    setTimeout(() => hideSuccessMessage(), 3000);
+}
+
+function hideSuccessMessage() {
+    const successMessage = document.getElementById("success-message");
+    successMessage.classList.remove("show");
+
+    // Delay hiding the element fully until after the transition
+    setTimeout(() => successMessage.classList.add("d-none"), 400);
+}
+
+function showLoader(show, ids = ["#loader_ds", "#loader_stats"]) {
+    ids.forEach(id => {
+        $(id).toggle(show);
+    });
+}
+function resetContainers(ids = [
+    "#df_container",
+    "#stats_container",
+    "#figs",
+    "#df",
+    "#df_stats",
+    "#df_cached",
+    "#df_stats_cached",
+    "#ts_confidence_cached",
+    "#ts_stats_cached",
+    "#ts_confidence",
+    "#ts_stats"
+]) {
+    ids.forEach(id => {
+        $(id).hide();
+    });
+}
+
+function clearPreviousContent(ids = [
+    "#df_container",
+    "#stats_container",
+    "#pretrained_radio",
+    "#feature1",
+    "#feature2",
+    "#label",
+    "#ts_confidence_container",
+    "#ts_stats_container"
+]) {
+    ids.forEach(id => {
+        const element = document.querySelector(id);
+        if (element) element.remove();
+    });
+}
+
+
 export {
-    create_selection, create_dataframe, create_div, transpose_table
+    create_selection, create_dataframe, create_div, transpose_table, create_uploaded_file_radio, showSuccessMessage, hideSuccessMessage, showLoader, clearPreviousContent, resetContainers
 }
\ No newline at end of file
diff --git a/base/static/js/radio_dataset.js b/base/static/js/radio_dataset.js
deleted file mode 100755
index a197456e0..000000000
--- a/base/static/js/radio_dataset.js
+++ /dev/null
@@ -1,98 +0,0 @@
-import { create_dataframe, create_selection } from './methods.js';
-
-$(document).ready(function () {
-
-    function showLoader(show) {
-        $("#loader, #loader_stats").toggle(show);
-    }
-
-    function resetContainers() {
-        $("#df_container, #stats_container, #figs, #df, #df_stats, #df_cached, #df_stats_cached, #ts_confidence_cached, #ts_stats_cached, #ts_confidence, #ts_stats").hide();
-    }
-
-    function clearPreviousContent() {
-        ["#df_container", "#stats_container", "#pretrained_radio", "#feature1", "#feature2", "#label", "#ts_confidence_container", "#ts_stats_container"].forEach(id => {
-            if ($(id).length) $(id).remove();
-        });
-    }
-
-    function fetchDatasetData(df_name) {
-        const csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
-        showLoader(true);
-
-        $.ajax({
-            method: 'POST',
-            url: '',
-            headers: { 'X-CSRFToken': csrftoken },
-            data: { 'action': "dataset", 'df_name': df_name },
-            success: function (values) {
-                showLoader(false);
-                handleSuccessResponse(values);
-            },
-            error: function (ret) {
-                console.error("Failed to fetch dataset:", ret);
-            }
-        });
-    }
-
-    function handleSuccessResponse(values) {
-        if (!values) return;
-        
-        clearPreviousContent();
-        const ret = JSON.parse(values);
-        const datasetType = ret["dataset_type"];
-        
-        if (datasetType === "tabular") {
-            setupTabularDataset(ret);
-        } else if (datasetType === "timeseries") {
-            setupTimeseriesDataset(ret);
-        }
-    }
-
-    function setupTabularDataset(ret) {
-        const { data_to_display: df, fig, features, feature1, feature2, labels, curlabel } = ret;
-
-        const selection1 = create_selection(features, "feature1", null, feature1);
-        const selection2 = create_selection(features, "feature2", null, feature2);
-        const selection3 = create_selection(labels, "label", null, curlabel);
-
-        const tb = create_dataframe(df, "df_container");
-        
-        $("#model_container, #df, #df_stats").fadeIn(200);
-        $("#df_div").append(tb);
-        $("#selection").append(selection1, selection2, selection3);
-
-        const figDiv = $("<div>", { id: 'stats_container', class: "plotly_fig" }).html(fig);
-        $("#stats_div").append(figDiv);
-    }
-
-    function setupTimeseriesDataset(ret) {
-        const { fig, fig1 } = ret;
-
-        const figDiv = $("<div>", { id: 'ts_confidence_container', class: "plotly_fig" }).html(fig);
-        const figDiv1 = $("<div>", { id: 'ts_stats_container', class: "plotly_fig" }).html(fig1);
-
-        $("#ts_stats, #ts_confidence").fadeIn(200);
-        $("#ts_stats_div").append(figDiv);
-        $("#ts_confidence_div").append(figDiv1);
-    }
-
-    $('.btn-dataset').click(function (e) {
-        const df_name = $(this).is('#upload') ? "upload" : $(this).attr('id');
-        resetContainers();
-        $("#upload_col").toggle(df_name === "upload");
-        $("#timeseries-datasets").toggle(df_name === "timeseries");
-
-        $(this).toggleClass("active").siblings().removeClass("active");
-        $(this).addClass("active");
-        const timeseries_dataset = df_name === "timeseries" ? $("input:radio[name=timeseries_dataset]:checked").val() : "";
-        if (timeseries_dataset || df_name !== "timeseries") {
-            fetchDatasetData(timeseries_dataset || df_name);
-        }
-    });
-});
-
-document.getElementById("viewModelsButton").addEventListener("click", function () {
-    // Prompt or redirect the user to the pre-trained models section
-    window.location.href = "/charts.html"; // Replace with the actual URL
-});
\ No newline at end of file
diff --git a/base/static/js/radio_model.js b/base/static/js/radio_model.js
index 6fb99c6e3..3306b7427 100755
--- a/base/static/js/radio_model.js
+++ b/base/static/js/radio_model.js
@@ -1,10 +1,7 @@
-import { create_dataframe, create_div, transpose_table } from './methods.js'
-
-// pretrained model selection and
-// train new model selection (not execution)
+import { create_dataframe, create_div } from './methods.js'
 
 $(document).ready(function () {
-    $('#pre_trained_models').change(function () {
+    $('#radio_buttons').change(function () {
         if ($("input[name=modeling_options]:checked").length > 0) {
 
             // pre trained model selected
@@ -18,8 +15,6 @@ $(document).ready(function () {
             var url = ""
 
             if (currentUrl.includes('charts.html')) {
-                $("#figs").hide();
-                $("#figs_2").hide();
 
                 // if they already exist, remove them and update them
                 if (document.getElementById("principle_component_analysis")) {
@@ -118,4 +113,77 @@ $(document).ready(function () {
             });
         }
     });
+
+    // Confirmed deletion of a pre-trained model: POST the delete action, then
+    // update the file list and the confirmation modal with the outcome.
+    document.getElementById('confirmDeleteButton').addEventListener('click', function () {
+        // Both attributes were stashed on the confirm button when the modal opened.
+        const fileNameValue = this.getAttribute('data-file-value');
+        const fileName = this.getAttribute('data-file');
+        const csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+        // NOTE(review): radios built by create_uploaded_file_radio use
+        // name="uploaded_file", not name="radio_buttons" — confirm this selector
+        // actually matches the intended element.
+        const uploadedDataset = $("input:radio[name=radio_buttons]:checked").val();
+        // AJAX request to delete file
+        $.ajax({
+            type: 'POST',
+            url: '',  // Add the URL where this request should go
+            data: {
+                action: 'delete_pre_trained',
+                model_name: fileNameValue,
+                csrfmiddlewaretoken: csrftoken  // Django CSRF token
+            },
+            success: function () {
+                // Remove the file entry from the UI
+                const fileElement = $(`[data-file="${fileName}"]`).closest('.form-check');
+                fileElement.remove();
+                
+                // Check if there are any remaining .form-check elements
+                if ($('#radio_buttons .form-check').length === 0) {
+                    // Replace the #radio_buttons content with the fallback message
+                    const radioButtonsContainer = document.querySelector('#radio_buttons');
+                    radioButtonsContainer.innerHTML = `
+                        <p class="text-danger">
+                            There are no available pre-trained models. 
+                            Please <a href="/train.html" class="text-primary">train a model</a>.
+                        </p>
+                    `;
+                }
+            
+                // Attach a success message to the modal
+                const modalBody = document.querySelector('#deleteFileModal .modal-body');
+                modalBody.innerHTML = `
+                    <div class="alert alert-success mb-3" role="alert">
+                        <i class="fas fa-check-circle mr-2"></i>
+                        The file <strong>${fileName}</strong> has been successfully deleted.
+                    </div>
+                `;
+            
+                // Optionally hide the modal after a delay
+                setTimeout(() => {
+                    $('#deleteFileModal').modal('hide');
+                    modalBody.innerHTML = ''; // Clear the message after hiding
+                }, 2000);
+            
+                // Reset containers if the deleted file is the uploaded dataset
+                if (fileName === uploadedDataset) {
+                    // NOTE(review): resetContainers lives in methods.js, but this
+                    // module's visible import only brings in create_dataframe and
+                    // create_div — verify resetContainers is in scope here.
+                    resetContainers();
+                }
+            },
+            
+            error: function () {
+                // Attach an error message to the modal
+                const modalBody = document.querySelector('#deleteFileModal .modal-body');
+                modalBody.innerHTML = `
+                    <div class="alert alert-danger mb-3" role="alert">
+                        <i class="fas fa-times-circle mr-2"></i>
+                        An error occurred while deleting the file. Please try again.
+                    </div>
+                `;
+    
+                // Optionally reset the modal content after a delay
+                setTimeout(() => {
+                    // NOTE(review): "{{ df_name }}" is Django template syntax inside
+                    // a static JS string — it will be inserted literally here;
+                    // confirm this is intended.
+                    modalBody.innerHTML = `
+                        <p class="mb-1">Delete <span id="fileToDeleteName" class="font-weight-bold"></span> pre-trained classifier on <span class="font-weight-bold"> {{ df_name }} </span> dataset?</p>
+                        <small class="text-muted">This action is permanent.</small>
+                    `;
+                }, 3000);
+            }
+        });
+    });    
 });
diff --git a/base/static/js/radio_timeseries_dataset.js b/base/static/js/radio_timeseries_dataset.js
index 23f2bba8b..d74eb8b28 100644
--- a/base/static/js/radio_timeseries_dataset.js
+++ b/base/static/js/radio_timeseries_dataset.js
@@ -1,81 +1,73 @@
-import { create_dataframe, create_selection } from './methods.js'
+import { create_dataframe, create_selection } from './methods.js';
 
 $(document).ready(function () {
-
     $('#timeseries-datasets').change(function () {
-        if ($("input[name=timeseries_dataset]:checked").length > 0) {
+        const selectedDataset = $("input[name=timeseries_dataset]:checked").val();
 
-            var timeseries_dataset = $("input:radio[name=timeseries_dataset]:checked").val();
+        if (selectedDataset) {
+            resetContainers();
+            toggleSkeletons(true);
 
-            $("#df_container").hide();
-            $("#stats_container").hide();
-            $("#figs").hide();
-
-            $("#ts_confidence_cached").hide()
-            $("#ts_stats_cached").hide()
-
-            $("#ts_confidence").hide()
-            $("#ts_stats").hide()
-            var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
-            
-            $("#loader").show();
-            $("#loader_stats").show();
+            const csrfToken = $("[name=csrfmiddlewaretoken]").val();
 
+            // AJAX request to fetch the dataset
             $.ajax({
                 method: 'POST',
-                url: '',
-                headers: { 'X-CSRFToken': csrftoken, },
-                data: { 'action': "timeseries-dataset", 'timeseries_dataset': timeseries_dataset },
-                success: function (values) {
-                    $("#loader").hide();
-                    $("#loader_stats").hide();
-                    // fetch data
-                    // remove data if already displayed
-                    if (document.getElementById("df_container")) {
-                        $("#pretrained_radio").remove();
-                        $("#df_container").remove();
-                        $("#stats_container").remove();
-                        $("#feature1").remove();
-                        $("#feature2").remove();
-                        $("#label").remove();
-                    }
-
-                    if (document.getElementById("ts_confidence_container")) {
-                        $("#ts_confidence_container").remove();
-                        $("#ts_stats_container").remove();
-                    }
-
-                    var ret = JSON.parse(values)
-                    var dataset_type = ret["dataset_type"]
-
-                    if (values) {
-                        // timeseries
-                        // var feature = ret["feature"]
-                        var fig = ret["fig"]
-                        var fig1 = ret["fig1"]
-
-                        var iDiv = document.createElement('div');
-                        iDiv.id = 'ts_confidence_container';
-                        iDiv.innerHTML = fig;
-                        iDiv.setAttribute("class", "plotly_fig")
-
-                        var iDiv1 = document.createElement('div');
-                        iDiv1.id = 'ts_stats_container';
-                        iDiv1.innerHTML = fig1;
-                        iDiv1.setAttribute("class", "plotly_fig")
-
-                        $("#ts_stats").show();
-                        $("#ts_confidence").show();
-
-                        $("#ts_stats_div").append(iDiv);
-                        $("#ts_confidence_div").append(iDiv1);
-                    }
+                url: '', // Specify your endpoint here
+                headers: { 'X-CSRFToken': csrfToken },
+                data: { action: "timeseries-dataset", timeseries_dataset: selectedDataset },
+                success: function (response) {
+                    // NOTE(review): removed stray debug alert left over from testing
+                    toggleSkeletons(false);
+                    handleTimeseriesResponse(response);
                 },
-                error: function (ret) {
-                    console.log("All bad")
+                error: function (error) {
+                    toggleSkeletons(false);
+                    console.error("An error occurred:", error);
                 }
-
             });
         }
-    })
-});
\ No newline at end of file
+    });
+
+    /**
+     * Reset the containers by hiding any previous data.
+     */
+    function resetContainers() {
+        const elementsToHide = [
+            "#df_div", "#stats_div", "#ts_confidence_div", "#ts_stats_div"
+        ];
+        elementsToHide.forEach(selector => $(selector).empty().hide());
+    }
+
+    /**
+     * Toggle skeleton loaders for a smooth user experience.
+     * @param {boolean} show - Whether to show or hide the skeleton loaders.
+     */
+    function toggleSkeletons(show) {
+        const skeletonSelectors = [
+            "#df_skeleton", "#stats_skeleton", "#ts_confidence_skeleton", "#ts_stats_skeleton"
+        ];
+        skeletonSelectors.forEach(selector => $(selector).toggle(show));
+    }
+
+    /**
+     * Handle the response for timeseries dataset.
+     * @param {Object|string} response - The server response.
+     */
+    function handleTimeseriesResponse(response) {
+        try {
+            const data = JSON.parse(response);
+            if (!data) throw new Error("Invalid response format");
+
+            // Populate data and stats
+            if (data.fig) {
+                $("#ts_confidence_div").html(data.fig).show();
+            }
+            if (data.fig1) {
+                $("#ts_stats_div").html(data.fig1).show();
+            }
+        } catch (error) {
+            console.error("Failed to process response:", error);
+        }
+    }
+});
diff --git a/base/static/js/radio_uploaded_dataset.js b/base/static/js/radio_uploaded_dataset.js
deleted file mode 100644
index b48757961..000000000
--- a/base/static/js/radio_uploaded_dataset.js
+++ /dev/null
@@ -1,118 +0,0 @@
-import { create_dataframe, create_selection } from './methods.js'
-
-$(document).ready(function () {
-
-    $('#uploaded_file').change(function () {
-        if ($("input[name=uploaded_file]:checked").length > 0) {
-
-            var uploaded_dataset = $("input:radio[name=uploaded_file]:checked").val();
-
-            $("#df_container").hide();
-            $("#stats_container").hide();
-            $("#figs").hide();
-
-            $("#ts_confidence_cached").hide()
-            $("#ts_stats_cached").hide()
-
-            $("#ts_confidence").hide()
-            $("#ts_stats").hide()
-            var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
-            
-            $("#loader").show();
-            $("#loader_stats").show();
-
-            $.ajax({
-                method: 'POST',
-                url: '',
-                headers: { 'X-CSRFToken': csrftoken, },
-                data: { 'action': "uploaded_datasets", 'df_name': uploaded_dataset },
-                success: function (values) {
-                     $("#loader").hide();
-                     $("#loader_stats").hide();
-                     // fetch data
-                     // remove data if already displayed
-                     if (document.getElementById("df_container")) {
-                          $("#pretrained_radio").remove();
-                          $("#df_container").remove();
-                          $("#stats_container").remove();
-                          $("#feature1").remove();
-                          $("#feature2").remove();
-                          $("#label").remove();
-                     }
-
-                     if (document.getElementById("ts_confidence_container")) {
-                          $("#ts_confidence_container").remove();
-                          $("#ts_stats_container").remove();
-                     }
-
-                     var ret = JSON.parse(values)
-                     var dataset_type = ret["dataset_type"]
-
-                     if (values) {
-                          if (dataset_type == "tabular") {
-                               var df = ret["data_to_display"]
-                               var fig = ret["fig"]
-                               var features = ret["features"]
-                               var feature1 = ret["feature1"]
-                               var feature2 = ret["feature2"]
-
-                               // cur labels
-                               var labels = ret["labels"]
-                               var curlabel = ret["curlabel"]
-
-                               var selection1 = create_selection(features, "feature1", null, feature1)
-                               var selection2 = create_selection(features, "feature2", null, feature2)
-                               var selection3 = create_selection(labels, "label", null, curlabel)
-
-                               // create table
-                               var tb = create_dataframe(df, "df_container")
-
-                               $("#model_container").show()
-                               $("#df").show();
-                               $("#df_stats").show();
-
-                               // append new data
-                               $("#df_div").append(tb);
-                               $("#selection").append(selection1);
-                               $("#selection").append(selection2);
-                               $("#selection").append(selection3);
-
-                               // append fig
-                               var iDiv = document.createElement('div');
-                               iDiv.id = 'stats_container';
-                               iDiv.innerHTML = fig;
-                               iDiv.setAttribute("class", "plotly_fig")
-
-                               $("#stats_div").append(iDiv);
-                          } else if (dataset_type == "timeseries") {
-
-                               // timeseries
-                               // var feature = ret["feature"]
-                               var fig = ret["fig"]
-                               var fig1 = ret["fig1"]
-
-                               var iDiv = document.createElement('div');
-                               iDiv.id = 'ts_confidence_container';
-                               iDiv.innerHTML = fig;
-                               iDiv.setAttribute("class", "plotly_fig")
-
-                               var iDiv1 = document.createElement('div');
-                               iDiv1.id = 'ts_stats_container';
-                               iDiv1.innerHTML = fig1;
-                               iDiv1.setAttribute("class", "plotly_fig")
-
-                               $("#ts_stats").show();
-                               $("#ts_confidence").show();
-
-                               $("#ts_stats_div").append(iDiv);
-                               $("#ts_confidence_div").append(iDiv1);
-
-                          }
-                     }
-                },
-                error: function (ret) {
-                }
-           });
-        }
-    })
-});
\ No newline at end of file
diff --git a/base/static/js/train.js b/base/static/js/train.js
index 1b5561190..9cc8e15ed 100755
--- a/base/static/js/train.js
+++ b/base/static/js/train.js
@@ -1,6 +1,5 @@
 // train a new model 
-import { create_dataframe, create_div, change_nav_text } from './methods.js'
-change_nav_text("train_nav")
+import { create_dataframe, create_div } from './methods.js'
 
 $(document).ready(function () {
 
@@ -26,57 +25,285 @@ $(document).ready(function () {
     });
 
     $('.train_test').click(function () {
-        // get preprocessing variables, model and test set ratio
-        // send ajax request to back end and wait for results
-        var array_preprocessing = []
+        const classifier = document.getElementById("classifier").value;
+        const errorMessage = $("#error_message_new_x_2");
 
-        $("#loader_train").show();
-        var test_set_ratio
-        var classifier = document.getElementById("classifier").value
-        var class_label = ""
-        var autoencoder = ""
-        var data_to_pass = {}
-        if (classifier != "wildboar_knn" && classifier != "wildboar_rsf" && classifier != "glacier") {
-            class_label = document.getElementById("class_label_train").value
-            test_set_ratio = document.getElementById("slider").value
-            document.getElementsByName("boxes").forEach(function (elem) {
-                if (elem.checked == true) {
-                    array_preprocessing.push(elem.value);
-                }
-            });
-            data_to_pass = { 'action': "train", 'model_name': classifier, 'test_set_ratio': test_set_ratio, 'array_preprocessing': JSON.stringify(array_preprocessing), 'class_label': class_label }
-        } else if (classifier == "glacier") {
-            // time series data, no class label
-            autoencoder = document.getElementById("autoencoder").value
-            // TODO: maybe add test set ratio
-            data_to_pass = { 'action': "train", 'model_name': classifier, 'autoencoder': autoencoder }
-        } else if (classifier == "wildboar_knn" || classifier == "wildboar_rsf") {
-            test_set_ratio = document.getElementById("slider").value
-            document.getElementsByName("boxes").forEach(function (elem) {
-                if (elem.checked == true) {
-                    array_preprocessing.push(elem.value);
-                }
-            });
-            data_to_pass = { 'action': "train", 'model_name': classifier, 'test_set_ratio': test_set_ratio, 'array_preprocessing': JSON.stringify(array_preprocessing) }
+        let array_preprocessing = [];
+        let test_set_ratio, class_label, autoencoder;
+        let data_to_pass = {};
+
+        // Helper function to show errors
+        function showError(message) {
+            errorMessage.text(message);
+            errorMessage.show();
         }
-        // ajax request for training
-        var csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+
+        // Helper function to get checked values of checkboxes by name
+        function getCheckedValues(name) {
+            return Array.from(document.getElementsByName(name))
+                .filter((elem) => elem.checked)
+                .map((elem) => elem.value);
+        }
+
+        // Check if a classifier is selected
+        if (!classifier) {
+            // Show loader while training
+            showError("Please select a classifier before proceeding.");
+            return;
+        }
+
+        // Check if at least one preprocessing checkbox is checked
+        const anyPreprocessingChecked = getCheckedValues("boxes").length > 0;
+
+        if (!anyPreprocessingChecked && classifier !== "glacier") {
+            showError("Please select at least one preprocessing option.");
+            return;
+        }
+
+        // Hide the error message if validations pass
+        errorMessage.hide();
+
+        $("#train_test_btn").hide()
+
+        // Show loader while training
+        $("#loader_train").removeClass("d-none").show();
+
+        // Set up data to pass based on classifier
+        if (classifier === "glacier") {
+            autoencoder = document.getElementById("autoencoder").value;
+            data_to_pass = {
+                action: "train",
+                model_name: classifier,
+                autoencoder: autoencoder
+            };
+        } else {
+            test_set_ratio = document.getElementById("slider").value;
+            class_label = document.getElementById("class_label_train")?.value || "";
+            array_preprocessing = getCheckedValues("boxes");
+
+            data_to_pass = {
+                action: "train",
+                model_name: classifier,
+                test_set_ratio: test_set_ratio,
+                array_preprocessing: JSON.stringify(array_preprocessing),
+                class_label: class_label
+            };
+        }
+
+        // AJAX request for training
+        const csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
         $.ajax({
             method: 'POST',
             url: '',
-            headers: { 'X-CSRFToken': csrftoken, },
+            headers: { 'X-CSRFToken': csrftoken },
             data: data_to_pass,
-            processData: true,   // This should be `true` for form data
-            contentType: 'application/x-www-form-urlencoded; charset=UTF-8',  // Standard form content type,
-            success: function (values) {
+            processData: true,
+            contentType: 'application/x-www-form-urlencoded; charset=UTF-8',
+            success: function (ret) {
                 $("#loader_train").hide();
+
+                try {
+                    $("#train_test_btn").show()
+
+                    // if they already exist, remove them and update them
+                    if (document.getElementById("principle_component_analysis")) {
+                        $("#principle_component_analysis").remove();
+                    }
+                    if (document.getElementById("class_report")) {
+                        $("#class_report").remove();
+                    }
+                    if (document.getElementById("feature_importance")) {
+                        $("#feature_importance").remove();
+                    }
+                    if (document.getElementById("classifier_data")) {
+                        $("#classifier_data").remove();
+                    }
+                    if (document.getElementById("tsne_plot")) {
+                        $("#tsne_plot").remove();
+                    }
+
+                    var ret = JSON.parse(ret)
+                    // Parse successful response data
+                    const class_report = ret["class_report"];
+                    const classifier_data = ret["classifier_data"];
+                    const pca = ret["pca"];
+                    const dataset_type = ret["dataset_type"];
+                    $("#tab").show();
+
+                    if (dataset_type == "timeseries") {
+                        // For timeseries datasets
+                        const tsne = ret["tsne"];
+                        $("#tsne-tab-nav").show();
+                        const col_div_tsne = create_div("tsne_plot", "plotly_fig");
+                        col_div_tsne.insertAdjacentHTML('beforeend', tsne);
+                        $("#tsne_container").append(col_div_tsne);
+                    } else {
+                        // For other datasets
+                        $("#feature-tab-nav").show();
+                        const feature_importance = ret["feature_importance"];
+                        const col_div_fi = create_div("feature_importance", "plotly_fig");
+                        col_div_fi.insertAdjacentHTML('beforeend', feature_importance);
+                        $("#fi_container").append(col_div_fi);
+                    }
+
+                    // Create and append dataframes
+                    const tb = create_dataframe(classifier_data, "details_container");
+                    const cr_tb = create_dataframe(class_report, "cr_container");
+
+                    // Create and append plots
+                    const col_div_pca = create_div("principle_component_analysis", "plotly_fig");
+                    col_div_pca.insertAdjacentHTML('beforeend', pca);
+
+                    const col_div_class_report = create_div("class_report", "plotly_fig sticky-top-table");
+                    col_div_class_report.append(cr_tb);
+
+                    const col_div_classifier_data = create_div("classifier_data", "plotly_fig sticky-top-table");
+                    col_div_classifier_data.append(tb);
+
+                    // Append content to modal tabs
+                    $("#classification_report").append(col_div_class_report);
+                    $("#details").append(col_div_classifier_data);
+                    $("#pca_container").append(col_div_pca);
+
+                    // Show modal for analysis
+                    $("#modelAnalysisModal").modal("show");
+
+                } catch (e) {
+                    console.error("Error processing response:", e);
+                    $("#modelAnalysisModal").modal("show");
+                }
             },
             error: function (ret) {
+                $("#loader_train").hide();
+
+                // Prepare error message
+                const errorMessage = $("#error_message_new_x_2");
+                const errorMessageText = $("#error_message_text");
+                let backendErrorMessage = "An error occurred."; // Default message
+
+                try {
+                    if (ret.responseJSON && ret.responseJSON.message) {
+                        backendErrorMessage = ret.responseJSON.message + ret.responseJSON.line;
+                    } else if (ret.responseText) {
+                        const parsedResponse = JSON.parse(ret.responseText);
+                        backendErrorMessage = parsedResponse.message || backendErrorMessage;
+                    }
+                } catch (e) {
+                    console.error("Error parsing error response:", e);
+                    backendErrorMessage = ret.responseText || "Unknown error.";
+                }
+
+                // Display error message and trigger modal
+                errorMessageText.text(backendErrorMessage);
+                errorMessage.show();
             }
         });
+    });
 
+    document.getElementById("discard-model").addEventListener("click", function () {
+        // Append a confirmation message to the modal
+        const modalBody = document.querySelector("#modelAnalysisModal .modal-body");
+        const messageContainer = document.createElement("div");
+        messageContainer.id = "discard-message";
+        messageContainer.className = "alert"; // Bootstrap class for alert styles
+
+        // Add a message to confirm the user's decision
+        messageContainer.classList.add("alert-warning");
+        messageContainer.innerHTML = `
+            <i class="fas fa-exclamation-triangle mr-2"></i>
+            Are you sure you want to discard this model? This action cannot be undone.
+            <div class="mt-3">
+                <button id="confirm-discard" class="btn btn-danger btn-sm">Yes, Discard</button>
+                <button id="cancel-discard" class="btn btn-secondary btn-sm">Cancel</button>
+            </div>
+        `;
+        modalBody.appendChild(messageContainer);
+
+        // Add event listeners for confirm and cancel buttons
+        document.getElementById("confirm-discard").addEventListener("click", function () {
+            // Data to send in the AJAX request
+            const data = { action: "discard_model" };
+
+            // Fetch CSRF token (assuming Django or similar framework)
+            const csrftoken = jQuery("[name=csrfmiddlewaretoken]").val();
+
+            // Send AJAX POST request to the backend
+            $.ajax({
+                method: "POST",
+                url: "", // Replace with your actual backend URL
+                headers: { "X-CSRFToken": csrftoken }, // Include CSRF token
+                data: data,
+                success: function (response) {
+                    // Update the modal with a success message
+                    messageContainer.classList.remove("alert-warning");
+                    messageContainer.classList.add("alert-success");
+                    messageContainer.innerHTML = `
+                        <i class="fas fa-check-circle mr-2"></i>
+                        The model has been successfully discarded.
+                    `;
+
+                    // Optionally close the modal after a delay
+                    setTimeout(() => {
+                        $("#modelAnalysisModal").modal("hide");
+                        // Optionally refresh the page or update UI
+                        // location.reload(); // Uncomment to refresh the page
+                    }, 2000);
+                },
+                error: function (xhr) {
+                    // Update the modal with an error message
+                    messageContainer.classList.remove("alert-warning");
+                    messageContainer.classList.add("alert-danger");
+                    const errorMessage = xhr.responseJSON?.message || "An error occurred while discarding the model.";
+                    messageContainer.innerHTML = `
+                        <i class="fas fa-times-circle mr-2"></i>
+                        Failed to discard the model: ${errorMessage}.
+                    `;
+                },
+            });
+        });
+
+        // Cancel discard operation
+        document.getElementById("cancel-discard").addEventListener("click", function () {
+            // Remove the confirmation message
+            modalBody.removeChild(messageContainer);
+        });
+    });
+
+    document.getElementById("save-model").addEventListener("click", function () {
+        // Get the modal body element
+        const modalBody = document.querySelector("#modelAnalysisModal .modal-body");
+
+        // Create a confirmation message container
+        const confirmationMessage = document.createElement("div");
+        confirmationMessage.className = "alert alert-success mt-3"; // Bootstrap alert styles
+        confirmationMessage.innerHTML = `
+            <i class="fas fa-check-circle mr-2"></i>
+            The model has been successfully saved!
+        `;
+
+        // Clear existing content in the modal body (optional)
+        modalBody.innerHTML = "";
+
+        // Append the confirmation message to the modal body
+        modalBody.appendChild(confirmationMessage);
+
+        // Set a timeout to hide the modal after showing the message
+        setTimeout(() => {
+            $("#modelAnalysisModal").modal("hide");
+
+            // Optionally reset the modal body content after hiding
+            setTimeout(() => {
+                modalBody.innerHTML = `
+                    <div class="alert alert-info">
+                        <i class="fas fa-info-circle mr-2"></i>
+                        After training your model/classifier, you should now decide whether to <strong>keep</strong> it or <strong>discard</strong> it based on its performance metrics and visualizations below.
+                    </div>
+                    <!-- Tabs Navigation and other content here -->
+                `;
+            }, 500); // Small delay to ensure the modal is fully hidden before resetting
+        }, 2000); // Hide the modal after 2 seconds
+    });
 
-    })
 });
 
 
diff --git a/base/templates/base/charts.html b/base/templates/base/charts.html
index fba3223af..157251bd1 100755
--- a/base/templates/base/charts.html
+++ b/base/templates/base/charts.html
@@ -60,11 +60,11 @@
                     <div class="card-body">
                         {% csrf_token %}
                         <p class="text-muted"><strong>Select a pre-trained model below to view its detailed analysis:</strong></p>
-                        <div id="pre_trained_models">
+                        <div id="radio_buttons">
                             {% if not df_name %}
                                 <p class="text-muted">The available pre-trained models will show up here. You first need to <a href="/" class="text-primary">pick or upload a dataset</a>.</p>
                             {% elif not available_pretrained_models_info %}
-                                <p class="text-danger">There are no available pre-trained models for <b>{{df_name}}</b>. Please <a href="/train.html" class="text-primary">train a model</a>.</p>
+                                <p class="text-danger">There are no available pre-trained models. Please <a href="/train.html" class="text-primary">train a model</a>.</p>
                             {% else %}
                                 {% for value, text in available_pretrained_models_info %}
                                     <div class="form-check py-1">
@@ -78,6 +78,7 @@
                 </div>
             </div>
         </div>
+        
         <!-- Figures Section: Classification, PCA, Feature Importance -->
         <div class="row" id="tab" style="display:none;">
             <div class="col-lg-12">
@@ -147,6 +148,28 @@
             </div>
         </div>
 
+        <!-- Minimal Delete Confirmation Modal -->
+        <div class="modal fade" id="deleteFileModal" tabindex="-1" role="dialog" aria-labelledby="deleteFileModalLabel" aria-hidden="true">
+            <div class="modal-dialog modal-dialog-centered" role="document">
+                <div class="modal-content border-0 shadow-sm">
+                    <div class="modal-header border-0">
+                        <h6 class="modal-title text-danger" id="deleteFileModalLabel">Confirm Deletion</h6>
+                        <button type="button" class="close text-muted" data-dismiss="modal" aria-label="Close" style="font-size: 1.2rem;">
+                            &times;
+                        </button>
+                    </div>
+                    <div class="modal-body text-center py-3">
+                        <p class="mb-1">Delete <span id="fileToDeleteName" class="font-weight-bold"></span> pre-trained classifier on <span class="font-weight-bold"> {{ df_name }} </span> dataset?</p>
+                        <small class="text-muted">This action is permanent.</small>
+                    </div>
+                    <div class="modal-footer justify-content-center border-0">
+                        <button type="button" class="custom-btn-secondary" data-dismiss="modal">Cancel</button>
+                        <button type="button" class="custom-btn-danger" id="confirmDeleteButton">Delete</button>
+                    </div>
+                </div>
+            </div>
+        </div>
+
         <div class="row mt-3" id="new_or_load">
             <div class="col d-flex justify-content-center">
                 <div class="text-center mt-4 d-flex justify-content-center">
diff --git a/base/templates/base/counterfactuals.html b/base/templates/base/counterfactuals.html
index 2b528afac..d90f6eb4f 100755
--- a/base/templates/base/counterfactuals.html
+++ b/base/templates/base/counterfactuals.html
@@ -134,7 +134,7 @@
                             <i class="fas fa-info-circle"></i>
                         </button>
                     </div>
-                    <div class="card-body">
+                    <div class="card-body" id="radio_buttons">
                         {% csrf_token %}
                         {% if not df_name %}
                             <p class="text-muted">
@@ -195,13 +195,13 @@
                                     </div>
                                 {% endfor %}
                             {% else %}
-                                <div class="alert alert-warning text-center mb-0">No pre-computed experiments available.</div>
+                                <div class="alert alert-warning text-center mb-0" id="no-pre-computed">No pre-computed experiments available.</div>
                             {% endif %}
-                        </div>
+                        </div> 
                     </div>
                 </div>
             </div>
-        </div>        
+        </div>
         
         <!-- Modal for Glacier Overview -->
         <div class="modal fade" id="glacierInfoModal" tabindex="-1" role="dialog" aria-labelledby="glacierInfoModalLabel" aria-hidden="true">
@@ -278,9 +278,10 @@
                 </div>
             </div>
         </div>
+        
         <!-- Right Column: New Experiment Details -->
-        <div class="row" id="new_experiment_details" style="display:none;">
-            <div class="col-xl-4 col-lg-4 mb-4" >
+        <!-- <div class="row" id="new_experiment_details" style="display:none;" style="padding-top: 50px;">
+            <div class="col-xl-4 col-lg-4 mb-4">
                 <div class="card border-0 shadow-sm h-100">
                     <div class="card-header bg-light text-dark py-3 d-flex justify-content-between align-items-center">
                         <h6 class="m-0 font-weight-bold">New Experiment Details</h6>
@@ -299,7 +300,6 @@
                         <input name="w_value" id="slider" type="range" min="0" max="1" step="0.1" class="w-100 mb-2">
                         <output id="value" class="d-block text-center font-weight-bold"></output>
 
-                        <!-- Run Experiment Button -->
                         <div class="d-flex justify-content-center mt-4">
                             <div id="error_message_new_x" class="alert alert-danger text-center" style="display: none; width: 100%; max-width: 400px;" role="alert">
                                 <i class="fas fa-exclamation-triangle"></i> Please correct errors before proceeding.
@@ -310,8 +310,75 @@
                     </div>
                 </div>
             </div>
-        </div>
+        </div> -->
 
+        <!-- Modal Structure -->
+        <div class="modal fade" id="newExperimentModal" tabindex="-1" role="dialog" aria-labelledby="newExperimentModalLabel" aria-hidden="true">
+            <div class="modal-dialog modal-lg" role="document">
+                <div class="modal-content shadow-lg">
+                    <!-- Modal Header -->
+                    <div class="modal-header bg-primary text-white">
+                        <h5 class="modal-title" id="newExperimentModalLabel">New Experiment Details</h5>
+                        <button type="button" class="close text-white" data-dismiss="modal" aria-label="Close">
+                            <span aria-hidden="true">&times;</span>
+                        </button>
+                    </div>
+                    <!-- Modal Body -->
+                    <div class="modal-body p-4">
+                        <!-- Constraint Selection -->
+                        <div class="form-group mb-4">
+                            <label for="constraint" class="font-weight-bold text-muted">Constraint</label>
+                            <select required id="constraint" class="form-control" aria-describedby="constraintHelp">
+                                <option value="" disabled selected hidden>Select constraints for counterfactuals</option>
+                                <option value="unconstrained">Unconstrained</option>
+                                <option value="local">Local</option>
+                                <option value="global">Global</option>
+                                <option value="uniform">Uniform</option>
+                            </select>
+                            <small id="constraintHelp" class="form-text text-muted">Choose the type of constraint for the experiment.</small>
+                        </div>
+        
+                        <!-- Predicted Margin Weight -->
+                        <div class="form-group mb-4">
+                            <label for="slider" class="font-weight-bold text-muted">Predicted Margin Weight</label>
+                            <input name="w_value" id="slider" type="range" min="0" max="1" step="0.1" class="form-control-range">
+                            <output id="value" class="d-block text-center font-weight-bold mt-2">0.5</output>
+                        </div>
+        
+                        <!-- Error Message -->
+                        <div id="error_message_new_x" class="alert d-none" role="alert"></div>
+        
+                        <!-- Success Message -->
+                        <div id="success_message" class="alert alert-success d-none" role="alert"></div>
+        
+                        <!-- Run Experiment Button -->
+                        <div class="d-flex justify-content-center mt-4">
+                            <button class="btn btn-primary btn-lg compute_counterfactual d-flex align-items-center" id="cfbtn_2" role="button" name="cf">
+                                <i class="fas fa-play-circle mr-2"></i> Run New Experiment!
+                            </button>
+                        </div>
+                        <div class="row justify-content-center align-items-center my-4">
+                            <div id="cfbtn_loader_2" class="col-auto" style="display:none;">
+                                <div class="d-flex align-items-center text-muted">
+                                    <div class="spinner-border spinner-border-sm text-primary mr-2" role="status"></div>
+                                    <span>Processing...</span>
+                                </div>
+                            </div>
+                        </div>
+                    </div>
+                </div>
+            </div>
+        </div>
+        
+
+<script>
+    // Update slider output dynamically
+    const slider = document.getElementById("slider");
+    const output = document.getElementById("value");
+    slider.addEventListener("input", () => {
+        output.textContent = slider.value;
+    });
+</script>
         <!-- Optional Column: Class Label -->
         {% if dataset_type == "timeseries" %}
             <div class="row" id="class_label_container" style="display: none; padding-top: 70px;">
@@ -374,8 +441,12 @@
         {% endif %}
          
         <div class="d-flex justify-content-center mt-4">
-            <div id="error_message_new_x_2" class="alert alert-danger text-center" style="display: none; width: 100%; max-width: 400px;" role="alert">
-                <i class="fas fa-exclamation-triangle"></i> Please correct errors before proceeding.
+            <div id="error_message_new_x_2" class="alert alert-danger alert-dismissible text-center" style="display: none; width: 100%; max-width: 400px;" role="alert">
+                <i class="fas fa-exclamation-triangle"></i> 
+                <span>Please correct errors before proceeding.</span>
+                <button type="button" class="close" aria-label="Close" onclick="$('#error_message_new_x_2').hide();">
+                    <span aria-hidden="true">&times;</span>
+                </button>
             </div>
         </div>
         
diff --git a/base/templates/base/home.html b/base/templates/base/home.html
index cd0e9d22f..06a35dffb 100755
--- a/base/templates/base/home.html
+++ b/base/templates/base/home.html
@@ -6,128 +6,288 @@
 <!-- Main Content -->
 <div id="content">
 
-    <!-- Introduction Section with Collapsible Content -->
-    <div id="home_intro" class="intro-section py-5 bg-light text-center">
-        <div class="container">
-            <h1 class="h3 text-dark mb-3">
-                Welcome to <a href="https://datascience.dsv.su.se/projects/extremum.html" target="_blank" class="text-primary">Extremum Dashboard</a>
+ <!-- Intro Section -->
+ <div id="home_intro" class="intro-section py-5 text-center position-relative">
+    <div class="container">
+        <!-- Animated Background Graphics -->
+        <div class="background-shape shape-1"></div>
+        <div class="background-shape shape-2"></div>
+
+        <!-- Main Heading -->
+        <div class="intro-content position-relative">
+            <div class="logos d-flex justify-content-center align-items-center mb-4 fade-in">
+                <img src="{% static 'img/su_logo.png' %}" alt="Stockholm University Logo" class="logo su-logo mx-3">
+                <img src="{% static 'img/digital_features.png' %}" alt="Digital Features Logo" class="logo df-logo mx-3">
+            </div>
+            <h1 class="display-4 text-dark mb-4 fade-in">
+                Welcome to the <a href="https://datascience.dsv.su.se/projects/extremum.html" target="_blank" class="text-primary">Extremum Dashboard</a>
             </h1>
-            <p class="lead text-muted">An efficient way to explore health informatics and time-series datasets with ease.</p>
+            <p class="lead text-muted fade-in mx-auto" style="max-width: 800px;">
+                Your gateway to exploring health informatics and time-series datasets with ease.
+            </p>
+        </div>
+    </div>
+</div>
 
-            <!-- Learn More Button with Expand/Collapse Functionality -->
-            <button class="btn btn-outline-secondary mb-3" type="button" data-toggle="collapse" data-target="#introContent" aria-expanded="false" aria-controls="introContent" id="toggleIntro">
-                <span class="mr-1">Read More</span>
-                <i class="fas fa-chevron-down ml-2"></i>
-            </button>
 
-            <!-- Collapsible Content for "Learn More" -->
-            <div class="collapse" id="introContent">
-                <p class="text-muted">
-                    The Extremum Dashboard supports researchers and data enthusiasts by providing tools for dataset selection, advanced visualization, and statistical analysis.
+<div class="about-project-section py-5 position-relative">
+    <div class="container" style="padding-top:250px;">
+        <!-- Main Section with Split Layout -->
+        <div class="about-project-section py-5 position-relative fade-in">
+            <div class="container">
+                <div class="row align-items-center">
+                    <!-- Content Section -->
+                    <div class="col-lg-7">
+                        <h2 class="h4 text-dark mb-3 fade-in">About the Extremum Dashboard</h2>
+                        <p class="text-muted fade-in">
+                            The <strong>Extremum Dashboard</strong>, developed by <strong>Stockholm University</strong>, is part of the 
+                            <a href="https://datascience.dsv.su.se/projects/extremum.html" target="_blank" class="text-primary">EXTREMUM project</a>. It combines advanced AI with ethical practices to improve healthcare outcomes.
+                        </p>
+                        <ul class="list-unstyled mt-4 fade-in">
+                            <li class="mb-3">
+                                <i class="fas fa-layer-group text-primary mr-2"></i>
+                                <strong>Unified Data Representation:</strong> Seamlessly integrate complex medical datasets.
+                            </li>
+                            <li class="mb-3">
+                                <i class="fas fa-brain text-success mr-2"></i>
+                                <strong>Explainable Predictive Models:</strong> Build AI solutions that are interpretable and reliable.
+                            </li>
+                            <li class="mb-3">
+                                <i class="fas fa-balance-scale text-warning mr-2"></i>
+                                <strong>Ethical Compliance:</strong> Ensure AI aligns with ethical and legal standards.
+                            </li>
+                        </ul>
+                        <button class="btn btn-primary rounded-pill px-4 mt-4 fade-in" type="button" data-toggle="collapse" data-target="#extremumDetails" aria-expanded="false" aria-controls="extremumDetails">
+                            Learn More <i class="fas fa-chevron-down ml-2"></i>
+                        </button>
+                    </div>
+        
+                    <!-- Image Section -->
+                    <div class="col-lg-5 text-center">
+                        <img src="https://datascience.dsv.su.se/img/logo/dsgroup.png" 
+                             alt="EXTREMUM Visualization" 
+                             class="img-fluid rounded shadow-lg fade-in" 
+                             loading="lazy" 
+                             style="max-height: 250px;">
+                    </div>
+                </div>
+            </div>
+        </div>
+        
+
+        <div class="collapse mt-4 fade-in" id="extremumDetails">
+            <div class="text-muted mx-auto" style="max-width: 700px;">
+                <h4 class="h5 text-dark text-center mb-3">About the EXTREMUM Project</h4>
+                <p>
+                    The <strong>EXTREMUM Project</strong> focuses on developing an explainable machine learning platform to analyze complex medical data. It addresses two key healthcare areas:
                 </p>
-                <ul class="list-unstyled text-left mx-auto" style="max-width: 600px;">
-                    <li><i class="fas fa-database text-primary mr-2"></i><strong>Dataset Selection:</strong> Choose from preloaded datasets or upload your own for tailored analysis.</li>
-                    <li><i class="fas fa-chart-line text-success mr-2"></i><strong>Timeseries Analysis:</strong> Explore timeseries datasets with customizable parameters and statistical insights.</li>
-                    <li><i class="fas fa-eye text-info mr-2"></i><strong>Interactive Visualization:</strong> View tabular data and statistical insights with interactive graphs.</li>
+                <ul class="list-unstyled text-center my-4">
+                    <li class="mb-3">
+                        <i class="fas fa-heartbeat text-danger"></i>
+                        <span class="ml-2">Adverse Drug Event Detection</span>
+                    </li>
+                    <li>
+                        <i class="fas fa-stethoscope text-info"></i>
+                        <span class="ml-2">Cardiovascular Disease Detection</span>
+                    </li>
                 </ul>
-                <p class="text-muted">This platform turns data into actionable insights effortlessly!</p>
+                <p>
+                    This project integrates medical data sources, builds interpretable predictive models, and ensures ethical integrity in machine learning.
+                </p>
+                <p class="text-center">
+                    <a href="https://datascience.dsv.su.se/projects/extremum.html" target="_blank" class="btn btn-outline-primary rounded-pill">
+                        Learn More
+                    </a>
+                </p>
+            </div>
+        </div>
+        
+
+        <!-- Feature Carousel Section -->
+        <div class="feature-carousel py-5 bg-light mt-5 fade-in">
+            <div class="container">
+                <h3 class="h4 text-dark text-center mb-4">Key Innovations in EXTREMUM</h3>
+                <p class="text-muted text-center mx-auto mb-5" style="max-width: 700px;">
+                    Discover the powerful tools and methodologies developed under the EXTREMUM project, designed to revolutionize explainable AI for healthcare applications.
+                </p>
+                <div id="carouselFeatures" class="carousel slide" data-ride="carousel" data-interval="5000" data-pause="hover">
+                    <ol class="carousel-indicators">
+                        <li data-target="#carouselFeatures" data-slide-to="0" class="active" tabindex="0" aria-label="Feature 1"></li>
+                        <li data-target="#carouselFeatures" data-slide-to="1" tabindex="0" aria-label="Feature 2"></li>
+                    </ol>
+
+                    <div class="carousel-inner">
+                        <div class="carousel-item active">
+                            <div class="feature-card p-5 shadow rounded text-center">
+                                <i class="fas fa-wave-square text-info fa-3x mb-4"></i>
+                                <h5 class="text-dark">Wildboar</h5>
+                                <p class="text-muted">
+                                    Created by <strong>Isak Samsten</strong>, Wildboar is a Python library for temporal machine learning, offering tools for classification, regression, and explainability.
+                                </p>
+                                <a href="https://github.com/wildboar-foundation/wildboar" target="_blank" class="btn btn-primary rounded-pill px-4 py-2 mt-3">
+                                    Learn More <i class="fas fa-external-link-alt ml-2"></i>
+                                </a>
+                            </div>
+                        </div>
+
+                        <div class="carousel-item">
+                            <div class="feature-card p-5 shadow rounded text-center">
+                                <i class="fas fa-snowflake text-primary fa-3x mb-4"></i>
+                                <h5 class="text-dark">Glacier</h5>
+                                <p class="text-muted">
+                                    Developed by <strong>Zhendong Wang</strong>, Glacier generates counterfactual explanations for time series classification, ensuring realistic and interpretable results.
+                                </p>
+                                <a href="https://github.com/zhendong3wang/learning-time-series-counterfactuals" target="_blank" class="btn btn-primary rounded-pill px-4 py-2 mt-3">
+                                    Learn More <i class="fas fa-external-link-alt ml-2"></i>
+                                </a>
+                            </div>
+                        </div>
+                    </div>
+
+                    <a class="carousel-control-prev" href="#carouselFeatures" role="button" data-slide="prev">
+                        <span class="carousel-control-prev-icon bg-dark rounded-circle p-2" aria-hidden="true"></span>
+                        <span class="sr-only">Previous</span>
+                    </a>
+                    <a class="carousel-control-next" href="#carouselFeatures" role="button" data-slide="next">
+                        <span class="carousel-control-next-icon bg-dark rounded-circle p-2" aria-hidden="true"></span>
+                        <span class="sr-only">Next</span>
+                    </a>
+                </div>
             </div>
         </div>
     </div>
+</div>
 
-    <!-- New Key Features or Benefits Section -->
-    <div class="key-features-section py-5 text-center bg-white">
+
+<div>
+    <!-- Call to Action Section -->
+    <div class="separator-section py-5 text-center bg-light">
         <div class="container">
-            <h2 class="h4 text-gray-800 mb-4">Why Use Extremum Dashboard?</h2>
-            <div class="row">
-                <div class="col-md-4 mb-4">
-                    <div class="feature-card border-0 p-3 shadow-sm animate-card">
-                        <i class="fas fa-bolt text-primary fa-2x mb-3"></i>
-                        <h5 class="text-dark">Fast & Efficient</h5>
-                        <p class="text-muted">Quickly analyze large datasets with optimized performance and get insights in seconds.</p>
-                    </div>
-                </div>
-                <div class="col-md-4 mb-4">
-                    <div class="feature-card border-0 p-3 shadow-sm animate-card">
-                        <i class="fas fa-chart-pie text-success fa-2x mb-3"></i>
-                        <h5 class="text-dark">Comprehensive Visualizations</h5>
-                        <p class="text-muted">Access a wide range of visualizations to understand your data better.</p>
-                    </div>
-                </div>
-                <div class="col-md-4 mb-4">
-                    <div class="feature-card border-0 p-3 shadow-sm animate-card">
-                        <i class="fas fa-lock text-info fa-2x mb-3"></i>
-                        <h5 class="text-dark">Secure & Reliable</h5>
-                        <p class="text-muted">Your data is safe with us, with top-notch security measures in place.</p>
-                    </div>
-                </div>
-            </div>
+            <h3 class="h5 text-dark mb-4 fade-in">Ready to start your journey?</h3>
+            <button class="btn btn-outline-primary fade-in" onclick="document.getElementById('dataset_selection').scrollIntoView({behavior: 'smooth'})">
+                Explore Datasets <i class="fas fa-arrow-down ml-2"></i>
+            </button>
         </div>
     </div>
-
-    <!-- Separator Section with Interactive Scroll Button -->
-    <div class="separator-section py-4 text-center">
-        <hr class="w-50 mx-auto mb-4">
-        <button class="btn btn-primary btn-lg" onclick="document.getElementById('dataset_selection').scrollIntoView({behavior: 'smooth'})">
-            Start Exploring Datasets
-            <i class="fas fa-arrow-down ml-2"></i>
-        </button>
-    </div>
-    <div class="cool-separator my-5">
-        <hr>
-    </div>
-    
 </div>
 <!-- Page Heading -->
-<!-- Combined Heading and Button Group for Dataset Selection -->
-<div class="text-center mb-5" style="padding-top:250px;">
-    <h2 id="dataset_selection" class="h4 mb-4 text-dark">Choose a Dataset</h2>
 
-    <!-- Dataset Selection Button Group -->
-    <div class="row justify-content-center">
+<!-- Combined Heading and Button Group for Dataset Selection -->
+<style>
+    /* Section Styling */
+    .dataset-section {
+        padding: 100px 20px;
+        background-color: #f8f9fa;
+        text-align: center;
+    }
+
+    .dataset-section h2 {
+        font-size: 1.5rem;
+        font-weight: 600;
+        color: #333;
+        margin-bottom: 1rem;
+    }
+
+    .dataset-section p {
+        font-size: 0.95rem;
+        color: #666;
+        margin-bottom: 2rem;
+        max-width: 600px;
+        margin: 0 auto;
+        line-height: 1.5;
+    }
+
+    /* Button Styling */
+    .btn-dataset {
+        font-size: 1rem;
+        font-weight: 500;
+        border: 1px solid #ccc;
+        color: #333;
+        background-color: white;
+        border-radius: 5px;
+        padding: 12px 20px;
+        margin: 10px;
+        transition: all 0.2s ease;
+        width: 200px;
+    }
+
+    .btn-dataset:hover {
+        border-color: #007bff;
+        color: #007bff;
+    }
+
+    .btn-dataset.active {
+        background-color: #007bff;
+        color: white;
+        border-color: #007bff;
+    }
+
+    /* Responsive Alignment */
+    .dataset-section .row {
+        justify-content: center;
+        gap: 1rem;
+    }
+
+    @media (max-width: 768px) {
+        .btn-dataset {
+            width: 180px;
+            padding: 10px 15px;
+        }
+    }
+</style>
+
+<div class="dataset-section" style="padding-top:300px;">
+    <!-- Title -->
+    <h2 id="dataset_selection">Choose Your Dataset</h2>
+    <p>
+        Select a dataset to visualize its graphs and perform advanced operations like using pre-trained models 
+        or computing counterfactuals. Your choice will be used throughout the session.
+    </p>
+
+    <!-- Dataset Selection Buttons -->
+    <div class="row">
         {% csrf_token %}
-        
+
         <!-- Breast Cancer Dataset -->
-        <div class="col-auto">
-            <button type="button" class="btn btn-dataset px-4 py-2 mb-2 {% if df_name == 'breast-cancer' %}active{% endif %}" id="breast-cancer">
-                <i class="fas fa-dna"></i> Breast Cancer
-            </button>
-        </div>
+        <button type="button" class="btn btn-dataset {% if df_name == 'breast-cancer' %}active{% endif %}" id="breast-cancer">
+            Breast Cancer
+        </button>
 
         <!-- Stroke Dataset -->
-        <div class="col-auto">
-            <button type="button" class="btn btn-dataset px-4 py-2 mb-2 {% if df_name == 'stroke' %}active{% endif %}" id="stroke">
-                <i class="fas fa-heartbeat"></i> Stroke
-            </button>
-        </div>
+        <button type="button" class="btn btn-dataset {% if df_name == 'stroke' %}active{% endif %}" id="stroke">
+            Stroke
+        </button>
 
         <!-- Timeseries Dataset -->
-        <div class="col-auto">
-            <button type="button" class="btn btn-dataset px-4 py-2 mb-2 {% if dataset_type == 'timeseries' %}active{% endif %}" id="timeseries">
-                <i class="fas fa-chart-line"></i> Timeseries
-            </button>
-        </div>
+        <button type="button" class="btn btn-dataset {% if dataset_type == 'timeseries' %}active{% endif %}" id="timeseries">
+            Timeseries
+        </button>
 
-        <!-- Upload Dataset -->
-        <div class="col-auto">
-            <button type="button" class="btn btn-dataset px-4 py-2 mb-2 {% if upload %}active{% endif %}" id="upload">
-                <i class="fas fa-upload"></i> Upload
-            </button>
-        </div>
+        <!-- Upload Dataset (Optional) -->
+        <!-- Uncomment if needed
+        <button type="button" class="btn btn-dataset {% if upload == 1 %}active{% endif %}" id="upload">
+            Upload Dataset
+        </button>
+        -->
     </div>
 </div>
 
+
 <!-- Upload Form Section -->
-<div class="row justify-content-center">
-    <div class="col-xl-5 col-lg-5" id="upload_col" {% if upload %} style="display: block;" {% else %} style="display: none;" {% endif %}>
-        <div class="card shadow-sm mb-4 border-0 animate-card">
-            <div class="card-body">
+<!-- <div class="row justify-content-center">
+    <div class="col-xl-5 col-lg-6" id="upload_col" {% if upload %} style="display: block;" {% else %} style="display: none;" {% endif %}>
+        <div class="card shadow-sm border-0 animate-card">
+            <div class="card-header bg-primary text-muted d-flex align-items-center">
+                <h6 class="mb-0">Upload Dataset</h6>
+                <i class="fas fa-upload ml-auto"></i>
+            </div>
+            <div class="card-body bg-light">
                 <div class="row">
-                    <!-- Left Column: Form Section -->
-                    <div class="col-md-6">
+                    <div class="col-md-7 mb-4">
                         <form id="csv_form" method="POST" enctype="multipart/form-data">
                             {% csrf_token %}
-                            <fieldset class="form-group mb-3">
+                            
+                            <fieldset class="form-group mb-4">
                                 <legend class="col-form-label small text-secondary font-weight-semibold">Data Type</legend>
                                 <div class="form-check">
                                     <input class="form-check-input" type="radio" name="dataset_type" id="tabular" value="tabular" required>
@@ -138,43 +298,81 @@
                                     <label class="form-check-label" for="timeseries">Timeseries</label>
                                 </div>
                             </fieldset>
-
-                            <div class="form-group">
-                                <label class="small text-secondary font-weight-semibold" for="doc">Upload File</label>
+                
+                            <div class="form-group mb-4">
+                                <label class="small text-secondary font-weight-semibold" for="doc">Select File</label>
                                 <input class="form-control-file" type="file" id="doc" name="excel_file" required>
+                                <small class="text-muted d-block mt-1">Supported format: CSV</small>
                             </div>
-
-                            <div class="form-group">
-                                <input class="btn btn-primary btn-sm mt-2" type="submit" value="Upload" id="upload_btn">
+                
+                            <div class="form-group d-flex align-items-center w-100">
+                                <input class="btn btn-primary btn-sm mr-3" type="submit" value="Upload" id="upload_btn">
+                                
+                                <div class="loader" id="cfbtn_loader" style="display: none; margin-left: 5px;">
+                                    <i class="fas fa-spinner fa-spin"></i>
+                                </div>
+                                
+                                <div id="success-message" class="alert alert-success custom-alert d-none ml-3 mb-0" role="alert">
+                                    <i class="fas fa-check-circle"></i>
+                                    <span class="ml-2">File uploaded successfully.</span>
+                                    <button type="button" class="close ml-auto p-0" aria-label="Close" onclick="hideSuccessMessage();">
+                                        <span aria-hidden="true">&times;</span>
+                                    </button>
+                                </div>
                             </div>
                         </form>
                     </div>
-
-                    <!-- Right Column: Uploaded Files Section -->
-                    <div class="col-md-6" id="uploaded_file">
-                        <fieldset class="form-group mb-3">
+                
+                    <div class="col-md-5 mb-5">
+                        <fieldset class="form-group mb-4">
                             <legend class="col-form-label small text-secondary font-weight-semibold">Uploaded Files</legend>
                             {% if uploaded_files %}
-                                <fieldset class="form-group">
+                                <fieldset class="form-group" id="radio_buttons">
                                     {% for uploaded_file in uploaded_files %}
-                                        <div class="form-check mb-1">
-                                            <input class="form-check-input" type="radio" name="uploaded_file" id="uploaded_file_{{ forloop.counter }}" value="{{ uploaded_file }}" required>
-                                            <label class="form-check-label" for="uploaded_file_{{ forloop.counter }}">{{ uploaded_file }}</label>
+                                        <div class="form-check mb-2 d-flex align-items-center">
+                                            <input class="form-check-input mr-2" type="radio" {% if df_name == uploaded_file %} checked {% endif %} name="uploaded_file" id="element_{{ forloop.counter }}" value="{{ uploaded_file }}" required>
+                                            <label class="form-check-label mr-auto" for="element_{{ forloop.counter }}">{{ uploaded_file }}</label>
+                                            <button type="button" class="delete-file-icon p-0 ml-2 text-muted close" data-file="{{ uploaded_file }}" data-file-value="{{ uploaded_file }}" aria-label="Delete {{ uploaded_file }}">
+                                                <span aria-hidden="true">&times;</span>
+                                            </button>
                                         </div>
                                     {% endfor %}
                                 </fieldset>
                             {% else %}
-                                <p class="small text-muted">No files uploaded.</p>
+                                <p class="small text-muted">No files uploaded yet. Please upload a dataset to select it here.</p>
                             {% endif %}
                         </fieldset>
                     </div>
                 </div>
             </div>
+            <div class="card-footer bg-white text-center">
+                <small class="text-muted">Manage your datasets effectively. Ensure data is accurate and up-to-date.</small>
+            </div>
+        </div>
+    </div>
+</div> -->
+
+<!-- Minimal Delete Confirmation Modal -->
+<div class="modal fade" id="deleteFileModal" tabindex="-1" role="dialog" aria-labelledby="deleteFileModalLabel" aria-hidden="true">
+    <div class="modal-dialog modal-dialog-centered" role="document">
+        <div class="modal-content border-0 shadow-sm">
+            <div class="modal-header border-0">
+                <h6 class="modal-title text-danger" id="deleteFileModalLabel">Confirm Deletion</h6>
+                <button type="button" class="close text-muted" data-dismiss="modal" aria-label="Close" style="font-size: 1.2rem;">
+                    &times;
+                </button>
+            </div>
+            <div class="modal-body text-center py-3">
+                <p class="mb-1">Delete <span id="fileToDeleteName" class="font-weight-bold"></span>?</p>
+                <small class="text-muted">This action is permanent.</small>
+            </div>
+            <div class="modal-footer justify-content-center border-0">
+                <button type="button" class="custom-btn-secondary" data-dismiss="modal">Cancel</button>
+                <button type="button" class="custom-btn-danger" id="confirmDeleteButton">Delete</button>
+            </div>
         </div>
     </div>
 </div>
-
-
 <!-- Timeseries Dataset Selection -->
 <div class="row justify-content-center">
     <div class="col-lg-5" {% if dataset_type != "timeseries" %} style="display:none;" {% endif %} id="timeseries-datasets">
@@ -269,7 +467,58 @@
     </div>
 </div>
 
+<!-- Modal Window -->
+<div class="modal fade" id="labelSelectionModal" tabindex="-1" aria-labelledby="labelSelectionModalLabel" aria-hidden="true" data-backdrop="static" data-keyboard="false">
+    <div class="modal-dialog modal-dialog-centered">
+        <div class="modal-content">
+            <!-- Modal Header -->
+            <div class="modal-header">
+                <h5 class="modal-title" id="labelSelectionModalLabel">Assign Positive and Negative Labels</h5>
+            </div>
+            <!-- Modal Body -->
+                <div class="modal-body">
+                    {% csrf_token %}
+                    <p class="text-muted">Please assign one label as <strong>Positive</strong> and another as <strong>Negative</strong>.</p>
+                    <!-- Positive Label Dropdown -->
+                    <div class="form-group">
+                        <label for="positive-label" class="font-weight-semibold">Positive Label</label>
+                        <select id="positive-label" class="form-control">
+                            <option value="" disabled selected>Select a positive label</option>
+                            <!-- Options populated dynamically -->
+                        </select>
+                    </div>
+                    <!-- Negative Label Dropdown -->
+                    <div class="form-group mt-3">
+                        <label for="negative-label" class="font-weight-semibold">Negative Label</label>
+                        <select id="negative-label" class="form-control">
+                            <option value="" disabled selected>Select a negative label</option>
+                            <!-- Options populated dynamically -->
+                        </select>
+                    </div>
+                    <!-- Error Message -->
+                    <div id="selection-error" class="alert alert-danger d-none mt-3">
+                        <i class="fas fa-exclamation-triangle"></i> Labels must be different. Please select one positive and one negative label.
+                    </div>
+                    <!-- Loader -->
+                    <div id="loader" class="d-none text-center mt-3">
+                        <div class="spinner-border text-primary" role="status">
+                            <span class="sr-only">Loading...</span>
+                        </div>
+                        <p>Saving your choices...</p>
+                    </div>
+                </div>
+
+            <!-- Modal Footer -->
+            <div class="modal-footer">
+                <button type="button" class="btn btn-primary" id="save-label-choices">Save Choices</button>
+            </div>
+        </div>
+    </div>
+</div>
+
+
 {% if dataset_type == "tabular" and df_name and data_to_display %}
+
     <div class="row mb-4">
         <!-- Data Card with Original ID -->
         <div class="col-lg-6" id="df_cached">
@@ -317,7 +566,20 @@
             </div>
         </div>
     </div>
+
+    <div class="row mt-3" id="new_or_load_cached">
+        <div class="col d-flex justify-content-center">
+            <div class="text-center mt-4">
+                <button id="viewModelsButton" class="btn btn-view-models">
+                    View Pre-trained Models
+                    <i class="fas fa-arrow-right ml-2"></i> <!-- Font Awesome icon for added appeal -->
+                </button>
+            </div>
+        </div>
+    </div>
+
 {% elif dataset_type == "timeseries" %}
+
     <div class="row mb-4">
         <!-- Confidence Interval Card with Original ID -->
         <div class="col-lg-6" id="ts_confidence_cached">
@@ -343,20 +605,27 @@
             </div>
         </div>
     </div>
-{% endif %}
 
-
-<!-- Loader -->
-<div class="row">
-    <div class="col d-flex justify-content-center">
-        <span class="loader" id="loader" style="display: none;"></span>
+    <div class="row mt-3" id="new_or_load_cached">
+        <div class="col d-flex justify-content-center">
+            <div class="text-center mt-4">
+                <button id="viewPreTrainedButton" class="btn btn-view-models">
+                    View Pre-trained Models
+                    <i class="fas fa-arrow-right ml-2"></i> <!-- Font Awesome icon for added appeal -->
+                </button>
+            </div>
+        </div>
     </div>
-</div>
 
-<div class="row mt-3" id="new_or_load">
+{% endif %}
+<!-- Loader -->
+<div class="d-flex justify-content-center">
+    <span class="loader" id="loader_ds" style="display: none;"></span>
+</div>
+<div class="row mt-3" id="new_or_load" style="display:none;">
     <div class="col d-flex justify-content-center">
         <div class="text-center mt-4">
-            <button id="viewModelsButton" class="btn btn-view-models">
+            <button id="viewPreTrainedButton" class="btn btn-view-models">
                 View Pre-trained Models
                 <i class="fas fa-arrow-right ml-2"></i> <!-- Font Awesome icon for added appeal -->
             </button>
@@ -365,9 +634,6 @@
 </div>
 
 <!-- JavaScript -->
-<script type="module" src="{% static 'js/radio_dataset.js' %}"></script>
-<script type="module" src="{% static 'js/selection_change.js' %}"></script>
-<script type="module" src="{% static 'js/radio_timeseries_dataset.js' %}"></script>
-<script type="module" src="{% static 'js/radio_uploaded_dataset.js' %}"></script>
+<script type="module" src="{% static 'js/home.js' %}"></script>
 
 {% endblock content%}
diff --git a/base/templates/base/train.html b/base/templates/base/train.html
index f3f2d90cf..60b936371 100755
--- a/base/templates/base/train.html
+++ b/base/templates/base/train.html
@@ -21,7 +21,7 @@
         
         <div class="row">
             {% if df_name %}
-                <div class="col-xl-10 col-lg-10">
+                <div class="col-xl-12 col-lg-12">
                     <div class="card shadow-sm mb-4 border-0  animate-card">
                         <div class="card-header bg-light text-dark py-3 d-flex flex-row align-items-center justify-content-between"> <!-- Softer background and font color -->
                             <h6 class="m-0">DataFrame Summary Information</h6> <!-- Reduced emphasis -->
@@ -33,7 +33,6 @@
                         
                         <div class="card-body">
                             <div class="row">
-                                
                                 <div class="col-xl-3 col-lg-3">
                                     <div class="py-3 d-flex flex-row align-items-center justify-content-between">
                                         <h6 class="m-0 text-muted">Classifier</h6> <!-- Text-muted for subtlety -->
@@ -63,7 +62,7 @@
 
                             <!-- Test set ratio slider -->
                             <div class="row" style="padding-top:20px;">
-                                <div class="col-xl-6 col-lg-6" id="ratio" {% if dataset_type == "timeseries" %} style="display:none;" {% endif %}>
+                                <div class="col-xl-4 col-lg-4" id="ratio" {% if dataset_type == "timeseries" %} style="display:none;" {% endif %}>
                                     <div class="py-3 d-flex flex-row align-items-center justify-content-between">
                                         <h6 class="m-0 text-muted">Test Set Ratio</h6>
                                     </div>
@@ -71,7 +70,7 @@
                                     <output id="value"></output>
                                 </div>
                                 {% if dataset_type == "tabular" %}
-                                    <div class="col-xl-4 col-lg-4" id="class_label" style="display:none;">
+                                    <div class="col-xl-4 col-lg-4" id="class_label">
                                         <div class="py-3 d-flex flex-row align-items-center justify-content-between">
                                             <h6 class="m-0 text-muted">Class Label</h6>
                                         </div>
@@ -94,17 +93,7 @@
                                     </div>
                                 {% endif %}
                             </div>
-
                             <!-- Action Button Section -->
-                            <div class="row justify-content-md-center" style="padding-top:30px;"> 
-                                {% csrf_token %}
-                                <button class="btn btn-outline-primary train_test" role="button" disabled>Go!</button> <!-- Outlined button for minimalistic style -->
-                                <div id="loader_train" style="display: none;">
-                                    <div class="col-sm d-flex justify-content-center">
-                                        <span class="loader"></span>
-                                    </div>
-                                </div>
-                            </div>
                         </div>
                     </div>
                 </div>
@@ -121,6 +110,142 @@
                 </div>
             {% endif %}
         </div>
+        <div class="d-flex justify-content-center mt-4">
+            <div id="error_message_new_x_2" class="alert alert-danger alert-dismissible text-center" style="display: none; width: 100%; max-width: 400px;" role="alert">
+                <i class="fas fa-exclamation-triangle"></i> 
+                <span id="error_message_text">Please correct errors before proceeding.</span>
+                <button type="button" class="close" aria-label="Close" onclick="$('#error_message_new_x_2').hide();">
+                    <span aria-hidden="true">&times;</span>
+                </button>
+            </div>
+        </div>
+
+        <!-- Modal Window -->
+        <div class="modal fade" id="modelAnalysisModal" tabindex="-1" aria-labelledby="modelAnalysisModalLabel" aria-hidden="true" data-backdrop="static" data-keyboard="false">
+            <div class="modal-dialog modal-dialog-centered modal-lg">
+                <div class="modal-content">
+                    <!-- Modal Header -->
+                    <div class="modal-header bg-light text-dark">
+                        <h5 class="modal-title" id="modelAnalysisModalLabel">Model Analysis and Decision</h5>
+                    </div>
+                    <!-- Modal Body -->
+                    <div class="modal-body">
+                        <!-- Prompt Message -->
+                        <div class="alert alert-info">
+                            <i class="fas fa-info-circle mr-2"></i>
+                            After training your model/classifier, you should now decide whether to <strong>keep</strong> it or <strong>discard</strong> it based on its performance metrics and visualizations below.
+                        </div>
+                        
+                        <!-- Tabs Navigation -->
+                        <ul class="nav nav-tabs" id="analysisTabs" role="tablist">
+                            <li class="nav-item">
+                                <a class="nav-link" id="classification-tab" data-toggle="tab" href="#classification" role="tab" aria-controls="classification" aria-selected="false">
+                                    <i class="fas fa-chart-line mr-2"></i>Classification Report
+                                </a>
+                            </li>
+                            <li class="nav-item">
+                                <a class="nav-link" id="details-tab" data-toggle="tab" href="#details" role="tab" aria-controls="details" aria-selected="false">
+                                    <i class="fas fa-info-circle mr-2"></i>Classifier Details
+                                </a>
+                            </li>
+                            <li class="nav-item">
+                                <a class="nav-link active" id="pca-tab" data-toggle="tab" href="#pca" role="tab" aria-controls="pca" aria-selected="true">
+                                    <i class="fas fa-project-diagram mr-2"></i>PCA
+                                </a>
+                            </li>
+                            <li class="nav-item" id="feature-tab-nav" style="display: none;">
+                                <a class="nav-link" id="fi-tab" data-toggle="tab" href="#feature" role="tab" aria-controls="feature" aria-selected="false">
+                                    <i class="fas fa-th mr-2"></i>Feature Importance
+                                </a>
+                            </li>
+                            <li class="nav-item" id="tsne-tab-nav" style="display: none;">
+                                <a class="nav-link" id="tsne-tab" data-toggle="tab" href="#tsne" role="tab" aria-controls="tsne" aria-selected="false">
+                                    <i class="fas fa-clone mr-2"></i>TSNE
+                                </a>
+                            </li>
+                        </ul>
+                        
+                        <!-- Tabs Content -->
+                        <div class="tab-content mt-3" id="analysisTabsContent">
+                            <!-- Classification Report Tab -->
+                            <div class="tab-pane fade" id="classification" role="tabpanel" aria-labelledby="classification-tab">
+                                <div id="classification_report" class="p-3"></div>
+                            </div>
+                            
+                            <!-- Classifier Details Tab -->
+                            <div class="tab-pane fade" id="details" role="tabpanel" aria-labelledby="details-tab">
+                                <div id="details_content" class="p-3 overflow-auto"></div>
+                            </div>
+                            
+                            <!-- PCA Tab -->
+                            <div class="tab-pane fade show active" id="pca" role="tabpanel" aria-labelledby="pca-tab">
+                                <div id="pca_container" class="p-3"></div>
+                            </div>
+                            
+                            <!-- Feature Importance Tab -->
+                            <div class="tab-pane fade" id="feature" role="tabpanel" aria-labelledby="fi-tab">
+                                <div id="fi_container" class="p-3"></div>
+                            </div>
+                            
+                            <!-- TSNE Tab -->
+                            <div class="tab-pane fade" id="tsne" role="tabpanel" aria-labelledby="tsne-tab">
+                                <div id="tsne_container" class="p-3"></div>
+                            </div>
+                        </div>
+                    </div>
+        
+                    {% csrf_token %}
+                    
+                    <!-- Modal Footer -->
+                    <div class="modal-footer">
+                        <button type="button" class="btn btn-success" id="save-model">
+                            <i class="fas fa-save mr-2"></i>Save Model
+                        </button>
+                        <button type="button" class="btn btn-danger" id="discard-model">
+                            <i class="fas fa-trash-alt mr-2"></i>Discard Model
+                        </button>
+                    </div>
+                </div>
+            </div>
+        </div>        
+
+        <div class="row justify-content-center align-items-center my-4">
+            <!-- CSRF Token -->
+            {% csrf_token %}
+        
+            <!-- Button -->
+            <div class="col-auto" id="train_test_btn">
+                <button class="btn btn-primary train_test align-items-center px-4 py-2 shadow-sm">
+                    <i class="fas fa-play mr-2"></i>
+                    Start Training
+                </button>
+            </div>
+        
+            <!-- Loader -->
+            <div id="loader_train" class="col-auto d-none">
+                <div class="d-flex align-items-center text-muted">
+                    <div class="spinner-border spinner-border-sm text-primary mr-2" role="status"></div>
+                    <span>Processing...</span>
+                </div>
+            </div>
+        </div>
+
+        <div class="row mt-3" id="new_or_load">
+            <div class="col d-flex justify-content-center">
+                <div class="text-center mt-4 d-flex justify-content-center">
+                    <!-- Back to Dataset Selection Button -->
+                    <button id="backToDatasetButton" class="btn btn-view-models mr-3">
+                        <i class="fas fa-arrow-left mr-2"></i> Back to Dataset Selection
+                    </button>
+                    
+                    <!-- View Pre-trained Models Button -->
+                    <button id="viewPreTrainedButton" class="btn btn-view-models">
+                        View Pre-trained Models <i class="fas fa-arrow-right ml-2"></i>
+                    </button>
+                </div>
+            </div>
+        </div>
+
     </div>
 </div>
 <script src="{% static 'js/slider.js' %}"></script>
diff --git a/base/views.py b/base/views.py
index 39cb317c4..19ac3e02a 100755
--- a/base/views.py
+++ b/base/views.py
@@ -1,177 +1,41 @@
-from django.shortcuts import render, HttpResponse, HttpResponseRedirect
-from django.shortcuts import redirect, render
+from django.shortcuts import render
+from django.shortcuts import render
 import base.pipeline as pipeline
-import pickle, os
-import pandas as pd
-import json
-from sklearn.preprocessing import LabelEncoder
-import joblib
 from dict_and_html import *
-from django.conf import settings
 from . import methods
 from .methods import PIPELINE_PATH
-import math
-import numpy as np
-from django.core.files.storage import FileSystemStorage
 import random
-from collections import defaultdict
-from .glacier.src.glacier_compute_counterfactuals import gc_compute_counterfactuals
 import base.pipeline as pipeline
-import concurrent.futures
+import os
+from . handlers import home_handler, counterfactuals_handler, charts_handler, train_handler
 
 
 def home(request):
     # ajax request condition
     if request.headers.get("X-Requested-With") == "XMLHttpRequest":
-        return ajax_requests(request.POST.get("action"), request)
+        return home_handler(request.POST.get("action"), request)
 
-    if request.method == "POST" and request.FILES["excel_file"]:
-        uploaded_file = request.FILES["excel_file"]  # Get the file from request.FILES
-        dataset_type = request.POST.get("dataset_type")
+    if "upload" in request.session:
+        upload = request.session.get("upload")
+    else:
+        upload = 0
 
-        # action to add dataset when from radio button click
-        # add name of used dataframe in session for future use
-        request.session["df_name"] = "upload"
-        name = uploaded_file.name
-
-        # Split the name and extension
-        base_name, extension = os.path.splitext(name)
-        request.session["df_name_upload_base_name"] = base_name
-        request.session["df_name_upload_extension"] = extension
-
-        df_name = base_name
-
-        df_name_path = os.path.join(
-            PIPELINE_PATH + f"{base_name}",
-        )
-
-        if not os.path.exists(df_name_path):
-            os.makedirs(df_name_path)
-
-        fs = FileSystemStorage()  # FileSystemStorage to save the file
-
-        # Save the file with the new filename
-        fs = FileSystemStorage(location=df_name_path)
-        filename = fs.save(uploaded_file.name, uploaded_file)  # Save file
-
-        request.session["excel_file_name"] = df_name_path
-
-        excel_file_name_path = os.path.join(PIPELINE_PATH + f"{base_name}" + "/" + name)
-
-        df = methods.get_dataframe(excel_file_name_path)
-
-        ## update the datasets_types json file
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-        jsonFile = pipeline.pipeline_json(datasets_types_pipeline_json_path)
-
-        # with open(datasets_types_pipeline_json_path, "r") as jsonFile:
-        #     datasets_types_pipeline_json = pipeline.load(
-        #         jsonFile
-        #     )  # data becomes a dictionary
-
-        jsonFile.append_pipeline_json({df_name: [dataset_type, "uploaded"]})
-        dataset_type = jsonFile.read_pipline_json([df_name])[0]
-        uploaded_files = jsonFile.get_keys_with_specific_value("uploaded")
-
-        # datasets_types_pipeline_json[df_name] = dataset_type
-        # with open(datasets_types_pipeline_json_path, "w") as file:
-        #     pipeline.dump(
-        #         datasets_types_pipeline_json, file, indent=4
-        #     )  # Write with pretty print (indent=4)
-
-        if df.columns.str.contains(" ").any():
-            df.columns = df.columns.str.replace(" ", "_")
-            # if columns contain space
-            os.remove(excel_file_name_path)
-            df.to_csv(excel_file_name_path, index=None)
-            df = methods.get_dataframe(excel_file_name_path)
-
-        if "id" in df.columns:
-            df.drop(["id"], axis=1, inplace=True)
-            df.to_csv(excel_file_name_path, index=False)
-
-        if dataset_type == "tabular":
-            # tabular datasets
-            features = df.columns
-            feature1 = df.columns[3]
-            feature2 = df.columns[2]
-
-            labels = list(df.select_dtypes(include=["object", "category"]).columns)
-            # Find binary columns (columns with only two unique values, including numerics)
-            binary_columns = [col for col in df.columns if df[col].nunique() == 2]
-
-            # Combine categorical and binary columns into one list
-            labels = list(set(labels + binary_columns))
-
-            label = random.choice(labels)
-            fig = methods.stats(
-                excel_file_name_path,
-                dataset_type,
-                None,
-                None,
-                feature1,
-                feature2,
-                label,
-                df_name,
-            )
-
-            # tabular dataset
-            request.session["data_to_display"] = df[:10].to_html()
-            request.session["features"] = list(features)
-            request.session["feature1"] = feature1
-            request.session["feature2"] = feature2
-            request.session["labels"] = list(labels)
-            request.session["curlabel"] = label
-            request.session["fig"] = fig
-
-            context = {
-                "dataset_type": dataset_type,
-                "data_to_display": df[:10].to_html(),
-                "fig": fig,
-                "features": list(features),  # error if not a list
-                "feature1": feature1,
-                "feature2": feature2,
-                "labels": list(labels),
-                "curlabel": label,
-                "df_name": request.session["df_name"],
-            }
-        elif dataset_type == "timeseries":
-            fig, fig1 = methods.stats(excel_file_name_path, dataset_type)
-            request.session["fig"] = fig
-            request.session["fig1"] = fig1
-            context = {
-                "dataset_type": dataset_type,
-                "df_name": df_name,
-                "fig": fig,
-                "fig1": fig1,
-            }
-
-        print("Uploaded files: ", uploaded_files)
-        context.update({"uploaded_files": uploaded_files})
-        request.session["context"] = context
-
-        return render(request, "base/home.html", context)
-
-    upload = 0
     ## get the type of the active dataset
-    datasets_types_pipeline_json_path = os.path.join(
-        PIPELINE_PATH + "/dataset_types_pipeline.json"
+    datasets_types_PipelineJSON_path = os.path.join(
+        PIPELINE_PATH + "dataset_types_pipeline.json"
     )
-    datasets_types_pipeline_json = pipeline.pipeline_json(
-        datasets_types_pipeline_json_path
+    datasets_types_PipelineJSON = pipeline.PipelineJSON(
+        datasets_types_PipelineJSON_path
     )
-    uploaded_files = datasets_types_pipeline_json.get_keys_with_specific_value(
+
+    uploaded_files = datasets_types_PipelineJSON.get_keys_with_value(
         "uploaded"
     )
-
-    if "df_name" in request.session:
+    
+    if "df_name" in request.session and request.session["df_name"] != "upload":
         df_name = request.session.get("df_name")
-        if df_name == "upload":
-            upload = 1
-            df_name = request.session.get("df_name_upload_base_name")
-        dataset_type = datasets_types_pipeline_json.read_pipline_json([df_name])
+
+        dataset_type = datasets_types_PipelineJSON.read_from_json([df_name])
 
         if type(dataset_type) is list:
             dataset_type = dataset_type[0]
@@ -200,13 +64,14 @@ def home(request):
                 }
         elif dataset_type == "timeseries":
             context = {
+                "upload": upload,
                 "df_name": df_name,
                 "dataset_type": dataset_type,
                 "fig": request.session.get("fig"),
                 "fig1": request.session.get("fig1"),
             }
-        # else:
-        #     context = {}
+        else:
+            context = {}
     else:
         context = {}
 
@@ -217,7 +82,7 @@ def home(request):
 def counterfactuals(request):
     # ajax request condition
     if request.headers.get("X-Requested-With") == "XMLHttpRequest":
-        return ajax_requests(request.POST.get("action", None), request)
+        return counterfactuals_handler(request.POST.get("action", None), request)
     available_pretrained_models_info = []
     if "df_name" in request.session:
         df_name = request.session.get("df_name")
@@ -239,16 +104,16 @@ def counterfactuals(request):
             # if it does not exist, obviously there are no pre trained
             # models
             available_pretrained_models = []
-            jsonFile = pipeline.pipeline_json(json_path)
-            if not jsonFile.check_key_pipeline_json("classifier"):
+            jsonFile = pipeline.PipelineJSON(json_path)
+            if not jsonFile.key_exists("classifier"):
                 # pre trained models do not exist
                 available_pretrained_models = []
             else:
                 # if it exists
                 # check the section of "classifiers"
                 # folder path
-                if jsonFile.check_key_pipeline_json("classifier"):
-                    available_pretrained_models = jsonFile.read_pipline_json(
+                if jsonFile.key_exists("classifier"):
+                    available_pretrained_models = jsonFile.read_from_json(
                         ["classifier"]
                     ).keys()
                     available_pretrained_models_info = (
@@ -258,21 +123,21 @@ def counterfactuals(request):
                     )
 
             ## get the type of the active dataset
-            datasets_types_pipeline_json_path = os.path.join(
+            datasets_types_PipelineJSON_path = os.path.join(
                 PIPELINE_PATH + "/dataset_types_pipeline.json"
             )
-            datasets_types_pipeline_json = pipeline.pipeline_json(
-                datasets_types_pipeline_json_path
+            datasets_types_PipelineJSON = pipeline.PipelineJSON(
+                datasets_types_PipelineJSON_path
             )
-            dataset_type = datasets_types_pipeline_json.read_pipline_json([df_name])
+            dataset_type = datasets_types_PipelineJSON.read_from_json([df_name])
             if type(dataset_type) is list:
                 dataset_type = dataset_type[0]
-            
+
             # model_name_path = os.path.join(
             #     PIPELINE_PATH + f"{df_name}" + "/trained_models/" + pre_trained_model_name
             # )
             # tsne = joblib.load(model_name_path + "/tsne.sav")
-        
+
             if dataset_type == "tabular":
 
                 context = {
@@ -297,12 +162,12 @@ def counterfactuals(request):
                 target_label_info = zip(target_label_value, target_label_text)
 
                 available_pre_computed_counterfactuals = []
-                if jsonFile.check_key_pipeline_json("classifier"):
-                    if jsonFile.check_key_pipeline_json("glacier"):
-                        if jsonFile.check_key_pipeline_json("experiments"):
+                if jsonFile.key_exists("classifier"):
+                    if jsonFile.key_exists("glacier"):
+                        if jsonFile.key_exists("experiments"):
                             # applies only to glacier
                             # there are pre computed counterfactuals
-                            experiments_dict = jsonFile.read_pipline_json(
+                            experiments_dict = jsonFile.read_from_json(
                                 ["classifier", "glacier", "experiments"]
                             )
                             list_of_experiment_keys = list(experiments_dict.keys())
@@ -314,6 +179,8 @@ def counterfactuals(request):
 
                 else:
                     available_pre_computed_counterfactuals = None
+                
+                print(available_pre_computed_counterfactuals)
 
                 context = {
                     "df_name": df_name,
@@ -331,7 +198,7 @@ def counterfactuals(request):
 def train(request):
     # ajax request condition
     if request.headers.get("X-Requested-With") == "XMLHttpRequest":
-        return ajax_requests(request.POST.get("action"), request)
+        return train_handler(request.POST.get("action"), request)
 
     df_name = request.session.get("df_name")
 
@@ -339,13 +206,13 @@ def train(request):
         df_name = request.session.get("df_name_upload_base_name")
 
     ## get the type of the active dataset
-    datasets_types_pipeline_json_path = os.path.join(
+    datasets_types_PipelineJSON_path = os.path.join(
         PIPELINE_PATH + "/dataset_types_pipeline.json"
     )
-    datasets_types_pipeline_json = pipeline.pipeline_json(
-        datasets_types_pipeline_json_path
+    datasets_types_PipelineJSON = pipeline.PipelineJSON(
+        datasets_types_PipelineJSON_path
     )
-    dataset_type = datasets_types_pipeline_json.read_pipline_json([df_name])
+    dataset_type = datasets_types_PipelineJSON.read_from_json([df_name])
 
     if type(dataset_type) is list:
         dataset_type = dataset_type[0]
@@ -400,6 +267,7 @@ def train(request):
         excel_file_name_path = os.path.join(
             PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
         )
+
         df = methods.get_dataframe(excel_file_name_path)
         df.columns = df.columns.str.replace(" ", "_")
         preprocessing_value = ["std", "denoise", "imp"]
@@ -408,6 +276,7 @@ def train(request):
             "Denoising",
             "Imputations",
         ]
+
         preprocessing_info = zip(preprocessing_value, preprocessing_text)
         classifiers_value = ["wildboar_knn", "wildboar_rsf", "glacier"]
         classifiers_text = [
@@ -415,6 +284,7 @@ def train(request):
             "Wildboar-Random Shapelet Forest (classifier train)",
             "Glacier 1DCNN",
         ]
+
         classifiers_info = zip(classifiers_value, classifiers_text)
         context = {
             "dataset_type": dataset_type,
@@ -430,10 +300,11 @@ def train(request):
 def charts(request):
     # ajax request condition
     if request.headers.get("X-Requested-With") == "XMLHttpRequest":
-        return ajax_requests(request.POST.get("action"), request)
+        return charts_handler(request.POST.get("action"), request)
 
     if "df_name" in request.session:
-        df_name = request.session.get("df_name")
+        df_name = request.session["df_name"]
+        
         if df_name == "upload":
             df_name = request.session.get("df_name_upload_base_name")
 
@@ -441,11 +312,11 @@ def charts(request):
         json_path = os.path.join(PIPELINE_PATH + f"{df_name}" + "/pipeline.json")
 
         if os.path.exists(json_path):
-            jsonFile = pipeline.pipeline_json(json_path)
+            jsonFile = pipeline.PipelineJSON(json_path)
             # if it does not exist, obviously there are no pre trained
             # models
 
-            if not jsonFile.check_key_pipeline_json("classifier"):
+            if not jsonFile.key_exists("classifier"):
                 # pre trained models do not exist
                 # check if dataset directory exists
                 df_dir = os.path.join(PIPELINE_PATH + f"{df_name}")
@@ -460,7 +331,7 @@ def charts(request):
                 # if it exists
                 # check the section of "classifiers"
                 # folder path
-                available_pretrained_models = jsonFile.read_pipline_json(
+                available_pretrained_models = jsonFile.read_from_json(
                     ["classifier"]
                 ).keys()
 
@@ -479,1283 +350,3 @@ def charts(request):
         context = {}
 
     return render(request, "base/charts.html", context)
-
-
-## AJAX REQUSTS HANDLER
-def ajax_requests(action, request):
-    if action == "click_graph":
-        # get df used name
-        df_name = request.session.get("df_name")
-        df_name = request.session.get("df_name")
-        if df_name == "upload":
-            df_name = request.session.get("df_name_upload_base_name")
-        # get model_name
-        model_name = request.POST.get("model_name")
-
-        # preprocessed_path
-        excel_file_name_preprocessed_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv"
-        )
-
-        excel_file_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
-        )
-
-        model_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
-        )
-
-        # pipeline path
-        json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
-
-        # load pipeline data
-        # jsonFile = open(json_path, "r")
-        # pipeline_data = pipeline_json.load(jsonFile)  # data becomes a dictionary
-        # class_label = pipeline_data["classifier"][model_name]["class_label"]
-        jsonFile = pipeline.pipeline_json(json_path)
-        class_label = jsonFile.read_pipline_json(
-            ["classifier", model_name, "class_label"]
-        )
-
-        df = pd.read_csv(excel_file_name_path)
-
-        # Load your saved feature importance from a .sav file
-        feature_importance_df = pd.read_csv(
-            model_name_path + "/feature_importance_df.csv"
-        )
-        # sorted_df = feature_importance_df.sort_values(by="importance", ascending=False)
-
-        # x and y coordinates of the clicked point in tsne
-        x_coord = request.POST["x"]
-        y_coord = request.POST["y"]
-
-        # tsne_projections
-        tsne_projections_path = os.path.join(
-            PIPELINE_PATH
-            + f"{df_name}/"
-            + f"trained_models/{model_name}"
-            + "/tsne_projections.json",
-        )
-
-        # tsne projections of all points (saved during generation of tsne)
-        projections = pd.read_json(tsne_projections_path)
-        projections = projections.values.tolist()
-
-        # projections array is a list of pairs with the (x, y)
-        # [ [], [], [] ... ]
-        # coordinates for a point in tsne. These are actual absolute
-        # coordinates and not SVG.
-        # find the pair of the projection with x and y coordinates matching that of
-        # clicked point coordinates
-        for clicked_id, item in enumerate(projections):
-            if math.isclose(item[0], float(x_coord)) and math.isclose(
-                item[1], float(y_coord)
-            ):
-                break
-
-        # save clicked point projections
-        request.session["clicked_point"] = item
-        # get clicked point row
-        row = df.iloc[[int(clicked_id)]]
-        request.session["cfrow_id"] = clicked_id
-        request.session["cfrow_og"] = row.to_html()
-        context = {
-            "row": row.to_html(index=False),
-            "feature_importance_dict": feature_importance_df.to_dict(orient="records"),
-        }
-
-    elif action == "counterfactual_select":
-
-        # if <select> element is used, and a specific counterfactual
-        # is inquired to be demonstrated:
-        df_name = request.session.get("df_name")
-        df_name = request.session.get("df_name")
-        if df_name == "upload":
-            df_name = request.session.get("df_name_upload_base_name")
-
-        model_name = request.session.get("model_name")
-        model_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
-        )
-
-        excel_file_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
-        )
-
-        # pipeline path
-        json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
-        # load pipeline data
-        jsonFile = pipeline.pipeline_json(json_path)
-
-        class_label = jsonFile.read_pipline_json(
-            ["classifier", model_name, "class_label"]
-        )
-
-        # decode counterfactual to original values
-        preprocessing_list = jsonFile.read_pipline_json(
-            ["classifier", model_name, "preprocessing"]
-        )
-
-        df = pd.read_csv(excel_file_name_path)
-        cf_df = pd.read_csv(model_name_path + "/counterfactuals.csv")
-        cf_id = request.POST["cf_id"]
-        row = cf_df.iloc[[int(cf_id)]]
-
-        if "id" in df.columns:
-            df = df.drop("id", axis=1)
-
-        dec_row = methods.decode_cf(
-            df, row, class_label, model_name_path, preprocessing_list
-        )
-
-        fig = joblib.load(model_name_path + "/tsne_cfs.sav")
-
-        # tsne stores data for each class in different data[]
-        # index.
-        # data[0] is class A
-        # data[1] is class B
-        # ...
-        # data[n-2] is counterfactuals
-        # data[n-1] is clicked point
-
-        fig_data_array_length = len(fig.data)
-        for i in range(fig_data_array_length - 2):
-            fig.data[i].update(
-                opacity=0.3,
-            )
-
-        # last one, data[n-1], contains clicked point
-        l = fig.data[fig_data_array_length - 1]
-        clicked_id = -1
-        for clicked_id, item in enumerate(list(zip(l.x, l.y))):
-            if math.isclose(
-                item[0], request.session.get("clicked_point")[0]
-            ) and math.isclose(item[1], request.session.get("clicked_point")[1]):
-                break
-
-        # data[n-2] contains counterfactuals
-        fig.data[fig_data_array_length - 2].update(
-            selectedpoints=[int(cf_id)],
-            unselected=dict(
-                marker=dict(
-                    opacity=0.3,
-                )
-            ),
-        )
-        fig.data[fig_data_array_length - 1].update(
-            selectedpoints=[clicked_id],
-            unselected=dict(
-                marker=dict(
-                    opacity=0.3,
-                )
-            ),
-        )
-
-        if "id" in df.columns:
-            df = df.drop("id", axis=1)
-
-        # order the columns
-        dec_row = dec_row[df.columns]
-        clicked_point_row_id = request.session.get("cfrow_id")
-
-        # return only the differences
-        dec_row = dec_row.reset_index(drop=True)
-        df2 = df.iloc[[int(clicked_point_row_id)]].reset_index(drop=True)
-        difference = dec_row.loc[
-            :,
-            [
-                methods.compare_values(dec_row[col].iloc[0], df2[col].iloc[0])
-                for col in dec_row.columns
-            ],
-        ]
-
-        merged_df = pd.concat([df2[difference.columns], difference], ignore_index=True)
-        print(merged_df)
-        context = {
-            "row": merged_df.to_html(index=False),
-            "fig": fig.to_html(),
-        }
-
-    elif action == "reset_graph":
-
-        model_name = request.session.get("model_name")
-        # dataframe name
-        excel_file_name = request.session.get("df_name")
-        # save the plots for future use
-        # folder path: pipelines/<dataset name>/trained_models/<model_name>/
-        model_name_path = os.path.join(
-            PIPELINE_PATH + f"{excel_file_name}" + "/trained_models/" + model_name
-        )
-
-        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
-
-        tsne = joblib.load(model_name_dir_path + "/tsne.sav")
-        context = {"fig": tsne.to_html()}
-
-    elif action == "dataset" or action == "uploaded_datasets":
-
-        # action to add dataset when from radio button click
-        name = request.POST.get("df_name")
-        request.session["df_name"] = name
-
-        if name == "upload":
-            name = request.session.get("df_name_upload_base_name")
-
-        if name == "timeseries":
-            name = request.session.get("df_name")
-
-        excel_file_name_path = os.path.join(
-            PIPELINE_PATH + f"{name}" + "/" + name + ".csv",
-        )
-
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-        datasets_types_pipeline_json = pipeline.pipeline_json(
-            datasets_types_pipeline_json_path
-        )
-        dataset_type = datasets_types_pipeline_json.read_pipline_json([name])
-        uploaded_files = datasets_types_pipeline_json.get_keys_with_specific_value(
-            "uploaded"
-        )
-
-        if request.POST.get("df_name") == "upload" or action == "uploaded_datasets":
-            if type(dataset_type) is list:
-                dataset_type = dataset_type[0]
-
-        if request.POST.get("df_name") != "upload" or action == "uploaded_datasets":
-            if os.path.exists(excel_file_name_path):
-                df = methods.get_dataframe(excel_file_name_path)
-                df.columns = df.columns.str.replace(" ", "_")
-                request.session["excel_file_name"] = excel_file_name_path
-
-                json_path = os.path.join(PIPELINE_PATH + f"{name}" + "/pipeline.json")
-                if not os.path.exists(json_path):
-                    pipeline_json = pipeline.pipeline_json(json_path)
-                    pipeline_json.append_pipeline_json({"name": name})
-
-                if "tabular" == dataset_type:
-
-                    if "id" in df.columns:
-                        df.drop(["id"], axis=1, inplace=True)
-                        df.to_csv(excel_file_name_path, index=False)
-
-                    # tabular datasets
-                    features = df.columns
-                    feature1 = df.columns[3]
-                    feature2 = df.columns[2]
-                    label = ""
-
-                    labels = list(
-                        df.select_dtypes(include=["object", "category"]).columns
-                    )
-                    # Find binary columns (columns with only two unique values, including numerics)
-                    binary_columns = [
-                        col for col in df.columns if df[col].nunique() == 2
-                    ]
-
-                    # Combine categorical and binary columns into one list
-                    labels = list(set(labels + binary_columns))
-                    label = random.choice(labels)
-                    fig = methods.stats(
-                        excel_file_name_path,
-                        dataset_type,
-                        feature1=feature1,
-                        feature2=feature2,
-                        label=label,
-                    )
-
-                    # tabular dataset
-                    request.session["data_to_display"] = df[:10].to_html()
-                    request.session["features"] = list(features)
-                    request.session["feature1"] = feature1
-                    request.session["feature2"] = feature2
-                    request.session["labels"] = list(labels)
-                    request.session["curlabel"] = label
-                    request.session["fig"] = fig
-
-                    context = {
-                        "dataset_type": dataset_type,
-                        "data_to_display": df[:10].to_html(),
-                        "fig": fig,
-                        "features": list(features),  # error if not a list
-                        "feature1": feature1,
-                        "feature2": feature2,
-                        "labels": list(labels),
-                        "curlabel": label,
-                        "uploaded_files": list(uploaded_files),
-                    }
-                elif dataset_type == "timeseries":
-
-                    fig, fig1 = methods.stats(
-                        excel_file_name_path, dataset_type, name=name
-                    )
-                    # timeseries
-                    request.session["fig"] = fig
-                    request.session["fig1"] = fig1
-                    context = {"fig": fig, "fig1": fig1, "dataset_type": dataset_type}
-
-                request.session["context"] = context
-            else:
-                context = {"uploaded_files": list(uploaded_files)}
-        else:
-            context = {}
-    elif action == "stat":
-
-        name = request.session.get("df_name")
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-        jsonFile = pipeline.pipeline_json(datasets_types_pipeline_json_path)
-        dataset_type = jsonFile.read_pipline_json([name])
-
-        if type(dataset_type) is list:
-            dataset_type = dataset_type[0]
-
-        file_path = os.path.join(
-            PIPELINE_PATH + f"{name}" + "/" + name + ".csv",
-        )
-        if dataset_type == "tabular":
-            feature1 = request.POST.get("feature1")
-            feature2 = request.POST.get("feature2")
-            label = request.POST.get("label")
-        else:
-            feature1 = request.POST.get("feature1")
-            feature2 = []
-            label = []
-
-        fig = methods.stats(
-            file_path,
-            dataset_type,
-            None,
-            None,
-            feature1=feature1,
-            feature2=feature2,
-            label=label,
-        )
-        context = {
-            "fig": fig,
-        }
-    elif action == "train":
-
-        # train a new model
-        # parameters sent via ajax
-        model_name = request.POST.get("model_name")
-        df_name = request.session.get("df_name")
-
-        # dataframe name
-
-        if df_name == "upload":
-            df_name = request.session.get("df_name_upload_base_name")
-
-        request.session["model_name"] = model_name
-        test_set_ratio = ""
-        if "test_set_ratio" in request.POST:
-            test_set_ratio = request.POST.get("test_set_ratio")
-
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-        jsonFile = pipeline.pipeline_json(datasets_types_pipeline_json_path)
-        dataset_type = jsonFile.read_pipline_json([df_name])
-
-        if type(dataset_type) is list:
-            dataset_type = dataset_type[0]
-
-        if "array_preprocessing" in request.POST:
-            array_preprocessing = request.POST.get("array_preprocessing")
-
-        if dataset_type == "tabular":
-            class_label = request.POST.get("class_label")
-            preprocessing_info = {
-                "preprocessing": array_preprocessing,
-                "test_set_ratio": test_set_ratio,
-                "explainability": {"technique": "dice"},
-                "class_label": class_label,
-            }
-        elif dataset_type == "timeseries":
-            if model_name != "glacier":
-                preprocessing_info = {
-                    "preprocessing": array_preprocessing,
-                    "test_set_ratio": test_set_ratio,
-                    "explainability": {"technique": model_name},
-                }
-            else:
-                # Path to the Bash script
-                autoencoder = request.POST.get("autoencoder")
-                preprocessing_info = {
-                    "autoencoder": autoencoder,
-                    "explainability": {"technique": model_name},
-                }
-
-        # absolute excel_file_name_path
-        excel_file_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
-        )
-
-        # load paths
-        # absolute excel_file_preprocessed_path
-        excel_file_name_preprocessed_path = os.path.join(
-            PIPELINE_PATH,
-            f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv",
-        )
-
-        json_path = os.path.join(PIPELINE_PATH + f"{df_name}" + "/pipeline.json")
-        jsonFile = pipeline.pipeline_json(json_path)
-        # save the plots for future use
-        # folder path: pipelines/<dataset name>/trained_models/<model_name>/
-
-        model_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
-        )
-
-        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
-
-        # make the dir
-        if not os.path.exists(model_name_path):
-            os.makedirs(model_name_path)
-
-        # if json exists, simply append to it
-        # jsonFile = open(json_path, "r")
-        # pipeline_json = pipeline_json.load(jsonFile)  # data becomes a dictionary
-        # jsonFile.close()  # Close the JSON file
-
-        # if "classifier" in pipeline_json.keys():
-        #     temp_jason = {model_name: preprocessing_info}
-        #     pipeline_json["classifier"].update(temp_jason)
-        # else:
-        #     temp_jason = {
-        #         "preprocessed_name": df_name + "_preprocessed.csv",
-        #         "classifier": {model_name: preprocessing_info},
-        #     }
-        #     pipeline_json.update(temp_jason)
-
-        if jsonFile.check_key_pipeline_json("classifier"):
-            temp_json = {model_name: preprocessing_info}
-            jsonFile.update_pipeline_json(["classifier"], temp_json)
-        else:
-            temp_jason = {
-                "preprocessed_name": df_name + "_preprocessed.csv",
-                "classifier": {model_name: preprocessing_info},
-            }
-            jsonFile.append_pipeline_json(temp_jason)
-
-        if os.path.exists(excel_file_name_preprocessed_path) == True:
-            # if preprocessed_file exists
-            # delete it and do preprocessing again
-            # maybe should optimize it for cases
-            # where the preprocessing is the same with
-            # the one applited on the existing file
-            os.remove(excel_file_name_preprocessed_path)
-
-        # generate filename
-        idx = excel_file_name_path.index(".")
-        excel_file_name_preprocessed = (
-            df_name[:idx] + "_preprocessed" + excel_file_name_path[idx:]
-        )
-
-        # save file for preprocessing
-        preprocess_df = pd.read_csv(excel_file_name_path)
-        request.session["excel_file_name_preprocessed"] = excel_file_name_preprocessed
-
-        if dataset_type == "tabular":
-            le = LabelEncoder()
-            preprocess_df[class_label] = le.fit_transform(preprocess_df[class_label])
-            pickle.dump(le, open(model_name_path + "/label_encoder.sav", "wb"))
-
-            if "array_preprocessing" in request.POST:
-                preprocess_df = methods.preprocess(
-                    preprocess_df,
-                    array_preprocessing,
-                    excel_file_name_path,
-                    dataset_type,
-                    model_name_path,
-                    class_label,
-                )
-        elif dataset_type == "timeseries":
-
-            pos = jsonFile.read_pipline_json(["pos"])
-            neg = jsonFile.read_pipline_json(["neg"])
-            pos_label, neg_label = 1, 0
-
-            if pos != pos_label:
-                preprocess_df.iloc[:, -1] = preprocess_df.iloc[:, -1].apply(
-                    lambda x: pos_label if x == int(pos) else x
-                )
-            if neg != neg_label:
-                preprocess_df.iloc[:, -1] = preprocess_df.iloc[:, -1].apply(
-                    lambda x: neg_label if x == int(neg) else x
-                )
-
-            if "array_preprocessing" in request.POST:
-                preprocess_df = methods.preprocess(
-                    preprocess_df,
-                    array_preprocessing,
-                    excel_file_name_path,
-                    dataset_type,
-                    model_name_path,
-                )
-
-        # PCA
-        pca = methods.generatePCA(preprocess_df)
-        # TSNE
-        if dataset_type == "tabular":
-            tsne, projections = methods.generateTSNE(
-                preprocess_df, dataset_type, class_label
-            )
-        else:
-            tsne, projections = methods.generateTSNE(preprocess_df, dataset_type)
-
-        # save the plots
-        pickle.dump(tsne, open(model_name_path + "/tsne.sav", "wb"))
-        pickle.dump(pca, open(model_name_path + "/pca.sav", "wb"))
-
-        # save projections file for future use
-        with open(model_name_path + "/tsne_projections.json", "w") as f:
-            json.dump(projections.tolist(), f, indent=2)
-
-        if dataset_type == "tabular":
-            # training
-            feature_importance, classification_report, importance_dict = (
-                methods.training(
-                    preprocess_df,
-                    model_name,
-                    float(test_set_ratio),
-                    class_label,
-                    dataset_type,
-                    df_name,
-                    model_name_path,
-                )
-            )
-
-            # save some files
-            pickle.dump(
-                classification_report,
-                open(model_name_path + "/classification_report.sav", "wb"),
-            )
-            pickle.dump(
-                feature_importance,
-                open(model_name_path + "/feature_importance.sav", "wb"),
-            )
-
-            # feature importance on the original categorical columns (if they exist)
-            df = pd.read_csv(excel_file_name_path)
-            df = df.drop(class_label, axis=1)
-
-            # Initialize a dictionary to hold aggregated feature importances
-            categorical_columns = methods.get_categorical_features(df)
-
-            if categorical_columns:
-                aggregated_importance = {}
-                encoded_columns = methods.update_column_list_with_one_hot_columns(
-                    df, preprocess_df, df.columns
-                )
-
-                feature_mapping = defaultdict(list)
-                for col in encoded_columns:
-                    # Check if the column matches a pattern in categorical column names
-                    for original_col in categorical_columns:
-                        if col.startswith(original_col + "_"):
-                            feature_mapping[original_col].append(col)
-                            break
-                    else:
-                        # If no match, it's likely a numerical or non-encoded feature, map it to itself
-                        feature_mapping[col].append(col)
-
-                # Aggregate the feature importances
-                for original_feature, encoded_columns in feature_mapping.items():
-                    if original_feature not in encoded_columns:
-                        aggregated_importance[original_feature] = np.sum(
-                            [importance_dict[col] for col in encoded_columns]
-                        )
-                    else:
-                        aggregated_importance[original_feature] = importance_dict[
-                            original_feature
-                        ]
-
-                importance_df = pd.DataFrame(
-                    {
-                        "feature": list(aggregated_importance.keys()),
-                        "importance": list(aggregated_importance.values()),
-                    }
-                )
-                importance_df.to_csv(
-                    model_name_path + "/feature_importance_df.csv", index=None
-                )
-            else:
-                # if no categorical columns
-
-                # Combine feature names with their respective importance values
-                feature_importance_df = pd.DataFrame(
-                    {
-                        "feature": importance_dict.keys(),
-                        "importance": importance_dict.values(),
-                    }
-                )
-
-                feature_importance_df.to_csv(
-                    model_name_path + "/feature_importance_df.csv", index=None
-                )
-
-            # load pipeline data
-            # jsonFile = open(json_path, "r")
-            # pipeline_data = json.load(jsonFile)  # data becomes a dictionary
-            # classifier_data = pipeline_data["classifier"][model_name]
-            # classifier_data_html = dict_and_html(classifier_data)
-
-            classifier_data = jsonFile.read_pipline_json(["classifier", model_name])
-
-            classifier_data_html = dict_and_html(classifier_data)
-
-            context = {
-                "dataset_type": dataset_type,
-                "tsne": tsne.to_html(),
-                "class_report": classification_report.to_html(),
-                "feature_importance": feature_importance.to_html(),
-                "classifier_data": classifier_data_html,
-            }
-        elif dataset_type == "timeseries":
-
-            # training
-            # if model_name == "glacier":
-            #     path = model_name_path_type_dir
-            # else:
-            path = model_name_path
-            dataset_camel = methods.convert_to_camel_case(df_name)
-            if "Ecg" in dataset_camel:
-                dataset_camel = dataset_camel.replace("Ecg", "ECG")
-
-            experiment = methods.fetch_line_by_dataset(
-                PIPELINE_PATH + "/glacier_experiments.txt",
-                dataset_camel,
-            )
-
-            if experiment is not None:
-                stripped_arguments = methods.extract_arguments_from_line(experiment)
-
-            if model_name == "glacier":
-                classification_report = methods.training(
-                    preprocess_df,
-                    model_name,
-                    float(test_set_ratio) if test_set_ratio != "" else 0,
-                    "",
-                    dataset_type,
-                    df_name,
-                    path,
-                    autoencoder,
-                    stripped_arguments,
-                )
-            else:
-                classification_report = methods.training(
-                    preprocess_df,
-                    model_name,
-                    float(test_set_ratio) if test_set_ratio != "" else 0,
-                    "",
-                    dataset_type,
-                    df_name,
-                    path,
-                )
-
-            pickle.dump(
-                classification_report,
-                open(path + "/classification_report.sav", "wb"),
-            )
-
-            context = {
-                "dataset_type": dataset_type,
-                "pca": pca.to_html(),
-                "tsne": tsne.to_html(),
-                "classification_report": classification_report.to_html(),
-            }
-
-        preprocess_df.to_csv(excel_file_name_preprocessed_path, index=False)
-
-    elif action == "pre_trained":
-        # load pre trained models
-        pre_trained_model_name = request.POST.get("pre_trained")
-        request.session["model_name"] = pre_trained_model_name
-        # dataframe name
-        df_name = request.session.get("df_name")
-
-        if df_name == "upload":
-            df_name = request.session.get("df_name_upload_base_name")
-
-        model_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + pre_trained_model_name
-        )
-        
-        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
-
-        # get the type of the file
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-        datasets_types_pipeline = pipeline.pipeline_json(
-            datasets_types_pipeline_json_path
-        )
-        dataset_type = datasets_types_pipeline.read_pipline_json([df_name])
-
-        if type(dataset_type) is list:
-            dataset_type = dataset_type[0]
-
-        if "url" in request.POST:
-            url = request.POST.get("url")
-            if url == "counterfactuals":
-                # only TSNE
-                tsne = joblib.load(model_name_path + "/tsne.sav")
-                
-                # Assuming you already have your fig object created, you can update it like this:
-                # Improved and modern t-SNE visualization
-                tsne.update_layout(
-                    # Modern Legend Design
-                    legend=dict(
-                        x=0.9,
-                        y=0.95,
-                        xanchor="right",
-                        yanchor="top",
-                        bgcolor="rgba(255,255,255,0.8)",  # Light semi-transparent white background
-                        bordercolor="rgba(0,0,0,0.1)",  # Light border for contrast
-                        borderwidth=1,
-                        font=dict(size=12, color="#444")  # Subtle grey for legend text
-                    ),
-                    # Tight Margins to Focus on the Plot
-                    margin=dict(l=10, r=10, t=30, b=10),  # Very slim margins for a modern look
-                    # Axis Design: Minimalist and Clean
-                    xaxis=dict(
-                        title_text="",  # No axis labels for a clean design
-                        tickfont=dict(size=10, color="#aaa"),  # Light grey for tick labels
-                        showline=True,
-                        linecolor="rgba(0,0,0,0.2)",  # Subtle line color for axis lines
-                        zeroline=False,                # No zero line for a sleek look
-                        showgrid=False,                # Hide grid lines for a minimal appearance
-                        ticks="outside",               # Small ticks outside the axis
-                        ticklen=3                      # Short tick marks for subtlety
-                    ),
-                    yaxis=dict(
-                        title_text="",  # No axis labels
-                        tickfont=dict(size=10, color="#aaa"),
-                        showline=True,
-                        linecolor="rgba(0,0,0,0.2)",
-                        zeroline=False,
-                        showgrid=False,
-                        ticks="outside",
-                        ticklen=3
-                    ),
-                    # Sleek Background
-                    plot_bgcolor="#fafafa",  # Very light grey background for a smooth finish
-                    paper_bgcolor="#ffffff",  # Pure white paper background
-                    # Modern Title with Elegant Style
-                    title=dict(
-                        text="t-SNE Visualization of Data",
-                        font=dict(size=16, color="#222", family="Helvetica, Arial, sans-serif"),  # Classy font style
-                        x=0.5,
-                        xanchor="center",
-                        yanchor="top",
-                        pad=dict(t=15)  # Padding to separate the title from the plot
-                    )
-                )
-
-                # Add hover effects for a smooth user experience
-                tsne.update_traces(hoverinfo="text+name", hoverlabel=dict(bgcolor="white", font_size=12, font_family="Arial"))
-                
-                context = {
-                    "tsne": tsne.to_html(),
-                }
-            else:
-                # load plots
-                pca = joblib.load(model_name_path + "/pca.sav")
-                classification_report = joblib.load(
-                    model_name_path + "/classification_report.sav"
-                )
-                # tsne = joblib.load(model_name_path + "/tsne.sav")
-
-                # pipeline path
-                json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
-                jsonFile = pipeline.pipeline_json(json_path)
-
-                # load pipeline data
-                # jsonFile = open(json_path, "r")
-                # pipeline_data = json.load(jsonFile)  # data becomes a dictionary
-                # classifier_data = pipeline_data["classifier"][pre_trained_model_name]
-
-                classifier_data = jsonFile.read_pipline_json(
-                    ["classifier", pre_trained_model_name]
-                )
-                classifier_data_flattened = methods.flatten_dict(classifier_data)
-                classifier_data_df = pd.DataFrame([classifier_data_flattened])
-
-                if dataset_type == "tabular":
-                    feature_importance = joblib.load(
-                        model_name_path + "/feature_importance.sav"
-                    )
-                    context = {
-                        "dataset_type": dataset_type,
-                        "pca": pca.to_html(),
-                        "class_report": classification_report.to_html(),
-                        "feature_importance": feature_importance.to_html(),
-                        "classifier_data": classifier_data_df.to_html(),
-                    }
-                elif dataset_type == "timeseries":
-                    tsne = joblib.load(model_name_path + "/tsne.sav")
-                    context = {
-                        "dataset_type": dataset_type,
-                        "pca": pca.to_html(),
-                        "class_report": classification_report.to_html(),
-                        "tsne": tsne.to_html(),
-                        "classifier_data": classifier_data_df.to_html(),
-                    }
-
-    elif action == "cf":
-        # dataframe name
-        df_name = request.session.get("df_name")
-        if df_name == "upload":
-            df_name = request.session.get("df_name_upload_base_name")
-
-        # preprocessed_path
-        excel_file_name_preprocessed_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv"
-        )
-
-        excel_file_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
-        )
-        # which model is being used during that session
-        model_name = request.POST.get("model_name")
-        # path of used model
-        model_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}/" + "trained_models/" + f"{model_name}/"
-        )
-        model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
-
-        # read preprocessed data
-        if os.path.exists(excel_file_name_preprocessed_path):
-            df = pd.read_csv(excel_file_name_preprocessed_path)
-        else:
-            df = pd.read_csv(excel_file_name_path)
-
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-        datasets_types_pipeline = pipeline.pipeline_json(
-            datasets_types_pipeline_json_path
-        )
-        dataset_type = datasets_types_pipeline.read_pipline_json([df_name])
-
-        if type(dataset_type) is list:
-            dataset_type = dataset_type[0]
-
-        df_id = request.session.get("cfrow_id")
-        if dataset_type == "tabular":
-
-            # get row
-            features_to_vary = json.loads(request.POST.get("features_to_vary"))
-
-            row = df.iloc[[int(df_id)]]
-
-            # not preprocessed
-            notpre_df = pd.read_csv(excel_file_name_path)
-            notpre_row = notpre_df.iloc[[int(df_id)]]
-
-            # if feature_to_vary has a categorical column then I cannot just
-            # pass that to dice since the trained model does not contain the
-            # categorical column but the one-hot-encoded sub-columns
-            features_to_vary = methods.update_column_list_with_one_hot_columns(
-                notpre_df, df, features_to_vary
-            )
-
-            # pipeline path
-            json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
-
-            # load pipeline data
-            jsonFile = pipeline.pipeline_json(json_path)
-            class_label = jsonFile.read_pipline_json(
-                ["classifier", model_name, "class_label"]
-            )  # data becomes a dictionary
-
-            # number of counterfactuals
-            # (TBD) input field value as parameter
-            # in ajax
-            num_counterfactuals = 5
-            le = LabelEncoder()
-            notpre_df[class_label] = le.fit_transform(notpre_df[class_label])
-
-            continuous_features = methods.get_continuous_features(df)
-            non_continuous_features = methods.get_non_continuous_features(df)
-
-            # load used classifier
-            clf = joblib.load(model_name_path + model_name + ".sav")
-
-            try:
-                # Set up the executor to run the function in a separate thread
-                with concurrent.futures.ThreadPoolExecutor() as executor:
-                    # Submit the function to the executor
-                    future = executor.submit(
-                        methods.counterfactuals,
-                        row,
-                        clf,
-                        df,
-                        class_label,
-                        continuous_features,
-                        num_counterfactuals,
-                        features_to_vary,
-                    )
-                    # Wait for the result with a timeout of 10 seconds
-                    counterfactuals = future.result(timeout=10)
-                    print("Counterfactuals computed successfully!")
-            except concurrent.futures.TimeoutError:
-                message = (
-                    "It seems like it took more than expected. Refresh and try again..."
-                )
-                print(message)
-                exit(1)
-                
-            if counterfactuals:
-                cf_df = counterfactuals[0].final_cfs_df
-                counterfactuals[0].final_cfs_df.to_csv(
-                    model_name_path + "counterfactuals.csv", index=False
-                )
-
-                # get coordinates of the clicked point (saved during 'click' event)
-                clicked_point = request.session.get("clicked_point")
-                clicked_point_df = pd.DataFrame(
-                    {
-                        "0": clicked_point[0],
-                        "1": clicked_point[1],
-                        f"{class_label}": row[class_label].astype(str),
-                    }
-                )
-
-                # tSNE
-                cf_df = pd.read_csv(model_name_path + "counterfactuals.csv")
-                model_name_dir_path = os.path.join(PIPELINE_PATH + f"{df_name}")
-                tsne_path_to_augment = model_name_path + "tsne.sav"
-                
-                tsne = methods.generateAugmentedTSNE(
-                    df,
-                    cf_df,
-                    num_counterfactuals,
-                    clicked_point_df,
-                    tsne_path_to_augment,
-                    class_label,
-                )
-                
-                tsne.update_layout(
-                    # Modern Legend Design
-                    legend=dict(
-                        x=0.85,
-                        y=0.95,
-                        xanchor="right",
-                        yanchor="top",
-                        bgcolor="rgba(0,0,0,0.05)",  # Transparent black background for a sleek look
-                        bordercolor="rgba(0,0,0,0.1)",  # Soft border for separation
-                        borderwidth=1,
-                        font=dict(size=12, color="#333")  # Modern grey font color for text
-                    ),
-                    # Tight Margins for a Focused Plot Area
-                    margin=dict(l=20, r=20, t=40, b=40),  # Reduced margins for a cleaner look
-                    # Axis Titles and Labels: Minimalist Design
-                    xaxis=dict(
-                        title_font=dict(size=14, color="#555"),  # Medium grey color for axis title
-                        tickfont=dict(size=11, color="#777"),    # Light grey color for tick labels
-                        showline=True,
-                        linecolor="rgba(0,0,0,0.15)",  # Subtle line color for axis lines
-                        zeroline=False,                # Hide the zero line for a cleaner design
-                        showgrid=False                 # No grid lines for a modern look
-                    ),
-                    yaxis=dict(
-                        title_font=dict(size=14, color="#555"),
-                        tickfont=dict(size=11, color="#777"),
-                        showline=True,
-                        linecolor="rgba(0,0,0,0.15)",
-                        zeroline=False,
-                        showgrid=False
-                    ),
-                    # Sleek Background Design
-                    plot_bgcolor="white",  # Crisp white background for a modern touch
-                    paper_bgcolor="white",  # Ensure the entire background is uniform
-                    # Title: Modern Font and Centered
-                    title=dict(
-                        text="t-SNE Visualization of Data",
-                        font=dict(size=18, color="#333", family="Arial, sans-serif"),  # Modern font style
-                        x=0.5,
-                        xanchor="center",
-                        yanchor="top",
-                        pad=dict(t=10)  # Padding to give the title breathing space
-                    )
-                )
-
-                pickle.dump(tsne, open(model_name_path + "tsne_cfs.sav", "wb"))
-
-                context = {
-                    "dataset_type": dataset_type,
-                    "model_name": model_name,
-                    "tsne": tsne.to_html(),
-                    "num_counterfactuals": num_counterfactuals,
-                    "default_counterfactual": "1",
-                    "clicked_point": notpre_row.to_html(),
-                    "counterfactual": cf_df.iloc[[1]].to_html(),
-                }
-
-            else:
-                context = {
-                    "dataset_type": dataset_type,
-                    "model_name": model_name,
-                    "message": "Please try again with different features.",
-                }
-        elif dataset_type == "timeseries":
-            model_name = request.POST["model_name"]
-            model_name_path = os.path.join(
-                PIPELINE_PATH + f"{df_name}/" + "trained_models/" + f"{model_name}/"
-            )
-            path = model_name_path
-            if model_name == "glacier":
-                constraint = request.POST["constraint"]
-                path = os.path.join(
-                    PIPELINE_PATH
-                    + f"{df_name}/"
-                    + "trained_models/"
-                    + f"{model_name}/"
-                    + f"{constraint}/"
-                )
-
-            X_test_path = os.path.join(model_name_path + "X_test.csv")
-            y_test_path = os.path.join(model_name_path + "y_test.npy")
-            y_pred_path = os.path.join(path + "y_pred.npy")
-            X_cf_path = os.path.join(path + "X_cf.npy")
-            cf_pred_path = os.path.join(path + "cf_pred.npy")
-
-            X_test = pd.read_csv(X_test_path)
-            y_test = np.load(y_test_path)
-            y_pred = np.load(y_pred_path)
-            X_cf = np.load(X_cf_path)
-            cf_pred = np.load(cf_pred_path)
-
-            if model_name != "glacier":
-                scaler = joblib.load(model_name_path + "/min_max_scaler.sav")
-                X_test = pd.DataFrame(scaler.inverse_transform(X_test))
-                X_cf = scaler.inverse_transform(X_cf)
-
-            fig = methods.ecg_plot_counterfactuals(
-                int(df_id), X_test, y_test, y_pred, X_cf, cf_pred
-            )
-
-            context = {
-                "df_name": df_name,
-                "fig": fig.to_html(),
-                "dataset_type": dataset_type,
-            }
-    elif action == "compute_cf":
-        model_name = request.POST.get("model_name")
-        if model_name == "glacier":
-            constraint_type = request.POST.get("constraint")
-            w_value = request.POST.get("w_value")
-            df_name = request.session.get("df_name")
-
-            model_name_path = os.path.join(
-                PIPELINE_PATH + f"{df_name}/" + "trained_models/" + f"{model_name}/"
-            )
-            model_name_path_constraint = model_name_path + f"{constraint_type}/"
-            if not os.path.exists(model_name_path_constraint):
-                os.makedirs(model_name_path_constraint)
-
-            # https://github.com/wildboar-foundation/wildboar/blob/master/docs/guide/explain/counterfactuals.rst#id27
-            classifier = joblib.load(model_name_path + "/classifier.sav")
-
-            # pipeline path
-            json_path = os.path.join(PIPELINE_PATH, f"{df_name}" + "/pipeline.json")
-            # load pipeline data
-            jsonFile = pipeline.pipeline_json(json_path)
-            autoencoder = jsonFile.read_pipline_json(
-                ["classifier", model_name, "autoencoder"]
-            )
-
-            experiment_dict = {"constraint": constraint_type, "w_value": w_value}
-
-            # if "experiments" in pipeline_data["classifier"][model_name]:
-            #     # if there exists key with value "experiments"
-            #     keys = pipeline_data["classifier"][model_name]["experiments"].keys()
-            #     last_key_int = int(list(keys)[-1])
-            #     last_key_int_incr_str = str(last_key_int + 1)
-            # else:
-            #     last_key_int_incr_str = "0"
-            #     experiment_key_dict = {"experiments": {last_key_int_incr_str: {}}}
-            #     pipeline_data["classifier"][model_name].update(experiment_key_dict)
-
-            # outter_dict = {last_key_int_incr_str: experiment_dict}
-            # pipeline_data["classifier"][model_name]["experiments"].update(outter_dict)
-
-            if jsonFile.check_key_pipeline_json("experiments"):
-                keys = jsonFile.read_pipline_json(
-                    ["classifier", model_name, "experiments"]
-                ).keys()
-                last_key_int = int(list(keys)[-1])
-                last_key_int_incr_str = str(last_key_int + 1)
-            else:
-                last_key_int_incr_str = "0"
-                experiment_key_dict = {"experiments": {last_key_int_incr_str: {}}}
-                jsonFile.update_pipeline_json(
-                    ["classifier", model_name], experiment_key_dict
-                )
-
-            outter_dict = {last_key_int_incr_str: experiment_dict}
-            jsonFile.update_pipeline_json(
-                ["classifier", model_name, "experiments"], outter_dict
-            )
-
-            if autoencoder == "Yes":
-                autoencoder = joblib.load(model_name_path + "/autoencoder.sav")
-            else:
-                autoencoder = None
-
-            gc_compute_counterfactuals(
-                model_name_path,
-                model_name_path_constraint,
-                constraint_type,
-                [0.0001],
-                float(w_value),
-                0.5,
-                classifier,
-                autoencoder,
-            )
-            path = model_name_path_constraint
-            context = {"experiment_dict": experiment_dict}
-    elif action == "class_label_selection":
-
-        df_name = request.session.get("df_name")
-
-        if df_name == "upload":
-            df_name = request.session["df_name_upload_base_name"]
-
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-
-        dataset_type_json = pipeline.pipeline_json(datasets_types_pipeline_json_path)
-
-        dataset_type = dataset_type_json.read_pipline_json([df_name])
-        # preprocessed_path
-        excel_file_name_preprocessed_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + "_preprocessed" + ".csv"
-        )
-
-        excel_file_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/" + df_name + ".csv"
-        )
-
-        # which model is being used during that session
-        model_name = request.POST.get("model_name")
-
-        model_name_path = os.path.join(
-            PIPELINE_PATH + f"{df_name}" + "/trained_models/" + model_name
-        )
-
-        X_test_path = os.path.join(
-            PIPELINE_PATH
-            + f"{df_name}"
-            + "/trained_models"
-            + f"/{model_name}"
-            + "/X_test.csv"
-        )
-        y_test_path = os.path.join(
-            PIPELINE_PATH
-            + f"{df_name}"
-            + "/trained_models"
-            + f"/{model_name}"
-            + "/y_test.npy"
-        )
-
-        X_test = pd.read_csv(X_test_path)
-        y_test = np.load(y_test_path)
-
-        if model_name != "glacier":
-            scaler = joblib.load(model_name_path + "/min_max_scaler.sav")
-            X_test = pd.DataFrame(scaler.inverse_transform(X_test))
-
-        if dataset_type == "timeseries":
-            class_label = request.POST.get("class_label")
-            cfrow_id = request.POST.get("cfrow_id")
-
-            class_label = (
-                int(class_label)
-                if class_label.isdigit()
-                else (
-                    float(class_label)
-                    if class_label.replace(".", "", 1).isdigit()
-                    else class_label
-                )
-            )
-
-            fig, index = methods.get_ecg_entry(
-                X_test, y_test, int(cfrow_id), class_label
-            )
-            request.session["cfrow_id"] = index
-            request.session["class_label"] = class_label
-            context = {"fig": fig.to_html(), "dataset_type": dataset_type}
-    elif action == "dataset_charts":
-        df_name = request.POST.get("df_name")
-        request.session["df_name"] = df_name
-        context = {}
-    elif action == "timeseries-dataset":
-
-        # action to add dataset when from radio button click
-        name = request.POST.get("timeseries_dataset")
-
-        # add name of used dataframe in session for future use
-        request.session["df_name"] = name
-        excel_file_name_path = os.path.join(
-            PIPELINE_PATH + f"{name}" + "/" + name + ".csv",
-        )
-        datasets_types_pipeline_json_path = os.path.join(
-            PIPELINE_PATH + "/dataset_types_pipeline.json"
-        )
-        datasets_types_pipeline_json = pipeline.pipeline_json(
-            datasets_types_pipeline_json_path
-        )
-        if os.path.exists(excel_file_name_path):
-
-            dataset_type = datasets_types_pipeline_json.read_pipline_json([name])
-
-            df = methods.get_dataframe(excel_file_name_path)
-            df.columns = df.columns.str.replace(" ", "_")
-            request.session["excel_file_name"] = excel_file_name_path
-
-            # find the available pre trained datasets
-            # check the pipeline file
-            json_path = os.path.join(PIPELINE_PATH, f"{name}" + "/pipeline.json")
-            jsonFile = pipeline.pipeline_json(json_path)
-
-            preprocessing_info = {"name": name}
-            dataset_camel = methods.convert_to_camel_case(name)
-            if "Ecg" in dataset_camel:
-                dataset_camel = dataset_camel.replace("Ecg", "ECG")
-            experiment = methods.fetch_line_by_dataset(
-                PIPELINE_PATH + "/glacier_experiments.txt",
-                dataset_camel,
-            )
-            if experiment is not None:
-                stripped_arguments = methods.extract_arguments_from_line(experiment)
-            indices_to_keys = {
-                1: "pos",
-                2: "neg",
-            }
-
-            # Create a dictionary by fetching items from the list at the specified indices
-            inner_dict = {
-                key: stripped_arguments[index] for index, key in indices_to_keys.items()
-            }
-            preprocessing_info.update(inner_dict)
-            jsonFile.append_pipeline_json(preprocessing_info)
-
-            pos = inner_dict["pos"]
-            neg = inner_dict["neg"]
-            fig, fig1 = methods.stats(
-                excel_file_name_path, dataset_type, int(pos), int(neg), name=name
-            )
-            # timeseries
-            request.session["fig"] = fig
-            request.session["fig1"] = fig1
-            context = {"fig": fig, "fig1": fig1, "dataset_type": dataset_type}
-        else:
-            context = {}
-
-    return HttpResponse(json.dumps(context))