{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Första ordningens regression"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "NN = 10\n",
    "X = np.linspace(0, 10, NN, endpoint=False)\n",
    "\n",
    "np.random.seed(42)  # Same random sequence every run, for comparability\n",
    "y = np.array([X[ii] + 3 * (-.5 + np.random.rand()) for ii in range(NN)]) # Linear function plus uniform noise in [-1.5, 1.5)\n",
    "\n",
    "\n",
    "m, b = np.polyfit(X, y, 1)  # Slope and y-intercept of a first-degree (linear) fit\n",
    "\n",
    "xp = 3.5 # \"Unseen\" data: what y-value does x = 3.5 have?\n",
    "\n",
    "prediction = m * xp + b\n",
    "print(f'k = {round(m,3)}, m = {round(b,3)}')  # Swedish convention y = kx + m: slope printed as k, intercept as m\n",
    "\n",
    "print(f\"Prediktion för X={xp}: {round(prediction,3)}\")\n",
    "\n",
    "plt.scatter(X, y, color='blue', label='Data')   # Plot the data points\n",
    "X_line = np.linspace(min(X), max(X), 100)       # Dense grid for drawing the regression line\n",
    "y_line = m * X_line + b\n",
    "plt.plot(X_line, y_line, color='red', label='Regressionslinje')\n",
    "plt.xlabel(\"x\")\n",
    "plt.ylabel(\"y\")\n",
    "plt.title(\"Data\")\n",
    "plt.grid()\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# 3D (endast demo)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from mpl_toolkits.mplot3d import Axes3D\n",
    "from sklearn.linear_model import LinearRegression\n",
    "from sklearn.decomposition import PCA\n",
    "\n",
    "# Number of points\n",
    "NN = 100\n",
    "\n",
    "# Seed for reproducibility, consistent with the notebook's other stochastic cells\n",
    "np.random.seed(42)\n",
    "\n",
    "# Create data along a line in 3D, with noise\n",
    "x = np.linspace(0, 10, NN)\n",
    "z = np.linspace(0, 5, NN) + 2 * (-0.5 + np.random.rand(NN))\n",
    "y = 2 * x + 1.5 * z + 3 * (-0.5 + np.random.rand(NN))\n",
    "\n",
    "# Regression model (a plane y = f(x, z))\n",
    "X = np.column_stack((x, z))\n",
    "model = LinearRegression()\n",
    "model.fit(X, y)\n",
    "\n",
    "# Create a grid for the regression plane\n",
    "x_grid, z_grid = np.meshgrid(np.linspace(x.min(), x.max(), 20),\n",
    "                             np.linspace(z.min(), z.max(), 20))\n",
    "X_grid = np.column_stack((x_grid.ravel(), z_grid.ravel()))\n",
    "y_grid = model.predict(X_grid).reshape(x_grid.shape)\n",
    "\n",
    "# PCA to find the best-fitting line through the points\n",
    "points = np.column_stack((x, z, y))\n",
    "pca = PCA(n_components=1)\n",
    "pca.fit(points)\n",
    "direction = pca.components_[0]       # Direction of the line; trailing underscore marks a *fitted* attribute\n",
    "center = points.mean(axis=0)         # Midpoint of the line\n",
    "t = np.linspace(-10, 10, 100)        # Parametrization along the line\n",
    "line_points = center + np.outer(t, direction)\n",
    "\n",
    "# Plotting\n",
    "fig = plt.figure(figsize=(10, 8))\n",
    "ax = fig.add_subplot(111, projection='3d')\n",
    "\n",
    "# The data points\n",
    "ax.scatter(x, z, y, color='blue', alpha=0.6, label='Data')\n",
    "\n",
    "# The regression plane (toggle on to show it)\n",
    "# ax.plot_surface(x_grid, z_grid, y_grid, alpha=0.4, color='orange')\n",
    "\n",
    "# The regression line (PCA)\n",
    "ax.plot(line_points[:, 0], line_points[:, 1], line_points[:, 2],\n",
    "        color='red', linewidth=2.5, label='3D-regressionslinje')\n",
    "\n",
    "# Axes and labels\n",
    "ax.set_xlabel('x')\n",
    "ax.set_ylabel('z')\n",
    "ax.set_zlabel('y')\n",
    "plt.title('3D-regression')\n",
    "ax.legend()\n",
    "plt.tight_layout()\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Andra ordningens regression"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "np.random.seed(42)  # Same random sequence every run, for comparability\n",
    "\n",
    "NN = 20\n",
    "X = np.linspace(-5, 5, NN, endpoint=True)\n",
    "y = np.array([X[ii]**2 + 3 * (-.5 + np.random.rand()) for ii in range(NN)])  # Quadratic function plus uniform noise\n",
    "\n",
    "# Fit a second-degree polynomial\n",
    "coeffs = np.polyfit(X, y, 2)\n",
    "\n",
    "print(f'Uppskattat polynom: A * x^2 + B * x + C, where A = {coeffs[0]}, B = {coeffs[1]}, and C = {coeffs[2]}')\n",
    "# Prediction at an unseen point (x = 6 lies outside the training range)\n",
    "prediction = coeffs[0] * 6**2 + coeffs[1] * 6 + coeffs[2]\n",
    "print(f\"Prediktion för X=6: {prediction}\")\n",
    "\n",
    "# Plot the data points\n",
    "plt.scatter(X, y, color='blue', label='Data')\n",
    "\n",
    "# Plot the regression curve on a dense grid\n",
    "X_line = np.linspace(min(X), max(X), 100)\n",
    "y_line = coeffs[0] * X_line**2 + coeffs[1] * X_line + coeffs[2]\n",
    "plt.plot(X_line, y_line, color='red', label='Regressionskurva')\n",
    "\n",
    "plt.xlabel(\"X\")\n",
    "plt.ylabel(\"y\")\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Över- och underanpassning (endast demo)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from scipy.interpolate import UnivariateSpline\n",
    "\n",
    "np.random.seed(42)\n",
    "\n",
    "# Noisy quadratic data\n",
    "X = np.linspace(-5, 5, 20)\n",
    "y = X**2 + np.random.normal(scale=4, size=X.shape)\n",
    "\n",
    "# s = smoothing parameter: the larger it is, the smoother the spline\n",
    "spline_soft_1 = UnivariateSpline(X, y, s=0)\n",
    "spline_soft_2 = UnivariateSpline(X, y, s=500)\n",
    "\n",
    "# Straight-line fit for comparison (underfitting)\n",
    "coeffs_linear = np.polyfit(X, y, 1)\n",
    "poly_linear = np.poly1d(coeffs_linear)\n",
    "\n",
    "X_dense = np.linspace(X.min(), X.max(), 500)\n",
    "plt.figure(figsize=(10, 6))\n",
    "plt.scatter(X, y, label='Data', color='black')\n",
    "\n",
    "plt.plot(X_dense, spline_soft_1(X_dense), label='Hård spline (\"överanpassning\")', color='red')\n",
    "plt.plot(X_dense, spline_soft_2(X_dense), label='Mjuk spline (\"lagom\")', color='green')\n",
    "plt.plot(X_dense, poly_linear(X_dense), label='Linjär regr. (\"underanpassning\")', color='blue')\n",
    "\n",
    "plt.title(\"Demonstration av överanpassning och underanpassning\")\n",
    "plt.xlabel(\"X\")\n",
    "plt.ylabel(\"y\")\n",
    "plt.legend()\n",
    "plt.grid(True)\n",
    "plt.tight_layout()\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Osedd data"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from scipy.interpolate import UnivariateSpline\n",
    "\n",
    "np.random.seed(42) # Try another seed (0 vs 42)\n",
    "\n",
    "# Create noisy quadratic data\n",
    "X = np.linspace(-5, 5, 20)\n",
    "y = X**2 + np.random.normal(scale=4, size=X.shape)\n",
    "\n",
    "# Fitted models\n",
    "spline_hard = UnivariateSpline(X, y, s=0)     # Overfitting\n",
    "spline_soft = UnivariateSpline(X, y, s=500)    # Just right\n",
    "coeffs_linear = np.polyfit(X, y, 1)           # Underfitting\n",
    "poly_linear = np.poly1d(coeffs_linear)\n",
    "\n",
    "# Dense X grids for both the training interval and the extrapolation interval\n",
    "X_dense_train = np.linspace(X.min(), X.max(), 500)\n",
    "X_dense_extra = np.linspace(5, 8, 20)  # Extrapolation interval\n",
    "\n",
    "# Plot\n",
    "plt.figure(figsize=(10, 6))\n",
    "plt.scatter(X, y, label='Träningsdata', color='black')\n",
    "\n",
    "# Training interval\n",
    "plt.plot(X_dense_train, spline_hard(X_dense_train), label='Överanpassning (s=0)', color='red')\n",
    "plt.plot(X_dense_train, spline_soft(X_dense_train), label='Lagom', color='green')\n",
    "#plt.plot(X_dense_train, poly_linear(X_dense_train), label='Underanpassning (linjär)', color='blue')\n",
    "\n",
    "# Extrapolation (dashed: the models have seen no data here)\n",
    "plt.plot(X_dense_extra, spline_hard(X_dense_extra), color='red', linestyle='dashed')\n",
    "plt.plot(X_dense_extra, spline_soft(X_dense_extra), color='green', linestyle='dashed')\n",
    "#plt.plot(X_dense_extra, poly_linear(X_dense_extra), color='blue', linestyle='dashed')\n",
    "\n",
    "plt.axvline(5, color='gray', linestyle=':', label='Gräns för träningsdata')\n",
    "plt.title(\"Kurvanpassning och generalisering utanför träningsintervall\")\n",
    "plt.xlabel(\"X\")\n",
    "plt.ylabel(\"y\")\n",
    "plt.ylim([-10,150])\n",
    "plt.legend()\n",
    "plt.grid(True)\n",
    "plt.tight_layout()\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Beslutsträd (demo med lite trix)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import networkx as nx   # lib för att rita själva trädet\n",
    "\n",
    "class DecisionTree:\n",
    "    \"\"\"Minimal regression tree: splits on the median, predicts the mean.\"\"\"\n",
    "\n",
    "    def __init__(self, depth=3):\n",
    "        self.depth = depth            # Maximum depth of the tree\n",
    "        self.split_value = None       # Split threshold at this node (None for leaves)\n",
    "        self.left = None\n",
    "        self.right = None\n",
    "        self.prediction = None        # Leaf value (None for internal nodes)\n",
    "\n",
    "    def fit(self, X, y, depth=0):\n",
    "        # Stop when the maximum depth is reached or too few samples remain.\n",
    "        if depth == self.depth or len(X) < 2:\n",
    "            # Guard: np.mean of an empty split would return NaN with a RuntimeWarning.\n",
    "            self.prediction = np.mean(y) if len(y) > 0 else 0.0\n",
    "            return\n",
    "\n",
    "        self.split_value = np.median(X)\n",
    "        left_mask = X < self.split_value\n",
    "        right_mask = X >= self.split_value\n",
    "\n",
    "        self.left = DecisionTree(self.depth)\n",
    "        self.right = DecisionTree(self.depth)\n",
    "\n",
    "        self.left.fit(X[left_mask], y[left_mask], depth + 1)\n",
    "        self.right.fit(X[right_mask], y[right_mask], depth + 1)\n",
    "\n",
    "    def predict(self, X):\n",
    "        # Leaf: constant prediction for every input value.\n",
    "        if self.prediction is not None:\n",
    "            return np.full_like(X, self.prediction)\n",
    "\n",
    "        mask = X < self.split_value\n",
    "        y_pred = np.zeros_like(X)\n",
    "        y_pred[mask] = self.left.predict(X[mask])\n",
    "        y_pred[~mask] = self.right.predict(X[~mask])    # tilde = complement\n",
    "        return y_pred\n",
    "\n",
    "def visualize_tree(tree, ax):\n",
    "    \"\"\"Draw the decision tree as a graph on a specific axis (subplot).\"\"\"\n",
    "    G = nx.DiGraph()\n",
    "    pos = {}     # node_id -> (x, y) drawing position\n",
    "    labels = {}  # node_id -> text shown inside the node\n",
    "\n",
    "    def add_nodes_edges(node, depth=0, pos_x=0, pos_y=0, parent=None):\n",
    "        # Recursive helper that adds nodes and edges to the graph\n",
    "        if node is None:\n",
    "            return\n",
    "\n",
    "        node_id = len(G)\n",
    "        pos[node_id] = (pos_x, -pos_y)\n",
    "        \n",
    "        # Leaves show their prediction; internal nodes show their split rule\n",
    "        if node.prediction is not None:\n",
    "            labels[node_id] = f\"{node.prediction:.1f}\"\n",
    "        else:\n",
    "            labels[node_id] = f\"x < {node.split_value:.1f}\"\n",
    "        \n",
    "        G.add_node(node_id)\n",
    "\n",
    "        if parent is not None:\n",
    "            G.add_edge(parent, node_id)\n",
    "\n",
    "        # Recurse into the children; the horizontal offset shrinks with depth\n",
    "        add_nodes_edges(node.left, depth+1, pos_x - 1/(depth+2), pos_y + 1, node_id)\n",
    "        add_nodes_edges(node.right, depth+1, pos_x + 1/(depth+2), pos_y + 1, node_id)\n",
    "\n",
    "    add_nodes_edges(tree)\n",
    "\n",
    "    # Draw the tree on the given axis (subplot)\n",
    "    nx.draw(G, pos, with_labels=True, labels=labels, node_size=1000, node_color=\"lightblue\", edge_color=\"gray\", font_size=10, font_weight=\"bold\", ax=ax)\n",
    "    ax.set_title(\"Beslutsträd\")\n",
    "\n",
    "# Train the decision tree and visualize it\n",
    "np.random.seed(42)\n",
    "NN = 100\n",
    "X = np.linspace(-10, 10, NN)\n",
    "y = 2 * X**2 + np.random.randn(NN) * 10\n",
    "\n",
    "tree = DecisionTree(depth=3)\n",
    "tree.fit(X, y)\n",
    "y_pred_tree = tree.predict(X)\n",
    "plt.scatter(X, y, alpha=0.5, label=\"Data\")\n",
    "#plt.plot(X, y_pred_tree, color=\"green\", label=\"Beslutsträd\")\n",
    "plt.legend()\n",
    "plt.title(\"Data\")\n",
    "plt.show()\n",
    "\n",
    "# Create subplots\n",
    "fig, axes = plt.subplots(1, 2, figsize=(15, 7), constrained_layout=True)\n",
    "\n",
    "# First plot: the data and the decision tree's predictions\n",
    "axes[0].scatter(X, y, alpha=0.5, label=\"Data\")\n",
    "axes[0].plot(X, y_pred_tree, color=\"green\", label=\"Beslutsträd\")\n",
    "axes[0].legend()\n",
    "axes[0].set_title(\"Data och Beslutsträd\")\n",
    "\n",
    "# Second plot: visualization of the decision tree itself\n",
    "visualize_tree(tree, axes[1])\n",
    "\n",
    "# Show the figure\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Liknande förfarande, men med specialiserat bibliotek"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "from sklearn.tree import DecisionTreeRegressor\n",
    "\n",
    "np.random.seed(0)\n",
    "# Noisy sine data; X must be 2D (n_samples, n_features) for sklearn\n",
    "X = np.sort(np.random.rand(100, 1) * 10, axis=0)\n",
    "y = np.sin(X).ravel() + np.random.randn(100) * 0.1\n",
    "\n",
    "# Train a decision tree with sklearn.tree\n",
    "tree = DecisionTreeRegressor(max_depth=3)\n",
    "tree.fit(X, y)\n",
    "\n",
    "# Predict on a dense grid to show the piecewise-constant fit\n",
    "X_test = np.linspace(0, 10, 500).reshape(-1, 1)\n",
    "y_pred = tree.predict(X_test)\n",
    "\n",
    "plt.scatter(X, y, s=20, label=\"Träningsdata\")\n",
    "plt.plot(X_test, y_pred, color=\"r\", linewidth=2, label=\"Trädets prediktion\")\n",
    "plt.title(\"Kurvanpassning med beslutsträd\")\n",
    "plt.legend()\n",
    "plt.show()\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Många beslutsträd: Random Forest (endast demo)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "import networkx as nx\n",
    "from sklearn.utils import resample\n",
    "\n",
    "class DecisionTree:\n",
    "    \"\"\"Minimal regression tree: splits on the median, predicts the mean.\"\"\"\n",
    "\n",
    "    def __init__(self, depth=3):\n",
    "        self.depth = depth            # Maximum depth of the tree\n",
    "        self.split_value = None       # Split threshold at this node (None for leaves)\n",
    "        self.left = None\n",
    "        self.right = None\n",
    "        self.prediction = None        # Leaf value (None for internal nodes)\n",
    "\n",
    "    def fit(self, X, y, depth=0):\n",
    "        # Stop when the maximum depth is reached or too few samples remain.\n",
    "        if depth == self.depth or len(X) < 2:\n",
    "            # Guard: np.mean of an empty split would return NaN with a RuntimeWarning\n",
    "            # (bootstrap samples can contain duplicates, making one side of the median empty).\n",
    "            self.prediction = np.mean(y) if len(y) > 0 else 0.0\n",
    "            return\n",
    "\n",
    "        self.split_value = np.median(X)\n",
    "        left_mask = X < self.split_value\n",
    "        right_mask = X >= self.split_value\n",
    "\n",
    "        self.left = DecisionTree(self.depth)\n",
    "        self.right = DecisionTree(self.depth)\n",
    "\n",
    "        self.left.fit(X[left_mask], y[left_mask], depth + 1)\n",
    "        self.right.fit(X[right_mask], y[right_mask], depth + 1)\n",
    "\n",
    "    def predict(self, X):\n",
    "        # Leaf: constant prediction for every input value.\n",
    "        if self.prediction is not None:\n",
    "            return np.full_like(X, self.prediction)\n",
    "\n",
    "        mask = X < self.split_value\n",
    "        y_pred = np.zeros_like(X)\n",
    "        y_pred[mask] = self.left.predict(X[mask])\n",
    "        y_pred[~mask] = self.right.predict(X[~mask])    # tilde = complement\n",
    "        return y_pred\n",
    "\n",
    "class RandomForest:\n",
    "    \"\"\"Ensemble of DecisionTree regressors trained on bootstrap samples.\"\"\"\n",
    "\n",
    "    def __init__(self, n_trees=10, depth=3):\n",
    "        self.n_trees = n_trees  # Number of trees in the forest\n",
    "        self.depth = depth      # Maximum depth of each tree\n",
    "        self.trees = []\n",
    "\n",
    "    def fit(self, X, y):\n",
    "        # Train each tree on its own bootstrap sample of the data\n",
    "        for _ in range(self.n_trees):\n",
    "            X_sample, y_sample = resample(X, y, replace=True)  # Bootstrap sampling\n",
    "            tree = DecisionTree(self.depth)\n",
    "            tree.fit(X_sample, y_sample)\n",
    "            self.trees.append(tree)\n",
    "\n",
    "    def predict(self, X):\n",
    "        predictions = np.array([tree.predict(X) for tree in self.trees])\n",
    "        return np.mean(predictions, axis=0)  # Average of all trees' predictions\n",
    "\n",
    "# Generate data\n",
    "np.random.seed(42)\n",
    "NN = 100\n",
    "X = np.linspace(-10, 10, NN)\n",
    "y = 2 * X**2 + np.random.randn(NN) * 10\n",
    "\n",
    "# Train a Random Forest\n",
    "rf = RandomForest(n_trees=10, depth=3)\n",
    "rf.fit(X, y)\n",
    "y_pred_rf = rf.predict(X)\n",
    "\n",
    "# Visualize the results\n",
    "plt.scatter(X, y, alpha=0.5, label=\"Data\")\n",
    "plt.plot(X, y_pred_rf, color=\"red\", label=\"Random Forest\")\n",
    "plt.legend()\n",
    "plt.title(\"Random Forest Regression\")\n",
    "plt.show()\n",
    "\n",
    "# Create subplots\n",
    "fig, axes = plt.subplots(1, 3, figsize=(15, 5), constrained_layout=True)\n",
    "\n",
    "# Helper that trains and plots a RandomForest with a given number of trees\n",
    "def plot_rf(axes, idx, n_trees, X, y):\n",
    "    rf = RandomForest(n_trees=n_trees, depth=3)\n",
    "    rf.fit(X, y)\n",
    "    y_pred_rf = rf.predict(X)\n",
    "    \n",
    "    axes[idx].scatter(X, y, alpha=0.5, label=\"Data\")\n",
    "    axes[idx].plot(X, y_pred_rf, color=\"red\", linewidth=3, label=f\"RF ({n_trees} träd)\")\n",
    "    axes[idx].legend()\n",
    "    axes[idx].set_title(f\"Random Forest med {n_trees} träd\")\n",
    "\n",
    "# Plot with 2 trees\n",
    "plot_rf(axes, 0, 2, X, y)\n",
    "\n",
    "# Plot with 10 trees\n",
    "plot_rf(axes, 1, 10, X, y)\n",
    "\n",
    "# Plot with 50 trees\n",
    "plot_rf(axes, 2, 50, X, y)\n",
    "\n",
    "# Show the figure\n",
    "plt.show()"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Artificiellt neuralt nätverk (med NumPy enbart!)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import time \n",
    "\n",
    "t_start = time.time()\n",
    "\n",
    "# The sigmoid function and its derivative\n",
    "def sigmoid(x):\n",
    "    return 1 / (1 + np.exp(-x))\n",
    "\n",
    "def sigmoid_derivative(x):\n",
    "    # Note: the derivative can be expressed via sigmoid itself: s'(x) = s(x) * (1 - s(x)).\n",
    "    # Evaluate sigmoid only once instead of twice (same values, less work;\n",
    "    # consistent with the later regression cell).\n",
    "    s = sigmoid(x)\n",
    "    return s * (1 - s)\n",
    "\n",
    "# XOR dataset: inputs and desired outputs\n",
    "X = np.array([[0, 0],\n",
    "              [0, 1],\n",
    "              [1, 0],\n",
    "              [1, 1]])\n",
    "\n",
    "y = np.array([[0],\n",
    "              [1],\n",
    "              [1],\n",
    "              [0]])\n",
    "\n",
    "np.random.seed(42)\n",
    "\n",
    "# Model parameters\n",
    "input_dim = 2       # Two inputs\n",
    "hidden_dim = 2      # Two nodes in the hidden layer (feel free to experiment with more)\n",
    "output_dim = 1      # One output\n",
    "learning_rate = 0.1 # How fast the model should learn\n",
    "epochs = 10000      # How many training loops to run\n",
    "\n",
    "# Random weight initialization\n",
    "W1 = np.random.randn(input_dim, hidden_dim)\n",
    "b1 = np.random.randn(1, hidden_dim)\n",
    "W2 = np.random.randn(hidden_dim, output_dim)\n",
    "b2 = np.random.randn(1, output_dim)\n",
    "\n",
    "t_trainingloop = time.time()\n",
    "\n",
    "# Training loop\n",
    "print('Träningsloop startar')\n",
    "for epoch in range(epochs):\n",
    "    # Forward propagation\n",
    "    z1 = np.dot(X, W1) + b1      # Inputs multiplied by the weights into the hidden layer.\n",
    "    a1 = sigmoid(z1)             # Sigmoid activates the hidden neurons.\n",
    "    z2 = np.dot(a1, W2) + b2     # The hidden activations are sent to the output.\n",
    "    a2 = sigmoid(z2)             # This is the network's guess.\n",
    "    \n",
    "    # Compute the error and the mean squared error (MSE)\n",
    "    error = y - a2\n",
    "    loss = np.mean(error**2)\n",
    "    \n",
    "    # Backpropagation - compute gradients\n",
    "    d_a2 = error * sigmoid_derivative(z2)  # Error-adjusted output gradient\n",
    "    d_W2 = np.dot(a1.T, d_a2)\n",
    "    d_b2 = np.sum(d_a2, axis=0, keepdims=True)\n",
    "    d_a1 = np.dot(d_a2, W2.T) * sigmoid_derivative(z1)\n",
    "    d_W1 = np.dot(X.T, d_a1)\n",
    "    d_b1 = np.sum(d_a1, axis=0, keepdims=True)\n",
    "    \n",
    "    # Update the weights with gradient descent (error = y - a2, hence +=)\n",
    "    W2 += learning_rate * d_W2\n",
    "    b2 += learning_rate * d_b2\n",
    "    W1 += learning_rate * d_W1\n",
    "    b1 += learning_rate * d_b1\n",
    "    \n",
    "    # Report progress 10 times over the run (guard avoids modulo-by-zero for epochs < 10)\n",
    "    if (epoch + 1) % max(1, epochs // 10) == 0:\n",
    "        print(f\"Epoch {epoch+1}, Loss: {loss:.4f}\")\n",
    "\n",
    "print('Träning slutförd')\n",
    "print(f'Tid, träning: {round(time.time() - t_trainingloop,3)} s')\n",
    "# Evaluation: compare the trained network's output with the targets\n",
    "print(\"\\nTränade nätverkets output:\")\n",
    "print(a2)\n",
    "print(\"\\nFörväntad output:\")\n",
    "print(y)\n",
    "print(f'Tid, skript: {round(time.time() - t_start,3)} s')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# From a question during the lecture: test the model trained above on new data:\n",
    "# (As expected, this appears to produce nonsense outputs - the net only saw 0/1 inputs)\n",
    "# NOTE: depends on W1, b1, W2, b2 and sigmoid from the previous cell - run that cell first.\n",
    "\n",
    "new_pair = [1, 2]\n",
    "new_input = np.array([new_pair])\n",
    "\n",
    "# Forward pass with the trained weights and biases\n",
    "z1_new = np.dot(new_input, W1) + b1\n",
    "a1_new = sigmoid(z1_new)\n",
    "z2_new = np.dot(a1_new, W2) + b2\n",
    "a2_new = sigmoid(z2_new)\n",
    "\n",
    "print(f\"Modellens output för {new_pair}:\", a2_new)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Linjär regression med ett neuralt nätverk (endast demo, dock intressant)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import matplotlib.pyplot as plt\n",
    "\n",
    "np.random.seed(42)\n",
    "\n",
    "# Create the dataset\n",
    "NN = 10\n",
    "learning_rate = 0.01\n",
    "epochs = 10000\n",
    "\n",
    "x = np.linspace(0, 10, NN, endpoint=True).reshape(-1, 1)\n",
    "y = np.array([x[ii] + .3 * (-0.5 + np.random.rand()) for ii in range(NN)]).reshape(-1, 1)\n",
    "\n",
    "# The sigmoid function again and its derivative (for the hidden layer)\n",
    "def sigmoid(z):\n",
    "    return 1 / (1 + np.exp(-z))\n",
    "\n",
    "def sigmoid_derivative(z):\n",
    "    # Evaluate sigmoid once; s'(z) = s(z) * (1 - s(z))\n",
    "    s = sigmoid(z)\n",
    "    return s * (1 - s)\n",
    "\n",
    "# Prediction function - takes new data and the trained parameters\n",
    "def predict(new_x, W1, b1, W2, b2):\n",
    "    z1 = np.dot(new_x, W1) + b1\n",
    "    a1 = sigmoid(z1)\n",
    "    z2 = np.dot(a1, W2) + b2\n",
    "    return z2  # Linear output\n",
    "\n",
    "# Initialize parameters\n",
    "input_dim = 1\n",
    "hidden_dim = 2\n",
    "output_dim = 1\n",
    "\n",
    "W1 = np.random.randn(input_dim, hidden_dim)\n",
    "b1 = np.random.randn(1, hidden_dim)\n",
    "W2 = np.random.randn(hidden_dim, output_dim)\n",
    "b2 = np.random.randn(1, output_dim)\n",
    "\n",
    "# Training loop\n",
    "for epoch in range(epochs):\n",
    "    # Forward propagation\n",
    "    z1 = np.dot(x, W1) + b1\n",
    "    a1 = sigmoid(z1)\n",
    "    z2 = np.dot(a1, W2) + b2\n",
    "    output = z2  # Linear output\n",
    "    \n",
    "    # Compute the mean squared error (MSE)\n",
    "    loss = np.mean((output - y) ** 2)\n",
    "    \n",
    "    # Backpropagation\n",
    "    d_output = 2 * (output - y) / NN\n",
    "    dW2 = np.dot(a1.T, d_output)\n",
    "    db2 = np.sum(d_output, axis=0, keepdims=True)\n",
    "    \n",
    "    d_a1 = np.dot(d_output, W2.T)\n",
    "    d_z1 = d_a1 * sigmoid_derivative(z1)\n",
    "    dW1 = np.dot(x.T, d_z1)\n",
    "    db1 = np.sum(d_z1, axis=0, keepdims=True)\n",
    "    \n",
    "    # Update the parameters (gradient points uphill, hence -=)\n",
    "    W2 -= learning_rate * dW2\n",
    "    b2 -= learning_rate * db2\n",
    "    W1 -= learning_rate * dW1\n",
    "    b1 -= learning_rate * db1\n",
    "    \n",
    "    if (epoch + 1) % 1000 == 0:\n",
    "        print(f\"Epoch {epoch+1}: Loss = {loss:.4f}\")\n",
    "\n",
    "# Show the network's predictions compared with the actual values\n",
    "\"\"\"\n",
    "print(\"Prediktioner:\")\n",
    "print(output)\n",
    "print(\"Faktiska värden:\")\n",
    "print(y)\n",
    "\"\"\"\n",
    "\n",
    "# Plotting: actual values (red dots) and predicted values (blue line)\n",
    "plt.figure(figsize=(8, 6))\n",
    "plt.plot(x, y, 'ro', label='Faktiska värden')\n",
    "plt.plot(x, output, 'bo-', label='Predikterade värden')\n",
    "plt.xlabel('x')\n",
    "plt.ylabel('y')\n",
    "plt.title('Faktiska vs Predikterade värden')\n",
    "plt.legend()\n",
    "plt.show()\n",
    "\n",
    "# Use the trained model to predict at a new value, e.g. x = 3.5\n",
    "new_x = np.array([[3.5]])\n",
    "prediction = predict(new_x, W1, b1, W2, b2)\n",
    "print(f\"\\nFör x = 3.5 är modellens prediktion: {prediction[0][0]:.4f}\")\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE: depends on x, y, output and the trained W1, b1, W2, b2 from the previous cell.\n",
    "x_extra      = np.linspace(10, 15, 50).reshape(-1, 1)\n",
    "y_extra_pred = predict(x_extra, W1, b1, W2, b2)  # Predictions outside the training range\n",
    "\n",
    "# Stack training predictions and extrapolation, then sort by x for a clean line plot\n",
    "x_all         = np.vstack([x, x_extra])\n",
    "y_all         = np.vstack([output, y_extra_pred])\n",
    "order         = np.argsort(x_all[:, 0])\n",
    "x_all_sorted  = x_all[order]\n",
    "y_all_sorted  = y_all[order]\n",
    "\n",
    "plt.figure(figsize=(8, 6))\n",
    "plt.plot(x, y,              'ro',  label='Träningsdata (faktiska)')\n",
    "plt.plot(x, output,   'bo-', label='Prediktion på träning')\n",
    "plt.plot(x_all_sorted,\n",
    "         y_all_sorted,      'g--', label='Extrapolering 10–15')\n",
    "plt.xlabel('x')\n",
    "plt.ylabel('y')\n",
    "plt.title('Modellens prediktion: träning och extrapolering')\n",
    "plt.legend()\n",
    "plt.show()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python (venv)",
   "language": "python",
   "name": "venv"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.11"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
