diff --git a/docs/image/bootstrap_hist.png b/docs/image/bootstrap_hist.png
index 9212688..c81c961 100644
Binary files a/docs/image/bootstrap_hist.png and b/docs/image/bootstrap_hist.png differ
diff --git a/docs/image/bootstrap_hists.png b/docs/image/bootstrap_hists.png
new file mode 100644
index 0000000..a158bd2
Binary files /dev/null and b/docs/image/bootstrap_hists.png differ
diff --git a/docs/tutorial/bootstrap.rst b/docs/tutorial/bootstrap.rst
index 741cccf..cf8c3a4 100644
--- a/docs/tutorial/bootstrap.rst
+++ b/docs/tutorial/bootstrap.rst
@@ -15,6 +15,9 @@ In this example, we need to import ``numpy``, ``pandas``, and ``graphviz`` in ad
import lingam
from lingam.utils import print_causal_directions, print_dagc, make_dot
+ import warnings
+ warnings.filterwarnings("ignore")
+
print([np.__version__, pd.__version__, graphviz.__version__, lingam.__version__])
np.set_printoptions(precision=3, suppress=True)
@@ -23,8 +26,8 @@ In this example, we need to import ``numpy``, ``pandas``, and ``graphviz`` in ad
.. parsed-literal::
- ['1.24.4', '2.0.3', '0.20.1', '1.8.3']
-
+ ['1.26.4', '2.3.3', '0.21', '1.12.1']
+
Test data
---------
@@ -33,12 +36,13 @@ We create test data consisting of 6 variables.
.. code-block:: python
- x3 = np.random.uniform(size=1000)
- x0 = 3.0*x3 + np.random.uniform(size=1000)
- x2 = 6.0*x3 + np.random.uniform(size=1000)
- x1 = 3.0*x0 + 2.0*x2 + np.random.uniform(size=1000)
- x5 = 4.0*x0 + np.random.uniform(size=1000)
- x4 = 8.0*x0 - 1.0*x2 + np.random.uniform(size=1000)
+ _size = 100
+ x3 = np.random.uniform(size=_size)
+ x0 = 3.0*x3 + np.random.uniform(size=_size)
+ x2 = 6.0*x3 + np.random.uniform(size=_size)
+ x1 = 3.0*x0 + 2.0*x2 + np.random.uniform(size=_size)
+ x5 = 4.0*x0 + np.random.uniform(size=_size)
+ x4 = 8.0*x0 - 1.0*x2 + np.random.uniform(size=_size)
X = pd.DataFrame(np.array([x0, x1, x2, x3, x4, x5]).T ,columns=['x0', 'x1', 'x2', 'x3', 'x4', 'x5'])
X.head()
@@ -99,48 +103,48 @@ We create test data consisting of 6 variables.
| 0 |
- 2.239321 |
- 15.340724 |
- 4.104399 |
+ 2.324257 |
+ 15.088680 |
+ 3.604677 |
0.548814 |
- 14.176947 |
- 9.249925 |
+ 15.299760 |
+ 9.698288 |
| 1 |
- 2.155632 |
- 16.630954 |
- 4.767220 |
+ 2.415576 |
+ 17.995735 |
+ 4.987480 |
0.715189 |
- 12.775458 |
- 9.189045 |
+ 14.710164 |
+ 10.591596 |
| 2 |
- 2.284116 |
- 15.910406 |
- 4.139736 |
+ 2.543484 |
+ 15.952262 |
+ 3.994332 |
0.602763 |
- 14.201794 |
- 9.273880 |
+ 16.878512 |
+ 10.273552 |
| 3 |
- 2.343420 |
- 14.921457 |
- 3.519820 |
+ 2.596838 |
+ 14.769421 |
+ 3.448903 |
0.544883 |
- 15.580067 |
- 9.723392 |
+ 18.076397 |
+ 11.332654 |
| 4 |
- 1.314940 |
- 11.055176 |
- 3.146972 |
+ 1.519718 |
+ 10.099609 |
+ 2.566608 |
0.423655 |
- 7.604743 |
- 5.312976 |
+ 9.924640 |
+ 6.948359 |
@@ -174,8 +178,10 @@ We call :func:`~lingam.DirectLiNGAM.bootstrap` method instead of :func:`~lingam.
.. code-block:: python
+ n_samples = 1000
+
model = lingam.DirectLiNGAM()
- result = model.bootstrap(X, n_sampling=100)
+ result = model.bootstrap(X, n_sampling=n_samples)
Causal Directions
-----------------
@@ -190,20 +196,20 @@ We can check the result by utility function.
.. code-block:: python
- print_causal_directions(cdc, 100)
+ print_causal_directions(cdc, n_samples)
.. parsed-literal::
- x5 <--- x0 (b>0) (100.0%)
- x1 <--- x0 (b>0) (100.0%)
- x1 <--- x2 (b>0) (100.0%)
- x4 <--- x2 (b<0) (100.0%)
- x0 <--- x3 (b>0) (98.0%)
- x4 <--- x0 (b>0) (98.0%)
- x2 <--- x3 (b>0) (96.0%)
- x3 <--- x2 (b>0) (4.0%)
-
+ x4 <--- x2 (b<0) (87.9%)
+ x4 <--- x0 (b>0) (86.6%)
+ x1 <--- x2 (b>0) (77.5%)
+ x1 <--- x0 (b>0) (77.3%)
+ x2 <--- x3 (b>0) (76.1%)
+ x5 <--- x0 (b>0) (75.4%)
+ x0 <--- x3 (b>0) (45.4%)
+ x0 <--- x5 (b>0) (24.6%)
+
Directed Acyclic Graphs
-----------------------
@@ -222,12 +228,12 @@ We can check the result by utility function.
.. code-block:: python
- print_dagc(dagc, 100)
+ print_dagc(dagc, n_samples)
.. parsed-literal::
- DAG[0]: 84.0%
+ DAG[0]: 17.0%
x0 <--- x3 (b>0)
x1 <--- x0 (b>0)
x1 <--- x2 (b>0)
@@ -235,7 +241,7 @@ We can check the result by utility function.
x4 <--- x0 (b>0)
x4 <--- x2 (b<0)
x5 <--- x0 (b>0)
- DAG[1]: 3.0%
+ DAG[1]: 4.2%
x0 <--- x3 (b>0)
x1 <--- x0 (b>0)
x1 <--- x2 (b>0)
@@ -243,16 +249,15 @@ We can check the result by utility function.
x4 <--- x0 (b>0)
x4 <--- x2 (b<0)
x5 <--- x0 (b>0)
- DAG[2]: 2.0%
- x0 <--- x3 (b>0)
+ DAG[2]: 3.9%
x1 <--- x0 (b>0)
x1 <--- x2 (b>0)
- x1 <--- x3 (b<0)
x2 <--- x3 (b>0)
+ x3 <--- x0 (b>0)
x4 <--- x0 (b>0)
x4 <--- x2 (b<0)
x5 <--- x0 (b>0)
-
+
Probability
-----------
@@ -268,13 +273,13 @@ bootstrapping.
.. parsed-literal::
- [[0. 0. 0.03 0.98 0.02 0. ]
- [1. 0. 1. 0.02 0. 0.01]
- [0.01 0. 0. 0.96 0. 0.01]
- [0. 0. 0.04 0. 0. 0. ]
- [0.98 0.01 1. 0.02 0. 0.02]
- [1. 0. 0.02 0.02 0. 0. ]]
-
+ [[0. 0.178 0.163 0.482 0.134 0.246]
+ [0.773 0. 0.775 0.202 0.069 0.064]
+ [0.2 0.225 0. 0.761 0.093 0.032]
+ [0.183 0.166 0.19 0. 0.031 0.084]
+ [0.866 0.074 0.88 0.121 0. 0.043]
+ [0.754 0.059 0.065 0.095 0.062 0. ]]
+
Total Causal Effects
--------------------
@@ -351,150 +356,213 @@ below.
| 0 |
- x3 |
- x0 |
- 3.004106 |
- 1.00 |
+ x2 |
+ x4 |
+ -0.986006 |
+ 0.884 |
| 1 |
x0 |
- x1 |
- 2.963177 |
- 1.00 |
+ x4 |
+ 7.975821 |
+ 0.866 |
| 2 |
- x2 |
- x1 |
- 2.017539 |
- 1.00 |
+ x3 |
+ x4 |
+ 17.169757 |
+ 0.858 |
| 3 |
x3 |
x1 |
- 20.928254 |
- 1.00 |
+ 20.553538 |
+ 0.794 |
| 4 |
x0 |
- x5 |
- 3.997787 |
- 1.00 |
+ x1 |
+ 3.020369 |
+ 0.793 |
| 5 |
x3 |
- x4 |
- 18.077943 |
- 1.00 |
+ x2 |
+ 5.968590 |
+ 0.788 |
| 6 |
- x3 |
- x5 |
- 12.012988 |
- 1.00 |
+ x2 |
+ x1 |
+ 1.992771 |
+ 0.775 |
| 7 |
- x2 |
- x4 |
- -1.006362 |
- 1.00 |
+ x0 |
+ x5 |
+ 3.984278 |
+ 0.754 |
| 8 |
- x0 |
- x4 |
- 8.011818 |
- 0.98 |
+ x3 |
+ x5 |
+ 11.686617 |
+ 0.657 |
| 9 |
x3 |
- x2 |
- 5.964879 |
- 0.96 |
+ x0 |
+ 2.920996 |
+ 0.653 |
| 10 |
+ x0 |
x2 |
- x5 |
- 0.396327 |
- 0.09 |
+ 1.679845 |
+ 0.343 |
| 11 |
x2 |
- x0 |
- 0.487915 |
- 0.07 |
+ x5 |
+ 0.155444 |
+ 0.282 |
| 12 |
- x2 |
- x3 |
- 0.164565 |
- 0.04 |
+ x5 |
+ x4 |
+ 1.550997 |
+ 0.266 |
| 13 |
- x5 |
- x4 |
- 0.087437 |
- 0.03 |
+ x0 |
+ x3 |
+ 0.305366 |
+ 0.260 |
| 14 |
- x4 |
x5 |
- 0.496445 |
- 0.02 |
+ x1 |
+ 0.939446 |
+ 0.259 |
| 15 |
x5 |
- x1 |
- -0.064703 |
- 0.02 |
+ x0 |
+ 0.249365 |
+ 0.246 |
| 16 |
- x4 |
x1 |
- 0.367100 |
- 0.02 |
+ x4 |
+ 0.863039 |
+ 0.245 |
| 17 |
- x4 |
+ x2 |
x0 |
- 0.124114 |
- 0.02 |
+ 0.120842 |
+ 0.244 |
| 18 |
- x0 |
+ x1 |
x2 |
- 0.056261 |
- 0.01 |
+ 0.285349 |
+ 0.225 |
| 19 |
x1 |
- x4 |
- -0.097108 |
- 0.01 |
+ x5 |
+ 0.576121 |
+ 0.199 |
| 20 |
+ x1 |
+ x0 |
+ 0.144407 |
+ 0.197 |
+
+
+ | 21 |
x5 |
x2 |
- -0.111894 |
- 0.01 |
+ 0.451434 |
+ 0.196 |
+
+
+ | 22 |
+ x1 |
+ x3 |
+ 0.046961 |
+ 0.194 |
+
+
+ | 23 |
+ x2 |
+ x3 |
+ 0.133917 |
+ 0.191 |
+
+
+ | 24 |
+ x5 |
+ x3 |
+ 0.076654 |
+ 0.168 |
+
+
+ | 25 |
+ x4 |
+ x1 |
+ 0.362045 |
+ 0.144 |
+
+
+ | 26 |
+ x4 |
+ x5 |
+ 0.478376 |
+ 0.143 |
+
+
+ | 27 |
+ x4 |
+ x0 |
+ 0.123534 |
+ 0.134 |
+
+
+ | 28 |
+ x4 |
+ x2 |
+ -0.139721 |
+ 0.097 |
+
+
+ | 29 |
+ x4 |
+ x3 |
+ -0.006454 |
+ 0.043 |
@@ -565,36 +633,36 @@ We can easily perform sorting operations with pandas.DataFrame.
3 |
x3 |
x1 |
- 20.928254 |
- 1.00 |
+ 20.553538 |
+ 0.794 |
- | 5 |
+ 2 |
x3 |
x4 |
- 18.077943 |
- 1.00 |
+ 17.169757 |
+ 0.858 |
- | 6 |
+ 8 |
x3 |
x5 |
- 12.012988 |
- 1.00 |
+ 11.686617 |
+ 0.657 |
- | 8 |
+ 1 |
x0 |
x4 |
- 8.011818 |
- 0.98 |
+ 7.975821 |
+ 0.866 |
- | 9 |
+ 5 |
x3 |
x2 |
- 5.964879 |
- 0.96 |
+ 5.968590 |
+ 0.788 |
@@ -660,39 +728,39 @@ We can easily perform sorting operations with pandas.DataFrame.
- | 20 |
- x5 |
- x2 |
- -0.111894 |
- 0.01 |
+ 29 |
+ x4 |
+ x3 |
+ -0.006454 |
+ 0.043 |
- | 18 |
- x0 |
+ 28 |
+ x4 |
x2 |
- 0.056261 |
- 0.01 |
+ -0.139721 |
+ 0.097 |
- | 19 |
- x1 |
+ 27 |
x4 |
- -0.097108 |
- 0.01 |
+ x0 |
+ 0.123534 |
+ 0.134 |
- | 17 |
+ 26 |
x4 |
- x0 |
- 0.124114 |
- 0.02 |
+ x5 |
+ 0.478376 |
+ 0.143 |
- | 16 |
+ 25 |
x4 |
x1 |
- 0.367100 |
- 0.02 |
+ 0.362045 |
+ 0.144 |
@@ -761,39 +829,39 @@ following code extracts the causal direction towards x1.
- | 1 |
- x0 |
+ 3 |
+ x3 |
x1 |
- 2.963177 |
- 1.00 |
+ 20.553538 |
+ 0.794 |
- | 2 |
- x2 |
+ 4 |
+ x0 |
x1 |
- 2.017539 |
- 1.00 |
+ 3.020369 |
+ 0.793 |
- | 3 |
- x3 |
+ 6 |
+ x2 |
x1 |
- 20.928254 |
- 1.00 |
+ 1.992771 |
+ 0.775 |
- | 15 |
+ 14 |
x5 |
x1 |
- -0.064703 |
- 0.02 |
+ 0.939446 |
+ 0.259 |
- | 16 |
+ 25 |
x4 |
x1 |
- 0.367100 |
- 0.02 |
+ 0.362045 |
+ 0.144 |
@@ -818,6 +886,78 @@ values of the causal effect, as shown below.
.. image:: ../image/bootstrap_hist.png
+Furthermore, when we separate the bootstrap coefficient distributions
+into three structural cases — X->Y, Y->X, and no directed edge between
+X and Y — the resulting histograms are as shown below.
+
+.. code-block:: python
+
+ import matplotlib.ticker as ticker
+
+ from_index, to_index = 2, 4
+
+ te_xy = result.total_effects_[:, to_index, from_index]
+ te_yx = result.total_effects_[:, from_index, to_index]
+
+ both_zero_mask = (te_xy == 0.0) & (te_yx == 0.0)
+ te_zero = result.total_effects_[both_zero_mask, to_index, from_index]
+
+ te_xy = te_xy[te_xy != 0.0]
+ te_yx = te_yx[te_yx != 0.0]
+
+ bins_count = int(np.ceil(1 + np.log2(max(n_samples, 1))))
+
+ # calculate xmin, xmax
+ arr_list = [te_xy, te_yx, te_zero]
+ if any(a.size > 0 for a in arr_list):
+ vals = np.concatenate([a for a in arr_list if a.size > 0])
+ else:
+ vals = np.array([0.0])
+
+ xmin, xmax = np.min(vals), np.max(vals)
+ if xmin == xmax:
+ eps = 1e-9 if xmin == 0 else abs(xmin) * 1e-3
+ xmin, xmax = xmin - eps, xmax + eps
+
+ bin_edges = np.linspace(xmin, xmax, bins_count + 1)
+
+ # calculate ymax
+ counts_xy, _ = np.histogram(te_xy, bins=bin_edges) if te_xy.size > 0 else (np.zeros(bins_count, dtype=int), None)
+ counts_yx, _ = np.histogram(te_yx, bins=bin_edges) if te_yx.size > 0 else (np.zeros(bins_count, dtype=int), None)
+ counts_zz, _ = np.histogram(te_zero, bins=bin_edges) if te_zero.size > 0 else (np.zeros(bins_count, dtype=int), None)
+
+ ymax = int(max(counts_xy.max(initial=0), counts_yx.max(initial=0), counts_zz.max(initial=0)))
+ ymax = max(ymax, 1)
+ # If you want to set ymax to the number of bootstrap iterations, uncomment the next line.
+ # ymax = n_samples
+
+ # display histograms
+ fig, axes = plt.subplots(1, 3, figsize=(15, 4), sharex=True, sharey=True)
+ labels = [f'x{i}' for i in range(X.shape[1])]
+
+ axes[0].hist(te_xy, bins=bin_edges)
+ axes[0].set_title(f"{labels[from_index]} -> {labels[to_index]}")
+ axes[0].yaxis.set_major_locator(ticker.MaxNLocator(integer=True))
+ axes[0].set_xlim(xmin, xmax)
+ axes[0].set_ylim(0, ymax)
+
+ axes[1].hist(te_yx, bins=bin_edges)
+ axes[1].set_title(f"{labels[to_index]} -> {labels[from_index]}")
+ axes[1].yaxis.set_major_locator(ticker.MaxNLocator(integer=True))
+ axes[1].set_xlim(xmin, xmax)
+ axes[1].set_ylim(0, ymax)
+
+ axes[2].hist(te_zero, bins=bin_edges)
+ axes[2].set_title("No directed edge between " + labels[from_index] + " and " + labels[to_index])
+ axes[2].yaxis.set_major_locator(ticker.MaxNLocator(integer=True))
+ axes[2].set_xlim(xmin, xmax)
+ axes[2].set_ylim(0, ymax)
+
+ plt.tight_layout()
+ plt.show()
+
+.. image:: ../image/bootstrap_hists.png
+
Bootstrap Probability of Path
-----------------------------
@@ -830,7 +970,7 @@ variable X0 to variable X1.
.. code-block:: python
from_index = 3 # index of x3
- to_index = 1 # index of x0
+ to_index = 1 # index of x1
pd.DataFrame(result.get_paths(from_index, to_index))
@@ -888,57 +1028,237 @@ variable X0 to variable X1.
| 0 |
- [3, 0, 1] |
- 8.893562 |
- 0.98 |
+ [3, 2, 1] |
+ 11.914854 |
+ 0.660 |
| 1 |
- [3, 2, 1] |
- 12.030408 |
- 0.96 |
+ [3, 0, 1] |
+ 8.756234 |
+ 0.443 |
| 2 |
- [3, 2, 0, 1] |
- 2.239175 |
- 0.03 |
+ [3, 1] |
+ 2.105700 |
+ 0.202 |
| 3 |
- [3, 1] |
- -0.639462 |
- 0.02 |
+ [3, 2, 0, 1] |
+ 1.635862 |
+ 0.094 |
| 4 |
- [3, 2, 4, 0, 1] |
- -3.194541 |
- 0.02 |
+ [3, 5, 0, 1] |
+ 8.670284 |
+ 0.060 |
| 5 |
[3, 4, 0, 1] |
- 9.820705 |
- 0.02 |
+ 6.979752 |
+ 0.054 |
| 6 |
- [3, 0, 2, 1] |
- 3.061033 |
- 0.01 |
+ [3, 2, 4, 1] |
+ -1.146483 |
+ 0.038 |
| 7 |
- [3, 0, 5, 1] |
- 1.176834 |
- 0.01 |
+ [3, 0, 4, 1] |
+ 4.459602 |
+ 0.028 |
| 8 |
+ [3, 0, 5, 1] |
+ 2.864025 |
+ 0.026 |
+
+
+ | 9 |
+ [3, 2, 4, 0, 1] |
+ -4.602396 |
+ 0.024 |
+
+
+ | 10 |
+ [3, 0, 2, 1] |
+ -1.512156 |
+ 0.022 |
+
+
+ | 11 |
+ [3, 4, 1] |
+ 4.954881 |
+ 0.019 |
+
+
+ | 12 |
+ [3, 2, 5, 0, 1] |
+ 0.374461 |
+ 0.009 |
+
+
+ | 13 |
+ [3, 2, 0, 5, 1] |
+ 0.583856 |
+ 0.008 |
+
+
+ | 14 |
+ [3, 5, 4, 0, 1] |
+ 6.941594 |
+ 0.007 |
+
+
+ | 15 |
+ [3, 4, 5, 0, 1] |
+ 2.145360 |
+ 0.007 |
+
+
+ | 16 |
+ [3, 4, 2, 1] |
+ -1.080988 |
+ 0.007 |
+
+
+ | 17 |
+ [3, 5, 1] |
+ 3.272935 |
+ 0.006 |
+
+
+ | 18 |
+ [3, 4, 0, 5, 1] |
+ 2.697207 |
+ 0.005 |
+
+
+ | 19 |
+ [3, 4, 2, 0, 1] |
+ -0.219167 |
+ 0.005 |
+
+
+ | 20 |
[3, 0, 5, 2, 1] |
- -2.719517 |
- 0.01 |
+ 5.181321 |
+ 0.004 |
+
+
+ | 21 |
+ [3, 5, 0, 4, 1] |
+ 5.442240 |
+ 0.004 |
+
+
+ | 22 |
+ [3, 5, 2, 1] |
+ 1.537410 |
+ 0.003 |
+
+
+ | 23 |
+ [3, 4, 5, 1] |
+ 4.166390 |
+ 0.003 |
+
+
+ | 24 |
+ [3, 0, 5, 4, 1] |
+ -0.522766 |
+ 0.003 |
+
+
+ | 25 |
+ [3, 2, 4, 5, 0, 1] |
+ -1.083415 |
+ 0.003 |
+
+
+ | 26 |
+ [3, 5, 4, 1] |
+ -7.351469 |
+ 0.002 |
+
+
+ | 27 |
+ [3, 2, 4, 5, 1] |
+ 0.203801 |
+ 0.002 |
+
+
+ | 28 |
+ [3, 2, 4, 0, 5, 1] |
+ -1.303056 |
+ 0.002 |
+
+
+ | 29 |
+ [3, 5, 2, 0, 1] |
+ -0.006054 |
+ 0.002 |
+
+
+ | 30 |
+ [3, 5, 4, 2, 1] |
+ -15.137090 |
+ 0.002 |
+
+
+ | 31 |
+ [3, 4, 0, 2, 1] |
+ -3.885974 |
+ 0.001 |
+
+
+ | 32 |
+ [3, 2, 5, 4, 1] |
+ -0.035426 |
+ 0.001 |
+
+
+ | 33 |
+ [3, 5, 0, 2, 1] |
+ 7.112032 |
+ 0.001 |
+
+
+ | 34 |
+ [3, 2, 0, 4, 1] |
+ -3.206907 |
+ 0.001 |
+
+
+ | 35 |
+ [3, 5, 2, 4, 0, 1] |
+ 0.351331 |
+ 0.001 |
+
+
+ | 36 |
+ [3, 0, 4, 5, 1] |
+ -0.695107 |
+ 0.001 |
+
+
+ | 37 |
+ [3, 5, 4, 0, 2, 1] |
+ 14.386599 |
+ 0.001 |
+
+
+ | 38 |
+ [3, 4, 2, 0, 5, 1] |
+ -0.072976 |
+ 0.001 |
diff --git a/docs/tutorial/bootstrap_with_imputation.rst b/docs/tutorial/bootstrap_with_imputation.rst
index 4df14f0..3a596fe 100644
--- a/docs/tutorial/bootstrap_with_imputation.rst
+++ b/docs/tutorial/bootstrap_with_imputation.rst
@@ -28,8 +28,8 @@ Import and settings
.. parsed-literal::
- ['1.24.4', '1.8.3']
-
+ ['1.26.4', '1.12.1']
+
Test data
---------
@@ -311,12 +311,17 @@ results with no missing data.
Median value of each element of the matrix
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Median values of each element of the adjacency matrix estimated on the
-data with missing values are as follows:
+The following matrix shows the median of each adjacency matrix element
+across all bootstrap replicates. For each bootstrap replicate, its
+representative adjacency matrix is obtained by taking the median across
+its multiple imputations.
.. code-block:: python
- np.median(adj_matrices_list, axis=(0, 1))
+ adj_median_per_bootstrap = np.median(adj_matrices_list, axis=1)
+ adj_median_over_bootstrap = np.median(adj_median_per_bootstrap, axis=0)
+
+ adj_median_over_bootstrap
@@ -327,8 +332,8 @@ data with missing values are as follows:
[ 0.555, 0. , 0.381, 0. , 0. , 0. ],
[ 0. , 0. , 0. , 0.918, 0. , 0. ],
[ 0. , 0. , 0. , 0. , 0. , 0. ],
- [ 1.025, 0. , -0.147, 0. , 0. , 0. ],
- [ 0.892, 0. , 0. , 0. , 0. , 0. ]])
+ [ 1.026, 0. , -0.148, 0. , 0. , 0. ],
+ [ 0.893, 0. , 0. , 0. , 0. , 0. ]])
diff --git a/examples/Bootstrap.ipynb b/examples/Bootstrap.ipynb
index 9b9223f..e5be0ab 100644
--- a/examples/Bootstrap.ipynb
+++ b/examples/Bootstrap.ipynb
@@ -29,7 +29,7 @@
"name": "stdout",
"output_type": "stream",
"text": [
- "['1.24.4', '2.0.3', '0.20.1', '1.8.3']\n"
+ "['1.26.4', '2.3.3', '0.21', '1.12.1']\n"
]
}
],
@@ -40,6 +40,9 @@
"import lingam\n",
"from lingam.utils import print_causal_directions, print_dagc, make_dot\n",
"\n",
+ "import warnings\n",
+ "warnings.filterwarnings(\"ignore\")\n",
+ "\n",
"print([np.__version__, pd.__version__, graphviz.__version__, lingam.__version__])\n",
"\n",
"np.set_printoptions(precision=3, suppress=True)\n",
@@ -96,60 +99,60 @@
" \n",
" \n",
" | 0 | \n",
- " 2.239321 | \n",
- " 15.340724 | \n",
- " 4.104399 | \n",
+ " 2.324257 | \n",
+ " 15.088680 | \n",
+ " 3.604677 | \n",
" 0.548814 | \n",
- " 14.176947 | \n",
- " 9.249925 | \n",
+ " 15.299760 | \n",
+ " 9.698288 | \n",
"
\n",
" \n",
" | 1 | \n",
- " 2.155632 | \n",
- " 16.630954 | \n",
- " 4.767220 | \n",
+ " 2.415576 | \n",
+ " 17.995735 | \n",
+ " 4.987480 | \n",
" 0.715189 | \n",
- " 12.775458 | \n",
- " 9.189045 | \n",
+ " 14.710164 | \n",
+ " 10.591596 | \n",
"
\n",
" \n",
" | 2 | \n",
- " 2.284116 | \n",
- " 15.910406 | \n",
- " 4.139736 | \n",
+ " 2.543484 | \n",
+ " 15.952262 | \n",
+ " 3.994332 | \n",
" 0.602763 | \n",
- " 14.201794 | \n",
- " 9.273880 | \n",
+ " 16.878512 | \n",
+ " 10.273552 | \n",
"
\n",
" \n",
" | 3 | \n",
- " 2.343420 | \n",
- " 14.921457 | \n",
- " 3.519820 | \n",
+ " 2.596838 | \n",
+ " 14.769421 | \n",
+ " 3.448903 | \n",
" 0.544883 | \n",
- " 15.580067 | \n",
- " 9.723392 | \n",
+ " 18.076397 | \n",
+ " 11.332654 | \n",
"
\n",
" \n",
" | 4 | \n",
- " 1.314940 | \n",
- " 11.055176 | \n",
- " 3.146972 | \n",
+ " 1.519718 | \n",
+ " 10.099609 | \n",
+ " 2.566608 | \n",
" 0.423655 | \n",
- " 7.604743 | \n",
- " 5.312976 | \n",
+ " 9.924640 | \n",
+ " 6.948359 | \n",
"
\n",
" \n",
"\n",
""
],
"text/plain": [
- " x0 x1 x2 x3 x4 x5\n",
- "0 2.239321 15.340724 4.104399 0.548814 14.176947 9.249925\n",
- "1 2.155632 16.630954 4.767220 0.715189 12.775458 9.189045\n",
- "2 2.284116 15.910406 4.139736 0.602763 14.201794 9.273880\n",
- "3 2.343420 14.921457 3.519820 0.544883 15.580067 9.723392\n",
- "4 1.314940 11.055176 3.146972 0.423655 7.604743 5.312976"
+ " x0 x1 x2 x3 x4 x5\n",
+ "0 2.324257 15.088680 3.604677 0.548814 15.299760 9.698288\n",
+ "1 2.415576 17.995735 4.987480 0.715189 14.710164 10.591596\n",
+ "2 2.543484 15.952262 3.994332 0.602763 16.878512 10.273552\n",
+ "3 2.596838 14.769421 3.448903 0.544883 18.076397 11.332654\n",
+ "4 1.519718 10.099609 2.566608 0.423655 9.924640 6.948359"
]
},
"execution_count": 2,
@@ -158,12 +161,13 @@
}
],
"source": [
- "x3 = np.random.uniform(size=1000)\n",
- "x0 = 3.0*x3 + np.random.uniform(size=1000)\n",
- "x2 = 6.0*x3 + np.random.uniform(size=1000)\n",
- "x1 = 3.0*x0 + 2.0*x2 + np.random.uniform(size=1000)\n",
- "x5 = 4.0*x0 + np.random.uniform(size=1000)\n",
- "x4 = 8.0*x0 - 1.0*x2 + np.random.uniform(size=1000)\n",
+ "_size = 100\n",
+ "x3 = np.random.uniform(size=_size)\n",
+ "x0 = 3.0*x3 + np.random.uniform(size=_size)\n",
+ "x2 = 6.0*x3 + np.random.uniform(size=_size)\n",
+ "x1 = 3.0*x0 + 2.0*x2 + np.random.uniform(size=_size)\n",
+ "x5 = 4.0*x0 + np.random.uniform(size=_size)\n",
+ "x4 = 8.0*x0 - 1.0*x2 + np.random.uniform(size=_size)\n",
"X = pd.DataFrame(np.array([x0, x1, x2, x3, x4, x5]).T ,columns=['x0', 'x1', 'x2', 'x3', 'x4', 'x5'])\n",
"X.head()"
]
@@ -184,104 +188,103 @@
"\n",
"\n",
- "\n",
- "\n",
- "