{"diffoscope-json-version": 1, "source1": "/srv/reproducible-results/rbuild-debian/r-b-build.StFljeAU/b1/pandas_1.1.5+dfsg-2_i386.changes", "source2": "/srv/reproducible-results/rbuild-debian/r-b-build.StFljeAU/b2/pandas_1.1.5+dfsg-2_i386.changes", "unified_diff": null, "details": [{"source1": "Files", "source2": "Files", "unified_diff": "@@ -1,5 +1,5 @@\n \n- 2191a62eb177a832c7f98ee2094d1ac0 8192660 doc optional python-pandas-doc_1.1.5+dfsg-2_all.deb\n- 80331794a9126cd4924000d3dcc2af47 22990752 debug optional python3-pandas-lib-dbgsym_1.1.5+dfsg-2_i386.deb\n- 9bbc1f434f4231977f650cb124f01c30 3182900 python optional python3-pandas-lib_1.1.5+dfsg-2_i386.deb\n+ 891f8b5e82742c2cc9f89efda6aa745b 8192404 doc optional python-pandas-doc_1.1.5+dfsg-2_all.deb\n+ b74982e04056970403964bc59c145e59 22988772 debug optional python3-pandas-lib-dbgsym_1.1.5+dfsg-2_i386.deb\n+ 39af52e30d8e988f675410f89b99c1d0 3183116 python optional python3-pandas-lib_1.1.5+dfsg-2_i386.deb\n e6541380ee2e4732c3cfb039a92ab3e3 2095828 python optional python3-pandas_1.1.5+dfsg-2_all.deb\n"}, {"source1": "python-pandas-doc_1.1.5+dfsg-2_all.deb", "source2": "python-pandas-doc_1.1.5+dfsg-2_all.deb", "unified_diff": null, "details": [{"source1": "file list", "source2": "file list", "unified_diff": "@@ -1,3 +1,3 @@\n -rw-r--r-- 0 0 0 4 2021-01-12 21:06:04.000000 debian-binary\n--rw-r--r-- 0 0 0 146852 2021-01-12 21:06:04.000000 control.tar.xz\n--rw-r--r-- 0 0 0 8045616 2021-01-12 21:06:04.000000 data.tar.xz\n+-rw-r--r-- 0 0 0 146856 2021-01-12 21:06:04.000000 control.tar.xz\n+-rw-r--r-- 0 0 0 8045356 2021-01-12 21:06:04.000000 data.tar.xz\n"}, {"source1": "control.tar.xz", "source2": "control.tar.xz", "unified_diff": null, "details": [{"source1": "control.tar", "source2": "control.tar", "unified_diff": null, "details": [{"source1": "./md5sums", "source2": "./md5sums", "unified_diff": null, "details": [{"source1": "./md5sums", "source2": "./md5sums", "comments": ["Files differ"], "unified_diff": null}]}]}]}, {"source1": "data.tar.xz", "source2": "data.tar.xz", "unified_diff": null, "details": [{"source1": "data.tar", "source2": "data.tar", "unified_diff": null, "details": [{"source1": "file list", "source2": "file list", "unified_diff": "@@ -6267,45 +6267,45 @@\n -rw-r--r-- 0 root (0) root (0) 20758 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/reference/resampling.html\n -rw-r--r-- 0 root (0) root (0) 141536 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/reference/series.html\n -rw-r--r-- 0 root (0) root (0) 19226 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/reference/style.html\n -rw-r--r-- 0 root (0) root (0) 26916 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/reference/window.html\n -rw-r--r-- 0 root (0) root (0) 244 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/release.html\n -rw-r--r-- 0 root (0) root (0) 269 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/reshaping.html\n -rw-r--r-- 0 root (0) root (0) 3935 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/search.html\n--rw-r--r-- 0 root (0) root (0) 1182373 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/searchindex.js\n+-rw-r--r-- 0 root (0) root (0) 1182349 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/searchindex.js\n -rw-r--r-- 0 root (0) root (0) 259 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/sparse.html\n -rw-r--r-- 0 root (0) root (0) 244 2021-01-12 21:06:04.000000 
./usr/share/doc/python-pandas-doc/html/style.html\n -rw-r--r-- 0 root (0) root (0) 255 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/text.html\n -rw-r--r-- 0 root (0) root (0) 256 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/timedeltas.html\n -rw-r--r-- 0 root (0) root (0) 277 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/timeseries.html\n -rw-r--r-- 0 root (0) root (0) 272 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/tutorials.html\n drwxr-xr-x 0 root (0) root (0) 0 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/\n -rw-r--r-- 0 root (0) root (0) 126472 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/10min.html\n--rw-r--r-- 0 root (0) root (0) 220586 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/advanced.html\n+-rw-r--r-- 0 root (0) root (0) 220582 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/advanced.html\n -rw-r--r-- 0 root (0) root (0) 414882 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/basics.html\n -rw-r--r-- 0 root (0) root (0) 20303 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/boolean.html\n -rw-r--r-- 0 root (0) root (0) 197140 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/categorical.html\n -rw-r--r-- 0 root (0) root (0) 163238 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/computation.html\n -rw-r--r-- 0 root (0) root (0) 4519 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/cookbook.html\n -rw-r--r-- 0 root (0) root (0) 125192 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/dsintro.html\n--rw-r--r-- 0 root (0) root (0) 93135 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/enhancingperf.html\n+-rw-r--r-- 0 root (0) root (0) 93136 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/enhancingperf.html\n -rw-r--r-- 0 root (0) root (0) 50499 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/gotchas.html\n -rw-r--r-- 0 root (0) root (0) 237875 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/groupby.html\n -rw-r--r-- 0 root (0) root (0) 35919 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/index.html\n -rw-r--r-- 0 root (0) root (0) 296750 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/indexing.html\n -rw-r--r-- 0 root (0) root (0) 24994 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/integer_na.html\n--rw-r--r-- 0 root (0) root (0) 807677 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/io.html\n+-rw-r--r-- 0 root (0) root (0) 807681 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/io.html\n -rw-r--r-- 0 root (0) root (0) 196130 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/merging.html\n -rw-r--r-- 0 root (0) root (0) 147153 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/missing_data.html\n -rw-r--r-- 0 root (0) root (0) 75718 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/options.html\n -rw-r--r-- 0 root (0) root (0) 163930 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/reshaping.html\n--rw-r--r-- 0 root (0) root (0) 73914 2021-01-12 
21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/scale.html\n+-rw-r--r-- 0 root (0) root (0) 73911 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/scale.html\n -rw-r--r-- 0 root (0) root (0) 59327 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/sparse.html\n -rw-r--r-- 0 root (0) root (0) 542658 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/style.html\n--rw-r--r-- 0 root (0) root (0) 39358 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/style.ipynb.gz\n+-rw-r--r-- 0 root (0) root (0) 39371 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/style.ipynb.gz\n -rw-r--r-- 0 root (0) root (0) 144670 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/text.html\n -rw-r--r-- 0 root (0) root (0) 85862 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/timedeltas.html\n -rw-r--r-- 0 root (0) root (0) 429323 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/timeseries.html\n -rw-r--r-- 0 root (0) root (0) 177623 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/user_guide/visualization.html\n -rw-r--r-- 0 root (0) root (0) 264 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/visualization.html\n drwxr-xr-x 0 root (0) root (0) 0 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/whatsnew/\n -rw-r--r-- 0 root (0) root (0) 57200 2021-01-12 21:06:04.000000 ./usr/share/doc/python-pandas-doc/html/whatsnew/index.html\n"}, {"source1": "./usr/share/doc/python-pandas-doc/html/searchindex.js", "source2": "./usr/share/doc/python-pandas-doc/html/searchindex.js", "unified_diff": null, "details": [{"source1": "js-beautify {}", "source2": "js-beautify {}", "unified_diff": "@@ -2415,23 +2415,23 @@\n \"000857\": 2190,\n \"000882\": 2190,\n \"000895\": 2195,\n \"000951\": 2187,\n \"000969\": 2192,\n \"000985\": 2203,\n \"000z\": [2199, 2233],\n- \"001\": [537, 873, 1390, 2193, 2230, 2262],\n+ \"001\": [537, 873, 1390, 2230, 2262],\n \"001000\": 2208,\n \"001283\": 2199,\n \"001294\": 2209,\n \"001427\": 2212,\n \"001486\": [96, 1158],\n \"001675\": 2190,\n \"001971\": 2203,\n- \"002\": 2262,\n+ \"002\": [2193, 2262],\n \"002000\": 2230,\n \"002040\": [2206, 2233],\n \"002118\": [2228, 2229],\n \"002601\": 2190,\n \"002698\": 2190,\n \"002726\": 2199,\n \"002759\": 2203,\n@@ -2455,15 +2455,15 @@\n \"004194\": 2187,\n \"004201\": 2187,\n \"004229\": 2187,\n \"004474\": 2185,\n \"004580\": 2209,\n \"004772\": 2199,\n \"00486\": 26,\n- \"005\": 2208,\n+ \"005\": [2193, 2208],\n \"005000\": 2216,\n \"005011\": 2195,\n \"005087\": 2190,\n \"005262\": 2192,\n \"005283\": 2199,\n \"005383\": 2218,\n \"005446\": 2217,\n@@ -2478,20 +2478,22 @@\n \"006438\": 2213,\n \"006549\": [180, 761],\n \"006695\": 2187,\n \"006733\": 2203,\n \"006747\": [2186, 2197, 2202, 2203, 2213],\n \"006888\": 2218,\n \"006889\": 2190,\n+ \"007\": 2193,\n \"007207\": [2185, 2212],\n \"007289885159540637\": 2190,\n \"007668\": 2199,\n \"007824\": 12,\n \"007996\": 2187,\n \"007f\": 201,\n+ \"008\": 2193,\n \"008097\": 2204,\n \"008277\": 2192,\n \"008298\": 2187,\n \"008500\": 12,\n \"008543\": [96, 1158],\n \"008943\": [96, 1158],\n \"009212\": 2190,\n@@ -2505,15 +2507,14 @@\n \"0100\": [443, 576, 891, 2187, 2199, 2209, 2244, 2269],\n \"010010012\": [926, 2208],\n \"010026\": 2192,\n \"010081\": 12,\n \"010589\": 2193,\n \"010670\": [96, 1158],\n \"0108\": 2255,\n- 
\"011\": 2193,\n \"011111\": [180, 761],\n \"011139\": 2190,\n \"011374\": 2195,\n \"011531\": 2201,\n \"011736\": 2187,\n \"01183\": 2227,\n \"012002\": [180, 761],\n@@ -2534,15 +2535,14 @@\n \"014138\": 2192,\n \"014144\": [96, 1158],\n \"014500\": 2199,\n \"014648\": 2187,\n \"014752\": 2233,\n \"014805\": 2202,\n \"014871\": [2186, 2197, 2202, 2203],\n- \"015\": 2193,\n \"015083\": 2187,\n \"015226\": 2199,\n \"015420\": 2195,\n \"015696\": [2218, 2226, 2228],\n \"015906\": 2187,\n \"015962\": [2185, 2212],\n \"015988\": 2187,\n@@ -2551,40 +2551,41 @@\n \"016331\": 2209,\n \"016424\": [13, 15],\n \"016548\": 2192,\n \"016692\": [2185, 2195, 2212],\n \"016739\": 2199,\n \"016828\": 2199,\n \"01685762652715874\": 626,\n- \"017\": 2193,\n \"017060\": 2190,\n \"017152\": 2187,\n \"017207\": 2199,\n \"017587\": [2185, 2195, 2212],\n \"017654\": 2185,\n \"018\": 2199,\n \"018117\": [2192, 2203],\n \"018169\": 2190,\n \"018501\": 2208,\n \"018587\": 2190,\n \"018596\": 2203,\n \"018601\": [2185, 2212],\n \"018617\": 2208,\n \"018993\": 2212,\n+ \"019\": 2193,\n \"019462\": 2195,\n \"019794\": 2197,\n \"019814\": 2186,\n \"01985\": 2201,\n \"019855\": 2201,\n \"01t00\": [906, 1233, 2165, 2199, 2209, 2233, 2244, 2259],\n \"01t01\": 2209,\n \"01t03\": 2209,\n \"01t05\": [910, 2209, 2233],\n \"01t12\": 955,\n \"01t23\": [443, 891, 2187, 2244],\n+ \"020\": 2193,\n \"0200\": 2209,\n \"020161\": [96, 1158],\n \"020208\": 2195,\n \"020399\": 2195,\n \"020433\": 2190,\n \"020544\": 2187,\n \"020762\": 2218,\n@@ -2594,25 +2595,23 @@\n \"021292\": 2187,\n \"02141\": 2201,\n \"021415\": 2201,\n \"021499\": 2187,\n \"02155\": 26,\n \"021605\": 2203,\n \"021833\": 2199,\n- \"022\": 2193,\n \"022070\": 2185,\n \"023\": [1369, 2200, 2230],\n \"023100\": 2195,\n \"023167\": 12,\n \"023640\": 2228,\n \"023688\": [12, 2186, 2192, 2197],\n \"023751\": 2199,\n \"023888\": 2187,\n \"023898\": 2195,\n- \"024\": 2193,\n \"024180\": [2186, 2197, 2202, 2203, 2213],\n \"024180e\": 2203,\n \"024320\": 2209,\n \"02458\": 2195,\n \"024580\": [2185, 2195, 2212],\n \"024721\": 2190,\n \"024738\": [96, 1158],\n@@ -2627,14 +2626,15 @@\n \"026158\": 2209,\n \"026437\": 2197,\n \"026458\": [2192, 2214],\n \"0266708\": 2202,\n \"0267\": 2202,\n \"026844\": 2185,\n \"026922\": 2195,\n+ \"027\": 2193,\n \"027778\": [62, 103, 104, 124, 168, 170, 171, 197, 202, 204, 213, 214, 215, 218, 219, 220, 242, 243, 272],\n \"027798\": 2193,\n \"027965\": 2199,\n \"028096\": 2209,\n \"028115\": 2203,\n \"028166\": 12,\n \"028525\": 2190,\n@@ -2688,39 +2688,38 @@\n \"035852\": 2204,\n \"035962\": 2187,\n \"036047\": 2212,\n \"036094\": 2190,\n \"036142\": [2218, 2229],\n \"0362\": 2202,\n \"0362196\": 2202,\n- \"037\": 2193,\n \"037181\": 2192,\n \"037528\": [2206, 2233],\n \"037577\": 2185,\n \"037697\": 2195,\n \"037772\": 2212,\n \"037870\": 2190,\n \"037882\": [2185, 2212],\n \"038\": [1369, 2200, 2230],\n \"038237\": 2190,\n \"038378\": 2190,\n \"038402\": 2197,\n+ \"039\": 2193,\n \"039266\": 2213,\n \"039268\": [12, 2186, 2187, 2197, 2199, 2202, 2213, 2214, 2216, 2217, 2233, 2239, 2262],\n \"0392684835147725\": 2199,\n \"0393\": 2187,\n \"039548\": 2199,\n \"0395749851146963\": 2199,\n \"039575\": [12, 2185, 2186, 2187, 2192, 2195, 2197, 2199, 2201, 2202, 2203, 2209, 2212, 2213, 2216, 2223, 2224, 2239, 2258],\n \"0396\": [2185, 2187],\n \"039926\": 2209,\n \"03c\": 2207,\n \"03t00\": [2199, 2209, 2233, 2259],\n \"03t05\": [910, 2209],\n- \"040\": 2193,\n \"0400\": [2220, 2269],\n \"040039\": 2214,\n \"0405\": [180, 761],\n \"040863\": 
2187,\n \"041\": [1369, 2200, 2230],\n \"041242\": 2199,\n \"041290\": 2197,\n@@ -2918,29 +2917,32 @@\n \"074315\": 2197,\n \"074515\": 2190,\n \"074597\": 2209,\n \"074752\": 2209,\n \"074978\": [2218, 2229],\n \"075\": [1369, 2200, 2203, 2230],\n \"0750\": [20, 21],\n+ \"075158\": 2226,\n \"075381\": 2228,\n \"075499\": 2217,\n \"075531\": 2195,\n \"075758\": 2195,\n \"0757697837155533\": 2199,\n \"07577\": 2239,\n \"075770\": [12, 2186, 2187, 2197, 2199, 2203, 2213, 2214, 2216, 2217, 2233, 2239, 2262],\n \"075792\": 2199,\n+ \"076052\": 2226,\n \"076404\": 2197,\n \"076467\": [12, 2186, 2197, 2202, 2203, 2213, 2255],\n \"076524\": 2214,\n \"076610\": [2185, 2255],\n \"076651\": 2197,\n \"076676\": 2195,\n \"076693\": 2199,\n+ \"077\": 2193,\n \"077118\": [2185, 2195, 2212],\n \"077144\": 2204,\n \"077324\": 2195,\n \"077692\": 2203,\n \"078638\": [2186, 2197, 2202, 2203],\n \"078718\": 2197,\n \"079150\": 2186,\n@@ -3009,27 +3011,25 @@\n \"08e1bfbf8723\": 15,\n \"08t00\": 2259,\n \"090118\": 2217,\n \"090255\": 2197,\n \"091\": [2187, 2225],\n \"091430\": 12,\n \"091886\": 12,\n- \"092\": 2193,\n \"092225\": 2187,\n \"0923385948\": 2199,\n \"092732\": 2199,\n \"092759\": 2216,\n \"092903\": 2212,\n \"092961\": 2199,\n \"092970\": 2190,\n \"093110\": 2195,\n \"093214\": 2199,\n \"093650\": 2217,\n \"093787\": 2190,\n- \"093d7726d2db\": 2210,\n \"094055\": [2192, 2197],\n \"0941\": 2203,\n \"094104\": 2195,\n \"094214\": 2190,\n \"094709\": 2227,\n \"095025\": 2209,\n \"095031\": 2197,\n@@ -3060,41 +3060,41 @@\n \"0b11111111\": 912,\n \"0bac803e32dc42ae83fddfd029cbdebc\": 2199,\n \"0d501c078554\": [13, 15],\n \"0dac417a4890\": 2201,\n \"0de747740af\": 14,\n \"0em\": 2206,\n \"0th\": [22, 248, 880, 1184, 2197, 2233],\n- \"0x762ee9d0\": 2228,\n \"0x7efd0c0b0690\": 2,\n- \"0x920e6da8\": 2210,\n- \"0x920f2460\": 2210,\n- \"0x92155730\": 2210,\n- \"0x9721eb20\": 2209,\n- \"0x9730f1c0\": 2210,\n- \"0x99c8c2e0\": 2244,\n- \"0x9d0c82e0\": 2195,\n- \"0x9dd3d3d0\": 2199,\n- \"0x9e225aa8\": 2199,\n- \"0x9f56cdc0\": 2185,\n- \"0xa29ca4a8\": 2197,\n+ \"0xb378b220\": 2244,\n+ \"0xcfbd1370\": 2228,\n+ \"0xd75dce38\": 2209,\n+ \"0xd7cb61f0\": 2210,\n+ \"0xd8eb3298\": 2195,\n+ \"0xd9301b68\": 2210,\n+ \"0xdb9eadc0\": 2197,\n+ \"0xdbdf8028\": 2199,\n+ \"0xdcaadfd0\": 2199,\n+ \"0xddec6688\": 2210,\n+ \"0xddf39298\": 2210,\n+ \"0xdf1b16a0\": 2185,\n \"100\": [2, 12, 26, 61, 91, 106, 113, 127, 130, 136, 155, 158, 173, 190, 200, 205, 210, 211, 231, 298, 337, 351, 352, 434, 578, 589, 590, 622, 656, 709, 717, 783, 789, 790, 1309, 1327, 1334, 1369, 1380, 1395, 1410, 1411, 1419, 2185, 2186, 2187, 2189, 2190, 2192, 2193, 2194, 2195, 2197, 2199, 2200, 2201, 2202, 2203, 2204, 2206, 2207, 2208, 2209, 2210, 2216, 2218, 2220, 2221, 2223, 2224, 2228, 2229, 2230, 2233, 2239, 2240, 2244, 2247],\n \"1000\": [3, 12, 20, 21, 24, 25, 28, 96, 136, 189, 191, 192, 434, 717, 768, 769, 770, 873, 1154, 1158, 1379, 1388, 1390, 1419, 2185, 2186, 2187, 2189, 2190, 2193, 2195, 2199, 2205, 2206, 2209, 2210, 2218, 2221, 2227, 2228, 2233, 2236, 2244, 2247, 2259],\n \"10000\": [190, 1408, 2186, 2199, 2201, 2205, 2209, 2218, 2226, 2264],\n \"100000\": [1311, 2199, 2201, 2209],\n \"1000000\": [139, 2199, 2226],\n \"1000x5\": 2205,\n \"1001\": [2186, 2195, 2204],\n \"100123\": 2223,\n \"100230\": 2185,\n \"1003\": [26, 2204],\n \"1007\": 2204,\n \"100780\": 2187,\n \"1009\": [13, 14, 15, 2199, 2204, 2233],\n- \"101\": [205, 783, 2185, 2186, 2187, 2189, 2190, 2192, 2193, 2195, 2197, 2199, 2200, 2201, 2202, 2203, 2204, 2207, 2208, 2209, 2210, 
2216, 2220, 2228, 2229, 2230, 2233, 2244],\n+ \"101\": [205, 783, 2185, 2186, 2187, 2189, 2190, 2192, 2195, 2197, 2199, 2200, 2201, 2202, 2203, 2204, 2207, 2208, 2209, 2210, 2216, 2220, 2228, 2229, 2230, 2233, 2244],\n \"1010\": [13, 14, 15, 2199, 2204, 2233],\n \"1011\": [13, 14, 15, 2199, 2233],\n \"1013\": 23,\n \"101401\": 2203,\n \"1015\": 2204,\n \"1016\": 2204,\n \"101684\": 2186,\n@@ -3220,15 +3220,15 @@\n \"11300\": [174, 177, 755, 758, 1229, 1230],\n \"113057\": 2192,\n \"113208\": 2190,\n \"113308\": 2258,\n \"1136\": [2185, 2187],\n \"113648\": [12, 2185, 2186, 2187, 2197, 2199, 2201, 2202, 2209, 2212, 2213, 2214, 2216, 2217, 2233, 2239, 2262],\n \"11364840968888545\": 2199,\n- \"114\": [266, 2185, 2186, 2187, 2189, 2190, 2192, 2195, 2197, 2199, 2200, 2201, 2203, 2207, 2208, 2209, 2210, 2216, 2218, 2220, 2228, 2230, 2233, 2244],\n+ \"114\": [266, 2185, 2186, 2187, 2189, 2190, 2192, 2193, 2195, 2197, 2199, 2200, 2201, 2203, 2207, 2208, 2209, 2210, 2216, 2218, 2220, 2228, 2230, 2233, 2244],\n \"114285\": 2187,\n \"1147\": 2202,\n \"114722\": 2197,\n \"1147378\": 2202,\n \"114987\": 2201,\n \"115\": [280, 2185, 2186, 2187, 2189, 2190, 2192, 2195, 2197, 2199, 2200, 2201, 2203, 2207, 2208, 2209, 2210, 2216, 2220, 2228, 2229, 2230, 2233, 2244],\n \"11503\": 2255,\n@@ -3354,15 +3354,15 @@\n \"129\": [2185, 2186, 2187, 2189, 2192, 2195, 2197, 2199, 2200, 2201, 2203, 2207, 2209, 2210, 2212, 2223, 2230, 2233],\n \"129149\": 2190,\n \"1297\": [13, 14, 15, 2233],\n \"129820\": [2206, 2233],\n \"12h\": [78, 597, 2209, 2229, 2237, 2238],\n \"12pt\": 2206,\n \"12th\": 2199,\n- \"130\": [12, 1366, 2185, 2186, 2187, 2189, 2192, 2193, 2195, 2197, 2199, 2200, 2201, 2203, 2207, 2209, 2210, 2223, 2230, 2233],\n+ \"130\": [12, 1366, 2185, 2186, 2187, 2189, 2192, 2195, 2197, 2199, 2200, 2201, 2203, 2207, 2209, 2210, 2223, 2230, 2233],\n \"1300\": [13, 14, 15, 1419, 2199, 2233],\n \"13000\": [2186, 2218],\n \"13000101\": 1419,\n \"1301\": [13, 14, 15, 2199, 2233],\n \"130121\": 2186,\n \"130127\": [12, 2186, 2197, 2202, 2203, 2213],\n \"1302\": [13, 14, 15, 2199, 2233],\n@@ -3480,20 +3480,20 @@\n \"141295\": 2197,\n \"141361\": 2214,\n \"1415\": [2203, 2255],\n \"1416\": [13, 14, 15, 2199, 2233],\n \"1417\": [13, 14, 15, 2199, 2233],\n \"1418\": [13, 14, 15, 2199, 2233],\n \"141809\": 2212,\n- \"142\": [2185, 2186, 2187, 2189, 2195, 2197, 2199, 2200, 2201, 2203, 2206, 2209, 2210, 2230],\n+ \"142\": [2185, 2186, 2187, 2189, 2193, 2195, 2197, 2199, 2200, 2201, 2203, 2206, 2209, 2210, 2230],\n \"1420043460\": 2229,\n \"142856\": 2216,\n \"142903\": 2190,\n \"142913\": 2190,\n- \"143\": [2185, 2186, 2187, 2189, 2193, 2195, 2197, 2199, 2200, 2201, 2203, 2206, 2209, 2210, 2230],\n+ \"143\": [2185, 2186, 2187, 2189, 2195, 2197, 2199, 2200, 2201, 2203, 2206, 2209, 2210, 2230],\n \"143539\": 2199,\n \"143608\": [2186, 2203],\n \"143682\": 2199,\n \"143704\": [2185, 2195, 2212],\n \"143767\": 2228,\n \"143778\": 2228,\n \"144\": [588, 745, 2185, 2186, 2187, 2189, 2195, 2197, 2199, 2200, 2201, 2203, 2209, 2210, 2230],\n@@ -3630,15 +3630,15 @@\n \"164377\": 2199,\n \"165\": [2186, 2187, 2189, 2195, 2197, 2199, 2201, 2209, 2210],\n \"165548\": 2199,\n \"165562\": [2206, 2233],\n \"165640\": 2199,\n \"1658\": 2202,\n \"165937\": 2190,\n- \"166\": [2186, 2187, 2189, 2193, 2195, 2197, 2199, 2201, 2209, 2210],\n+ \"166\": [2186, 2187, 2189, 2195, 2197, 2199, 2201, 2209, 2210],\n \"166221\": 2186,\n \"166480\": 2195,\n \"166574\": 2226,\n \"166599\": 2185,\n \"166914\": 2187,\n \"167\": [2186, 2187, 2189, 2195, 2197, 
2199, 2201, 2209, 2210],\n \"167123\": 2187,\n@@ -3692,15 +3692,15 @@\n \"173636\": 2227,\n \"173926\": 2193,\n \"174\": [2186, 2187, 2189, 2193, 2195, 2197, 2199, 2201, 2209, 2210, 2233],\n \"1741\": 2202,\n \"174126\": 2195,\n \"174950\": 12,\n \"174966\": 2192,\n- \"175\": [113, 127, 130, 155, 158, 173, 2186, 2187, 2189, 2193, 2195, 2197, 2199, 2201, 2209, 2210],\n+ \"175\": [113, 127, 130, 155, 158, 173, 2186, 2187, 2189, 2195, 2197, 2199, 2201, 2209, 2210],\n \"1750\": [2186, 2244],\n \"175829\": 2227,\n \"176\": [2186, 2187, 2189, 2195, 2197, 2199, 2209, 2210, 2229],\n \"176180\": 2203,\n \"1764\": 2192,\n \"176444\": [12, 2186, 2192, 2197, 2202],\n \"176488\": 2199,\n@@ -3749,15 +3749,15 @@\n \"18446744073709551615\": [1420, 2244],\n \"184594\": 2209,\n \"1847\": 2202,\n \"1847350\": 2202,\n \"1847438576\": 2199,\n \"184744\": 2199,\n \"184940\": 2201,\n- \"185\": [13, 14, 15, 129, 709, 2186, 2187, 2189, 2193, 2195, 2197, 2199, 2209, 2210, 2233],\n+ \"185\": [13, 14, 15, 129, 709, 2186, 2187, 2189, 2195, 2197, 2199, 2209, 2210, 2233],\n \"185043\": 2195,\n \"185352\": 2214,\n \"1854\": [2197, 2229],\n \"185429\": [2185, 2195],\n \"185760\": 2197,\n \"185778\": 2197,\n \"186\": [2186, 2187, 2189, 2195, 2197, 2199, 2209, 2210],\n@@ -3951,16 +3951,16 @@\n \"2018q4\": [211, 790],\n \"2019\": [1, 7, 22, 23, 25, 26, 27, 76, 423, 426, 595, 1308, 1486, 2209, 2211, 2239, 2240, 2241, 2243, 2269],\n \"202\": [2186, 2187, 2189, 2195, 2197, 2199, 2209, 2263],\n \"2020\": [30, 76, 116, 227, 595, 697, 805, 1426, 1446, 1466, 1486, 1555, 1574, 2190, 2209, 2211],\n \"20200101\": [76, 595],\n \"2021\": [32, 1466],\n \"2022\": [1466, 1486],\n- \"2024\": [2187, 2225, 2226],\n- \"2025\": 2226,\n+ \"2025\": [2187, 2225, 2226],\n+ \"2026\": 2226,\n \"202646\": 2228,\n \"202765\": 2203,\n \"202872\": [2185, 2212],\n \"203\": [2186, 2187, 2189, 2195, 2197, 2199, 2209, 2229],\n \"2030\": 2263,\n \"203098\": 2187,\n \"203534\": 2199,\n@@ -4061,15 +4061,15 @@\n \"218\": [2186, 2187, 2189, 2193, 2195, 2197, 2199, 2209, 2218, 2229, 2263],\n \"218320\": 2190,\n \"218423\": 2199,\n \"218470\": 2193,\n \"218499\": 2199,\n \"218792\": 2228,\n \"218983\": 2215,\n- \"219\": [2186, 2187, 2189, 2195, 2197, 2199, 2209, 2263],\n+ \"219\": [2186, 2187, 2189, 2193, 2195, 2197, 2199, 2209, 2263],\n \"219217\": [2186, 2197, 2202, 2203],\n \"219477\": 2228,\n \"219565\": [2206, 2233],\n \"219673\": 12,\n \"21st\": 27,\n \"220\": [113, 127, 130, 155, 158, 173, 2186, 2187, 2189, 2195, 2197, 2199, 2209, 2218, 2263],\n \"2200\": [2209, 2263],\n@@ -4304,18 +4304,17 @@\n \"255743\": 2190,\n \"255e03c34a49\": [13, 15],\n \"256\": [2187, 2189, 2197, 2199, 2209, 2244],\n \"256057\": 2190,\n \"256090\": 2187,\n \"256348\": 2212,\n \"256725\": 2190,\n- \"257\": [2, 2187, 2189, 2197, 2199, 2209],\n+ \"257\": [2, 2187, 2189, 2193, 2197, 2199, 2209],\n \"257213\": 2187,\n \"257326\": 2228,\n- \"2575293880\": 2244,\n \"257606\": 2187,\n \"257623\": 2187,\n \"257759\": 2195,\n \"258\": [2187, 2189, 2195, 2197, 2199, 2209, 2225],\n \"2583\": 25,\n \"2583560\": [174, 177],\n \"258635\": 2204,\n@@ -4327,15 +4326,14 @@\n \"25th\": [101, 631, 1164],\n \"25x\": 2236,\n \"260\": [139, 745, 2187, 2189, 2197, 2199, 2209, 2233],\n \"260266\": 2195,\n \"260476\": [2222, 2226],\n \"261\": [2187, 2189, 2197, 2199, 2209, 2233],\n \"261385\": 2209,\n- \"2615542872\": 2244,\n \"261740\": 12,\n \"262\": [2187, 2189, 2197, 2199, 2209],\n \"2621\": 2202,\n \"2621358\": 2202,\n \"262140\": 1311,\n \"262144\": 2199,\n \"262150\": 1311,\n@@ -4540,14 +4538,15 @@\n 
\"299606\": 2199,\n \"299674\": 2197,\n \"29th\": [298, 656],\n \"2_item\": [64, 581],\n \"2bm\": 2209,\n \"2c21c770fc36\": 15,\n \"2d5h\": 2208,\n+ \"2d61\": 2210,\n \"2gb\": 2236,\n \"2h20min\": 2209,\n \"2min\": [211, 337, 790],\n \"2ms\": [1369, 2200],\n \"2nd\": [203, 637, 779, 1393, 2104, 2197, 2220, 2233],\n \"2to3\": [2216, 2251],\n \"2x2\": [1274, 1289],\n@@ -4557,32 +4556,34 @@\n \"300000\": [1311, 2209],\n \"3000000000\": 922,\n \"300218\": 12,\n \"3005000\": [918, 922],\n \"300647\": 2187,\n \"301\": [205, 783, 2187, 2197, 2199, 2209, 2229],\n \"301038\": 12,\n+ \"3011430504\": 2244,\n \"301149\": 2190,\n \"301239\": 2209,\n \"301379\": 2186,\n \"3016\": 2192,\n \"301624\": [2186, 2192, 2197, 2202],\n- \"302\": [2187, 2197, 2199, 2209],\n+ \"302\": [2186, 2187, 2197, 2199, 2209],\n \"302092\": 2195,\n \"3023325726\": 2199,\n \"302827\": 2197,\n \"303\": [2187, 2197, 2199, 2202, 2208, 2209, 2220],\n \"303099\": 2187,\n \"303202\": 2217,\n \"303398\": 2187,\n \"303421\": [2192, 2202],\n \"303886\": 2185,\n \"303893\": 2187,\n \"304\": [2187, 2197, 2199, 2209, 2217],\n \"3040\": 2202,\n+ \"3042626600\": 2244,\n \"304418\": 2197,\n \"304611\": 2197,\n \"305\": [2187, 2197, 2199, 2209],\n \"305260\": 2190,\n \"305384\": 2197,\n \"30586265\": [180, 761],\n \"306\": [2187, 2197, 2199, 2209],\n@@ -4596,17 +4597,14 @@\n \"307129\": 2190,\n \"307473\": 2187,\n \"307713\": 2199,\n \"308\": [2187, 2197, 2199, 2209],\n \"308013\": 2193,\n \"308127\": 2190,\n \"308146\": 2199,\n- \"3082048656\": 2236,\n- \"3082048672\": 2236,\n- \"3082048688\": 2236,\n \"3087\": 2255,\n \"308847\": [13, 15],\n \"308975\": 2209,\n \"309\": [2187, 2197, 2199, 2209],\n \"309040\": 2197,\n \"309059\": 2197,\n \"309500\": 2197,\n@@ -4921,15 +4919,15 @@\n \"368824\": 2228,\n \"369\": [2187, 2199, 2209],\n \"369374\": 2197,\n \"369552\": 2190,\n \"369650\": 2233,\n \"3698\": 2192,\n \"369849\": [2186, 2192, 2197, 2202],\n- \"370\": [2, 129, 709, 2186, 2187, 2199, 2209, 2239],\n+ \"370\": [2, 129, 709, 2187, 2199, 2209, 2239],\n \"370076\": 2228,\n \"370079\": 2209,\n \"370251\": 2228,\n \"370255\": 2197,\n \"370545\": 2190,\n \"3706\": 2187,\n \"3706468582364464\": 2199,\n@@ -4943,15 +4941,15 @@\n \"371900\": 2187,\n \"371983\": 2195,\n \"372\": [2187, 2199, 2209],\n \"373\": [2187, 2199, 2209],\n \"374\": [2187, 2193, 2199, 2209],\n \"374179\": 2192,\n \"374284\": 25,\n- \"375\": [2, 129, 2187, 2193, 2199, 2209, 2225],\n+ \"375\": [2, 129, 2187, 2199, 2209, 2225],\n \"375064\": 2190,\n \"375074\": 2190,\n \"3751\": 2267,\n \"375530\": 2204,\n \"375703\": 2199,\n \"376\": [2187, 2199, 2209],\n \"376750\": 2226,\n@@ -5165,14 +5163,17 @@\n \"414214\": [270, 899, 2210, 2218],\n \"414490\": 2192,\n \"414505\": 2233,\n \"414523\": 2204,\n \"414806\": 2187,\n \"415\": [2187, 2199, 2209, 2230],\n \"415245\": 2190,\n+ \"4156126352\": 2236,\n+ \"4156126368\": 2236,\n+ \"4156126384\": 2236,\n \"416\": [2187, 2199, 2209, 2230],\n \"416203\": 2187,\n \"417\": [2187, 2199, 2209, 2225, 2230],\n \"4170220047\": 2199,\n \"417535\": 2185,\n \"417884\": 2199,\n \"418\": [2187, 2199, 2209],\n@@ -5300,15 +5301,14 @@\n \"438572\": 2195,\n \"438692\": 2192,\n \"438713\": [2206, 2233],\n \"438921\": 2197,\n \"439\": [2187, 2199, 2209],\n \"439121\": 2190,\n \"4395\": 2202,\n- \"43cd\": 2210,\n \"440\": [2187, 2199, 2203, 2209],\n \"440455\": 2197,\n \"441\": [588, 2187, 2199, 2209],\n \"441177\": 2226,\n \"441652\": [2185, 2195, 2212],\n \"441779\": 2201,\n \"442\": [2187, 2199, 2209],\n@@ -5525,15 +5525,14 @@\n \"486514\": 2197,\n 
\"486567\": 2187,\n \"4867681\": 2202,\n \"4868\": 2202,\n \"487\": [2199, 2209],\n \"487602\": [2186, 2192, 2197, 2203],\n \"487645\": 12,\n- \"4877\": 2210,\n \"488\": [2199, 2209],\n \"488887\": 2212,\n \"489\": [193, 771, 2199, 2209],\n \"489266\": 2201,\n \"489682\": 2197,\n \"489811\": 2217,\n \"48e\": [2187, 2225],\n@@ -5595,14 +5594,15 @@\n \"499753\": 2195,\n \"4999\": [2194, 2220],\n \"49e\": [2187, 2225],\n \"4af8aafc38e1\": [13, 15],\n \"4b67dc0b596c\": 2209,\n \"4dx\": 2207,\n \"4e545755431b\": 2233,\n+ \"4e93\": 2210,\n \"4ecdc4\": 1384,\n \"4ef9dbe7035d\": 14,\n \"4pt\": 2206,\n \"4q2005\": 517,\n \"4th\": [2187, 2190, 2195, 2199, 2209, 2220, 2223],\n \"500\": [74, 190, 278, 594, 926, 1380, 2185, 2186, 2187, 2199, 2200, 2203, 2206, 2209, 2210, 2220, 2239, 2247],\n \"5000\": [25, 165, 2194, 2220],\n@@ -5664,15 +5664,15 @@\n \"512817\": 2204,\n \"513\": 2199,\n \"514\": 2199,\n \"514058\": 2203,\n \"514474\": 2209,\n \"514509\": 2190,\n \"514704\": 2186,\n- \"515\": [13, 14, 15, 2193, 2199, 2201, 2233],\n+ \"515\": [13, 14, 15, 2199, 2201, 2233],\n \"515018\": [2206, 2233],\n \"515707\": 2190,\n \"516\": [13, 14, 15, 2199, 2233],\n \"5167\": 25,\n \"516842\": 14,\n \"517\": [13, 14, 15, 2192, 2199, 2205, 2233],\n \"517098\": 2195,\n@@ -6094,28 +6094,26 @@\n \"622727\": 2195,\n \"622809\": 2193,\n \"622830\": 2193,\n \"623033\": 2185,\n \"6233601\": 2199,\n \"623893\": 2195,\n \"623977\": 2197,\n- \"624532\": 2226,\n \"624607\": 12,\n \"624988\": 2228,\n \"625\": [203, 779],\n \"625237\": 2203,\n \"625456\": 2190,\n \"6256\": 2202,\n \"625733\": 2228,\n \"626063\": 2193,\n \"626300\": [1274, 1289],\n \"6263001\": [1274, 1289],\n \"626404\": [2206, 2233],\n \"626444\": 12,\n- \"626668\": 2226,\n \"626968\": 2215,\n \"627081\": [2185, 2195, 2212],\n \"627390\": 2185,\n \"627712\": 2197,\n \"627786\": 2199,\n \"627796\": [2206, 2233],\n \"628174\": 2199,\n@@ -6242,15 +6240,14 @@\n \"667715\": 2187,\n \"668715\": 2199,\n \"669052\": [2185, 2212],\n \"669065\": 2190,\n \"669304\": 2190,\n \"669692\": 2197,\n \"669934\": 2214,\n- \"670\": 2193,\n \"670047\": 2187,\n \"670153\": 2197,\n \"670213\": [116, 697],\n \"670477\": 2215,\n \"671590\": 2185,\n \"672416\": 2190,\n \"672808\": 2201,\n@@ -6636,14 +6633,15 @@\n \"773900\": 12,\n \"774627\": 12,\n \"774650\": 2199,\n \"774753\": 2195,\n \"7750\": 25,\n \"775558e\": 2220,\n \"775880\": 2187,\n+ \"776\": 2204,\n \"776514\": 2190,\n \"776903716971867\": 2199,\n \"776904\": [2186, 2197, 2199, 2203, 2213, 2216, 2217, 2255],\n \"776977\": 2190,\n \"777642\": 2204,\n \"777777\": 2190,\n \"778033\": 2185,\n@@ -6658,14 +6656,15 @@\n \"779976\": 2228,\n \"780\": [2187, 2225],\n \"780489\": 2192,\n \"780506\": 2199,\n \"781105\": 2218,\n \"781163\": 2199,\n \"781993\": 2192,\n+ \"782\": 2204,\n \"782234\": 2185,\n \"782376\": 2212,\n \"782753\": 2199,\n \"782797\": 2195,\n \"783051\": 2217,\n \"783123\": 2187,\n \"783392\": 2204,\n@@ -6697,15 +6696,14 @@\n \"792652\": 2197,\n \"793615\": 2228,\n \"793644\": 2199,\n \"793893\": 2195,\n \"794212\": 2203,\n \"794317\": 2228,\n \"794667\": 2204,\n- \"795\": 2193,\n \"796421\": 2195,\n \"797\": 2193,\n \"798\": 2193,\n \"798110\": 2195,\n \"7983\": 2202,\n \"7983341\": 2202,\n \"798494\": 2199,\n@@ -7078,15 +7076,14 @@\n \"899260\": 12,\n \"899303\": 2192,\n \"899734\": 12,\n \"899904\": 2203,\n \"8a2e\": 2239,\n \"8ab2297b7141\": [13, 14, 15],\n \"8ba55b61d48c\": 2199,\n- \"8d81\": 2210,\n \"8f10c7fd9b13\": 14,\n \"900\": [2199, 2201, 2206],\n \"9000\": [2186, 2218],\n \"900000\": 2216,\n 
\"90010907\": 626,\n \"900628\": 2192,\n \"900d99c3802f\": 14,\n@@ -7392,15 +7389,15 @@\n \"995761\": [2186, 2197, 2202, 2203],\n \"996\": [449, 909, 2185, 2193, 2195, 2199, 2204, 2228],\n \"996000\": 2230,\n \"996428\": 2216,\n \"996632\": 2190,\n \"997\": [2185, 2187, 2193, 2195, 2199, 2228],\n \"997345\": [2168, 2216, 2227],\n- \"998\": [2185, 2187, 2193, 2195, 2199, 2216, 2228],\n+ \"998\": [2185, 2187, 2193, 2195, 2199, 2228],\n \"9980\": 2202,\n \"998000\": 2230,\n \"998438\": [96, 1158],\n \"999\": [854, 860, 1277, 1292, 2185, 2187, 2193, 2195, 2199, 2202, 2204, 2225, 2227, 2228, 2230, 2259],\n \"999359\": 27,\n \"9995\": 2230,\n \"999552\": 2205,\n@@ -8208,17 +8205,17 @@\n ayd: 2246,\n azur: [1, 2269],\n b10: 2200,\n b11: 2200,\n b1970: [2168, 2203, 2216],\n b1980: [2168, 2203, 2216],\n b22: [2207, 2228],\n+ b2c1: 2210,\n b42: 20,\n b49fdb567646: 2204,\n- b4d04735: 2210,\n b77: 28,\n b832ec9cf6b: 2199,\n b921d1a: 2218,\n b_a: 2203,\n b_b: [2203, 2220],\n b_c: [2203, 2207, 2220],\n b_col: [64, 581],\n@@ -9180,16 +9177,18 @@\n customiz: 2218,\n cut: [12, 370, 449, 459, 473, 700, 909, 1127, 1325, 2187, 2189, 2203, 2216, 2218, 2221, 2232, 2233, 2234, 2239, 2244, 2247, 2259, 2263, 2269, 2272],\n cuyahoga: [1366, 2199],\n cx_freez: 2222,\n cyberpanda: 2239,\n cycl: [1, 13, 14, 15, 2187, 2199, 2233, 2239, 2248],\n cython: [1, 6, 10, 18, 30, 1149, 1185, 1225, 1232, 1273, 1288, 2187, 2190, 2196, 2199, 2216, 2218, 2220, 2223, 2224, 2226, 2231, 2233, 2239, 2244, 2251, 2252, 2253, 2255, 2259, 2263, 2264, 2269, 2272],\n+ d0c51297: 2210,\n d10: 2200,\n d11: 2200,\n+ d1edcd0a0541: 2210,\n d4bce7d5df53: 2199,\n d65f5f: [1334, 2206],\n d7f5e5528e52: [13, 15],\n d_2037: 2209,\n d_2038: 2209,\n d_e: 2207,\n dade: [1366, 2199],\n@@ -10023,15 +10022,15 @@\n favourit: 1,\n fay: [1366, 2199],\n fc48c260e052: 14,\n feather: [18, 253, 255, 886, 1394, 2175, 2196, 2233, 2244],\n feather_format: 2199,\n featur: [1, 7, 9, 10, 11, 13, 15, 20, 21, 24, 25, 26, 28, 30, 1392, 1396, 1409, 2186, 2187, 2190, 2192, 2196, 2199, 2200, 2206, 2209, 2211, 2214, 2216, 2217, 2218, 2219, 2221, 2222, 2224, 2240, 2241, 2243, 2244, 2245, 2246, 2247],\n feb152015: 15,\n- feb: 2209,\n+ feb: [2187, 2209, 2225],\n februari: [298, 309, 656, 667, 2199, 2209, 2211],\n feder: 11,\n fedora: [1, 18],\n feed: [11, 2203],\n feedback: [2206, 2227, 2233, 2238],\n feedtyp: 12,\n feel: [1, 2, 7, 11, 26, 2187, 2194, 2199, 2204, 2226],\n@@ -10215,15 +10214,15 @@\n freez: [2199, 2233],\n freeze_pan: [252, 884, 1356, 2199, 2233],\n french: [11, 180, 761],\n freq: [27, 73, 78, 82, 101, 102, 122, 153, 180, 185, 205, 206, 211, 227, 262, 268, 273, 274, 284, 285, 287, 288, 289, 291, 294, 298, 301, 302, 303, 304, 306, 307, 308, 309, 310, 311, 313, 314, 315, 316, 320, 321, 325, 326, 328, 330, 337, 403, 404, 405, 429, 443, 517, 518, 519, 521, 523, 527, 534, 536, 537, 538, 543, 545, 546, 563, 564, 576, 593, 597, 603, 631, 632, 641, 642, 645, 646, 647, 653, 655, 656, 659, 660, 661, 662, 663, 665, 666, 667, 668, 670, 673, 674, 677, 680, 681, 683, 685, 686, 688, 690, 704, 734, 761, 764, 783, 784, 790, 805, 891, 892, 896, 902, 903, 917, 919, 923, 933, 941, 942, 945, 950, 955, 958, 969, 975, 1000, 1062, 1111, 1123, 1125, 1143, 1146, 1147, 1164, 1165, 1175, 1179, 1181, 1186, 1215, 1236, 1237, 1238, 1239, 1240, 1243, 1254, 1308, 1315, 1329, 1359, 1364, 1365, 1371, 1372, 1375, 1415, 1417, 1418, 1419, 1421, 1422, 2165, 2169, 2185, 2186, 2187, 2189, 2190, 2195, 2197, 2199, 2201, 2203, 2208, 2209, 2210, 2212, 2214, 2215, 2216, 2218, 2219, 2220, 2221, 2224, 2225, 2226, 
2228, 2229, 2230, 2231, 2233, 2236, 2238, 2239, 2244, 2259, 2262, 2263, 2265, 2269],\n freqstr: [517, 1426, 1446, 1466, 1486, 1506, 1529, 1555, 1574, 1596, 1619, 1645, 1671, 1697, 1716, 1736, 1755, 1779, 1805, 1825, 1846, 1866, 1886, 1906, 1925, 1944, 1964, 1984, 2004, 2024, 2044, 2064, 2084, 2104, 2125, 2145, 2219, 2223, 2228, 2230, 2236, 2239],\n frequenc: [6, 30, 73, 101, 120, 129, 169, 211, 216, 227, 262, 268, 274, 279, 284, 285, 291, 292, 293, 297, 313, 315, 320, 321, 337, 429, 449, 473, 476, 477, 478, 518, 520, 537, 538, 540, 543, 544, 545, 546, 553, 554, 564, 593, 631, 642, 653, 673, 680, 702, 709, 790, 795, 805, 892, 896, 903, 909, 919, 923, 933, 941, 942, 945, 946, 950, 958, 969, 1000, 1064, 1119, 1125, 1127, 1130, 1131, 1132, 1143, 1147, 1164, 1167, 1179, 1181, 1186, 1239, 1240, 1243, 1254, 1306, 1308, 1315, 1328, 1329, 1375, 1418, 2175, 2185, 2186, 2190, 2195, 2196, 2199, 2203, 2204, 2210, 2212, 2215, 2216, 2217, 2218, 2220, 2222, 2223, 2225, 2228, 2229, 2230, 2233, 2236, 2237, 2239, 2241, 2244, 2245, 2247, 2255, 2259, 2262, 2263, 2269, 2272, 2274],\n frequent: [30, 279, 449, 909, 2187, 2196, 2197, 2203, 2236, 2244],\n- fri: [14, 1147, 1596, 2209, 2215],\n+ fri: [14, 1147, 1596, 2187, 2209, 2215, 2225],\n fridai: [1755, 1779, 1825, 2104, 2209, 2215, 2229],\n friend: [1309, 1327, 1410, 1411, 2202, 2215, 2216],\n friendli: [11, 18, 258, 259, 267, 888, 890, 2168, 2196, 2203, 2223, 2237, 2260],\n friendlier: [2228, 2255],\n frm: 2186,\n from: [1, 2, 4, 6, 7, 10, 11, 12, 14, 19, 20, 22, 24, 25, 27, 28, 29, 30, 31, 39, 44, 55, 58, 61, 65, 66, 68, 70, 71, 72, 82, 85, 89, 90, 93, 94, 95, 96, 101, 106, 109, 111, 118, 121, 125, 126, 128, 136, 139, 149, 156, 160, 164, 166, 180, 182, 183, 184, 188, 189, 190, 192, 195, 196, 201, 210, 211, 212, 216, 221, 227, 238, 239, 247, 248, 249, 250, 251, 254, 255, 256, 257, 258, 262, 263, 264, 265, 266, 267, 269, 273, 274, 276, 277, 278, 279, 282, 283, 284, 325, 326, 334, 337, 344, 356, 357, 359, 360, 361, 367, 370, 373, 375, 376, 377, 379, 385, 387, 425, 427, 436, 437, 447, 448, 449, 452, 459, 473, 476, 477, 478, 479, 492, 495, 496, 497, 498, 499, 509, 512, 517, 534, 535, 537, 570, 574, 576, 578, 585, 588, 603, 615, 619, 621, 624, 631, 637, 640, 685, 686, 693, 699, 700, 703, 708, 717, 729, 730, 737, 740, 741, 761, 763, 767, 768, 770, 773, 789, 790, 791, 795, 800, 801, 805, 811, 818, 820, 823, 830, 831, 833, 834, 850, 853, 856, 861, 862, 863, 864, 865, 867, 873, 879, 880, 881, 882, 883, 886, 887, 888, 892, 893, 894, 897, 902, 903, 905, 906, 908, 909, 913, 914, 926, 955, 973, 974, 976, 986, 1013, 1017, 1020, 1034, 1038, 1039, 1056, 1059, 1062, 1064, 1072, 1076, 1078, 1080, 1094, 1095, 1107, 1114, 1116, 1122, 1123, 1124, 1125, 1126, 1127, 1130, 1131, 1132, 1143, 1144, 1145, 1147, 1148, 1149, 1154, 1155, 1156, 1158, 1159, 1164, 1168, 1180, 1184, 1185, 1186, 1195, 1202, 1204, 1211, 1212, 1223, 1225, 1232, 1233, 1239, 1240, 1243, 1245, 1254, 1273, 1288, 1307, 1309, 1311, 1320, 1324, 1325, 1327, 1333, 1340, 1341, 1357, 1359, 1367, 1368, 1370, 1375, 1376, 1377, 1378, 1380, 1381, 1386, 1391, 1392, 1393, 1394, 1395, 1396, 1397, 1399, 1400, 1401, 1402, 1403, 1404, 1405, 1407, 1409, 1410, 1411, 1414, 1415, 1419, 1420, 1421, 1422, 1426, 1446, 1466, 1486, 1555, 1574, 1596, 1697, 2165, 2168, 2185, 2186, 2188, 2189, 2190, 2193, 2194, 2195, 2197, 2198, 2200, 2201, 2202, 2203, 2204, 2205, 2206, 2207, 2208, 2210, 2212, 2213, 2214, 2215, 2216, 2217, 2218, 2219, 2220, 2221, 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2229, 2230, 2231, 2234, 2235, 2236, 2237, 2238, 2240, 
2245, 2246, 2248, 2251, 2252, 2253, 2254, 2255, 2256, 2257, 2258, 2259, 2260, 2261, 2262, 2265, 2268, 2269, 2271, 2272, 2274],\n from_a: 2203,\n@@ -16562,15 +16561,15 @@\n itr: 1408,\n its: [1, 2, 4, 6, 7, 10, 11, 12, 13, 15, 30, 34, 116, 121, 139, 149, 181, 189, 192, 205, 206, 216, 263, 271, 278, 293, 345, 357, 373, 429, 454, 455, 456, 458, 459, 479, 499, 534, 554, 599, 636, 697, 703, 751, 762, 768, 770, 783, 784, 795, 893, 926, 1029, 1030, 1031, 1033, 1036, 1044, 1125, 1126, 1142, 1146, 1191, 1306, 1309, 1327, 1330, 1361, 1382, 1410, 1411, 1419, 1697, 2186, 2187, 2189, 2190, 2192, 2193, 2195, 2197, 2198, 2199, 2200, 2202, 2203, 2204, 2206, 2207, 2209, 2216, 2218, 2219, 2220, 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2229, 2230, 2233, 2236, 2237, 2239, 2244, 2247, 2252, 2262, 2263, 2269, 2273, 2274],\n itself: [1, 4, 6, 7, 10, 25, 201, 345, 434, 490, 854, 860, 975, 1036, 1141, 1330, 1415, 2187, 2189, 2195, 2197, 2199, 2205, 2207, 2209, 2210, 2212, 2218, 2224, 2225, 2228, 2233, 2239, 2244],\n ivs: [463, 481, 1133],\n ix3: 2210,\n jack: [2207, 2224],\n jacqu: 28,\n- jan: [32, 537, 2187, 2209, 2220, 2225, 2226],\n+ jan: [32, 537, 2209, 2220, 2226],\n jancauska: 2259,\n jane: [13, 15, 2247],\n janschulz: 2216,\n januari: [308, 309, 546, 558, 666, 667, 1009, 1014, 1419, 2199, 2208, 2209, 2211, 2218, 2239, 2240, 2241, 2243, 2245],\n jarqu: [2187, 2225],\n javascript: 2199,\n jbrockmendel: [2245, 2246],\n@@ -19317,21 +19316,21 @@\n syntax: [1, 2, 201, 1324, 1396, 1405, 1406, 2186, 2187, 2192, 2195, 2199, 2216, 2227, 2228, 2233, 2236],\n syntaxerror: [2193, 2199, 2233],\n sys: [13, 14, 15, 18, 139, 266, 1396, 2199, 2204, 2228, 2233, 2269],\n system: [1, 2, 4, 6, 10, 11, 18, 85, 165, 249, 410, 745, 881, 1100, 1381, 1412, 2169, 2187, 2192, 2194, 2199, 2202, 2236, 2248, 2256],\n sytl: 2206,\n t05h: 926,\n t5h: 926,\n- t_1d4562b2_b114_11ee_8b43_59a83b27c78b: 2206,\n- t_1d4562b2_b114_11ee_8b43_59a83b27c78blevel0_row0: 2206,\n- t_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col0: 2206,\n- t_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col1: 2206,\n- t_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col2: 2206,\n- t_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col3: 2206,\n- t_1d4562b2_b114_11ee_8b43_59a83b27c78brow3_col3: 2206,\n+ t_4056c92e_ea13_11ef_835c_39a090989f27: 2206,\n+ t_4056c92e_ea13_11ef_835c_39a090989f27level0_row0: 2206,\n+ t_4056c92e_ea13_11ef_835c_39a090989f27row0_col0: 2206,\n+ t_4056c92e_ea13_11ef_835c_39a090989f27row0_col1: 2206,\n+ t_4056c92e_ea13_11ef_835c_39a090989f27row0_col2: 2206,\n+ t_4056c92e_ea13_11ef_835c_39a090989f27row0_col3: 2206,\n+ t_4056c92e_ea13_11ef_835c_39a090989f27row3_col3: 2206,\n tab: [1, 11, 13, 15, 22, 872, 1409, 2185, 2190, 2192, 2195, 2197, 2210, 2216, 2223, 2226, 2228, 2234, 2239, 2240, 2241, 2249, 2252, 2263],\n tabl: [2, 4, 11, 12, 13, 14, 15, 17, 18, 20, 21, 22, 23, 24, 27, 28, 29, 30, 31, 164, 182, 183, 184, 239, 254, 255, 256, 257, 258, 261, 265, 277, 338, 342, 343, 354, 443, 592, 763, 870, 886, 887, 888, 891, 894, 906, 1233, 1306, 1309, 1319, 1327, 1330, 1331, 1346, 1352, 1353, 1359, 1366, 1367, 1376, 1377, 1392, 1393, 1394, 1395, 1397, 1398, 1399, 1400, 1401, 1402, 1403, 1405, 1406, 1407, 1408, 1409, 1410, 1411, 2165, 2175, 2187, 2188, 2189, 2192, 2194, 2196, 2197, 2200, 2201, 2207, 2212, 2213, 2214, 2215, 2216, 2217, 2218, 2220, 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2229, 2230, 2232, 2236, 2237, 2240, 2244, 2246, 2247, 2254, 2255, 2259, 2263, 2269],\n table_attribut: [1330, 1347],\n table_id: 256,\n table_nam: [1407, 2199],\n table_schema: [254, 1309, 1327, 1410, 
1411, 2202, 2233],\n table_styl: [1330, 1347, 1353, 2206],\n@@ -19501,15 +19500,15 @@\n three: [1, 2, 12, 18, 25, 26, 121, 131, 132, 174, 177, 247, 703, 712, 755, 758, 847, 856, 879, 955, 1124, 1144, 1147, 1169, 1185, 1229, 1230, 1232, 1279, 1294, 1302, 1307, 1308, 1328, 1329, 1375, 1392, 1393, 1409, 1418, 2185, 2187, 2188, 2189, 2190, 2192, 2193, 2195, 2197, 2199, 2200, 2201, 2203, 2207, 2213, 2214, 2236, 2237, 2244, 2245, 2258, 2263],\n thresh: 109,\n threshold: [86, 96, 273, 618, 902, 1158, 1309, 1327, 1333, 1410, 1411, 2190, 2199, 2202, 2224, 2236, 2243, 2244, 2247, 2248],\n threw: 2216,\n through: [1, 2, 4, 11, 13, 14, 15, 18, 30, 32, 55, 85, 188, 203, 211, 337, 388, 443, 615, 767, 779, 790, 825, 870, 891, 1043, 1081, 1149, 1185, 1225, 1232, 1273, 1288, 1347, 1381, 1408, 2169, 2187, 2190, 2192, 2193, 2196, 2200, 2201, 2202, 2206, 2208, 2211, 2212, 2216, 2218, 2220, 2223, 2225, 2227, 2228, 2230, 2233, 2239, 2244, 2252, 2255, 2256, 2258, 2259, 2263],\n throughout: [0, 1, 10, 13, 14, 15, 861, 865, 2189, 2196, 2215, 2247, 2253, 2259, 2263, 2269],\n thrown: [227, 274, 805, 903, 1186, 1312, 2187, 2197, 2217, 2233, 2236, 2247],\n- thu: [1, 4, 30, 250, 370, 700, 882, 1147, 1325, 1596, 1697, 2187, 2190, 2192, 2193, 2194, 2195, 2197, 2199, 2207, 2209, 2215, 2216, 2225, 2239, 2252, 2255, 2258, 2259],\n+ thu: [1, 4, 30, 250, 370, 700, 882, 1147, 1325, 1596, 1697, 2187, 2190, 2192, 2193, 2194, 2195, 2197, 2199, 2207, 2209, 2215, 2216, 2239, 2252, 2255, 2258, 2259],\n thumb: 2193,\n thur: 14,\n thursdai: [1755, 1779, 1825, 2104, 2209],\n tic: 2244,\n tick: [30, 85, 184, 763, 1381, 2175, 2233, 2239, 2262, 2269],\n tick_top: 2210,\n ticker: [1369, 2200, 2230],\n"}]}, {"source1": "./usr/share/doc/python-pandas-doc/html/user_guide/10min.html", "source2": "./usr/share/doc/python-pandas-doc/html/user_guide/10min.html", "unified_diff": "@@ -1211,15 +1211,15 @@\n In [138]: plt.figure()\n Out[138]: <Figure size 640x480 with 0 Axes>\n \n In [139]: df.plot()\n Out[139]: <AxesSubplot:>\n \n In [140]: plt.legend(loc='best')\n-Out[140]: <matplotlib.legend.Legend at 0x9f56cdc0>\n+Out[140]: <matplotlib.legend.Legend at 0xdf1b16a0>\n \n \n \n \n
In [144]: ser = pd.Series(arr[:, 0])\n \n In [145]: %timeit ser.iloc[indexer]\n .....: %timeit ser.take(indexer)\n .....: \n-173 us +- 7.61 us per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n-159 us +- 6.36 us per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n+107 us +- 78.5 ns per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n+98 us +- 85 ns per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n
We have discussed MultiIndex
in the previous sections pretty extensively.\n Documentation about DatetimeIndex
and PeriodIndex
are shown here,\n", "details": [{"source1": "html2text {}", "source2": "html2text {}", "unified_diff": "@@ -1113,23 +1113,23 @@\n In [141]: indexer = np.arange(10000)\n \n In [142]: random.shuffle(indexer)\n \n In [143]: %timeit arr[indexer]\n .....: %timeit arr.take(indexer, axis=0)\n .....:\n-370 us +- 5.65 us per loop (mean +- std. dev. of 7 runs, 1000 loops each)\n-165 us +- 2.77 us per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n+302 us +- 112 ns per loop (mean +- std. dev. of 7 runs, 1000 loops each)\n+131 us +- 75.7 ns per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n In [144]: ser = pd.Series(arr[:, 0])\n \n In [145]: %timeit ser.iloc[indexer]\n .....: %timeit ser.take(indexer)\n .....:\n-173 us +- 7.61 us per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n-159 us +- 6.36 us per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n+107 us +- 78.5 ns per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n+98 us +- 85 ns per loop (mean +- std. dev. of 7 runs, 10000 loops each)\n ***** Index types\u00c2\u00b6 *****\n We have discussed MultiIndex in the previous sections pretty extensively.\n Documentation about DatetimeIndex and PeriodIndex are shown here, and\n documentation about TimedeltaIndex is found here.\n In the following sub-sections we will highlight some other index types.\n **** CategoricalIndex\u00c2\u00b6 ****\n CategoricalIndex is a type of index that is useful for supporting indexing with\n"}]}, {"source1": "./usr/share/doc/python-pandas-doc/html/user_guide/basics.html", "source2": "./usr/share/doc/python-pandas-doc/html/user_guide/basics.html", "comments": ["Ordering differences only"], "unified_diff": "@@ -3550,21 +3550,21 @@\n dtype: object\n
The number of columns of each type in a DataFrame
can be found by calling\n DataFrame.dtypes.value_counts()
.
In [352]: dft.dtypes.value_counts()\n Out[352]: \n-bool 1\n-float64 1\n-int64 1\n datetime64[ns] 1\n object 1\n-float32 1\n+bool 1\n+int64 1\n+float64 1\n int8 1\n+float32 1\n dtype: int64\n
Numeric dtypes will propagate and can coexist in DataFrames.\n If a dtype is passed (either directly via the dtype
keyword, a passed ndarray
,\n or a passed Series
), then it will be preserved in DataFrame operations. Furthermore,\n different numeric dtypes will NOT be combined. The following example will give you a taste.
But clearly this isn\u2019t fast enough for us. Let\u2019s take a look and see where the\n time is spent during this operation (limited to the most time consuming\n four calls) using the prun ipython magic function:
\nIn [5]: %prun -l 4 df.apply(lambda x: integrate_f(x['a'], x['b'], x['N']), axis=1) # noqa E999\n- 622830 function calls (622809 primitive calls) in 1.375 seconds\n+ 622830 function calls (622809 primitive calls) in 0.257 seconds\n \n Ordered by: internal time\n List reduced from 214 to 4 due to restriction <4>\n \n ncalls tottime percall cumtime percall filename:lineno(function)\n- 1000 0.670 0.001 1.185 0.001 <ipython-input-4-c2a74e076cf0>:1(integrate_f)\n- 552423 0.515 0.000 0.515 0.000 <ipython-input-3-c138bdd570e3>:1(f)\n- 3000 0.024 0.000 0.143 0.000 series.py:868(__getitem__)\n- 3000 0.017 0.000 0.101 0.000 series.py:973(_get_value)\n+ 1000 0.142 0.000 0.219 0.000 <ipython-input-4-c2a74e076cf0>:1(integrate_f)\n+ 552423 0.077 0.000 0.077 0.000 <ipython-input-3-c138bdd570e3>:1(f)\n+ 3000 0.005 0.000 0.027 0.000 series.py:868(__getitem__)\n+ 3000 0.004 0.000 0.020 0.000 series.py:973(_get_value)\n
By far the majority of time is spend inside either integrate_f
or f
,\n hence we\u2019ll concentrate our efforts cythonizing these two functions.
Now, we\u2019re talking! It\u2019s now over ten times faster than the original python\n implementation, and we haven\u2019t really modified the code. Let\u2019s have another\n look at what\u2019s eating up time:
\nIn [9]: %prun -l 4 df.apply(lambda x: integrate_f_typed(x['a'], x['b'], x['N']), axis=1)\n- 70396 function calls (70375 primitive calls) in 0.175 seconds\n+ 70396 function calls (70375 primitive calls) in 0.039 seconds\n \n Ordered by: internal time\n List reduced from 208 to 4 due to restriction <4>\n \n ncalls tottime percall cumtime percall filename:lineno(function)\n- 3000 0.022 0.000 0.130 0.000 series.py:868(__getitem__)\n- 3000 0.015 0.000 0.092 0.000 series.py:973(_get_value)\n- 3000 0.011 0.000 0.040 0.000 base.py:2854(get_loc)\n- 3000 0.011 0.000 0.037 0.000 base.py:4626(_get_values_for_loc)\n+ 3000 0.005 0.000 0.027 0.000 series.py:868(__getitem__)\n+ 3000 0.003 0.000 0.019 0.000 series.py:973(_get_value)\n+ 3000 0.002 0.000 0.008 0.000 base.py:2854(get_loc)\n+ 3000 0.002 0.000 0.007 0.000 base.py:4626(_get_values_for_loc)\n
It\u2019s calling series\u2026 a lot! It\u2019s creating a Series from each row, and get-ting from both\n the index and the series (three times for each row). Function calls are expensive\n@@ -277,24 +277,24 @@\n
We\u2019ve gotten another big improvement. Let\u2019s check again where the time is spent:
\nIn [11]: %%prun -l 4 apply_integrate_f(df['a'].to_numpy(),\n ....: df['b'].to_numpy(),\n ....: df['N'].to_numpy())\n ....: \n- 218 function calls in 0.004 seconds\n+ 218 function calls in 0.002 seconds\n \n Ordered by: internal time\n List reduced from 59 to 4 due to restriction <4>\n \n ncalls tottime percall cumtime percall filename:lineno(function)\n- 1 0.003 0.003 0.003 0.003 {built-in method _cython_magic_7dc7064016b351e6f537d7542a365f15.apply_integrate_f}\n+ 1 0.002 0.002 0.002 0.002 {built-in method _cython_magic_7dc7064016b351e6f537d7542a365f15.apply_integrate_f}\n+ 3 0.000 0.000 0.000 0.000 frame.py:2869(__getitem__)\n 3 0.000 0.000 0.000 0.000 managers.py:993(iget)\n- 3 0.000 0.000 0.001 0.000 frame.py:2869(__getitem__)\n- 1 0.000 0.000 0.004 0.004 {built-in method builtins.exec}\n+ 1 0.000 0.000 0.002 0.002 {built-in method builtins.exec}\n
As one might expect, the majority of the time is now spent in apply_integrate_f
,\n so if we wanted to make anymore efficiencies we must continue to concentrate our\n efforts here.
Now let\u2019s compare adding them together using plain ol\u2019 Python versus\n eval()
:
In [15]: %timeit df1 + df2 + df3 + df4\n-43.5 ms +- 1.3 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+11.9 ms +- 114 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n
In [16]: %timeit pd.eval('df1 + df2 + df3 + df4')\n-26.1 ms +- 1.26 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+7.94 ms +- 34.5 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n
Now let\u2019s do the same thing but with comparisons:
\nIn [17]: %timeit (df1 > 0) & (df2 > 0) & (df3 > 0) & (df4 > 0)\n-54.3 ms +- 1.68 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+25.7 ms +- 65 us per loop (mean +- std. dev. of 7 runs, 10 loops each)\n
In [18]: %timeit pd.eval('(df1 > 0) & (df2 > 0) & (df3 > 0) & (df4 > 0)')\n-15.8 ms +- 166 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+8.97 ms +- 38.3 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n
eval()
also works with unaligned pandas objects:
In [19]: s = pd.Series(np.random.randn(50))\n \n In [20]: %timeit df1 + df2 + df3 + df4 + s\n-72.9 ms +- 1.7 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+42.9 ms +- 42.4 us per loop (mean +- std. dev. of 7 runs, 10 loops each)\n
In [21]: %timeit pd.eval('df1 + df2 + df3 + df4 + s')\n-27.9 ms +- 795 us per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+28.6 ms +- 62.4 us per loop (mean +- std. dev. of 7 runs, 10 loops each)\n
Note
\nOperations such as
\n\n\n\n\n1 and 2 # would parse to 1 & 2, but should evaluate to 2\n@@ -834,19 +834,19 @@\n other evaluation engines against it. You will achieve no performance\n benefits usingeval()
withengine='python'
and in fact may\n incur a performance hit.\nYou can see this by using
\npandas.eval()
with the'python'
engine. It\n is a bit slower (not by much) than evaluating the same expression in Python\n\nIn [62]: %timeit df1 + df2 + df3 + df4\n-46.7 ms +- 2.02 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+12 ms +- 77.8 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n\n\nIn [63]: %timeit pd.eval('df1 + df2 + df3 + df4', engine='python')\n-44.9 ms +- 1.95 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+12.8 ms +- 11 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n\n\n\n
pandas.eval()
performance\u00b6
eval()
is intended to speed up certain kinds of operations. In\n particular, those operations involving complex expressions with large\n", "details": [{"source1": "html2text {}", "source2": "html2text {}", "unified_diff": "@@ -78,26 +78,26 @@\n In [7]: %timeit df.apply(lambda x: integrate_f(x['a'], x['b'], x['N']), axis=1)\n 10 loops, best of 3: 174 ms per loop\n But clearly this isn\u00e2\u0080\u0099t fast enough for us. Let\u00e2\u0080\u0099s take a look and see where\n the time is spent during this operation (limited to the most time consuming\n four calls) using the prun_ipython_magic_function:\n In [5]: %prun -l 4 df.apply(lambda x: integrate_f(x['a'], x['b'], x['N']),\n axis=1) # noqa E999\n- 622830 function calls (622809 primitive calls) in 1.375 seconds\n+ 622830 function calls (622809 primitive calls) in 0.257 seconds\n \n Ordered by: internal time\n List reduced from 214 to 4 due to restriction <4>\n \n ncalls tottime percall cumtime percall filename:lineno(function)\n- 1000 0.670 0.001 1.185 0.001:1\n+ 1000 0.142 0.000 0.219 0.000 :1\n (integrate_f)\n- 552423 0.515 0.000 0.515 0.000 :1\n+ 552423 0.077 0.000 0.077 0.000 :1\n (f)\n- 3000 0.024 0.000 0.143 0.000 series.py:868(__getitem__)\n- 3000 0.017 0.000 0.101 0.000 series.py:973(_get_value)\n+ 3000 0.005 0.000 0.027 0.000 series.py:868(__getitem__)\n+ 3000 0.004 0.000 0.020 0.000 series.py:973(_get_value)\n By far the majority of time is spend inside either integrate_f or f, hence\n we\u00e2\u0080\u0099ll concentrate our efforts cythonizing these two functions.\n **** Plain Cython\u00c2\u00b6 ****\n First we\u00e2\u0080\u0099re going to need to import the Cython magic function to ipython:\n In [6]: %load_ext Cython\n Now, let\u00e2\u0080\u0099s simply copy our functions over to Cython as is (the suffix is here\n to distinguish between function versions):\n@@ -136,24 +136,24 @@\n axis=1)\n 10 loops, best of 3: 20.3 ms per loop\n Now, we\u00e2\u0080\u0099re talking! It\u00e2\u0080\u0099s now over ten times faster than the original python\n implementation, and we haven\u00e2\u0080\u0099t really modified the code. Let\u00e2\u0080\u0099s have another\n look at what\u00e2\u0080\u0099s eating up time:\n In [9]: %prun -l 4 df.apply(lambda x: integrate_f_typed(x['a'], x['b'], x\n ['N']), axis=1)\n- 70396 function calls (70375 primitive calls) in 0.175 seconds\n+ 70396 function calls (70375 primitive calls) in 0.039 seconds\n \n Ordered by: internal time\n List reduced from 208 to 4 due to restriction <4>\n \n ncalls tottime percall cumtime percall filename:lineno(function)\n- 3000 0.022 0.000 0.130 0.000 series.py:868(__getitem__)\n- 3000 0.015 0.000 0.092 0.000 series.py:973(_get_value)\n- 3000 0.011 0.000 0.040 0.000 base.py:2854(get_loc)\n- 3000 0.011 0.000 0.037 0.000 base.py:4626(_get_values_for_loc)\n+ 3000 0.005 0.000 0.027 0.000 series.py:868(__getitem__)\n+ 3000 0.003 0.000 0.019 0.000 series.py:973(_get_value)\n+ 3000 0.002 0.000 0.008 0.000 base.py:2854(get_loc)\n+ 3000 0.002 0.000 0.007 0.000 base.py:4626(_get_values_for_loc)\n **** Using ndarray\u00c2\u00b6 ****\n It\u00e2\u0080\u0099s calling series\u00e2\u0080\u00a6 a lot! 
It\u00e2\u0080\u0099s creating a Series from each row, and\n get-ting from both the index and the series (three times for each row).\n Function calls are expensive in Python, so maybe we could minimize these by\n cythonizing the apply part.\n Note\n We are now passing ndarrays into the Cython function, fortunately Cython plays\n@@ -205,25 +205,25 @@\n 1000 loops, best of 3: 1.25 ms per loop\n We\u00e2\u0080\u0099ve gotten another big improvement. Let\u00e2\u0080\u0099s check again where the time is\n spent:\n In [11]: %%prun -l 4 apply_integrate_f(df['a'].to_numpy(),\n ....: df['b'].to_numpy(),\n ....: df['N'].to_numpy())\n ....:\n- 218 function calls in 0.004 seconds\n+ 218 function calls in 0.002 seconds\n \n Ordered by: internal time\n List reduced from 59 to 4 due to restriction <4>\n \n ncalls tottime percall cumtime percall filename:lineno(function)\n- 1 0.003 0.003 0.003 0.003 {built-in method\n+ 1 0.002 0.002 0.002 0.002 {built-in method\n _cython_magic_7dc7064016b351e6f537d7542a365f15.apply_integrate_f}\n+ 3 0.000 0.000 0.000 0.000 frame.py:2869(__getitem__)\n 3 0.000 0.000 0.000 0.000 managers.py:993(iget)\n- 3 0.000 0.000 0.001 0.000 frame.py:2869(__getitem__)\n- 1 0.000 0.000 0.004 0.004 {built-in method builtins.exec}\n+ 1 0.000 0.000 0.002 0.002 {built-in method builtins.exec}\n As one might expect, the majority of the time is now spent in\n apply_integrate_f, so if we wanted to make anymore efficiencies we must\n continue to concentrate our efforts here.\n **** More advanced techniques\u00c2\u00b6 ****\n There is still hope for improvement. Here\u00e2\u0080\u0099s an example of using some more\n advanced Cython techniques:\n In [12]: %%cython\n@@ -420,29 +420,29 @@\n In [13]: nrows, ncols = 20000, 100\n \n In [14]: df1, df2, df3, df4 = [pd.DataFrame(np.random.randn(nrows, ncols)) for\n _ in range(4)]\n Now let\u00e2\u0080\u0099s compare adding them together using plain ol\u00e2\u0080\u0099 Python versus eval\n ():\n In [15]: %timeit df1 + df2 + df3 + df4\n-43.5 ms +- 1.3 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+11.9 ms +- 114 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n In [16]: %timeit pd.eval('df1 + df2 + df3 + df4')\n-26.1 ms +- 1.26 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+7.94 ms +- 34.5 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n Now let\u00e2\u0080\u0099s do the same thing but with comparisons:\n In [17]: %timeit (df1 > 0) & (df2 > 0) & (df3 > 0) & (df4 > 0)\n-54.3 ms +- 1.68 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+25.7 ms +- 65 us per loop (mean +- std. dev. of 7 runs, 10 loops each)\n In [18]: %timeit pd.eval('(df1 > 0) & (df2 > 0) & (df3 > 0) & (df4 > 0)')\n-15.8 ms +- 166 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+8.97 ms +- 38.3 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n eval() also works with unaligned pandas objects:\n In [19]: s = pd.Series(np.random.randn(50))\n \n In [20]: %timeit df1 + df2 + df3 + df4 + s\n-72.9 ms +- 1.7 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+42.9 ms +- 42.4 us per loop (mean +- std. dev. of 7 runs, 10 loops each)\n In [21]: %timeit pd.eval('df1 + df2 + df3 + df4 + s')\n-27.9 ms +- 795 us per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+28.6 ms +- 62.4 us per loop (mean +- std. dev. 
of 7 runs, 10 loops each)\n Note\n Operations such as\n 1 and 2 # would parse to 1 & 2, but should evaluate to 2\n 3 or 4 # would parse to 3 | 4, but should evaluate to 3\n ~1 # this is okay, but slower when using eval\n should be performed in Python. An exception will be raised if you try to\n perform any boolean/bitwise operations with scalar operands that are not of\n@@ -667,17 +667,17 @@\n Note\n Using the 'python' engine is generally not useful, except for testing other\n evaluation engines against it. You will achieve no performance benefits using\n eval() with engine='python' and in fact may incur a performance hit.\n You can see this by using pandas.eval() with the 'python' engine. It is a bit\n slower (not by much) than evaluating the same expression in Python\n In [62]: %timeit df1 + df2 + df3 + df4\n-46.7 ms +- 2.02 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+12 ms +- 77.8 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n In [63]: %timeit pd.eval('df1 + df2 + df3 + df4', engine='python')\n-44.9 ms +- 1.95 ms per loop (mean +- std. dev. of 7 runs, 10 loops each)\n+12.8 ms +- 11 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n **** pandas.eval() performance\u00c2\u00b6 ****\n eval() is intended to speed up certain kinds of operations. In particular,\n those operations involving complex expressions with large DataFrame/Series\n objects should see a significant performance benefit. Here is a plot showing\n the running time of pandas.eval() as function of the size of the frame involved\n in the computation. The two lines are two different engines.\n [../_images/eval-perf.png]\n"}]}, {"source1": "./usr/share/doc/python-pandas-doc/html/user_guide/groupby.html", "source2": "./usr/share/doc/python-pandas-doc/html/user_guide/groupby.html", "unified_diff": "@@ -559,15 +559,15 @@\n In [59]: grouped_C = grouped['C']\n \n In [60]: grouped_D = grouped['D']\n This is mainly syntactic sugar for the alternative and much more verbose:
\n\n\nIn [61]: df['C'].groupby(df['A'])\n-Out[61]: <pandas.core.groupby.generic.SeriesGroupBy object at 0x9d0c82e0>\n+Out[61]: <pandas.core.groupby.generic.SeriesGroupBy object at 0xd8eb3298>\nAdditionally this method avoids recomputing the internal grouping information\n derived from the passed key.
\n
query()
Python versus pandas Syntax Comparison\u00b6Full numpy-like syntax:
\nIn [232]: df = pd.DataFrame(np.random.randint(n, size=(n, 3)), columns=list('abc'))\n", "details": [{"source1": "html2text {}", "source2": "html2text {}", "unified_diff": "@@ -1773,15 +1773,15 @@\n 9 0.732206 0.419540 0.604675\n 10 0.604466 0.848974 0.896165\n 11 0.589168 0.920046 0.732716\n \n In [230]: expr = '0.0 <= a <= c <= 0.5'\n \n In [231]: map(lambda frame: frame.query(expr), [df, df2])\n-Out[231]:
By specifying a chunksize
to read_csv
, the return\n value will be an iterable object of type TextFileReader
:
In [190]: reader = pd.read_csv('tmp.sv', sep='|', chunksize=4)\n \n In [191]: reader\n-Out[191]: <pandas.io.parsers.TextFileReader at 0x9dd3d3d0>\n+Out[191]: <pandas.io.parsers.TextFileReader at 0xdcaadfd0>\n \n In [192]: for chunk in reader:\n .....: print(chunk)\n .....: \n Unnamed: 0 0 1 2 3\n 0 0 0.469112 -0.282863 -1.509059 -1.135632\n 1 1 1.212112 -0.173215 0.119209 -1.044236\n@@ -2679,31 +2679,31 @@\n \n In [249]: dffloats = pd.DataFrame(randfloats, columns=list('ABCDEFGHIJ'))\n \n In [250]: jsonfloats = dffloats.to_json()\n
In [251]: %timeit pd.read_json(jsonfloats)\n-19.9 ms +- 439 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+12.7 ms +- 22.2 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n
In [252]: %timeit pd.read_json(jsonfloats, numpy=True)\n-12.7 ms +- 279 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+8.47 ms +- 39.2 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n
The speedup is less noticeable for smaller datasets:
\nIn [253]: jsonfloats = dffloats.head(100).to_json()\n
In [254]: %timeit pd.read_json(jsonfloats)\n-9.95 ms +- 193 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+6.81 ms +- 10.8 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n
In [255]: %timeit pd.read_json(jsonfloats, numpy=True)\n-8.29 ms +- 178 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+5.83 ms +- 5.65 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n
Warning
\nDirect NumPy decoding makes a number of assumptions and may fail or produce\n unexpected output if these assumptions are not satisfied:
\n\n@@ -2800,15 +2800,15 @@\n In [265]: df.to_json(orient='records', lines=True)\n Out[265]: '{"a":1,"b":2}\\n{"a":3,"b":4}'\n \n # reader is an iterator that returns `chunksize` lines each iteration\n In [266]: reader = pd.read_json(StringIO(jsonl), lines=True, chunksize=1)\n \n In [267]: reader\n-Out[267]: <pandas.io.json._json.JsonReader at 0x9e225aa8>\n+Out[267]: <pandas.io.json._json.JsonReader at 0xdbdf8028>\n \n In [268]: for chunk in reader:\n .....: print(chunk)\n .....: \n Empty DataFrame\n Columns: []\n Index: []\n@@ -4869,18 +4869,18 @@\n 5 0.852727 0.463819 0.146262 string 1 True 2001-01-02\n 6 -1.177365 0.793644 -0.131959 string 1 True 2001-01-02\n 7 1.236988 0.221252 0.089012 string 1 True 2001-01-02\n \n In [393]: df_mixed1.dtypes.value_counts()\n Out[393]: \n float64 2\n-int64 1\n-bool 1\n-object 1\n datetime64[ns] 1\n+object 1\n+bool 1\n+int64 1\n float32 1\n dtype: int64\n \n # we have provided a minimum string column size\n In [394]: store.root.df_mixed.table\n Out[394]: \n /df_mixed/table (Table(8,)) ''\n", "details": [{"source1": "html2text {}", "source2": "html2text {}", "unified_diff": "@@ -1717,15 +1717,15 @@\n 8 8 1.075770 -0.109050 1.643563 -1.469388\n 9 9 0.357021 -0.674600 -1.776904 -0.968914\n By specifying a chunksize to read_csv, the return value will be an iterable\n object of type TextFileReader:\n In [190]: reader = pd.read_csv('tmp.sv', sep='|', chunksize=4)\n \n In [191]: reader\n-Out[191]:\n+Out[191]: \n \n In [192]: for chunk in reader:\n .....: print(chunk)\n .....:\n Unnamed: 0 0 1 2 3\n 0 0 0.469112 -0.282863 -1.509059 -1.135632\n 1 1 1.212112 -0.173215 0.119209 -1.044236\n@@ -2220,23 +2220,23 @@\n \n In [248]: randfloats.shape = (1000, 10)\n \n In [249]: dffloats = pd.DataFrame(randfloats, columns=list('ABCDEFGHIJ'))\n \n In [250]: jsonfloats = dffloats.to_json()\n In [251]: %timeit pd.read_json(jsonfloats)\n-19.9 ms +- 439 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+12.7 ms +- 22.2 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n In [252]: %timeit pd.read_json(jsonfloats, numpy=True)\n-12.7 ms +- 279 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+8.47 ms +- 39.2 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n The speedup is less noticeable for smaller datasets:\n In [253]: jsonfloats = dffloats.head(100).to_json()\n In [254]: %timeit pd.read_json(jsonfloats)\n-9.95 ms +- 193 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+6.81 ms +- 10.8 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n In [255]: %timeit pd.read_json(jsonfloats, numpy=True)\n-8.29 ms +- 178 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n+5.83 ms +- 5.65 us per loop (mean +- std. dev. of 7 runs, 100 loops each)\n Warning\n Direct NumPy decoding makes a number of assumptions and may fail or produce\n unexpected output if these assumptions are not satisfied:\n * data is numeric.\n * data is uniform. The dtype is sniffed from the first value\n decoded. 
A ValueError may be raised, or incorrect output may be\n produced if this condition is not satisfied.\n@@ -2319,15 +2319,15 @@\n In [265]: df.to_json(orient='records', lines=True)\n Out[265]: '{\"a\":1,\"b\":2}\\n{\"a\":3,\"b\":4}'\n \n # reader is an iterator that returns `chunksize` lines each iteration\n In [266]: reader = pd.read_json(StringIO(jsonl), lines=True, chunksize=1)\n \n In [267]: reader\n-Out[267]: \n+Out[267]: \n \n In [268]: for chunk in reader:\n .....: print(chunk)\n .....:\n Empty DataFrame\n Columns: []\n Index: []\n@@ -3948,18 +3948,18 @@\n 5 0.852727 0.463819 0.146262 string 1 True 2001-01-02\n 6 -1.177365 0.793644 -0.131959 string 1 True 2001-01-02\n 7 1.236988 0.221252 0.089012 string 1 True 2001-01-02\n \n In [393]: df_mixed1.dtypes.value_counts()\n Out[393]:\n float64 2\n-int64 1\n-bool 1\n-object 1\n datetime64[ns] 1\n+object 1\n+bool 1\n+int64 1\n float32 1\n dtype: int64\n \n # we have provided a minimum string column size\n In [394]: store.root.df_mixed.table\n Out[394]:\n /df_mixed/table (Table(8,)) ''\n"}]}, {"source1": "./usr/share/doc/python-pandas-doc/html/user_guide/missing_data.html", "source2": "./usr/share/doc/python-pandas-doc/html/user_guide/missing_data.html", "comments": ["Ordering differences only"], "unified_diff": "@@ -232,17 +232,17 @@\n e 0.119209 -1.044236 -0.861849 bar True 2012-01-01\n f -2.104569 -0.494929 1.071804 bar False 2012-01-01\n h NaN -0.706771 -1.039575 bar True NaT\n \n In [20]: df2.dtypes.value_counts()\n Out[20]: \n float64 3\n-bool 1\n-object 1\n datetime64[ns] 1\n+object 1\n+bool 1\n dtype: int64\n
Some readers, like pandas.read_csv()
, offer parameters to control the\n chunksize
when reading a single file.
Manually chunking is an OK option for workflows that don\u2019t\n require too sophisticated of operations. Some operations, like groupby
, are\n", "details": [{"source1": "html2text {}", "source2": "html2text {}", "unified_diff": "@@ -279,16 +279,16 @@\n ....: files = pathlib.Path(\"data/timeseries/\").glob(\"ts*.parquet\")\n ....: counts = pd.Series(dtype=int)\n ....: for path in files:\n ....: df = pd.read_parquet(path)\n ....: counts = counts.add(df['name'].value_counts(), fill_value=0)\n ....: counts.astype(int)\n ....:\n-CPU times: user 1.13 ms, sys: 0 ns, total: 1.13 ms\n-Wall time: 1.13 ms\n+CPU times: user 776 us, sys: 0 ns, total: 776 us\n+Wall time: 782 us\n Out[19]: Series([], dtype: int32)\n Some readers, like pandas.read_csv(), offer parameters to control the chunksize\n when reading a single file.\n Manually chunking is an OK option for workflows that don\u00e2\u0080\u0099t require too\n sophisticated of operations. Some operations, like groupby, are much harder to\n do chunkwise. In these cases, you may be better switching to a different\n library that implements these out-of-core algorithms for you.\n"}]}, {"source1": "./usr/share/doc/python-pandas-doc/html/user_guide/style.html", "source2": "./usr/share/doc/python-pandas-doc/html/user_guide/style.html", "unified_diff": "@@ -341,94 +341,94 @@\n \n
[3]:\n
A | B | C | D | E |
---|
A | B | C | D | E | |||||||
---|---|---|---|---|---|---|---|---|---|---|---|
0 | \n-1.000000 | \n-1.329212 | \n-nan | \n--0.316280 | \n--0.990810 | \n-||||||
1 | \n-2.000000 | \n--1.070816 | \n--1.438713 | \n-0.564417 | \n-0.295722 | \n-||||||
2 | \n-3.000000 | \n--1.626404 | \n-0.219565 | \n-0.678805 | \n-1.889273 | \n-||||||
3 | \n-4.000000 | \n-0.961538 | \n-0.104011 | \n-nan | \n-0.850229 | \n-||||||
4 | \n-5.000000 | \n-1.453425 | \n-1.057737 | \n-0.165562 | \n-0.515018 | \n-||||||
5 | \n-6.000000 | \n--1.336936 | \n-0.562861 | \n-1.392855 | \n--0.063328 | \n-||||||
6 | \n-7.000000 | \n-0.121668 | \n-1.207603 | \n--0.002040 | \n-1.627796 | \n-||||||
7 | \n-8.000000 | \n-0.354493 | \n-1.037528 | \n--0.385684 | \n-0.519818 | \n-||||||
8 | \n-9.000000 | \n-1.686583 | \n--1.325963 | \n-1.428984 | \n--2.089354 | \n-||||||
9 | \n-10.000000 | \n--0.129820 | \n-0.631523 | \n--0.586538 | \n-0.290720 | \n+0 | \n+1.000000 | \n+1.329212 | \n+nan | \n+-0.316280 | \n+-0.990810 | \n+
1 | \n+2.000000 | \n+-1.070816 | \n+-1.438713 | \n+0.564417 | \n+0.295722 | \n+||||||
2 | \n+3.000000 | \n+-1.626404 | \n+0.219565 | \n+0.678805 | \n+1.889273 | \n+||||||
3 | \n+4.000000 | \n+0.961538 | \n+0.104011 | \n+nan | \n+0.850229 | \n+||||||
4 | \n+5.000000 | \n+1.453425 | \n+1.057737 | \n+0.165562 | \n+0.515018 | \n+||||||
5 | \n+6.000000 | \n+-1.336936 | \n+0.562861 | \n+1.392855 | \n+-0.063328 | \n+||||||
6 | \n+7.000000 | \n+0.121668 | \n+1.207603 | \n+-0.002040 | \n+1.627796 | \n+||||||
7 | \n+8.000000 | \n+0.354493 | \n+1.037528 | \n+-0.385684 | \n+0.519818 | \n+||||||
8 | \n+9.000000 | \n+1.686583 | \n+-1.325963 | \n+1.428984 | \n+-2.089354 | \n+||||||
9 | \n+10.000000 | \n+-0.129820 | \n+0.631523 | \n+-0.586538 | \n+0.290720 | \n
Note: The DataFrame.style
attribute is a property that returns a Styler
object. Styler
has a _repr_html_
method defined on it so they are rendered automatically. If you want the actual HTML back for further processing or for writing to file call the .render()
method which returns a string.
The above output looks very similar to the standard DataFrame HTML representation. But we\u2019ve done some work behind the scenes to attach CSS classes to each cell. We can view these by calling the .render
method.
[4]:\n@@ -442,23 +442,23 @@\n \n [4]:\n
\n \n \n \n ['<style type="text/css" >',\n- '#T_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col2,#T_1d4562b2_b114_11ee_8b43_59a83b27c78brow3_col3{',\n+ '#T_4056c92e_ea13_11ef_835c_39a090989f27row0_col2,#T_4056c92e_ea13_11ef_835c_39a090989f27row3_col3{',\n ' background-color: red;',\n- ' }</style><table id="T_1d4562b2_b114_11ee_8b43_59a83b27c78b" ><thead> <tr> <th class="blank level0" ></th> <th class="col_heading level0 col0" >A</th> <th class="col_heading level0 col1" >B</th> <th class="col_heading level0 col2" >C</th> <th class="col_heading level0 col3" >D</th> <th class="col_heading level0 col4" >E</th> </tr></thead><tbody>',\n+ ' }</style><table id="T_4056c92e_ea13_11ef_835c_39a090989f27" ><thead> <tr> <th class="blank level0" ></th> <th class="col_heading level0 col0" >A</th> <th class="col_heading level0 col1" >B</th> <th class="col_heading level0 col2" >C</th> <th class="col_heading level0 col3" >D</th> <th class="col_heading level0 col4" >E</th> </tr></thead><tbody>',\n ' <tr>',\n- ' <th id="T_1d4562b2_b114_11ee_8b43_59a83b27c78blevel0_row0" class="row_heading level0 row0" >0</th>',\n- ' <td id="T_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col0" class="data row0 col0" >1.000000</td>',\n- ' <td id="T_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col1" class="data row0 col1" >1.329212</td>',\n- ' <td id="T_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col2" class="data row0 col2" >nan</td>',\n- ' <td id="T_1d4562b2_b114_11ee_8b43_59a83b27c78brow0_col3" class="data row0 col3" >-0.316280</td>']\n+ ' <th id="T_4056c92e_ea13_11ef_835c_39a090989f27level0_row0" class="row_heading level0 row0" >0</th>',\n+ ' <td id="T_4056c92e_ea13_11ef_835c_39a090989f27row0_col0" class="data row0 col0" >1.000000</td>',\n+ ' <td id="T_4056c92e_ea13_11ef_835c_39a090989f27row0_col1" class="data row0 col1" >1.329212</td>',\n+ ' <td id="T_4056c92e_ea13_11ef_835c_39a090989f27row0_col2" class="data row0 col2" >nan</td>',\n+ ' <td id="T_4056c92e_ea13_11ef_835c_39a090989f27row0_col3" class="data row0 col3" >-0.316280</td>']\n
\n \n The row0_col2
is the identifier for that particular cell. We\u2019ve also prepended each row/column identifier with a UUID unique to each DataFrame so that the style from one doesn\u2019t collide with the styling from another within the same notebook or page (you can set the uuid
if you\u2019d like to tie together the styling of two DataFrames).
\n When writing style functions, you take care of producing the CSS attribute / value pairs you want. Pandas matches those up with the CSS classes that identify each cell.
\n Let\u2019s write a simple style function that will color negative numbers red and positive numbers black.
\n \n [5]:\n@@ -489,98 +489,98 @@\n
\n \n [6]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n Notice the similarity with the standard df.applymap
, which operates on DataFrames elementwise. We want you to be able to reuse your existing knowledge of how to interact with DataFrames.
\n Notice also that our function returned a string containing the CSS attribute and value, separated by a colon just like in a <style>
tag. This will be a common theme.
\n Finally, the input shapes matched. Styler.applymap
calls the function on each scalar input, and the function returns a scalar output.
\n Now suppose you wanted to highlight the maximum value in each column. We can\u2019t use .applymap
anymore since that operated elementwise. Instead, we\u2019ll turn to .apply
which operates columnwise (or rowwise using the axis
keyword). Later on we\u2019ll see that something like highlight_max
is already defined on Styler
so you wouldn\u2019t need to write this yourself.
\n@@ -609,96 +609,96 @@\n \n \n [8]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n In this case the input is a Series
, one column at a time. Notice that the output shape of highlight_max
matches the input shape, an array with len(s)
items.
\n We encourage you to use method chains to build up a style piecewise, before finally rending at the end of the chain.
\n \n [9]:\n@@ -713,101 +713,101 @@\n
\n \n [9]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n Above we used Styler.apply
to pass in each column one at a time.
\n Debugging Tip: If you\u2019re having trouble writing your style function, try just passing it into DataFrame.apply. Internally, Styler.apply uses DataFrame.apply so the result should be the same.
\n What if you wanted to highlight just the maximum value in the entire table? Use .apply(function, axis=None)
to indicate that your function wants the entire table, not one column or row at a time. Let\u2019s try that next.
\n We\u2019ll rewrite our highlight-max
to handle either Series (from .apply(axis=0 or 1)
) or DataFrames (from .apply(axis=None)
). We\u2019ll also allow the color to be adjustable, to demonstrate that .apply
, and .applymap
pass along keyword arguments.
\n@@ -843,96 +843,96 @@\n \n \n [11]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n Building Styles Summary\u00b6
\n Style functions should return strings with one or more CSS attribute: value
delimited by semicolons. Use
\n \n@@ -965,96 +965,96 @@\n
\n \n [12]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n For row and column slicing, any valid indexer to .loc
will work.
\n \n [13]:\n
\n@@ -1067,98 +1067,98 @@\n \n \n [13]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n Only label-based slicing is supported right now, not positional.
\n If your style function uses a subset
or axis
keyword argument, consider wrapping your function in a functools.partial
, partialing out that keyword.
\n my_func2 = functools.partial(my_func, subset=42)\n
\n@@ -1178,94 +1178,94 @@\n \n \n [14]:\n
\n \n \n A B C D E \n+ A B C D E \n \n- 0 \n- 100.00% \n- 132.92% \n- nan% \n- -31.63% \n- -99.08% \n- \n- \n- 1 \n- 200.00% \n- -107.08% \n- -143.87% \n- 56.44% \n- 29.57% \n- \n- \n- 2 \n- 300.00% \n- -162.64% \n- 21.96% \n- 67.88% \n- 188.93% \n- \n- \n- 3 \n- 400.00% \n- 96.15% \n- 10.40% \n- nan% \n- 85.02% \n- \n- \n- 4 \n- 500.00% \n- 145.34% \n- 105.77% \n- 16.56% \n- 51.50% \n- \n- \n- 5 \n- 600.00% \n- -133.69% \n- 56.29% \n- 139.29% \n- -6.33% \n- \n- \n- 6 \n- 700.00% \n- 12.17% \n- 120.76% \n- -0.20% \n- 162.78% \n- \n- \n- 7 \n- 800.00% \n- 35.45% \n- 103.75% \n- -38.57% \n- 51.98% \n- \n- \n- 8 \n- 900.00% \n- 168.66% \n- -132.60% \n- 142.90% \n- -208.94% \n- \n- \n- 9 \n- 1000.00% \n- -12.98% \n- 63.15% \n- -58.65% \n- 29.07% \n+ 0 \n+ 100.00% \n+ 132.92% \n+ nan% \n+ -31.63% \n+ -99.08% \n+ \n+ \n+ 1 \n+ 200.00% \n+ -107.08% \n+ -143.87% \n+ 56.44% \n+ 29.57% \n+ \n+ \n+ 2 \n+ 300.00% \n+ -162.64% \n+ 21.96% \n+ 67.88% \n+ 188.93% \n+ \n+ \n+ 3 \n+ 400.00% \n+ 96.15% \n+ 10.40% \n+ nan% \n+ 85.02% \n+ \n+ \n+ 4 \n+ 500.00% \n+ 145.34% \n+ 105.77% \n+ 16.56% \n+ 51.50% \n+ \n+ \n+ 5 \n+ 600.00% \n+ -133.69% \n+ 56.29% \n+ 139.29% \n+ -6.33% \n+ \n+ \n+ 6 \n+ 700.00% \n+ 12.17% \n+ 120.76% \n+ -0.20% \n+ 162.78% \n+ \n+ \n+ 7 \n+ 800.00% \n+ 35.45% \n+ 103.75% \n+ -38.57% \n+ 51.98% \n+ \n+ \n+ 8 \n+ 900.00% \n+ 168.66% \n+ -132.60% \n+ 142.90% \n+ -208.94% \n+ \n+ \n+ 9 \n+ 1000.00% \n+ -12.98% \n+ 63.15% \n+ -58.65% \n+ 29.07% \n \n
\n \n Use a dictionary to format specific columns.
\n \n [15]:\n
\n@@ -1277,94 +1277,94 @@\n \n \n [15]:\n
\n \n \n A B C D E \n+ A B C D E \n \n- 0 \n- 1.000000 \n- 1000 \n- nan \n- -0.32 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -100 \n- -1.438713 \n- +0.56 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -200 \n- 0.219565 \n- +0.68 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 1000 \n- 0.104011 \n- +nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1000 \n- 1.057737 \n- +0.17 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -100 \n- 0.562861 \n- +1.39 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0000 \n- 1.207603 \n- -0.00 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0000 \n- 1.037528 \n- -0.39 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 2000 \n- -1.325963 \n- +1.43 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -000 \n- 0.631523 \n- -0.59 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1000 \n+ nan \n+ -0.32 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -100 \n+ -1.438713 \n+ +0.56 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -200 \n+ 0.219565 \n+ +0.68 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 1000 \n+ 0.104011 \n+ +nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1000 \n+ 1.057737 \n+ +0.17 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -100 \n+ 0.562861 \n+ +1.39 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0000 \n+ 1.207603 \n+ -0.00 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0000 \n+ 1.037528 \n+ -0.39 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 2000 \n+ -1.325963 \n+ +1.43 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -000 \n+ 0.631523 \n+ -0.59 \n+ 0.290720 \n \n
\n \n Or pass in a callable (or dictionary of callables) for more flexible handling.
\n \n [16]:\n
\n@@ -1376,94 +1376,94 @@\n \n \n [16]:\n
\n \n \n A B C D E \n+ A B C D E \n \n- 0 \n- 1.000000 \n- \u00b11.33 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- \u00b11.07 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- \u00b11.63 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- \u00b10.96 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- \u00b11.45 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- \u00b11.34 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- \u00b10.12 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- \u00b10.35 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- \u00b11.69 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- \u00b10.13 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ \u00b11.33 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ \u00b11.07 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ \u00b11.63 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ \u00b10.96 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ \u00b11.45 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ \u00b11.34 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ \u00b10.12 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ \u00b10.35 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ \u00b11.69 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ \u00b10.13 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n You can format the text displayed for missing values by na_rep
.
\n \n [17]:\n
\n@@ -1475,94 +1475,94 @@\n \n \n [17]:\n
\n \n \n A B C D E \n+ A B C D E \n \n- 0 \n- 100.00% \n- 132.92% \n- - \n- -31.63% \n- -99.08% \n- \n- \n- 1 \n- 200.00% \n- -107.08% \n- -143.87% \n- 56.44% \n- 29.57% \n- \n- \n- 2 \n- 300.00% \n- -162.64% \n- 21.96% \n- 67.88% \n- 188.93% \n- \n- \n- 3 \n- 400.00% \n- 96.15% \n- 10.40% \n- - \n- 85.02% \n- \n- \n- 4 \n- 500.00% \n- 145.34% \n- 105.77% \n- 16.56% \n- 51.50% \n- \n- \n- 5 \n- 600.00% \n- -133.69% \n- 56.29% \n- 139.29% \n- -6.33% \n- \n- \n- 6 \n- 700.00% \n- 12.17% \n- 120.76% \n- -0.20% \n- 162.78% \n- \n- \n- 7 \n- 800.00% \n- 35.45% \n- 103.75% \n- -38.57% \n- 51.98% \n- \n- \n- 8 \n- 900.00% \n- 168.66% \n- -132.60% \n- 142.90% \n- -208.94% \n- \n- \n- 9 \n- 1000.00% \n- -12.98% \n- 63.15% \n- -58.65% \n- 29.07% \n+ 0 \n+ 100.00% \n+ 132.92% \n+ - \n+ -31.63% \n+ -99.08% \n+ \n+ \n+ 1 \n+ 200.00% \n+ -107.08% \n+ -143.87% \n+ 56.44% \n+ 29.57% \n+ \n+ \n+ 2 \n+ 300.00% \n+ -162.64% \n+ 21.96% \n+ 67.88% \n+ 188.93% \n+ \n+ \n+ 3 \n+ 400.00% \n+ 96.15% \n+ 10.40% \n+ - \n+ 85.02% \n+ \n+ \n+ 4 \n+ 500.00% \n+ 145.34% \n+ 105.77% \n+ 16.56% \n+ 51.50% \n+ \n+ \n+ 5 \n+ 600.00% \n+ -133.69% \n+ 56.29% \n+ 139.29% \n+ -6.33% \n+ \n+ \n+ 6 \n+ 700.00% \n+ 12.17% \n+ 120.76% \n+ -0.20% \n+ 162.78% \n+ \n+ \n+ 7 \n+ 800.00% \n+ 35.45% \n+ 103.75% \n+ -38.57% \n+ 51.98% \n+ \n+ \n+ 8 \n+ 900.00% \n+ 168.66% \n+ -132.60% \n+ 142.90% \n+ -208.94% \n+ \n+ \n+ 9 \n+ 1000.00% \n+ -12.98% \n+ 63.15% \n+ -58.65% \n+ 29.07% \n \n
\n \n These formatting techniques can be used in combination with styling.
\n \n [18]:\n
\n@@ -1574,96 +1574,96 @@\n \n \n [18]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- - \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- - \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ - \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ - \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n \n Builtin styles\u00b6
\n Finally, we expect certain styling functions to be common enough that we\u2019ve included a few \u201cbuilt-in\u201d to the Styler
, so you don\u2019t have to write them yourself.
\n@@ -1678,96 +1678,96 @@\n \n \n [19]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n You can create \u201cheatmaps\u201d with the background_gradient
method. These require matplotlib, and we\u2019ll use Seaborn to get a nice colormap.
\n \n [20]:\n
\n@@ -1784,199 +1784,199 @@\n \n \n [20]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n Styler.background_gradient
takes the keyword arguments low
and high
. Roughly speaking these extend the range of your data by low
and high
percent so that when we convert the colors, the colormap\u2019s entire range isn\u2019t used. This is useful so that you can actually read the text still.
\n \n [21]:\n
\n@@ -1989,102 +1989,102 @@\n \n \n [21]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n \n
\n \n \n [22]:\n
\n \n@@ -2099,103 +2099,103 @@\n \n \n [22]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n \n
\n \n There\u2019s also .highlight_min
and .highlight_max
.
\n \n [23]:\n
\n@@ -2207,96 +2207,96 @@\n \n \n [23]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n Use Styler.set_properties
when the style doesn\u2019t actually depend on the values.
\n \n [24]:\n
\n@@ -2310,98 +2310,98 @@\n \n \n [24]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n Bar charts\u00b6
\n You can include \u201cbar charts\u201d in your DataFrame.
\n \n@@ -2415,165 +2415,165 @@\n \n \n [25]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n New in version 0.20.0 is the ability to customize further the bar chart: You can now have the df.style.bar
be centered on zero or midpoint value (in addition to the already existing way of having the min value at the left side of the cell), and you can pass a list of [color_negative, color_positive]
.
\n Here\u2019s how you can change the above with the new align='mid'
option:
\n \n [26]:\n@@ -2586,174 +2586,174 @@\n
\n \n [26]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n The following example aims to give a highlight of the behavior of the new align options:
\n \n [27]:\n
\n@@ -2811,313 +2811,313 @@\n All Negative \n All Positive \n Both Neg and Pos \n \n \n \n left \n+ } \n \n- 0 \n- -100 \n+ 0 \n+ -100 \n \n \n- 1 \n- -60 \n+ 1 \n+ -60 \n \n \n- 2 \n- -30 \n+ 2 \n+ -30 \n \n \n- 3 \n- -20 \n+ 3 \n+ -20 \n \n
\n+ } \n \n- 0 \n- 10 \n+ 0 \n+ 10 \n \n \n- 1 \n- 20 \n+ 1 \n+ 20 \n \n \n- 2 \n- 50 \n+ 2 \n+ 50 \n \n \n- 3 \n- 100 \n+ 3 \n+ 100 \n \n
\n+ } \n \n- 0 \n- -10 \n+ 0 \n+ -10 \n \n \n- 1 \n- -5 \n+ 1 \n+ -5 \n \n \n- 2 \n- 0 \n+ 2 \n+ 0 \n \n \n- 3 \n- 90 \n+ 3 \n+ 90 \n \n
zero \n+ } \n \n- 0 \n- -100 \n+ 0 \n+ -100 \n \n \n- 1 \n- -60 \n+ 1 \n+ -60 \n \n \n- 2 \n- -30 \n+ 2 \n+ -30 \n \n \n- 3 \n- -20 \n+ 3 \n+ -20 \n \n
\n+ } \n \n- 0 \n- 10 \n+ 0 \n+ 10 \n \n \n- 1 \n- 20 \n+ 1 \n+ 20 \n \n \n- 2 \n- 50 \n+ 2 \n+ 50 \n \n \n- 3 \n- 100 \n+ 3 \n+ 100 \n \n
\n+ } \n \n- 0 \n- -10 \n+ 0 \n+ -10 \n \n \n- 1 \n- -5 \n+ 1 \n+ -5 \n \n \n- 2 \n- 0 \n+ 2 \n+ 0 \n \n \n- 3 \n- 90 \n+ 3 \n+ 90 \n \n
mid \n+ } \n \n- 0 \n- -100 \n+ 0 \n+ -100 \n \n \n- 1 \n- -60 \n+ 1 \n+ -60 \n \n \n- 2 \n- -30 \n+ 2 \n+ -30 \n \n \n- 3 \n- -20 \n+ 3 \n+ -20 \n \n
\n+ } \n \n- 0 \n- 10 \n+ 0 \n+ 10 \n \n \n- 1 \n- 20 \n+ 1 \n+ 20 \n \n \n- 2 \n- 50 \n+ 2 \n+ 50 \n \n \n- 3 \n- 100 \n+ 3 \n+ 100 \n \n
\n+ } \n \n- 0 \n- -10 \n+ 0 \n+ -10 \n \n \n- 1 \n- -5 \n+ 1 \n+ -5 \n \n \n- 2 \n- 0 \n+ 2 \n+ 0 \n \n \n- 3 \n- 90 \n+ 3 \n+ 90 \n \n
\n \n
\n \n \n \n@@ -3137,98 +3137,98 @@\n \n \n [28]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n [29]:\n
\n \n@@ -3241,98 +3241,98 @@\n \n \n [29]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- -1.000000 \n- -1.329212 \n- nan \n- 0.316280 \n- 0.990810 \n- \n- \n- 1 \n- -2.000000 \n- 1.070816 \n- 1.438713 \n- -0.564417 \n- -0.295722 \n- \n- \n- 2 \n- -3.000000 \n- 1.626404 \n- -0.219565 \n- -0.678805 \n- -1.889273 \n- \n- \n- 3 \n- -4.000000 \n- -0.961538 \n- -0.104011 \n- nan \n- -0.850229 \n- \n- \n- 4 \n- -5.000000 \n- -1.453425 \n- -1.057737 \n- -0.165562 \n- -0.515018 \n- \n- \n- 5 \n- -6.000000 \n- 1.336936 \n- -0.562861 \n- -1.392855 \n- 0.063328 \n- \n- \n- 6 \n- -7.000000 \n- -0.121668 \n- -1.207603 \n- 0.002040 \n- -1.627796 \n- \n- \n- 7 \n- -8.000000 \n- -0.354493 \n- -1.037528 \n- 0.385684 \n- -0.519818 \n- \n- \n- 8 \n- -9.000000 \n- -1.686583 \n- 1.325963 \n- -1.428984 \n- 2.089354 \n- \n- \n- 9 \n- -10.000000 \n- 0.129820 \n- -0.631523 \n- 0.586538 \n- -0.290720 \n+ 0 \n+ -1.000000 \n+ -1.329212 \n+ nan \n+ 0.316280 \n+ 0.990810 \n+ \n+ \n+ 1 \n+ -2.000000 \n+ 1.070816 \n+ 1.438713 \n+ -0.564417 \n+ -0.295722 \n+ \n+ \n+ 2 \n+ -3.000000 \n+ 1.626404 \n+ -0.219565 \n+ -0.678805 \n+ -1.889273 \n+ \n+ \n+ 3 \n+ -4.000000 \n+ -0.961538 \n+ -0.104011 \n+ nan \n+ -0.850229 \n+ \n+ \n+ 4 \n+ -5.000000 \n+ -1.453425 \n+ -1.057737 \n+ -0.165562 \n+ -0.515018 \n+ \n+ \n+ 5 \n+ -6.000000 \n+ 1.336936 \n+ -0.562861 \n+ -1.392855 \n+ 0.063328 \n+ \n+ \n+ 6 \n+ -7.000000 \n+ -0.121668 \n+ -1.207603 \n+ 0.002040 \n+ -1.627796 \n+ \n+ \n+ 7 \n+ -8.000000 \n+ -0.354493 \n+ -1.037528 \n+ 0.385684 \n+ -0.519818 \n+ \n+ \n+ 8 \n+ -9.000000 \n+ -1.686583 \n+ 1.325963 \n+ -1.428984 \n+ 2.089354 \n+ \n+ \n+ 9 \n+ -10.000000 \n+ 0.129820 \n+ -0.631523 \n+ 0.586538 \n+ -0.290720 \n \n
\n \n Notice that you\u2019re able to share the styles even though they\u2019re data aware. The styles are re-evaluated on the new DataFrame they\u2019ve been use
d upon.
\n \n \n Other Options\u00b6
\n@@ -3368,101 +3368,101 @@\n \n \n [30]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.00 \n- 1.33 \n- nan \n- -0.32 \n- -0.99 \n- \n- \n- 1 \n- 2.00 \n- -1.07 \n- -1.44 \n- 0.56 \n- 0.30 \n- \n- \n- 2 \n- 3.00 \n- -1.63 \n- 0.22 \n- 0.68 \n- 1.89 \n- \n- \n- 3 \n- 4.00 \n- 0.96 \n- 0.10 \n- nan \n- 0.85 \n- \n- \n- 4 \n- 5.00 \n- 1.45 \n- 1.06 \n- 0.17 \n- 0.52 \n- \n- \n- 5 \n- 6.00 \n- -1.34 \n- 0.56 \n- 1.39 \n- -0.06 \n- \n- \n- 6 \n- 7.00 \n- 0.12 \n- 1.21 \n- -0.00 \n- 1.63 \n- \n- \n- 7 \n- 8.00 \n- 0.35 \n- 1.04 \n- -0.39 \n- 0.52 \n- \n- \n- 8 \n- 9.00 \n- 1.69 \n- -1.33 \n- 1.43 \n- -2.09 \n- \n- \n- 9 \n- 10.00 \n- -0.13 \n- 0.63 \n- -0.59 \n- 0.29 \n+ 0 \n+ 1.00 \n+ 1.33 \n+ nan \n+ -0.32 \n+ -0.99 \n+ \n+ \n+ 1 \n+ 2.00 \n+ -1.07 \n+ -1.44 \n+ 0.56 \n+ 0.30 \n+ \n+ \n+ 2 \n+ 3.00 \n+ -1.63 \n+ 0.22 \n+ 0.68 \n+ 1.89 \n+ \n+ \n+ 3 \n+ 4.00 \n+ 0.96 \n+ 0.10 \n+ nan \n+ 0.85 \n+ \n+ \n+ 4 \n+ 5.00 \n+ 1.45 \n+ 1.06 \n+ 0.17 \n+ 0.52 \n+ \n+ \n+ 5 \n+ 6.00 \n+ -1.34 \n+ 0.56 \n+ 1.39 \n+ -0.06 \n+ \n+ \n+ 6 \n+ 7.00 \n+ 0.12 \n+ 1.21 \n+ -0.00 \n+ 1.63 \n+ \n+ \n+ 7 \n+ 8.00 \n+ 0.35 \n+ 1.04 \n+ -0.39 \n+ 0.52 \n+ \n+ \n+ 8 \n+ 9.00 \n+ 1.69 \n+ -1.33 \n+ 1.43 \n+ -2.09 \n+ \n+ \n+ 9 \n+ 10.00 \n+ -0.13 \n+ 0.63 \n+ -0.59 \n+ 0.29 \n \n
\n \n Or through a set_precision
method.
\n \n [31]:\n
\n@@ -3477,101 +3477,101 @@\n \n \n [31]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.00 \n- 1.33 \n- nan \n- -0.32 \n- -0.99 \n- \n- \n- 1 \n- 2.00 \n- -1.07 \n- -1.44 \n- 0.56 \n- 0.30 \n- \n- \n- 2 \n- 3.00 \n- -1.63 \n- 0.22 \n- 0.68 \n- 1.89 \n- \n- \n- 3 \n- 4.00 \n- 0.96 \n- 0.10 \n- nan \n- 0.85 \n- \n- \n- 4 \n- 5.00 \n- 1.45 \n- 1.06 \n- 0.17 \n- 0.52 \n- \n- \n- 5 \n- 6.00 \n- -1.34 \n- 0.56 \n- 1.39 \n- -0.06 \n- \n- \n- 6 \n- 7.00 \n- 0.12 \n- 1.21 \n- -0.00 \n- 1.63 \n- \n- \n- 7 \n- 8.00 \n- 0.35 \n- 1.04 \n- -0.39 \n- 0.52 \n- \n- \n- 8 \n- 9.00 \n- 1.69 \n- -1.33 \n- 1.43 \n- -2.09 \n- \n- \n- 9 \n- 10.00 \n- -0.13 \n- 0.63 \n- -0.59 \n- 0.29 \n+ 0 \n+ 1.00 \n+ 1.33 \n+ nan \n+ -0.32 \n+ -0.99 \n+ \n+ \n+ 1 \n+ 2.00 \n+ -1.07 \n+ -1.44 \n+ 0.56 \n+ 0.30 \n+ \n+ \n+ 2 \n+ 3.00 \n+ -1.63 \n+ 0.22 \n+ 0.68 \n+ 1.89 \n+ \n+ \n+ 3 \n+ 4.00 \n+ 0.96 \n+ 0.10 \n+ nan \n+ 0.85 \n+ \n+ \n+ 4 \n+ 5.00 \n+ 1.45 \n+ 1.06 \n+ 0.17 \n+ 0.52 \n+ \n+ \n+ 5 \n+ 6.00 \n+ -1.34 \n+ 0.56 \n+ 1.39 \n+ -0.06 \n+ \n+ \n+ 6 \n+ 7.00 \n+ 0.12 \n+ 1.21 \n+ -0.00 \n+ 1.63 \n+ \n+ \n+ 7 \n+ 8.00 \n+ 0.35 \n+ 1.04 \n+ -0.39 \n+ 0.52 \n+ \n+ \n+ 8 \n+ 9.00 \n+ 1.69 \n+ -1.33 \n+ 1.43 \n+ -2.09 \n+ \n+ \n+ 9 \n+ 10.00 \n+ -0.13 \n+ 0.63 \n+ -0.59 \n+ 0.29 \n \n
\n \n Setting the precision only affects the printed number; the full-precision values are always passed to your style functions. You can always use df.round(2).style
if you\u2019d prefer to round from the start.
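A hedged sketch of the two approaches just mentioned, assuming the stand-in df from the earlier sketch:

    df.style.set_precision(2)  # display-only: style functions still receive full-precision values
    df.round(2).style          # rounds the underlying data before any styling is applied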
\n \n \n Captions\u00b6
\n@@ -3588,199 +3588,199 @@\n \n \n [32]:\n
\n \n \n Colormaps, with a caption. A B C D E \n+ }Colormaps, with a caption. A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n \n Table styles\u00b6
\n The next option you have is \u201ctable styles\u201d. These are styles that apply to the table as a whole, but don\u2019t look at the data. Certain stylings, including pseudo-selectors like :hover
can only be used this way.
\n@@ -3809,101 +3809,101 @@\n \n \n [33]:\n
\n \n \n Hover to highlight. A B C D E \n+ }Hover to highlight. A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n table_styles
should be a list of dictionaries. Each dictionary should have the selector
and props
keys. The value for selector
should be a valid CSS selector. Recall that all the styles are already attached to an id
, unique to each Styler
. This selector is in addition to that id
. The value for props
should be a list of tuples of ('attribute', 'value')
.
\n table_styles are extremely flexible, but not as fun to type out by hand. We hope to collect some useful ones either in pandas, or preferably in a new package that builds on top of the tools here.
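A minimal sketch of the selector/props format described above; the hover colour is an arbitrary illustration:

    hover_style = [
        dict(selector='tr:hover', props=[('background-color', 'yellow')]),
    ]
    df.style.set_table_styles(hover_style)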
\n \n \n@@ -3923,96 +3923,96 @@\n \n \n [34]:\n
\n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- FAIL \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- PASS \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ FAIL \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ PASS \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n \n Hiding the Index or Columns\u00b6
\n The index can be hidden from rendering by calling Styler.hide_index. Columns can be hidden from rendering by calling Styler.hide_columns and passing in the name of a column, or a slice of columns.
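A short sketch of both calls, assuming the A\u2013E columns shown in the tables:

    df.style.hide_index()              # drop the index from the rendered HTML
    df.style.hide_columns(['C', 'D'])  # drop a slice of columns from the rendered HTML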
\n@@ -4027,84 +4027,84 @@\n \n \n [35]:\n
\n \n \n A B C D E \n+ A B C D E \n \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n \n \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n \n \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n \n \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n \n \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n \n \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n \n \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n \n \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n \n \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n \n \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n [36]:\n
\n \n@@ -4115,74 +4115,74 @@\n \n \n [36]:\n
\n \n \n A B E \n+ A B E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- -0.990810 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ -0.990810 \n \n \n- 1 \n- 2.000000 \n- -1.070816 \n- 0.295722 \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ 0.295722 \n \n \n- 2 \n- 3.000000 \n- -1.626404 \n- 1.889273 \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 1.889273 \n \n \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.850229 \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.850229 \n \n \n- 4 \n- 5.000000 \n- 1.453425 \n- 0.515018 \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 0.515018 \n \n \n- 5 \n- 6.000000 \n- -1.336936 \n- -0.063328 \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ -0.063328 \n \n \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.627796 \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.627796 \n \n \n- 7 \n- 8.000000 \n- 0.354493 \n- 0.519818 \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 0.519818 \n \n \n- 8 \n- 9.000000 \n- 1.686583 \n- -2.089354 \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -2.089354 \n \n \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.290720 \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.290720 \n \n
\n \n \n \n CSS classes\u00b6
\n Certain CSS classes are attached to cells.
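As the diff hunks in this report themselves show (e.g. class="data row0 col3"), each data cell carries a data rowN colM class, and header cells carry row_heading / col_heading classes. A hedged illustration:

    html = df.style.render()
    'class="data row0 col0"' in html  # True for the rendered stand-in frame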
\n@@ -4246,181 +4246,181 @@\n \n \n \n \n \n \n A B C D E \n+ } A B C D E \n \n- 0 \n- 1.000000 \n- 1.329212 \n- nan \n- -0.316280 \n- -0.990810 \n- \n- \n- 1 \n- 2.000000 \n- -1.070816 \n- -1.438713 \n- 0.564417 \n- 0.295722 \n- \n- \n- 2 \n- 3.000000 \n- -1.626404 \n- 0.219565 \n- 0.678805 \n- 1.889273 \n- \n- \n- 3 \n- 4.000000 \n- 0.961538 \n- 0.104011 \n- nan \n- 0.850229 \n- \n- \n- 4 \n- 5.000000 \n- 1.453425 \n- 1.057737 \n- 0.165562 \n- 0.515018 \n- \n- \n- 5 \n- 6.000000 \n- -1.336936 \n- 0.562861 \n- 1.392855 \n- -0.063328 \n- \n- \n- 6 \n- 7.000000 \n- 0.121668 \n- 1.207603 \n- -0.002040 \n- 1.627796 \n- \n- \n- 7 \n- 8.000000 \n- 0.354493 \n- 1.037528 \n- -0.385684 \n- 0.519818 \n- \n- \n- 8 \n- 9.000000 \n- 1.686583 \n- -1.325963 \n- 1.428984 \n- -2.089354 \n- \n- \n- 9 \n- 10.000000 \n- -0.129820 \n- 0.631523 \n- -0.586538 \n- 0.290720 \n+ 0 \n+ 1.000000 \n+ 1.329212 \n+ nan \n+ -0.316280 \n+ -0.990810 \n+ \n+ \n+ 1 \n+ 2.000000 \n+ -1.070816 \n+ -1.438713 \n+ 0.564417 \n+ 0.295722 \n+ \n+ \n+ 2 \n+ 3.000000 \n+ -1.626404 \n+ 0.219565 \n+ 0.678805 \n+ 1.889273 \n+ \n+ \n+ 3 \n+ 4.000000 \n+ 0.961538 \n+ 0.104011 \n+ nan \n+ 0.850229 \n+ \n+ \n+ 4 \n+ 5.000000 \n+ 1.453425 \n+ 1.057737 \n+ 0.165562 \n+ 0.515018 \n+ \n+ \n+ 5 \n+ 6.000000 \n+ -1.336936 \n+ 0.562861 \n+ 1.392855 \n+ -0.063328 \n+ \n+ \n+ 6 \n+ 7.000000 \n+ 0.121668 \n+ 1.207603 \n+ -0.002040 \n+ 1.627796 \n+ \n+ \n+ 7 \n+ 8.000000 \n+ 0.354493 \n+ 1.037528 \n+ -0.385684 \n+ 0.519818 \n+ \n+ \n+ 8 \n+ 9.000000 \n+ 1.686583 \n+ -1.325963 \n+ 1.428984 \n+ -2.089354 \n+ \n+ \n+ 9 \n+ 10.000000 \n+ -0.129820 \n+ 0.631523 \n+ -0.586538 \n+ 0.290720 \n \n
\n \n \n [38]:\n
\n \n@@ -4458,1583 +4458,1583 @@\n \n \n [39]:\n
\n \n \n Hover to magnify 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 \n+ }Hover to magnify 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 \n \n- 0 \n- 0.23 \n- 1.03 \n- -0.84 \n- -0.59 \n- -0.96 \n- -0.22 \n- -0.62 \n- 1.84 \n- -2.05 \n- 0.87 \n- -0.92 \n- -0.23 \n- 2.15 \n- -1.33 \n- 0.08 \n- -1.25 \n- 1.20 \n- -1.05 \n- 1.06 \n- -0.42 \n- 2.29 \n- -2.59 \n- 2.82 \n- 0.68 \n- -1.58 \n- \n- \n- 1 \n- -1.75 \n- 1.56 \n- -1.13 \n- -1.10 \n- 1.03 \n- 0.00 \n- -2.46 \n- 3.45 \n- -1.66 \n- 1.27 \n- -0.52 \n- -0.02 \n- 1.52 \n- -1.09 \n- -1.86 \n- -1.13 \n- -0.68 \n- -0.81 \n- 0.35 \n- -0.06 \n- 1.79 \n- -2.82 \n- 2.26 \n- 0.78 \n- 0.44 \n- \n- \n- 2 \n- -0.65 \n- 3.22 \n- -1.76 \n- 0.52 \n- 2.20 \n- -0.37 \n- -3.00 \n- 3.73 \n- -1.87 \n- 2.46 \n- 0.21 \n- -0.24 \n- -0.10 \n- -0.78 \n- -3.02 \n- -0.82 \n- -0.21 \n- -0.23 \n- 0.86 \n- -0.68 \n- 1.45 \n- -4.89 \n- 3.03 \n- 1.91 \n- 0.61 \n- \n- \n- 3 \n- -1.62 \n- 3.71 \n- -2.31 \n- 0.43 \n- 4.17 \n- -0.43 \n- -3.86 \n- 4.16 \n- -2.15 \n- 1.08 \n- 0.12 \n- 0.60 \n- -0.89 \n- 0.27 \n- -3.67 \n- -2.71 \n- -0.31 \n- -1.59 \n- 1.35 \n- -1.83 \n- 0.91 \n- -5.80 \n- 2.81 \n- 2.11 \n- 0.28 \n- \n- \n- 4 \n- -3.35 \n- 4.48 \n- -1.86 \n- -1.70 \n- 5.19 \n- -1.02 \n- -3.81 \n- 4.72 \n- -0.72 \n- 1.08 \n- -0.18 \n- 0.83 \n- -0.22 \n- -1.08 \n- -4.27 \n- -2.88 \n- -0.97 \n- -1.78 \n- 1.53 \n- -1.80 \n- 2.21 \n- -6.34 \n- 3.34 \n- 2.49 \n- 2.09 \n- \n- \n- 5 \n- -0.84 \n- 4.23 \n- -1.65 \n- -2.00 \n- 5.34 \n- -0.99 \n- -4.13 \n- 3.94 \n- -1.06 \n- -0.94 \n- 1.24 \n- 0.09 \n- -1.78 \n- -0.11 \n- -4.45 \n- -0.85 \n- -2.06 \n- -1.35 \n- 0.80 \n- -1.63 \n- 1.54 \n- -6.51 \n- 2.80 \n- 2.14 \n- 3.77 \n- \n- \n- 6 \n- -0.74 \n- 5.35 \n- -2.11 \n- -1.13 \n- 4.20 \n- -1.85 \n- -3.20 \n- 3.76 \n- -3.22 \n- -1.23 \n- 0.34 \n- 0.57 \n- -1.82 \n- 0.54 \n- -4.43 \n- -1.83 \n- -4.03 \n- -2.62 \n- -0.20 \n- -4.68 \n- 1.93 \n- -8.46 \n- 3.34 \n- 2.52 \n- 5.81 \n- \n- \n- 7 \n- -0.44 \n- 4.69 \n- -2.30 \n- -0.21 \n- 5.93 \n- -2.63 \n- -1.83 \n- 5.46 \n- -4.50 \n- -3.16 \n- -1.73 \n- 0.18 \n- 0.11 \n- 0.04 \n- -5.99 \n- -0.45 \n- -6.20 \n- -3.89 \n- 0.71 \n- -3.95 \n- 0.67 \n- -7.26 \n- 2.97 \n- 3.39 \n- 6.66 \n- \n- \n- 8 \n- 0.92 \n- 5.80 \n- -3.33 \n- -0.65 \n- 5.99 \n- -3.19 \n- -1.83 \n- 5.63 \n- -3.53 \n- -1.30 \n- -1.61 \n- 0.82 \n- -2.45 \n- -0.40 \n- -6.06 \n- -0.52 \n- -6.60 \n- -3.48 \n- -0.04 \n- -4.60 \n- 0.51 \n- -5.85 \n- 3.23 \n- 2.40 \n- 5.08 \n- \n- \n- 9 \n- 0.38 \n- 5.54 \n- -4.49 \n- -0.80 \n- 7.05 \n- -2.64 \n- -0.44 \n- 5.35 \n- -1.96 \n- -0.33 \n- -0.80 \n- 0.26 \n- -3.37 \n- -0.82 \n- -6.05 \n- -2.61 \n- -8.45 \n- -4.45 \n- 0.41 \n- -4.71 \n- 1.89 \n- -6.93 \n- 2.14 \n- 3.00 \n- 5.16 \n- \n- \n- 10 \n- 2.06 \n- 5.84 \n- -3.90 \n- -0.98 \n- 7.78 \n- -2.49 \n- -0.59 \n- 5.59 \n- -2.22 \n- -0.71 \n- -0.46 \n- 1.80 \n- -2.79 \n- 0.48 \n- -5.97 \n- -3.44 \n- -7.77 \n- -5.49 \n- -0.70 \n- -4.61 \n- -0.52 \n- -7.72 \n- 1.54 \n- 5.02 \n- 5.81 \n- \n- \n- 11 \n- 1.86 \n- 4.47 \n- -2.17 \n- -1.38 \n- 5.90 \n- -0.49 \n- 0.02 \n- 5.78 \n- -1.04 \n- -0.60 \n- 0.49 \n- 1.96 \n- -1.47 \n- 1.88 \n- -5.92 \n- -4.55 \n- -8.15 \n- -3.42 \n- -2.24 \n- -4.33 \n- -1.17 \n- -7.90 \n- 1.36 \n- 5.31 \n- 5.83 \n- \n- \n- 12 \n- 3.19 \n- 4.22 \n- -3.06 \n- -2.27 \n- 5.93 \n- -2.64 \n- 0.33 \n- 6.72 \n- -2.84 \n- -0.20 \n- 1.89 \n- 2.63 \n- -1.53 \n- 0.75 \n- -5.27 \n- -4.53 \n- -7.57 \n- -2.85 \n- -2.17 \n- -4.78 \n- -1.13 \n- -8.99 \n- 2.11 \n- 6.42 \n- 5.60 \n- \n- \n- 13 \n- 2.31 \n- 4.45 \n- -3.87 \n- -2.05 \n- 6.76 \n- -3.25 \n- -2.17 
\n- 7.99 \n- -2.56 \n- -0.80 \n- 0.71 \n- 2.33 \n- -0.16 \n- -0.46 \n- -5.10 \n- -3.79 \n- -7.58 \n- -4.00 \n- 0.33 \n- -3.67 \n- -1.05 \n- -8.71 \n- 2.47 \n- 5.87 \n- 6.71 \n- \n- \n- 14 \n- 3.78 \n- 4.33 \n- -3.88 \n- -1.58 \n- 6.22 \n- -3.23 \n- -1.46 \n- 5.57 \n- -2.93 \n- -0.33 \n- -0.97 \n- 1.72 \n- 3.61 \n- 0.29 \n- -4.21 \n- -4.10 \n- -6.68 \n- -4.50 \n- -2.19 \n- -2.43 \n- -1.64 \n- -9.36 \n- 3.36 \n- 6.11 \n- 7.53 \n- \n- \n- 15 \n- 5.64 \n- 5.31 \n- -3.98 \n- -2.26 \n- 5.91 \n- -3.30 \n- -1.03 \n- 5.68 \n- -3.06 \n- -0.33 \n- -1.16 \n- 2.19 \n- 4.20 \n- 1.01 \n- -3.22 \n- -4.31 \n- -5.74 \n- -4.44 \n- -2.30 \n- -1.36 \n- -1.20 \n- -11.27 \n- 2.59 \n- 6.69 \n- 5.91 \n- \n- \n- 16 \n- 4.08 \n- 4.34 \n- -2.44 \n- -3.30 \n- 6.04 \n- -2.52 \n- -0.47 \n- 5.28 \n- -4.84 \n- 1.58 \n- 0.23 \n- 0.10 \n- 5.79 \n- 1.80 \n- -3.13 \n- -3.85 \n- -5.53 \n- -2.97 \n- -2.13 \n- -1.15 \n- -0.56 \n- -13.13 \n- 2.07 \n- 6.16 \n- 4.94 \n- \n- \n- 17 \n- 5.64 \n- 4.57 \n- -3.53 \n- -3.76 \n- 6.58 \n- -2.58 \n- -0.75 \n- 6.58 \n- -4.78 \n- 3.63 \n- -0.29 \n- 0.56 \n- 5.76 \n- 2.05 \n- -2.27 \n- -2.31 \n- -4.95 \n- -3.16 \n- -3.06 \n- -2.43 \n- 0.84 \n- -12.57 \n- 3.56 \n- 7.36 \n- 4.70 \n- \n- \n- 18 \n- 5.99 \n- 5.82 \n- -2.85 \n- -4.15 \n- 7.12 \n- -3.32 \n- -1.21 \n- 7.93 \n- -4.85 \n- 1.44 \n- -0.63 \n- 0.35 \n- 7.47 \n- 0.87 \n- -1.52 \n- -2.09 \n- -4.23 \n- -2.55 \n- -2.46 \n- -2.89 \n- 1.90 \n- -9.74 \n- 3.43 \n- 7.07 \n- 4.39 \n- \n- \n- 19 \n- 4.03 \n- 6.23 \n- -4.10 \n- -4.11 \n- 7.19 \n- -4.10 \n- -1.52 \n- 6.53 \n- -5.21 \n- -0.24 \n- 0.01 \n- 1.16 \n- 6.43 \n- -1.97 \n- -2.64 \n- -1.66 \n- -5.20 \n- -3.25 \n- -2.87 \n- -1.65 \n- 1.64 \n- -10.66 \n- 2.83 \n- 7.48 \n- 3.94 \n+ 0 \n+ 0.23 \n+ 1.03 \n+ -0.84 \n+ -0.59 \n+ -0.96 \n+ -0.22 \n+ -0.62 \n+ 1.84 \n+ -2.05 \n+ 0.87 \n+ -0.92 \n+ -0.23 \n+ 2.15 \n+ -1.33 \n+ 0.08 \n+ -1.25 \n+ 1.20 \n+ -1.05 \n+ 1.06 \n+ -0.42 \n+ 2.29 \n+ -2.59 \n+ 2.82 \n+ 0.68 \n+ -1.58 \n+ \n+ \n+ 1 \n+ -1.75 \n+ 1.56 \n+ -1.13 \n+ -1.10 \n+ 1.03 \n+ 0.00 \n+ -2.46 \n+ 3.45 \n+ -1.66 \n+ 1.27 \n+ -0.52 \n+ -0.02 \n+ 1.52 \n+ -1.09 \n+ -1.86 \n+ -1.13 \n+ -0.68 \n+ -0.81 \n+ 0.35 \n+ -0.06 \n+ 1.79 \n+ -2.82 \n+ 2.26 \n+ 0.78 \n+ 0.44 \n+ \n+ \n+ 2 \n+ -0.65 \n+ 3.22 \n+ -1.76 \n+ 0.52 \n+ 2.20 \n+ -0.37 \n+ -3.00 \n+ 3.73 \n+ -1.87 \n+ 2.46 \n+ 0.21 \n+ -0.24 \n+ -0.10 \n+ -0.78 \n+ -3.02 \n+ -0.82 \n+ -0.21 \n+ -0.23 \n+ 0.86 \n+ -0.68 \n+ 1.45 \n+ -4.89 \n+ 3.03 \n+ 1.91 \n+ 0.61 \n+ \n+ \n+ 3 \n+ -1.62 \n+ 3.71 \n+ -2.31 \n+ 0.43 \n+ 4.17 \n+ -0.43 \n+ -3.86 \n+ 4.16 \n+ -2.15 \n+ 1.08 \n+ 0.12 \n+ 0.60 \n+ -0.89 \n+ 0.27 \n+ -3.67 \n+ -2.71 \n+ -0.31 \n+ -1.59 \n+ 1.35 \n+ -1.83 \n+ 0.91 \n+ -5.80 \n+ 2.81 \n+ 2.11 \n+ 0.28 \n+ \n+ \n+ 4 \n+ -3.35 \n+ 4.48 \n+ -1.86 \n+ -1.70 \n+ 5.19 \n+ -1.02 \n+ -3.81 \n+ 4.72 \n+ -0.72 \n+ 1.08 \n+ -0.18 \n+ 0.83 \n+ -0.22 \n+ -1.08 \n+ -4.27 \n+ -2.88 \n+ -0.97 \n+ -1.78 \n+ 1.53 \n+ -1.80 \n+ 2.21 \n+ -6.34 \n+ 3.34 \n+ 2.49 \n+ 2.09 \n+ \n+ \n+ 5 \n+ -0.84 \n+ 4.23 \n+ -1.65 \n+ -2.00 \n+ 5.34 \n+ -0.99 \n+ -4.13 \n+ 3.94 \n+ -1.06 \n+ -0.94 \n+ 1.24 \n+ 0.09 \n+ -1.78 \n+ -0.11 \n+ -4.45 \n+ -0.85 \n+ -2.06 \n+ -1.35 \n+ 0.80 \n+ -1.63 \n+ 1.54 \n+ -6.51 \n+ 2.80 \n+ 2.14 \n+ 3.77 \n+ \n+ \n+ 6 \n+ -0.74 \n+ 5.35 \n+ -2.11 \n+ -1.13 \n+ 4.20 \n+ -1.85 \n+ -3.20 \n+ 3.76 \n+ -3.22 \n+ -1.23 \n+ 0.34 \n+ 0.57 \n+ -1.82 \n+ 0.54 \n+ -4.43 \n+ -1.83 \n+ -4.03 \n+ -2.62 \n+ -0.20 \n+ -4.68 \n+ 1.93 \n+ -8.46 \n+ 3.34 \n+ 2.52 \n+ 5.81 \n+ \n+ \n+ 7 \n+ -0.44 \n+ 4.69 \n+ -2.30 \n+ -0.21 \n+ 5.93 \n+ -2.63 \n+ -1.83 \n+ 5.46 
\n+ -4.50 \n+ -3.16 \n+ -1.73 \n+ 0.18 \n+ 0.11 \n+ 0.04 \n+ -5.99 \n+ -0.45 \n+ -6.20 \n+ -3.89 \n+ 0.71 \n+ -3.95 \n+ 0.67 \n+ -7.26 \n+ 2.97 \n+ 3.39 \n+ 6.66 \n+ \n+ \n+ 8 \n+ 0.92 \n+ 5.80 \n+ -3.33 \n+ -0.65 \n+ 5.99 \n+ -3.19 \n+ -1.83 \n+ 5.63 \n+ -3.53 \n+ -1.30 \n+ -1.61 \n+ 0.82 \n+ -2.45 \n+ -0.40 \n+ -6.06 \n+ -0.52 \n+ -6.60 \n+ -3.48 \n+ -0.04 \n+ -4.60 \n+ 0.51 \n+ -5.85 \n+ 3.23 \n+ 2.40 \n+ 5.08 \n+ \n+ \n+ 9 \n+ 0.38 \n+ 5.54 \n+ -4.49 \n+ -0.80 \n+ 7.05 \n+ -2.64 \n+ -0.44 \n+ 5.35 \n+ -1.96 \n+ -0.33 \n+ -0.80 \n+ 0.26 \n+ -3.37 \n+ -0.82 \n+ -6.05 \n+ -2.61 \n+ -8.45 \n+ -4.45 \n+ 0.41 \n+ -4.71 \n+ 1.89 \n+ -6.93 \n+ 2.14 \n+ 3.00 \n+ 5.16 \n+ \n+ \n+ 10 \n+ 2.06 \n+ 5.84 \n+ -3.90 \n+ -0.98 \n+ 7.78 \n+ -2.49 \n+ -0.59 \n+ 5.59 \n+ -2.22 \n+ -0.71 \n+ -0.46 \n+ 1.80 \n+ -2.79 \n+ 0.48 \n+ -5.97 \n+ -3.44 \n+ -7.77 \n+ -5.49 \n+ -0.70 \n+ -4.61 \n+ -0.52 \n+ -7.72 \n+ 1.54 \n+ 5.02 \n+ 5.81 \n+ \n+ \n+ 11 \n+ 1.86 \n+ 4.47 \n+ -2.17 \n+ -1.38 \n+ 5.90 \n+ -0.49 \n+ 0.02 \n+ 5.78 \n+ -1.04 \n+ -0.60 \n+ 0.49 \n+ 1.96 \n+ -1.47 \n+ 1.88 \n+ -5.92 \n+ -4.55 \n+ -8.15 \n+ -3.42 \n+ -2.24 \n+ -4.33 \n+ -1.17 \n+ -7.90 \n+ 1.36 \n+ 5.31 \n+ 5.83 \n+ \n+ \n+ 12 \n+ 3.19 \n+ 4.22 \n+ -3.06 \n+ -2.27 \n+ 5.93 \n+ -2.64 \n+ 0.33 \n+ 6.72 \n+ -2.84 \n+ -0.20 \n+ 1.89 \n+ 2.63 \n+ -1.53 \n+ 0.75 \n+ -5.27 \n+ -4.53 \n+ -7.57 \n+ -2.85 \n+ -2.17 \n+ -4.78 \n+ -1.13 \n+ -8.99 \n+ 2.11 \n+ 6.42 \n+ 5.60 \n+ \n+ \n+ 13 \n+ 2.31 \n+ 4.45 \n+ -3.87 \n+ -2.05 \n+ 6.76 \n+ -3.25 \n+ -2.17 \n+ 7.99 \n+ -2.56 \n+ -0.80 \n+ 0.71 \n+ 2.33 \n+ -0.16 \n+ -0.46 \n+ -5.10 \n+ -3.79 \n+ -7.58 \n+ -4.00 \n+ 0.33 \n+ -3.67 \n+ -1.05 \n+ -8.71 \n+ 2.47 \n+ 5.87 \n+ 6.71 \n+ \n+ \n+ 14 \n+ 3.78 \n+ 4.33 \n+ -3.88 \n+ -1.58 \n+ 6.22 \n+ -3.23 \n+ -1.46 \n+ 5.57 \n+ -2.93 \n+ -0.33 \n+ -0.97 \n+ 1.72 \n+ 3.61 \n+ 0.29 \n+ -4.21 \n+ -4.10 \n+ -6.68 \n+ -4.50 \n+ -2.19 \n+ -2.43 \n+ -1.64 \n+ -9.36 \n+ 3.36 \n+ 6.11 \n+ 7.53 \n+ \n+ \n+ 15 \n+ 5.64 \n+ 5.31 \n+ -3.98 \n+ -2.26 \n+ 5.91 \n+ -3.30 \n+ -1.03 \n+ 5.68 \n+ -3.06 \n+ -0.33 \n+ -1.16 \n+ 2.19 \n+ 4.20 \n+ 1.01 \n+ -3.22 \n+ -4.31 \n+ -5.74 \n+ -4.44 \n+ -2.30 \n+ -1.36 \n+ -1.20 \n+ -11.27 \n+ 2.59 \n+ 6.69 \n+ 5.91 \n+ \n+ \n+ 16 \n+ 4.08 \n+ 4.34 \n+ -2.44 \n+ -3.30 \n+ 6.04 \n+ -2.52 \n+ -0.47 \n+ 5.28 \n+ -4.84 \n+ 1.58 \n+ 0.23 \n+ 0.10 \n+ 5.79 \n+ 1.80 \n+ -3.13 \n+ -3.85 \n+ -5.53 \n+ -2.97 \n+ -2.13 \n+ -1.15 \n+ -0.56 \n+ -13.13 \n+ 2.07 \n+ 6.16 \n+ 4.94 \n+ \n+ \n+ 17 \n+ 5.64 \n+ 4.57 \n+ -3.53 \n+ -3.76 \n+ 6.58 \n+ -2.58 \n+ -0.75 \n+ 6.58 \n+ -4.78 \n+ 3.63 \n+ -0.29 \n+ 0.56 \n+ 5.76 \n+ 2.05 \n+ -2.27 \n+ -2.31 \n+ -4.95 \n+ -3.16 \n+ -3.06 \n+ -2.43 \n+ 0.84 \n+ -12.57 \n+ 3.56 \n+ 7.36 \n+ 4.70 \n+ \n+ \n+ 18 \n+ 5.99 \n+ 5.82 \n+ -2.85 \n+ -4.15 \n+ 7.12 \n+ -3.32 \n+ -1.21 \n+ 7.93 \n+ -4.85 \n+ 1.44 \n+ -0.63 \n+ 0.35 \n+ 7.47 \n+ 0.87 \n+ -1.52 \n+ -2.09 \n+ -4.23 \n+ -2.55 \n+ -2.46 \n+ -2.89 \n+ 1.90 \n+ -9.74 \n+ 3.43 \n+ 7.07 \n+ 4.39 \n+ \n+ \n+ 19 \n+ 4.03 \n+ 6.23 \n+ -4.10 \n+ -4.11 \n+ 7.19 \n+ -4.10 \n+ -1.52 \n+ 6.53 \n+ -5.21 \n+ -0.24 \n+ 0.01 \n+ 1.16 \n+ 6.43 \n+ -1.97 \n+ -2.64 \n+ -1.66 \n+ -5.20 \n+ -3.25 \n+ -2.87 \n+ -1.65 \n+ 1.64 \n+ -10.66 \n+ 2.83 \n+ 7.48 \n+ 3.94 \n \n
\n \n \n \n Export to Excel\u00b6
\n New in version 0.20.0
\n@@ -6156,15 +6156,15 @@\n \n \n \n \n \n My Table
\n \n-\n+\n \n \n \n A \n B \n C \n D \n@@ -6174,237 +6174,237 @@\n \n \n \n \n \n \n \n- 0 \n+ 0 \n \n \n- 1.000000 \n+ 1.000000 \n \n \n- 1.329212 \n+ 1.329212 \n \n \n- nan \n+ nan \n \n \n- -0.316280 \n+ -0.316280 \n \n \n- -0.990810 \n+ -0.990810 \n \n \n \n \n \n \n \n- 1 \n+ 1 \n \n \n- 2.000000 \n+ 2.000000 \n \n \n- -1.070816 \n+ -1.070816 \n \n \n- -1.438713 \n+ -1.438713 \n \n \n- 0.564417 \n+ 0.564417 \n \n \n- 0.295722 \n+ 0.295722 \n \n \n \n \n \n \n \n- 2 \n+ 2 \n \n \n- 3.000000 \n+ 3.000000 \n \n \n- -1.626404 \n+ -1.626404 \n \n \n- 0.219565 \n+ 0.219565 \n \n \n- 0.678805 \n+ 0.678805 \n \n \n- 1.889273 \n+ 1.889273 \n \n \n \n \n \n \n \n- 3 \n+ 3 \n \n \n- 4.000000 \n+ 4.000000 \n \n \n- 0.961538 \n+ 0.961538 \n \n \n- 0.104011 \n+ 0.104011 \n \n \n- nan \n+ nan \n \n \n- 0.850229 \n+ 0.850229 \n \n \n \n \n \n \n \n- 4 \n+ 4 \n \n \n- 5.000000 \n+ 5.000000 \n \n \n- 1.453425 \n+ 1.453425 \n \n \n- 1.057737 \n+ 1.057737 \n \n \n- 0.165562 \n+ 0.165562 \n \n \n- 0.515018 \n+ 0.515018 \n \n \n \n \n \n \n \n- 5 \n+ 5 \n \n \n- 6.000000 \n+ 6.000000 \n \n \n- -1.336936 \n+ -1.336936 \n \n \n- 0.562861 \n+ 0.562861 \n \n \n- 1.392855 \n+ 1.392855 \n \n \n- -0.063328 \n+ -0.063328 \n \n \n \n \n \n \n \n- 6 \n+ 6 \n \n \n- 7.000000 \n+ 7.000000 \n \n \n- 0.121668 \n+ 0.121668 \n \n \n- 1.207603 \n+ 1.207603 \n \n \n- -0.002040 \n+ -0.002040 \n \n \n- 1.627796 \n+ 1.627796 \n \n \n \n \n \n \n \n- 7 \n+ 7 \n \n \n- 8.000000 \n+ 8.000000 \n \n \n- 0.354493 \n+ 0.354493 \n \n \n- 1.037528 \n+ 1.037528 \n \n \n- -0.385684 \n+ -0.385684 \n \n \n- 0.519818 \n+ 0.519818 \n \n \n \n \n \n \n \n- 8 \n+ 8 \n \n \n- 9.000000 \n+ 9.000000 \n \n \n- 1.686583 \n+ 1.686583 \n \n \n- -1.325963 \n+ -1.325963 \n \n \n- 1.428984 \n+ 1.428984 \n \n \n- -2.089354 \n+ -2.089354 \n \n \n \n \n \n \n \n- 9 \n+ 9 \n \n \n- 10.000000 \n+ 10.000000 \n \n \n- -0.129820 \n+ -0.129820 \n \n \n- 0.631523 \n+ 0.631523 \n \n \n- -0.586538 \n+ -0.586538 \n \n \n- 0.290720 \n+ 0.290720 \n \n \n \n \n
\n \n Our custom template accepts a table_title keyword. We can provide the value in the .render method.
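Illustrative only: MyStyler stands in for the custom-template Styler subclass discussed above (its definition is not part of this excerpt); the keyword is forwarded into the template:

    MyStyler(df).render(table_title='Extending Example')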
\n@@ -6426,15 +6426,15 @@\n \n \n \n \n \n Extending Example
\n \n-\n+\n \n \n \n A \n B \n C \n D \n@@ -6444,237 +6444,237 @@\n \n \n \n \n \n \n \n- 0 \n+ 0 \n \n \n- 1.000000 \n+ 1.000000 \n \n \n- 1.329212 \n+ 1.329212 \n \n \n- nan \n+ nan \n \n \n- -0.316280 \n+ -0.316280 \n \n \n- -0.990810 \n+ -0.990810 \n \n \n \n \n \n \n \n- 1 \n+ 1 \n \n \n- 2.000000 \n+ 2.000000 \n \n \n- -1.070816 \n+ -1.070816 \n \n \n- -1.438713 \n+ -1.438713 \n \n \n- 0.564417 \n+ 0.564417 \n \n \n- 0.295722 \n+ 0.295722 \n \n \n \n \n \n \n \n- 2 \n+ 2 \n \n \n- 3.000000 \n+ 3.000000 \n \n \n- -1.626404 \n+ -1.626404 \n \n \n- 0.219565 \n+ 0.219565 \n \n \n- 0.678805 \n+ 0.678805 \n \n \n- 1.889273 \n+ 1.889273 \n \n \n \n \n \n \n \n- 3 \n+ 3 \n \n \n- 4.000000 \n+ 4.000000 \n \n \n- 0.961538 \n+ 0.961538 \n \n \n- 0.104011 \n+ 0.104011 \n \n \n- nan \n+ nan \n \n \n- 0.850229 \n+ 0.850229 \n \n \n \n \n \n \n \n- 4 \n+ 4 \n \n \n- 5.000000 \n+ 5.000000 \n \n \n- 1.453425 \n+ 1.453425 \n \n \n- 1.057737 \n+ 1.057737 \n \n \n- 0.165562 \n+ 0.165562 \n \n \n- 0.515018 \n+ 0.515018 \n \n \n \n \n \n \n \n- 5 \n+ 5 \n \n \n- 6.000000 \n+ 6.000000 \n \n \n- -1.336936 \n+ -1.336936 \n \n \n- 0.562861 \n+ 0.562861 \n \n \n- 1.392855 \n+ 1.392855 \n \n \n- -0.063328 \n+ -0.063328 \n \n \n \n \n \n \n \n- 6 \n+ 6 \n \n \n- 7.000000 \n+ 7.000000 \n \n \n- 0.121668 \n+ 0.121668 \n \n \n- 1.207603 \n+ 1.207603 \n \n \n- -0.002040 \n+ -0.002040 \n \n \n- 1.627796 \n+ 1.627796 \n \n \n \n \n \n \n \n- 7 \n+ 7 \n \n \n- 8.000000 \n+ 8.000000 \n \n \n- 0.354493 \n+ 0.354493 \n \n \n- 1.037528 \n+ 1.037528 \n \n \n- -0.385684 \n+ -0.385684 \n \n \n- 0.519818 \n+ 0.519818 \n \n \n \n \n \n \n \n- 8 \n+ 8 \n \n \n- 9.000000 \n+ 9.000000 \n \n \n- 1.686583 \n+ 1.686583 \n \n \n- -1.325963 \n+ -1.325963 \n \n \n- 1.428984 \n+ 1.428984 \n \n \n- -2.089354 \n+ -2.089354 \n \n \n \n \n \n \n \n- 9 \n+ 9 \n \n \n- 10.000000 \n+ 10.000000 \n \n \n- -0.129820 \n+ -0.129820 \n \n \n- 0.631523 \n+ 0.631523 \n \n \n- -0.586538 \n+ -0.586538 \n \n \n- 0.290720 \n+ 0.290720 \n \n \n \n \n
\n \n For convenience, we provide the Styler.from_custom_template method that does the same as the custom subclass.
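A sketch of the convenience constructor; the search path and template file name are assumptions for illustration, not taken from this report:

    from pandas.io.formats.style import Styler

    EasyStyler = Styler.from_custom_template('templates', 'myhtml.tpl')
    EasyStyler(df).render(table_title='My Table')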
\n@@ -6697,15 +6697,15 @@\n \n \n \n \n \n My Table
\n \n-\n+\n \n \n \n A \n B \n C \n D \n@@ -6715,237 +6715,237 @@\n \n \n \n \n \n \n \n- 0 \n+ 0 \n \n \n- 1.000000 \n+ 1.000000 \n \n \n- 1.329212 \n+ 1.329212 \n \n \n- nan \n+ nan \n \n \n- -0.316280 \n+ -0.316280 \n \n \n- -0.990810 \n+ -0.990810 \n \n \n \n \n \n \n \n- 1 \n+ 1 \n \n \n- 2.000000 \n+ 2.000000 \n \n \n- -1.070816 \n+ -1.070816 \n \n \n- -1.438713 \n+ -1.438713 \n \n \n- 0.564417 \n+ 0.564417 \n \n \n- 0.295722 \n+ 0.295722 \n \n \n \n \n \n \n \n- 2 \n+ 2 \n \n \n- 3.000000 \n+ 3.000000 \n \n \n- -1.626404 \n+ -1.626404 \n \n \n- 0.219565 \n+ 0.219565 \n \n \n- 0.678805 \n+ 0.678805 \n \n \n- 1.889273 \n+ 1.889273 \n \n \n \n \n \n \n \n- 3 \n+ 3 \n \n \n- 4.000000 \n+ 4.000000 \n \n \n- 0.961538 \n+ 0.961538 \n \n \n- 0.104011 \n+ 0.104011 \n \n \n- nan \n+ nan \n \n \n- 0.850229 \n+ 0.850229 \n \n \n \n \n \n \n \n- 4 \n+ 4 \n \n \n- 5.000000 \n+ 5.000000 \n \n \n- 1.453425 \n+ 1.453425 \n \n \n- 1.057737 \n+ 1.057737 \n \n \n- 0.165562 \n+ 0.165562 \n \n \n- 0.515018 \n+ 0.515018 \n \n \n \n \n \n \n \n- 5 \n+ 5 \n \n \n- 6.000000 \n+ 6.000000 \n \n \n- -1.336936 \n+ -1.336936 \n \n \n- 0.562861 \n+ 0.562861 \n \n \n- 1.392855 \n+ 1.392855 \n \n \n- -0.063328 \n+ -0.063328 \n \n \n \n \n \n \n \n- 6 \n+ 6 \n \n \n- 7.000000 \n+ 7.000000 \n \n \n- 0.121668 \n+ 0.121668 \n \n \n- 1.207603 \n+ 1.207603 \n \n \n- -0.002040 \n+ -0.002040 \n \n \n- 1.627796 \n+ 1.627796 \n \n \n \n \n \n \n \n- 7 \n+ 7 \n \n \n- 8.000000 \n+ 8.000000 \n \n \n- 0.354493 \n+ 0.354493 \n \n \n- 1.037528 \n+ 1.037528 \n \n \n- -0.385684 \n+ -0.385684 \n \n \n- 0.519818 \n+ 0.519818 \n \n \n \n \n \n \n \n- 8 \n+ 8 \n \n \n- 9.000000 \n+ 9.000000 \n \n \n- 1.686583 \n+ 1.686583 \n \n \n- -1.325963 \n+ -1.325963 \n \n \n- 1.428984 \n+ 1.428984 \n \n \n- -2.089354 \n+ -2.089354 \n \n \n \n \n \n \n \n- 9 \n+ 9 \n \n \n- 10.000000 \n+ 10.000000 \n \n \n- -0.129820 \n+ -0.129820 \n \n \n- 0.631523 \n+ 0.631523 \n \n \n- -0.586538 \n+ -0.586538 \n \n \n- 0.290720 \n+ 0.290720 \n \n \n \n \n
\n \n Here\u2019s the template structure:
\n", "details": [{"source1": "html2text {}", "source2": "html2text {}", "unified_diff": "@@ -77,38 +77,38 @@\n The above output looks very similar to the standard DataFrame HTML\n representation. But we\u00e2\u0080\u0099ve done some work behind the scenes to attach CSS\n classes to each cell. We can view these by calling the .render method.\n [4]:\n df.style.highlight_null().render().split('\\n')[:10]\n [4]:\n ['\n+ ' }\n A B \n C D E \n tr> ',\n ' ',\n ' 0 ',\n ' 1.000000 ',\n ' 1.329212 ',\n ' nan ',\n ' -\n+id=\"T_4056c92e_ea13_11ef_835c_39a090989f27row0_col3\" class=\"data row0 col3\" >-\n 0.316280 ']\n The row0_col2 is the identifier for that particular cell. We\u00e2\u0080\u0099ve also\n prepended each row/column identifier with a UUID unique to each DataFrame so\n that the style from one doesn\u00e2\u0080\u0099t collide with the styling from another within\n the same notebook or page (you can set the uuid if you\u00e2\u0080\u0099d like to tie together\n the styling of two DataFrames).\n When writing style functions, you take care of producing the CSS attribute /\n"}]}, {"source1": "./usr/share/doc/python-pandas-doc/html/user_guide/style.ipynb.gz", "source2": "./usr/share/doc/python-pandas-doc/html/user_guide/style.ipynb.gz", "unified_diff": null, "details": [{"source1": "style.ipynb", "source2": "style.ipynb", "unified_diff": null, "details": [{"source1": "Pretty-printed", "source2": "Pretty-printed", "comments": ["Similarity: 0.9997853087998139%", "Differences: {\"'cells'\": \"{5: {'outputs': {0: {'data': {'text/html': {insert: [(1, ' A B C D E [\u2026]"], "unified_diff": "@@ -83,99 +83,99 @@\n \"execution_count\": 3,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" 
\\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 3,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -197,23 +197,23 @@\n \"execution_count\": 4,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/plain\": [\n \"[' A B C D E ',\\n\",\n+ \" ' } A B C D E ',\\n\",\n \" ' ',\\n\",\n- \" ' 0 ',\\n\",\n- \" ' 1.000000 ',\\n\",\n- \" ' 1.329212 ',\\n\",\n- \" ' nan ',\\n\",\n- \" ' -0.316280 ']\"\n+ \" ' 0 ',\\n\",\n+ \" ' 1.000000 ',\\n\",\n+ \" ' 1.329212 ',\\n\",\n+ \" ' nan ',\\n\",\n+ \" ' -0.316280 ']\"\n ]\n },\n \"execution_count\": 4,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -266,103 +266,103 @@\n \"execution_count\": 6,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ 
\" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 6,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -410,101 +410,101 @@\n \"execution_count\": 8,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 8,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -532,106 +532,106 @@\n \"execution_count\": 9,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 9,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -687,101 +687,101 @@\n \"execution_count\": 11,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 11,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -833,101 +833,101 @@\n \"execution_count\": 12,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 12,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -947,103 +947,103 @@\n \"execution_count\": 13,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 13,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1080,99 +1080,99 @@\n \"execution_count\": 14,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 100.00% \\n\",\n- \" 132.92% \\n\",\n- \" nan% \\n\",\n- \" -31.63% \\n\",\n- \" -99.08% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 200.00% \\n\",\n- \" -107.08% \\n\",\n- \" -143.87% \\n\",\n- \" 56.44% \\n\",\n- \" 29.57% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 300.00% \\n\",\n- \" -162.64% \\n\",\n- \" 21.96% \\n\",\n- \" 67.88% \\n\",\n- \" 188.93% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 400.00% \\n\",\n- \" 96.15% \\n\",\n- \" 10.40% \\n\",\n- \" nan% \\n\",\n- \" 85.02% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 500.00% \\n\",\n- \" 145.34% \\n\",\n- \" 105.77% \\n\",\n- \" 16.56% \\n\",\n- \" 51.50% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 600.00% \\n\",\n- \" -133.69% \\n\",\n- \" 56.29% \\n\",\n- \" 139.29% \\n\",\n- \" -6.33% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 700.00% \\n\",\n- \" 12.17% \\n\",\n- \" 120.76% \\n\",\n- \" -0.20% \\n\",\n- \" 162.78% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 800.00% \\n\",\n- \" 35.45% \\n\",\n- \" 103.75% \\n\",\n- \" -38.57% \\n\",\n- \" 51.98% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 900.00% \\n\",\n- \" 168.66% \\n\",\n- \" -132.60% \\n\",\n- \" 142.90% \\n\",\n- \" -208.94% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 1000.00% \\n\",\n- \" -12.98% \\n\",\n- \" 63.15% \\n\",\n- \" -58.65% \\n\",\n- \" 29.07% \\n\",\n+ \" 0 \\n\",\n+ \" 100.00% \\n\",\n+ \" 132.92% \\n\",\n+ \" nan% \\n\",\n+ \" -31.63% \\n\",\n+ \" -99.08% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 200.00% \\n\",\n+ \" -107.08% \\n\",\n+ \" -143.87% \\n\",\n+ \" 56.44% \\n\",\n+ \" 29.57% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 300.00% \\n\",\n+ \" -162.64% \\n\",\n+ \" 21.96% \\n\",\n+ \" 67.88% \\n\",\n+ \" 188.93% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 400.00% \\n\",\n+ \" 96.15% \\n\",\n+ \" 10.40% \\n\",\n+ \" nan% \\n\",\n+ \" 85.02% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 500.00% \\n\",\n+ \" 145.34% \\n\",\n+ \" 105.77% \\n\",\n+ \" 16.56% \\n\",\n+ \" 51.50% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 600.00% \\n\",\n+ \" -133.69% \\n\",\n+ \" 56.29% \\n\",\n+ \" 139.29% \\n\",\n+ \" -6.33% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 700.00% \\n\",\n+ \" 12.17% \\n\",\n+ \" 120.76% \\n\",\n+ \" -0.20% \\n\",\n+ \" 162.78% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 800.00% \\n\",\n+ \" 35.45% \\n\",\n+ \" 103.75% \\n\",\n+ \" -38.57% \\n\",\n+ \" 51.98% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 900.00% \\n\",\n+ \" 168.66% \\n\",\n+ \" -132.60% \\n\",\n+ \" 142.90% \\n\",\n+ \" -208.94% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 1000.00% \\n\",\n+ \" -12.98% \\n\",\n+ \" 63.15% \\n\",\n+ \" -58.65% \\n\",\n+ \" 29.07% \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 14,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1192,99 +1192,99 @@\n \"execution_count\": 15,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1000 \\n\",\n- \" nan \\n\",\n- \" -0.32 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -100 \\n\",\n- \" -1.438713 \\n\",\n- \" +0.56 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -200 \\n\",\n- \" 0.219565 \\n\",\n- \" +0.68 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 1000 \\n\",\n- \" 0.104011 \\n\",\n- \" +nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1000 \\n\",\n- \" 1.057737 \\n\",\n- \" +0.17 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -100 \\n\",\n- \" 0.562861 \\n\",\n- \" +1.39 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0000 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.00 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0000 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.39 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 2000 \\n\",\n- \" -1.325963 \\n\",\n- \" +1.43 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -000 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.59 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1000 \\n\",\n+ \" nan \\n\",\n+ \" -0.32 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -100 \\n\",\n+ \" -1.438713 \\n\",\n+ \" +0.56 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -200 \\n\",\n+ \" 0.219565 \\n\",\n+ \" +0.68 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 1000 \\n\",\n+ \" 0.104011 \\n\",\n+ \" +nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1000 \\n\",\n+ \" 1.057737 \\n\",\n+ \" +0.17 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -100 \\n\",\n+ \" 0.562861 \\n\",\n+ \" +1.39 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0000 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.00 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0000 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.39 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 2000 \\n\",\n+ \" -1.325963 \\n\",\n+ \" +1.43 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -000 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.59 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 15,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1304,99 +1304,99 @@\n \"execution_count\": 16,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" \\u00b11.33 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" \\u00b11.07 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" \\u00b11.63 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" \\u00b10.96 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" \\u00b11.45 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" \\u00b11.34 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" \\u00b10.12 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" \\u00b10.35 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" \\u00b11.69 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" \\u00b10.13 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" \\u00b11.33 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" \\u00b11.07 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" \\u00b11.63 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" \\u00b10.96 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" \\u00b11.45 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" \\u00b11.34 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" \\u00b10.12 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" \\u00b10.35 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" \\u00b11.69 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" \\u00b10.13 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 16,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1416,99 +1416,99 @@\n \"execution_count\": 17,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 100.00% \\n\",\n- \" 132.92% \\n\",\n- \" - \\n\",\n- \" -31.63% \\n\",\n- \" -99.08% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 200.00% \\n\",\n- \" -107.08% \\n\",\n- \" -143.87% \\n\",\n- \" 56.44% \\n\",\n- \" 29.57% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 300.00% \\n\",\n- \" -162.64% \\n\",\n- \" 21.96% \\n\",\n- \" 67.88% \\n\",\n- \" 188.93% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 400.00% \\n\",\n- \" 96.15% \\n\",\n- \" 10.40% \\n\",\n- \" - \\n\",\n- \" 85.02% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 500.00% \\n\",\n- \" 145.34% \\n\",\n- \" 105.77% \\n\",\n- \" 16.56% \\n\",\n- \" 51.50% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 600.00% \\n\",\n- \" -133.69% \\n\",\n- \" 56.29% \\n\",\n- \" 139.29% \\n\",\n- \" -6.33% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 700.00% \\n\",\n- \" 12.17% \\n\",\n- \" 120.76% \\n\",\n- \" -0.20% \\n\",\n- \" 162.78% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 800.00% \\n\",\n- \" 35.45% \\n\",\n- \" 103.75% \\n\",\n- \" -38.57% \\n\",\n- \" 51.98% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 900.00% \\n\",\n- \" 168.66% \\n\",\n- \" -132.60% \\n\",\n- \" 142.90% \\n\",\n- \" -208.94% \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 1000.00% \\n\",\n- \" -12.98% \\n\",\n- \" 63.15% \\n\",\n- \" -58.65% \\n\",\n- \" 29.07% \\n\",\n+ \" 0 \\n\",\n+ \" 100.00% \\n\",\n+ \" 132.92% \\n\",\n+ \" - \\n\",\n+ \" -31.63% \\n\",\n+ \" -99.08% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 200.00% \\n\",\n+ \" -107.08% \\n\",\n+ \" -143.87% \\n\",\n+ \" 56.44% \\n\",\n+ \" 29.57% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 300.00% \\n\",\n+ \" -162.64% \\n\",\n+ \" 21.96% \\n\",\n+ \" 67.88% \\n\",\n+ \" 188.93% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 400.00% \\n\",\n+ \" 96.15% \\n\",\n+ \" 10.40% \\n\",\n+ \" - \\n\",\n+ \" 85.02% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 500.00% \\n\",\n+ \" 145.34% \\n\",\n+ \" 105.77% \\n\",\n+ \" 16.56% \\n\",\n+ \" 51.50% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 600.00% \\n\",\n+ \" -133.69% \\n\",\n+ \" 56.29% \\n\",\n+ \" 139.29% \\n\",\n+ \" -6.33% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 700.00% \\n\",\n+ \" 12.17% \\n\",\n+ \" 120.76% \\n\",\n+ \" -0.20% \\n\",\n+ \" 162.78% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 800.00% \\n\",\n+ \" 35.45% \\n\",\n+ \" 103.75% \\n\",\n+ \" -38.57% \\n\",\n+ \" 51.98% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 900.00% \\n\",\n+ \" 168.66% \\n\",\n+ \" -132.60% \\n\",\n+ \" 142.90% \\n\",\n+ \" -208.94% \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 1000.00% \\n\",\n+ \" -12.98% \\n\",\n+ \" 63.15% \\n\",\n+ \" -58.65% \\n\",\n+ \" 29.07% \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 17,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1528,101 +1528,101 @@\n \"execution_count\": 18,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" - \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" - \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" - \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" - \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 18,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1649,101 +1649,101 @@\n \"execution_count\": 19,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 19,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1763,204 +1763,204 @@\n \"execution_count\": 20,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 5 \\n\",\n- \" 6.000000 \\n\",\n- \" -1.336936 \\n\",\n- \" 0.562861 \\n\",\n- \" 1.392855 \\n\",\n- \" -0.063328 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 6 \\n\",\n- \" 7.000000 \\n\",\n- \" 0.121668 \\n\",\n- \" 1.207603 \\n\",\n- \" -0.002040 \\n\",\n- \" 1.627796 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 7 \\n\",\n- \" 8.000000 \\n\",\n- \" 0.354493 \\n\",\n- \" 1.037528 \\n\",\n- \" -0.385684 \\n\",\n- \" 0.519818 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 8 \\n\",\n- \" 9.000000 \\n\",\n- \" 1.686583 \\n\",\n- \" -1.325963 \\n\",\n- \" 1.428984 \\n\",\n- \" -2.089354 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 9 \\n\",\n- \" 10.000000 \\n\",\n- \" -0.129820 \\n\",\n- \" 0.631523 \\n\",\n- \" -0.586538 \\n\",\n- \" 0.290720 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 5 \\n\",\n+ \" 6.000000 \\n\",\n+ \" -1.336936 \\n\",\n+ \" 0.562861 \\n\",\n+ \" 1.392855 \\n\",\n+ \" -0.063328 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 6 \\n\",\n+ \" 7.000000 \\n\",\n+ \" 0.121668 \\n\",\n+ \" 1.207603 \\n\",\n+ \" -0.002040 \\n\",\n+ \" 1.627796 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 7 \\n\",\n+ \" 8.000000 \\n\",\n+ \" 0.354493 \\n\",\n+ \" 1.037528 \\n\",\n+ \" -0.385684 \\n\",\n+ \" 0.519818 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 8 \\n\",\n+ \" 9.000000 \\n\",\n+ \" 1.686583 \\n\",\n+ \" -1.325963 \\n\",\n+ \" 1.428984 \\n\",\n+ \" -2.089354 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 9 \\n\",\n+ \" 10.000000 \\n\",\n+ \" -0.129820 \\n\",\n+ \" 0.631523 \\n\",\n+ \" -0.586538 \\n\",\n+ \" 0.290720 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 20,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -1985,107 +1985,107 @@\n \"execution_count\": 21,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" } A B C D E \\n\",\n \" \\n\",\n- \" 0 \\n\",\n- \" 1.000000 \\n\",\n- \" 1.329212 \\n\",\n- \" nan \\n\",\n- \" -0.316280 \\n\",\n- \" -0.990810 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 1 \\n\",\n- \" 2.000000 \\n\",\n- \" -1.070816 \\n\",\n- \" -1.438713 \\n\",\n- \" 0.564417 \\n\",\n- \" 0.295722 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 2 \\n\",\n- \" 3.000000 \\n\",\n- \" -1.626404 \\n\",\n- \" 0.219565 \\n\",\n- \" 0.678805 \\n\",\n- \" 1.889273 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 3 \\n\",\n- \" 4.000000 \\n\",\n- \" 0.961538 \\n\",\n- \" 0.104011 \\n\",\n- \" nan \\n\",\n- \" 0.850229 \\n\",\n- \" \\n\",\n- \" \\n\",\n- \" 4 \\n\",\n- \" 5.000000 \\n\",\n- \" 1.453425 \\n\",\n- \" 1.057737 \\n\",\n- \" 0.165562 \\n\",\n- \" 0.515018 \\n\",\n+ \" 0 \\n\",\n+ \" 1.000000 \\n\",\n+ \" 1.329212 \\n\",\n+ \" nan \\n\",\n+ \" -0.316280 \\n\",\n+ \" -0.990810 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 1 \\n\",\n+ \" 2.000000 \\n\",\n+ \" -1.070816 \\n\",\n+ \" -1.438713 \\n\",\n+ \" 0.564417 \\n\",\n+ \" 0.295722 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 2 \\n\",\n+ \" 3.000000 \\n\",\n+ \" -1.626404 \\n\",\n+ \" 0.219565 \\n\",\n+ \" 0.678805 \\n\",\n+ \" 1.889273 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 3 \\n\",\n+ \" 4.000000 \\n\",\n+ \" 0.961538 \\n\",\n+ \" 0.104011 \\n\",\n+ \" nan \\n\",\n+ \" 0.850229 \\n\",\n+ \" \\n\",\n+ \" \\n\",\n+ \" 4 \\n\",\n+ \" 5.000000 \\n\",\n+ \" 1.453425 \\n\",\n+ \" 1.057737 \\n\",\n+ \" 0.165562 \\n\",\n+ \" 0.515018 \\n\",\n \" \\n\",\n \"
\"\n ],\n \"text/plain\": [\n- \"\"\n+ \"\"\n ]\n },\n \"execution_count\": 21,\n \"metadata\": {},\n \"output_type\": \"execute_result\"\n }\n ],\n@@ -2099,108 +2099,108 @@\n \"execution_count\": 22,\n \"metadata\": {},\n \"outputs\": [\n {\n \"data\": {\n \"text/html\": [\n \" A B C D E \\n\",\n+ \" }