diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..afb34fe --- /dev/null +++ b/.gitignore @@ -0,0 +1,28 @@ +# Temp and compilation +*.pyc +.DS_Store +MANIFEST.in +dcblockmodels.egg-info/ +pyvenv.cfg +__pycache__ +.cache + +# Generated by the code +*.pdf +model_debug_output/ +saved_models/ +datasets/ +*.log +*checkpoint.ipynb +.pytest_cache +.ipynb_checkpoints +*.npz +*.npy + +# Git & IDE +.git +*sublime-project +*sublime-workspace + +# Misc +README_template.md \ No newline at end of file diff --git a/LICENSE b/LICENSE index 261eeb9..6a6b4b7 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,7 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ +Copyright 2023 Thales Group / Université de Paris - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - 1. Definitions. +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md index 0c4ebac..37ed08c 100644 --- a/README.md +++ b/README.md @@ -1,44 +1,153 @@ -# Thales Open Source Template Project +# Dynamic/Constrained Block Models -Template for creating a new project in the [Thales GitHub organization](https://github.com/ThalesGroup). +Version 1.0 -Each Thales OSS project repository **MUST** contain the following files at the root: +## Get started -- a `LICENSE` which has been chosen in accordance with legal department depending on your needs +This package implements algorithms for co-clustering count data based on the Latent Block Model (LBM). There are two main models : a dynamic LBM `dLBM` for data represented as a series of adjacency matrices and a semi-supervised (or constrained) LBM `HLBM` using pairwise constraints in both row and column space. 
This package allows sampling data from the models, plotting the data, fitting the models, measuring the clustering performances and analyzing the behavior of the parameters during inference. For more details, see: -- a `README.md` outlining the project goals, sponsoring sig, and community contact information, [GitHub tips about README.md](https://docs.github.com/en/github/creating-cloning-and-archiving-repositories/about-readmes) +### References -- a `CONTRIBUTING.md` outlining how to contribute to the project, how to submit a pull request and an issue +Relevant articles & thesis: -- a `SECURITY.md` outlining how the security concerns are handled, [GitHub tips about SECURITY.md](https://docs.github.com/en/github/managing-security-vulnerabilities/adding-a-security-policy-to-your-repository) +- Paul Riverain, Simon Fossier, and Mohamed Nadif. “Poisson Degree Corrected Dynamic Stochastic Block Model.” Advances in Data Analysis and Classification, February 27, 2022. https://doi.org/10.1007/s11634-022-00492-9. +- Paul Riverain, Simon Fossier, and Mohamed Nadif. “Semi-Supervised Latent Block Model with Pairwise Constraints.” Machine Learning 111, no. 5 (May 1, 2022): 1739–64. https://doi.org/10.1007/s10994-022-06137-4. +- Paul Riverain, "Intégration de connaissance métier dans des algorithmes d'apprentissage non supervisé pour le transport ferroviaire", PhD Thesis -Below is an example of the common structure and information expected in a README. 
+## Installing -**Please keep this structure as is and only fill the content for each section according to your project.** +### Dependencies -If you need assistance or have question, please contact oss@thalesgroup.com +**Base:** -## Get started + - numpy 1.21 + - scipy + - numba + +**Interactive:** + + - notebook 5.7.10 + - jupyter_contrib_nbextensions + - jupyter_nbextensions_configurator + - matplotlib + - networkx + - seaborn + - plotly + - pandas + - prince : for Correspondence Analysis + - nltk : for notebook of text processing + +**Metrics:** + + - sparsebm : for Co-clustering ARI (CARI) + +**Initialization:** + + - spherecluster + - scikit-learn 0.20 : 0.20 because of spherecluster + + +### Install + +*Note*: the tests won't pass if you do not at least install the `[initialization]`, which requires a python3.7 installation. + +Within the source folder, run: + + python -m pip install --upgrade pip + python -m pip install -e .[all] + jupyter contrib nbextension install --user + python -m pytest ./dcblockmodels/tests/*short.py # short tests + # or + python -m pytest ./dcblockmodels/tests/*long.py # long tests + +Notes: + +- In the previous commands, `python` should be replaced by the Python executable for which you want to install dcblockmodels. As a good practice, you can create a virtual environment. +- The `-e` option in the install command is not required, but it installs the code in editable mode. This allows you to pull code updates on the git server without having to reinstall it. + + +### Rationale for package dependencies + +The package in its core only depends on classical packages (numpy, scipy, numba) and could be compatible with any recent python version. However, the performances of the model depends in part of the initial partition it is given. This initialization can be done with k-means or spherical k-means, the latter being particularly suited to a Poisson LBM with margins. 
An efficient implementation of k-means is available in `sklearn`, but no implementation of spherical k-means is in `sklearn`. For this reason, we used the implementation of spherical k-means of the package `spherecluster`. This package uses private methods of a version of `sklearn<=0.20`. This results in a chain of dependencies that implies a requirement to `python 3.7`. Thus, a fully functioning version of the proposed package requires `python 3.7` and `spherecluster`. Note that the `fit()` methods of `HLBM` and `dLBM` have `given_Z` and `given_W` parameters that allow giving row and column initial partitions directly. -XXX project purpose it to ... -**Please also add the description into the About section (Description field)** +## Testing + +Testing such algorithms is complicated. The proposed tests sample data from the model and check that the clustering metrics are not too low. If the tests do not pass because the code breaks, there is a problem. If the tests do not pass because the metrics are too low, this could be due to bad luck (complex sampled data or bad initialization) or to a real problem in the code. In the tests, the clustering metrics are compared to thresholds for different setups of sampled data. These thresholds can be changed if needed. + ## Documentation -Documentation is available at [xxx/docs](https://xxx/docs/). +### Code style + +The specifics of this codebase in terms of code style are described in [`codestyle.md`](./docs/codestyle.md). 
+ +### Code outline + +#### Package structure + +- the `dcblockmodels` directory contains the models +- the `notebooks` directory contains the sample notebooks +- the `alluvial` directory contains d3.js/HTML/CSS code to create an alluvial diagram for MTR network passenger flow monitoring +- the `saved_models` directory contains the saved fitted models in the form of pickle files + + +#### Main module + +The main module `dcblockmodels` contains: +- the `models` directory that contains the implementations of the models +- the `tests` directory that contains the tests of the models +- `data.py` that contains methods to sample data from static or dynamic LBM/SBM +- `metrics.py` that contains methods to measure the quality of a partition of the data or to measure the class separability given the ground truth +- `plot.py` that contains different plotting functions -You can use [GitHub pages](https://guides.github.com/features/pages/) to create your documentation. +#### The `utils` submodule -See an example here : https://github.com/ThalesGroup/ThalesGroup.github.io +Both `dLBM` and `HLBM` inherit from `blockmodels` that mainly takes care of input check, parameter save for the debug mode and model save. As `dLBM` and `HLBM` share common characteristics, some computations can be done with the same function. The idea is to put as much code as possible in `/dcblockmodels/models/utils` directory in the form of static functions. 
The `/dcblockmodels/models/utils` directory is separated into 7 files: +- `absent_nodes.py` : creates a class to efficiently deal with the indexes of absent and appearing nodes +- `consensus.py` : performs different kinds of consensus clustering based on a series of partitions of the data +- `e_step.py` : contains the methods for the E-step of the EM algorithm +- `general.py` : contains functions that can be used in all modules +- `m_step.py` : contains the methods for the M-step of the EM algorithm +- `similarity_matrices.py` : contains methods that deal with the pairwise similarity matrices (i.e. the semi-supervision) used for `HLBM` +- `smoothing_schedule.py` : the class that build a smoothing schedule for `dLBM` -**Please also add the documentation URL into the About section (Website field)** +### Notations + +Conventions used in the code: + + N : number of row nodes + D : number of column nodes + ND: general argument, could be N or D + + Kz : number of row clusters + Kw : number of column clusters + Kzw: general argument, could be Kz or Kw + + *indexes* : + i and j are nodes (i.e. row or column), + t is a timestep, + k and l are cluster indexes + + Z[t, i, k], W[t, j, l] are respectively the row (resp. columns) variational probabilities in VEM or the cluster indicator matrices in CEM + ZW : is a general argument and could be Z or W + qz[t, i, k, k'], qw[t, i, l, l'] : variational transition proba + qzw : general argument, could be qz or qw + + alpha_beta: general argument, could be alpha or beta + pi_rho: general argument, could be pi or rho + + ZW_app[t_i, k] : posterior proba of appearing node of size n_appearing_nodes x Kzw + app_zw_map[t] = {i: t_i} + the mapping between time step t, node index i and the index t_i used in ZW_app + app_zw_map is a list of T dicts ## Contributing -If you are interested in contributing to the XXX project, start by reading the [Contributing guide](/CONTRIBUTING.md). 
+If you are interested in contributing to the dcblockmodels project, start by reading the [Contributing guide](/CONTRIBUTING.md). ## License -The chosen license in accordance with legal department must be defined into an explicit [LICENSE](https://github.com/ThalesGroup/template-project/blob/master/LICENSE) file at the root of the repository -You can also link this file in this README section. +[MIT License, (c) Thales Group / Université de Paris, 2022](/LICENSE) + +* d3.js and d3-sankey.js licensed under BSD 3-clause, Copyright Mike Bostock \ No newline at end of file diff --git a/SECURITY.md b/SECURITY.md index 572fe14..c663fae 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,45 +1,15 @@ -Describe here all the security policies in place on this repository to help your contributors to handle security issues efficiently. +## Good practices to follow -## Goods practices to follow - -:warning:**You must never store credentials information into source code or config file in a GitHub repository** -- Block sensitive data being pushed to GitHub by git-secrets or its likes as a git pre-commit hook -- Audit for slipped secrets with dedicated tools -- Use environment variables for secrets in CI/CD (e.g. GitHub Secrets) and secret managers in production +:warning:**Never store credentials information into source code or config file** # Security Policy ## Supported Versions -Use this section to tell people about which versions of your project are currently being supported with security updates. - | Version | Supported | | ------- | ------------------ | -| 5.1.x | :white_check_mark: | -| 5.0.x | :x: | -| 4.0.x | :white_check_mark: | -| < 4.0 | :x: | +| 1.0 | :white_check_mark: | ## Reporting a Vulnerability -Use this section to tell people how to report a vulnerability. -Tell them where to go, how often they can expect to get an update on a reported vulnerability, what to expect if the vulnerability is accepted or declined, etc. 
- -You can ask for support by contacting security@opensource.thalesgroup.com - -## Disclosure policy - -Define the procedure for what a reporter who finds a security issue needs to do in order to fully disclose the problem safely, including who to contact and how. - -## Security Update policy - -Define how you intend to update users about new security vulnerabilities as they are found. - -## Security related configuration - -Settings users should consider that would impact the security posture of deploying this project, such as HTTPS, authorization and many others. - -## Known security gaps & future enhancements - -Security improvements you haven’t gotten to yet. -Inform users those security controls aren’t in place, and perhaps suggest they contribute an implementation +To report a vulnerability, please use contact security@opensource.thalesgroup.com diff --git a/alluvial/LICENSE-d3 b/alluvial/LICENSE-d3 new file mode 100644 index 0000000..a626880 --- /dev/null +++ b/alluvial/LICENSE-d3 @@ -0,0 +1,27 @@ +Copyright 2010-2017 Mike Bostock +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +* Neither the name of the author nor the names of contributors may be used to + endorse or promote products derived from this software without specific prior + written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/alluvial/alluvial_dashboard.js b/alluvial/alluvial_dashboard.js new file mode 100644 index 0000000..23edd63 --- /dev/null +++ b/alluvial/alluvial_dashboard.js @@ -0,0 +1,974 @@ +///////////////////////////////////////////////////////////////// +/////////////// Constants //////////////////////////////////// +///////////////////////////////////////////////////////////////// + + +// TODO: +// afficher pi et rho (a la plac de gmma, avecc un switch) + +data_name = 'all_30_min_interval_july'; //all_10_min_interval_july_1515_1615, all_10_min_interval_july_404_504 + +// all_15_min_interval, center_15_min_interval, all_30_min_interval_july + +data_file = './alluvial_data/' + data_name + '_data.json'; +image_path = './images/' + data_name + '/'; + +onlyTickHour = false; +delta_ticks_fs = 3.; + +// initial values for important params for svg +nodeWidth = 20; +nodePadding = 10; +widthPerTimestep = 150; +heightPerCluster = 80; +ticksFontSize = 10; + +// old +//nodeWidth = 60; +//nodePadding = 20; +//widthPerTimestep = 220; +//heightPerCluster = 330; +//ticksFontSize = 45; + + +initMode = 'entry'; + +xMargin = 250; +yTopMargin = 100; +yBottomMargin = 25; + +margins = { + 'left': xMargin, + 'right': xMargin, + 'top': yTopMargin, + 'bottom': yBottomMargin +}; + +nodeRx = 4; +nodeRy = 4; + +nIterationsSankey = 50; + +// Can be replaced by 'none', 'path', 'input' or 'output' +edgeColor = 'input' + +// the number associated to the 
emtpy cluster +// set to -1 to show the empty cluster +emptyCluster = -1; + +minFlowStation = 2; +minFlowBlock = 50; + +offsetNodeName = 10; +xOffsetTooltip = - 200; +yOffsetTooltip = 50; + +linkStrokeOpacity = .5; +linkOpacity = .5; +nodeOpacity = .8; +nodesOpaqueOpacity = .15; + +timestepScrollCorrectFactor = .995; + + +///////////////////////////////////////////////////////////////// +/////////////// Functions //////////////////////////////////// +///////////////////////////////////////////////////////////////// + +function hideShowSankeySettings(button){ + if (button.value === 'hidden'){ + document.getElementById('selectSankeyOptions').style.display = 'inline'; + button.innerHTML = 'Hide Sankey Settings'; + button.value = 'shown'; + } + else if (button.value == 'shown'){ + document.getElementById('selectSankeyOptions').style.display = 'none'; + button.innerHTML = 'Show Sankey Settings'; + button.value = 'hidden'; + } +} + +// Just a UUID generator +function uuidv4() { + return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, function(c) { + var r = Math.random() * 16 | 0, v = c == 'x' ? r : (r & 0x3 | 0x8); + return v.toString(16); + }); +} + +var processLineText = function(o) { + frac = o[0] + stations = o[1] + codes = o[2] + + content = codes.map((v, i) => v + ' - ' + stations[i]) + + content = '' + JSON.stringify(content) + .replace(/"/g, '') + .replace(/,/g, '') + .replace('[', '') + .replace(']', '') + + return { + title:frac, + content: content + }; +} + +function tooltipContentNode(d, mode){ + + units = (mode === 'block')? 'OD pairs': 'stations' + + s = 'Cluster: ' + d.clusterName + '' + + 'Date: ' + new Date(d.datetime).toLocaleString() + '' + + 'Number of ' + units + ': ' + d.value + '' + + 'Arrivals: ' + d.arrivals + '' + + 'Departures: ' + d.departures + '' + + 'From inactive cluster: ' + d.flowFromEmptyCluster + return s; +}; + +function tooltipContentLink(d, mode){ + units = (mode === 'block')? 
'OD pairs': 'stations' + + s = 'Source: ' + d.source.clusterName +'' + + 'Target: ' + d.target.clusterName + '' + + 'Number of ' + units + ': ' + d.value + '' + + 'Date: ' + new Date(d.target.datetime).toLocaleString() + return s; +}; + +function initSankeyOptionRanges(nodeWidth, nodePadding, widthPerTimestep, heightPerCluster){ + document.getElementById('nodeWidthText').innerHTML = 'node width = ' + nodeWidth; + document.getElementById('nodePaddingText').innerHTML = 'node padding = ' + nodePadding; + document.getElementById('widthText').innerHTML = 'width = ' + widthPerTimestep; + document.getElementById('heightText').innerHTML = 'height = ' + heightPerCluster; + + document.getElementById('sankeyNodeWidth').value = nodeWidth; + document.getElementById('sankeyPadding').value = nodePadding; + document.getElementById('sankeyWidth').value = widthPerTimestep; + document.getElementById('sankeyHeight').value = heightPerCluster; +} + +function selectMode(val){ + + if (val === 'exit'){ + document + .getElementById('title') + .innerHTML = 'Clusters of exit stations over time'; + + document.getElementById('entryRoutemapContainer') + .appendChild(document.getElementById('details')) + } + else if (val == 'entry'){ + document + .getElementById('title') + .innerHTML = 'Clusters of entry stations over time'; + + document.getElementById('exitRoutemapContainer') + .appendChild(document.getElementById('details')) + } + else if (val == 'block'){ + document + .getElementById('title') + .innerHTML = 'Blocks over time'; + }; + + updateSankeyMode(val); + + document.getElementById('details') + .style.cssText = 'display: none'; + +} + +function updateSankeyMode(val) { + nodeWidth = document.getElementById('sankeyNodeWidth').value; + nodePadding = document.getElementById('sankeyPadding').value; + widthPerTimestep = document.getElementById('sankeyWidth').value; + heightPerCluster = document.getElementById('sankeyHeight').value; + + d3.json(data_file) + .then(d => prepareHtml(d, val)); + + 
d3.json(data_file) + .then(drawSVGAndAnimate(val, nodeWidth, nodePadding, widthPerTimestep, heightPerCluster)); +} + +function updateSankeyNodeWidth(val) { + document.getElementById('nodeWidthText').innerHTML = 'node width = ' + val; + + mode = document.getElementById('selectMode').value; + nodePadding = document.getElementById('sankeyPadding').value; + widthPerTimestep = document.getElementById('sankeyWidth').value; + heightPerCluster = document.getElementById('sankeyHeight').value; + + d3.json(data_file) + .then(drawSVGAndAnimate(mode, val, nodePadding, widthPerTimestep, heightPerCluster)); +} + +function updateSankeyNodePadding(val) { + document.getElementById('nodePaddingText').innerHTML = 'node padding = ' + val; + + mode = document.getElementById('selectMode').value; + nodeWidth = document.getElementById('sankeyNodeWidth').value; + widthPerTimestep = document.getElementById('sankeyWidth').value; + heightPerCluster = document.getElementById('sankeyHeight').value; + + d3.json(data_file) + .then(drawSVGAndAnimate(mode, nodeWidth, val, widthPerTimestep, heightPerCluster)); +} + +function updateSankeyWidth(val) { + document.getElementById('widthText').innerHTML = 'width = ' + val; + + mode = document.getElementById('selectMode').value; + nodeWidth = document.getElementById('sankeyNodeWidth').value; + nodePadding = document.getElementById('sankeyPadding').value; + heightPerCluster = document.getElementById('sankeyHeight').value; + + d3.json(data_file) + .then(drawSVGAndAnimate(mode, nodeWidth, nodePadding, val, heightPerCluster)); +} + +function updateSankeyHeight(val) { + document.getElementById('heightText').innerHTML = 'height = ' + val; + + mode = document.getElementById('selectMode').value; + nodeWidth = document.getElementById('sankeyNodeWidth').value; + nodePadding = document.getElementById('sankeyPadding').value; + widthPerTimestep = document.getElementById('sankeyWidth').value; + + d3.json(data_file) + .then(drawSVGAndAnimate(mode, nodeWidth, nodePadding, 
widthPerTimestep, val)); +} + +function drawSVGAndAnimate(mode, nodeWidth, nodePadding, widthPerTimestep, heightPerCluster){ + return d => drawSVGAndAnimatep(d, mode, nodeWidth, nodePadding, widthPerTimestep, heightPerCluster); +} + +function updateTickFs(ticksFontSize){ + document + .getElementById('ticksFontSizeText') + .innerHTML = 'tick font size = ' + ticksFontSize; + + document + .getElementById('sankeyTickFs') + .value = ticksFontSize; + + d3.selectAll('.tick') + .style('font-size', ticksFontSize + 'px'); +}; + +function prepareHtml(data, mode) { + + // data-dependant html structure + + // select station or a group of stations + // and see its trajectory + d3.select('#selectStationOptionsStat') + .selectAll('select') + .remove(); + + d3.select('#selectStationOptionsStat') + .append('select') + .attr('class','select') + .attr('id', 'selectStation') + + d3.select('#selectStationOptionsLine') + .selectAll('select') + .remove(); + + d3.select('#selectStationOptionsLine') + .append('select') + .attr('class','select') + .attr('id', 'selectLine') + + d3.select('#selectCluster') + .selectAll('select') + .remove(); + + d3.select('#selectCluster') + .append('select') + .attr('class','select') + .attr('id', 'selectClusterVal') + + // cluster selection options + d3.select('#selectClusterVal') + .selectAll('option') + .remove() + + if (mode === 'block'){ + document.getElementById('selectStationOptionsStat') + .style.cssText = 'display: none'; + document.getElementById('selectStationOptionsLine') + .style.cssText = 'display: none'; + document.getElementById('selectCluster') + .style.cssText = 'display: none'; + } + else{ + document.getElementById('selectStationOptionsStat') + .style.cssText = 'display: inherit'; + document.getElementById('selectStationOptionsLine') + .style.cssText = 'display: inherit'; + document.getElementById('selectCluster') + .style.cssText = 'display: inherit'; + + stationNames = data['stations'].sort(function (s1, s2){ + return s1.toLowerCase() > 
s2.toLowerCase() ? 1: -1 + }); + stationNames = ['None'].concat(stationNames); + lines = data['lines']; + + if (mode =='entry'){ + nbClusters = data['nbEntryClusters'] + } + else if (mode === 'exit'){ + nbClusters = data['nbExitClusters'] + } + clusterList = ['None'].concat([...Array(nbClusters).keys()]) + + // stations selection options + d3.select('#selectStation') + .selectAll('option') + .data(stationNames).enter() + .append('option') + .text(d => d); + + // lines selection options + d3.select('#selectLine') + .selectAll('option') + .data(['None'].concat(Object.keys(lines))).enter() + .append('option') + .text(d => d); + + d3.select('#selectClusterVal') + .selectAll('option') + .data(clusterList).enter() + .append('option') + .text(d => d); + + d3.select('#details') + .selectAll('div').remove(); + + // station details div + for (line in lines){ + + var lineDiv = d3.select('#details') + .append('div') + .attr('class', 'lineContainer') + .attr('id', 'lineContainer' + line) + + lineDiv + .append('div') + .attr('class', 'lineTitleContainer') + .attr('id', 'lineTitleContainer' + line) + .text(line) + + lineDiv + .append('div') + .attr('class', 'lineContentContainer') + .attr('id', 'lineContentContainer' + line) + } + }; +} + +// Define Sankey function that will return +// the nodes and links that will be +// used in the SVG +var sankey = function( + nodeWidth, + nodePadding, + margins){ + return d3.sankey() + .nodeId(d => d.name) + .nodeAlign(d3.sankeyCenter) + .nodeWidth(nodeWidth) + .nodePadding(nodePadding) + .nodeSort((n1, n2) => n1.cluster >= n2.cluster? 
1: -1) + .iterations(nIterationsSankey) + .extent( + [[margins.left, margins.bottom], + [width - margins.right, height - margins.top]]) +} + + +function drawSVGAndAnimatep( + data, + mode, + nodeWidth, + nodePadding, + widthPerTimestep, + heightPerCluster) { + + console.log(data); + + d3.select('#sankeySVG').remove(); + d3.select('#tooltip').remove(); + + nbTimesteps = data['nbTimesteps']; + timesteps = data['timesteps']; + daysOfWeek = data['daysOfWeek']; + stationNames = data['stations']; + stationNames = ['None'].concat(stationNames); + lines = data['lines']; + + if (mode === 'entry'){ + nbClusters = data['nbEntryClusters']; + trajectories = data['entryTrajectories']; + cmap = data['entryClusterCmap']; + height = nbClusters * heightPerCluster; + } else if (mode === 'exit'){ + nbClusters = data['nbExitClusters']; + trajectories = data['exitTrajectories']; + cmap = data['exitClusterCmap']; + height = nbClusters * heightPerCluster; + } else if (mode === 'block'){ + nbClusters = data['nbBlocks']; + trajectories = null; + cmap = data['blockCmap']; + height = .3 * nbClusters * heightPerCluster; + }; + + function color(d){ + return cmap[d.cluster]; + } + width = nbTimesteps * widthPerTimestep; + + // Initialize SVG + svg = d3.select('#sankeyDiagram') + .append('svg') + .attr('id', 'sankeySVG') + .attr('viewBox', [0, 0, width, height]) + .attr('class', 'svg-background'); + + // create a tooltip + var tooltip = d3.select('#sankeyDiagram') + .append('div') + .attr('id', 'tooltip') + .style('opacity', 0) + .attr('class', 'tooltip') + + // Add zoom + function svgZoom(){ + svg.attr('transform', d3.event.transform) + } + var zoom = d3.zoom() + .scaleExtent([.1, 1.5]) + .on('zoom', svgZoom); + d3.select('#sankeyDiagram').call(zoom); + + // Retrieve D3-generated nodes and links (i.e. 
positioned in the page) + // nodes and links are js object returned by d3-sankey + // that will be used to create the SVG elements + // node and link + var sankeyF = sankey(nodeWidth, nodePadding, margins); + + if (mode === 'entry'){ + var {nodes, links} = sankeyF({ + nodes: data.entryNodes.map(d => Object.assign({}, d)), + links: data.entryLinks.map(d => Object.assign({}, d)) + }); + } else if (mode === 'exit'){ + var {nodes, links} = sankeyF({ + nodes: data.exitNodes.map(d => Object.assign({}, d)), + links: data.exitLinks.map(d => Object.assign({}, d)) + }); + } else if (mode === 'block'){ + var {nodes, links} = sankeyF({ + nodes: data.blockNodes.map(d => Object.assign({}, d)), + links: data.blockLinks.map(d => Object.assign({}, d)) + }); + } + + minFlow = (mode === 'block') ? minFlowBlock : minFlowStation; + + console.log(nodes); + console.log(links); + + // map the node data to SVG + // creates a for each node (=cluster) + // that will contain two rects : + // a regular one and an opaque one + // the opaque one will be used to represent + // trajectories for a group of stations + const nodeGroup = svg.append('g') + .selectAll('g') + .data(nodes) // Data is linked here with DOM element + .join( + enter => enter + .filter(d => d.cluster != emptyCluster) + .append('g') + ); + + const nodeOpaque = nodeGroup + .append('rect') + .attr('x', d => d.x0) + .attr('y', d => d.y0) + .attr('index', d => d.index) + .attr('rx', nodeRx) + .attr('ry', nodeRy) + .attr('height', d => 0.) 
+ .attr('width', d => d.x1 - d.x0) + .attr('class', 'nodesOpaque') + .attr('fill', d => color(d)) + .style('opacity', nodesOpaqueOpacity) + .attr('pointer-events', 'none'); + + const node = nodeGroup + .append('rect') + .attr('x', d => d.x0) + .attr('y', d => d.y0) + .attr('index', d => d.index) + .attr('rx', nodeRx) + .attr('ry', nodeRy) + .attr('height', d => d.y1 - d.y0) + .attr('width', d => d.x1 - d.x0) + .attr('class', 'nodes') + .attr('fill', d => color(d)) + .style('opacity', nodeOpacity); + + // Add link elements to SVG + const link = svg.append('g') + .attr('fill', 'none') + .attr('opacity', linkOpacity) + .attr('stroke-opacity', linkStrokeOpacity) + .selectAll('g') + .data(links) + .join('g') + .filter(d => d.value >= minFlow) + .filter(d => d.source.cluster != emptyCluster) + .filter(d => d.target.cluster != emptyCluster) + .style('mix-blend-mode', 'multiply'); + + // Define the correct color or gradient to the link + if (edgeColor === 'path') { + const gradient = link.append('linearGradient') + .attr('id', d => (d.uid = uuidv4())) + .attr('gradientUnits', 'userSpaceOnUse') + .attr('x1', d => d.source.x1) + .attr('x2', d => d.target.x0); + + gradient.append('stop') + .attr('offset', '0%') + .attr('stop-color', d => color(d.source)); + + gradient.append('stop') + .attr('offset', '100%') + .attr('stop-color', d => color(d.target)); + } + + // Apply the actual path in the link + link.append('path') + .attr('d', d3.sankeyLinkHorizontal()) + .attr('stroke', d => edgeColor === 'none' ? '#aaa' + : edgeColor === 'path' ? `url(#${d.uid}` + : edgeColor === 'input' ? 
color(d.source) + : color(d.target)) + .attr('stroke-width', d => Math.max(1, d.width)); + + + // highlights the trajectory of a station + d3.select('#selectStation') + .on('change',onchangeStation) + d3.select('#selectLine') + .on('change',onchangeLine) + + function onchangeStation() { + selectValue = d3.select('#selectStation').property('value'); + svg.selectAll('rect') + .style('stroke', 'none'); + + if (selectValue !== 'None'){ + selectTraj = trajectories[selectValue]; + svg.selectAll('rect') + .filter(d => selectTraj.includes(d.name)) + .style('stroke', 'black'); + } + }; + + function onchangeLine() { + selectValue = d3.select('#selectLine').property('value'); + + node + .attr('height', 0.) + + nodeOpaque + .attr('y', d => d.y0) + .attr('height', d => d.y1 - d.y0) //d.y1 - d.y0 + + if (selectValue !== 'None'){ + selectStations = lines[selectValue] + selectTraj = new Set; + for (let i = 0; i < selectStations.length; i++){ + stat = selectStations[i] + traj = new Set(trajectories[stat]) + selectTraj = new Set([...selectTraj, ...traj]); + } + selectTraj = Array.from(selectTraj); + + node + .filter(d => selectTraj.includes(d.name)) + .attr('y', d => d.y0) + .attr('height', d => d.lineRatio[selectValue] * (d.y1 - d.y0)); + //.style('opacity', nodeOpacity) + + nodeOpaque + .filter(d => selectTraj.includes(d.name)) + .attr('y', d => d.y0 + d.lineRatio[selectValue] * (d.y1 - d.y0)) + .attr('height', d => (1 - d.lineRatio[selectValue]) * (d.y1 - d.y0)) + } + else { + nodeOpaque + .attr('y', d => d.y0) + .attr('height', 0.) + + node + .attr('y', d => d.y0) + .attr('height', d => d.y1 - d.y0) + .style('opacity', nodeOpacity) + } + }; + + // 4 functions that change the tooltip when user hover / move / leave a cell + var nodeMouseover = function(d) { + + tooltip.style('opacity', 1); + + // Fires when the mouse enters the canvas or any of its children. 
+ if (d3.select('#selectLine').property('value') === 'None' & + d3.select('#selectStation').property('value') === 'None'){ + + d3.select(this) + .style('stroke', 'black') + .style('opacity', 1.); + + mode = document.getElementById('selectMode').value; + + if (mode !== 'block'){ + document.getElementById( + ( mode === 'entry'? 'exit': 'entry') + 'RoutemapImage' + ).style.cssText = 'display: none'; + document.getElementById('details') + .style.cssText = 'display: inherit'; + }; + } + } + + var linkMouseover = function(d) { + // Fires when the mouse enters the canvas or any of its children. + tooltip.style('opacity', 1); + + mode = document.getElementById('selectMode').value; + + if (mode !== 'block'){ + document.getElementById( + (mode === 'entry'? 'exit': 'entry') + 'RoutemapImage' + ).style.cssText = 'display: none'; + document.getElementById('details') + .style.cssText = 'display: inherit'; + }; + } + + var linkMousemove = function(d) { + // Fires on any mouse movement over the canvas + tooltip.html(tooltipContentLink(d, mode)) + .style('position', 'absolute') + .style('left', (d3.event.pageX + xOffsetTooltip) + 'px') + .style('top', (d3.event.pageY + yOffsetTooltip) + 'px'); + + if (mode !== 'block'){ + for (line in lines){ + const {title, content} = processLineText(d.stationsLink[line]); + d3.select('#lineTitleContainer' + line) + .html(line + ' : ' + title); + d3.select('#lineContentContainer' + line) + .html(content); + } + }; + } + + var nodeMousemove = function(d) { + // Fires on any mouse movement over the canvas + tooltip.html(tooltipContentNode(d, mode)) + .style('position', 'absolute') + .style('left', (d3.event.pageX + xOffsetTooltip) + 'px') + .style('top', (d3.event.pageY + yOffsetTooltip) + 'px'); + + if (mode !== 'block'){ + for (line in lines){ + const {title, content} = processLineText(d.stationsNode[line]) + d3.select('#lineTitleContainer' + line) + .html(line + ' : ' + title); + d3.select('#lineContentContainer' + line) + .html(content); + } + 
}; + } + + var nodeMouseleave = function(d) { + if (d3.select('#selectLine').property('value') === 'None' & + d3.select('#selectStation').property('value') === 'None'){ + + tooltip.style('opacity', 0); + + d3.select(this) + .style('stroke', 'none') + .style('opacity', nodeOpacity); + + document.getElementById( + (mode === 'entry'? 'exit': 'entry') + 'RoutemapImage' + ).style.cssText = 'display: inherit'; + document.getElementById('details') + .style.cssText = 'display: none'; + } + } + + var linkMouseleave = function(d) { + tooltip.style('opacity', 0); + + document.getElementById( + (mode === 'entry'? 'exit': 'entry') + 'RoutemapImage' + ).style.cssText = 'display: inherit'; + document.getElementById('details') + .style.cssText = 'display: none'; + } + + var click = function(d){ + document.getElementById('routemapTimestepRange').value = d.timestep + updateRoutemap(); + } + + link + .on('mouseover', linkMouseover) + .on('mousemove', linkMousemove) + .on('mouseleave', linkMouseleave) + .on('click', click); + + node + .on('mouseover', nodeMouseover) + .on('mousemove', nodeMousemove) + .on('mouseleave', nodeMouseleave) + .on('click', click); + + // Create scale + //var x = d3.scaleTime() + //.rangeRound([margins.left, width - margins.right]) + //.domain([new Date(timesteps[0]), new Date(timesteps[timesteps.length - 1])]) + + var ordinalRange = []; + var tickVals = []; + + start = parseFloat(margins.left) + .5 * nodeWidth; + + // hacky way to find appropriate step + step = 0.; + x0 = node._groups[0][0].x.animVal.value; + for (const rect of node._groups[0]) { + val = rect.x.animVal.value; + if (val > x0){ + step = ((val - x0) * 12 * 100) / timesteps.length; // why 100?? 
* 100 for 1 day, *12 * 100 for 5 weeks + break; + } + } + + step = step + .14735 * nodeWidth; // minor correction + + for (i = 0; i < timesteps.length; i++){ + + ordinalRange.push(start + i * step) + + arrT = timesteps[i].split('T'); + dateT = arrT[0]; + timeT = arrT[1]; + if (onlyTickHour){ + tickVals.push(timeT); + } else { + tickVals.push(dateT.slice(5) + '-' + timeT); + } + } + + var x = d3.scaleOrdinal() + .range(ordinalRange) + .domain(timesteps); + + // Add scales to axis + var xAxis = d3.axisBottom(x) + .tickValues(tickVals) + //.ticks(Math.floor(nbTimesteps / 2)) + .tickSize(25) + //.tickFormat(d3.timeFormat('%H:%M')); + + // Append group and insert axis + svg.append('g') + .attr('id', 'ticks') + .attr('class', 'axis') + .attr('transform', 'translate(0,' + (height - margins.top * .8) + ')') + .call(xAxis) + .selectAll("text") + .style("text-anchor", "end") + .attr("dx", "-.8em") + .attr("dy", ".15em") + .attr("transform", "rotate(-65)"); + + //ticksFontSize = document.getElementById('sankeyTickFs').value; + updateTickFs(ticksFontSize); + + mode = d3.select('#selectMode').property('value'); + document + .getElementById('selectClusterTxt') + .innerHTML = 'Select ' + mode + ' cluster  '; + + + // Update the routemap picture according + // to the slider + // Note that the slider is in [1: T] + // but prints timesteps + d3.select('#routemapTimestepRange') + .property('max', timesteps.length - 1) + .on('input', updateRoutemap); + + d3.select('#selectCluster') + .on('input', updateRoutemap); + + d3.select('#showFullGammaButton') + .on('click', updateFullGamma); + + function updateFullGamma(){ + button = document + .getElementById('showFullGammaButton'); + + if (button.value === 'hide'){ + button.innerHTML = 'Show full cluster interactions'; + button.value = 'show'; + } + else if (button.value == 'show'){ + button.innerHTML = 'Show current cluster interactions'; + button.value = 'hide'; + } + updateRoutemap(); + } + + function updateRoutemap(){ + t = 
d3.select('#routemapTimestepRange').property('value'); + k = d3.select('#selectClusterVal').property('value'); + mode = d3.select('#selectMode').property('value'); + + document + .getElementById('selectClusterTxt') + .innerHTML = 'Select ' + mode + ' cluster  '; + + + fullGamma = (document + .getElementById('showFullGammaButton') + .value == 'show'? false : true); + + datetime = timesteps[t].split('T'); + date = datetime[0]; + time = datetime[1]; + dayOfWeek = daysOfWeek[t]; + tickValue = tickVals[t]; + + document + .getElementById('routemapTimestepRangeText') + .innerHTML = dayOfWeek + ' ' + date + ' ' + time; + + ticks = document.getElementsByClassName('tick'); + fs = document.getElementById('sankeyTickFs').value; + selected_fs = delta_ticks_fs + parseFloat(fs); + + for (i = 0; i < ticks.length; i++){ + ticks[i].style.fontWeight = 'normal'; + ticks[i].style.fontSize = fs + 'px'; + + if (ticks[i].getElementsByTagName('text')[0].innerHTML == tickValue){ + ticks[i].style.fontWeight = 'bold'; + ticks[i].style.fontSize = selected_fs + 'px'; + } + } + + if (mode === 'entry'){ + document + .getElementById('entryRoutemapImage') + .setAttribute('src', image_path + 'routemap_entries_' + t + '_' + k + '.jpg'); + + if (k === 'None'){ + document + .getElementById('exitRoutemapImage') + .setAttribute('src', image_path + 'routemap_exits_' + t + '_None.jpg'); + } else { + document + .getElementById('exitRoutemapImage') + .setAttribute('src', image_path + 'routemap_exits_gamma_' + t + '_' + k + '.jpg'); + } + } else if (mode === 'exit'){ + if (k === 'None'){ + document + .getElementById('entryRoutemapImage') + .setAttribute('src', image_path + 'routemap_entries_' + t + '_None.jpg'); + } else { + document + .getElementById('entryRoutemapImage') + .setAttribute('src', image_path + 'routemap_entries_gamma_' + t + '_' + k + '.jpg'); + } + document + .getElementById('exitRoutemapImage') + .setAttribute('src', image_path + 'routemap_exits_' + t + '_' + k + '.jpg'); + + } else if (mode 
=== 'block'){ + document + .getElementById('entryRoutemapImage') + .setAttribute('src', image_path + 'routemap_entries_' + t + '_None.jpg'); + document + .getElementById('exitRoutemapImage') + .setAttribute('src', image_path + 'routemap_exits_' + t + '_None.jpg'); + }; + + //document + //.getElementById('gammaImage') + //.setAttribute('src', image_path + 'gamma_' + mode + '_' + k + '.jpg'); + + if (fullGamma) { + document + .getElementById('gammaImage') + .setAttribute('src', image_path + 'gamma_full.jpg'); + } else { + if ((mode === 'entry') || (mode === 'exit')){ + document + .getElementById('gammaImage') + .setAttribute('src', image_path + 'gamma_interact_'+ t + '_' + mode + '_' + k + '.jpg'); + } else if (mode === 'block'){ + document + .getElementById('gammaImage') + .setAttribute('src', image_path + 'gamma_interact_'+ t + '_exit_None.jpg'); + }; + }; + + // translate SVG with timestep cursor + svgbb = document.getElementById('sankeySVG').getBoundingClientRect(); + + svgWidth = timestepScrollCorrectFactor * parseFloat(svgbb.width); + xTranslateVal = parseFloat(margins.left) - (t / timesteps.length) * svgWidth; + svg.attr('transform', 'translate(' + xTranslateVal + ',0)'); + } + // initial call + updateRoutemap(); +} + +// waiting for data + +// non D3js html writing +initSankeyOptionRanges(nodeWidth, nodePadding, widthPerTimestep, heightPerCluster); + +d3.json(data_file) +.then(d => prepareHtml(d, initMode)); + +// first sankey construction +// can be later overwritten by interactively +// changing the sankey options +//d3.json(data_file) +//.then(drawSVGAndAnimate(mode, nodeWidth, nodePadding, widthPerTimestep, heightPerCluster)); + + +selectMode(initMode); + + diff --git a/alluvial/d3-sankey.js b/alluvial/d3-sankey.js new file mode 100644 index 0000000..90bfd29 --- /dev/null +++ b/alluvial/d3-sankey.js @@ -0,0 +1,455 @@ +// https://github.com/d3/d3-sankey v0.12.3 Copyright 2019 Mike Bostock +(function (global, factory) { +typeof exports === 'object' && 
typeof module !== 'undefined' ? factory(exports, require('d3-array'), require('d3-shape')) : +typeof define === 'function' && define.amd ? define(['exports', 'd3-array', 'd3-shape'], factory) : +(global = global || self, factory(global.d3 = global.d3 || {}, global.d3, global.d3)); +}(this, function (exports, d3Array, d3Shape) { 'use strict'; + +function targetDepth(d) { + return d.target.depth; +} + +function left(node) { + return node.depth; +} + +function right(node, n) { + return n - 1 - node.height; +} + +function justify(node, n) { + return node.sourceLinks.length ? node.depth : n - 1; +} + +function center(node) { + return node.targetLinks.length ? node.depth + : node.sourceLinks.length ? d3Array.min(node.sourceLinks, targetDepth) - 1 + : 0; +} + +function constant(x) { + return function() { + return x; + }; +} + +function ascendingSourceBreadth(a, b) { + return ascendingBreadth(a.source, b.source) || a.index - b.index; +} + +function ascendingTargetBreadth(a, b) { + return ascendingBreadth(a.target, b.target) || a.index - b.index; +} + +function ascendingBreadth(a, b) { + return a.y0 - b.y0; +} + +function value(d) { + return d.value; +} + +function defaultId(d) { + return d.index; +} + +function defaultNodes(graph) { + return graph.nodes; +} + +function defaultLinks(graph) { + return graph.links; +} + +function find(nodeById, id) { + const node = nodeById.get(id); + if (!node) throw new Error("missing: " + id); + return node; +} + +function computeLinkBreadths({nodes}) { + for (const node of nodes) { + let y0 = node.y0; + let y1 = y0; + for (const link of node.sourceLinks) { + link.y0 = y0 + link.width / 2; + y0 += link.width; + } + for (const link of node.targetLinks) { + link.y1 = y1 + link.width / 2; + y1 += link.width; + } + } +} + +function Sankey() { + let x0 = 0, y0 = 0, x1 = 1, y1 = 1; // extent + let dx = 24; // nodeWidth + let dy = 8, py; // nodePadding + let id = defaultId; + let align = justify; + let sort; + let linkSort; + let nodes = 
defaultNodes; + let links = defaultLinks; + let iterations = 6; + + function sankey() { + const graph = {nodes: nodes.apply(null, arguments), links: links.apply(null, arguments)}; + computeNodeLinks(graph); + computeNodeValues(graph); + computeNodeDepths(graph); + computeNodeHeights(graph); + computeNodeBreadths(graph); + computeLinkBreadths(graph); + return graph; + } + + sankey.update = function(graph) { + computeLinkBreadths(graph); + return graph; + }; + + sankey.nodeId = function(_) { + return arguments.length ? (id = typeof _ === "function" ? _ : constant(_), sankey) : id; + }; + + sankey.nodeAlign = function(_) { + return arguments.length ? (align = typeof _ === "function" ? _ : constant(_), sankey) : align; + }; + + sankey.nodeSort = function(_) { + return arguments.length ? (sort = _, sankey) : sort; + }; + + sankey.nodeWidth = function(_) { + return arguments.length ? (dx = +_, sankey) : dx; + }; + + sankey.nodePadding = function(_) { + return arguments.length ? (dy = py = +_, sankey) : dy; + }; + + sankey.nodes = function(_) { + return arguments.length ? (nodes = typeof _ === "function" ? _ : constant(_), sankey) : nodes; + }; + + sankey.links = function(_) { + return arguments.length ? (links = typeof _ === "function" ? _ : constant(_), sankey) : links; + }; + + sankey.linkSort = function(_) { + return arguments.length ? (linkSort = _, sankey) : linkSort; + }; + + sankey.size = function(_) { + return arguments.length ? (x0 = y0 = 0, x1 = +_[0], y1 = +_[1], sankey) : [x1 - x0, y1 - y0]; + }; + + sankey.extent = function(_) { + return arguments.length ? (x0 = +_[0][0], x1 = +_[1][0], y0 = +_[0][1], y1 = +_[1][1], sankey) : [[x0, y0], [x1, y1]]; + }; + + sankey.iterations = function(_) { + return arguments.length ? 
(iterations = +_, sankey) : iterations; + }; + + + // TO REMOVE + // TEST + sankey.link = function() { + var curvature = .5; + + function link(d) { + var x0 = d.source.x + d.source.dx, + x1 = d.target.x, + xi = d3.interpolateNumber(x0, x1), + x2 = xi(curvature), + x3 = xi(1 - curvature), + y0 = d.source.y + d.sy + d.dy / 2, + y1 = d.target.y + d.ty + d.dy / 2; + return "M" + x0 + "," + y0 + + "C" + x2 + "," + y0 + + " " + x3 + "," + y1 + + " " + x1 + "," + y1; + } + + link.curvature = function(_) { + if (!arguments.length) return curvature; + curvature = +_; + return link; + }; + + return link; + }; + + + function computeNodeLinks({nodes, links}) { + for (const [i, node] of nodes.entries()) { + node.index = i; + node.sourceLinks = []; + node.targetLinks = []; + } + const nodeById = new Map(nodes.map((d, i) => [id(d, i, nodes), d])); + for (const [i, link] of links.entries()) { + link.index = i; + let {source, target} = link; + if (typeof source !== "object") source = link.source = find(nodeById, source); + if (typeof target !== "object") target = link.target = find(nodeById, target); + source.sourceLinks.push(link); + target.targetLinks.push(link); + } + if (linkSort != null) { + for (const {sourceLinks, targetLinks} of nodes) { + sourceLinks.sort(linkSort); + targetLinks.sort(linkSort); + } + } + } + + function computeNodeValues({nodes}) { + for (const node of nodes) { + node.value = node.fixedValue === undefined + ? 
Math.max(d3Array.sum(node.sourceLinks, value), d3Array.sum(node.targetLinks, value)) + : node.fixedValue; + } + } + + function computeNodeDepths({nodes}) { + const n = nodes.length; + let current = new Set(nodes); + let next = new Set; + let x = 0; + while (current.size) { + for (const node of current) { + node.depth = x; + for (const {target} of node.sourceLinks) { + next.add(target); + } + } + if (++x > n) throw new Error("circular link"); + current = next; + next = new Set; + } + } + + function computeNodeHeights({nodes}) { + const n = nodes.length; + let current = new Set(nodes); + let next = new Set; + let x = 0; + while (current.size) { + for (const node of current) { + node.height = x; + for (const {source} of node.targetLinks) { + next.add(source); + } + } + if (++x > n) throw new Error("circular link"); + current = next; + next = new Set; + } + } + + function computeNodeLayers({nodes}) { + const x = d3Array.max(nodes, d => d.depth) + 1; + const kx = (x1 - x0 - dx) / (x - 1); + const columns = new Array(x); + for (const node of nodes) { + const i = Math.max(0, Math.min(x - 1, Math.floor(align.call(null, node, x)))); + node.layer = i; + node.x0 = x0 + i * kx; + node.x1 = node.x0 + dx; + if (columns[i]) columns[i].push(node); + else columns[i] = [node]; + } + if (sort) for (const column of columns) { + column.sort(sort); + } + return columns; + } + + function initializeNodeBreadths(columns) { + const ky = d3Array.min(columns, c => (y1 - y0 - (c.length - 1) * py) / d3Array.sum(c, value)); + for (const nodes of columns) { + let y = y0; + for (const node of nodes) { + node.y0 = y; + node.y1 = y + node.value * ky; + y = node.y1 + py; + for (const link of node.sourceLinks) { + link.width = link.value * ky; + } + } + y = (y1 - y + py) / (nodes.length + 1); + for (let i = 0; i < nodes.length; ++i) { + const node = nodes[i]; + node.y0 += y * (i + 1); + node.y1 += y * (i + 1); + } + reorderLinks(nodes); + } + } + + function computeNodeBreadths(graph) { + const columns 
= computeNodeLayers(graph); + py = Math.min(dy, (y1 - y0) / (d3Array.max(columns, c => c.length) - 1)); + initializeNodeBreadths(columns); + for (let i = 0; i < iterations; ++i) { + const alpha = Math.pow(0.99, i); + const beta = Math.max(1 - alpha, (i + 1) / iterations); + relaxRightToLeft(columns, alpha, beta); + relaxLeftToRight(columns, alpha, beta); + } + } + + // Reposition each node based on its incoming (target) links. + function relaxLeftToRight(columns, alpha, beta) { + for (let i = 1, n = columns.length; i < n; ++i) { + const column = columns[i]; + for (const target of column) { + let y = 0; + let w = 0; + for (const {source, value} of target.targetLinks) { + let v = value * (target.layer - source.layer); + y += targetTop(source, target) * v; + w += v; + } + if (!(w > 0)) continue; + let dy = (y / w - target.y0) * alpha; + target.y0 += dy; + target.y1 += dy; + reorderNodeLinks(target); + } + if (sort === undefined) column.sort(ascendingBreadth); + resolveCollisions(column, beta); + } + } + + // Reposition each node based on its outgoing (source) links. 
+ function relaxRightToLeft(columns, alpha, beta) { + for (let n = columns.length, i = n - 2; i >= 0; --i) { + const column = columns[i]; + for (const source of column) { + let y = 0; + let w = 0; + for (const {target, value} of source.sourceLinks) { + let v = value * (target.layer - source.layer); + y += sourceTop(source, target) * v; + w += v; + } + if (!(w > 0)) continue; + let dy = (y / w - source.y0) * alpha; + source.y0 += dy; + source.y1 += dy; + reorderNodeLinks(source); + } + if (sort === undefined) column.sort(ascendingBreadth); + resolveCollisions(column, beta); + } + } + + function resolveCollisions(nodes, alpha) { + const i = nodes.length >> 1; + const subject = nodes[i]; + resolveCollisionsBottomToTop(nodes, subject.y0 - py, i - 1, alpha); + resolveCollisionsTopToBottom(nodes, subject.y1 + py, i + 1, alpha); + resolveCollisionsBottomToTop(nodes, y1, nodes.length - 1, alpha); + resolveCollisionsTopToBottom(nodes, y0, 0, alpha); + } + + // Push any overlapping nodes down. + function resolveCollisionsTopToBottom(nodes, y, i, alpha) { + for (; i < nodes.length; ++i) { + const node = nodes[i]; + const dy = (y - node.y0) * alpha; + if (dy > 1e-6) node.y0 += dy, node.y1 += dy; + y = node.y1 + py; + } + } + + // Push any overlapping nodes up. 
+ function resolveCollisionsBottomToTop(nodes, y, i, alpha) { + for (; i >= 0; --i) { + const node = nodes[i]; + const dy = (node.y1 - y) * alpha; + if (dy > 1e-6) node.y0 -= dy, node.y1 -= dy; + y = node.y0 - py; + } + } + + function reorderNodeLinks({sourceLinks, targetLinks}) { + if (linkSort === undefined) { + for (const {source: {sourceLinks}} of targetLinks) { + sourceLinks.sort(ascendingTargetBreadth); + } + for (const {target: {targetLinks}} of sourceLinks) { + targetLinks.sort(ascendingSourceBreadth); + } + } + } + + function reorderLinks(nodes) { + if (linkSort === undefined) { + for (const {sourceLinks, targetLinks} of nodes) { + sourceLinks.sort(ascendingTargetBreadth); + targetLinks.sort(ascendingSourceBreadth); + } + } + } + + // Returns the target.y0 that would produce an ideal link from source to target. + function targetTop(source, target) { + let y = source.y0 - (source.sourceLinks.length - 1) * py / 2; + for (const {target: node, width} of source.sourceLinks) { + if (node === target) break; + y += width + py; + } + for (const {source: node, width} of target.targetLinks) { + if (node === source) break; + y -= width; + } + return y; + } + + // Returns the source.y0 that would produce an ideal link from source to target. 
+ function sourceTop(source, target) { + let y = target.y0 - (target.targetLinks.length - 1) * py / 2; + for (const {source: node, width} of target.targetLinks) { + if (node === source) break; + y += width + py; + } + for (const {target: node, width} of source.sourceLinks) { + if (node === target) break; + y -= width; + } + return y; + } + + return sankey; +} + +function horizontalSource(d) { + return [d.source.x1, d.y0]; +} + +function horizontalTarget(d) { + return [d.target.x0, d.y1]; +} + +function sankeyLinkHorizontal() { + return d3Shape.linkHorizontal() + .source(horizontalSource) + .target(horizontalTarget); +} + +exports.sankey = Sankey; +exports.sankeyCenter = center; +exports.sankeyJustify = justify; +exports.sankeyLeft = left; +exports.sankeyLinkHorizontal = sankeyLinkHorizontal; +exports.sankeyRight = right; + +Object.defineProperty(exports, '__esModule', { value: true }); + +})); diff --git a/alluvial/d3.js b/alluvial/d3.js new file mode 100644 index 0000000..632b3fa --- /dev/null +++ b/alluvial/d3.js @@ -0,0 +1,18568 @@ +// https://d3js.org v5.16.0 Copyright 2020 Mike Bostock +(function (global, factory) { +typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) : +typeof define === 'function' && define.amd ? define(['exports'], factory) : +(global = global || self, factory(global.d3 = global.d3 || {})); +}(this, function (exports) { 'use strict'; + +var version = "5.16.0"; + +function ascending(a, b) { + return a < b ? -1 : a > b ? 1 : a >= b ? 
0 : NaN; +} + +function bisector(compare) { + if (compare.length === 1) compare = ascendingComparator(compare); + return { + left: function(a, x, lo, hi) { + if (lo == null) lo = 0; + if (hi == null) hi = a.length; + while (lo < hi) { + var mid = lo + hi >>> 1; + if (compare(a[mid], x) < 0) lo = mid + 1; + else hi = mid; + } + return lo; + }, + right: function(a, x, lo, hi) { + if (lo == null) lo = 0; + if (hi == null) hi = a.length; + while (lo < hi) { + var mid = lo + hi >>> 1; + if (compare(a[mid], x) > 0) hi = mid; + else lo = mid + 1; + } + return lo; + } + }; +} + +function ascendingComparator(f) { + return function(d, x) { + return ascending(f(d), x); + }; +} + +var ascendingBisect = bisector(ascending); +var bisectRight = ascendingBisect.right; +var bisectLeft = ascendingBisect.left; + +function pairs(array, f) { + if (f == null) f = pair; + var i = 0, n = array.length - 1, p = array[0], pairs = new Array(n < 0 ? 0 : n); + while (i < n) pairs[i] = f(p, p = array[++i]); + return pairs; +} + +function pair(a, b) { + return [a, b]; +} + +function cross(values0, values1, reduce) { + var n0 = values0.length, + n1 = values1.length, + values = new Array(n0 * n1), + i0, + i1, + i, + value0; + + if (reduce == null) reduce = pair; + + for (i0 = i = 0; i0 < n0; ++i0) { + for (value0 = values0[i0], i1 = 0; i1 < n1; ++i1, ++i) { + values[i] = reduce(value0, values1[i1]); + } + } + + return values; +} + +function descending(a, b) { + return b < a ? -1 : b > a ? 1 : b >= a ? 0 : NaN; +} + +function number(x) { + return x === null ? 
NaN : +x; +} + +function variance(values, valueof) { + var n = values.length, + m = 0, + i = -1, + mean = 0, + value, + delta, + sum = 0; + + if (valueof == null) { + while (++i < n) { + if (!isNaN(value = number(values[i]))) { + delta = value - mean; + mean += delta / ++m; + sum += delta * (value - mean); + } + } + } + + else { + while (++i < n) { + if (!isNaN(value = number(valueof(values[i], i, values)))) { + delta = value - mean; + mean += delta / ++m; + sum += delta * (value - mean); + } + } + } + + if (m > 1) return sum / (m - 1); +} + +function deviation(array, f) { + var v = variance(array, f); + return v ? Math.sqrt(v) : v; +} + +function extent(values, valueof) { + var n = values.length, + i = -1, + value, + min, + max; + + if (valueof == null) { + while (++i < n) { // Find the first comparable value. + if ((value = values[i]) != null && value >= value) { + min = max = value; + while (++i < n) { // Compare the remaining values. + if ((value = values[i]) != null) { + if (min > value) min = value; + if (max < value) max = value; + } + } + } + } + } + + else { + while (++i < n) { // Find the first comparable value. + if ((value = valueof(values[i], i, values)) != null && value >= value) { + min = max = value; + while (++i < n) { // Compare the remaining values. + if ((value = valueof(values[i], i, values)) != null) { + if (min > value) min = value; + if (max < value) max = value; + } + } + } + } + } + + return [min, max]; +} + +var array = Array.prototype; + +var slice = array.slice; +var map = array.map; + +function constant(x) { + return function() { + return x; + }; +} + +function identity(x) { + return x; +} + +function sequence(start, stop, step) { + start = +start, stop = +stop, step = (n = arguments.length) < 2 ? (stop = start, start = 0, 1) : n < 3 ? 
1 : +step; + + var i = -1, + n = Math.max(0, Math.ceil((stop - start) / step)) | 0, + range = new Array(n); + + while (++i < n) { + range[i] = start + i * step; + } + + return range; +} + +var e10 = Math.sqrt(50), + e5 = Math.sqrt(10), + e2 = Math.sqrt(2); + +function ticks(start, stop, count) { + var reverse, + i = -1, + n, + ticks, + step; + + stop = +stop, start = +start, count = +count; + if (start === stop && count > 0) return [start]; + if (reverse = stop < start) n = start, start = stop, stop = n; + if ((step = tickIncrement(start, stop, count)) === 0 || !isFinite(step)) return []; + + if (step > 0) { + start = Math.ceil(start / step); + stop = Math.floor(stop / step); + ticks = new Array(n = Math.ceil(stop - start + 1)); + while (++i < n) ticks[i] = (start + i) * step; + } else { + start = Math.floor(start * step); + stop = Math.ceil(stop * step); + ticks = new Array(n = Math.ceil(start - stop + 1)); + while (++i < n) ticks[i] = (start - i) / step; + } + + if (reverse) ticks.reverse(); + + return ticks; +} + +function tickIncrement(start, stop, count) { + var step = (stop - start) / Math.max(0, count), + power = Math.floor(Math.log(step) / Math.LN10), + error = step / Math.pow(10, power); + return power >= 0 + ? (error >= e10 ? 10 : error >= e5 ? 5 : error >= e2 ? 2 : 1) * Math.pow(10, power) + : -Math.pow(10, -power) / (error >= e10 ? 10 : error >= e5 ? 5 : error >= e2 ? 2 : 1); +} + +function tickStep(start, stop, count) { + var step0 = Math.abs(stop - start) / Math.max(0, count), + step1 = Math.pow(10, Math.floor(Math.log(step0) / Math.LN10)), + error = step0 / step1; + if (error >= e10) step1 *= 10; + else if (error >= e5) step1 *= 5; + else if (error >= e2) step1 *= 2; + return stop < start ? 
-step1 : step1; +} + +function thresholdSturges(values) { + return Math.ceil(Math.log(values.length) / Math.LN2) + 1; +} + +function histogram() { + var value = identity, + domain = extent, + threshold = thresholdSturges; + + function histogram(data) { + var i, + n = data.length, + x, + values = new Array(n); + + for (i = 0; i < n; ++i) { + values[i] = value(data[i], i, data); + } + + var xz = domain(values), + x0 = xz[0], + x1 = xz[1], + tz = threshold(values, x0, x1); + + // Convert number of thresholds into uniform thresholds. + if (!Array.isArray(tz)) { + tz = tickStep(x0, x1, tz); + tz = sequence(Math.ceil(x0 / tz) * tz, x1, tz); // exclusive + } + + // Remove any thresholds outside the domain. + var m = tz.length; + while (tz[0] <= x0) tz.shift(), --m; + while (tz[m - 1] > x1) tz.pop(), --m; + + var bins = new Array(m + 1), + bin; + + // Initialize bins. + for (i = 0; i <= m; ++i) { + bin = bins[i] = []; + bin.x0 = i > 0 ? tz[i - 1] : x0; + bin.x1 = i < m ? tz[i] : x1; + } + + // Assign data to bins by value, ignoring any outside the domain. + for (i = 0; i < n; ++i) { + x = values[i]; + if (x0 <= x && x <= x1) { + bins[bisectRight(tz, x, 0, m)].push(data[i]); + } + } + + return bins; + } + + histogram.value = function(_) { + return arguments.length ? (value = typeof _ === "function" ? _ : constant(_), histogram) : value; + }; + + histogram.domain = function(_) { + return arguments.length ? (domain = typeof _ === "function" ? _ : constant([_[0], _[1]]), histogram) : domain; + }; + + histogram.thresholds = function(_) { + return arguments.length ? (threshold = typeof _ === "function" ? _ : Array.isArray(_) ? 
constant(slice.call(_)) : constant(_), histogram) : threshold; + }; + + return histogram; +} + +function threshold(values, p, valueof) { + if (valueof == null) valueof = number; + if (!(n = values.length)) return; + if ((p = +p) <= 0 || n < 2) return +valueof(values[0], 0, values); + if (p >= 1) return +valueof(values[n - 1], n - 1, values); + var n, + i = (n - 1) * p, + i0 = Math.floor(i), + value0 = +valueof(values[i0], i0, values), + value1 = +valueof(values[i0 + 1], i0 + 1, values); + return value0 + (value1 - value0) * (i - i0); +} + +function freedmanDiaconis(values, min, max) { + values = map.call(values, number).sort(ascending); + return Math.ceil((max - min) / (2 * (threshold(values, 0.75) - threshold(values, 0.25)) * Math.pow(values.length, -1 / 3))); +} + +function scott(values, min, max) { + return Math.ceil((max - min) / (3.5 * deviation(values) * Math.pow(values.length, -1 / 3))); +} + +function max(values, valueof) { + var n = values.length, + i = -1, + value, + max; + + if (valueof == null) { + while (++i < n) { // Find the first comparable value. + if ((value = values[i]) != null && value >= value) { + max = value; + while (++i < n) { // Compare the remaining values. + if ((value = values[i]) != null && value > max) { + max = value; + } + } + } + } + } + + else { + while (++i < n) { // Find the first comparable value. + if ((value = valueof(values[i], i, values)) != null && value >= value) { + max = value; + while (++i < n) { // Compare the remaining values. 
+ if ((value = valueof(values[i], i, values)) != null && value > max) { + max = value; + } + } + } + } + } + + return max; +} + +function mean(values, valueof) { + var n = values.length, + m = n, + i = -1, + value, + sum = 0; + + if (valueof == null) { + while (++i < n) { + if (!isNaN(value = number(values[i]))) sum += value; + else --m; + } + } + + else { + while (++i < n) { + if (!isNaN(value = number(valueof(values[i], i, values)))) sum += value; + else --m; + } + } + + if (m) return sum / m; +} + +function median(values, valueof) { + var n = values.length, + i = -1, + value, + numbers = []; + + if (valueof == null) { + while (++i < n) { + if (!isNaN(value = number(values[i]))) { + numbers.push(value); + } + } + } + + else { + while (++i < n) { + if (!isNaN(value = number(valueof(values[i], i, values)))) { + numbers.push(value); + } + } + } + + return threshold(numbers.sort(ascending), 0.5); +} + +function merge(arrays) { + var n = arrays.length, + m, + i = -1, + j = 0, + merged, + array; + + while (++i < n) j += arrays[i].length; + merged = new Array(j); + + while (--n >= 0) { + array = arrays[n]; + m = array.length; + while (--m >= 0) { + merged[--j] = array[m]; + } + } + + return merged; +} + +function min(values, valueof) { + var n = values.length, + i = -1, + value, + min; + + if (valueof == null) { + while (++i < n) { // Find the first comparable value. + if ((value = values[i]) != null && value >= value) { + min = value; + while (++i < n) { // Compare the remaining values. + if ((value = values[i]) != null && min > value) { + min = value; + } + } + } + } + } + + else { + while (++i < n) { // Find the first comparable value. + if ((value = valueof(values[i], i, values)) != null && value >= value) { + min = value; + while (++i < n) { // Compare the remaining values. 
+ if ((value = valueof(values[i], i, values)) != null && min > value) { + min = value; + } + } + } + } + } + + return min; +} + +function permute(array, indexes) { + var i = indexes.length, permutes = new Array(i); + while (i--) permutes[i] = array[indexes[i]]; + return permutes; +} + +function scan(values, compare) { + if (!(n = values.length)) return; + var n, + i = 0, + j = 0, + xi, + xj = values[j]; + + if (compare == null) compare = ascending; + + while (++i < n) { + if (compare(xi = values[i], xj) < 0 || compare(xj, xj) !== 0) { + xj = xi, j = i; + } + } + + if (compare(xj, xj) === 0) return j; +} + +function shuffle(array, i0, i1) { + var m = (i1 == null ? array.length : i1) - (i0 = i0 == null ? 0 : +i0), + t, + i; + + while (m) { + i = Math.random() * m-- | 0; + t = array[m + i0]; + array[m + i0] = array[i + i0]; + array[i + i0] = t; + } + + return array; +} + +function sum(values, valueof) { + var n = values.length, + i = -1, + value, + sum = 0; + + if (valueof == null) { + while (++i < n) { + if (value = +values[i]) sum += value; // Note: zero and null are equivalent. 
+ } + } + + else { + while (++i < n) { + if (value = +valueof(values[i], i, values)) sum += value; + } + } + + return sum; +} + +function transpose(matrix) { + if (!(n = matrix.length)) return []; + for (var i = -1, m = min(matrix, length), transpose = new Array(m); ++i < m;) { + for (var j = -1, n, row = transpose[i] = new Array(n); ++j < n;) { + row[j] = matrix[j][i]; + } + } + return transpose; +} + +function length(d) { + return d.length; +} + +function zip() { + return transpose(arguments); +} + +var slice$1 = Array.prototype.slice; + +function identity$1(x) { + return x; +} + +var top = 1, + right = 2, + bottom = 3, + left = 4, + epsilon = 1e-6; + +function translateX(x) { + return "translate(" + (x + 0.5) + ",0)"; +} + +function translateY(y) { + return "translate(0," + (y + 0.5) + ")"; +} + +function number$1(scale) { + return function(d) { + return +scale(d); + }; +} + +function center(scale) { + var offset = Math.max(0, scale.bandwidth() - 1) / 2; // Adjust for 0.5px offset. + if (scale.round()) offset = Math.round(offset); + return function(d) { + return +scale(d) + offset; + }; +} + +function entering() { + return !this.__axis; +} + +function axis(orient, scale) { + var tickArguments = [], + tickValues = null, + tickFormat = null, + tickSizeInner = 6, + tickSizeOuter = 6, + tickPadding = 3, + k = orient === top || orient === left ? -1 : 1, + x = orient === left || orient === right ? "x" : "y", + transform = orient === top || orient === bottom ? translateX : translateY; + + function axis(context) { + var values = tickValues == null ? (scale.ticks ? scale.ticks.apply(scale, tickArguments) : scale.domain()) : tickValues, + format = tickFormat == null ? (scale.tickFormat ? scale.tickFormat.apply(scale, tickArguments) : identity$1) : tickFormat, + spacing = Math.max(tickSizeInner, 0) + tickPadding, + range = scale.range(), + range0 = +range[0] + 0.5, + range1 = +range[range.length - 1] + 0.5, + position = (scale.bandwidth ? 
center : number$1)(scale.copy()), + selection = context.selection ? context.selection() : context, + path = selection.selectAll(".domain").data([null]), + tick = selection.selectAll(".tick").data(values, scale).order(), + tickExit = tick.exit(), + tickEnter = tick.enter().append("g").attr("class", "tick"), + line = tick.select("line"), + text = tick.select("text"); + + path = path.merge(path.enter().insert("path", ".tick") + .attr("class", "domain") + .attr("stroke", "currentColor")); + + tick = tick.merge(tickEnter); + + line = line.merge(tickEnter.append("line") + .attr("stroke", "currentColor") + .attr(x + "2", k * tickSizeInner)); + + text = text.merge(tickEnter.append("text") + .attr("fill", "currentColor") + .attr(x, k * spacing) + .attr("dy", orient === top ? "0em" : orient === bottom ? "0.71em" : "0.32em")); + + if (context !== selection) { + path = path.transition(context); + tick = tick.transition(context); + line = line.transition(context); + text = text.transition(context); + + tickExit = tickExit.transition(context) + .attr("opacity", epsilon) + .attr("transform", function(d) { return isFinite(d = position(d)) ? transform(d) : this.getAttribute("transform"); }); + + tickEnter + .attr("opacity", epsilon) + .attr("transform", function(d) { var p = this.parentNode.__axis; return transform(p && isFinite(p = p(d)) ? p : position(d)); }); + } + + tickExit.remove(); + + path + .attr("d", orient === left || orient == right + ? (tickSizeOuter ? "M" + k * tickSizeOuter + "," + range0 + "H0.5V" + range1 + "H" + k * tickSizeOuter : "M0.5," + range0 + "V" + range1) + : (tickSizeOuter ? 
"M" + range0 + "," + k * tickSizeOuter + "V0.5H" + range1 + "V" + k * tickSizeOuter : "M" + range0 + ",0.5H" + range1)); + + tick + .attr("opacity", 1) + .attr("transform", function(d) { return transform(position(d)); }); + + line + .attr(x + "2", k * tickSizeInner); + + text + .attr(x, k * spacing) + .text(format); + + selection.filter(entering) + .attr("fill", "none") + .attr("font-size", 10) + .attr("font-family", "sans-serif") + .attr("text-anchor", orient === right ? "start" : orient === left ? "end" : "middle"); + + selection + .each(function() { this.__axis = position; }); + } + + axis.scale = function(_) { + return arguments.length ? (scale = _, axis) : scale; + }; + + axis.ticks = function() { + return tickArguments = slice$1.call(arguments), axis; + }; + + axis.tickArguments = function(_) { + return arguments.length ? (tickArguments = _ == null ? [] : slice$1.call(_), axis) : tickArguments.slice(); + }; + + axis.tickValues = function(_) { + return arguments.length ? (tickValues = _ == null ? null : slice$1.call(_), axis) : tickValues && tickValues.slice(); + }; + + axis.tickFormat = function(_) { + return arguments.length ? (tickFormat = _, axis) : tickFormat; + }; + + axis.tickSize = function(_) { + return arguments.length ? (tickSizeInner = tickSizeOuter = +_, axis) : tickSizeInner; + }; + + axis.tickSizeInner = function(_) { + return arguments.length ? (tickSizeInner = +_, axis) : tickSizeInner; + }; + + axis.tickSizeOuter = function(_) { + return arguments.length ? (tickSizeOuter = +_, axis) : tickSizeOuter; + }; + + axis.tickPadding = function(_) { + return arguments.length ? 
(tickPadding = +_, axis) : tickPadding; + }; + + return axis; +} + +function axisTop(scale) { + return axis(top, scale); +} + +function axisRight(scale) { + return axis(right, scale); +} + +function axisBottom(scale) { + return axis(bottom, scale); +} + +function axisLeft(scale) { + return axis(left, scale); +} + +var noop = {value: function() {}}; + +function dispatch() { + for (var i = 0, n = arguments.length, _ = {}, t; i < n; ++i) { + if (!(t = arguments[i] + "") || (t in _) || /[\s.]/.test(t)) throw new Error("illegal type: " + t); + _[t] = []; + } + return new Dispatch(_); +} + +function Dispatch(_) { + this._ = _; +} + +function parseTypenames(typenames, types) { + return typenames.trim().split(/^|\s+/).map(function(t) { + var name = "", i = t.indexOf("."); + if (i >= 0) name = t.slice(i + 1), t = t.slice(0, i); + if (t && !types.hasOwnProperty(t)) throw new Error("unknown type: " + t); + return {type: t, name: name}; + }); +} + +Dispatch.prototype = dispatch.prototype = { + constructor: Dispatch, + on: function(typename, callback) { + var _ = this._, + T = parseTypenames(typename + "", _), + t, + i = -1, + n = T.length; + + // If no callback was specified, return the callback of the given type and name. + if (arguments.length < 2) { + while (++i < n) if ((t = (typename = T[i]).type) && (t = get(_[t], typename.name))) return t; + return; + } + + // If a type was specified, set the callback for the given type and name. + // Otherwise, if a null callback was specified, remove callbacks of the given name. 
+ if (callback != null && typeof callback !== "function") throw new Error("invalid callback: " + callback); + while (++i < n) { + if (t = (typename = T[i]).type) _[t] = set(_[t], typename.name, callback); + else if (callback == null) for (t in _) _[t] = set(_[t], typename.name, null); + } + + return this; + }, + copy: function() { + var copy = {}, _ = this._; + for (var t in _) copy[t] = _[t].slice(); + return new Dispatch(copy); + }, + call: function(type, that) { + if ((n = arguments.length - 2) > 0) for (var args = new Array(n), i = 0, n, t; i < n; ++i) args[i] = arguments[i + 2]; + if (!this._.hasOwnProperty(type)) throw new Error("unknown type: " + type); + for (t = this._[type], i = 0, n = t.length; i < n; ++i) t[i].value.apply(that, args); + }, + apply: function(type, that, args) { + if (!this._.hasOwnProperty(type)) throw new Error("unknown type: " + type); + for (var t = this._[type], i = 0, n = t.length; i < n; ++i) t[i].value.apply(that, args); + } +}; + +function get(type, name) { + for (var i = 0, n = type.length, c; i < n; ++i) { + if ((c = type[i]).name === name) { + return c.value; + } + } +} + +function set(type, name, callback) { + for (var i = 0, n = type.length; i < n; ++i) { + if (type[i].name === name) { + type[i] = noop, type = type.slice(0, i).concat(type.slice(i + 1)); + break; + } + } + if (callback != null) type.push({name: name, value: callback}); + return type; +} + +var xhtml = "http://www.w3.org/1999/xhtml"; + +var namespaces = { + svg: "http://www.w3.org/2000/svg", + xhtml: xhtml, + xlink: "http://www.w3.org/1999/xlink", + xml: "http://www.w3.org/XML/1998/namespace", + xmlns: "http://www.w3.org/2000/xmlns/" +}; + +function namespace(name) { + var prefix = name += "", i = prefix.indexOf(":"); + if (i >= 0 && (prefix = name.slice(0, i)) !== "xmlns") name = name.slice(i + 1); + return namespaces.hasOwnProperty(prefix) ? 
{space: namespaces[prefix], local: name} : name; +} + +function creatorInherit(name) { + return function() { + var document = this.ownerDocument, + uri = this.namespaceURI; + return uri === xhtml && document.documentElement.namespaceURI === xhtml + ? document.createElement(name) + : document.createElementNS(uri, name); + }; +} + +function creatorFixed(fullname) { + return function() { + return this.ownerDocument.createElementNS(fullname.space, fullname.local); + }; +} + +function creator(name) { + var fullname = namespace(name); + return (fullname.local + ? creatorFixed + : creatorInherit)(fullname); +} + +function none() {} + +function selector(selector) { + return selector == null ? none : function() { + return this.querySelector(selector); + }; +} + +function selection_select(select) { + if (typeof select !== "function") select = selector(select); + + for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, subgroup = subgroups[j] = new Array(n), node, subnode, i = 0; i < n; ++i) { + if ((node = group[i]) && (subnode = select.call(node, node.__data__, i, group))) { + if ("__data__" in node) subnode.__data__ = node.__data__; + subgroup[i] = subnode; + } + } + } + + return new Selection(subgroups, this._parents); +} + +function empty() { + return []; +} + +function selectorAll(selector) { + return selector == null ? 
empty : function() { + return this.querySelectorAll(selector); + }; +} + +function selection_selectAll(select) { + if (typeof select !== "function") select = selectorAll(select); + + for (var groups = this._groups, m = groups.length, subgroups = [], parents = [], j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) { + if (node = group[i]) { + subgroups.push(select.call(node, node.__data__, i, group)); + parents.push(node); + } + } + } + + return new Selection(subgroups, parents); +} + +function matcher(selector) { + return function() { + return this.matches(selector); + }; +} + +function selection_filter(match) { + if (typeof match !== "function") match = matcher(match); + + for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, subgroup = subgroups[j] = [], node, i = 0; i < n; ++i) { + if ((node = group[i]) && match.call(node, node.__data__, i, group)) { + subgroup.push(node); + } + } + } + + return new Selection(subgroups, this._parents); +} + +function sparse(update) { + return new Array(update.length); +} + +function selection_enter() { + return new Selection(this._enter || this._groups.map(sparse), this._parents); +} + +function EnterNode(parent, datum) { + this.ownerDocument = parent.ownerDocument; + this.namespaceURI = parent.namespaceURI; + this._next = null; + this._parent = parent; + this.__data__ = datum; +} + +EnterNode.prototype = { + constructor: EnterNode, + appendChild: function(child) { return this._parent.insertBefore(child, this._next); }, + insertBefore: function(child, next) { return this._parent.insertBefore(child, next); }, + querySelector: function(selector) { return this._parent.querySelector(selector); }, + querySelectorAll: function(selector) { return this._parent.querySelectorAll(selector); } +}; + +function constant$1(x) { + return function() { + return x; + }; +} + +var keyPrefix = "$"; // Protect against 
keys like “__proto__”. + +function bindIndex(parent, group, enter, update, exit, data) { + var i = 0, + node, + groupLength = group.length, + dataLength = data.length; + + // Put any non-null nodes that fit into update. + // Put any null nodes into enter. + // Put any remaining data into enter. + for (; i < dataLength; ++i) { + if (node = group[i]) { + node.__data__ = data[i]; + update[i] = node; + } else { + enter[i] = new EnterNode(parent, data[i]); + } + } + + // Put any non-null nodes that don’t fit into exit. + for (; i < groupLength; ++i) { + if (node = group[i]) { + exit[i] = node; + } + } +} + +function bindKey(parent, group, enter, update, exit, data, key) { + var i, + node, + nodeByKeyValue = {}, + groupLength = group.length, + dataLength = data.length, + keyValues = new Array(groupLength), + keyValue; + + // Compute the key for each node. + // If multiple nodes have the same key, the duplicates are added to exit. + for (i = 0; i < groupLength; ++i) { + if (node = group[i]) { + keyValues[i] = keyValue = keyPrefix + key.call(node, node.__data__, i, group); + if (keyValue in nodeByKeyValue) { + exit[i] = node; + } else { + nodeByKeyValue[keyValue] = node; + } + } + } + + // Compute the key for each datum. + // If there a node associated with this key, join and add it to update. + // If there is not (or the key is a duplicate), add it to enter. + for (i = 0; i < dataLength; ++i) { + keyValue = keyPrefix + key.call(parent, data[i], i, data); + if (node = nodeByKeyValue[keyValue]) { + update[i] = node; + node.__data__ = data[i]; + nodeByKeyValue[keyValue] = null; + } else { + enter[i] = new EnterNode(parent, data[i]); + } + } + + // Add any remaining nodes that were not bound to data to exit. 
+ for (i = 0; i < groupLength; ++i) { + if ((node = group[i]) && (nodeByKeyValue[keyValues[i]] === node)) { + exit[i] = node; + } + } +} + +function selection_data(value, key) { + if (!value) { + data = new Array(this.size()), j = -1; + this.each(function(d) { data[++j] = d; }); + return data; + } + + var bind = key ? bindKey : bindIndex, + parents = this._parents, + groups = this._groups; + + if (typeof value !== "function") value = constant$1(value); + + for (var m = groups.length, update = new Array(m), enter = new Array(m), exit = new Array(m), j = 0; j < m; ++j) { + var parent = parents[j], + group = groups[j], + groupLength = group.length, + data = value.call(parent, parent && parent.__data__, j, parents), + dataLength = data.length, + enterGroup = enter[j] = new Array(dataLength), + updateGroup = update[j] = new Array(dataLength), + exitGroup = exit[j] = new Array(groupLength); + + bind(parent, group, enterGroup, updateGroup, exitGroup, data, key); + + // Now connect the enter nodes to their following update node, such that + // appendChild can insert the materialized enter node before this node, + // rather than at the end of the parent node. + for (var i0 = 0, i1 = 0, previous, next; i0 < dataLength; ++i0) { + if (previous = enterGroup[i0]) { + if (i0 >= i1) i1 = i0 + 1; + while (!(next = updateGroup[i1]) && ++i1 < dataLength); + previous._next = next || null; + } + } + } + + update = new Selection(update, parents); + update._enter = enter; + update._exit = exit; + return update; +} + +function selection_exit() { + return new Selection(this._exit || this._groups.map(sparse), this._parents); +} + +function selection_join(onenter, onupdate, onexit) { + var enter = this.enter(), update = this, exit = this.exit(); + enter = typeof onenter === "function" ? onenter(enter) : enter.append(onenter + ""); + if (onupdate != null) update = onupdate(update); + if (onexit == null) exit.remove(); else onexit(exit); + return enter && update ? 
enter.merge(update).order() : update; +} + +function selection_merge(selection) { + + for (var groups0 = this._groups, groups1 = selection._groups, m0 = groups0.length, m1 = groups1.length, m = Math.min(m0, m1), merges = new Array(m0), j = 0; j < m; ++j) { + for (var group0 = groups0[j], group1 = groups1[j], n = group0.length, merge = merges[j] = new Array(n), node, i = 0; i < n; ++i) { + if (node = group0[i] || group1[i]) { + merge[i] = node; + } + } + } + + for (; j < m0; ++j) { + merges[j] = groups0[j]; + } + + return new Selection(merges, this._parents); +} + +function selection_order() { + + for (var groups = this._groups, j = -1, m = groups.length; ++j < m;) { + for (var group = groups[j], i = group.length - 1, next = group[i], node; --i >= 0;) { + if (node = group[i]) { + if (next && node.compareDocumentPosition(next) ^ 4) next.parentNode.insertBefore(node, next); + next = node; + } + } + } + + return this; +} + +function selection_sort(compare) { + if (!compare) compare = ascending$1; + + function compareNode(a, b) { + return a && b ? compare(a.__data__, b.__data__) : !a - !b; + } + + for (var groups = this._groups, m = groups.length, sortgroups = new Array(m), j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, sortgroup = sortgroups[j] = new Array(n), node, i = 0; i < n; ++i) { + if (node = group[i]) { + sortgroup[i] = node; + } + } + sortgroup.sort(compareNode); + } + + return new Selection(sortgroups, this._parents).order(); +} + +function ascending$1(a, b) { + return a < b ? -1 : a > b ? 1 : a >= b ? 
0 : NaN; +} + +function selection_call() { + var callback = arguments[0]; + arguments[0] = this; + callback.apply(null, arguments); + return this; +} + +function selection_nodes() { + var nodes = new Array(this.size()), i = -1; + this.each(function() { nodes[++i] = this; }); + return nodes; +} + +function selection_node() { + + for (var groups = this._groups, j = 0, m = groups.length; j < m; ++j) { + for (var group = groups[j], i = 0, n = group.length; i < n; ++i) { + var node = group[i]; + if (node) return node; + } + } + + return null; +} + +function selection_size() { + var size = 0; + this.each(function() { ++size; }); + return size; +} + +function selection_empty() { + return !this.node(); +} + +function selection_each(callback) { + + for (var groups = this._groups, j = 0, m = groups.length; j < m; ++j) { + for (var group = groups[j], i = 0, n = group.length, node; i < n; ++i) { + if (node = group[i]) callback.call(node, node.__data__, i, group); + } + } + + return this; +} + +function attrRemove(name) { + return function() { + this.removeAttribute(name); + }; +} + +function attrRemoveNS(fullname) { + return function() { + this.removeAttributeNS(fullname.space, fullname.local); + }; +} + +function attrConstant(name, value) { + return function() { + this.setAttribute(name, value); + }; +} + +function attrConstantNS(fullname, value) { + return function() { + this.setAttributeNS(fullname.space, fullname.local, value); + }; +} + +function attrFunction(name, value) { + return function() { + var v = value.apply(this, arguments); + if (v == null) this.removeAttribute(name); + else this.setAttribute(name, v); + }; +} + +function attrFunctionNS(fullname, value) { + return function() { + var v = value.apply(this, arguments); + if (v == null) this.removeAttributeNS(fullname.space, fullname.local); + else this.setAttributeNS(fullname.space, fullname.local, v); + }; +} + +function selection_attr(name, value) { + var fullname = namespace(name); + + if (arguments.length < 2) 
{ + var node = this.node(); + return fullname.local + ? node.getAttributeNS(fullname.space, fullname.local) + : node.getAttribute(fullname); + } + + return this.each((value == null + ? (fullname.local ? attrRemoveNS : attrRemove) : (typeof value === "function" + ? (fullname.local ? attrFunctionNS : attrFunction) + : (fullname.local ? attrConstantNS : attrConstant)))(fullname, value)); +} + +function defaultView(node) { + return (node.ownerDocument && node.ownerDocument.defaultView) // node is a Node + || (node.document && node) // node is a Window + || node.defaultView; // node is a Document +} + +function styleRemove(name) { + return function() { + this.style.removeProperty(name); + }; +} + +function styleConstant(name, value, priority) { + return function() { + this.style.setProperty(name, value, priority); + }; +} + +function styleFunction(name, value, priority) { + return function() { + var v = value.apply(this, arguments); + if (v == null) this.style.removeProperty(name); + else this.style.setProperty(name, v, priority); + }; +} + +function selection_style(name, value, priority) { + return arguments.length > 1 + ? this.each((value == null + ? styleRemove : typeof value === "function" + ? styleFunction + : styleConstant)(name, value, priority == null ? "" : priority)) + : styleValue(this.node(), name); +} + +function styleValue(node, name) { + return node.style.getPropertyValue(name) + || defaultView(node).getComputedStyle(node, null).getPropertyValue(name); +} + +function propertyRemove(name) { + return function() { + delete this[name]; + }; +} + +function propertyConstant(name, value) { + return function() { + this[name] = value; + }; +} + +function propertyFunction(name, value) { + return function() { + var v = value.apply(this, arguments); + if (v == null) delete this[name]; + else this[name] = v; + }; +} + +function selection_property(name, value) { + return arguments.length > 1 + ? this.each((value == null + ? 
propertyRemove : typeof value === "function" + ? propertyFunction + : propertyConstant)(name, value)) + : this.node()[name]; +} + +function classArray(string) { + return string.trim().split(/^|\s+/); +} + +function classList(node) { + return node.classList || new ClassList(node); +} + +function ClassList(node) { + this._node = node; + this._names = classArray(node.getAttribute("class") || ""); +} + +ClassList.prototype = { + add: function(name) { + var i = this._names.indexOf(name); + if (i < 0) { + this._names.push(name); + this._node.setAttribute("class", this._names.join(" ")); + } + }, + remove: function(name) { + var i = this._names.indexOf(name); + if (i >= 0) { + this._names.splice(i, 1); + this._node.setAttribute("class", this._names.join(" ")); + } + }, + contains: function(name) { + return this._names.indexOf(name) >= 0; + } +}; + +function classedAdd(node, names) { + var list = classList(node), i = -1, n = names.length; + while (++i < n) list.add(names[i]); +} + +function classedRemove(node, names) { + var list = classList(node), i = -1, n = names.length; + while (++i < n) list.remove(names[i]); +} + +function classedTrue(names) { + return function() { + classedAdd(this, names); + }; +} + +function classedFalse(names) { + return function() { + classedRemove(this, names); + }; +} + +function classedFunction(names, value) { + return function() { + (value.apply(this, arguments) ? classedAdd : classedRemove)(this, names); + }; +} + +function selection_classed(name, value) { + var names = classArray(name + ""); + + if (arguments.length < 2) { + var list = classList(this.node()), i = -1, n = names.length; + while (++i < n) if (!list.contains(names[i])) return false; + return true; + } + + return this.each((typeof value === "function" + ? classedFunction : value + ? 
classedTrue + : classedFalse)(names, value)); +} + +function textRemove() { + this.textContent = ""; +} + +function textConstant(value) { + return function() { + this.textContent = value; + }; +} + +function textFunction(value) { + return function() { + var v = value.apply(this, arguments); + this.textContent = v == null ? "" : v; + }; +} + +function selection_text(value) { + return arguments.length + ? this.each(value == null + ? textRemove : (typeof value === "function" + ? textFunction + : textConstant)(value)) + : this.node().textContent; +} + +function htmlRemove() { + this.innerHTML = ""; +} + +function htmlConstant(value) { + return function() { + this.innerHTML = value; + }; +} + +function htmlFunction(value) { + return function() { + var v = value.apply(this, arguments); + this.innerHTML = v == null ? "" : v; + }; +} + +function selection_html(value) { + return arguments.length + ? this.each(value == null + ? htmlRemove : (typeof value === "function" + ? htmlFunction + : htmlConstant)(value)) + : this.node().innerHTML; +} + +function raise() { + if (this.nextSibling) this.parentNode.appendChild(this); +} + +function selection_raise() { + return this.each(raise); +} + +function lower() { + if (this.previousSibling) this.parentNode.insertBefore(this, this.parentNode.firstChild); +} + +function selection_lower() { + return this.each(lower); +} + +function selection_append(name) { + var create = typeof name === "function" ? name : creator(name); + return this.select(function() { + return this.appendChild(create.apply(this, arguments)); + }); +} + +function constantNull() { + return null; +} + +function selection_insert(name, before) { + var create = typeof name === "function" ? name : creator(name), + select = before == null ? constantNull : typeof before === "function" ? 
before : selector(before); + return this.select(function() { + return this.insertBefore(create.apply(this, arguments), select.apply(this, arguments) || null); + }); +} + +function remove() { + var parent = this.parentNode; + if (parent) parent.removeChild(this); +} + +function selection_remove() { + return this.each(remove); +} + +function selection_cloneShallow() { + var clone = this.cloneNode(false), parent = this.parentNode; + return parent ? parent.insertBefore(clone, this.nextSibling) : clone; +} + +function selection_cloneDeep() { + var clone = this.cloneNode(true), parent = this.parentNode; + return parent ? parent.insertBefore(clone, this.nextSibling) : clone; +} + +function selection_clone(deep) { + return this.select(deep ? selection_cloneDeep : selection_cloneShallow); +} + +function selection_datum(value) { + return arguments.length + ? this.property("__data__", value) + : this.node().__data__; +} + +var filterEvents = {}; + +exports.event = null; + +if (typeof document !== "undefined") { + var element = document.documentElement; + if (!("onmouseenter" in element)) { + filterEvents = {mouseenter: "mouseover", mouseleave: "mouseout"}; + } +} + +function filterContextListener(listener, index, group) { + listener = contextListener(listener, index, group); + return function(event) { + var related = event.relatedTarget; + if (!related || (related !== this && !(related.compareDocumentPosition(this) & 8))) { + listener.call(this, event); + } + }; +} + +function contextListener(listener, index, group) { + return function(event1) { + var event0 = exports.event; // Events can be reentrant (e.g., focus). 
+ exports.event = event1; + try { + listener.call(this, this.__data__, index, group); + } finally { + exports.event = event0; + } + }; +} + +function parseTypenames$1(typenames) { + return typenames.trim().split(/^|\s+/).map(function(t) { + var name = "", i = t.indexOf("."); + if (i >= 0) name = t.slice(i + 1), t = t.slice(0, i); + return {type: t, name: name}; + }); +} + +function onRemove(typename) { + return function() { + var on = this.__on; + if (!on) return; + for (var j = 0, i = -1, m = on.length, o; j < m; ++j) { + if (o = on[j], (!typename.type || o.type === typename.type) && o.name === typename.name) { + this.removeEventListener(o.type, o.listener, o.capture); + } else { + on[++i] = o; + } + } + if (++i) on.length = i; + else delete this.__on; + }; +} + +function onAdd(typename, value, capture) { + var wrap = filterEvents.hasOwnProperty(typename.type) ? filterContextListener : contextListener; + return function(d, i, group) { + var on = this.__on, o, listener = wrap(value, i, group); + if (on) for (var j = 0, m = on.length; j < m; ++j) { + if ((o = on[j]).type === typename.type && o.name === typename.name) { + this.removeEventListener(o.type, o.listener, o.capture); + this.addEventListener(o.type, o.listener = listener, o.capture = capture); + o.value = value; + return; + } + } + this.addEventListener(typename.type, listener, capture); + o = {type: typename.type, name: typename.name, value: value, listener: listener, capture: capture}; + if (!on) this.__on = [o]; + else on.push(o); + }; +} + +function selection_on(typename, value, capture) { + var typenames = parseTypenames$1(typename + ""), i, n = typenames.length, t; + + if (arguments.length < 2) { + var on = this.node().__on; + if (on) for (var j = 0, m = on.length, o; j < m; ++j) { + for (i = 0, o = on[j]; i < n; ++i) { + if ((t = typenames[i]).type === o.type && t.name === o.name) { + return o.value; + } + } + } + return; + } + + on = value ? 
onAdd : onRemove; + if (capture == null) capture = false; + for (i = 0; i < n; ++i) this.each(on(typenames[i], value, capture)); + return this; +} + +function customEvent(event1, listener, that, args) { + var event0 = exports.event; + event1.sourceEvent = exports.event; + exports.event = event1; + try { + return listener.apply(that, args); + } finally { + exports.event = event0; + } +} + +function dispatchEvent(node, type, params) { + var window = defaultView(node), + event = window.CustomEvent; + + if (typeof event === "function") { + event = new event(type, params); + } else { + event = window.document.createEvent("Event"); + if (params) event.initEvent(type, params.bubbles, params.cancelable), event.detail = params.detail; + else event.initEvent(type, false, false); + } + + node.dispatchEvent(event); +} + +function dispatchConstant(type, params) { + return function() { + return dispatchEvent(this, type, params); + }; +} + +function dispatchFunction(type, params) { + return function() { + return dispatchEvent(this, type, params.apply(this, arguments)); + }; +} + +function selection_dispatch(type, params) { + return this.each((typeof params === "function" + ? 
dispatchFunction + : dispatchConstant)(type, params)); +} + +var root = [null]; + +function Selection(groups, parents) { + this._groups = groups; + this._parents = parents; +} + +function selection() { + return new Selection([[document.documentElement]], root); +} + +Selection.prototype = selection.prototype = { + constructor: Selection, + select: selection_select, + selectAll: selection_selectAll, + filter: selection_filter, + data: selection_data, + enter: selection_enter, + exit: selection_exit, + join: selection_join, + merge: selection_merge, + order: selection_order, + sort: selection_sort, + call: selection_call, + nodes: selection_nodes, + node: selection_node, + size: selection_size, + empty: selection_empty, + each: selection_each, + attr: selection_attr, + style: selection_style, + property: selection_property, + classed: selection_classed, + text: selection_text, + html: selection_html, + raise: selection_raise, + lower: selection_lower, + append: selection_append, + insert: selection_insert, + remove: selection_remove, + clone: selection_clone, + datum: selection_datum, + on: selection_on, + dispatch: selection_dispatch +}; + +function select(selector) { + return typeof selector === "string" + ? 
new Selection([[document.querySelector(selector)]], [document.documentElement]) + : new Selection([[selector]], root); +} + +function create(name) { + return select(creator(name).call(document.documentElement)); +} + +var nextId = 0; + +function local() { + return new Local; +} + +function Local() { + this._ = "@" + (++nextId).toString(36); +} + +Local.prototype = local.prototype = { + constructor: Local, + get: function(node) { + var id = this._; + while (!(id in node)) if (!(node = node.parentNode)) return; + return node[id]; + }, + set: function(node, value) { + return node[this._] = value; + }, + remove: function(node) { + return this._ in node && delete node[this._]; + }, + toString: function() { + return this._; + } +}; + +function sourceEvent() { + var current = exports.event, source; + while (source = current.sourceEvent) current = source; + return current; +} + +function point(node, event) { + var svg = node.ownerSVGElement || node; + + if (svg.createSVGPoint) { + var point = svg.createSVGPoint(); + point.x = event.clientX, point.y = event.clientY; + point = point.matrixTransform(node.getScreenCTM().inverse()); + return [point.x, point.y]; + } + + var rect = node.getBoundingClientRect(); + return [event.clientX - rect.left - node.clientLeft, event.clientY - rect.top - node.clientTop]; +} + +function mouse(node) { + var event = sourceEvent(); + if (event.changedTouches) event = event.changedTouches[0]; + return point(node, event); +} + +function selectAll(selector) { + return typeof selector === "string" + ? new Selection([document.querySelectorAll(selector)], [document.documentElement]) + : new Selection([selector == null ? [] : selector], root); +} + +function touch(node, touches, identifier) { + if (arguments.length < 3) identifier = touches, touches = sourceEvent().changedTouches; + + for (var i = 0, n = touches ? 
touches.length : 0, touch; i < n; ++i) { + if ((touch = touches[i]).identifier === identifier) { + return point(node, touch); + } + } + + return null; +} + +function touches(node, touches) { + if (touches == null) touches = sourceEvent().touches; + + for (var i = 0, n = touches ? touches.length : 0, points = new Array(n); i < n; ++i) { + points[i] = point(node, touches[i]); + } + + return points; +} + +function nopropagation() { + exports.event.stopImmediatePropagation(); +} + +function noevent() { + exports.event.preventDefault(); + exports.event.stopImmediatePropagation(); +} + +function dragDisable(view) { + var root = view.document.documentElement, + selection = select(view).on("dragstart.drag", noevent, true); + if ("onselectstart" in root) { + selection.on("selectstart.drag", noevent, true); + } else { + root.__noselect = root.style.MozUserSelect; + root.style.MozUserSelect = "none"; + } +} + +function yesdrag(view, noclick) { + var root = view.document.documentElement, + selection = select(view).on("dragstart.drag", null); + if (noclick) { + selection.on("click.drag", noevent, true); + setTimeout(function() { selection.on("click.drag", null); }, 0); + } + if ("onselectstart" in root) { + selection.on("selectstart.drag", null); + } else { + root.style.MozUserSelect = root.__noselect; + delete root.__noselect; + } +} + +function constant$2(x) { + return function() { + return x; + }; +} + +function DragEvent(target, type, subject, id, active, x, y, dx, dy, dispatch) { + this.target = target; + this.type = type; + this.subject = subject; + this.identifier = id; + this.active = active; + this.x = x; + this.y = y; + this.dx = dx; + this.dy = dy; + this._ = dispatch; +} + +DragEvent.prototype.on = function() { + var value = this._.on.apply(this._, arguments); + return value === this._ ? this : value; +}; + +// Ignore right-click, since that should open the context menu. 
+function defaultFilter() { + return !exports.event.ctrlKey && !exports.event.button; +} + +function defaultContainer() { + return this.parentNode; +} + +function defaultSubject(d) { + return d == null ? {x: exports.event.x, y: exports.event.y} : d; +} + +function defaultTouchable() { + return navigator.maxTouchPoints || ("ontouchstart" in this); +} + +function drag() { + var filter = defaultFilter, + container = defaultContainer, + subject = defaultSubject, + touchable = defaultTouchable, + gestures = {}, + listeners = dispatch("start", "drag", "end"), + active = 0, + mousedownx, + mousedowny, + mousemoving, + touchending, + clickDistance2 = 0; + + function drag(selection) { + selection + .on("mousedown.drag", mousedowned) + .filter(touchable) + .on("touchstart.drag", touchstarted) + .on("touchmove.drag", touchmoved) + .on("touchend.drag touchcancel.drag", touchended) + .style("touch-action", "none") + .style("-webkit-tap-highlight-color", "rgba(0,0,0,0)"); + } + + function mousedowned() { + if (touchending || !filter.apply(this, arguments)) return; + var gesture = beforestart("mouse", container.apply(this, arguments), mouse, this, arguments); + if (!gesture) return; + select(exports.event.view).on("mousemove.drag", mousemoved, true).on("mouseup.drag", mouseupped, true); + dragDisable(exports.event.view); + nopropagation(); + mousemoving = false; + mousedownx = exports.event.clientX; + mousedowny = exports.event.clientY; + gesture("start"); + } + + function mousemoved() { + noevent(); + if (!mousemoving) { + var dx = exports.event.clientX - mousedownx, dy = exports.event.clientY - mousedowny; + mousemoving = dx * dx + dy * dy > clickDistance2; + } + gestures.mouse("drag"); + } + + function mouseupped() { + select(exports.event.view).on("mousemove.drag mouseup.drag", null); + yesdrag(exports.event.view, mousemoving); + noevent(); + gestures.mouse("end"); + } + + function touchstarted() { + if (!filter.apply(this, arguments)) return; + var touches = 
exports.event.changedTouches, + c = container.apply(this, arguments), + n = touches.length, i, gesture; + + for (i = 0; i < n; ++i) { + if (gesture = beforestart(touches[i].identifier, c, touch, this, arguments)) { + nopropagation(); + gesture("start"); + } + } + } + + function touchmoved() { + var touches = exports.event.changedTouches, + n = touches.length, i, gesture; + + for (i = 0; i < n; ++i) { + if (gesture = gestures[touches[i].identifier]) { + noevent(); + gesture("drag"); + } + } + } + + function touchended() { + var touches = exports.event.changedTouches, + n = touches.length, i, gesture; + + if (touchending) clearTimeout(touchending); + touchending = setTimeout(function() { touchending = null; }, 500); // Ghost clicks are delayed! + for (i = 0; i < n; ++i) { + if (gesture = gestures[touches[i].identifier]) { + nopropagation(); + gesture("end"); + } + } + } + + function beforestart(id, container, point, that, args) { + var p = point(container, id), s, dx, dy, + sublisteners = listeners.copy(); + + if (!customEvent(new DragEvent(drag, "beforestart", s, id, active, p[0], p[1], 0, 0, sublisteners), function() { + if ((exports.event.subject = s = subject.apply(that, args)) == null) return false; + dx = s.x - p[0] || 0; + dy = s.y - p[1] || 0; + return true; + })) return; + + return function gesture(type) { + var p0 = p, n; + switch (type) { + case "start": gestures[id] = gesture, n = active++; break; + case "end": delete gestures[id], --active; // nobreak + case "drag": p = point(container, id), n = active; break; + } + customEvent(new DragEvent(drag, type, s, id, n, p[0] + dx, p[1] + dy, p[0] - p0[0], p[1] - p0[1], sublisteners), sublisteners.apply, sublisteners, [type, that, args]); + }; + } + + drag.filter = function(_) { + return arguments.length ? (filter = typeof _ === "function" ? _ : constant$2(!!_), drag) : filter; + }; + + drag.container = function(_) { + return arguments.length ? (container = typeof _ === "function" ? 
_ : constant$2(_), drag) : container; + }; + + drag.subject = function(_) { + return arguments.length ? (subject = typeof _ === "function" ? _ : constant$2(_), drag) : subject; + }; + + drag.touchable = function(_) { + return arguments.length ? (touchable = typeof _ === "function" ? _ : constant$2(!!_), drag) : touchable; + }; + + drag.on = function() { + var value = listeners.on.apply(listeners, arguments); + return value === listeners ? drag : value; + }; + + drag.clickDistance = function(_) { + return arguments.length ? (clickDistance2 = (_ = +_) * _, drag) : Math.sqrt(clickDistance2); + }; + + return drag; +} + +function define(constructor, factory, prototype) { + constructor.prototype = factory.prototype = prototype; + prototype.constructor = constructor; +} + +function extend(parent, definition) { + var prototype = Object.create(parent.prototype); + for (var key in definition) prototype[key] = definition[key]; + return prototype; +} + +function Color() {} + +var darker = 0.7; +var brighter = 1 / darker; + +var reI = "\\s*([+-]?\\d+)\\s*", + reN = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)\\s*", + reP = "\\s*([+-]?\\d*\\.?\\d+(?:[eE][+-]?\\d+)?)%\\s*", + reHex = /^#([0-9a-f]{3,8})$/, + reRgbInteger = new RegExp("^rgb\\(" + [reI, reI, reI] + "\\)$"), + reRgbPercent = new RegExp("^rgb\\(" + [reP, reP, reP] + "\\)$"), + reRgbaInteger = new RegExp("^rgba\\(" + [reI, reI, reI, reN] + "\\)$"), + reRgbaPercent = new RegExp("^rgba\\(" + [reP, reP, reP, reN] + "\\)$"), + reHslPercent = new RegExp("^hsl\\(" + [reN, reP, reP] + "\\)$"), + reHslaPercent = new RegExp("^hsla\\(" + [reN, reP, reP, reN] + "\\)$"); + +var named = { + aliceblue: 0xf0f8ff, + antiquewhite: 0xfaebd7, + aqua: 0x00ffff, + aquamarine: 0x7fffd4, + azure: 0xf0ffff, + beige: 0xf5f5dc, + bisque: 0xffe4c4, + black: 0x000000, + blanchedalmond: 0xffebcd, + blue: 0x0000ff, + blueviolet: 0x8a2be2, + brown: 0xa52a2a, + burlywood: 0xdeb887, + cadetblue: 0x5f9ea0, + chartreuse: 0x7fff00, + chocolate: 0xd2691e, + 
coral: 0xff7f50, + cornflowerblue: 0x6495ed, + cornsilk: 0xfff8dc, + crimson: 0xdc143c, + cyan: 0x00ffff, + darkblue: 0x00008b, + darkcyan: 0x008b8b, + darkgoldenrod: 0xb8860b, + darkgray: 0xa9a9a9, + darkgreen: 0x006400, + darkgrey: 0xa9a9a9, + darkkhaki: 0xbdb76b, + darkmagenta: 0x8b008b, + darkolivegreen: 0x556b2f, + darkorange: 0xff8c00, + darkorchid: 0x9932cc, + darkred: 0x8b0000, + darksalmon: 0xe9967a, + darkseagreen: 0x8fbc8f, + darkslateblue: 0x483d8b, + darkslategray: 0x2f4f4f, + darkslategrey: 0x2f4f4f, + darkturquoise: 0x00ced1, + darkviolet: 0x9400d3, + deeppink: 0xff1493, + deepskyblue: 0x00bfff, + dimgray: 0x696969, + dimgrey: 0x696969, + dodgerblue: 0x1e90ff, + firebrick: 0xb22222, + floralwhite: 0xfffaf0, + forestgreen: 0x228b22, + fuchsia: 0xff00ff, + gainsboro: 0xdcdcdc, + ghostwhite: 0xf8f8ff, + gold: 0xffd700, + goldenrod: 0xdaa520, + gray: 0x808080, + green: 0x008000, + greenyellow: 0xadff2f, + grey: 0x808080, + honeydew: 0xf0fff0, + hotpink: 0xff69b4, + indianred: 0xcd5c5c, + indigo: 0x4b0082, + ivory: 0xfffff0, + khaki: 0xf0e68c, + lavender: 0xe6e6fa, + lavenderblush: 0xfff0f5, + lawngreen: 0x7cfc00, + lemonchiffon: 0xfffacd, + lightblue: 0xadd8e6, + lightcoral: 0xf08080, + lightcyan: 0xe0ffff, + lightgoldenrodyellow: 0xfafad2, + lightgray: 0xd3d3d3, + lightgreen: 0x90ee90, + lightgrey: 0xd3d3d3, + lightpink: 0xffb6c1, + lightsalmon: 0xffa07a, + lightseagreen: 0x20b2aa, + lightskyblue: 0x87cefa, + lightslategray: 0x778899, + lightslategrey: 0x778899, + lightsteelblue: 0xb0c4de, + lightyellow: 0xffffe0, + lime: 0x00ff00, + limegreen: 0x32cd32, + linen: 0xfaf0e6, + magenta: 0xff00ff, + maroon: 0x800000, + mediumaquamarine: 0x66cdaa, + mediumblue: 0x0000cd, + mediumorchid: 0xba55d3, + mediumpurple: 0x9370db, + mediumseagreen: 0x3cb371, + mediumslateblue: 0x7b68ee, + mediumspringgreen: 0x00fa9a, + mediumturquoise: 0x48d1cc, + mediumvioletred: 0xc71585, + midnightblue: 0x191970, + mintcream: 0xf5fffa, + mistyrose: 0xffe4e1, + moccasin: 0xffe4b5, 
+ navajowhite: 0xffdead, + navy: 0x000080, + oldlace: 0xfdf5e6, + olive: 0x808000, + olivedrab: 0x6b8e23, + orange: 0xffa500, + orangered: 0xff4500, + orchid: 0xda70d6, + palegoldenrod: 0xeee8aa, + palegreen: 0x98fb98, + paleturquoise: 0xafeeee, + palevioletred: 0xdb7093, + papayawhip: 0xffefd5, + peachpuff: 0xffdab9, + peru: 0xcd853f, + pink: 0xffc0cb, + plum: 0xdda0dd, + powderblue: 0xb0e0e6, + purple: 0x800080, + rebeccapurple: 0x663399, + red: 0xff0000, + rosybrown: 0xbc8f8f, + royalblue: 0x4169e1, + saddlebrown: 0x8b4513, + salmon: 0xfa8072, + sandybrown: 0xf4a460, + seagreen: 0x2e8b57, + seashell: 0xfff5ee, + sienna: 0xa0522d, + silver: 0xc0c0c0, + skyblue: 0x87ceeb, + slateblue: 0x6a5acd, + slategray: 0x708090, + slategrey: 0x708090, + snow: 0xfffafa, + springgreen: 0x00ff7f, + steelblue: 0x4682b4, + tan: 0xd2b48c, + teal: 0x008080, + thistle: 0xd8bfd8, + tomato: 0xff6347, + turquoise: 0x40e0d0, + violet: 0xee82ee, + wheat: 0xf5deb3, + white: 0xffffff, + whitesmoke: 0xf5f5f5, + yellow: 0xffff00, + yellowgreen: 0x9acd32 +}; + +define(Color, color, { + copy: function(channels) { + return Object.assign(new this.constructor, this, channels); + }, + displayable: function() { + return this.rgb().displayable(); + }, + hex: color_formatHex, // Deprecated! Use color.formatHex. + formatHex: color_formatHex, + formatHsl: color_formatHsl, + formatRgb: color_formatRgb, + toString: color_formatRgb +}); + +function color_formatHex() { + return this.rgb().formatHex(); +} + +function color_formatHsl() { + return hslConvert(this).formatHsl(); +} + +function color_formatRgb() { + return this.rgb().formatRgb(); +} + +function color(format) { + var m, l; + format = (format + "").trim().toLowerCase(); + return (m = reHex.exec(format)) ? (l = m[1].length, m = parseInt(m[1], 16), l === 6 ? rgbn(m) // #ff0000 + : l === 3 ? new Rgb((m >> 8 & 0xf) | (m >> 4 & 0xf0), (m >> 4 & 0xf) | (m & 0xf0), ((m & 0xf) << 4) | (m & 0xf), 1) // #f00 + : l === 8 ? 
rgba(m >> 24 & 0xff, m >> 16 & 0xff, m >> 8 & 0xff, (m & 0xff) / 0xff) // #ff000000 + : l === 4 ? rgba((m >> 12 & 0xf) | (m >> 8 & 0xf0), (m >> 8 & 0xf) | (m >> 4 & 0xf0), (m >> 4 & 0xf) | (m & 0xf0), (((m & 0xf) << 4) | (m & 0xf)) / 0xff) // #f000 + : null) // invalid hex + : (m = reRgbInteger.exec(format)) ? new Rgb(m[1], m[2], m[3], 1) // rgb(255, 0, 0) + : (m = reRgbPercent.exec(format)) ? new Rgb(m[1] * 255 / 100, m[2] * 255 / 100, m[3] * 255 / 100, 1) // rgb(100%, 0%, 0%) + : (m = reRgbaInteger.exec(format)) ? rgba(m[1], m[2], m[3], m[4]) // rgba(255, 0, 0, 1) + : (m = reRgbaPercent.exec(format)) ? rgba(m[1] * 255 / 100, m[2] * 255 / 100, m[3] * 255 / 100, m[4]) // rgb(100%, 0%, 0%, 1) + : (m = reHslPercent.exec(format)) ? hsla(m[1], m[2] / 100, m[3] / 100, 1) // hsl(120, 50%, 50%) + : (m = reHslaPercent.exec(format)) ? hsla(m[1], m[2] / 100, m[3] / 100, m[4]) // hsla(120, 50%, 50%, 1) + : named.hasOwnProperty(format) ? rgbn(named[format]) // eslint-disable-line no-prototype-builtins + : format === "transparent" ? new Rgb(NaN, NaN, NaN, 0) + : null; +} + +function rgbn(n) { + return new Rgb(n >> 16 & 0xff, n >> 8 & 0xff, n & 0xff, 1); +} + +function rgba(r, g, b, a) { + if (a <= 0) r = g = b = NaN; + return new Rgb(r, g, b, a); +} + +function rgbConvert(o) { + if (!(o instanceof Color)) o = color(o); + if (!o) return new Rgb; + o = o.rgb(); + return new Rgb(o.r, o.g, o.b, o.opacity); +} + +function rgb(r, g, b, opacity) { + return arguments.length === 1 ? rgbConvert(r) : new Rgb(r, g, b, opacity == null ? 1 : opacity); +} + +function Rgb(r, g, b, opacity) { + this.r = +r; + this.g = +g; + this.b = +b; + this.opacity = +opacity; +} + +define(Rgb, rgb, extend(Color, { + brighter: function(k) { + k = k == null ? brighter : Math.pow(brighter, k); + return new Rgb(this.r * k, this.g * k, this.b * k, this.opacity); + }, + darker: function(k) { + k = k == null ? 
darker : Math.pow(darker, k); + return new Rgb(this.r * k, this.g * k, this.b * k, this.opacity); + }, + rgb: function() { + return this; + }, + displayable: function() { + return (-0.5 <= this.r && this.r < 255.5) + && (-0.5 <= this.g && this.g < 255.5) + && (-0.5 <= this.b && this.b < 255.5) + && (0 <= this.opacity && this.opacity <= 1); + }, + hex: rgb_formatHex, // Deprecated! Use color.formatHex. + formatHex: rgb_formatHex, + formatRgb: rgb_formatRgb, + toString: rgb_formatRgb +})); + +function rgb_formatHex() { + return "#" + hex(this.r) + hex(this.g) + hex(this.b); +} + +function rgb_formatRgb() { + var a = this.opacity; a = isNaN(a) ? 1 : Math.max(0, Math.min(1, a)); + return (a === 1 ? "rgb(" : "rgba(") + + Math.max(0, Math.min(255, Math.round(this.r) || 0)) + ", " + + Math.max(0, Math.min(255, Math.round(this.g) || 0)) + ", " + + Math.max(0, Math.min(255, Math.round(this.b) || 0)) + + (a === 1 ? ")" : ", " + a + ")"); +} + +function hex(value) { + value = Math.max(0, Math.min(255, Math.round(value) || 0)); + return (value < 16 ? "0" : "") + value.toString(16); +} + +function hsla(h, s, l, a) { + if (a <= 0) h = s = l = NaN; + else if (l <= 0 || l >= 1) h = s = NaN; + else if (s <= 0) h = NaN; + return new Hsl(h, s, l, a); +} + +function hslConvert(o) { + if (o instanceof Hsl) return new Hsl(o.h, o.s, o.l, o.opacity); + if (!(o instanceof Color)) o = color(o); + if (!o) return new Hsl; + if (o instanceof Hsl) return o; + o = o.rgb(); + var r = o.r / 255, + g = o.g / 255, + b = o.b / 255, + min = Math.min(r, g, b), + max = Math.max(r, g, b), + h = NaN, + s = max - min, + l = (max + min) / 2; + if (s) { + if (r === max) h = (g - b) / s + (g < b) * 6; + else if (g === max) h = (b - r) / s + 2; + else h = (r - g) / s + 4; + s /= l < 0.5 ? max + min : 2 - max - min; + h *= 60; + } else { + s = l > 0 && l < 1 ? 0 : h; + } + return new Hsl(h, s, l, o.opacity); +} + +function hsl(h, s, l, opacity) { + return arguments.length === 1 ? 
hslConvert(h) : new Hsl(h, s, l, opacity == null ? 1 : opacity); +} + +function Hsl(h, s, l, opacity) { + this.h = +h; + this.s = +s; + this.l = +l; + this.opacity = +opacity; +} + +define(Hsl, hsl, extend(Color, { + brighter: function(k) { + k = k == null ? brighter : Math.pow(brighter, k); + return new Hsl(this.h, this.s, this.l * k, this.opacity); + }, + darker: function(k) { + k = k == null ? darker : Math.pow(darker, k); + return new Hsl(this.h, this.s, this.l * k, this.opacity); + }, + rgb: function() { + var h = this.h % 360 + (this.h < 0) * 360, + s = isNaN(h) || isNaN(this.s) ? 0 : this.s, + l = this.l, + m2 = l + (l < 0.5 ? l : 1 - l) * s, + m1 = 2 * l - m2; + return new Rgb( + hsl2rgb(h >= 240 ? h - 240 : h + 120, m1, m2), + hsl2rgb(h, m1, m2), + hsl2rgb(h < 120 ? h + 240 : h - 120, m1, m2), + this.opacity + ); + }, + displayable: function() { + return (0 <= this.s && this.s <= 1 || isNaN(this.s)) + && (0 <= this.l && this.l <= 1) + && (0 <= this.opacity && this.opacity <= 1); + }, + formatHsl: function() { + var a = this.opacity; a = isNaN(a) ? 1 : Math.max(0, Math.min(1, a)); + return (a === 1 ? "hsl(" : "hsla(") + + (this.h || 0) + ", " + + (this.s || 0) * 100 + "%, " + + (this.l || 0) * 100 + "%" + + (a === 1 ? ")" : ", " + a + ")"); + } +})); + +/* From FvD 13.37, CSS Color Module Level 3 */ +function hsl2rgb(h, m1, m2) { + return (h < 60 ? m1 + (m2 - m1) * h / 60 + : h < 180 ? m2 + : h < 240 ? 
m1 + (m2 - m1) * (240 - h) / 60 + : m1) * 255; +} + +var deg2rad = Math.PI / 180; +var rad2deg = 180 / Math.PI; + +// https://observablehq.com/@mbostock/lab-and-rgb +var K = 18, + Xn = 0.96422, + Yn = 1, + Zn = 0.82521, + t0 = 4 / 29, + t1 = 6 / 29, + t2 = 3 * t1 * t1, + t3 = t1 * t1 * t1; + +function labConvert(o) { + if (o instanceof Lab) return new Lab(o.l, o.a, o.b, o.opacity); + if (o instanceof Hcl) return hcl2lab(o); + if (!(o instanceof Rgb)) o = rgbConvert(o); + var r = rgb2lrgb(o.r), + g = rgb2lrgb(o.g), + b = rgb2lrgb(o.b), + y = xyz2lab((0.2225045 * r + 0.7168786 * g + 0.0606169 * b) / Yn), x, z; + if (r === g && g === b) x = z = y; else { + x = xyz2lab((0.4360747 * r + 0.3850649 * g + 0.1430804 * b) / Xn); + z = xyz2lab((0.0139322 * r + 0.0971045 * g + 0.7141733 * b) / Zn); + } + return new Lab(116 * y - 16, 500 * (x - y), 200 * (y - z), o.opacity); +} + +function gray(l, opacity) { + return new Lab(l, 0, 0, opacity == null ? 1 : opacity); +} + +function lab(l, a, b, opacity) { + return arguments.length === 1 ? labConvert(l) : new Lab(l, a, b, opacity == null ? 1 : opacity); +} + +function Lab(l, a, b, opacity) { + this.l = +l; + this.a = +a; + this.b = +b; + this.opacity = +opacity; +} + +define(Lab, lab, extend(Color, { + brighter: function(k) { + return new Lab(this.l + K * (k == null ? 1 : k), this.a, this.b, this.opacity); + }, + darker: function(k) { + return new Lab(this.l - K * (k == null ? 1 : k), this.a, this.b, this.opacity); + }, + rgb: function() { + var y = (this.l + 16) / 116, + x = isNaN(this.a) ? y : y + this.a / 500, + z = isNaN(this.b) ? y : y - this.b / 200; + x = Xn * lab2xyz(x); + y = Yn * lab2xyz(y); + z = Zn * lab2xyz(z); + return new Rgb( + lrgb2rgb( 3.1338561 * x - 1.6168667 * y - 0.4906146 * z), + lrgb2rgb(-0.9787684 * x + 1.9161415 * y + 0.0334540 * z), + lrgb2rgb( 0.0719453 * x - 0.2289914 * y + 1.4052427 * z), + this.opacity + ); + } +})); + +function xyz2lab(t) { + return t > t3 ? 
Math.pow(t, 1 / 3) : t / t2 + t0; +} + +function lab2xyz(t) { + return t > t1 ? t * t * t : t2 * (t - t0); +} + +function lrgb2rgb(x) { + return 255 * (x <= 0.0031308 ? 12.92 * x : 1.055 * Math.pow(x, 1 / 2.4) - 0.055); +} + +function rgb2lrgb(x) { + return (x /= 255) <= 0.04045 ? x / 12.92 : Math.pow((x + 0.055) / 1.055, 2.4); +} + +function hclConvert(o) { + if (o instanceof Hcl) return new Hcl(o.h, o.c, o.l, o.opacity); + if (!(o instanceof Lab)) o = labConvert(o); + if (o.a === 0 && o.b === 0) return new Hcl(NaN, 0 < o.l && o.l < 100 ? 0 : NaN, o.l, o.opacity); + var h = Math.atan2(o.b, o.a) * rad2deg; + return new Hcl(h < 0 ? h + 360 : h, Math.sqrt(o.a * o.a + o.b * o.b), o.l, o.opacity); +} + +function lch(l, c, h, opacity) { + return arguments.length === 1 ? hclConvert(l) : new Hcl(h, c, l, opacity == null ? 1 : opacity); +} + +function hcl(h, c, l, opacity) { + return arguments.length === 1 ? hclConvert(h) : new Hcl(h, c, l, opacity == null ? 1 : opacity); +} + +function Hcl(h, c, l, opacity) { + this.h = +h; + this.c = +c; + this.l = +l; + this.opacity = +opacity; +} + +function hcl2lab(o) { + if (isNaN(o.h)) return new Lab(o.l, 0, 0, o.opacity); + var h = o.h * deg2rad; + return new Lab(o.l, Math.cos(h) * o.c, Math.sin(h) * o.c, o.opacity); +} + +define(Hcl, hcl, extend(Color, { + brighter: function(k) { + return new Hcl(this.h, this.c, this.l + K * (k == null ? 1 : k), this.opacity); + }, + darker: function(k) { + return new Hcl(this.h, this.c, this.l - K * (k == null ? 
1 : k), this.opacity); + }, + rgb: function() { + return hcl2lab(this).rgb(); + } +})); + +var A = -0.14861, + B = +1.78277, + C = -0.29227, + D = -0.90649, + E = +1.97294, + ED = E * D, + EB = E * B, + BC_DA = B * C - D * A; + +function cubehelixConvert(o) { + if (o instanceof Cubehelix) return new Cubehelix(o.h, o.s, o.l, o.opacity); + if (!(o instanceof Rgb)) o = rgbConvert(o); + var r = o.r / 255, + g = o.g / 255, + b = o.b / 255, + l = (BC_DA * b + ED * r - EB * g) / (BC_DA + ED - EB), + bl = b - l, + k = (E * (g - l) - C * bl) / D, + s = Math.sqrt(k * k + bl * bl) / (E * l * (1 - l)), // NaN if l=0 or l=1 + h = s ? Math.atan2(k, bl) * rad2deg - 120 : NaN; + return new Cubehelix(h < 0 ? h + 360 : h, s, l, o.opacity); +} + +function cubehelix(h, s, l, opacity) { + return arguments.length === 1 ? cubehelixConvert(h) : new Cubehelix(h, s, l, opacity == null ? 1 : opacity); +} + +function Cubehelix(h, s, l, opacity) { + this.h = +h; + this.s = +s; + this.l = +l; + this.opacity = +opacity; +} + +define(Cubehelix, cubehelix, extend(Color, { + brighter: function(k) { + k = k == null ? brighter : Math.pow(brighter, k); + return new Cubehelix(this.h, this.s, this.l * k, this.opacity); + }, + darker: function(k) { + k = k == null ? darker : Math.pow(darker, k); + return new Cubehelix(this.h, this.s, this.l * k, this.opacity); + }, + rgb: function() { + var h = isNaN(this.h) ? 0 : (this.h + 120) * deg2rad, + l = +this.l, + a = isNaN(this.s) ? 0 : this.s * l * (1 - l), + cosh = Math.cos(h), + sinh = Math.sin(h); + return new Rgb( + 255 * (l + a * (A * cosh + B * sinh)), + 255 * (l + a * (C * cosh + D * sinh)), + 255 * (l + a * (E * cosh)), + this.opacity + ); + } +})); + +function basis(t1, v0, v1, v2, v3) { + var t2 = t1 * t1, t3 = t2 * t1; + return ((1 - 3 * t1 + 3 * t2 - t3) * v0 + + (4 - 6 * t2 + 3 * t3) * v1 + + (1 + 3 * t1 + 3 * t2 - 3 * t3) * v2 + + t3 * v3) / 6; +} + +function basis$1(values) { + var n = values.length - 1; + return function(t) { + var i = t <= 0 ? 
(t = 0) : t >= 1 ? (t = 1, n - 1) : Math.floor(t * n), + v1 = values[i], + v2 = values[i + 1], + v0 = i > 0 ? values[i - 1] : 2 * v1 - v2, + v3 = i < n - 1 ? values[i + 2] : 2 * v2 - v1; + return basis((t - i / n) * n, v0, v1, v2, v3); + }; +} + +function basisClosed(values) { + var n = values.length; + return function(t) { + var i = Math.floor(((t %= 1) < 0 ? ++t : t) * n), + v0 = values[(i + n - 1) % n], + v1 = values[i % n], + v2 = values[(i + 1) % n], + v3 = values[(i + 2) % n]; + return basis((t - i / n) * n, v0, v1, v2, v3); + }; +} + +function constant$3(x) { + return function() { + return x; + }; +} + +function linear(a, d) { + return function(t) { + return a + t * d; + }; +} + +function exponential(a, b, y) { + return a = Math.pow(a, y), b = Math.pow(b, y) - a, y = 1 / y, function(t) { + return Math.pow(a + t * b, y); + }; +} + +function hue(a, b) { + var d = b - a; + return d ? linear(a, d > 180 || d < -180 ? d - 360 * Math.round(d / 360) : d) : constant$3(isNaN(a) ? b : a); +} + +function gamma(y) { + return (y = +y) === 1 ? nogamma : function(a, b) { + return b - a ? exponential(a, b, y) : constant$3(isNaN(a) ? b : a); + }; +} + +function nogamma(a, b) { + var d = b - a; + return d ? linear(a, d) : constant$3(isNaN(a) ? 
b : a); +} + +var interpolateRgb = (function rgbGamma(y) { + var color = gamma(y); + + function rgb$1(start, end) { + var r = color((start = rgb(start)).r, (end = rgb(end)).r), + g = color(start.g, end.g), + b = color(start.b, end.b), + opacity = nogamma(start.opacity, end.opacity); + return function(t) { + start.r = r(t); + start.g = g(t); + start.b = b(t); + start.opacity = opacity(t); + return start + ""; + }; + } + + rgb$1.gamma = rgbGamma; + + return rgb$1; +})(1); + +function rgbSpline(spline) { + return function(colors) { + var n = colors.length, + r = new Array(n), + g = new Array(n), + b = new Array(n), + i, color; + for (i = 0; i < n; ++i) { + color = rgb(colors[i]); + r[i] = color.r || 0; + g[i] = color.g || 0; + b[i] = color.b || 0; + } + r = spline(r); + g = spline(g); + b = spline(b); + color.opacity = 1; + return function(t) { + color.r = r(t); + color.g = g(t); + color.b = b(t); + return color + ""; + }; + }; +} + +var rgbBasis = rgbSpline(basis$1); +var rgbBasisClosed = rgbSpline(basisClosed); + +function numberArray(a, b) { + if (!b) b = []; + var n = a ? Math.min(b.length, a.length) : 0, + c = b.slice(), + i; + return function(t) { + for (i = 0; i < n; ++i) c[i] = a[i] * (1 - t) + b[i] * t; + return c; + }; +} + +function isNumberArray(x) { + return ArrayBuffer.isView(x) && !(x instanceof DataView); +} + +function array$1(a, b) { + return (isNumberArray(b) ? numberArray : genericArray)(a, b); +} + +function genericArray(a, b) { + var nb = b ? b.length : 0, + na = a ? 
Math.min(nb, a.length) : 0, + x = new Array(na), + c = new Array(nb), + i; + + for (i = 0; i < na; ++i) x[i] = interpolateValue(a[i], b[i]); + for (; i < nb; ++i) c[i] = b[i]; + + return function(t) { + for (i = 0; i < na; ++i) c[i] = x[i](t); + return c; + }; +} + +function date(a, b) { + var d = new Date; + return a = +a, b = +b, function(t) { + return d.setTime(a * (1 - t) + b * t), d; + }; +} + +function interpolateNumber(a, b) { + return a = +a, b = +b, function(t) { + return a * (1 - t) + b * t; + }; +} + +function object(a, b) { + var i = {}, + c = {}, + k; + + if (a === null || typeof a !== "object") a = {}; + if (b === null || typeof b !== "object") b = {}; + + for (k in b) { + if (k in a) { + i[k] = interpolateValue(a[k], b[k]); + } else { + c[k] = b[k]; + } + } + + return function(t) { + for (k in i) c[k] = i[k](t); + return c; + }; +} + +var reA = /[-+]?(?:\d+\.?\d*|\.?\d+)(?:[eE][-+]?\d+)?/g, + reB = new RegExp(reA.source, "g"); + +function zero(b) { + return function() { + return b; + }; +} + +function one(b) { + return function(t) { + return b(t) + ""; + }; +} + +function interpolateString(a, b) { + var bi = reA.lastIndex = reB.lastIndex = 0, // scan index for next number in b + am, // current match in a + bm, // current match in b + bs, // string preceding current number in b, if any + i = -1, // index in s + s = [], // string constants and placeholders + q = []; // number interpolators + + // Coerce inputs to strings. + a = a + "", b = b + ""; + + // Interpolate pairs of numbers in a & b. 
+ while ((am = reA.exec(a)) + && (bm = reB.exec(b))) { + if ((bs = bm.index) > bi) { // a string precedes the next number in b + bs = b.slice(bi, bs); + if (s[i]) s[i] += bs; // coalesce with previous string + else s[++i] = bs; + } + if ((am = am[0]) === (bm = bm[0])) { // numbers in a & b match + if (s[i]) s[i] += bm; // coalesce with previous string + else s[++i] = bm; + } else { // interpolate non-matching numbers + s[++i] = null; + q.push({i: i, x: interpolateNumber(am, bm)}); + } + bi = reB.lastIndex; + } + + // Add remains of b. + if (bi < b.length) { + bs = b.slice(bi); + if (s[i]) s[i] += bs; // coalesce with previous string + else s[++i] = bs; + } + + // Special optimization for only a single match. + // Otherwise, interpolate each of the numbers and rejoin the string. + return s.length < 2 ? (q[0] + ? one(q[0].x) + : zero(b)) + : (b = q.length, function(t) { + for (var i = 0, o; i < b; ++i) s[(o = q[i]).i] = o.x(t); + return s.join(""); + }); +} + +function interpolateValue(a, b) { + var t = typeof b, c; + return b == null || t === "boolean" ? constant$3(b) + : (t === "number" ? interpolateNumber + : t === "string" ? ((c = color(b)) ? (b = c, interpolateRgb) : interpolateString) + : b instanceof color ? interpolateRgb + : b instanceof Date ? date + : isNumberArray(b) ? numberArray + : Array.isArray(b) ? genericArray + : typeof b.valueOf !== "function" && typeof b.toString !== "function" || isNaN(b) ? 
object + : interpolateNumber)(a, b); +} + +function discrete(range) { + var n = range.length; + return function(t) { + return range[Math.max(0, Math.min(n - 1, Math.floor(t * n)))]; + }; +} + +function hue$1(a, b) { + var i = hue(+a, +b); + return function(t) { + var x = i(t); + return x - 360 * Math.floor(x / 360); + }; +} + +function interpolateRound(a, b) { + return a = +a, b = +b, function(t) { + return Math.round(a * (1 - t) + b * t); + }; +} + +var degrees = 180 / Math.PI; + +var identity$2 = { + translateX: 0, + translateY: 0, + rotate: 0, + skewX: 0, + scaleX: 1, + scaleY: 1 +}; + +function decompose(a, b, c, d, e, f) { + var scaleX, scaleY, skewX; + if (scaleX = Math.sqrt(a * a + b * b)) a /= scaleX, b /= scaleX; + if (skewX = a * c + b * d) c -= a * skewX, d -= b * skewX; + if (scaleY = Math.sqrt(c * c + d * d)) c /= scaleY, d /= scaleY, skewX /= scaleY; + if (a * d < b * c) a = -a, b = -b, skewX = -skewX, scaleX = -scaleX; + return { + translateX: e, + translateY: f, + rotate: Math.atan2(b, a) * degrees, + skewX: Math.atan(skewX) * degrees, + scaleX: scaleX, + scaleY: scaleY + }; +} + +var cssNode, + cssRoot, + cssView, + svgNode; + +function parseCss(value) { + if (value === "none") return identity$2; + if (!cssNode) cssNode = document.createElement("DIV"), cssRoot = document.documentElement, cssView = document.defaultView; + cssNode.style.transform = value; + value = cssView.getComputedStyle(cssRoot.appendChild(cssNode), null).getPropertyValue("transform"); + cssRoot.removeChild(cssNode); + value = value.slice(7, -1).split(","); + return decompose(+value[0], +value[1], +value[2], +value[3], +value[4], +value[5]); +} + +function parseSvg(value) { + if (value == null) return identity$2; + if (!svgNode) svgNode = document.createElementNS("http://www.w3.org/2000/svg", "g"); + svgNode.setAttribute("transform", value); + if (!(value = svgNode.transform.baseVal.consolidate())) return identity$2; + value = value.matrix; + return decompose(value.a, value.b, 
value.c, value.d, value.e, value.f); +} + +function interpolateTransform(parse, pxComma, pxParen, degParen) { + + function pop(s) { + return s.length ? s.pop() + " " : ""; + } + + function translate(xa, ya, xb, yb, s, q) { + if (xa !== xb || ya !== yb) { + var i = s.push("translate(", null, pxComma, null, pxParen); + q.push({i: i - 4, x: interpolateNumber(xa, xb)}, {i: i - 2, x: interpolateNumber(ya, yb)}); + } else if (xb || yb) { + s.push("translate(" + xb + pxComma + yb + pxParen); + } + } + + function rotate(a, b, s, q) { + if (a !== b) { + if (a - b > 180) b += 360; else if (b - a > 180) a += 360; // shortest path + q.push({i: s.push(pop(s) + "rotate(", null, degParen) - 2, x: interpolateNumber(a, b)}); + } else if (b) { + s.push(pop(s) + "rotate(" + b + degParen); + } + } + + function skewX(a, b, s, q) { + if (a !== b) { + q.push({i: s.push(pop(s) + "skewX(", null, degParen) - 2, x: interpolateNumber(a, b)}); + } else if (b) { + s.push(pop(s) + "skewX(" + b + degParen); + } + } + + function scale(xa, ya, xb, yb, s, q) { + if (xa !== xb || ya !== yb) { + var i = s.push(pop(s) + "scale(", null, ",", null, ")"); + q.push({i: i - 4, x: interpolateNumber(xa, xb)}, {i: i - 2, x: interpolateNumber(ya, yb)}); + } else if (xb !== 1 || yb !== 1) { + s.push(pop(s) + "scale(" + xb + "," + yb + ")"); + } + } + + return function(a, b) { + var s = [], // string constants and placeholders + q = []; // number interpolators + a = parse(a), b = parse(b); + translate(a.translateX, a.translateY, b.translateX, b.translateY, s, q); + rotate(a.rotate, b.rotate, s, q); + skewX(a.skewX, b.skewX, s, q); + scale(a.scaleX, a.scaleY, b.scaleX, b.scaleY, s, q); + a = b = null; // gc + return function(t) { + var i = -1, n = q.length, o; + while (++i < n) s[(o = q[i]).i] = o.x(t); + return s.join(""); + }; + }; +} + +var interpolateTransformCss = interpolateTransform(parseCss, "px, ", "px)", "deg)"); +var interpolateTransformSvg = interpolateTransform(parseSvg, ", ", ")", ")"); + +var rho = 
Math.SQRT2, + rho2 = 2, + rho4 = 4, + epsilon2 = 1e-12; + +function cosh(x) { + return ((x = Math.exp(x)) + 1 / x) / 2; +} + +function sinh(x) { + return ((x = Math.exp(x)) - 1 / x) / 2; +} + +function tanh(x) { + return ((x = Math.exp(2 * x)) - 1) / (x + 1); +} + +// p0 = [ux0, uy0, w0] +// p1 = [ux1, uy1, w1] +function interpolateZoom(p0, p1) { + var ux0 = p0[0], uy0 = p0[1], w0 = p0[2], + ux1 = p1[0], uy1 = p1[1], w1 = p1[2], + dx = ux1 - ux0, + dy = uy1 - uy0, + d2 = dx * dx + dy * dy, + i, + S; + + // Special case for u0 ≅ u1. + if (d2 < epsilon2) { + S = Math.log(w1 / w0) / rho; + i = function(t) { + return [ + ux0 + t * dx, + uy0 + t * dy, + w0 * Math.exp(rho * t * S) + ]; + }; + } + + // General case. + else { + var d1 = Math.sqrt(d2), + b0 = (w1 * w1 - w0 * w0 + rho4 * d2) / (2 * w0 * rho2 * d1), + b1 = (w1 * w1 - w0 * w0 - rho4 * d2) / (2 * w1 * rho2 * d1), + r0 = Math.log(Math.sqrt(b0 * b0 + 1) - b0), + r1 = Math.log(Math.sqrt(b1 * b1 + 1) - b1); + S = (r1 - r0) / rho; + i = function(t) { + var s = t * S, + coshr0 = cosh(r0), + u = w0 / (rho2 * d1) * (coshr0 * tanh(rho * s + r0) - sinh(r0)); + return [ + ux0 + u * dx, + uy0 + u * dy, + w0 * coshr0 / cosh(rho * s + r0) + ]; + }; + } + + i.duration = S * 1000; + + return i; +} + +function hsl$1(hue) { + return function(start, end) { + var h = hue((start = hsl(start)).h, (end = hsl(end)).h), + s = nogamma(start.s, end.s), + l = nogamma(start.l, end.l), + opacity = nogamma(start.opacity, end.opacity); + return function(t) { + start.h = h(t); + start.s = s(t); + start.l = l(t); + start.opacity = opacity(t); + return start + ""; + }; + } +} + +var hsl$2 = hsl$1(hue); +var hslLong = hsl$1(nogamma); + +function lab$1(start, end) { + var l = nogamma((start = lab(start)).l, (end = lab(end)).l), + a = nogamma(start.a, end.a), + b = nogamma(start.b, end.b), + opacity = nogamma(start.opacity, end.opacity); + return function(t) { + start.l = l(t); + start.a = a(t); + start.b = b(t); + start.opacity = opacity(t); + 
return start + ""; + }; +} + +function hcl$1(hue) { + return function(start, end) { + var h = hue((start = hcl(start)).h, (end = hcl(end)).h), + c = nogamma(start.c, end.c), + l = nogamma(start.l, end.l), + opacity = nogamma(start.opacity, end.opacity); + return function(t) { + start.h = h(t); + start.c = c(t); + start.l = l(t); + start.opacity = opacity(t); + return start + ""; + }; + } +} + +var hcl$2 = hcl$1(hue); +var hclLong = hcl$1(nogamma); + +function cubehelix$1(hue) { + return (function cubehelixGamma(y) { + y = +y; + + function cubehelix$1(start, end) { + var h = hue((start = cubehelix(start)).h, (end = cubehelix(end)).h), + s = nogamma(start.s, end.s), + l = nogamma(start.l, end.l), + opacity = nogamma(start.opacity, end.opacity); + return function(t) { + start.h = h(t); + start.s = s(t); + start.l = l(Math.pow(t, y)); + start.opacity = opacity(t); + return start + ""; + }; + } + + cubehelix$1.gamma = cubehelixGamma; + + return cubehelix$1; + })(1); +} + +var cubehelix$2 = cubehelix$1(hue); +var cubehelixLong = cubehelix$1(nogamma); + +function piecewise(interpolate, values) { + var i = 0, n = values.length - 1, v = values[0], I = new Array(n < 0 ? 0 : n); + while (i < n) I[i] = interpolate(v, v = values[++i]); + return function(t) { + var i = Math.max(0, Math.min(n - 1, Math.floor(t *= n))); + return I[i](t - i); + }; +} + +function quantize(interpolator, n) { + var samples = new Array(n); + for (var i = 0; i < n; ++i) samples[i] = interpolator(i / (n - 1)); + return samples; +} + +var frame = 0, // is an animation frame pending? + timeout = 0, // is a timeout pending? + interval = 0, // are any timers active? + pokeDelay = 1000, // how frequently we check for clock skew + taskHead, + taskTail, + clockLast = 0, + clockNow = 0, + clockSkew = 0, + clock = typeof performance === "object" && performance.now ? performance : Date, + setFrame = typeof window === "object" && window.requestAnimationFrame ? 
window.requestAnimationFrame.bind(window) : function(f) { setTimeout(f, 17); }; + +function now() { + return clockNow || (setFrame(clearNow), clockNow = clock.now() + clockSkew); +} + +function clearNow() { + clockNow = 0; +} + +function Timer() { + this._call = + this._time = + this._next = null; +} + +Timer.prototype = timer.prototype = { + constructor: Timer, + restart: function(callback, delay, time) { + if (typeof callback !== "function") throw new TypeError("callback is not a function"); + time = (time == null ? now() : +time) + (delay == null ? 0 : +delay); + if (!this._next && taskTail !== this) { + if (taskTail) taskTail._next = this; + else taskHead = this; + taskTail = this; + } + this._call = callback; + this._time = time; + sleep(); + }, + stop: function() { + if (this._call) { + this._call = null; + this._time = Infinity; + sleep(); + } + } +}; + +function timer(callback, delay, time) { + var t = new Timer; + t.restart(callback, delay, time); + return t; +} + +function timerFlush() { + now(); // Get the current time, if not already set. + ++frame; // Pretend we’ve set an alarm, if we haven’t already. + var t = taskHead, e; + while (t) { + if ((e = clockNow - t._time) >= 0) t._call.call(null, e); + t = t._next; + } + --frame; +} + +function wake() { + clockNow = (clockLast = clock.now()) + clockSkew; + frame = timeout = 0; + try { + timerFlush(); + } finally { + frame = 0; + nap(); + clockNow = 0; + } +} + +function poke() { + var now = clock.now(), delay = now - clockLast; + if (delay > pokeDelay) clockSkew -= delay, clockLast = now; +} + +function nap() { + var t0, t1 = taskHead, t2, time = Infinity; + while (t1) { + if (t1._call) { + if (time > t1._time) time = t1._time; + t0 = t1, t1 = t1._next; + } else { + t2 = t1._next, t1._next = null; + t1 = t0 ? t0._next = t2 : taskHead = t2; + } + } + taskTail = t0; + sleep(time); +} + +function sleep(time) { + if (frame) return; // Soonest alarm already set, or will be. 
+ if (timeout) timeout = clearTimeout(timeout); + var delay = time - clockNow; // Strictly less than if we recomputed clockNow. + if (delay > 24) { + if (time < Infinity) timeout = setTimeout(wake, time - clock.now() - clockSkew); + if (interval) interval = clearInterval(interval); + } else { + if (!interval) clockLast = clock.now(), interval = setInterval(poke, pokeDelay); + frame = 1, setFrame(wake); + } +} + +function timeout$1(callback, delay, time) { + var t = new Timer; + delay = delay == null ? 0 : +delay; + t.restart(function(elapsed) { + t.stop(); + callback(elapsed + delay); + }, delay, time); + return t; +} + +function interval$1(callback, delay, time) { + var t = new Timer, total = delay; + if (delay == null) return t.restart(callback, delay, time), t; + delay = +delay, time = time == null ? now() : +time; + t.restart(function tick(elapsed) { + elapsed += total; + t.restart(tick, total += delay, time); + callback(elapsed); + }, delay, time); + return t; +} + +var emptyOn = dispatch("start", "end", "cancel", "interrupt"); +var emptyTween = []; + +var CREATED = 0; +var SCHEDULED = 1; +var STARTING = 2; +var STARTED = 3; +var RUNNING = 4; +var ENDING = 5; +var ENDED = 6; + +function schedule(node, name, id, index, group, timing) { + var schedules = node.__transition; + if (!schedules) node.__transition = {}; + else if (id in schedules) return; + create$1(node, id, { + name: name, + index: index, // For context during callback. + group: group, // For context during callback. 
+ on: emptyOn, + tween: emptyTween, + time: timing.time, + delay: timing.delay, + duration: timing.duration, + ease: timing.ease, + timer: null, + state: CREATED + }); +} + +function init(node, id) { + var schedule = get$1(node, id); + if (schedule.state > CREATED) throw new Error("too late; already scheduled"); + return schedule; +} + +function set$1(node, id) { + var schedule = get$1(node, id); + if (schedule.state > STARTED) throw new Error("too late; already running"); + return schedule; +} + +function get$1(node, id) { + var schedule = node.__transition; + if (!schedule || !(schedule = schedule[id])) throw new Error("transition not found"); + return schedule; +} + +function create$1(node, id, self) { + var schedules = node.__transition, + tween; + + // Initialize the self timer when the transition is created. + // Note the actual delay is not known until the first callback! + schedules[id] = self; + self.timer = timer(schedule, 0, self.time); + + function schedule(elapsed) { + self.state = SCHEDULED; + self.timer.restart(start, self.delay, self.time); + + // If the elapsed delay is less than our first sleep, start immediately. + if (self.delay <= elapsed) start(elapsed - self.delay); + } + + function start(elapsed) { + var i, j, n, o; + + // If the state is not SCHEDULED, then we previously errored on start. + if (self.state !== SCHEDULED) return stop(); + + for (i in schedules) { + o = schedules[i]; + if (o.name !== self.name) continue; + + // While this element already has a starting transition during this frame, + // defer starting an interrupting transition until that transition has a + // chance to tick (and possibly end); see d3/d3-transition#54! + if (o.state === STARTED) return timeout$1(start); + + // Interrupt the active transition, if any. + if (o.state === RUNNING) { + o.state = ENDED; + o.timer.stop(); + o.on.call("interrupt", node, node.__data__, o.index, o.group); + delete schedules[i]; + } + + // Cancel any pre-empted transitions. 
+ else if (+i < id) { + o.state = ENDED; + o.timer.stop(); + o.on.call("cancel", node, node.__data__, o.index, o.group); + delete schedules[i]; + } + } + + // Defer the first tick to end of the current frame; see d3/d3#1576. + // Note the transition may be canceled after start and before the first tick! + // Note this must be scheduled before the start event; see d3/d3-transition#16! + // Assuming this is successful, subsequent callbacks go straight to tick. + timeout$1(function() { + if (self.state === STARTED) { + self.state = RUNNING; + self.timer.restart(tick, self.delay, self.time); + tick(elapsed); + } + }); + + // Dispatch the start event. + // Note this must be done before the tween are initialized. + self.state = STARTING; + self.on.call("start", node, node.__data__, self.index, self.group); + if (self.state !== STARTING) return; // interrupted + self.state = STARTED; + + // Initialize the tween, deleting null tween. + tween = new Array(n = self.tween.length); + for (i = 0, j = -1; i < n; ++i) { + if (o = self.tween[i].value.call(node, node.__data__, self.index, self.group)) { + tween[++j] = o; + } + } + tween.length = j + 1; + } + + function tick(elapsed) { + var t = elapsed < self.duration ? self.ease.call(null, elapsed / self.duration) : (self.timer.restart(stop), self.state = ENDING, 1), + i = -1, + n = tween.length; + + while (++i < n) { + tween[i].call(node, t); + } + + // Dispatch the end event. + if (self.state === ENDING) { + self.on.call("end", node, node.__data__, self.index, self.group); + stop(); + } + } + + function stop() { + self.state = ENDED; + self.timer.stop(); + delete schedules[id]; + for (var i in schedules) return; // eslint-disable-line no-unused-vars + delete node.__transition; + } +} + +function interrupt(node, name) { + var schedules = node.__transition, + schedule, + active, + empty = true, + i; + + if (!schedules) return; + + name = name == null ? 
null : name + ""; + + for (i in schedules) { + if ((schedule = schedules[i]).name !== name) { empty = false; continue; } + active = schedule.state > STARTING && schedule.state < ENDING; + schedule.state = ENDED; + schedule.timer.stop(); + schedule.on.call(active ? "interrupt" : "cancel", node, node.__data__, schedule.index, schedule.group); + delete schedules[i]; + } + + if (empty) delete node.__transition; +} + +function selection_interrupt(name) { + return this.each(function() { + interrupt(this, name); + }); +} + +function tweenRemove(id, name) { + var tween0, tween1; + return function() { + var schedule = set$1(this, id), + tween = schedule.tween; + + // If this node shared tween with the previous node, + // just assign the updated shared tween and we’re done! + // Otherwise, copy-on-write. + if (tween !== tween0) { + tween1 = tween0 = tween; + for (var i = 0, n = tween1.length; i < n; ++i) { + if (tween1[i].name === name) { + tween1 = tween1.slice(); + tween1.splice(i, 1); + break; + } + } + } + + schedule.tween = tween1; + }; +} + +function tweenFunction(id, name, value) { + var tween0, tween1; + if (typeof value !== "function") throw new Error; + return function() { + var schedule = set$1(this, id), + tween = schedule.tween; + + // If this node shared tween with the previous node, + // just assign the updated shared tween and we’re done! + // Otherwise, copy-on-write. 
+ if (tween !== tween0) { + tween1 = (tween0 = tween).slice(); + for (var t = {name: name, value: value}, i = 0, n = tween1.length; i < n; ++i) { + if (tween1[i].name === name) { + tween1[i] = t; + break; + } + } + if (i === n) tween1.push(t); + } + + schedule.tween = tween1; + }; +} + +function transition_tween(name, value) { + var id = this._id; + + name += ""; + + if (arguments.length < 2) { + var tween = get$1(this.node(), id).tween; + for (var i = 0, n = tween.length, t; i < n; ++i) { + if ((t = tween[i]).name === name) { + return t.value; + } + } + return null; + } + + return this.each((value == null ? tweenRemove : tweenFunction)(id, name, value)); +} + +function tweenValue(transition, name, value) { + var id = transition._id; + + transition.each(function() { + var schedule = set$1(this, id); + (schedule.value || (schedule.value = {}))[name] = value.apply(this, arguments); + }); + + return function(node) { + return get$1(node, id).value[name]; + }; +} + +function interpolate(a, b) { + var c; + return (typeof b === "number" ? interpolateNumber + : b instanceof color ? interpolateRgb + : (c = color(b)) ? (b = c, interpolateRgb) + : interpolateString)(a, b); +} + +function attrRemove$1(name) { + return function() { + this.removeAttribute(name); + }; +} + +function attrRemoveNS$1(fullname) { + return function() { + this.removeAttributeNS(fullname.space, fullname.local); + }; +} + +function attrConstant$1(name, interpolate, value1) { + var string00, + string1 = value1 + "", + interpolate0; + return function() { + var string0 = this.getAttribute(name); + return string0 === string1 ? null + : string0 === string00 ? interpolate0 + : interpolate0 = interpolate(string00 = string0, value1); + }; +} + +function attrConstantNS$1(fullname, interpolate, value1) { + var string00, + string1 = value1 + "", + interpolate0; + return function() { + var string0 = this.getAttributeNS(fullname.space, fullname.local); + return string0 === string1 ? null + : string0 === string00 ? 
interpolate0 + : interpolate0 = interpolate(string00 = string0, value1); + }; +} + +function attrFunction$1(name, interpolate, value) { + var string00, + string10, + interpolate0; + return function() { + var string0, value1 = value(this), string1; + if (value1 == null) return void this.removeAttribute(name); + string0 = this.getAttribute(name); + string1 = value1 + ""; + return string0 === string1 ? null + : string0 === string00 && string1 === string10 ? interpolate0 + : (string10 = string1, interpolate0 = interpolate(string00 = string0, value1)); + }; +} + +function attrFunctionNS$1(fullname, interpolate, value) { + var string00, + string10, + interpolate0; + return function() { + var string0, value1 = value(this), string1; + if (value1 == null) return void this.removeAttributeNS(fullname.space, fullname.local); + string0 = this.getAttributeNS(fullname.space, fullname.local); + string1 = value1 + ""; + return string0 === string1 ? null + : string0 === string00 && string1 === string10 ? interpolate0 + : (string10 = string1, interpolate0 = interpolate(string00 = string0, value1)); + }; +} + +function transition_attr(name, value) { + var fullname = namespace(name), i = fullname === "transform" ? interpolateTransformSvg : interpolate; + return this.attrTween(name, typeof value === "function" + ? (fullname.local ? attrFunctionNS$1 : attrFunction$1)(fullname, i, tweenValue(this, "attr." + name, value)) + : value == null ? (fullname.local ? attrRemoveNS$1 : attrRemove$1)(fullname) + : (fullname.local ? 
attrConstantNS$1 : attrConstant$1)(fullname, i, value)); +} + +function attrInterpolate(name, i) { + return function(t) { + this.setAttribute(name, i.call(this, t)); + }; +} + +function attrInterpolateNS(fullname, i) { + return function(t) { + this.setAttributeNS(fullname.space, fullname.local, i.call(this, t)); + }; +} + +function attrTweenNS(fullname, value) { + var t0, i0; + function tween() { + var i = value.apply(this, arguments); + if (i !== i0) t0 = (i0 = i) && attrInterpolateNS(fullname, i); + return t0; + } + tween._value = value; + return tween; +} + +function attrTween(name, value) { + var t0, i0; + function tween() { + var i = value.apply(this, arguments); + if (i !== i0) t0 = (i0 = i) && attrInterpolate(name, i); + return t0; + } + tween._value = value; + return tween; +} + +function transition_attrTween(name, value) { + var key = "attr." + name; + if (arguments.length < 2) return (key = this.tween(key)) && key._value; + if (value == null) return this.tween(key, null); + if (typeof value !== "function") throw new Error; + var fullname = namespace(name); + return this.tween(key, (fullname.local ? attrTweenNS : attrTween)(fullname, value)); +} + +function delayFunction(id, value) { + return function() { + init(this, id).delay = +value.apply(this, arguments); + }; +} + +function delayConstant(id, value) { + return value = +value, function() { + init(this, id).delay = value; + }; +} + +function transition_delay(value) { + var id = this._id; + + return arguments.length + ? this.each((typeof value === "function" + ? delayFunction + : delayConstant)(id, value)) + : get$1(this.node(), id).delay; +} + +function durationFunction(id, value) { + return function() { + set$1(this, id).duration = +value.apply(this, arguments); + }; +} + +function durationConstant(id, value) { + return value = +value, function() { + set$1(this, id).duration = value; + }; +} + +function transition_duration(value) { + var id = this._id; + + return arguments.length + ? 
this.each((typeof value === "function" + ? durationFunction + : durationConstant)(id, value)) + : get$1(this.node(), id).duration; +} + +function easeConstant(id, value) { + if (typeof value !== "function") throw new Error; + return function() { + set$1(this, id).ease = value; + }; +} + +function transition_ease(value) { + var id = this._id; + + return arguments.length + ? this.each(easeConstant(id, value)) + : get$1(this.node(), id).ease; +} + +function transition_filter(match) { + if (typeof match !== "function") match = matcher(match); + + for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, subgroup = subgroups[j] = [], node, i = 0; i < n; ++i) { + if ((node = group[i]) && match.call(node, node.__data__, i, group)) { + subgroup.push(node); + } + } + } + + return new Transition(subgroups, this._parents, this._name, this._id); +} + +function transition_merge(transition) { + if (transition._id !== this._id) throw new Error; + + for (var groups0 = this._groups, groups1 = transition._groups, m0 = groups0.length, m1 = groups1.length, m = Math.min(m0, m1), merges = new Array(m0), j = 0; j < m; ++j) { + for (var group0 = groups0[j], group1 = groups1[j], n = group0.length, merge = merges[j] = new Array(n), node, i = 0; i < n; ++i) { + if (node = group0[i] || group1[i]) { + merge[i] = node; + } + } + } + + for (; j < m0; ++j) { + merges[j] = groups0[j]; + } + + return new Transition(merges, this._parents, this._name, this._id); +} + +function start(name) { + return (name + "").trim().split(/^|\s+/).every(function(t) { + var i = t.indexOf("."); + if (i >= 0) t = t.slice(0, i); + return !t || t === "start"; + }); +} + +function onFunction(id, name, listener) { + var on0, on1, sit = start(name) ? 
init : set$1; + return function() { + var schedule = sit(this, id), + on = schedule.on; + + // If this node shared a dispatch with the previous node, + // just assign the updated shared dispatch and we’re done! + // Otherwise, copy-on-write. + if (on !== on0) (on1 = (on0 = on).copy()).on(name, listener); + + schedule.on = on1; + }; +} + +function transition_on(name, listener) { + var id = this._id; + + return arguments.length < 2 + ? get$1(this.node(), id).on.on(name) + : this.each(onFunction(id, name, listener)); +} + +function removeFunction(id) { + return function() { + var parent = this.parentNode; + for (var i in this.__transition) if (+i !== id) return; + if (parent) parent.removeChild(this); + }; +} + +function transition_remove() { + return this.on("end.remove", removeFunction(this._id)); +} + +function transition_select(select) { + var name = this._name, + id = this._id; + + if (typeof select !== "function") select = selector(select); + + for (var groups = this._groups, m = groups.length, subgroups = new Array(m), j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, subgroup = subgroups[j] = new Array(n), node, subnode, i = 0; i < n; ++i) { + if ((node = group[i]) && (subnode = select.call(node, node.__data__, i, group))) { + if ("__data__" in node) subnode.__data__ = node.__data__; + subgroup[i] = subnode; + schedule(subgroup[i], name, id, i, subgroup, get$1(node, id)); + } + } + } + + return new Transition(subgroups, this._parents, name, id); +} + +function transition_selectAll(select) { + var name = this._name, + id = this._id; + + if (typeof select !== "function") select = selectorAll(select); + + for (var groups = this._groups, m = groups.length, subgroups = [], parents = [], j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) { + if (node = group[i]) { + for (var children = select.call(node, node.__data__, i, group), child, inherit = get$1(node, id), k = 0, l = children.length; k < l; ++k) { + 
if (child = children[k]) { + schedule(child, name, id, k, children, inherit); + } + } + subgroups.push(children); + parents.push(node); + } + } + } + + return new Transition(subgroups, parents, name, id); +} + +var Selection$1 = selection.prototype.constructor; + +function transition_selection() { + return new Selection$1(this._groups, this._parents); +} + +function styleNull(name, interpolate) { + var string00, + string10, + interpolate0; + return function() { + var string0 = styleValue(this, name), + string1 = (this.style.removeProperty(name), styleValue(this, name)); + return string0 === string1 ? null + : string0 === string00 && string1 === string10 ? interpolate0 + : interpolate0 = interpolate(string00 = string0, string10 = string1); + }; +} + +function styleRemove$1(name) { + return function() { + this.style.removeProperty(name); + }; +} + +function styleConstant$1(name, interpolate, value1) { + var string00, + string1 = value1 + "", + interpolate0; + return function() { + var string0 = styleValue(this, name); + return string0 === string1 ? null + : string0 === string00 ? interpolate0 + : interpolate0 = interpolate(string00 = string0, value1); + }; +} + +function styleFunction$1(name, interpolate, value) { + var string00, + string10, + interpolate0; + return function() { + var string0 = styleValue(this, name), + value1 = value(this), + string1 = value1 + ""; + if (value1 == null) string1 = value1 = (this.style.removeProperty(name), styleValue(this, name)); + return string0 === string1 ? null + : string0 === string00 && string1 === string10 ? interpolate0 + : (string10 = string1, interpolate0 = interpolate(string00 = string0, value1)); + }; +} + +function styleMaybeRemove(id, name) { + var on0, on1, listener0, key = "style." + name, event = "end." + key, remove; + return function() { + var schedule = set$1(this, id), + on = schedule.on, + listener = schedule.value[key] == null ? 
remove || (remove = styleRemove$1(name)) : undefined; + + // If this node shared a dispatch with the previous node, + // just assign the updated shared dispatch and we’re done! + // Otherwise, copy-on-write. + if (on !== on0 || listener0 !== listener) (on1 = (on0 = on).copy()).on(event, listener0 = listener); + + schedule.on = on1; + }; +} + +function transition_style(name, value, priority) { + var i = (name += "") === "transform" ? interpolateTransformCss : interpolate; + return value == null ? this + .styleTween(name, styleNull(name, i)) + .on("end.style." + name, styleRemove$1(name)) + : typeof value === "function" ? this + .styleTween(name, styleFunction$1(name, i, tweenValue(this, "style." + name, value))) + .each(styleMaybeRemove(this._id, name)) + : this + .styleTween(name, styleConstant$1(name, i, value), priority) + .on("end.style." + name, null); +} + +function styleInterpolate(name, i, priority) { + return function(t) { + this.style.setProperty(name, i.call(this, t), priority); + }; +} + +function styleTween(name, value, priority) { + var t, i0; + function tween() { + var i = value.apply(this, arguments); + if (i !== i0) t = (i0 = i) && styleInterpolate(name, i, priority); + return t; + } + tween._value = value; + return tween; +} + +function transition_styleTween(name, value, priority) { + var key = "style." + (name += ""); + if (arguments.length < 2) return (key = this.tween(key)) && key._value; + if (value == null) return this.tween(key, null); + if (typeof value !== "function") throw new Error; + return this.tween(key, styleTween(name, value, priority == null ? "" : priority)); +} + +function textConstant$1(value) { + return function() { + this.textContent = value; + }; +} + +function textFunction$1(value) { + return function() { + var value1 = value(this); + this.textContent = value1 == null ? "" : value1; + }; +} + +function transition_text(value) { + return this.tween("text", typeof value === "function" + ? 
textFunction$1(tweenValue(this, "text", value)) + : textConstant$1(value == null ? "" : value + "")); +} + +function textInterpolate(i) { + return function(t) { + this.textContent = i.call(this, t); + }; +} + +function textTween(value) { + var t0, i0; + function tween() { + var i = value.apply(this, arguments); + if (i !== i0) t0 = (i0 = i) && textInterpolate(i); + return t0; + } + tween._value = value; + return tween; +} + +function transition_textTween(value) { + var key = "text"; + if (arguments.length < 1) return (key = this.tween(key)) && key._value; + if (value == null) return this.tween(key, null); + if (typeof value !== "function") throw new Error; + return this.tween(key, textTween(value)); +} + +function transition_transition() { + var name = this._name, + id0 = this._id, + id1 = newId(); + + for (var groups = this._groups, m = groups.length, j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) { + if (node = group[i]) { + var inherit = get$1(node, id0); + schedule(node, name, id1, i, group, { + time: inherit.time + inherit.delay + inherit.duration, + delay: 0, + duration: inherit.duration, + ease: inherit.ease + }); + } + } + } + + return new Transition(groups, this._parents, name, id1); +} + +function transition_end() { + var on0, on1, that = this, id = that._id, size = that.size(); + return new Promise(function(resolve, reject) { + var cancel = {value: reject}, + end = {value: function() { if (--size === 0) resolve(); }}; + + that.each(function() { + var schedule = set$1(this, id), + on = schedule.on; + + // If this node shared a dispatch with the previous node, + // just assign the updated shared dispatch and we’re done! + // Otherwise, copy-on-write. 
+ if (on !== on0) { + on1 = (on0 = on).copy(); + on1._.cancel.push(cancel); + on1._.interrupt.push(cancel); + on1._.end.push(end); + } + + schedule.on = on1; + }); + }); +} + +var id = 0; + +function Transition(groups, parents, name, id) { + this._groups = groups; + this._parents = parents; + this._name = name; + this._id = id; +} + +function transition(name) { + return selection().transition(name); +} + +function newId() { + return ++id; +} + +var selection_prototype = selection.prototype; + +Transition.prototype = transition.prototype = { + constructor: Transition, + select: transition_select, + selectAll: transition_selectAll, + filter: transition_filter, + merge: transition_merge, + selection: transition_selection, + transition: transition_transition, + call: selection_prototype.call, + nodes: selection_prototype.nodes, + node: selection_prototype.node, + size: selection_prototype.size, + empty: selection_prototype.empty, + each: selection_prototype.each, + on: transition_on, + attr: transition_attr, + attrTween: transition_attrTween, + style: transition_style, + styleTween: transition_styleTween, + text: transition_text, + textTween: transition_textTween, + remove: transition_remove, + tween: transition_tween, + delay: transition_delay, + duration: transition_duration, + ease: transition_ease, + end: transition_end +}; + +function linear$1(t) { + return +t; +} + +function quadIn(t) { + return t * t; +} + +function quadOut(t) { + return t * (2 - t); +} + +function quadInOut(t) { + return ((t *= 2) <= 1 ? t * t : --t * (2 - t) + 1) / 2; +} + +function cubicIn(t) { + return t * t * t; +} + +function cubicOut(t) { + return --t * t * t + 1; +} + +function cubicInOut(t) { + return ((t *= 2) <= 1 ? 
t * t * t : (t -= 2) * t * t + 2) / 2; +} + +var exponent = 3; + +var polyIn = (function custom(e) { + e = +e; + + function polyIn(t) { + return Math.pow(t, e); + } + + polyIn.exponent = custom; + + return polyIn; +})(exponent); + +var polyOut = (function custom(e) { + e = +e; + + function polyOut(t) { + return 1 - Math.pow(1 - t, e); + } + + polyOut.exponent = custom; + + return polyOut; +})(exponent); + +var polyInOut = (function custom(e) { + e = +e; + + function polyInOut(t) { + return ((t *= 2) <= 1 ? Math.pow(t, e) : 2 - Math.pow(2 - t, e)) / 2; + } + + polyInOut.exponent = custom; + + return polyInOut; +})(exponent); + +var pi = Math.PI, + halfPi = pi / 2; + +function sinIn(t) { + return 1 - Math.cos(t * halfPi); +} + +function sinOut(t) { + return Math.sin(t * halfPi); +} + +function sinInOut(t) { + return (1 - Math.cos(pi * t)) / 2; +} + +function expIn(t) { + return Math.pow(2, 10 * t - 10); +} + +function expOut(t) { + return 1 - Math.pow(2, -10 * t); +} + +function expInOut(t) { + return ((t *= 2) <= 1 ? Math.pow(2, 10 * t - 10) : 2 - Math.pow(2, 10 - 10 * t)) / 2; +} + +function circleIn(t) { + return 1 - Math.sqrt(1 - t * t); +} + +function circleOut(t) { + return Math.sqrt(1 - --t * t); +} + +function circleInOut(t) { + return ((t *= 2) <= 1 ? 1 - Math.sqrt(1 - t * t) : Math.sqrt(1 - (t -= 2) * t) + 1) / 2; +} + +var b1 = 4 / 11, + b2 = 6 / 11, + b3 = 8 / 11, + b4 = 3 / 4, + b5 = 9 / 11, + b6 = 10 / 11, + b7 = 15 / 16, + b8 = 21 / 22, + b9 = 63 / 64, + b0 = 1 / b1 / b1; + +function bounceIn(t) { + return 1 - bounceOut(1 - t); +} + +function bounceOut(t) { + return (t = +t) < b1 ? b0 * t * t : t < b3 ? b0 * (t -= b2) * t + b4 : t < b6 ? b0 * (t -= b5) * t + b7 : b0 * (t -= b8) * t + b9; +} + +function bounceInOut(t) { + return ((t *= 2) <= 1 ? 
1 - bounceOut(1 - t) : bounceOut(t - 1) + 1) / 2; +} + +var overshoot = 1.70158; + +var backIn = (function custom(s) { + s = +s; + + function backIn(t) { + return t * t * ((s + 1) * t - s); + } + + backIn.overshoot = custom; + + return backIn; +})(overshoot); + +var backOut = (function custom(s) { + s = +s; + + function backOut(t) { + return --t * t * ((s + 1) * t + s) + 1; + } + + backOut.overshoot = custom; + + return backOut; +})(overshoot); + +var backInOut = (function custom(s) { + s = +s; + + function backInOut(t) { + return ((t *= 2) < 1 ? t * t * ((s + 1) * t - s) : (t -= 2) * t * ((s + 1) * t + s) + 2) / 2; + } + + backInOut.overshoot = custom; + + return backInOut; +})(overshoot); + +var tau = 2 * Math.PI, + amplitude = 1, + period = 0.3; + +var elasticIn = (function custom(a, p) { + var s = Math.asin(1 / (a = Math.max(1, a))) * (p /= tau); + + function elasticIn(t) { + return a * Math.pow(2, 10 * --t) * Math.sin((s - t) / p); + } + + elasticIn.amplitude = function(a) { return custom(a, p * tau); }; + elasticIn.period = function(p) { return custom(a, p); }; + + return elasticIn; +})(amplitude, period); + +var elasticOut = (function custom(a, p) { + var s = Math.asin(1 / (a = Math.max(1, a))) * (p /= tau); + + function elasticOut(t) { + return 1 - a * Math.pow(2, -10 * (t = +t)) * Math.sin((t + s) / p); + } + + elasticOut.amplitude = function(a) { return custom(a, p * tau); }; + elasticOut.period = function(p) { return custom(a, p); }; + + return elasticOut; +})(amplitude, period); + +var elasticInOut = (function custom(a, p) { + var s = Math.asin(1 / (a = Math.max(1, a))) * (p /= tau); + + function elasticInOut(t) { + return ((t = t * 2 - 1) < 0 + ? 
a * Math.pow(2, 10 * t) * Math.sin((s - t) / p) + : 2 - a * Math.pow(2, -10 * t) * Math.sin((s + t) / p)) / 2; + } + + elasticInOut.amplitude = function(a) { return custom(a, p * tau); }; + elasticInOut.period = function(p) { return custom(a, p); }; + + return elasticInOut; +})(amplitude, period); + +var defaultTiming = { + time: null, // Set on use. + delay: 0, + duration: 250, + ease: cubicInOut +}; + +function inherit(node, id) { + var timing; + while (!(timing = node.__transition) || !(timing = timing[id])) { + if (!(node = node.parentNode)) { + return defaultTiming.time = now(), defaultTiming; + } + } + return timing; +} + +function selection_transition(name) { + var id, + timing; + + if (name instanceof Transition) { + id = name._id, name = name._name; + } else { + id = newId(), (timing = defaultTiming).time = now(), name = name == null ? null : name + ""; + } + + for (var groups = this._groups, m = groups.length, j = 0; j < m; ++j) { + for (var group = groups[j], n = group.length, node, i = 0; i < n; ++i) { + if (node = group[i]) { + schedule(node, name, id, i, group, timing || inherit(node, id)); + } + } + } + + return new Transition(groups, this._parents, name, id); +} + +selection.prototype.interrupt = selection_interrupt; +selection.prototype.transition = selection_transition; + +var root$1 = [null]; + +function active(node, name) { + var schedules = node.__transition, + schedule, + i; + + if (schedules) { + name = name == null ? 
null : name + ""; + for (i in schedules) { + if ((schedule = schedules[i]).state > SCHEDULED && schedule.name === name) { + return new Transition([[node]], root$1, name, +i); + } + } + } + + return null; +} + +function constant$4(x) { + return function() { + return x; + }; +} + +function BrushEvent(target, type, selection) { + this.target = target; + this.type = type; + this.selection = selection; +} + +function nopropagation$1() { + exports.event.stopImmediatePropagation(); +} + +function noevent$1() { + exports.event.preventDefault(); + exports.event.stopImmediatePropagation(); +} + +var MODE_DRAG = {name: "drag"}, + MODE_SPACE = {name: "space"}, + MODE_HANDLE = {name: "handle"}, + MODE_CENTER = {name: "center"}; + +function number1(e) { + return [+e[0], +e[1]]; +} + +function number2(e) { + return [number1(e[0]), number1(e[1])]; +} + +function toucher(identifier) { + return function(target) { + return touch(target, exports.event.touches, identifier); + }; +} + +var X = { + name: "x", + handles: ["w", "e"].map(type), + input: function(x, e) { return x == null ? null : [[+x[0], e[0][1]], [+x[1], e[1][1]]]; }, + output: function(xy) { return xy && [xy[0][0], xy[1][0]]; } +}; + +var Y = { + name: "y", + handles: ["n", "s"].map(type), + input: function(y, e) { return y == null ? null : [[e[0][0], +y[0]], [e[1][0], +y[1]]]; }, + output: function(xy) { return xy && [xy[0][1], xy[1][1]]; } +}; + +var XY = { + name: "xy", + handles: ["n", "w", "e", "s", "nw", "ne", "sw", "se"].map(type), + input: function(xy) { return xy == null ? 
null : number2(xy); }, + output: function(xy) { return xy; } +}; + +var cursors = { + overlay: "crosshair", + selection: "move", + n: "ns-resize", + e: "ew-resize", + s: "ns-resize", + w: "ew-resize", + nw: "nwse-resize", + ne: "nesw-resize", + se: "nwse-resize", + sw: "nesw-resize" +}; + +var flipX = { + e: "w", + w: "e", + nw: "ne", + ne: "nw", + se: "sw", + sw: "se" +}; + +var flipY = { + n: "s", + s: "n", + nw: "sw", + ne: "se", + se: "ne", + sw: "nw" +}; + +var signsX = { + overlay: +1, + selection: +1, + n: null, + e: +1, + s: null, + w: -1, + nw: -1, + ne: +1, + se: +1, + sw: -1 +}; + +var signsY = { + overlay: +1, + selection: +1, + n: -1, + e: null, + s: +1, + w: null, + nw: -1, + ne: -1, + se: +1, + sw: +1 +}; + +function type(t) { + return {type: t}; +} + +// Ignore right-click, since that should open the context menu. +function defaultFilter$1() { + return !exports.event.ctrlKey && !exports.event.button; +} + +function defaultExtent() { + var svg = this.ownerSVGElement || this; + if (svg.hasAttribute("viewBox")) { + svg = svg.viewBox.baseVal; + return [[svg.x, svg.y], [svg.x + svg.width, svg.y + svg.height]]; + } + return [[0, 0], [svg.width.baseVal.value, svg.height.baseVal.value]]; +} + +function defaultTouchable$1() { + return navigator.maxTouchPoints || ("ontouchstart" in this); +} + +// Like d3.local, but with the name “__brush” rather than auto-generated. +function local$1(node) { + while (!node.__brush) if (!(node = node.parentNode)) return; + return node.__brush; +} + +function empty$1(extent) { + return extent[0][0] === extent[1][0] + || extent[0][1] === extent[1][1]; +} + +function brushSelection(node) { + var state = node.__brush; + return state ? 
state.dim.output(state.selection) : null; +} + +function brushX() { + return brush$1(X); +} + +function brushY() { + return brush$1(Y); +} + +function brush() { + return brush$1(XY); +} + +function brush$1(dim) { + var extent = defaultExtent, + filter = defaultFilter$1, + touchable = defaultTouchable$1, + keys = true, + listeners = dispatch("start", "brush", "end"), + handleSize = 6, + touchending; + + function brush(group) { + var overlay = group + .property("__brush", initialize) + .selectAll(".overlay") + .data([type("overlay")]); + + overlay.enter().append("rect") + .attr("class", "overlay") + .attr("pointer-events", "all") + .attr("cursor", cursors.overlay) + .merge(overlay) + .each(function() { + var extent = local$1(this).extent; + select(this) + .attr("x", extent[0][0]) + .attr("y", extent[0][1]) + .attr("width", extent[1][0] - extent[0][0]) + .attr("height", extent[1][1] - extent[0][1]); + }); + + group.selectAll(".selection") + .data([type("selection")]) + .enter().append("rect") + .attr("class", "selection") + .attr("cursor", cursors.selection) + .attr("fill", "#777") + .attr("fill-opacity", 0.3) + .attr("stroke", "#fff") + .attr("shape-rendering", "crispEdges"); + + var handle = group.selectAll(".handle") + .data(dim.handles, function(d) { return d.type; }); + + handle.exit().remove(); + + handle.enter().append("rect") + .attr("class", function(d) { return "handle handle--" + d.type; }) + .attr("cursor", function(d) { return cursors[d.type]; }); + + group + .each(redraw) + .attr("fill", "none") + .attr("pointer-events", "all") + .on("mousedown.brush", started) + .filter(touchable) + .on("touchstart.brush", started) + .on("touchmove.brush", touchmoved) + .on("touchend.brush touchcancel.brush", touchended) + .style("touch-action", "none") + .style("-webkit-tap-highlight-color", "rgba(0,0,0,0)"); + } + + brush.move = function(group, selection) { + if (group.selection) { + group + .on("start.brush", function() { emitter(this, 
arguments).beforestart().start(); }) + .on("interrupt.brush end.brush", function() { emitter(this, arguments).end(); }) + .tween("brush", function() { + var that = this, + state = that.__brush, + emit = emitter(that, arguments), + selection0 = state.selection, + selection1 = dim.input(typeof selection === "function" ? selection.apply(this, arguments) : selection, state.extent), + i = interpolateValue(selection0, selection1); + + function tween(t) { + state.selection = t === 1 && selection1 === null ? null : i(t); + redraw.call(that); + emit.brush(); + } + + return selection0 !== null && selection1 !== null ? tween : tween(1); + }); + } else { + group + .each(function() { + var that = this, + args = arguments, + state = that.__brush, + selection1 = dim.input(typeof selection === "function" ? selection.apply(that, args) : selection, state.extent), + emit = emitter(that, args).beforestart(); + + interrupt(that); + state.selection = selection1 === null ? null : selection1; + redraw.call(that); + emit.start().brush().end(); + }); + } + }; + + brush.clear = function(group) { + brush.move(group, null); + }; + + function redraw() { + var group = select(this), + selection = local$1(this).selection; + + if (selection) { + group.selectAll(".selection") + .style("display", null) + .attr("x", selection[0][0]) + .attr("y", selection[0][1]) + .attr("width", selection[1][0] - selection[0][0]) + .attr("height", selection[1][1] - selection[0][1]); + + group.selectAll(".handle") + .style("display", null) + .attr("x", function(d) { return d.type[d.type.length - 1] === "e" ? selection[1][0] - handleSize / 2 : selection[0][0] - handleSize / 2; }) + .attr("y", function(d) { return d.type[0] === "s" ? selection[1][1] - handleSize / 2 : selection[0][1] - handleSize / 2; }) + .attr("width", function(d) { return d.type === "n" || d.type === "s" ? selection[1][0] - selection[0][0] + handleSize : handleSize; }) + .attr("height", function(d) { return d.type === "e" || d.type === "w" ? 
selection[1][1] - selection[0][1] + handleSize : handleSize; }); + } + + else { + group.selectAll(".selection,.handle") + .style("display", "none") + .attr("x", null) + .attr("y", null) + .attr("width", null) + .attr("height", null); + } + } + + function emitter(that, args, clean) { + return (!clean && that.__brush.emitter) || new Emitter(that, args); + } + + function Emitter(that, args) { + this.that = that; + this.args = args; + this.state = that.__brush; + this.active = 0; + } + + Emitter.prototype = { + beforestart: function() { + if (++this.active === 1) this.state.emitter = this, this.starting = true; + return this; + }, + start: function() { + if (this.starting) this.starting = false, this.emit("start"); + else this.emit("brush"); + return this; + }, + brush: function() { + this.emit("brush"); + return this; + }, + end: function() { + if (--this.active === 0) delete this.state.emitter, this.emit("end"); + return this; + }, + emit: function(type) { + customEvent(new BrushEvent(brush, type, dim.output(this.state.selection)), listeners.apply, listeners, [type, this.that, this.args]); + } + }; + + function started() { + if (touchending && !exports.event.touches) return; + if (!filter.apply(this, arguments)) return; + + var that = this, + type = exports.event.target.__data__.type, + mode = (keys && exports.event.metaKey ? type = "overlay" : type) === "selection" ? MODE_DRAG : (keys && exports.event.altKey ? MODE_CENTER : MODE_HANDLE), + signX = dim === Y ? null : signsX[type], + signY = dim === X ? null : signsY[type], + state = local$1(that), + extent = state.extent, + selection = state.selection, + W = extent[0][0], w0, w1, + N = extent[0][1], n0, n1, + E = extent[1][0], e0, e1, + S = extent[1][1], s0, s1, + dx = 0, + dy = 0, + moving, + shifting = signX && signY && keys && exports.event.shiftKey, + lockX, + lockY, + pointer = exports.event.touches ? 
toucher(exports.event.changedTouches[0].identifier) : mouse, + point0 = pointer(that), + point = point0, + emit = emitter(that, arguments, true).beforestart(); + + if (type === "overlay") { + if (selection) moving = true; + state.selection = selection = [ + [w0 = dim === Y ? W : point0[0], n0 = dim === X ? N : point0[1]], + [e0 = dim === Y ? E : w0, s0 = dim === X ? S : n0] + ]; + } else { + w0 = selection[0][0]; + n0 = selection[0][1]; + e0 = selection[1][0]; + s0 = selection[1][1]; + } + + w1 = w0; + n1 = n0; + e1 = e0; + s1 = s0; + + var group = select(that) + .attr("pointer-events", "none"); + + var overlay = group.selectAll(".overlay") + .attr("cursor", cursors[type]); + + if (exports.event.touches) { + emit.moved = moved; + emit.ended = ended; + } else { + var view = select(exports.event.view) + .on("mousemove.brush", moved, true) + .on("mouseup.brush", ended, true); + if (keys) view + .on("keydown.brush", keydowned, true) + .on("keyup.brush", keyupped, true); + + dragDisable(exports.event.view); + } + + nopropagation$1(); + interrupt(that); + redraw.call(that); + emit.start(); + + function moved() { + var point1 = pointer(that); + if (shifting && !lockX && !lockY) { + if (Math.abs(point1[0] - point[0]) > Math.abs(point1[1] - point[1])) lockY = true; + else lockX = true; + } + point = point1; + moving = true; + noevent$1(); + move(); + } + + function move() { + var t; + + dx = point[0] - point0[0]; + dy = point[1] - point0[1]; + + switch (mode) { + case MODE_SPACE: + case MODE_DRAG: { + if (signX) dx = Math.max(W - w0, Math.min(E - e0, dx)), w1 = w0 + dx, e1 = e0 + dx; + if (signY) dy = Math.max(N - n0, Math.min(S - s0, dy)), n1 = n0 + dy, s1 = s0 + dy; + break; + } + case MODE_HANDLE: { + if (signX < 0) dx = Math.max(W - w0, Math.min(E - w0, dx)), w1 = w0 + dx, e1 = e0; + else if (signX > 0) dx = Math.max(W - e0, Math.min(E - e0, dx)), w1 = w0, e1 = e0 + dx; + if (signY < 0) dy = Math.max(N - n0, Math.min(S - n0, dy)), n1 = n0 + dy, s1 = s0; + else if (signY 
> 0) dy = Math.max(N - s0, Math.min(S - s0, dy)), n1 = n0, s1 = s0 + dy; + break; + } + case MODE_CENTER: { + if (signX) w1 = Math.max(W, Math.min(E, w0 - dx * signX)), e1 = Math.max(W, Math.min(E, e0 + dx * signX)); + if (signY) n1 = Math.max(N, Math.min(S, n0 - dy * signY)), s1 = Math.max(N, Math.min(S, s0 + dy * signY)); + break; + } + } + + if (e1 < w1) { + signX *= -1; + t = w0, w0 = e0, e0 = t; + t = w1, w1 = e1, e1 = t; + if (type in flipX) overlay.attr("cursor", cursors[type = flipX[type]]); + } + + if (s1 < n1) { + signY *= -1; + t = n0, n0 = s0, s0 = t; + t = n1, n1 = s1, s1 = t; + if (type in flipY) overlay.attr("cursor", cursors[type = flipY[type]]); + } + + if (state.selection) selection = state.selection; // May be set by brush.move! + if (lockX) w1 = selection[0][0], e1 = selection[1][0]; + if (lockY) n1 = selection[0][1], s1 = selection[1][1]; + + if (selection[0][0] !== w1 + || selection[0][1] !== n1 + || selection[1][0] !== e1 + || selection[1][1] !== s1) { + state.selection = [[w1, n1], [e1, s1]]; + redraw.call(that); + emit.brush(); + } + } + + function ended() { + nopropagation$1(); + if (exports.event.touches) { + if (exports.event.touches.length) return; + if (touchending) clearTimeout(touchending); + touchending = setTimeout(function() { touchending = null; }, 500); // Ghost clicks are delayed! + } else { + yesdrag(exports.event.view, moving); + view.on("keydown.brush keyup.brush mousemove.brush mouseup.brush", null); + } + group.attr("pointer-events", "all"); + overlay.attr("cursor", cursors.overlay); + if (state.selection) selection = state.selection; // May be set by brush.move (on start)! 
+ if (empty$1(selection)) state.selection = null, redraw.call(that); + emit.end(); + } + + function keydowned() { + switch (exports.event.keyCode) { + case 16: { // SHIFT + shifting = signX && signY; + break; + } + case 18: { // ALT + if (mode === MODE_HANDLE) { + if (signX) e0 = e1 - dx * signX, w0 = w1 + dx * signX; + if (signY) s0 = s1 - dy * signY, n0 = n1 + dy * signY; + mode = MODE_CENTER; + move(); + } + break; + } + case 32: { // SPACE; takes priority over ALT + if (mode === MODE_HANDLE || mode === MODE_CENTER) { + if (signX < 0) e0 = e1 - dx; else if (signX > 0) w0 = w1 - dx; + if (signY < 0) s0 = s1 - dy; else if (signY > 0) n0 = n1 - dy; + mode = MODE_SPACE; + overlay.attr("cursor", cursors.selection); + move(); + } + break; + } + default: return; + } + noevent$1(); + } + + function keyupped() { + switch (exports.event.keyCode) { + case 16: { // SHIFT + if (shifting) { + lockX = lockY = shifting = false; + move(); + } + break; + } + case 18: { // ALT + if (mode === MODE_CENTER) { + if (signX < 0) e0 = e1; else if (signX > 0) w0 = w1; + if (signY < 0) s0 = s1; else if (signY > 0) n0 = n1; + mode = MODE_HANDLE; + move(); + } + break; + } + case 32: { // SPACE + if (mode === MODE_SPACE) { + if (exports.event.altKey) { + if (signX) e0 = e1 - dx * signX, w0 = w1 + dx * signX; + if (signY) s0 = s1 - dy * signY, n0 = n1 + dy * signY; + mode = MODE_CENTER; + } else { + if (signX < 0) e0 = e1; else if (signX > 0) w0 = w1; + if (signY < 0) s0 = s1; else if (signY > 0) n0 = n1; + mode = MODE_HANDLE; + } + overlay.attr("cursor", cursors[type]); + move(); + } + break; + } + default: return; + } + noevent$1(); + } + } + + function touchmoved() { + emitter(this, arguments).moved(); + } + + function touchended() { + emitter(this, arguments).ended(); + } + + function initialize() { + var state = this.__brush || {selection: null}; + state.extent = number2(extent.apply(this, arguments)); + state.dim = dim; + return state; + } + + brush.extent = function(_) { + return 
arguments.length ? (extent = typeof _ === "function" ? _ : constant$4(number2(_)), brush) : extent; + }; + + brush.filter = function(_) { + return arguments.length ? (filter = typeof _ === "function" ? _ : constant$4(!!_), brush) : filter; + }; + + brush.touchable = function(_) { + return arguments.length ? (touchable = typeof _ === "function" ? _ : constant$4(!!_), brush) : touchable; + }; + + brush.handleSize = function(_) { + return arguments.length ? (handleSize = +_, brush) : handleSize; + }; + + brush.keyModifiers = function(_) { + return arguments.length ? (keys = !!_, brush) : keys; + }; + + brush.on = function() { + var value = listeners.on.apply(listeners, arguments); + return value === listeners ? brush : value; + }; + + return brush; +} + +var cos = Math.cos; +var sin = Math.sin; +var pi$1 = Math.PI; +var halfPi$1 = pi$1 / 2; +var tau$1 = pi$1 * 2; +var max$1 = Math.max; + +function compareValue(compare) { + return function(a, b) { + return compare( + a.source.value + a.target.value, + b.source.value + b.target.value + ); + }; +} + +function chord() { + var padAngle = 0, + sortGroups = null, + sortSubgroups = null, + sortChords = null; + + function chord(matrix) { + var n = matrix.length, + groupSums = [], + groupIndex = sequence(n), + subgroupIndex = [], + chords = [], + groups = chords.groups = new Array(n), + subgroups = new Array(n * n), + k, + x, + x0, + dx, + i, + j; + + // Compute the sum. + k = 0, i = -1; while (++i < n) { + x = 0, j = -1; while (++j < n) { + x += matrix[i][j]; + } + groupSums.push(x); + subgroupIndex.push(sequence(n)); + k += x; + } + + // Sort groups… + if (sortGroups) groupIndex.sort(function(a, b) { + return sortGroups(groupSums[a], groupSums[b]); + }); + + // Sort subgroups… + if (sortSubgroups) subgroupIndex.forEach(function(d, i) { + d.sort(function(a, b) { + return sortSubgroups(matrix[i][a], matrix[i][b]); + }); + }); + + // Convert the sum to scaling factor for [0, 2pi]. 
+ // TODO Allow start and end angle to be specified? + // TODO Allow padding to be specified as percentage? + k = max$1(0, tau$1 - padAngle * n) / k; + dx = k ? padAngle : tau$1 / n; + + // Compute the start and end angle for each group and subgroup. + // Note: Opera has a bug reordering object literal properties! + x = 0, i = -1; while (++i < n) { + x0 = x, j = -1; while (++j < n) { + var di = groupIndex[i], + dj = subgroupIndex[di][j], + v = matrix[di][dj], + a0 = x, + a1 = x += v * k; + subgroups[dj * n + di] = { + index: di, + subindex: dj, + startAngle: a0, + endAngle: a1, + value: v + }; + } + groups[di] = { + index: di, + startAngle: x0, + endAngle: x, + value: groupSums[di] + }; + x += dx; + } + + // Generate chords for each (non-empty) subgroup-subgroup link. + i = -1; while (++i < n) { + j = i - 1; while (++j < n) { + var source = subgroups[j * n + i], + target = subgroups[i * n + j]; + if (source.value || target.value) { + chords.push(source.value < target.value + ? {source: target, target: source} + : {source: source, target: target}); + } + } + } + + return sortChords ? chords.sort(sortChords) : chords; + } + + chord.padAngle = function(_) { + return arguments.length ? (padAngle = max$1(0, _), chord) : padAngle; + }; + + chord.sortGroups = function(_) { + return arguments.length ? (sortGroups = _, chord) : sortGroups; + }; + + chord.sortSubgroups = function(_) { + return arguments.length ? (sortSubgroups = _, chord) : sortSubgroups; + }; + + chord.sortChords = function(_) { + return arguments.length ? (_ == null ? 
sortChords = null : (sortChords = compareValue(_))._ = _, chord) : sortChords && sortChords._; + }; + + return chord; +} + +var slice$2 = Array.prototype.slice; + +function constant$5(x) { + return function() { + return x; + }; +} + +var pi$2 = Math.PI, + tau$2 = 2 * pi$2, + epsilon$1 = 1e-6, + tauEpsilon = tau$2 - epsilon$1; + +function Path() { + this._x0 = this._y0 = // start of current subpath + this._x1 = this._y1 = null; // end of current subpath + this._ = ""; +} + +function path() { + return new Path; +} + +Path.prototype = path.prototype = { + constructor: Path, + moveTo: function(x, y) { + this._ += "M" + (this._x0 = this._x1 = +x) + "," + (this._y0 = this._y1 = +y); + }, + closePath: function() { + if (this._x1 !== null) { + this._x1 = this._x0, this._y1 = this._y0; + this._ += "Z"; + } + }, + lineTo: function(x, y) { + this._ += "L" + (this._x1 = +x) + "," + (this._y1 = +y); + }, + quadraticCurveTo: function(x1, y1, x, y) { + this._ += "Q" + (+x1) + "," + (+y1) + "," + (this._x1 = +x) + "," + (this._y1 = +y); + }, + bezierCurveTo: function(x1, y1, x2, y2, x, y) { + this._ += "C" + (+x1) + "," + (+y1) + "," + (+x2) + "," + (+y2) + "," + (this._x1 = +x) + "," + (this._y1 = +y); + }, + arcTo: function(x1, y1, x2, y2, r) { + x1 = +x1, y1 = +y1, x2 = +x2, y2 = +y2, r = +r; + var x0 = this._x1, + y0 = this._y1, + x21 = x2 - x1, + y21 = y2 - y1, + x01 = x0 - x1, + y01 = y0 - y1, + l01_2 = x01 * x01 + y01 * y01; + + // Is the radius negative? Error. + if (r < 0) throw new Error("negative radius: " + r); + + // Is this path empty? Move to (x1,y1). + if (this._x1 === null) { + this._ += "M" + (this._x1 = x1) + "," + (this._y1 = y1); + } + + // Or, is (x1,y1) coincident with (x0,y0)? Do nothing. + else if (!(l01_2 > epsilon$1)); + + // Or, are (x0,y0), (x1,y1) and (x2,y2) collinear? + // Equivalently, is (x1,y1) coincident with (x2,y2)? + // Or, is the radius zero? Line to (x1,y1). 
+ else if (!(Math.abs(y01 * x21 - y21 * x01) > epsilon$1) || !r) { + this._ += "L" + (this._x1 = x1) + "," + (this._y1 = y1); + } + + // Otherwise, draw an arc! + else { + var x20 = x2 - x0, + y20 = y2 - y0, + l21_2 = x21 * x21 + y21 * y21, + l20_2 = x20 * x20 + y20 * y20, + l21 = Math.sqrt(l21_2), + l01 = Math.sqrt(l01_2), + l = r * Math.tan((pi$2 - Math.acos((l21_2 + l01_2 - l20_2) / (2 * l21 * l01))) / 2), + t01 = l / l01, + t21 = l / l21; + + // If the start tangent is not coincident with (x0,y0), line to. + if (Math.abs(t01 - 1) > epsilon$1) { + this._ += "L" + (x1 + t01 * x01) + "," + (y1 + t01 * y01); + } + + this._ += "A" + r + "," + r + ",0,0," + (+(y01 * x20 > x01 * y20)) + "," + (this._x1 = x1 + t21 * x21) + "," + (this._y1 = y1 + t21 * y21); + } + }, + arc: function(x, y, r, a0, a1, ccw) { + x = +x, y = +y, r = +r, ccw = !!ccw; + var dx = r * Math.cos(a0), + dy = r * Math.sin(a0), + x0 = x + dx, + y0 = y + dy, + cw = 1 ^ ccw, + da = ccw ? a0 - a1 : a1 - a0; + + // Is the radius negative? Error. + if (r < 0) throw new Error("negative radius: " + r); + + // Is this path empty? Move to (x0,y0). + if (this._x1 === null) { + this._ += "M" + x0 + "," + y0; + } + + // Or, is (x0,y0) not coincident with the previous point? Line to (x0,y0). + else if (Math.abs(this._x1 - x0) > epsilon$1 || Math.abs(this._y1 - y0) > epsilon$1) { + this._ += "L" + x0 + "," + y0; + } + + // Is this arc empty? We’re done. + if (!r) return; + + // Does the angle go the wrong way? Flip the direction. + if (da < 0) da = da % tau$2 + tau$2; + + // Is this a complete circle? Draw two arcs to complete the circle. + if (da > tauEpsilon) { + this._ += "A" + r + "," + r + ",0,1," + cw + "," + (x - dx) + "," + (y - dy) + "A" + r + "," + r + ",0,1," + cw + "," + (this._x1 = x0) + "," + (this._y1 = y0); + } + + // Is this arc non-empty? Draw an arc! 
+ else if (da > epsilon$1) { + this._ += "A" + r + "," + r + ",0," + (+(da >= pi$2)) + "," + cw + "," + (this._x1 = x + r * Math.cos(a1)) + "," + (this._y1 = y + r * Math.sin(a1)); + } + }, + rect: function(x, y, w, h) { + this._ += "M" + (this._x0 = this._x1 = +x) + "," + (this._y0 = this._y1 = +y) + "h" + (+w) + "v" + (+h) + "h" + (-w) + "Z"; + }, + toString: function() { + return this._; + } +}; + +function defaultSource(d) { + return d.source; +} + +function defaultTarget(d) { + return d.target; +} + +function defaultRadius(d) { + return d.radius; +} + +function defaultStartAngle(d) { + return d.startAngle; +} + +function defaultEndAngle(d) { + return d.endAngle; +} + +function ribbon() { + var source = defaultSource, + target = defaultTarget, + radius = defaultRadius, + startAngle = defaultStartAngle, + endAngle = defaultEndAngle, + context = null; + + function ribbon() { + var buffer, + argv = slice$2.call(arguments), + s = source.apply(this, argv), + t = target.apply(this, argv), + sr = +radius.apply(this, (argv[0] = s, argv)), + sa0 = startAngle.apply(this, argv) - halfPi$1, + sa1 = endAngle.apply(this, argv) - halfPi$1, + sx0 = sr * cos(sa0), + sy0 = sr * sin(sa0), + tr = +radius.apply(this, (argv[0] = t, argv)), + ta0 = startAngle.apply(this, argv) - halfPi$1, + ta1 = endAngle.apply(this, argv) - halfPi$1; + + if (!context) context = buffer = path(); + + context.moveTo(sx0, sy0); + context.arc(0, 0, sr, sa0, sa1); + if (sa0 !== ta0 || sa1 !== ta1) { // TODO sr !== tr? + context.quadraticCurveTo(0, 0, tr * cos(ta0), tr * sin(ta0)); + context.arc(0, 0, tr, ta0, ta1); + } + context.quadraticCurveTo(0, 0, sx0, sy0); + context.closePath(); + + if (buffer) return context = null, buffer + "" || null; + } + + ribbon.radius = function(_) { + return arguments.length ? (radius = typeof _ === "function" ? _ : constant$5(+_), ribbon) : radius; + }; + + ribbon.startAngle = function(_) { + return arguments.length ? (startAngle = typeof _ === "function" ? 
_ : constant$5(+_), ribbon) : startAngle; + }; + + ribbon.endAngle = function(_) { + return arguments.length ? (endAngle = typeof _ === "function" ? _ : constant$5(+_), ribbon) : endAngle; + }; + + ribbon.source = function(_) { + return arguments.length ? (source = _, ribbon) : source; + }; + + ribbon.target = function(_) { + return arguments.length ? (target = _, ribbon) : target; + }; + + ribbon.context = function(_) { + return arguments.length ? ((context = _ == null ? null : _), ribbon) : context; + }; + + return ribbon; +} + +var prefix = "$"; + +function Map() {} + +Map.prototype = map$1.prototype = { + constructor: Map, + has: function(key) { + return (prefix + key) in this; + }, + get: function(key) { + return this[prefix + key]; + }, + set: function(key, value) { + this[prefix + key] = value; + return this; + }, + remove: function(key) { + var property = prefix + key; + return property in this && delete this[property]; + }, + clear: function() { + for (var property in this) if (property[0] === prefix) delete this[property]; + }, + keys: function() { + var keys = []; + for (var property in this) if (property[0] === prefix) keys.push(property.slice(1)); + return keys; + }, + values: function() { + var values = []; + for (var property in this) if (property[0] === prefix) values.push(this[property]); + return values; + }, + entries: function() { + var entries = []; + for (var property in this) if (property[0] === prefix) entries.push({key: property.slice(1), value: this[property]}); + return entries; + }, + size: function() { + var size = 0; + for (var property in this) if (property[0] === prefix) ++size; + return size; + }, + empty: function() { + for (var property in this) if (property[0] === prefix) return false; + return true; + }, + each: function(f) { + for (var property in this) if (property[0] === prefix) f(this[property], property.slice(1), this); + } +}; + +function map$1(object, f) { + var map = new Map; + + // Copy constructor. 
+ if (object instanceof Map) object.each(function(value, key) { map.set(key, value); }); + + // Index array by numeric index or specified key function. + else if (Array.isArray(object)) { + var i = -1, + n = object.length, + o; + + if (f == null) while (++i < n) map.set(i, object[i]); + else while (++i < n) map.set(f(o = object[i], i, object), o); + } + + // Convert object to map. + else if (object) for (var key in object) map.set(key, object[key]); + + return map; +} + +function nest() { + var keys = [], + sortKeys = [], + sortValues, + rollup, + nest; + + function apply(array, depth, createResult, setResult) { + if (depth >= keys.length) { + if (sortValues != null) array.sort(sortValues); + return rollup != null ? rollup(array) : array; + } + + var i = -1, + n = array.length, + key = keys[depth++], + keyValue, + value, + valuesByKey = map$1(), + values, + result = createResult(); + + while (++i < n) { + if (values = valuesByKey.get(keyValue = key(value = array[i]) + "")) { + values.push(value); + } else { + valuesByKey.set(keyValue, [value]); + } + } + + valuesByKey.each(function(values, key) { + setResult(result, key, apply(values, depth, createResult, setResult)); + }); + + return result; + } + + function entries(map, depth) { + if (++depth > keys.length) return map; + var array, sortKey = sortKeys[depth - 1]; + if (rollup != null && depth >= keys.length) array = map.entries(); + else array = [], map.each(function(v, k) { array.push({key: k, values: entries(v, depth)}); }); + return sortKey != null ? 
array.sort(function(a, b) { return sortKey(a.key, b.key); }) : array; + } + + return nest = { + object: function(array) { return apply(array, 0, createObject, setObject); }, + map: function(array) { return apply(array, 0, createMap, setMap); }, + entries: function(array) { return entries(apply(array, 0, createMap, setMap), 0); }, + key: function(d) { keys.push(d); return nest; }, + sortKeys: function(order) { sortKeys[keys.length - 1] = order; return nest; }, + sortValues: function(order) { sortValues = order; return nest; }, + rollup: function(f) { rollup = f; return nest; } + }; +} + +function createObject() { + return {}; +} + +function setObject(object, key, value) { + object[key] = value; +} + +function createMap() { + return map$1(); +} + +function setMap(map, key, value) { + map.set(key, value); +} + +function Set() {} + +var proto = map$1.prototype; + +Set.prototype = set$2.prototype = { + constructor: Set, + has: proto.has, + add: function(value) { + value += ""; + this[prefix + value] = value; + return this; + }, + remove: proto.remove, + clear: proto.clear, + values: proto.keys, + size: proto.size, + empty: proto.empty, + each: proto.each +}; + +function set$2(object, f) { + var set = new Set; + + // Copy constructor. + if (object instanceof Set) object.each(function(value) { set.add(value); }); + + // Otherwise, assume it’s an array. 
+ else if (object) { + var i = -1, n = object.length; + if (f == null) while (++i < n) set.add(object[i]); + else while (++i < n) set.add(f(object[i], i, object)); + } + + return set; +} + +function keys(map) { + var keys = []; + for (var key in map) keys.push(key); + return keys; +} + +function values(map) { + var values = []; + for (var key in map) values.push(map[key]); + return values; +} + +function entries(map) { + var entries = []; + for (var key in map) entries.push({key: key, value: map[key]}); + return entries; +} + +var array$2 = Array.prototype; + +var slice$3 = array$2.slice; + +function ascending$2(a, b) { + return a - b; +} + +function area(ring) { + var i = 0, n = ring.length, area = ring[n - 1][1] * ring[0][0] - ring[n - 1][0] * ring[0][1]; + while (++i < n) area += ring[i - 1][1] * ring[i][0] - ring[i - 1][0] * ring[i][1]; + return area; +} + +function constant$6(x) { + return function() { + return x; + }; +} + +function contains(ring, hole) { + var i = -1, n = hole.length, c; + while (++i < n) if (c = ringContains(ring, hole[i])) return c; + return 0; +} + +function ringContains(ring, point) { + var x = point[0], y = point[1], contains = -1; + for (var i = 0, n = ring.length, j = n - 1; i < n; j = i++) { + var pi = ring[i], xi = pi[0], yi = pi[1], pj = ring[j], xj = pj[0], yj = pj[1]; + if (segmentContains(pi, pj, point)) return 0; + if (((yi > y) !== (yj > y)) && ((x < (xj - xi) * (y - yi) / (yj - yi) + xi))) contains = -contains; + } + return contains; +} + +function segmentContains(a, b, c) { + var i; return collinear(a, b, c) && within(a[i = +(a[0] === b[0])], c[i], b[i]); +} + +function collinear(a, b, c) { + return (b[0] - a[0]) * (c[1] - a[1]) === (c[0] - a[0]) * (b[1] - a[1]); +} + +function within(p, q, r) { + return p <= q && q <= r || r <= q && q <= p; +} + +function noop$1() {} + +var cases = [ + [], + [[[1.0, 1.5], [0.5, 1.0]]], + [[[1.5, 1.0], [1.0, 1.5]]], + [[[1.5, 1.0], [0.5, 1.0]]], + [[[1.0, 0.5], [1.5, 1.0]]], + [[[1.0, 1.5], 
[0.5, 1.0]], [[1.0, 0.5], [1.5, 1.0]]], + [[[1.0, 0.5], [1.0, 1.5]]], + [[[1.0, 0.5], [0.5, 1.0]]], + [[[0.5, 1.0], [1.0, 0.5]]], + [[[1.0, 1.5], [1.0, 0.5]]], + [[[0.5, 1.0], [1.0, 0.5]], [[1.5, 1.0], [1.0, 1.5]]], + [[[1.5, 1.0], [1.0, 0.5]]], + [[[0.5, 1.0], [1.5, 1.0]]], + [[[1.0, 1.5], [1.5, 1.0]]], + [[[0.5, 1.0], [1.0, 1.5]]], + [] +]; + +function contours() { + var dx = 1, + dy = 1, + threshold = thresholdSturges, + smooth = smoothLinear; + + function contours(values) { + var tz = threshold(values); + + // Convert number of thresholds into uniform thresholds. + if (!Array.isArray(tz)) { + var domain = extent(values), start = domain[0], stop = domain[1]; + tz = tickStep(start, stop, tz); + tz = sequence(Math.floor(start / tz) * tz, Math.floor(stop / tz) * tz, tz); + } else { + tz = tz.slice().sort(ascending$2); + } + + return tz.map(function(value) { + return contour(values, value); + }); + } + + // Accumulate, smooth contour rings, assign holes to exterior rings. + // Based on https://github.com/mbostock/shapefile/blob/v0.6.2/shp/polygon.js + function contour(values, value) { + var polygons = [], + holes = []; + + isorings(values, value, function(ring) { + smooth(ring, values, value); + if (area(ring) > 0) polygons.push([ring]); + else holes.push(ring); + }); + + holes.forEach(function(hole) { + for (var i = 0, n = polygons.length, polygon; i < n; ++i) { + if (contains((polygon = polygons[i])[0], hole) !== -1) { + polygon.push(hole); + return; + } + } + }); + + return { + type: "MultiPolygon", + value: value, + coordinates: polygons + }; + } + + // Marching squares with isolines stitched into rings. + // Based on https://github.com/topojson/topojson-client/blob/v3.0.0/src/stitch.js + function isorings(values, value, callback) { + var fragmentByStart = new Array, + fragmentByEnd = new Array, + x, y, t0, t1, t2, t3; + + // Special case for the first row (y = -1, t2 = t3 = 0). 
+ x = y = -1; + t1 = values[0] >= value; + cases[t1 << 1].forEach(stitch); + while (++x < dx - 1) { + t0 = t1, t1 = values[x + 1] >= value; + cases[t0 | t1 << 1].forEach(stitch); + } + cases[t1 << 0].forEach(stitch); + + // General case for the intermediate rows. + while (++y < dy - 1) { + x = -1; + t1 = values[y * dx + dx] >= value; + t2 = values[y * dx] >= value; + cases[t1 << 1 | t2 << 2].forEach(stitch); + while (++x < dx - 1) { + t0 = t1, t1 = values[y * dx + dx + x + 1] >= value; + t3 = t2, t2 = values[y * dx + x + 1] >= value; + cases[t0 | t1 << 1 | t2 << 2 | t3 << 3].forEach(stitch); + } + cases[t1 | t2 << 3].forEach(stitch); + } + + // Special case for the last row (y = dy - 1, t0 = t1 = 0). + x = -1; + t2 = values[y * dx] >= value; + cases[t2 << 2].forEach(stitch); + while (++x < dx - 1) { + t3 = t2, t2 = values[y * dx + x + 1] >= value; + cases[t2 << 2 | t3 << 3].forEach(stitch); + } + cases[t2 << 3].forEach(stitch); + + function stitch(line) { + var start = [line[0][0] + x, line[0][1] + y], + end = [line[1][0] + x, line[1][1] + y], + startIndex = index(start), + endIndex = index(end), + f, g; + if (f = fragmentByEnd[startIndex]) { + if (g = fragmentByStart[endIndex]) { + delete fragmentByEnd[f.end]; + delete fragmentByStart[g.start]; + if (f === g) { + f.ring.push(end); + callback(f.ring); + } else { + fragmentByStart[f.start] = fragmentByEnd[g.end] = {start: f.start, end: g.end, ring: f.ring.concat(g.ring)}; + } + } else { + delete fragmentByEnd[f.end]; + f.ring.push(end); + fragmentByEnd[f.end = endIndex] = f; + } + } else if (f = fragmentByStart[endIndex]) { + if (g = fragmentByEnd[startIndex]) { + delete fragmentByStart[f.start]; + delete fragmentByEnd[g.end]; + if (f === g) { + f.ring.push(end); + callback(f.ring); + } else { + fragmentByStart[g.start] = fragmentByEnd[f.end] = {start: g.start, end: f.end, ring: g.ring.concat(f.ring)}; + } + } else { + delete fragmentByStart[f.start]; + f.ring.unshift(start); + fragmentByStart[f.start = startIndex] 
= f; + } + } else { + fragmentByStart[startIndex] = fragmentByEnd[endIndex] = {start: startIndex, end: endIndex, ring: [start, end]}; + } + } + } + + function index(point) { + return point[0] * 2 + point[1] * (dx + 1) * 4; + } + + function smoothLinear(ring, values, value) { + ring.forEach(function(point) { + var x = point[0], + y = point[1], + xt = x | 0, + yt = y | 0, + v0, + v1 = values[yt * dx + xt]; + if (x > 0 && x < dx && xt === x) { + v0 = values[yt * dx + xt - 1]; + point[0] = x + (value - v0) / (v1 - v0) - 0.5; + } + if (y > 0 && y < dy && yt === y) { + v0 = values[(yt - 1) * dx + xt]; + point[1] = y + (value - v0) / (v1 - v0) - 0.5; + } + }); + } + + contours.contour = contour; + + contours.size = function(_) { + if (!arguments.length) return [dx, dy]; + var _0 = Math.ceil(_[0]), _1 = Math.ceil(_[1]); + if (!(_0 > 0) || !(_1 > 0)) throw new Error("invalid size"); + return dx = _0, dy = _1, contours; + }; + + contours.thresholds = function(_) { + return arguments.length ? (threshold = typeof _ === "function" ? _ : Array.isArray(_) ? constant$6(slice$3.call(_)) : constant$6(_), contours) : threshold; + }; + + contours.smooth = function(_) { + return arguments.length ? (smooth = _ ? smoothLinear : noop$1, contours) : smooth === smoothLinear; + }; + + return contours; +} + +// TODO Optimize edge cases. +// TODO Optimize index calculation. +// TODO Optimize arguments. +function blurX(source, target, r) { + var n = source.width, + m = source.height, + w = (r << 1) + 1; + for (var j = 0; j < m; ++j) { + for (var i = 0, sr = 0; i < n + r; ++i) { + if (i < n) { + sr += source.data[i + j * n]; + } + if (i >= r) { + if (i >= w) { + sr -= source.data[i - w + j * n]; + } + target.data[i - r + j * n] = sr / Math.min(i + 1, n - 1 + w - i, w); + } + } + } +} + +// TODO Optimize edge cases. +// TODO Optimize index calculation. +// TODO Optimize arguments. 
+function blurY(source, target, r) { + var n = source.width, + m = source.height, + w = (r << 1) + 1; + for (var i = 0; i < n; ++i) { + for (var j = 0, sr = 0; j < m + r; ++j) { + if (j < m) { + sr += source.data[i + j * n]; + } + if (j >= r) { + if (j >= w) { + sr -= source.data[i + (j - w) * n]; + } + target.data[i + (j - r) * n] = sr / Math.min(j + 1, m - 1 + w - j, w); + } + } + } +} + +function defaultX(d) { + return d[0]; +} + +function defaultY(d) { + return d[1]; +} + +function defaultWeight() { + return 1; +} + +function density() { + var x = defaultX, + y = defaultY, + weight = defaultWeight, + dx = 960, + dy = 500, + r = 20, // blur radius + k = 2, // log2(grid cell size) + o = r * 3, // grid offset, to pad for blur + n = (dx + o * 2) >> k, // grid width + m = (dy + o * 2) >> k, // grid height + threshold = constant$6(20); + + function density(data) { + var values0 = new Float32Array(n * m), + values1 = new Float32Array(n * m); + + data.forEach(function(d, i, data) { + var xi = (+x(d, i, data) + o) >> k, + yi = (+y(d, i, data) + o) >> k, + wi = +weight(d, i, data); + if (xi >= 0 && xi < n && yi >= 0 && yi < m) { + values0[xi + yi * n] += wi; + } + }); + + // TODO Optimize. + blurX({width: n, height: m, data: values0}, {width: n, height: m, data: values1}, r >> k); + blurY({width: n, height: m, data: values1}, {width: n, height: m, data: values0}, r >> k); + blurX({width: n, height: m, data: values0}, {width: n, height: m, data: values1}, r >> k); + blurY({width: n, height: m, data: values1}, {width: n, height: m, data: values0}, r >> k); + blurX({width: n, height: m, data: values0}, {width: n, height: m, data: values1}, r >> k); + blurY({width: n, height: m, data: values1}, {width: n, height: m, data: values0}, r >> k); + + var tz = threshold(values0); + + // Convert number of thresholds into uniform thresholds. 
+ if (!Array.isArray(tz)) { + var stop = max(values0); + tz = tickStep(0, stop, tz); + tz = sequence(0, Math.floor(stop / tz) * tz, tz); + tz.shift(); + } + + return contours() + .thresholds(tz) + .size([n, m]) + (values0) + .map(transform); + } + + function transform(geometry) { + geometry.value *= Math.pow(2, -2 * k); // Density in points per square pixel. + geometry.coordinates.forEach(transformPolygon); + return geometry; + } + + function transformPolygon(coordinates) { + coordinates.forEach(transformRing); + } + + function transformRing(coordinates) { + coordinates.forEach(transformPoint); + } + + // TODO Optimize. + function transformPoint(coordinates) { + coordinates[0] = coordinates[0] * Math.pow(2, k) - o; + coordinates[1] = coordinates[1] * Math.pow(2, k) - o; + } + + function resize() { + o = r * 3; + n = (dx + o * 2) >> k; + m = (dy + o * 2) >> k; + return density; + } + + density.x = function(_) { + return arguments.length ? (x = typeof _ === "function" ? _ : constant$6(+_), density) : x; + }; + + density.y = function(_) { + return arguments.length ? (y = typeof _ === "function" ? _ : constant$6(+_), density) : y; + }; + + density.weight = function(_) { + return arguments.length ? (weight = typeof _ === "function" ? _ : constant$6(+_), density) : weight; + }; + + density.size = function(_) { + if (!arguments.length) return [dx, dy]; + var _0 = Math.ceil(_[0]), _1 = Math.ceil(_[1]); + if (!(_0 >= 0) && !(_0 >= 0)) throw new Error("invalid size"); + return dx = _0, dy = _1, resize(); + }; + + density.cellSize = function(_) { + if (!arguments.length) return 1 << k; + if (!((_ = +_) >= 1)) throw new Error("invalid cell size"); + return k = Math.floor(Math.log(_) / Math.LN2), resize(); + }; + + density.thresholds = function(_) { + return arguments.length ? (threshold = typeof _ === "function" ? _ : Array.isArray(_) ? 
constant$6(slice$3.call(_)) : constant$6(_), density) : threshold; + }; + + density.bandwidth = function(_) { + if (!arguments.length) return Math.sqrt(r * (r + 1)); + if (!((_ = +_) >= 0)) throw new Error("invalid bandwidth"); + return r = Math.round((Math.sqrt(4 * _ * _ + 1) - 1) / 2), resize(); + }; + + return density; +} + +var EOL = {}, + EOF = {}, + QUOTE = 34, + NEWLINE = 10, + RETURN = 13; + +function objectConverter(columns) { + return new Function("d", "return {" + columns.map(function(name, i) { + return JSON.stringify(name) + ": d[" + i + "] || \"\""; + }).join(",") + "}"); +} + +function customConverter(columns, f) { + var object = objectConverter(columns); + return function(row, i) { + return f(object(row), i, columns); + }; +} + +// Compute unique columns in order of discovery. +function inferColumns(rows) { + var columnSet = Object.create(null), + columns = []; + + rows.forEach(function(row) { + for (var column in row) { + if (!(column in columnSet)) { + columns.push(columnSet[column] = column); + } + } + }); + + return columns; +} + +function pad(value, width) { + var s = value + "", length = s.length; + return length < width ? new Array(width - length + 1).join(0) + s : s; +} + +function formatYear(year) { + return year < 0 ? "-" + pad(-year, 6) + : year > 9999 ? "+" + pad(year, 6) + : pad(year, 4); +} + +function formatDate(date) { + var hours = date.getUTCHours(), + minutes = date.getUTCMinutes(), + seconds = date.getUTCSeconds(), + milliseconds = date.getUTCMilliseconds(); + return isNaN(date) ? "Invalid Date" + : formatYear(date.getUTCFullYear()) + "-" + pad(date.getUTCMonth() + 1, 2) + "-" + pad(date.getUTCDate(), 2) + + (milliseconds ? "T" + pad(hours, 2) + ":" + pad(minutes, 2) + ":" + pad(seconds, 2) + "." + pad(milliseconds, 3) + "Z" + : seconds ? "T" + pad(hours, 2) + ":" + pad(minutes, 2) + ":" + pad(seconds, 2) + "Z" + : minutes || hours ? 
"T" + pad(hours, 2) + ":" + pad(minutes, 2) + "Z" + : ""); +} + +function dsvFormat(delimiter) { + var reFormat = new RegExp("[\"" + delimiter + "\n\r]"), + DELIMITER = delimiter.charCodeAt(0); + + function parse(text, f) { + var convert, columns, rows = parseRows(text, function(row, i) { + if (convert) return convert(row, i - 1); + columns = row, convert = f ? customConverter(row, f) : objectConverter(row); + }); + rows.columns = columns || []; + return rows; + } + + function parseRows(text, f) { + var rows = [], // output rows + N = text.length, + I = 0, // current character index + n = 0, // current line number + t, // current token + eof = N <= 0, // current token followed by EOF? + eol = false; // current token followed by EOL? + + // Strip the trailing newline. + if (text.charCodeAt(N - 1) === NEWLINE) --N; + if (text.charCodeAt(N - 1) === RETURN) --N; + + function token() { + if (eof) return EOF; + if (eol) return eol = false, EOL; + + // Unescape quotes. + var i, j = I, c; + if (text.charCodeAt(j) === QUOTE) { + while (I++ < N && text.charCodeAt(I) !== QUOTE || text.charCodeAt(++I) === QUOTE); + if ((i = I) >= N) eof = true; + else if ((c = text.charCodeAt(I++)) === NEWLINE) eol = true; + else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; } + return text.slice(j + 1, i - 1).replace(/""/g, "\""); + } + + // Find next delimiter or newline. + while (I < N) { + if ((c = text.charCodeAt(i = I++)) === NEWLINE) eol = true; + else if (c === RETURN) { eol = true; if (text.charCodeAt(I) === NEWLINE) ++I; } + else if (c !== DELIMITER) continue; + return text.slice(j, i); + } + + // Return last token before EOF. 
+ return eof = true, text.slice(j, N); + } + + while ((t = token()) !== EOF) { + var row = []; + while (t !== EOL && t !== EOF) row.push(t), t = token(); + if (f && (row = f(row, n++)) == null) continue; + rows.push(row); + } + + return rows; + } + + function preformatBody(rows, columns) { + return rows.map(function(row) { + return columns.map(function(column) { + return formatValue(row[column]); + }).join(delimiter); + }); + } + + function format(rows, columns) { + if (columns == null) columns = inferColumns(rows); + return [columns.map(formatValue).join(delimiter)].concat(preformatBody(rows, columns)).join("\n"); + } + + function formatBody(rows, columns) { + if (columns == null) columns = inferColumns(rows); + return preformatBody(rows, columns).join("\n"); + } + + function formatRows(rows) { + return rows.map(formatRow).join("\n"); + } + + function formatRow(row) { + return row.map(formatValue).join(delimiter); + } + + function formatValue(value) { + return value == null ? "" + : value instanceof Date ? formatDate(value) + : reFormat.test(value += "") ? 
"\"" + value.replace(/"/g, "\"\"") + "\"" + : value; + } + + return { + parse: parse, + parseRows: parseRows, + format: format, + formatBody: formatBody, + formatRows: formatRows, + formatRow: formatRow, + formatValue: formatValue + }; +} + +var csv = dsvFormat(","); + +var csvParse = csv.parse; +var csvParseRows = csv.parseRows; +var csvFormat = csv.format; +var csvFormatBody = csv.formatBody; +var csvFormatRows = csv.formatRows; +var csvFormatRow = csv.formatRow; +var csvFormatValue = csv.formatValue; + +var tsv = dsvFormat("\t"); + +var tsvParse = tsv.parse; +var tsvParseRows = tsv.parseRows; +var tsvFormat = tsv.format; +var tsvFormatBody = tsv.formatBody; +var tsvFormatRows = tsv.formatRows; +var tsvFormatRow = tsv.formatRow; +var tsvFormatValue = tsv.formatValue; + +function autoType(object) { + for (var key in object) { + var value = object[key].trim(), number, m; + if (!value) value = null; + else if (value === "true") value = true; + else if (value === "false") value = false; + else if (value === "NaN") value = NaN; + else if (!isNaN(number = +value)) value = number; + else if (m = value.match(/^([-+]\d{2})?\d{4}(-\d{2}(-\d{2})?)?(T\d{2}:\d{2}(:\d{2}(\.\d{3})?)?(Z|[-+]\d{2}:\d{2})?)?$/)) { + if (fixtz && !!m[4] && !m[7]) value = value.replace(/-/g, "/").replace(/T/, " "); + value = new Date(value); + } + else continue; + object[key] = value; + } + return object; +} + +// https://github.com/d3/d3-dsv/issues/45 +var fixtz = new Date("2019-01-01T00:00").getHours() || new Date("2019-07-01T00:00").getHours(); + +function responseBlob(response) { + if (!response.ok) throw new Error(response.status + " " + response.statusText); + return response.blob(); +} + +function blob(input, init) { + return fetch(input, init).then(responseBlob); +} + +function responseArrayBuffer(response) { + if (!response.ok) throw new Error(response.status + " " + response.statusText); + return response.arrayBuffer(); +} + +function buffer(input, init) { + return fetch(input, 
init).then(responseArrayBuffer); +} + +function responseText(response) { + if (!response.ok) throw new Error(response.status + " " + response.statusText); + return response.text(); +} + +function text(input, init) { + return fetch(input, init).then(responseText); +} + +function dsvParse(parse) { + return function(input, init, row) { + if (arguments.length === 2 && typeof init === "function") row = init, init = undefined; + return text(input, init).then(function(response) { + return parse(response, row); + }); + }; +} + +function dsv(delimiter, input, init, row) { + if (arguments.length === 3 && typeof init === "function") row = init, init = undefined; + var format = dsvFormat(delimiter); + return text(input, init).then(function(response) { + return format.parse(response, row); + }); +} + +var csv$1 = dsvParse(csvParse); +var tsv$1 = dsvParse(tsvParse); + +function image(input, init) { + return new Promise(function(resolve, reject) { + var image = new Image; + for (var key in init) image[key] = init[key]; + image.onerror = reject; + image.onload = function() { resolve(image); }; + image.src = input; + }); +} + +function responseJson(response) { + if (!response.ok) throw new Error(response.status + " " + response.statusText); + return response.json(); +} + +function json(input, init) { + return fetch(input, init).then(responseJson); +} + +function parser(type) { + return function(input, init) { + return text(input, init).then(function(text) { + return (new DOMParser).parseFromString(text, type); + }); + }; +} + +var xml = parser("application/xml"); + +var html = parser("text/html"); + +var svg = parser("image/svg+xml"); + +function center$1(x, y) { + var nodes; + + if (x == null) x = 0; + if (y == null) y = 0; + + function force() { + var i, + n = nodes.length, + node, + sx = 0, + sy = 0; + + for (i = 0; i < n; ++i) { + node = nodes[i], sx += node.x, sy += node.y; + } + + for (sx = sx / n - x, sy = sy / n - y, i = 0; i < n; ++i) { + node = nodes[i], node.x -= sx, 
node.y -= sy; + } + } + + force.initialize = function(_) { + nodes = _; + }; + + force.x = function(_) { + return arguments.length ? (x = +_, force) : x; + }; + + force.y = function(_) { + return arguments.length ? (y = +_, force) : y; + }; + + return force; +} + +function constant$7(x) { + return function() { + return x; + }; +} + +function jiggle() { + return (Math.random() - 0.5) * 1e-6; +} + +function tree_add(d) { + var x = +this._x.call(null, d), + y = +this._y.call(null, d); + return add(this.cover(x, y), x, y, d); +} + +function add(tree, x, y, d) { + if (isNaN(x) || isNaN(y)) return tree; // ignore invalid points + + var parent, + node = tree._root, + leaf = {data: d}, + x0 = tree._x0, + y0 = tree._y0, + x1 = tree._x1, + y1 = tree._y1, + xm, + ym, + xp, + yp, + right, + bottom, + i, + j; + + // If the tree is empty, initialize the root as a leaf. + if (!node) return tree._root = leaf, tree; + + // Find the existing leaf for the new point, or add it. + while (node.length) { + if (right = x >= (xm = (x0 + x1) / 2)) x0 = xm; else x1 = xm; + if (bottom = y >= (ym = (y0 + y1) / 2)) y0 = ym; else y1 = ym; + if (parent = node, !(node = node[i = bottom << 1 | right])) return parent[i] = leaf, tree; + } + + // Is the new point is exactly coincident with the existing point? + xp = +tree._x.call(null, node.data); + yp = +tree._y.call(null, node.data); + if (x === xp && y === yp) return leaf.next = node, parent ? parent[i] = leaf : tree._root = leaf, tree; + + // Otherwise, split the leaf node until the old and new point are separated. + do { + parent = parent ? 
parent[i] = new Array(4) : tree._root = new Array(4); + if (right = x >= (xm = (x0 + x1) / 2)) x0 = xm; else x1 = xm; + if (bottom = y >= (ym = (y0 + y1) / 2)) y0 = ym; else y1 = ym; + } while ((i = bottom << 1 | right) === (j = (yp >= ym) << 1 | (xp >= xm))); + return parent[j] = node, parent[i] = leaf, tree; +} + +function addAll(data) { + var d, i, n = data.length, + x, + y, + xz = new Array(n), + yz = new Array(n), + x0 = Infinity, + y0 = Infinity, + x1 = -Infinity, + y1 = -Infinity; + + // Compute the points and their extent. + for (i = 0; i < n; ++i) { + if (isNaN(x = +this._x.call(null, d = data[i])) || isNaN(y = +this._y.call(null, d))) continue; + xz[i] = x; + yz[i] = y; + if (x < x0) x0 = x; + if (x > x1) x1 = x; + if (y < y0) y0 = y; + if (y > y1) y1 = y; + } + + // If there were no (valid) points, abort. + if (x0 > x1 || y0 > y1) return this; + + // Expand the tree to cover the new points. + this.cover(x0, y0).cover(x1, y1); + + // Add the new points. + for (i = 0; i < n; ++i) { + add(this, xz[i], yz[i], data[i]); + } + + return this; +} + +function tree_cover(x, y) { + if (isNaN(x = +x) || isNaN(y = +y)) return this; // ignore invalid points + + var x0 = this._x0, + y0 = this._y0, + x1 = this._x1, + y1 = this._y1; + + // If the quadtree has no extent, initialize them. + // Integer extent are necessary so that if we later double the extent, + // the existing quadrant boundaries don’t change due to floating point error! + if (isNaN(x0)) { + x1 = (x0 = Math.floor(x)) + 1; + y1 = (y0 = Math.floor(y)) + 1; + } + + // Otherwise, double repeatedly to cover. 
+ else { + var z = x1 - x0, + node = this._root, + parent, + i; + + while (x0 > x || x >= x1 || y0 > y || y >= y1) { + i = (y < y0) << 1 | (x < x0); + parent = new Array(4), parent[i] = node, node = parent, z *= 2; + switch (i) { + case 0: x1 = x0 + z, y1 = y0 + z; break; + case 1: x0 = x1 - z, y1 = y0 + z; break; + case 2: x1 = x0 + z, y0 = y1 - z; break; + case 3: x0 = x1 - z, y0 = y1 - z; break; + } + } + + if (this._root && this._root.length) this._root = node; + } + + this._x0 = x0; + this._y0 = y0; + this._x1 = x1; + this._y1 = y1; + return this; +} + +function tree_data() { + var data = []; + this.visit(function(node) { + if (!node.length) do data.push(node.data); while (node = node.next) + }); + return data; +} + +function tree_extent(_) { + return arguments.length + ? this.cover(+_[0][0], +_[0][1]).cover(+_[1][0], +_[1][1]) + : isNaN(this._x0) ? undefined : [[this._x0, this._y0], [this._x1, this._y1]]; +} + +function Quad(node, x0, y0, x1, y1) { + this.node = node; + this.x0 = x0; + this.y0 = y0; + this.x1 = x1; + this.y1 = y1; +} + +function tree_find(x, y, radius) { + var data, + x0 = this._x0, + y0 = this._y0, + x1, + y1, + x2, + y2, + x3 = this._x1, + y3 = this._y1, + quads = [], + node = this._root, + q, + i; + + if (node) quads.push(new Quad(node, x0, y0, x3, y3)); + if (radius == null) radius = Infinity; + else { + x0 = x - radius, y0 = y - radius; + x3 = x + radius, y3 = y + radius; + radius *= radius; + } + + while (q = quads.pop()) { + + // Stop searching if this quadrant can’t contain a closer node. + if (!(node = q.node) + || (x1 = q.x0) > x3 + || (y1 = q.y0) > y3 + || (x2 = q.x1) < x0 + || (y2 = q.y1) < y0) continue; + + // Bisect the current quadrant. + if (node.length) { + var xm = (x1 + x2) / 2, + ym = (y1 + y2) / 2; + + quads.push( + new Quad(node[3], xm, ym, x2, y2), + new Quad(node[2], x1, ym, xm, y2), + new Quad(node[1], xm, y1, x2, ym), + new Quad(node[0], x1, y1, xm, ym) + ); + + // Visit the closest quadrant first. 
+ if (i = (y >= ym) << 1 | (x >= xm)) { + q = quads[quads.length - 1]; + quads[quads.length - 1] = quads[quads.length - 1 - i]; + quads[quads.length - 1 - i] = q; + } + } + + // Visit this point. (Visiting coincident points isn’t necessary!) + else { + var dx = x - +this._x.call(null, node.data), + dy = y - +this._y.call(null, node.data), + d2 = dx * dx + dy * dy; + if (d2 < radius) { + var d = Math.sqrt(radius = d2); + x0 = x - d, y0 = y - d; + x3 = x + d, y3 = y + d; + data = node.data; + } + } + } + + return data; +} + +function tree_remove(d) { + if (isNaN(x = +this._x.call(null, d)) || isNaN(y = +this._y.call(null, d))) return this; // ignore invalid points + + var parent, + node = this._root, + retainer, + previous, + next, + x0 = this._x0, + y0 = this._y0, + x1 = this._x1, + y1 = this._y1, + x, + y, + xm, + ym, + right, + bottom, + i, + j; + + // If the tree is empty, initialize the root as a leaf. + if (!node) return this; + + // Find the leaf node for the point. + // While descending, also retain the deepest parent with a non-removed sibling. + if (node.length) while (true) { + if (right = x >= (xm = (x0 + x1) / 2)) x0 = xm; else x1 = xm; + if (bottom = y >= (ym = (y0 + y1) / 2)) y0 = ym; else y1 = ym; + if (!(parent = node, node = node[i = bottom << 1 | right])) return this; + if (!node.length) break; + if (parent[(i + 1) & 3] || parent[(i + 2) & 3] || parent[(i + 3) & 3]) retainer = parent, j = i; + } + + // Find the point to remove. + while (node.data !== d) if (!(previous = node, node = node.next)) return this; + if (next = node.next) delete node.next; + + // If there are multiple coincident points, remove just the point. + if (previous) return (next ? previous.next = next : delete previous.next), this; + + // If this is the root point, remove it. + if (!parent) return this._root = next, this; + + // Remove this leaf. + next ? parent[i] = next : delete parent[i]; + + // If the parent now contains exactly one leaf, collapse superfluous parents. 
+ if ((node = parent[0] || parent[1] || parent[2] || parent[3]) + && node === (parent[3] || parent[2] || parent[1] || parent[0]) + && !node.length) { + if (retainer) retainer[j] = node; + else this._root = node; + } + + return this; +} + +function removeAll(data) { + for (var i = 0, n = data.length; i < n; ++i) this.remove(data[i]); + return this; +} + +function tree_root() { + return this._root; +} + +function tree_size() { + var size = 0; + this.visit(function(node) { + if (!node.length) do ++size; while (node = node.next) + }); + return size; +} + +function tree_visit(callback) { + var quads = [], q, node = this._root, child, x0, y0, x1, y1; + if (node) quads.push(new Quad(node, this._x0, this._y0, this._x1, this._y1)); + while (q = quads.pop()) { + if (!callback(node = q.node, x0 = q.x0, y0 = q.y0, x1 = q.x1, y1 = q.y1) && node.length) { + var xm = (x0 + x1) / 2, ym = (y0 + y1) / 2; + if (child = node[3]) quads.push(new Quad(child, xm, ym, x1, y1)); + if (child = node[2]) quads.push(new Quad(child, x0, ym, xm, y1)); + if (child = node[1]) quads.push(new Quad(child, xm, y0, x1, ym)); + if (child = node[0]) quads.push(new Quad(child, x0, y0, xm, ym)); + } + } + return this; +} + +function tree_visitAfter(callback) { + var quads = [], next = [], q; + if (this._root) quads.push(new Quad(this._root, this._x0, this._y0, this._x1, this._y1)); + while (q = quads.pop()) { + var node = q.node; + if (node.length) { + var child, x0 = q.x0, y0 = q.y0, x1 = q.x1, y1 = q.y1, xm = (x0 + x1) / 2, ym = (y0 + y1) / 2; + if (child = node[0]) quads.push(new Quad(child, x0, y0, xm, ym)); + if (child = node[1]) quads.push(new Quad(child, xm, y0, x1, ym)); + if (child = node[2]) quads.push(new Quad(child, x0, ym, xm, y1)); + if (child = node[3]) quads.push(new Quad(child, xm, ym, x1, y1)); + } + next.push(q); + } + while (q = next.pop()) { + callback(q.node, q.x0, q.y0, q.x1, q.y1); + } + return this; +} + +function defaultX$1(d) { + return d[0]; +} + +function tree_x(_) { + return 
arguments.length ? (this._x = _, this) : this._x; +} + +function defaultY$1(d) { + return d[1]; +} + +function tree_y(_) { + return arguments.length ? (this._y = _, this) : this._y; +} + +function quadtree(nodes, x, y) { + var tree = new Quadtree(x == null ? defaultX$1 : x, y == null ? defaultY$1 : y, NaN, NaN, NaN, NaN); + return nodes == null ? tree : tree.addAll(nodes); +} + +function Quadtree(x, y, x0, y0, x1, y1) { + this._x = x; + this._y = y; + this._x0 = x0; + this._y0 = y0; + this._x1 = x1; + this._y1 = y1; + this._root = undefined; +} + +function leaf_copy(leaf) { + var copy = {data: leaf.data}, next = copy; + while (leaf = leaf.next) next = next.next = {data: leaf.data}; + return copy; +} + +var treeProto = quadtree.prototype = Quadtree.prototype; + +treeProto.copy = function() { + var copy = new Quadtree(this._x, this._y, this._x0, this._y0, this._x1, this._y1), + node = this._root, + nodes, + child; + + if (!node) return copy; + + if (!node.length) return copy._root = leaf_copy(node), copy; + + nodes = [{source: node, target: copy._root = new Array(4)}]; + while (node = nodes.pop()) { + for (var i = 0; i < 4; ++i) { + if (child = node.source[i]) { + if (child.length) nodes.push({source: child, target: node.target[i] = new Array(4)}); + else node.target[i] = leaf_copy(child); + } + } + } + + return copy; +}; + +treeProto.add = tree_add; +treeProto.addAll = addAll; +treeProto.cover = tree_cover; +treeProto.data = tree_data; +treeProto.extent = tree_extent; +treeProto.find = tree_find; +treeProto.remove = tree_remove; +treeProto.removeAll = removeAll; +treeProto.root = tree_root; +treeProto.size = tree_size; +treeProto.visit = tree_visit; +treeProto.visitAfter = tree_visitAfter; +treeProto.x = tree_x; +treeProto.y = tree_y; + +function x(d) { + return d.x + d.vx; +} + +function y(d) { + return d.y + d.vy; +} + +function collide(radius) { + var nodes, + radii, + strength = 1, + iterations = 1; + + if (typeof radius !== "function") radius = 
constant$7(radius == null ? 1 : +radius); + + function force() { + var i, n = nodes.length, + tree, + node, + xi, + yi, + ri, + ri2; + + for (var k = 0; k < iterations; ++k) { + tree = quadtree(nodes, x, y).visitAfter(prepare); + for (i = 0; i < n; ++i) { + node = nodes[i]; + ri = radii[node.index], ri2 = ri * ri; + xi = node.x + node.vx; + yi = node.y + node.vy; + tree.visit(apply); + } + } + + function apply(quad, x0, y0, x1, y1) { + var data = quad.data, rj = quad.r, r = ri + rj; + if (data) { + if (data.index > node.index) { + var x = xi - data.x - data.vx, + y = yi - data.y - data.vy, + l = x * x + y * y; + if (l < r * r) { + if (x === 0) x = jiggle(), l += x * x; + if (y === 0) y = jiggle(), l += y * y; + l = (r - (l = Math.sqrt(l))) / l * strength; + node.vx += (x *= l) * (r = (rj *= rj) / (ri2 + rj)); + node.vy += (y *= l) * r; + data.vx -= x * (r = 1 - r); + data.vy -= y * r; + } + } + return; + } + return x0 > xi + r || x1 < xi - r || y0 > yi + r || y1 < yi - r; + } + } + + function prepare(quad) { + if (quad.data) return quad.r = radii[quad.data.index]; + for (var i = quad.r = 0; i < 4; ++i) { + if (quad[i] && quad[i].r > quad.r) { + quad.r = quad[i].r; + } + } + } + + function initialize() { + if (!nodes) return; + var i, n = nodes.length, node; + radii = new Array(n); + for (i = 0; i < n; ++i) node = nodes[i], radii[node.index] = +radius(node, i, nodes); + } + + force.initialize = function(_) { + nodes = _; + initialize(); + }; + + force.iterations = function(_) { + return arguments.length ? (iterations = +_, force) : iterations; + }; + + force.strength = function(_) { + return arguments.length ? (strength = +_, force) : strength; + }; + + force.radius = function(_) { + return arguments.length ? (radius = typeof _ === "function" ? 
_ : constant$7(+_), initialize(), force) : radius; + }; + + return force; +} + +function index(d) { + return d.index; +} + +function find(nodeById, nodeId) { + var node = nodeById.get(nodeId); + if (!node) throw new Error("missing: " + nodeId); + return node; +} + +function link(links) { + var id = index, + strength = defaultStrength, + strengths, + distance = constant$7(30), + distances, + nodes, + count, + bias, + iterations = 1; + + if (links == null) links = []; + + function defaultStrength(link) { + return 1 / Math.min(count[link.source.index], count[link.target.index]); + } + + function force(alpha) { + for (var k = 0, n = links.length; k < iterations; ++k) { + for (var i = 0, link, source, target, x, y, l, b; i < n; ++i) { + link = links[i], source = link.source, target = link.target; + x = target.x + target.vx - source.x - source.vx || jiggle(); + y = target.y + target.vy - source.y - source.vy || jiggle(); + l = Math.sqrt(x * x + y * y); + l = (l - distances[i]) / l * alpha * strengths[i]; + x *= l, y *= l; + target.vx -= x * (b = bias[i]); + target.vy -= y * b; + source.vx += x * (b = 1 - b); + source.vy += y * b; + } + } + } + + function initialize() { + if (!nodes) return; + + var i, + n = nodes.length, + m = links.length, + nodeById = map$1(nodes, id), + link; + + for (i = 0, count = new Array(n); i < m; ++i) { + link = links[i], link.index = i; + if (typeof link.source !== "object") link.source = find(nodeById, link.source); + if (typeof link.target !== "object") link.target = find(nodeById, link.target); + count[link.source.index] = (count[link.source.index] || 0) + 1; + count[link.target.index] = (count[link.target.index] || 0) + 1; + } + + for (i = 0, bias = new Array(m); i < m; ++i) { + link = links[i], bias[i] = count[link.source.index] / (count[link.source.index] + count[link.target.index]); + } + + strengths = new Array(m), initializeStrength(); + distances = new Array(m), initializeDistance(); + } + + function initializeStrength() { + if 
(!nodes) return; + + for (var i = 0, n = links.length; i < n; ++i) { + strengths[i] = +strength(links[i], i, links); + } + } + + function initializeDistance() { + if (!nodes) return; + + for (var i = 0, n = links.length; i < n; ++i) { + distances[i] = +distance(links[i], i, links); + } + } + + force.initialize = function(_) { + nodes = _; + initialize(); + }; + + force.links = function(_) { + return arguments.length ? (links = _, initialize(), force) : links; + }; + + force.id = function(_) { + return arguments.length ? (id = _, force) : id; + }; + + force.iterations = function(_) { + return arguments.length ? (iterations = +_, force) : iterations; + }; + + force.strength = function(_) { + return arguments.length ? (strength = typeof _ === "function" ? _ : constant$7(+_), initializeStrength(), force) : strength; + }; + + force.distance = function(_) { + return arguments.length ? (distance = typeof _ === "function" ? _ : constant$7(+_), initializeDistance(), force) : distance; + }; + + return force; +} + +function x$1(d) { + return d.x; +} + +function y$1(d) { + return d.y; +} + +var initialRadius = 10, + initialAngle = Math.PI * (3 - Math.sqrt(5)); + +function simulation(nodes) { + var simulation, + alpha = 1, + alphaMin = 0.001, + alphaDecay = 1 - Math.pow(alphaMin, 1 / 300), + alphaTarget = 0, + velocityDecay = 0.6, + forces = map$1(), + stepper = timer(step), + event = dispatch("tick", "end"); + + if (nodes == null) nodes = []; + + function step() { + tick(); + event.call("tick", simulation); + if (alpha < alphaMin) { + stepper.stop(); + event.call("end", simulation); + } + } + + function tick(iterations) { + var i, n = nodes.length, node; + + if (iterations === undefined) iterations = 1; + + for (var k = 0; k < iterations; ++k) { + alpha += (alphaTarget - alpha) * alphaDecay; + + forces.each(function (force) { + force(alpha); + }); + + for (i = 0; i < n; ++i) { + node = nodes[i]; + if (node.fx == null) node.x += node.vx *= velocityDecay; + else node.x = 
node.fx, node.vx = 0; + if (node.fy == null) node.y += node.vy *= velocityDecay; + else node.y = node.fy, node.vy = 0; + } + } + + return simulation; + } + + function initializeNodes() { + for (var i = 0, n = nodes.length, node; i < n; ++i) { + node = nodes[i], node.index = i; + if (node.fx != null) node.x = node.fx; + if (node.fy != null) node.y = node.fy; + if (isNaN(node.x) || isNaN(node.y)) { + var radius = initialRadius * Math.sqrt(i), angle = i * initialAngle; + node.x = radius * Math.cos(angle); + node.y = radius * Math.sin(angle); + } + if (isNaN(node.vx) || isNaN(node.vy)) { + node.vx = node.vy = 0; + } + } + } + + function initializeForce(force) { + if (force.initialize) force.initialize(nodes); + return force; + } + + initializeNodes(); + + return simulation = { + tick: tick, + + restart: function() { + return stepper.restart(step), simulation; + }, + + stop: function() { + return stepper.stop(), simulation; + }, + + nodes: function(_) { + return arguments.length ? (nodes = _, initializeNodes(), forces.each(initializeForce), simulation) : nodes; + }, + + alpha: function(_) { + return arguments.length ? (alpha = +_, simulation) : alpha; + }, + + alphaMin: function(_) { + return arguments.length ? (alphaMin = +_, simulation) : alphaMin; + }, + + alphaDecay: function(_) { + return arguments.length ? (alphaDecay = +_, simulation) : +alphaDecay; + }, + + alphaTarget: function(_) { + return arguments.length ? (alphaTarget = +_, simulation) : alphaTarget; + }, + + velocityDecay: function(_) { + return arguments.length ? (velocityDecay = 1 - _, simulation) : 1 - velocityDecay; + }, + + force: function(name, _) { + return arguments.length > 1 ? ((_ == null ? 
forces.remove(name) : forces.set(name, initializeForce(_))), simulation) : forces.get(name); + }, + + find: function(x, y, radius) { + var i = 0, + n = nodes.length, + dx, + dy, + d2, + node, + closest; + + if (radius == null) radius = Infinity; + else radius *= radius; + + for (i = 0; i < n; ++i) { + node = nodes[i]; + dx = x - node.x; + dy = y - node.y; + d2 = dx * dx + dy * dy; + if (d2 < radius) closest = node, radius = d2; + } + + return closest; + }, + + on: function(name, _) { + return arguments.length > 1 ? (event.on(name, _), simulation) : event.on(name); + } + }; +} + +function manyBody() { + var nodes, + node, + alpha, + strength = constant$7(-30), + strengths, + distanceMin2 = 1, + distanceMax2 = Infinity, + theta2 = 0.81; + + function force(_) { + var i, n = nodes.length, tree = quadtree(nodes, x$1, y$1).visitAfter(accumulate); + for (alpha = _, i = 0; i < n; ++i) node = nodes[i], tree.visit(apply); + } + + function initialize() { + if (!nodes) return; + var i, n = nodes.length, node; + strengths = new Array(n); + for (i = 0; i < n; ++i) node = nodes[i], strengths[node.index] = +strength(node, i, nodes); + } + + function accumulate(quad) { + var strength = 0, q, c, weight = 0, x, y, i; + + // For internal nodes, accumulate forces from child quadrants. + if (quad.length) { + for (x = y = i = 0; i < 4; ++i) { + if ((q = quad[i]) && (c = Math.abs(q.value))) { + strength += q.value, weight += c, x += c * q.x, y += c * q.y; + } + } + quad.x = x / weight; + quad.y = y / weight; + } + + // For leaf nodes, accumulate forces from coincident quadrants. + else { + q = quad; + q.x = q.data.x; + q.y = q.data.y; + do strength += strengths[q.data.index]; + while (q = q.next); + } + + quad.value = strength; + } + + function apply(quad, x1, _, x2) { + if (!quad.value) return true; + + var x = quad.x - node.x, + y = quad.y - node.y, + w = x2 - x1, + l = x * x + y * y; + + // Apply the Barnes-Hut approximation if possible. 
+ // Limit forces for very close nodes; randomize direction if coincident. + if (w * w / theta2 < l) { + if (l < distanceMax2) { + if (x === 0) x = jiggle(), l += x * x; + if (y === 0) y = jiggle(), l += y * y; + if (l < distanceMin2) l = Math.sqrt(distanceMin2 * l); + node.vx += x * quad.value * alpha / l; + node.vy += y * quad.value * alpha / l; + } + return true; + } + + // Otherwise, process points directly. + else if (quad.length || l >= distanceMax2) return; + + // Limit forces for very close nodes; randomize direction if coincident. + if (quad.data !== node || quad.next) { + if (x === 0) x = jiggle(), l += x * x; + if (y === 0) y = jiggle(), l += y * y; + if (l < distanceMin2) l = Math.sqrt(distanceMin2 * l); + } + + do if (quad.data !== node) { + w = strengths[quad.data.index] * alpha / l; + node.vx += x * w; + node.vy += y * w; + } while (quad = quad.next); + } + + force.initialize = function(_) { + nodes = _; + initialize(); + }; + + force.strength = function(_) { + return arguments.length ? (strength = typeof _ === "function" ? _ : constant$7(+_), initialize(), force) : strength; + }; + + force.distanceMin = function(_) { + return arguments.length ? (distanceMin2 = _ * _, force) : Math.sqrt(distanceMin2); + }; + + force.distanceMax = function(_) { + return arguments.length ? (distanceMax2 = _ * _, force) : Math.sqrt(distanceMax2); + }; + + force.theta = function(_) { + return arguments.length ? 
(theta2 = _ * _, force) : Math.sqrt(theta2); + }; + + return force; +} + +function radial(radius, x, y) { + var nodes, + strength = constant$7(0.1), + strengths, + radiuses; + + if (typeof radius !== "function") radius = constant$7(+radius); + if (x == null) x = 0; + if (y == null) y = 0; + + function force(alpha) { + for (var i = 0, n = nodes.length; i < n; ++i) { + var node = nodes[i], + dx = node.x - x || 1e-6, + dy = node.y - y || 1e-6, + r = Math.sqrt(dx * dx + dy * dy), + k = (radiuses[i] - r) * strengths[i] * alpha / r; + node.vx += dx * k; + node.vy += dy * k; + } + } + + function initialize() { + if (!nodes) return; + var i, n = nodes.length; + strengths = new Array(n); + radiuses = new Array(n); + for (i = 0; i < n; ++i) { + radiuses[i] = +radius(nodes[i], i, nodes); + strengths[i] = isNaN(radiuses[i]) ? 0 : +strength(nodes[i], i, nodes); + } + } + + force.initialize = function(_) { + nodes = _, initialize(); + }; + + force.strength = function(_) { + return arguments.length ? (strength = typeof _ === "function" ? _ : constant$7(+_), initialize(), force) : strength; + }; + + force.radius = function(_) { + return arguments.length ? (radius = typeof _ === "function" ? _ : constant$7(+_), initialize(), force) : radius; + }; + + force.x = function(_) { + return arguments.length ? (x = +_, force) : x; + }; + + force.y = function(_) { + return arguments.length ? (y = +_, force) : y; + }; + + return force; +} + +function x$2(x) { + var strength = constant$7(0.1), + nodes, + strengths, + xz; + + if (typeof x !== "function") x = constant$7(x == null ? 0 : +x); + + function force(alpha) { + for (var i = 0, n = nodes.length, node; i < n; ++i) { + node = nodes[i], node.vx += (xz[i] - node.x) * strengths[i] * alpha; + } + } + + function initialize() { + if (!nodes) return; + var i, n = nodes.length; + strengths = new Array(n); + xz = new Array(n); + for (i = 0; i < n; ++i) { + strengths[i] = isNaN(xz[i] = +x(nodes[i], i, nodes)) ? 
0 : +strength(nodes[i], i, nodes); + } + } + + force.initialize = function(_) { + nodes = _; + initialize(); + }; + + force.strength = function(_) { + return arguments.length ? (strength = typeof _ === "function" ? _ : constant$7(+_), initialize(), force) : strength; + }; + + force.x = function(_) { + return arguments.length ? (x = typeof _ === "function" ? _ : constant$7(+_), initialize(), force) : x; + }; + + return force; +} + +function y$2(y) { + var strength = constant$7(0.1), + nodes, + strengths, + yz; + + if (typeof y !== "function") y = constant$7(y == null ? 0 : +y); + + function force(alpha) { + for (var i = 0, n = nodes.length, node; i < n; ++i) { + node = nodes[i], node.vy += (yz[i] - node.y) * strengths[i] * alpha; + } + } + + function initialize() { + if (!nodes) return; + var i, n = nodes.length; + strengths = new Array(n); + yz = new Array(n); + for (i = 0; i < n; ++i) { + strengths[i] = isNaN(yz[i] = +y(nodes[i], i, nodes)) ? 0 : +strength(nodes[i], i, nodes); + } + } + + force.initialize = function(_) { + nodes = _; + initialize(); + }; + + force.strength = function(_) { + return arguments.length ? (strength = typeof _ === "function" ? _ : constant$7(+_), initialize(), force) : strength; + }; + + force.y = function(_) { + return arguments.length ? (y = typeof _ === "function" ? _ : constant$7(+_), initialize(), force) : y; + }; + + return force; +} + +// Computes the decimal coefficient and exponent of the specified number x with +// significant digits p, where x is positive and p is in [1, 21] or undefined. +// For example, formatDecimal(1.23) returns ["123", 0]. +function formatDecimal(x, p) { + if ((i = (x = p ? x.toExponential(p - 1) : x.toExponential()).indexOf("e")) < 0) return null; // NaN, ±Infinity + var i, coefficient = x.slice(0, i); + + // The string returned by toExponential either has the form \d\.\d+e[-+]\d+ + // (e.g., 1.2e+3) or the form \de[-+]\d+ (e.g., 1e+3). + return [ + coefficient.length > 1 ? 
coefficient[0] + coefficient.slice(2) : coefficient, + +x.slice(i + 1) + ]; +} + +function exponent$1(x) { + return x = formatDecimal(Math.abs(x)), x ? x[1] : NaN; +} + +function formatGroup(grouping, thousands) { + return function(value, width) { + var i = value.length, + t = [], + j = 0, + g = grouping[0], + length = 0; + + while (i > 0 && g > 0) { + if (length + g + 1 > width) g = Math.max(1, width - length); + t.push(value.substring(i -= g, i + g)); + if ((length += g + 1) > width) break; + g = grouping[j = (j + 1) % grouping.length]; + } + + return t.reverse().join(thousands); + }; +} + +function formatNumerals(numerals) { + return function(value) { + return value.replace(/[0-9]/g, function(i) { + return numerals[+i]; + }); + }; +} + +// [[fill]align][sign][symbol][0][width][,][.precision][~][type] +var re = /^(?:(.)?([<>=^]))?([+\-( ])?([$#])?(0)?(\d+)?(,)?(\.\d+)?(~)?([a-z%])?$/i; + +function formatSpecifier(specifier) { + if (!(match = re.exec(specifier))) throw new Error("invalid format: " + specifier); + var match; + return new FormatSpecifier({ + fill: match[1], + align: match[2], + sign: match[3], + symbol: match[4], + zero: match[5], + width: match[6], + comma: match[7], + precision: match[8] && match[8].slice(1), + trim: match[9], + type: match[10] + }); +} + +formatSpecifier.prototype = FormatSpecifier.prototype; // instanceof + +function FormatSpecifier(specifier) { + this.fill = specifier.fill === undefined ? " " : specifier.fill + ""; + this.align = specifier.align === undefined ? ">" : specifier.align + ""; + this.sign = specifier.sign === undefined ? "-" : specifier.sign + ""; + this.symbol = specifier.symbol === undefined ? "" : specifier.symbol + ""; + this.zero = !!specifier.zero; + this.width = specifier.width === undefined ? undefined : +specifier.width; + this.comma = !!specifier.comma; + this.precision = specifier.precision === undefined ? 
undefined : +specifier.precision; + this.trim = !!specifier.trim; + this.type = specifier.type === undefined ? "" : specifier.type + ""; +} + +FormatSpecifier.prototype.toString = function() { + return this.fill + + this.align + + this.sign + + this.symbol + + (this.zero ? "0" : "") + + (this.width === undefined ? "" : Math.max(1, this.width | 0)) + + (this.comma ? "," : "") + + (this.precision === undefined ? "" : "." + Math.max(0, this.precision | 0)) + + (this.trim ? "~" : "") + + this.type; +}; + +// Trims insignificant zeros, e.g., replaces 1.2000k with 1.2k. +function formatTrim(s) { + out: for (var n = s.length, i = 1, i0 = -1, i1; i < n; ++i) { + switch (s[i]) { + case ".": i0 = i1 = i; break; + case "0": if (i0 === 0) i0 = i; i1 = i; break; + default: if (!+s[i]) break out; if (i0 > 0) i0 = 0; break; + } + } + return i0 > 0 ? s.slice(0, i0) + s.slice(i1 + 1) : s; +} + +var prefixExponent; + +function formatPrefixAuto(x, p) { + var d = formatDecimal(x, p); + if (!d) return x + ""; + var coefficient = d[0], + exponent = d[1], + i = exponent - (prefixExponent = Math.max(-8, Math.min(8, Math.floor(exponent / 3))) * 3) + 1, + n = coefficient.length; + return i === n ? coefficient + : i > n ? coefficient + new Array(i - n + 1).join("0") + : i > 0 ? coefficient.slice(0, i) + "." + coefficient.slice(i) + : "0." + new Array(1 - i).join("0") + formatDecimal(x, Math.max(0, p + i - 1))[0]; // less than 1y! +} + +function formatRounded(x, p) { + var d = formatDecimal(x, p); + if (!d) return x + ""; + var coefficient = d[0], + exponent = d[1]; + return exponent < 0 ? "0." + new Array(-exponent).join("0") + coefficient + : coefficient.length > exponent + 1 ? coefficient.slice(0, exponent + 1) + "." 
+ coefficient.slice(exponent + 1) + : coefficient + new Array(exponent - coefficient.length + 2).join("0"); +} + +var formatTypes = { + "%": function(x, p) { return (x * 100).toFixed(p); }, + "b": function(x) { return Math.round(x).toString(2); }, + "c": function(x) { return x + ""; }, + "d": function(x) { return Math.round(x).toString(10); }, + "e": function(x, p) { return x.toExponential(p); }, + "f": function(x, p) { return x.toFixed(p); }, + "g": function(x, p) { return x.toPrecision(p); }, + "o": function(x) { return Math.round(x).toString(8); }, + "p": function(x, p) { return formatRounded(x * 100, p); }, + "r": formatRounded, + "s": formatPrefixAuto, + "X": function(x) { return Math.round(x).toString(16).toUpperCase(); }, + "x": function(x) { return Math.round(x).toString(16); } +}; + +function identity$3(x) { + return x; +} + +var map$2 = Array.prototype.map, + prefixes = ["y","z","a","f","p","n","\xB5","m","","k","M","G","T","P","E","Z","Y"]; + +function formatLocale(locale) { + var group = locale.grouping === undefined || locale.thousands === undefined ? identity$3 : formatGroup(map$2.call(locale.grouping, Number), locale.thousands + ""), + currencyPrefix = locale.currency === undefined ? "" : locale.currency[0] + "", + currencySuffix = locale.currency === undefined ? "" : locale.currency[1] + "", + decimal = locale.decimal === undefined ? "." : locale.decimal + "", + numerals = locale.numerals === undefined ? identity$3 : formatNumerals(map$2.call(locale.numerals, String)), + percent = locale.percent === undefined ? "%" : locale.percent + "", + minus = locale.minus === undefined ? "-" : locale.minus + "", + nan = locale.nan === undefined ? 
"NaN" : locale.nan + ""; + + function newFormat(specifier) { + specifier = formatSpecifier(specifier); + + var fill = specifier.fill, + align = specifier.align, + sign = specifier.sign, + symbol = specifier.symbol, + zero = specifier.zero, + width = specifier.width, + comma = specifier.comma, + precision = specifier.precision, + trim = specifier.trim, + type = specifier.type; + + // The "n" type is an alias for ",g". + if (type === "n") comma = true, type = "g"; + + // The "" type, and any invalid type, is an alias for ".12~g". + else if (!formatTypes[type]) precision === undefined && (precision = 12), trim = true, type = "g"; + + // If zero fill is specified, padding goes after sign and before digits. + if (zero || (fill === "0" && align === "=")) zero = true, fill = "0", align = "="; + + // Compute the prefix and suffix. + // For SI-prefix, the suffix is lazily computed. + var prefix = symbol === "$" ? currencyPrefix : symbol === "#" && /[boxX]/.test(type) ? "0" + type.toLowerCase() : "", + suffix = symbol === "$" ? currencySuffix : /[%p]/.test(type) ? percent : ""; + + // What format function should we use? + // Is this an integer type? + // Can this type generate exponential notation? + var formatType = formatTypes[type], + maybeSuffix = /[defgprs%]/.test(type); + + // Set the default precision if not specified, + // or clamp the specified precision to the supported range. + // For significant precision, it must be in [1, 21]. + // For fixed precision, it must be in [0, 20]. + precision = precision === undefined ? 6 + : /[gprs]/.test(type) ? Math.max(1, Math.min(21, precision)) + : Math.max(0, Math.min(20, precision)); + + function format(value) { + var valuePrefix = prefix, + valueSuffix = suffix, + i, n, c; + + if (type === "c") { + valueSuffix = formatType(value) + valueSuffix; + value = ""; + } else { + value = +value; + + // Determine the sign. -0 is not less than 0, but 1 / -0 is! 
+ var valueNegative = value < 0 || 1 / value < 0; + + // Perform the initial formatting. + value = isNaN(value) ? nan : formatType(Math.abs(value), precision); + + // Trim insignificant zeros. + if (trim) value = formatTrim(value); + + // If a negative value rounds to zero after formatting, and no explicit positive sign is requested, hide the sign. + if (valueNegative && +value === 0 && sign !== "+") valueNegative = false; + + // Compute the prefix and suffix. + valuePrefix = (valueNegative ? (sign === "(" ? sign : minus) : sign === "-" || sign === "(" ? "" : sign) + valuePrefix; + valueSuffix = (type === "s" ? prefixes[8 + prefixExponent / 3] : "") + valueSuffix + (valueNegative && sign === "(" ? ")" : ""); + + // Break the formatted value into the integer “value” part that can be + // grouped, and fractional or exponential “suffix” part that is not. + if (maybeSuffix) { + i = -1, n = value.length; + while (++i < n) { + if (c = value.charCodeAt(i), 48 > c || c > 57) { + valueSuffix = (c === 46 ? decimal + value.slice(i + 1) : value.slice(i)) + valueSuffix; + value = value.slice(0, i); + break; + } + } + } + } + + // If the fill character is not "0", grouping is applied before padding. + if (comma && !zero) value = group(value, Infinity); + + // Compute the padding. + var length = valuePrefix.length + value.length + valueSuffix.length, + padding = length < width ? new Array(width - length + 1).join(fill) : ""; + + // If the fill character is "0", grouping is applied after padding. + if (comma && zero) value = group(padding + value, padding.length ? width - valueSuffix.length : Infinity), padding = ""; + + // Reconstruct the final output based on the desired alignment. 
+ switch (align) { + case "<": value = valuePrefix + value + valueSuffix + padding; break; + case "=": value = valuePrefix + padding + value + valueSuffix; break; + case "^": value = padding.slice(0, length = padding.length >> 1) + valuePrefix + value + valueSuffix + padding.slice(length); break; + default: value = padding + valuePrefix + value + valueSuffix; break; + } + + return numerals(value); + } + + format.toString = function() { + return specifier + ""; + }; + + return format; + } + + function formatPrefix(specifier, value) { + var f = newFormat((specifier = formatSpecifier(specifier), specifier.type = "f", specifier)), + e = Math.max(-8, Math.min(8, Math.floor(exponent$1(value) / 3))) * 3, + k = Math.pow(10, -e), + prefix = prefixes[8 + e / 3]; + return function(value) { + return f(k * value) + prefix; + }; + } + + return { + format: newFormat, + formatPrefix: formatPrefix + }; +} + +var locale; + +defaultLocale({ + decimal: ".", + thousands: ",", + grouping: [3], + currency: ["$", ""], + minus: "-" +}); + +function defaultLocale(definition) { + locale = formatLocale(definition); + exports.format = locale.format; + exports.formatPrefix = locale.formatPrefix; + return locale; +} + +function precisionFixed(step) { + return Math.max(0, -exponent$1(Math.abs(step))); +} + +function precisionPrefix(step, value) { + return Math.max(0, Math.max(-8, Math.min(8, Math.floor(exponent$1(value) / 3))) * 3 - exponent$1(Math.abs(step))); +} + +function precisionRound(step, max) { + step = Math.abs(step), max = Math.abs(max) - step; + return Math.max(0, exponent$1(max) - exponent$1(step)) + 1; +} + +// Adds floating point numbers with twice the normal precision. +// Reference: J. R. Shewchuk, Adaptive Precision Floating-Point Arithmetic and +// Fast Robust Geometric Predicates, Discrete & Computational Geometry 18(3) +// 305–363 (1997). +// Code adapted from GeographicLib by Charles F. F. 
Karney, +// http://geographiclib.sourceforge.net/ + +function adder() { + return new Adder; +} + +function Adder() { + this.reset(); +} + +Adder.prototype = { + constructor: Adder, + reset: function() { + this.s = // rounded value + this.t = 0; // exact error + }, + add: function(y) { + add$1(temp, y, this.t); + add$1(this, temp.s, this.s); + if (this.s) this.t += temp.t; + else this.s = temp.t; + }, + valueOf: function() { + return this.s; + } +}; + +var temp = new Adder; + +function add$1(adder, a, b) { + var x = adder.s = a + b, + bv = x - a, + av = x - bv; + adder.t = (a - av) + (b - bv); +} + +var epsilon$2 = 1e-6; +var epsilon2$1 = 1e-12; +var pi$3 = Math.PI; +var halfPi$2 = pi$3 / 2; +var quarterPi = pi$3 / 4; +var tau$3 = pi$3 * 2; + +var degrees$1 = 180 / pi$3; +var radians = pi$3 / 180; + +var abs = Math.abs; +var atan = Math.atan; +var atan2 = Math.atan2; +var cos$1 = Math.cos; +var ceil = Math.ceil; +var exp = Math.exp; +var log = Math.log; +var pow = Math.pow; +var sin$1 = Math.sin; +var sign = Math.sign || function(x) { return x > 0 ? 1 : x < 0 ? -1 : 0; }; +var sqrt = Math.sqrt; +var tan = Math.tan; + +function acos(x) { + return x > 1 ? 0 : x < -1 ? pi$3 : Math.acos(x); +} + +function asin(x) { + return x > 1 ? halfPi$2 : x < -1 ? 
-halfPi$2 : Math.asin(x); +} + +function haversin(x) { + return (x = sin$1(x / 2)) * x; +} + +function noop$2() {} + +function streamGeometry(geometry, stream) { + if (geometry && streamGeometryType.hasOwnProperty(geometry.type)) { + streamGeometryType[geometry.type](geometry, stream); + } +} + +var streamObjectType = { + Feature: function(object, stream) { + streamGeometry(object.geometry, stream); + }, + FeatureCollection: function(object, stream) { + var features = object.features, i = -1, n = features.length; + while (++i < n) streamGeometry(features[i].geometry, stream); + } +}; + +var streamGeometryType = { + Sphere: function(object, stream) { + stream.sphere(); + }, + Point: function(object, stream) { + object = object.coordinates; + stream.point(object[0], object[1], object[2]); + }, + MultiPoint: function(object, stream) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) object = coordinates[i], stream.point(object[0], object[1], object[2]); + }, + LineString: function(object, stream) { + streamLine(object.coordinates, stream, 0); + }, + MultiLineString: function(object, stream) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) streamLine(coordinates[i], stream, 0); + }, + Polygon: function(object, stream) { + streamPolygon(object.coordinates, stream); + }, + MultiPolygon: function(object, stream) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) streamPolygon(coordinates[i], stream); + }, + GeometryCollection: function(object, stream) { + var geometries = object.geometries, i = -1, n = geometries.length; + while (++i < n) streamGeometry(geometries[i], stream); + } +}; + +function streamLine(coordinates, stream, closed) { + var i = -1, n = coordinates.length - closed, coordinate; + stream.lineStart(); + while (++i < n) coordinate = coordinates[i], stream.point(coordinate[0], coordinate[1], coordinate[2]); + stream.lineEnd(); +} + 
+function streamPolygon(coordinates, stream) { + var i = -1, n = coordinates.length; + stream.polygonStart(); + while (++i < n) streamLine(coordinates[i], stream, 1); + stream.polygonEnd(); +} + +function geoStream(object, stream) { + if (object && streamObjectType.hasOwnProperty(object.type)) { + streamObjectType[object.type](object, stream); + } else { + streamGeometry(object, stream); + } +} + +var areaRingSum = adder(); + +var areaSum = adder(), + lambda00, + phi00, + lambda0, + cosPhi0, + sinPhi0; + +var areaStream = { + point: noop$2, + lineStart: noop$2, + lineEnd: noop$2, + polygonStart: function() { + areaRingSum.reset(); + areaStream.lineStart = areaRingStart; + areaStream.lineEnd = areaRingEnd; + }, + polygonEnd: function() { + var areaRing = +areaRingSum; + areaSum.add(areaRing < 0 ? tau$3 + areaRing : areaRing); + this.lineStart = this.lineEnd = this.point = noop$2; + }, + sphere: function() { + areaSum.add(tau$3); + } +}; + +function areaRingStart() { + areaStream.point = areaPointFirst; +} + +function areaRingEnd() { + areaPoint(lambda00, phi00); +} + +function areaPointFirst(lambda, phi) { + areaStream.point = areaPoint; + lambda00 = lambda, phi00 = phi; + lambda *= radians, phi *= radians; + lambda0 = lambda, cosPhi0 = cos$1(phi = phi / 2 + quarterPi), sinPhi0 = sin$1(phi); +} + +function areaPoint(lambda, phi) { + lambda *= radians, phi *= radians; + phi = phi / 2 + quarterPi; // half the angular distance from south pole + + // Spherical excess E for a spherical triangle with vertices: south pole, + // previous point, current point. Uses a formula derived from Cagnoli’s + // theorem. See Todhunter, Spherical Trig. (1871), Sec. 103, Eq. (2). + var dLambda = lambda - lambda0, + sdLambda = dLambda >= 0 ? 
1 : -1, + adLambda = sdLambda * dLambda, + cosPhi = cos$1(phi), + sinPhi = sin$1(phi), + k = sinPhi0 * sinPhi, + u = cosPhi0 * cosPhi + k * cos$1(adLambda), + v = k * sdLambda * sin$1(adLambda); + areaRingSum.add(atan2(v, u)); + + // Advance the previous points. + lambda0 = lambda, cosPhi0 = cosPhi, sinPhi0 = sinPhi; +} + +function area$1(object) { + areaSum.reset(); + geoStream(object, areaStream); + return areaSum * 2; +} + +function spherical(cartesian) { + return [atan2(cartesian[1], cartesian[0]), asin(cartesian[2])]; +} + +function cartesian(spherical) { + var lambda = spherical[0], phi = spherical[1], cosPhi = cos$1(phi); + return [cosPhi * cos$1(lambda), cosPhi * sin$1(lambda), sin$1(phi)]; +} + +function cartesianDot(a, b) { + return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]; +} + +function cartesianCross(a, b) { + return [a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0]]; +} + +// TODO return a +function cartesianAddInPlace(a, b) { + a[0] += b[0], a[1] += b[1], a[2] += b[2]; +} + +function cartesianScale(vector, k) { + return [vector[0] * k, vector[1] * k, vector[2] * k]; +} + +// TODO return d +function cartesianNormalizeInPlace(d) { + var l = sqrt(d[0] * d[0] + d[1] * d[1] + d[2] * d[2]); + d[0] /= l, d[1] /= l, d[2] /= l; +} + +var lambda0$1, phi0, lambda1, phi1, // bounds + lambda2, // previous lambda-coordinate + lambda00$1, phi00$1, // first point + p0, // previous 3D point + deltaSum = adder(), + ranges, + range; + +var boundsStream = { + point: boundsPoint, + lineStart: boundsLineStart, + lineEnd: boundsLineEnd, + polygonStart: function() { + boundsStream.point = boundsRingPoint; + boundsStream.lineStart = boundsRingStart; + boundsStream.lineEnd = boundsRingEnd; + deltaSum.reset(); + areaStream.polygonStart(); + }, + polygonEnd: function() { + areaStream.polygonEnd(); + boundsStream.point = boundsPoint; + boundsStream.lineStart = boundsLineStart; + boundsStream.lineEnd = boundsLineEnd; + if (areaRingSum < 0) lambda0$1 
= -(lambda1 = 180), phi0 = -(phi1 = 90); + else if (deltaSum > epsilon$2) phi1 = 90; + else if (deltaSum < -epsilon$2) phi0 = -90; + range[0] = lambda0$1, range[1] = lambda1; + }, + sphere: function() { + lambda0$1 = -(lambda1 = 180), phi0 = -(phi1 = 90); + } +}; + +function boundsPoint(lambda, phi) { + ranges.push(range = [lambda0$1 = lambda, lambda1 = lambda]); + if (phi < phi0) phi0 = phi; + if (phi > phi1) phi1 = phi; +} + +function linePoint(lambda, phi) { + var p = cartesian([lambda * radians, phi * radians]); + if (p0) { + var normal = cartesianCross(p0, p), + equatorial = [normal[1], -normal[0], 0], + inflection = cartesianCross(equatorial, normal); + cartesianNormalizeInPlace(inflection); + inflection = spherical(inflection); + var delta = lambda - lambda2, + sign = delta > 0 ? 1 : -1, + lambdai = inflection[0] * degrees$1 * sign, + phii, + antimeridian = abs(delta) > 180; + if (antimeridian ^ (sign * lambda2 < lambdai && lambdai < sign * lambda)) { + phii = inflection[1] * degrees$1; + if (phii > phi1) phi1 = phii; + } else if (lambdai = (lambdai + 360) % 360 - 180, antimeridian ^ (sign * lambda2 < lambdai && lambdai < sign * lambda)) { + phii = -inflection[1] * degrees$1; + if (phii < phi0) phi0 = phii; + } else { + if (phi < phi0) phi0 = phi; + if (phi > phi1) phi1 = phi; + } + if (antimeridian) { + if (lambda < lambda2) { + if (angle(lambda0$1, lambda) > angle(lambda0$1, lambda1)) lambda1 = lambda; + } else { + if (angle(lambda, lambda1) > angle(lambda0$1, lambda1)) lambda0$1 = lambda; + } + } else { + if (lambda1 >= lambda0$1) { + if (lambda < lambda0$1) lambda0$1 = lambda; + if (lambda > lambda1) lambda1 = lambda; + } else { + if (lambda > lambda2) { + if (angle(lambda0$1, lambda) > angle(lambda0$1, lambda1)) lambda1 = lambda; + } else { + if (angle(lambda, lambda1) > angle(lambda0$1, lambda1)) lambda0$1 = lambda; + } + } + } + } else { + ranges.push(range = [lambda0$1 = lambda, lambda1 = lambda]); + } + if (phi < phi0) phi0 = phi; + if (phi > phi1) 
phi1 = phi; + p0 = p, lambda2 = lambda; +} + +function boundsLineStart() { + boundsStream.point = linePoint; +} + +function boundsLineEnd() { + range[0] = lambda0$1, range[1] = lambda1; + boundsStream.point = boundsPoint; + p0 = null; +} + +function boundsRingPoint(lambda, phi) { + if (p0) { + var delta = lambda - lambda2; + deltaSum.add(abs(delta) > 180 ? delta + (delta > 0 ? 360 : -360) : delta); + } else { + lambda00$1 = lambda, phi00$1 = phi; + } + areaStream.point(lambda, phi); + linePoint(lambda, phi); +} + +function boundsRingStart() { + areaStream.lineStart(); +} + +function boundsRingEnd() { + boundsRingPoint(lambda00$1, phi00$1); + areaStream.lineEnd(); + if (abs(deltaSum) > epsilon$2) lambda0$1 = -(lambda1 = 180); + range[0] = lambda0$1, range[1] = lambda1; + p0 = null; +} + +// Finds the left-right distance between two longitudes. +// This is almost the same as (lambda1 - lambda0 + 360°) % 360°, except that we want +// the distance between ±180° to be 360°. +function angle(lambda0, lambda1) { + return (lambda1 -= lambda0) < 0 ? lambda1 + 360 : lambda1; +} + +function rangeCompare(a, b) { + return a[0] - b[0]; +} + +function rangeContains(range, x) { + return range[0] <= range[1] ? range[0] <= x && x <= range[1] : x < range[0] || range[1] < x; +} + +function bounds(feature) { + var i, n, a, b, merged, deltaMax, delta; + + phi1 = lambda1 = -(lambda0$1 = phi0 = Infinity); + ranges = []; + geoStream(feature, boundsStream); + + // First, sort ranges by their minimum longitudes. + if (n = ranges.length) { + ranges.sort(rangeCompare); + + // Then, merge any ranges that overlap. + for (i = 1, a = ranges[0], merged = [a]; i < n; ++i) { + b = ranges[i]; + if (rangeContains(a, b[0]) || rangeContains(a, b[1])) { + if (angle(a[0], b[1]) > angle(a[0], a[1])) a[1] = b[1]; + if (angle(b[0], a[1]) > angle(a[0], a[1])) a[0] = b[0]; + } else { + merged.push(a = b); + } + } + + // Finally, find the largest gap between the merged ranges. 
+ // The final bounding box will be the inverse of this gap. + for (deltaMax = -Infinity, n = merged.length - 1, i = 0, a = merged[n]; i <= n; a = b, ++i) { + b = merged[i]; + if ((delta = angle(a[1], b[0])) > deltaMax) deltaMax = delta, lambda0$1 = b[0], lambda1 = a[1]; + } + } + + ranges = range = null; + + return lambda0$1 === Infinity || phi0 === Infinity + ? [[NaN, NaN], [NaN, NaN]] + : [[lambda0$1, phi0], [lambda1, phi1]]; +} + +var W0, W1, + X0, Y0, Z0, + X1, Y1, Z1, + X2, Y2, Z2, + lambda00$2, phi00$2, // first point + x0, y0, z0; // previous point + +var centroidStream = { + sphere: noop$2, + point: centroidPoint, + lineStart: centroidLineStart, + lineEnd: centroidLineEnd, + polygonStart: function() { + centroidStream.lineStart = centroidRingStart; + centroidStream.lineEnd = centroidRingEnd; + }, + polygonEnd: function() { + centroidStream.lineStart = centroidLineStart; + centroidStream.lineEnd = centroidLineEnd; + } +}; + +// Arithmetic mean of Cartesian vectors. +function centroidPoint(lambda, phi) { + lambda *= radians, phi *= radians; + var cosPhi = cos$1(phi); + centroidPointCartesian(cosPhi * cos$1(lambda), cosPhi * sin$1(lambda), sin$1(phi)); +} + +function centroidPointCartesian(x, y, z) { + ++W0; + X0 += (x - X0) / W0; + Y0 += (y - Y0) / W0; + Z0 += (z - Z0) / W0; +} + +function centroidLineStart() { + centroidStream.point = centroidLinePointFirst; +} + +function centroidLinePointFirst(lambda, phi) { + lambda *= radians, phi *= radians; + var cosPhi = cos$1(phi); + x0 = cosPhi * cos$1(lambda); + y0 = cosPhi * sin$1(lambda); + z0 = sin$1(phi); + centroidStream.point = centroidLinePoint; + centroidPointCartesian(x0, y0, z0); +} + +function centroidLinePoint(lambda, phi) { + lambda *= radians, phi *= radians; + var cosPhi = cos$1(phi), + x = cosPhi * cos$1(lambda), + y = cosPhi * sin$1(lambda), + z = sin$1(phi), + w = atan2(sqrt((w = y0 * z - z0 * y) * w + (w = z0 * x - x0 * z) * w + (w = x0 * y - y0 * x) * w), x0 * x + y0 * y + z0 * z); + W1 += w; + 
 X1 += w * (x0 + (x0 = x)); + Y1 += w * (y0 + (y0 = y)); + Z1 += w * (z0 + (z0 = z)); + centroidPointCartesian(x0, y0, z0); +} + +function centroidLineEnd() { + centroidStream.point = centroidPoint; +} + +// See J. E. Brock, The Inertia Tensor for a Spherical Triangle, +// J. Applied Mechanics 42, 239 (1975). +function centroidRingStart() { + centroidStream.point = centroidRingPointFirst; +} + +function centroidRingEnd() { + centroidRingPoint(lambda00$2, phi00$2); + centroidStream.point = centroidPoint; +} + +function centroidRingPointFirst(lambda, phi) { + lambda00$2 = lambda, phi00$2 = phi; + lambda *= radians, phi *= radians; + centroidStream.point = centroidRingPoint; + var cosPhi = cos$1(phi); + x0 = cosPhi * cos$1(lambda); + y0 = cosPhi * sin$1(lambda); + z0 = sin$1(phi); + centroidPointCartesian(x0, y0, z0); +} + +function centroidRingPoint(lambda, phi) { + lambda *= radians, phi *= radians; + var cosPhi = cos$1(phi), + x = cosPhi * cos$1(lambda), + y = cosPhi * sin$1(lambda), + z = sin$1(phi), + cx = y0 * z - z0 * y, + cy = z0 * x - x0 * z, + cz = x0 * y - y0 * x, + m = sqrt(cx * cx + cy * cy + cz * cz), + w = asin(m), // line weight = angle + v = m && -w / m; // area weight multiplier + X2 += v * cx; + Y2 += v * cy; + Z2 += v * cz; + W1 += w; + X1 += w * (x0 + (x0 = x)); + Y1 += w * (y0 + (y0 = y)); + Z1 += w * (z0 + (z0 = z)); + centroidPointCartesian(x0, y0, z0); +} + +function centroid(object) { + W0 = W1 = + X0 = Y0 = Z0 = + X1 = Y1 = Z1 = + X2 = Y2 = Z2 = 0; + geoStream(object, centroidStream); + + var x = X2, + y = Y2, + z = Z2, + m = x * x + y * y + z * z; + + // If the area-weighted centroid is undefined, fall back to length-weighted centroid. + if (m < epsilon2$1) { + x = X1, y = Y1, z = Z1; + // If the feature has zero length, fall back to arithmetic mean of point vectors. + if (W1 < epsilon$2) x = X0, y = Y0, z = Z0; + m = x * x + y * y + z * z; + // If the feature still has an undefined centroid, then return. 
+ if (m < epsilon2$1) return [NaN, NaN]; + } + + return [atan2(y, x) * degrees$1, asin(z / sqrt(m)) * degrees$1]; +} + +function constant$8(x) { + return function() { + return x; + }; +} + +function compose(a, b) { + + function compose(x, y) { + return x = a(x, y), b(x[0], x[1]); + } + + if (a.invert && b.invert) compose.invert = function(x, y) { + return x = b.invert(x, y), x && a.invert(x[0], x[1]); + }; + + return compose; +} + +function rotationIdentity(lambda, phi) { + return [abs(lambda) > pi$3 ? lambda + Math.round(-lambda / tau$3) * tau$3 : lambda, phi]; +} + +rotationIdentity.invert = rotationIdentity; + +function rotateRadians(deltaLambda, deltaPhi, deltaGamma) { + return (deltaLambda %= tau$3) ? (deltaPhi || deltaGamma ? compose(rotationLambda(deltaLambda), rotationPhiGamma(deltaPhi, deltaGamma)) + : rotationLambda(deltaLambda)) + : (deltaPhi || deltaGamma ? rotationPhiGamma(deltaPhi, deltaGamma) + : rotationIdentity); +} + +function forwardRotationLambda(deltaLambda) { + return function(lambda, phi) { + return lambda += deltaLambda, [lambda > pi$3 ? lambda - tau$3 : lambda < -pi$3 ? 
lambda + tau$3 : lambda, phi]; + }; +} + +function rotationLambda(deltaLambda) { + var rotation = forwardRotationLambda(deltaLambda); + rotation.invert = forwardRotationLambda(-deltaLambda); + return rotation; +} + +function rotationPhiGamma(deltaPhi, deltaGamma) { + var cosDeltaPhi = cos$1(deltaPhi), + sinDeltaPhi = sin$1(deltaPhi), + cosDeltaGamma = cos$1(deltaGamma), + sinDeltaGamma = sin$1(deltaGamma); + + function rotation(lambda, phi) { + var cosPhi = cos$1(phi), + x = cos$1(lambda) * cosPhi, + y = sin$1(lambda) * cosPhi, + z = sin$1(phi), + k = z * cosDeltaPhi + x * sinDeltaPhi; + return [ + atan2(y * cosDeltaGamma - k * sinDeltaGamma, x * cosDeltaPhi - z * sinDeltaPhi), + asin(k * cosDeltaGamma + y * sinDeltaGamma) + ]; + } + + rotation.invert = function(lambda, phi) { + var cosPhi = cos$1(phi), + x = cos$1(lambda) * cosPhi, + y = sin$1(lambda) * cosPhi, + z = sin$1(phi), + k = z * cosDeltaGamma - y * sinDeltaGamma; + return [ + atan2(y * cosDeltaGamma + z * sinDeltaGamma, x * cosDeltaPhi + k * sinDeltaPhi), + asin(k * cosDeltaPhi - x * sinDeltaPhi) + ]; + }; + + return rotation; +} + +function rotation(rotate) { + rotate = rotateRadians(rotate[0] * radians, rotate[1] * radians, rotate.length > 2 ? rotate[2] * radians : 0); + + function forward(coordinates) { + coordinates = rotate(coordinates[0] * radians, coordinates[1] * radians); + return coordinates[0] *= degrees$1, coordinates[1] *= degrees$1, coordinates; + } + + forward.invert = function(coordinates) { + coordinates = rotate.invert(coordinates[0] * radians, coordinates[1] * radians); + return coordinates[0] *= degrees$1, coordinates[1] *= degrees$1, coordinates; + }; + + return forward; +} + +// Generates a circle centered at [0°, 0°], with a given radius and precision. 
+function circleStream(stream, radius, delta, direction, t0, t1) { + if (!delta) return; + var cosRadius = cos$1(radius), + sinRadius = sin$1(radius), + step = direction * delta; + if (t0 == null) { + t0 = radius + direction * tau$3; + t1 = radius - step / 2; + } else { + t0 = circleRadius(cosRadius, t0); + t1 = circleRadius(cosRadius, t1); + if (direction > 0 ? t0 < t1 : t0 > t1) t0 += direction * tau$3; + } + for (var point, t = t0; direction > 0 ? t > t1 : t < t1; t -= step) { + point = spherical([cosRadius, -sinRadius * cos$1(t), -sinRadius * sin$1(t)]); + stream.point(point[0], point[1]); + } +} + +// Returns the signed angle of a cartesian point relative to [cosRadius, 0, 0]. +function circleRadius(cosRadius, point) { + point = cartesian(point), point[0] -= cosRadius; + cartesianNormalizeInPlace(point); + var radius = acos(-point[1]); + return ((-point[2] < 0 ? -radius : radius) + tau$3 - epsilon$2) % tau$3; +} + +function circle() { + var center = constant$8([0, 0]), + radius = constant$8(90), + precision = constant$8(6), + ring, + rotate, + stream = {point: point}; + + function point(x, y) { + ring.push(x = rotate(x, y)); + x[0] *= degrees$1, x[1] *= degrees$1; + } + + function circle() { + var c = center.apply(this, arguments), + r = radius.apply(this, arguments) * radians, + p = precision.apply(this, arguments) * radians; + ring = []; + rotate = rotateRadians(-c[0] * radians, -c[1] * radians, 0).invert; + circleStream(stream, r, p, 1); + c = {type: "Polygon", coordinates: [ring]}; + ring = rotate = null; + return c; + } + + circle.center = function(_) { + return arguments.length ? (center = typeof _ === "function" ? _ : constant$8([+_[0], +_[1]]), circle) : center; + }; + + circle.radius = function(_) { + return arguments.length ? (radius = typeof _ === "function" ? _ : constant$8(+_), circle) : radius; + }; + + circle.precision = function(_) { + return arguments.length ? (precision = typeof _ === "function" ? 
_ : constant$8(+_), circle) : precision; + }; + + return circle; +} + +function clipBuffer() { + var lines = [], + line; + return { + point: function(x, y) { + line.push([x, y]); + }, + lineStart: function() { + lines.push(line = []); + }, + lineEnd: noop$2, + rejoin: function() { + if (lines.length > 1) lines.push(lines.pop().concat(lines.shift())); + }, + result: function() { + var result = lines; + lines = []; + line = null; + return result; + } + }; +} + +function pointEqual(a, b) { + return abs(a[0] - b[0]) < epsilon$2 && abs(a[1] - b[1]) < epsilon$2; +} + +function Intersection(point, points, other, entry) { + this.x = point; + this.z = points; + this.o = other; // another intersection + this.e = entry; // is an entry? + this.v = false; // visited + this.n = this.p = null; // next & previous +} + +// A generalized polygon clipping algorithm: given a polygon that has been cut +// into its visible line segments, and rejoins the segments by interpolating +// along the clip edge. +function clipRejoin(segments, compareIntersection, startInside, interpolate, stream) { + var subject = [], + clip = [], + i, + n; + + segments.forEach(function(segment) { + if ((n = segment.length - 1) <= 0) return; + var n, p0 = segment[0], p1 = segment[n], x; + + // If the first and last points of a segment are coincident, then treat as a + // closed ring. TODO if all rings are closed, then the winding order of the + // exterior ring should be checked. 
+ if (pointEqual(p0, p1)) { + stream.lineStart(); + for (i = 0; i < n; ++i) stream.point((p0 = segment[i])[0], p0[1]); + stream.lineEnd(); + return; + } + + subject.push(x = new Intersection(p0, segment, null, true)); + clip.push(x.o = new Intersection(p0, null, x, false)); + subject.push(x = new Intersection(p1, segment, null, false)); + clip.push(x.o = new Intersection(p1, null, x, true)); + }); + + if (!subject.length) return; + + clip.sort(compareIntersection); + link$1(subject); + link$1(clip); + + for (i = 0, n = clip.length; i < n; ++i) { + clip[i].e = startInside = !startInside; + } + + var start = subject[0], + points, + point; + + while (1) { + // Find first unvisited intersection. + var current = start, + isSubject = true; + while (current.v) if ((current = current.n) === start) return; + points = current.z; + stream.lineStart(); + do { + current.v = current.o.v = true; + if (current.e) { + if (isSubject) { + for (i = 0, n = points.length; i < n; ++i) stream.point((point = points[i])[0], point[1]); + } else { + interpolate(current.x, current.n.x, 1, stream); + } + current = current.n; + } else { + if (isSubject) { + points = current.p.z; + for (i = points.length - 1; i >= 0; --i) stream.point((point = points[i])[0], point[1]); + } else { + interpolate(current.x, current.p.x, -1, stream); + } + current = current.p; + } + current = current.o; + points = current.z; + isSubject = !isSubject; + } while (!current.v); + stream.lineEnd(); + } +} + +function link$1(array) { + if (!(n = array.length)) return; + var n, + i = 0, + a = array[0], + b; + while (++i < n) { + a.n = b = array[i]; + b.p = a; + a = b; + } + a.n = b = array[0]; + b.p = a; +} + +var sum$1 = adder(); + +function longitude(point) { + if (abs(point[0]) <= pi$3) + return point[0]; + else + return sign(point[0]) * ((abs(point[0]) + pi$3) % tau$3 - pi$3); +} + +function polygonContains(polygon, point) { + var lambda = longitude(point), + phi = point[1], + sinPhi = sin$1(phi), + normal = 
[sin$1(lambda), -cos$1(lambda), 0], + angle = 0, + winding = 0; + + sum$1.reset(); + + if (sinPhi === 1) phi = halfPi$2 + epsilon$2; + else if (sinPhi === -1) phi = -halfPi$2 - epsilon$2; + + for (var i = 0, n = polygon.length; i < n; ++i) { + if (!(m = (ring = polygon[i]).length)) continue; + var ring, + m, + point0 = ring[m - 1], + lambda0 = longitude(point0), + phi0 = point0[1] / 2 + quarterPi, + sinPhi0 = sin$1(phi0), + cosPhi0 = cos$1(phi0); + + for (var j = 0; j < m; ++j, lambda0 = lambda1, sinPhi0 = sinPhi1, cosPhi0 = cosPhi1, point0 = point1) { + var point1 = ring[j], + lambda1 = longitude(point1), + phi1 = point1[1] / 2 + quarterPi, + sinPhi1 = sin$1(phi1), + cosPhi1 = cos$1(phi1), + delta = lambda1 - lambda0, + sign = delta >= 0 ? 1 : -1, + absDelta = sign * delta, + antimeridian = absDelta > pi$3, + k = sinPhi0 * sinPhi1; + + sum$1.add(atan2(k * sign * sin$1(absDelta), cosPhi0 * cosPhi1 + k * cos$1(absDelta))); + angle += antimeridian ? delta + sign * tau$3 : delta; + + // Are the longitudes either side of the point’s meridian (lambda), + // and are the latitudes smaller than the parallel (phi)? + if (antimeridian ^ lambda0 >= lambda ^ lambda1 >= lambda) { + var arc = cartesianCross(cartesian(point0), cartesian(point1)); + cartesianNormalizeInPlace(arc); + var intersection = cartesianCross(normal, arc); + cartesianNormalizeInPlace(intersection); + var phiArc = (antimeridian ^ delta >= 0 ? -1 : 1) * asin(intersection[2]); + if (phi > phiArc || phi === phiArc && (arc[0] || arc[1])) { + winding += antimeridian ^ delta >= 0 ? 1 : -1; + } + } + } + } + + // First, determine whether the South pole is inside or outside: + // + // It is inside if: + // * the polygon winds around it in a clockwise direction. + // * the polygon does not (cumulatively) wind around it, but has a negative + // (counter-clockwise) area. + // + // Second, count the (signed) number of times a segment crosses a lambda + // from the point to the South pole. 
If it is zero, then the point is the + // same side as the South pole. + + return (angle < -epsilon$2 || angle < epsilon$2 && sum$1 < -epsilon$2) ^ (winding & 1); +} + +function clip(pointVisible, clipLine, interpolate, start) { + return function(sink) { + var line = clipLine(sink), + ringBuffer = clipBuffer(), + ringSink = clipLine(ringBuffer), + polygonStarted = false, + polygon, + segments, + ring; + + var clip = { + point: point, + lineStart: lineStart, + lineEnd: lineEnd, + polygonStart: function() { + clip.point = pointRing; + clip.lineStart = ringStart; + clip.lineEnd = ringEnd; + segments = []; + polygon = []; + }, + polygonEnd: function() { + clip.point = point; + clip.lineStart = lineStart; + clip.lineEnd = lineEnd; + segments = merge(segments); + var startInside = polygonContains(polygon, start); + if (segments.length) { + if (!polygonStarted) sink.polygonStart(), polygonStarted = true; + clipRejoin(segments, compareIntersection, startInside, interpolate, sink); + } else if (startInside) { + if (!polygonStarted) sink.polygonStart(), polygonStarted = true; + sink.lineStart(); + interpolate(null, null, 1, sink); + sink.lineEnd(); + } + if (polygonStarted) sink.polygonEnd(), polygonStarted = false; + segments = polygon = null; + }, + sphere: function() { + sink.polygonStart(); + sink.lineStart(); + interpolate(null, null, 1, sink); + sink.lineEnd(); + sink.polygonEnd(); + } + }; + + function point(lambda, phi) { + if (pointVisible(lambda, phi)) sink.point(lambda, phi); + } + + function pointLine(lambda, phi) { + line.point(lambda, phi); + } + + function lineStart() { + clip.point = pointLine; + line.lineStart(); + } + + function lineEnd() { + clip.point = point; + line.lineEnd(); + } + + function pointRing(lambda, phi) { + ring.push([lambda, phi]); + ringSink.point(lambda, phi); + } + + function ringStart() { + ringSink.lineStart(); + ring = []; + } + + function ringEnd() { + pointRing(ring[0][0], ring[0][1]); + ringSink.lineEnd(); + + var clean = 
ringSink.clean(), + ringSegments = ringBuffer.result(), + i, n = ringSegments.length, m, + segment, + point; + + ring.pop(); + polygon.push(ring); + ring = null; + + if (!n) return; + + // No intersections. + if (clean & 1) { + segment = ringSegments[0]; + if ((m = segment.length - 1) > 0) { + if (!polygonStarted) sink.polygonStart(), polygonStarted = true; + sink.lineStart(); + for (i = 0; i < m; ++i) sink.point((point = segment[i])[0], point[1]); + sink.lineEnd(); + } + return; + } + + // Rejoin connected segments. + // TODO reuse ringBuffer.rejoin()? + if (n > 1 && clean & 2) ringSegments.push(ringSegments.pop().concat(ringSegments.shift())); + + segments.push(ringSegments.filter(validSegment)); + } + + return clip; + }; +} + +function validSegment(segment) { + return segment.length > 1; +} + +// Intersections are sorted along the clip edge. For both antimeridian cutting +// and circle clipping, the same comparison is used. +function compareIntersection(a, b) { + return ((a = a.x)[0] < 0 ? a[1] - halfPi$2 - epsilon$2 : halfPi$2 - a[1]) + - ((b = b.x)[0] < 0 ? b[1] - halfPi$2 - epsilon$2 : halfPi$2 - b[1]); +} + +var clipAntimeridian = clip( + function() { return true; }, + clipAntimeridianLine, + clipAntimeridianInterpolate, + [-pi$3, -halfPi$2] +); + +// Takes a line and cuts into visible segments. Return values: 0 - there were +// intersections or the line was empty; 1 - no intersections; 2 - there were +// intersections, and the first and last segments should be rejoined. +function clipAntimeridianLine(stream) { + var lambda0 = NaN, + phi0 = NaN, + sign0 = NaN, + clean; // no intersections + + return { + lineStart: function() { + stream.lineStart(); + clean = 1; + }, + point: function(lambda1, phi1) { + var sign1 = lambda1 > 0 ? pi$3 : -pi$3, + delta = abs(lambda1 - lambda0); + if (abs(delta - pi$3) < epsilon$2) { // line crosses a pole + stream.point(lambda0, phi0 = (phi0 + phi1) / 2 > 0 ? 
halfPi$2 : -halfPi$2); + stream.point(sign0, phi0); + stream.lineEnd(); + stream.lineStart(); + stream.point(sign1, phi0); + stream.point(lambda1, phi0); + clean = 0; + } else if (sign0 !== sign1 && delta >= pi$3) { // line crosses antimeridian + if (abs(lambda0 - sign0) < epsilon$2) lambda0 -= sign0 * epsilon$2; // handle degeneracies + if (abs(lambda1 - sign1) < epsilon$2) lambda1 -= sign1 * epsilon$2; + phi0 = clipAntimeridianIntersect(lambda0, phi0, lambda1, phi1); + stream.point(sign0, phi0); + stream.lineEnd(); + stream.lineStart(); + stream.point(sign1, phi0); + clean = 0; + } + stream.point(lambda0 = lambda1, phi0 = phi1); + sign0 = sign1; + }, + lineEnd: function() { + stream.lineEnd(); + lambda0 = phi0 = NaN; + }, + clean: function() { + return 2 - clean; // if intersections, rejoin first and last segments + } + }; +} + +function clipAntimeridianIntersect(lambda0, phi0, lambda1, phi1) { + var cosPhi0, + cosPhi1, + sinLambda0Lambda1 = sin$1(lambda0 - lambda1); + return abs(sinLambda0Lambda1) > epsilon$2 + ? atan((sin$1(phi0) * (cosPhi1 = cos$1(phi1)) * sin$1(lambda1) + - sin$1(phi1) * (cosPhi0 = cos$1(phi0)) * sin$1(lambda0)) + / (cosPhi0 * cosPhi1 * sinLambda0Lambda1)) + : (phi0 + phi1) / 2; +} + +function clipAntimeridianInterpolate(from, to, direction, stream) { + var phi; + if (from == null) { + phi = direction * halfPi$2; + stream.point(-pi$3, phi); + stream.point(0, phi); + stream.point(pi$3, phi); + stream.point(pi$3, 0); + stream.point(pi$3, -phi); + stream.point(0, -phi); + stream.point(-pi$3, -phi); + stream.point(-pi$3, 0); + stream.point(-pi$3, phi); + } else if (abs(from[0] - to[0]) > epsilon$2) { + var lambda = from[0] < to[0] ? 
pi$3 : -pi$3; + phi = direction * lambda / 2; + stream.point(-lambda, phi); + stream.point(0, phi); + stream.point(lambda, phi); + } else { + stream.point(to[0], to[1]); + } +} + +function clipCircle(radius) { + var cr = cos$1(radius), + delta = 6 * radians, + smallRadius = cr > 0, + notHemisphere = abs(cr) > epsilon$2; // TODO optimise for this common case + + function interpolate(from, to, direction, stream) { + circleStream(stream, radius, delta, direction, from, to); + } + + function visible(lambda, phi) { + return cos$1(lambda) * cos$1(phi) > cr; + } + + // Takes a line and cuts into visible segments. Return values used for polygon + // clipping: 0 - there were intersections or the line was empty; 1 - no + // intersections 2 - there were intersections, and the first and last segments + // should be rejoined. + function clipLine(stream) { + var point0, // previous point + c0, // code for previous point + v0, // visibility of previous point + v00, // visibility of first point + clean; // no intersections + return { + lineStart: function() { + v00 = v0 = false; + clean = 1; + }, + point: function(lambda, phi) { + var point1 = [lambda, phi], + point2, + v = visible(lambda, phi), + c = smallRadius + ? v ? 0 : code(lambda, phi) + : v ? code(lambda + (lambda < 0 ? pi$3 : -pi$3), phi) : 0; + if (!point0 && (v00 = v0 = v)) stream.lineStart(); + // Handle degeneracies. + // TODO ignore if not clipping polygons. 
+ if (v !== v0) { + point2 = intersect(point0, point1); + if (!point2 || pointEqual(point0, point2) || pointEqual(point1, point2)) { + point1[0] += epsilon$2; + point1[1] += epsilon$2; + v = visible(point1[0], point1[1]); + } + } + if (v !== v0) { + clean = 0; + if (v) { + // outside going in + stream.lineStart(); + point2 = intersect(point1, point0); + stream.point(point2[0], point2[1]); + } else { + // inside going out + point2 = intersect(point0, point1); + stream.point(point2[0], point2[1]); + stream.lineEnd(); + } + point0 = point2; + } else if (notHemisphere && point0 && smallRadius ^ v) { + var t; + // If the codes for two points are different, or are both zero, + // and there this segment intersects with the small circle. + if (!(c & c0) && (t = intersect(point1, point0, true))) { + clean = 0; + if (smallRadius) { + stream.lineStart(); + stream.point(t[0][0], t[0][1]); + stream.point(t[1][0], t[1][1]); + stream.lineEnd(); + } else { + stream.point(t[1][0], t[1][1]); + stream.lineEnd(); + stream.lineStart(); + stream.point(t[0][0], t[0][1]); + } + } + } + if (v && (!point0 || !pointEqual(point0, point1))) { + stream.point(point1[0], point1[1]); + } + point0 = point1, v0 = v, c0 = c; + }, + lineEnd: function() { + if (v0) stream.lineEnd(); + point0 = null; + }, + // Rejoin first and last segments if there were intersections and the first + // and last points were visible. + clean: function() { + return clean | ((v00 && v0) << 1); + } + }; + } + + // Intersects the great circle between a and b with the clip circle. + function intersect(a, b, two) { + var pa = cartesian(a), + pb = cartesian(b); + + // We have two planes, n1.p = d1 and n2.p = d2. + // Find intersection line p(t) = c1 n1 + c2 n2 + t (n1 ⨯ n2). + var n1 = [1, 0, 0], // normal + n2 = cartesianCross(pa, pb), + n2n2 = cartesianDot(n2, n2), + n1n2 = n2[0], // cartesianDot(n1, n2), + determinant = n2n2 - n1n2 * n1n2; + + // Two polar points. 
+ if (!determinant) return !two && a; + + var c1 = cr * n2n2 / determinant, + c2 = -cr * n1n2 / determinant, + n1xn2 = cartesianCross(n1, n2), + A = cartesianScale(n1, c1), + B = cartesianScale(n2, c2); + cartesianAddInPlace(A, B); + + // Solve |p(t)|^2 = 1. + var u = n1xn2, + w = cartesianDot(A, u), + uu = cartesianDot(u, u), + t2 = w * w - uu * (cartesianDot(A, A) - 1); + + if (t2 < 0) return; + + var t = sqrt(t2), + q = cartesianScale(u, (-w - t) / uu); + cartesianAddInPlace(q, A); + q = spherical(q); + + if (!two) return q; + + // Two intersection points. + var lambda0 = a[0], + lambda1 = b[0], + phi0 = a[1], + phi1 = b[1], + z; + + if (lambda1 < lambda0) z = lambda0, lambda0 = lambda1, lambda1 = z; + + var delta = lambda1 - lambda0, + polar = abs(delta - pi$3) < epsilon$2, + meridian = polar || delta < epsilon$2; + + if (!polar && phi1 < phi0) z = phi0, phi0 = phi1, phi1 = z; + + // Check that the first point is between a and b. + if (meridian + ? polar + ? phi0 + phi1 > 0 ^ q[1] < (abs(q[0] - lambda0) < epsilon$2 ? phi0 : phi1) + : phi0 <= q[1] && q[1] <= phi1 + : delta > pi$3 ^ (lambda0 <= q[0] && q[0] <= lambda1)) { + var q1 = cartesianScale(u, (-w + t) / uu); + cartesianAddInPlace(q1, A); + return [q, spherical(q1)]; + } + } + + // Generates a 4-bit vector representing the location of a point relative to + // the small circle's bounding box. + function code(lambda, phi) { + var r = smallRadius ? radius : pi$3 - radius, + code = 0; + if (lambda < -r) code |= 1; // left + else if (lambda > r) code |= 2; // right + if (phi < -r) code |= 4; // below + else if (phi > r) code |= 8; // above + return code; + } + + return clip(visible, clipLine, interpolate, smallRadius ? 
[0, -radius] : [-pi$3, radius - pi$3]); +} + +function clipLine(a, b, x0, y0, x1, y1) { + var ax = a[0], + ay = a[1], + bx = b[0], + by = b[1], + t0 = 0, + t1 = 1, + dx = bx - ax, + dy = by - ay, + r; + + r = x0 - ax; + if (!dx && r > 0) return; + r /= dx; + if (dx < 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } else if (dx > 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } + + r = x1 - ax; + if (!dx && r < 0) return; + r /= dx; + if (dx < 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } else if (dx > 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } + + r = y0 - ay; + if (!dy && r > 0) return; + r /= dy; + if (dy < 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } else if (dy > 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } + + r = y1 - ay; + if (!dy && r < 0) return; + r /= dy; + if (dy < 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } else if (dy > 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } + + if (t0 > 0) a[0] = ax + t0 * dx, a[1] = ay + t0 * dy; + if (t1 < 1) b[0] = ax + t1 * dx, b[1] = ay + t1 * dy; + return true; +} + +var clipMax = 1e9, clipMin = -clipMax; + +// TODO Use d3-polygon’s polygonContains here for the ring check? +// TODO Eliminate duplicate buffering in clipBuffer and polygon.push? + +function clipRectangle(x0, y0, x1, y1) { + + function visible(x, y) { + return x0 <= x && x <= x1 && y0 <= y && y <= y1; + } + + function interpolate(from, to, direction, stream) { + var a = 0, a1 = 0; + if (from == null + || (a = corner(from, direction)) !== (a1 = corner(to, direction)) + || comparePoint(from, to) < 0 ^ direction > 0) { + do stream.point(a === 0 || a === 3 ? x0 : x1, a > 1 ? y1 : y0); + while ((a = (a + direction + 4) % 4) !== a1); + } else { + stream.point(to[0], to[1]); + } + } + + function corner(p, direction) { + return abs(p[0] - x0) < epsilon$2 ? direction > 0 ? 0 : 3 + : abs(p[0] - x1) < epsilon$2 ? direction > 0 ? 2 : 1 + : abs(p[1] - y0) < epsilon$2 ? direction > 0 ? 1 : 0 + : direction > 0 ? 
3 : 2; // abs(p[1] - y1) < epsilon + } + + function compareIntersection(a, b) { + return comparePoint(a.x, b.x); + } + + function comparePoint(a, b) { + var ca = corner(a, 1), + cb = corner(b, 1); + return ca !== cb ? ca - cb + : ca === 0 ? b[1] - a[1] + : ca === 1 ? a[0] - b[0] + : ca === 2 ? a[1] - b[1] + : b[0] - a[0]; + } + + return function(stream) { + var activeStream = stream, + bufferStream = clipBuffer(), + segments, + polygon, + ring, + x__, y__, v__, // first point + x_, y_, v_, // previous point + first, + clean; + + var clipStream = { + point: point, + lineStart: lineStart, + lineEnd: lineEnd, + polygonStart: polygonStart, + polygonEnd: polygonEnd + }; + + function point(x, y) { + if (visible(x, y)) activeStream.point(x, y); + } + + function polygonInside() { + var winding = 0; + + for (var i = 0, n = polygon.length; i < n; ++i) { + for (var ring = polygon[i], j = 1, m = ring.length, point = ring[0], a0, a1, b0 = point[0], b1 = point[1]; j < m; ++j) { + a0 = b0, a1 = b1, point = ring[j], b0 = point[0], b1 = point[1]; + if (a1 <= y1) { if (b1 > y1 && (b0 - a0) * (y1 - a1) > (b1 - a1) * (x0 - a0)) ++winding; } + else { if (b1 <= y1 && (b0 - a0) * (y1 - a1) < (b1 - a1) * (x0 - a0)) --winding; } + } + } + + return winding; + } + + // Buffer geometry within a polygon and then clip it en masse. 
+ function polygonStart() { + activeStream = bufferStream, segments = [], polygon = [], clean = true; + } + + function polygonEnd() { + var startInside = polygonInside(), + cleanInside = clean && startInside, + visible = (segments = merge(segments)).length; + if (cleanInside || visible) { + stream.polygonStart(); + if (cleanInside) { + stream.lineStart(); + interpolate(null, null, 1, stream); + stream.lineEnd(); + } + if (visible) { + clipRejoin(segments, compareIntersection, startInside, interpolate, stream); + } + stream.polygonEnd(); + } + activeStream = stream, segments = polygon = ring = null; + } + + function lineStart() { + clipStream.point = linePoint; + if (polygon) polygon.push(ring = []); + first = true; + v_ = false; + x_ = y_ = NaN; + } + + // TODO rather than special-case polygons, simply handle them separately. + // Ideally, coincident intersection points should be jittered to avoid + // clipping issues. + function lineEnd() { + if (segments) { + linePoint(x__, y__); + if (v__ && v_) bufferStream.rejoin(); + segments.push(bufferStream.result()); + } + clipStream.point = point; + if (v_) activeStream.lineEnd(); + } + + function linePoint(x, y) { + var v = visible(x, y); + if (polygon) ring.push([x, y]); + if (first) { + x__ = x, y__ = y, v__ = v; + first = false; + if (v) { + activeStream.lineStart(); + activeStream.point(x, y); + } + } else { + if (v && v_) activeStream.point(x, y); + else { + var a = [x_ = Math.max(clipMin, Math.min(clipMax, x_)), y_ = Math.max(clipMin, Math.min(clipMax, y_))], + b = [x = Math.max(clipMin, Math.min(clipMax, x)), y = Math.max(clipMin, Math.min(clipMax, y))]; + if (clipLine(a, b, x0, y0, x1, y1)) { + if (!v_) { + activeStream.lineStart(); + activeStream.point(a[0], a[1]); + } + activeStream.point(b[0], b[1]); + if (!v) activeStream.lineEnd(); + clean = false; + } else if (v) { + activeStream.lineStart(); + activeStream.point(x, y); + clean = false; + } + } + } + x_ = x, y_ = y, v_ = v; + } + + return clipStream; + }; 
+} + +function extent$1() { + var x0 = 0, + y0 = 0, + x1 = 960, + y1 = 500, + cache, + cacheStream, + clip; + + return clip = { + stream: function(stream) { + return cache && cacheStream === stream ? cache : cache = clipRectangle(x0, y0, x1, y1)(cacheStream = stream); + }, + extent: function(_) { + return arguments.length ? (x0 = +_[0][0], y0 = +_[0][1], x1 = +_[1][0], y1 = +_[1][1], cache = cacheStream = null, clip) : [[x0, y0], [x1, y1]]; + } + }; +} + +var lengthSum = adder(), + lambda0$2, + sinPhi0$1, + cosPhi0$1; + +var lengthStream = { + sphere: noop$2, + point: noop$2, + lineStart: lengthLineStart, + lineEnd: noop$2, + polygonStart: noop$2, + polygonEnd: noop$2 +}; + +function lengthLineStart() { + lengthStream.point = lengthPointFirst; + lengthStream.lineEnd = lengthLineEnd; +} + +function lengthLineEnd() { + lengthStream.point = lengthStream.lineEnd = noop$2; +} + +function lengthPointFirst(lambda, phi) { + lambda *= radians, phi *= radians; + lambda0$2 = lambda, sinPhi0$1 = sin$1(phi), cosPhi0$1 = cos$1(phi); + lengthStream.point = lengthPoint; +} + +function lengthPoint(lambda, phi) { + lambda *= radians, phi *= radians; + var sinPhi = sin$1(phi), + cosPhi = cos$1(phi), + delta = abs(lambda - lambda0$2), + cosDelta = cos$1(delta), + sinDelta = sin$1(delta), + x = cosPhi * sinDelta, + y = cosPhi0$1 * sinPhi - sinPhi0$1 * cosPhi * cosDelta, + z = sinPhi0$1 * sinPhi + cosPhi0$1 * cosPhi * cosDelta; + lengthSum.add(atan2(sqrt(x * x + y * y), z)); + lambda0$2 = lambda, sinPhi0$1 = sinPhi, cosPhi0$1 = cosPhi; +} + +function length$1(object) { + lengthSum.reset(); + geoStream(object, lengthStream); + return +lengthSum; +} + +var coordinates = [null, null], + object$1 = {type: "LineString", coordinates: coordinates}; + +function distance(a, b) { + coordinates[0] = a; + coordinates[1] = b; + return length$1(object$1); +} + +var containsObjectType = { + Feature: function(object, point) { + return containsGeometry(object.geometry, point); + }, + FeatureCollection: 
function(object, point) { + var features = object.features, i = -1, n = features.length; + while (++i < n) if (containsGeometry(features[i].geometry, point)) return true; + return false; + } +}; + +var containsGeometryType = { + Sphere: function() { + return true; + }, + Point: function(object, point) { + return containsPoint(object.coordinates, point); + }, + MultiPoint: function(object, point) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) if (containsPoint(coordinates[i], point)) return true; + return false; + }, + LineString: function(object, point) { + return containsLine(object.coordinates, point); + }, + MultiLineString: function(object, point) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) if (containsLine(coordinates[i], point)) return true; + return false; + }, + Polygon: function(object, point) { + return containsPolygon(object.coordinates, point); + }, + MultiPolygon: function(object, point) { + var coordinates = object.coordinates, i = -1, n = coordinates.length; + while (++i < n) if (containsPolygon(coordinates[i], point)) return true; + return false; + }, + GeometryCollection: function(object, point) { + var geometries = object.geometries, i = -1, n = geometries.length; + while (++i < n) if (containsGeometry(geometries[i], point)) return true; + return false; + } +}; + +function containsGeometry(geometry, point) { + return geometry && containsGeometryType.hasOwnProperty(geometry.type) + ? 
containsGeometryType[geometry.type](geometry, point) + : false; +} + +function containsPoint(coordinates, point) { + return distance(coordinates, point) === 0; +} + +function containsLine(coordinates, point) { + var ao, bo, ab; + for (var i = 0, n = coordinates.length; i < n; i++) { + bo = distance(coordinates[i], point); + if (bo === 0) return true; + if (i > 0) { + ab = distance(coordinates[i], coordinates[i - 1]); + if ( + ab > 0 && + ao <= ab && + bo <= ab && + (ao + bo - ab) * (1 - Math.pow((ao - bo) / ab, 2)) < epsilon2$1 * ab + ) + return true; + } + ao = bo; + } + return false; +} + +function containsPolygon(coordinates, point) { + return !!polygonContains(coordinates.map(ringRadians), pointRadians(point)); +} + +function ringRadians(ring) { + return ring = ring.map(pointRadians), ring.pop(), ring; +} + +function pointRadians(point) { + return [point[0] * radians, point[1] * radians]; +} + +function contains$1(object, point) { + return (object && containsObjectType.hasOwnProperty(object.type) + ? 
containsObjectType[object.type] + : containsGeometry)(object, point); +} + +function graticuleX(y0, y1, dy) { + var y = sequence(y0, y1 - epsilon$2, dy).concat(y1); + return function(x) { return y.map(function(y) { return [x, y]; }); }; +} + +function graticuleY(x0, x1, dx) { + var x = sequence(x0, x1 - epsilon$2, dx).concat(x1); + return function(y) { return x.map(function(x) { return [x, y]; }); }; +} + +function graticule() { + var x1, x0, X1, X0, + y1, y0, Y1, Y0, + dx = 10, dy = dx, DX = 90, DY = 360, + x, y, X, Y, + precision = 2.5; + + function graticule() { + return {type: "MultiLineString", coordinates: lines()}; + } + + function lines() { + return sequence(ceil(X0 / DX) * DX, X1, DX).map(X) + .concat(sequence(ceil(Y0 / DY) * DY, Y1, DY).map(Y)) + .concat(sequence(ceil(x0 / dx) * dx, x1, dx).filter(function(x) { return abs(x % DX) > epsilon$2; }).map(x)) + .concat(sequence(ceil(y0 / dy) * dy, y1, dy).filter(function(y) { return abs(y % DY) > epsilon$2; }).map(y)); + } + + graticule.lines = function() { + return lines().map(function(coordinates) { return {type: "LineString", coordinates: coordinates}; }); + }; + + graticule.outline = function() { + return { + type: "Polygon", + coordinates: [ + X(X0).concat( + Y(Y1).slice(1), + X(X1).reverse().slice(1), + Y(Y0).reverse().slice(1)) + ] + }; + }; + + graticule.extent = function(_) { + if (!arguments.length) return graticule.extentMinor(); + return graticule.extentMajor(_).extentMinor(_); + }; + + graticule.extentMajor = function(_) { + if (!arguments.length) return [[X0, Y0], [X1, Y1]]; + X0 = +_[0][0], X1 = +_[1][0]; + Y0 = +_[0][1], Y1 = +_[1][1]; + if (X0 > X1) _ = X0, X0 = X1, X1 = _; + if (Y0 > Y1) _ = Y0, Y0 = Y1, Y1 = _; + return graticule.precision(precision); + }; + + graticule.extentMinor = function(_) { + if (!arguments.length) return [[x0, y0], [x1, y1]]; + x0 = +_[0][0], x1 = +_[1][0]; + y0 = +_[0][1], y1 = +_[1][1]; + if (x0 > x1) _ = x0, x0 = x1, x1 = _; + if (y0 > y1) _ = y0, y0 = y1, y1 = _; 
+ return graticule.precision(precision); + }; + + graticule.step = function(_) { + if (!arguments.length) return graticule.stepMinor(); + return graticule.stepMajor(_).stepMinor(_); + }; + + graticule.stepMajor = function(_) { + if (!arguments.length) return [DX, DY]; + DX = +_[0], DY = +_[1]; + return graticule; + }; + + graticule.stepMinor = function(_) { + if (!arguments.length) return [dx, dy]; + dx = +_[0], dy = +_[1]; + return graticule; + }; + + graticule.precision = function(_) { + if (!arguments.length) return precision; + precision = +_; + x = graticuleX(y0, y1, 90); + y = graticuleY(x0, x1, precision); + X = graticuleX(Y0, Y1, 90); + Y = graticuleY(X0, X1, precision); + return graticule; + }; + + return graticule + .extentMajor([[-180, -90 + epsilon$2], [180, 90 - epsilon$2]]) + .extentMinor([[-180, -80 - epsilon$2], [180, 80 + epsilon$2]]); +} + +function graticule10() { + return graticule()(); +} + +function interpolate$1(a, b) { + var x0 = a[0] * radians, + y0 = a[1] * radians, + x1 = b[0] * radians, + y1 = b[1] * radians, + cy0 = cos$1(y0), + sy0 = sin$1(y0), + cy1 = cos$1(y1), + sy1 = sin$1(y1), + kx0 = cy0 * cos$1(x0), + ky0 = cy0 * sin$1(x0), + kx1 = cy1 * cos$1(x1), + ky1 = cy1 * sin$1(x1), + d = 2 * asin(sqrt(haversin(y1 - y0) + cy0 * cy1 * haversin(x1 - x0))), + k = sin$1(d); + + var interpolate = d ? 
function(t) { + var B = sin$1(t *= d) / k, + A = sin$1(d - t) / k, + x = A * kx0 + B * kx1, + y = A * ky0 + B * ky1, + z = A * sy0 + B * sy1; + return [ + atan2(y, x) * degrees$1, + atan2(z, sqrt(x * x + y * y)) * degrees$1 + ]; + } : function() { + return [x0 * degrees$1, y0 * degrees$1]; + }; + + interpolate.distance = d; + + return interpolate; +} + +function identity$4(x) { + return x; +} + +var areaSum$1 = adder(), + areaRingSum$1 = adder(), + x00, + y00, + x0$1, + y0$1; + +var areaStream$1 = { + point: noop$2, + lineStart: noop$2, + lineEnd: noop$2, + polygonStart: function() { + areaStream$1.lineStart = areaRingStart$1; + areaStream$1.lineEnd = areaRingEnd$1; + }, + polygonEnd: function() { + areaStream$1.lineStart = areaStream$1.lineEnd = areaStream$1.point = noop$2; + areaSum$1.add(abs(areaRingSum$1)); + areaRingSum$1.reset(); + }, + result: function() { + var area = areaSum$1 / 2; + areaSum$1.reset(); + return area; + } +}; + +function areaRingStart$1() { + areaStream$1.point = areaPointFirst$1; +} + +function areaPointFirst$1(x, y) { + areaStream$1.point = areaPoint$1; + x00 = x0$1 = x, y00 = y0$1 = y; +} + +function areaPoint$1(x, y) { + areaRingSum$1.add(y0$1 * x - x0$1 * y); + x0$1 = x, y0$1 = y; +} + +function areaRingEnd$1() { + areaPoint$1(x00, y00); +} + +var x0$2 = Infinity, + y0$2 = x0$2, + x1 = -x0$2, + y1 = x1; + +var boundsStream$1 = { + point: boundsPoint$1, + lineStart: noop$2, + lineEnd: noop$2, + polygonStart: noop$2, + polygonEnd: noop$2, + result: function() { + var bounds = [[x0$2, y0$2], [x1, y1]]; + x1 = y1 = -(y0$2 = x0$2 = Infinity); + return bounds; + } +}; + +function boundsPoint$1(x, y) { + if (x < x0$2) x0$2 = x; + if (x > x1) x1 = x; + if (y < y0$2) y0$2 = y; + if (y > y1) y1 = y; +} + +// TODO Enforce positive area for exterior, negative area for interior? 
+ +var X0$1 = 0, + Y0$1 = 0, + Z0$1 = 0, + X1$1 = 0, + Y1$1 = 0, + Z1$1 = 0, + X2$1 = 0, + Y2$1 = 0, + Z2$1 = 0, + x00$1, + y00$1, + x0$3, + y0$3; + +var centroidStream$1 = { + point: centroidPoint$1, + lineStart: centroidLineStart$1, + lineEnd: centroidLineEnd$1, + polygonStart: function() { + centroidStream$1.lineStart = centroidRingStart$1; + centroidStream$1.lineEnd = centroidRingEnd$1; + }, + polygonEnd: function() { + centroidStream$1.point = centroidPoint$1; + centroidStream$1.lineStart = centroidLineStart$1; + centroidStream$1.lineEnd = centroidLineEnd$1; + }, + result: function() { + var centroid = Z2$1 ? [X2$1 / Z2$1, Y2$1 / Z2$1] + : Z1$1 ? [X1$1 / Z1$1, Y1$1 / Z1$1] + : Z0$1 ? [X0$1 / Z0$1, Y0$1 / Z0$1] + : [NaN, NaN]; + X0$1 = Y0$1 = Z0$1 = + X1$1 = Y1$1 = Z1$1 = + X2$1 = Y2$1 = Z2$1 = 0; + return centroid; + } +}; + +function centroidPoint$1(x, y) { + X0$1 += x; + Y0$1 += y; + ++Z0$1; +} + +function centroidLineStart$1() { + centroidStream$1.point = centroidPointFirstLine; +} + +function centroidPointFirstLine(x, y) { + centroidStream$1.point = centroidPointLine; + centroidPoint$1(x0$3 = x, y0$3 = y); +} + +function centroidPointLine(x, y) { + var dx = x - x0$3, dy = y - y0$3, z = sqrt(dx * dx + dy * dy); + X1$1 += z * (x0$3 + x) / 2; + Y1$1 += z * (y0$3 + y) / 2; + Z1$1 += z; + centroidPoint$1(x0$3 = x, y0$3 = y); +} + +function centroidLineEnd$1() { + centroidStream$1.point = centroidPoint$1; +} + +function centroidRingStart$1() { + centroidStream$1.point = centroidPointFirstRing; +} + +function centroidRingEnd$1() { + centroidPointRing(x00$1, y00$1); +} + +function centroidPointFirstRing(x, y) { + centroidStream$1.point = centroidPointRing; + centroidPoint$1(x00$1 = x0$3 = x, y00$1 = y0$3 = y); +} + +function centroidPointRing(x, y) { + var dx = x - x0$3, + dy = y - y0$3, + z = sqrt(dx * dx + dy * dy); + + X1$1 += z * (x0$3 + x) / 2; + Y1$1 += z * (y0$3 + y) / 2; + Z1$1 += z; + + z = y0$3 * x - x0$3 * y; + X2$1 += z * (x0$3 + x); + Y2$1 += z * 
(y0$3 + y); + Z2$1 += z * 3; + centroidPoint$1(x0$3 = x, y0$3 = y); +} + +function PathContext(context) { + this._context = context; +} + +PathContext.prototype = { + _radius: 4.5, + pointRadius: function(_) { + return this._radius = _, this; + }, + polygonStart: function() { + this._line = 0; + }, + polygonEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._point = 0; + }, + lineEnd: function() { + if (this._line === 0) this._context.closePath(); + this._point = NaN; + }, + point: function(x, y) { + switch (this._point) { + case 0: { + this._context.moveTo(x, y); + this._point = 1; + break; + } + case 1: { + this._context.lineTo(x, y); + break; + } + default: { + this._context.moveTo(x + this._radius, y); + this._context.arc(x, y, this._radius, 0, tau$3); + break; + } + } + }, + result: noop$2 +}; + +var lengthSum$1 = adder(), + lengthRing, + x00$2, + y00$2, + x0$4, + y0$4; + +var lengthStream$1 = { + point: noop$2, + lineStart: function() { + lengthStream$1.point = lengthPointFirst$1; + }, + lineEnd: function() { + if (lengthRing) lengthPoint$1(x00$2, y00$2); + lengthStream$1.point = noop$2; + }, + polygonStart: function() { + lengthRing = true; + }, + polygonEnd: function() { + lengthRing = null; + }, + result: function() { + var length = +lengthSum$1; + lengthSum$1.reset(); + return length; + } +}; + +function lengthPointFirst$1(x, y) { + lengthStream$1.point = lengthPoint$1; + x00$2 = x0$4 = x, y00$2 = y0$4 = y; +} + +function lengthPoint$1(x, y) { + x0$4 -= x, y0$4 -= y; + lengthSum$1.add(sqrt(x0$4 * x0$4 + y0$4 * y0$4)); + x0$4 = x, y0$4 = y; +} + +function PathString() { + this._string = []; +} + +PathString.prototype = { + _radius: 4.5, + _circle: circle$1(4.5), + pointRadius: function(_) { + if ((_ = +_) !== this._radius) this._radius = _, this._circle = null; + return this; + }, + polygonStart: function() { + this._line = 0; + }, + polygonEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._point = 0; + }, 
+ lineEnd: function() { + if (this._line === 0) this._string.push("Z"); + this._point = NaN; + }, + point: function(x, y) { + switch (this._point) { + case 0: { + this._string.push("M", x, ",", y); + this._point = 1; + break; + } + case 1: { + this._string.push("L", x, ",", y); + break; + } + default: { + if (this._circle == null) this._circle = circle$1(this._radius); + this._string.push("M", x, ",", y, this._circle); + break; + } + } + }, + result: function() { + if (this._string.length) { + var result = this._string.join(""); + this._string = []; + return result; + } else { + return null; + } + } +}; + +function circle$1(radius) { + return "m0," + radius + + "a" + radius + "," + radius + " 0 1,1 0," + -2 * radius + + "a" + radius + "," + radius + " 0 1,1 0," + 2 * radius + + "z"; +} + +function index$1(projection, context) { + var pointRadius = 4.5, + projectionStream, + contextStream; + + function path(object) { + if (object) { + if (typeof pointRadius === "function") contextStream.pointRadius(+pointRadius.apply(this, arguments)); + geoStream(object, projectionStream(contextStream)); + } + return contextStream.result(); + } + + path.area = function(object) { + geoStream(object, projectionStream(areaStream$1)); + return areaStream$1.result(); + }; + + path.measure = function(object) { + geoStream(object, projectionStream(lengthStream$1)); + return lengthStream$1.result(); + }; + + path.bounds = function(object) { + geoStream(object, projectionStream(boundsStream$1)); + return boundsStream$1.result(); + }; + + path.centroid = function(object) { + geoStream(object, projectionStream(centroidStream$1)); + return centroidStream$1.result(); + }; + + path.projection = function(_) { + return arguments.length ? (projectionStream = _ == null ? (projection = null, identity$4) : (projection = _).stream, path) : projection; + }; + + path.context = function(_) { + if (!arguments.length) return context; + contextStream = _ == null ? 
(context = null, new PathString) : new PathContext(context = _); + if (typeof pointRadius !== "function") contextStream.pointRadius(pointRadius); + return path; + }; + + path.pointRadius = function(_) { + if (!arguments.length) return pointRadius; + pointRadius = typeof _ === "function" ? _ : (contextStream.pointRadius(+_), +_); + return path; + }; + + return path.projection(projection).context(context); +} + +function transform(methods) { + return { + stream: transformer(methods) + }; +} + +function transformer(methods) { + return function(stream) { + var s = new TransformStream; + for (var key in methods) s[key] = methods[key]; + s.stream = stream; + return s; + }; +} + +function TransformStream() {} + +TransformStream.prototype = { + constructor: TransformStream, + point: function(x, y) { this.stream.point(x, y); }, + sphere: function() { this.stream.sphere(); }, + lineStart: function() { this.stream.lineStart(); }, + lineEnd: function() { this.stream.lineEnd(); }, + polygonStart: function() { this.stream.polygonStart(); }, + polygonEnd: function() { this.stream.polygonEnd(); } +}; + +function fit(projection, fitBounds, object) { + var clip = projection.clipExtent && projection.clipExtent(); + projection.scale(150).translate([0, 0]); + if (clip != null) projection.clipExtent(null); + geoStream(object, projection.stream(boundsStream$1)); + fitBounds(boundsStream$1.result()); + if (clip != null) projection.clipExtent(clip); + return projection; +} + +function fitExtent(projection, extent, object) { + return fit(projection, function(b) { + var w = extent[1][0] - extent[0][0], + h = extent[1][1] - extent[0][1], + k = Math.min(w / (b[1][0] - b[0][0]), h / (b[1][1] - b[0][1])), + x = +extent[0][0] + (w - k * (b[1][0] + b[0][0])) / 2, + y = +extent[0][1] + (h - k * (b[1][1] + b[0][1])) / 2; + projection.scale(150 * k).translate([x, y]); + }, object); +} + +function fitSize(projection, size, object) { + return fitExtent(projection, [[0, 0], size], object); +} + 
+function fitWidth(projection, width, object) { + return fit(projection, function(b) { + var w = +width, + k = w / (b[1][0] - b[0][0]), + x = (w - k * (b[1][0] + b[0][0])) / 2, + y = -k * b[0][1]; + projection.scale(150 * k).translate([x, y]); + }, object); +} + +function fitHeight(projection, height, object) { + return fit(projection, function(b) { + var h = +height, + k = h / (b[1][1] - b[0][1]), + x = -k * b[0][0], + y = (h - k * (b[1][1] + b[0][1])) / 2; + projection.scale(150 * k).translate([x, y]); + }, object); +} + +var maxDepth = 16, // maximum depth of subdivision + cosMinDistance = cos$1(30 * radians); // cos(minimum angular distance) + +function resample(project, delta2) { + return +delta2 ? resample$1(project, delta2) : resampleNone(project); +} + +function resampleNone(project) { + return transformer({ + point: function(x, y) { + x = project(x, y); + this.stream.point(x[0], x[1]); + } + }); +} + +function resample$1(project, delta2) { + + function resampleLineTo(x0, y0, lambda0, a0, b0, c0, x1, y1, lambda1, a1, b1, c1, depth, stream) { + var dx = x1 - x0, + dy = y1 - y0, + d2 = dx * dx + dy * dy; + if (d2 > 4 * delta2 && depth--) { + var a = a0 + a1, + b = b0 + b1, + c = c0 + c1, + m = sqrt(a * a + b * b + c * c), + phi2 = asin(c /= m), + lambda2 = abs(abs(c) - 1) < epsilon$2 || abs(lambda0 - lambda1) < epsilon$2 ? 
(lambda0 + lambda1) / 2 : atan2(b, a), + p = project(lambda2, phi2), + x2 = p[0], + y2 = p[1], + dx2 = x2 - x0, + dy2 = y2 - y0, + dz = dy * dx2 - dx * dy2; + if (dz * dz / d2 > delta2 // perpendicular projected distance + || abs((dx * dx2 + dy * dy2) / d2 - 0.5) > 0.3 // midpoint close to an end + || a0 * a1 + b0 * b1 + c0 * c1 < cosMinDistance) { // angular distance + resampleLineTo(x0, y0, lambda0, a0, b0, c0, x2, y2, lambda2, a /= m, b /= m, c, depth, stream); + stream.point(x2, y2); + resampleLineTo(x2, y2, lambda2, a, b, c, x1, y1, lambda1, a1, b1, c1, depth, stream); + } + } + } + return function(stream) { + var lambda00, x00, y00, a00, b00, c00, // first point + lambda0, x0, y0, a0, b0, c0; // previous point + + var resampleStream = { + point: point, + lineStart: lineStart, + lineEnd: lineEnd, + polygonStart: function() { stream.polygonStart(); resampleStream.lineStart = ringStart; }, + polygonEnd: function() { stream.polygonEnd(); resampleStream.lineStart = lineStart; } + }; + + function point(x, y) { + x = project(x, y); + stream.point(x[0], x[1]); + } + + function lineStart() { + x0 = NaN; + resampleStream.point = linePoint; + stream.lineStart(); + } + + function linePoint(lambda, phi) { + var c = cartesian([lambda, phi]), p = project(lambda, phi); + resampleLineTo(x0, y0, lambda0, a0, b0, c0, x0 = p[0], y0 = p[1], lambda0 = lambda, a0 = c[0], b0 = c[1], c0 = c[2], maxDepth, stream); + stream.point(x0, y0); + } + + function lineEnd() { + resampleStream.point = point; + stream.lineEnd(); + } + + function ringStart() { + lineStart(); + resampleStream.point = ringPoint; + resampleStream.lineEnd = ringEnd; + } + + function ringPoint(lambda, phi) { + linePoint(lambda00 = lambda, phi), x00 = x0, y00 = y0, a00 = a0, b00 = b0, c00 = c0; + resampleStream.point = linePoint; + } + + function ringEnd() { + resampleLineTo(x0, y0, lambda0, a0, b0, c0, x00, y00, lambda00, a00, b00, c00, maxDepth, stream); + resampleStream.lineEnd = lineEnd; + lineEnd(); + } + + return 
resampleStream; + }; +} + +var transformRadians = transformer({ + point: function(x, y) { + this.stream.point(x * radians, y * radians); + } +}); + +function transformRotate(rotate) { + return transformer({ + point: function(x, y) { + var r = rotate(x, y); + return this.stream.point(r[0], r[1]); + } + }); +} + +function scaleTranslate(k, dx, dy, sx, sy) { + function transform(x, y) { + x *= sx; y *= sy; + return [dx + k * x, dy - k * y]; + } + transform.invert = function(x, y) { + return [(x - dx) / k * sx, (dy - y) / k * sy]; + }; + return transform; +} + +function scaleTranslateRotate(k, dx, dy, sx, sy, alpha) { + var cosAlpha = cos$1(alpha), + sinAlpha = sin$1(alpha), + a = cosAlpha * k, + b = sinAlpha * k, + ai = cosAlpha / k, + bi = sinAlpha / k, + ci = (sinAlpha * dy - cosAlpha * dx) / k, + fi = (sinAlpha * dx + cosAlpha * dy) / k; + function transform(x, y) { + x *= sx; y *= sy; + return [a * x - b * y + dx, dy - b * x - a * y]; + } + transform.invert = function(x, y) { + return [sx * (ai * x - bi * y + ci), sy * (fi - bi * x - ai * y)]; + }; + return transform; +} + +function projection(project) { + return projectionMutator(function() { return project; })(); +} + +function projectionMutator(projectAt) { + var project, + k = 150, // scale + x = 480, y = 250, // translate + lambda = 0, phi = 0, // center + deltaLambda = 0, deltaPhi = 0, deltaGamma = 0, rotate, // pre-rotate + alpha = 0, // post-rotate angle + sx = 1, // reflectX + sy = 1, // reflectX + theta = null, preclip = clipAntimeridian, // pre-clip angle + x0 = null, y0, x1, y1, postclip = identity$4, // post-clip extent + delta2 = 0.5, // precision + projectResample, + projectTransform, + projectRotateTransform, + cache, + cacheStream; + + function projection(point) { + return projectRotateTransform(point[0] * radians, point[1] * radians); + } + + function invert(point) { + point = projectRotateTransform.invert(point[0], point[1]); + return point && [point[0] * degrees$1, point[1] * degrees$1]; + } + 
+ projection.stream = function(stream) { + return cache && cacheStream === stream ? cache : cache = transformRadians(transformRotate(rotate)(preclip(projectResample(postclip(cacheStream = stream))))); + }; + + projection.preclip = function(_) { + return arguments.length ? (preclip = _, theta = undefined, reset()) : preclip; + }; + + projection.postclip = function(_) { + return arguments.length ? (postclip = _, x0 = y0 = x1 = y1 = null, reset()) : postclip; + }; + + projection.clipAngle = function(_) { + return arguments.length ? (preclip = +_ ? clipCircle(theta = _ * radians) : (theta = null, clipAntimeridian), reset()) : theta * degrees$1; + }; + + projection.clipExtent = function(_) { + return arguments.length ? (postclip = _ == null ? (x0 = y0 = x1 = y1 = null, identity$4) : clipRectangle(x0 = +_[0][0], y0 = +_[0][1], x1 = +_[1][0], y1 = +_[1][1]), reset()) : x0 == null ? null : [[x0, y0], [x1, y1]]; + }; + + projection.scale = function(_) { + return arguments.length ? (k = +_, recenter()) : k; + }; + + projection.translate = function(_) { + return arguments.length ? (x = +_[0], y = +_[1], recenter()) : [x, y]; + }; + + projection.center = function(_) { + return arguments.length ? (lambda = _[0] % 360 * radians, phi = _[1] % 360 * radians, recenter()) : [lambda * degrees$1, phi * degrees$1]; + }; + + projection.rotate = function(_) { + return arguments.length ? (deltaLambda = _[0] % 360 * radians, deltaPhi = _[1] % 360 * radians, deltaGamma = _.length > 2 ? _[2] % 360 * radians : 0, recenter()) : [deltaLambda * degrees$1, deltaPhi * degrees$1, deltaGamma * degrees$1]; + }; + + projection.angle = function(_) { + return arguments.length ? (alpha = _ % 360 * radians, recenter()) : alpha * degrees$1; + }; + + projection.reflectX = function(_) { + return arguments.length ? (sx = _ ? -1 : 1, recenter()) : sx < 0; + }; + + projection.reflectY = function(_) { + return arguments.length ? (sy = _ ? 
-1 : 1, recenter()) : sy < 0; + }; + + projection.precision = function(_) { + return arguments.length ? (projectResample = resample(projectTransform, delta2 = _ * _), reset()) : sqrt(delta2); + }; + + projection.fitExtent = function(extent, object) { + return fitExtent(projection, extent, object); + }; + + projection.fitSize = function(size, object) { + return fitSize(projection, size, object); + }; + + projection.fitWidth = function(width, object) { + return fitWidth(projection, width, object); + }; + + projection.fitHeight = function(height, object) { + return fitHeight(projection, height, object); + }; + + function recenter() { + var center = scaleTranslateRotate(k, 0, 0, sx, sy, alpha).apply(null, project(lambda, phi)), + transform = (alpha ? scaleTranslateRotate : scaleTranslate)(k, x - center[0], y - center[1], sx, sy, alpha); + rotate = rotateRadians(deltaLambda, deltaPhi, deltaGamma); + projectTransform = compose(project, transform); + projectRotateTransform = compose(rotate, projectTransform); + projectResample = resample(projectTransform, delta2); + return reset(); + } + + function reset() { + cache = cacheStream = null; + return projection; + } + + return function() { + project = projectAt.apply(this, arguments); + projection.invert = project.invert && invert; + return recenter(); + }; +} + +function conicProjection(projectAt) { + var phi0 = 0, + phi1 = pi$3 / 3, + m = projectionMutator(projectAt), + p = m(phi0, phi1); + + p.parallels = function(_) { + return arguments.length ? 
m(phi0 = _[0] * radians, phi1 = _[1] * radians) : [phi0 * degrees$1, phi1 * degrees$1]; + }; + + return p; +} + +function cylindricalEqualAreaRaw(phi0) { + var cosPhi0 = cos$1(phi0); + + function forward(lambda, phi) { + return [lambda * cosPhi0, sin$1(phi) / cosPhi0]; + } + + forward.invert = function(x, y) { + return [x / cosPhi0, asin(y * cosPhi0)]; + }; + + return forward; +} + +function conicEqualAreaRaw(y0, y1) { + var sy0 = sin$1(y0), n = (sy0 + sin$1(y1)) / 2; + + // Are the parallels symmetrical around the Equator? + if (abs(n) < epsilon$2) return cylindricalEqualAreaRaw(y0); + + var c = 1 + sy0 * (2 * n - sy0), r0 = sqrt(c) / n; + + function project(x, y) { + var r = sqrt(c - 2 * n * sin$1(y)) / n; + return [r * sin$1(x *= n), r0 - r * cos$1(x)]; + } + + project.invert = function(x, y) { + var r0y = r0 - y, + l = atan2(x, abs(r0y)) * sign(r0y); + if (r0y * n < 0) + l -= pi$3 * sign(x) * sign(r0y); + return [l / n, asin((c - (x * x + r0y * r0y) * n * n) / (2 * n))]; + }; + + return project; +} + +function conicEqualArea() { + return conicProjection(conicEqualAreaRaw) + .scale(155.424) + .center([0, 33.6442]); +} + +function albers() { + return conicEqualArea() + .parallels([29.5, 45.5]) + .scale(1070) + .translate([480, 250]) + .rotate([96, 0]) + .center([-0.6, 38.7]); +} + +// The projections must have mutually exclusive clip regions on the sphere, +// as this will avoid emitting interleaving lines and polygons. 
+function multiplex(streams) { + var n = streams.length; + return { + point: function(x, y) { var i = -1; while (++i < n) streams[i].point(x, y); }, + sphere: function() { var i = -1; while (++i < n) streams[i].sphere(); }, + lineStart: function() { var i = -1; while (++i < n) streams[i].lineStart(); }, + lineEnd: function() { var i = -1; while (++i < n) streams[i].lineEnd(); }, + polygonStart: function() { var i = -1; while (++i < n) streams[i].polygonStart(); }, + polygonEnd: function() { var i = -1; while (++i < n) streams[i].polygonEnd(); } + }; +} + +// A composite projection for the United States, configured by default for +// 960×500. The projection also works quite well at 960×600 if you change the +// scale to 1285 and adjust the translate accordingly. The set of standard +// parallels for each region comes from USGS, which is published here: +// http://egsc.usgs.gov/isb/pubs/MapProjections/projections.html#albers +function albersUsa() { + var cache, + cacheStream, + lower48 = albers(), lower48Point, + alaska = conicEqualArea().rotate([154, 0]).center([-2, 58.5]).parallels([55, 65]), alaskaPoint, // EPSG:3338 + hawaii = conicEqualArea().rotate([157, 0]).center([-3, 19.9]).parallels([8, 18]), hawaiiPoint, // ESRI:102007 + point, pointStream = {point: function(x, y) { point = [x, y]; }}; + + function albersUsa(coordinates) { + var x = coordinates[0], y = coordinates[1]; + return point = null, + (lower48Point.point(x, y), point) + || (alaskaPoint.point(x, y), point) + || (hawaiiPoint.point(x, y), point); + } + + albersUsa.invert = function(coordinates) { + var k = lower48.scale(), + t = lower48.translate(), + x = (coordinates[0] - t[0]) / k, + y = (coordinates[1] - t[1]) / k; + return (y >= 0.120 && y < 0.234 && x >= -0.425 && x < -0.214 ? alaska + : y >= 0.166 && y < 0.234 && x >= -0.214 && x < -0.115 ? hawaii + : lower48).invert(coordinates); + }; + + albersUsa.stream = function(stream) { + return cache && cacheStream === stream ? 
cache : cache = multiplex([lower48.stream(cacheStream = stream), alaska.stream(stream), hawaii.stream(stream)]); + }; + + albersUsa.precision = function(_) { + if (!arguments.length) return lower48.precision(); + lower48.precision(_), alaska.precision(_), hawaii.precision(_); + return reset(); + }; + + albersUsa.scale = function(_) { + if (!arguments.length) return lower48.scale(); + lower48.scale(_), alaska.scale(_ * 0.35), hawaii.scale(_); + return albersUsa.translate(lower48.translate()); + }; + + albersUsa.translate = function(_) { + if (!arguments.length) return lower48.translate(); + var k = lower48.scale(), x = +_[0], y = +_[1]; + + lower48Point = lower48 + .translate(_) + .clipExtent([[x - 0.455 * k, y - 0.238 * k], [x + 0.455 * k, y + 0.238 * k]]) + .stream(pointStream); + + alaskaPoint = alaska + .translate([x - 0.307 * k, y + 0.201 * k]) + .clipExtent([[x - 0.425 * k + epsilon$2, y + 0.120 * k + epsilon$2], [x - 0.214 * k - epsilon$2, y + 0.234 * k - epsilon$2]]) + .stream(pointStream); + + hawaiiPoint = hawaii + .translate([x - 0.205 * k, y + 0.212 * k]) + .clipExtent([[x - 0.214 * k + epsilon$2, y + 0.166 * k + epsilon$2], [x - 0.115 * k - epsilon$2, y + 0.234 * k - epsilon$2]]) + .stream(pointStream); + + return reset(); + }; + + albersUsa.fitExtent = function(extent, object) { + return fitExtent(albersUsa, extent, object); + }; + + albersUsa.fitSize = function(size, object) { + return fitSize(albersUsa, size, object); + }; + + albersUsa.fitWidth = function(width, object) { + return fitWidth(albersUsa, width, object); + }; + + albersUsa.fitHeight = function(height, object) { + return fitHeight(albersUsa, height, object); + }; + + function reset() { + cache = cacheStream = null; + return albersUsa; + } + + return albersUsa.scale(1070); +} + +function azimuthalRaw(scale) { + return function(x, y) { + var cx = cos$1(x), + cy = cos$1(y), + k = scale(cx * cy); + return [ + k * cy * sin$1(x), + k * sin$1(y) + ]; + } +} + +function azimuthalInvert(angle) { + 
return function(x, y) { + var z = sqrt(x * x + y * y), + c = angle(z), + sc = sin$1(c), + cc = cos$1(c); + return [ + atan2(x * sc, z * cc), + asin(z && y * sc / z) + ]; + } +} + +var azimuthalEqualAreaRaw = azimuthalRaw(function(cxcy) { + return sqrt(2 / (1 + cxcy)); +}); + +azimuthalEqualAreaRaw.invert = azimuthalInvert(function(z) { + return 2 * asin(z / 2); +}); + +function azimuthalEqualArea() { + return projection(azimuthalEqualAreaRaw) + .scale(124.75) + .clipAngle(180 - 1e-3); +} + +var azimuthalEquidistantRaw = azimuthalRaw(function(c) { + return (c = acos(c)) && c / sin$1(c); +}); + +azimuthalEquidistantRaw.invert = azimuthalInvert(function(z) { + return z; +}); + +function azimuthalEquidistant() { + return projection(azimuthalEquidistantRaw) + .scale(79.4188) + .clipAngle(180 - 1e-3); +} + +function mercatorRaw(lambda, phi) { + return [lambda, log(tan((halfPi$2 + phi) / 2))]; +} + +mercatorRaw.invert = function(x, y) { + return [x, 2 * atan(exp(y)) - halfPi$2]; +}; + +function mercator() { + return mercatorProjection(mercatorRaw) + .scale(961 / tau$3); +} + +function mercatorProjection(project) { + var m = projection(project), + center = m.center, + scale = m.scale, + translate = m.translate, + clipExtent = m.clipExtent, + x0 = null, y0, x1, y1; // clip extent + + m.scale = function(_) { + return arguments.length ? (scale(_), reclip()) : scale(); + }; + + m.translate = function(_) { + return arguments.length ? (translate(_), reclip()) : translate(); + }; + + m.center = function(_) { + return arguments.length ? (center(_), reclip()) : center(); + }; + + m.clipExtent = function(_) { + return arguments.length ? ((_ == null ? x0 = y0 = x1 = y1 = null : (x0 = +_[0][0], y0 = +_[0][1], x1 = +_[1][0], y1 = +_[1][1])), reclip()) : x0 == null ? null : [[x0, y0], [x1, y1]]; + }; + + function reclip() { + var k = pi$3 * scale(), + t = m(rotation(m.rotate()).invert([0, 0])); + return clipExtent(x0 == null + ? 
[[t[0] - k, t[1] - k], [t[0] + k, t[1] + k]] : project === mercatorRaw + ? [[Math.max(t[0] - k, x0), y0], [Math.min(t[0] + k, x1), y1]] + : [[x0, Math.max(t[1] - k, y0)], [x1, Math.min(t[1] + k, y1)]]); + } + + return reclip(); +} + +function tany(y) { + return tan((halfPi$2 + y) / 2); +} + +function conicConformalRaw(y0, y1) { + var cy0 = cos$1(y0), + n = y0 === y1 ? sin$1(y0) : log(cy0 / cos$1(y1)) / log(tany(y1) / tany(y0)), + f = cy0 * pow(tany(y0), n) / n; + + if (!n) return mercatorRaw; + + function project(x, y) { + if (f > 0) { if (y < -halfPi$2 + epsilon$2) y = -halfPi$2 + epsilon$2; } + else { if (y > halfPi$2 - epsilon$2) y = halfPi$2 - epsilon$2; } + var r = f / pow(tany(y), n); + return [r * sin$1(n * x), f - r * cos$1(n * x)]; + } + + project.invert = function(x, y) { + var fy = f - y, r = sign(n) * sqrt(x * x + fy * fy), + l = atan2(x, abs(fy)) * sign(fy); + if (fy * n < 0) + l -= pi$3 * sign(x) * sign(fy); + return [l / n, 2 * atan(pow(f / r, 1 / n)) - halfPi$2]; + }; + + return project; +} + +function conicConformal() { + return conicProjection(conicConformalRaw) + .scale(109.5) + .parallels([30, 30]); +} + +function equirectangularRaw(lambda, phi) { + return [lambda, phi]; +} + +equirectangularRaw.invert = equirectangularRaw; + +function equirectangular() { + return projection(equirectangularRaw) + .scale(152.63); +} + +function conicEquidistantRaw(y0, y1) { + var cy0 = cos$1(y0), + n = y0 === y1 ? 
sin$1(y0) : (cy0 - cos$1(y1)) / (y1 - y0), + g = cy0 / n + y0; + + if (abs(n) < epsilon$2) return equirectangularRaw; + + function project(x, y) { + var gy = g - y, nx = n * x; + return [gy * sin$1(nx), g - gy * cos$1(nx)]; + } + + project.invert = function(x, y) { + var gy = g - y, + l = atan2(x, abs(gy)) * sign(gy); + if (gy * n < 0) + l -= pi$3 * sign(x) * sign(gy); + return [l / n, g - sign(n) * sqrt(x * x + gy * gy)]; + }; + + return project; +} + +function conicEquidistant() { + return conicProjection(conicEquidistantRaw) + .scale(131.154) + .center([0, 13.9389]); +} + +var A1 = 1.340264, + A2 = -0.081106, + A3 = 0.000893, + A4 = 0.003796, + M = sqrt(3) / 2, + iterations = 12; + +function equalEarthRaw(lambda, phi) { + var l = asin(M * sin$1(phi)), l2 = l * l, l6 = l2 * l2 * l2; + return [ + lambda * cos$1(l) / (M * (A1 + 3 * A2 * l2 + l6 * (7 * A3 + 9 * A4 * l2))), + l * (A1 + A2 * l2 + l6 * (A3 + A4 * l2)) + ]; +} + +equalEarthRaw.invert = function(x, y) { + var l = y, l2 = l * l, l6 = l2 * l2 * l2; + for (var i = 0, delta, fy, fpy; i < iterations; ++i) { + fy = l * (A1 + A2 * l2 + l6 * (A3 + A4 * l2)) - y; + fpy = A1 + 3 * A2 * l2 + l6 * (7 * A3 + 9 * A4 * l2); + l -= delta = fy / fpy, l2 = l * l, l6 = l2 * l2 * l2; + if (abs(delta) < epsilon2$1) break; + } + return [ + M * x * (A1 + 3 * A2 * l2 + l6 * (7 * A3 + 9 * A4 * l2)) / cos$1(l), + asin(sin$1(l) / M) + ]; +}; + +function equalEarth() { + return projection(equalEarthRaw) + .scale(177.158); +} + +function gnomonicRaw(x, y) { + var cy = cos$1(y), k = cos$1(x) * cy; + return [cy * sin$1(x) / k, sin$1(y) / k]; +} + +gnomonicRaw.invert = azimuthalInvert(atan); + +function gnomonic() { + return projection(gnomonicRaw) + .scale(144.049) + .clipAngle(60); +} + +function identity$5() { + var k = 1, tx = 0, ty = 0, sx = 1, sy = 1, // scale, translate and reflect + alpha = 0, ca, sa, // angle + x0 = null, y0, x1, y1, // clip extent + kx = 1, ky = 1, + transform = transformer({ + point: function(x, y) { + var p 
= projection([x, y]); + this.stream.point(p[0], p[1]); + } + }), + postclip = identity$4, + cache, + cacheStream; + + function reset() { + kx = k * sx; + ky = k * sy; + cache = cacheStream = null; + return projection; + } + + function projection (p) { + var x = p[0] * kx, y = p[1] * ky; + if (alpha) { + var t = y * ca - x * sa; + x = x * ca + y * sa; + y = t; + } + return [x + tx, y + ty]; + } + projection.invert = function(p) { + var x = p[0] - tx, y = p[1] - ty; + if (alpha) { + var t = y * ca + x * sa; + x = x * ca - y * sa; + y = t; + } + return [x / kx, y / ky]; + }; + projection.stream = function(stream) { + return cache && cacheStream === stream ? cache : cache = transform(postclip(cacheStream = stream)); + }; + projection.postclip = function(_) { + return arguments.length ? (postclip = _, x0 = y0 = x1 = y1 = null, reset()) : postclip; + }; + projection.clipExtent = function(_) { + return arguments.length ? (postclip = _ == null ? (x0 = y0 = x1 = y1 = null, identity$4) : clipRectangle(x0 = +_[0][0], y0 = +_[0][1], x1 = +_[1][0], y1 = +_[1][1]), reset()) : x0 == null ? null : [[x0, y0], [x1, y1]]; + }; + projection.scale = function(_) { + return arguments.length ? (k = +_, reset()) : k; + }; + projection.translate = function(_) { + return arguments.length ? (tx = +_[0], ty = +_[1], reset()) : [tx, ty]; + }; + projection.angle = function(_) { + return arguments.length ? (alpha = _ % 360 * radians, sa = sin$1(alpha), ca = cos$1(alpha), reset()) : alpha * degrees$1; + }; + projection.reflectX = function(_) { + return arguments.length ? (sx = _ ? -1 : 1, reset()) : sx < 0; + }; + projection.reflectY = function(_) { + return arguments.length ? (sy = _ ? 
-1 : 1, reset()) : sy < 0; + }; + projection.fitExtent = function(extent, object) { + return fitExtent(projection, extent, object); + }; + projection.fitSize = function(size, object) { + return fitSize(projection, size, object); + }; + projection.fitWidth = function(width, object) { + return fitWidth(projection, width, object); + }; + projection.fitHeight = function(height, object) { + return fitHeight(projection, height, object); + }; + + return projection; +} + +function naturalEarth1Raw(lambda, phi) { + var phi2 = phi * phi, phi4 = phi2 * phi2; + return [ + lambda * (0.8707 - 0.131979 * phi2 + phi4 * (-0.013791 + phi4 * (0.003971 * phi2 - 0.001529 * phi4))), + phi * (1.007226 + phi2 * (0.015085 + phi4 * (-0.044475 + 0.028874 * phi2 - 0.005916 * phi4))) + ]; +} + +naturalEarth1Raw.invert = function(x, y) { + var phi = y, i = 25, delta; + do { + var phi2 = phi * phi, phi4 = phi2 * phi2; + phi -= delta = (phi * (1.007226 + phi2 * (0.015085 + phi4 * (-0.044475 + 0.028874 * phi2 - 0.005916 * phi4))) - y) / + (1.007226 + phi2 * (0.015085 * 3 + phi4 * (-0.044475 * 7 + 0.028874 * 9 * phi2 - 0.005916 * 11 * phi4))); + } while (abs(delta) > epsilon$2 && --i > 0); + return [ + x / (0.8707 + (phi2 = phi * phi) * (-0.131979 + phi2 * (-0.013791 + phi2 * phi2 * phi2 * (0.003971 - 0.001529 * phi2)))), + phi + ]; +}; + +function naturalEarth1() { + return projection(naturalEarth1Raw) + .scale(175.295); +} + +function orthographicRaw(x, y) { + return [cos$1(y) * sin$1(x), sin$1(y)]; +} + +orthographicRaw.invert = azimuthalInvert(asin); + +function orthographic() { + return projection(orthographicRaw) + .scale(249.5) + .clipAngle(90 + epsilon$2); +} + +function stereographicRaw(x, y) { + var cy = cos$1(y), k = 1 + cos$1(x) * cy; + return [cy * sin$1(x) / k, sin$1(y) / k]; +} + +stereographicRaw.invert = azimuthalInvert(function(z) { + return 2 * atan(z); +}); + +function stereographic() { + return projection(stereographicRaw) + .scale(250) + .clipAngle(142); +} + +function 
transverseMercatorRaw(lambda, phi) { + return [log(tan((halfPi$2 + phi) / 2)), -lambda]; +} + +transverseMercatorRaw.invert = function(x, y) { + return [-y, 2 * atan(exp(x)) - halfPi$2]; +}; + +function transverseMercator() { + var m = mercatorProjection(transverseMercatorRaw), + center = m.center, + rotate = m.rotate; + + m.center = function(_) { + return arguments.length ? center([-_[1], _[0]]) : (_ = center(), [_[1], -_[0]]); + }; + + m.rotate = function(_) { + return arguments.length ? rotate([_[0], _[1], _.length > 2 ? _[2] + 90 : 90]) : (_ = rotate(), [_[0], _[1], _[2] - 90]); + }; + + return rotate([0, 0, 90]) + .scale(159.155); +} + +function defaultSeparation(a, b) { + return a.parent === b.parent ? 1 : 2; +} + +function meanX(children) { + return children.reduce(meanXReduce, 0) / children.length; +} + +function meanXReduce(x, c) { + return x + c.x; +} + +function maxY(children) { + return 1 + children.reduce(maxYReduce, 0); +} + +function maxYReduce(y, c) { + return Math.max(y, c.y); +} + +function leafLeft(node) { + var children; + while (children = node.children) node = children[0]; + return node; +} + +function leafRight(node) { + var children; + while (children = node.children) node = children[children.length - 1]; + return node; +} + +function cluster() { + var separation = defaultSeparation, + dx = 1, + dy = 1, + nodeSize = false; + + function cluster(root) { + var previousNode, + x = 0; + + // First walk, computing the initial x & y values. + root.eachAfter(function(node) { + var children = node.children; + if (children) { + node.x = meanX(children); + node.y = maxY(children); + } else { + node.x = previousNode ? x += separation(node, previousNode) : 0; + node.y = 0; + previousNode = node; + } + }); + + var left = leafLeft(root), + right = leafRight(root), + x0 = left.x - separation(left, right) / 2, + x1 = right.x + separation(right, left) / 2; + + // Second walk, normalizing x & y to the desired size. + return root.eachAfter(nodeSize ? 
function(node) { + node.x = (node.x - root.x) * dx; + node.y = (root.y - node.y) * dy; + } : function(node) { + node.x = (node.x - x0) / (x1 - x0) * dx; + node.y = (1 - (root.y ? node.y / root.y : 1)) * dy; + }); + } + + cluster.separation = function(x) { + return arguments.length ? (separation = x, cluster) : separation; + }; + + cluster.size = function(x) { + return arguments.length ? (nodeSize = false, dx = +x[0], dy = +x[1], cluster) : (nodeSize ? null : [dx, dy]); + }; + + cluster.nodeSize = function(x) { + return arguments.length ? (nodeSize = true, dx = +x[0], dy = +x[1], cluster) : (nodeSize ? [dx, dy] : null); + }; + + return cluster; +} + +function count(node) { + var sum = 0, + children = node.children, + i = children && children.length; + if (!i) sum = 1; + else while (--i >= 0) sum += children[i].value; + node.value = sum; +} + +function node_count() { + return this.eachAfter(count); +} + +function node_each(callback) { + var node = this, current, next = [node], children, i, n; + do { + current = next.reverse(), next = []; + while (node = current.pop()) { + callback(node), children = node.children; + if (children) for (i = 0, n = children.length; i < n; ++i) { + next.push(children[i]); + } + } + } while (next.length); + return this; +} + +function node_eachBefore(callback) { + var node = this, nodes = [node], children, i; + while (node = nodes.pop()) { + callback(node), children = node.children; + if (children) for (i = children.length - 1; i >= 0; --i) { + nodes.push(children[i]); + } + } + return this; +} + +function node_eachAfter(callback) { + var node = this, nodes = [node], next = [], children, i, n; + while (node = nodes.pop()) { + next.push(node), children = node.children; + if (children) for (i = 0, n = children.length; i < n; ++i) { + nodes.push(children[i]); + } + } + while (node = next.pop()) { + callback(node); + } + return this; +} + +function node_sum(value) { + return this.eachAfter(function(node) { + var sum = +value(node.data) || 0, + 
children = node.children, + i = children && children.length; + while (--i >= 0) sum += children[i].value; + node.value = sum; + }); +} + +function node_sort(compare) { + return this.eachBefore(function(node) { + if (node.children) { + node.children.sort(compare); + } + }); +} + +function node_path(end) { + var start = this, + ancestor = leastCommonAncestor(start, end), + nodes = [start]; + while (start !== ancestor) { + start = start.parent; + nodes.push(start); + } + var k = nodes.length; + while (end !== ancestor) { + nodes.splice(k, 0, end); + end = end.parent; + } + return nodes; +} + +function leastCommonAncestor(a, b) { + if (a === b) return a; + var aNodes = a.ancestors(), + bNodes = b.ancestors(), + c = null; + a = aNodes.pop(); + b = bNodes.pop(); + while (a === b) { + c = a; + a = aNodes.pop(); + b = bNodes.pop(); + } + return c; +} + +function node_ancestors() { + var node = this, nodes = [node]; + while (node = node.parent) { + nodes.push(node); + } + return nodes; +} + +function node_descendants() { + var nodes = []; + this.each(function(node) { + nodes.push(node); + }); + return nodes; +} + +function node_leaves() { + var leaves = []; + this.eachBefore(function(node) { + if (!node.children) { + leaves.push(node); + } + }); + return leaves; +} + +function node_links() { + var root = this, links = []; + root.each(function(node) { + if (node !== root) { // Don’t include the root’s parent, if any. 
+ links.push({source: node.parent, target: node}); + } + }); + return links; +} + +function hierarchy(data, children) { + var root = new Node(data), + valued = +data.value && (root.value = data.value), + node, + nodes = [root], + child, + childs, + i, + n; + + if (children == null) children = defaultChildren; + + while (node = nodes.pop()) { + if (valued) node.value = +node.data.value; + if ((childs = children(node.data)) && (n = childs.length)) { + node.children = new Array(n); + for (i = n - 1; i >= 0; --i) { + nodes.push(child = node.children[i] = new Node(childs[i])); + child.parent = node; + child.depth = node.depth + 1; + } + } + } + + return root.eachBefore(computeHeight); +} + +function node_copy() { + return hierarchy(this).eachBefore(copyData); +} + +function defaultChildren(d) { + return d.children; +} + +function copyData(node) { + node.data = node.data.data; +} + +function computeHeight(node) { + var height = 0; + do node.height = height; + while ((node = node.parent) && (node.height < ++height)); +} + +function Node(data) { + this.data = data; + this.depth = + this.height = 0; + this.parent = null; +} + +Node.prototype = hierarchy.prototype = { + constructor: Node, + count: node_count, + each: node_each, + eachAfter: node_eachAfter, + eachBefore: node_eachBefore, + sum: node_sum, + sort: node_sort, + path: node_path, + ancestors: node_ancestors, + descendants: node_descendants, + leaves: node_leaves, + links: node_links, + copy: node_copy +}; + +var slice$4 = Array.prototype.slice; + +function shuffle$1(array) { + var m = array.length, + t, + i; + + while (m) { + i = Math.random() * m-- | 0; + t = array[m]; + array[m] = array[i]; + array[i] = t; + } + + return array; +} + +function enclose(circles) { + var i = 0, n = (circles = shuffle$1(slice$4.call(circles))).length, B = [], p, e; + + while (i < n) { + p = circles[i]; + if (e && enclosesWeak(e, p)) ++i; + else e = encloseBasis(B = extendBasis(B, p)), i = 0; + } + + return e; +} + +function 
extendBasis(B, p) { + var i, j; + + if (enclosesWeakAll(p, B)) return [p]; + + // If we get here then B must have at least one element. + for (i = 0; i < B.length; ++i) { + if (enclosesNot(p, B[i]) + && enclosesWeakAll(encloseBasis2(B[i], p), B)) { + return [B[i], p]; + } + } + + // If we get here then B must have at least two elements. + for (i = 0; i < B.length - 1; ++i) { + for (j = i + 1; j < B.length; ++j) { + if (enclosesNot(encloseBasis2(B[i], B[j]), p) + && enclosesNot(encloseBasis2(B[i], p), B[j]) + && enclosesNot(encloseBasis2(B[j], p), B[i]) + && enclosesWeakAll(encloseBasis3(B[i], B[j], p), B)) { + return [B[i], B[j], p]; + } + } + } + + // If we get here then something is very wrong. + throw new Error; +} + +function enclosesNot(a, b) { + var dr = a.r - b.r, dx = b.x - a.x, dy = b.y - a.y; + return dr < 0 || dr * dr < dx * dx + dy * dy; +} + +function enclosesWeak(a, b) { + var dr = a.r - b.r + 1e-6, dx = b.x - a.x, dy = b.y - a.y; + return dr > 0 && dr * dr > dx * dx + dy * dy; +} + +function enclosesWeakAll(a, B) { + for (var i = 0; i < B.length; ++i) { + if (!enclosesWeak(a, B[i])) { + return false; + } + } + return true; +} + +function encloseBasis(B) { + switch (B.length) { + case 1: return encloseBasis1(B[0]); + case 2: return encloseBasis2(B[0], B[1]); + case 3: return encloseBasis3(B[0], B[1], B[2]); + } +} + +function encloseBasis1(a) { + return { + x: a.x, + y: a.y, + r: a.r + }; +} + +function encloseBasis2(a, b) { + var x1 = a.x, y1 = a.y, r1 = a.r, + x2 = b.x, y2 = b.y, r2 = b.r, + x21 = x2 - x1, y21 = y2 - y1, r21 = r2 - r1, + l = Math.sqrt(x21 * x21 + y21 * y21); + return { + x: (x1 + x2 + x21 / l * r21) / 2, + y: (y1 + y2 + y21 / l * r21) / 2, + r: (l + r1 + r2) / 2 + }; +} + +function encloseBasis3(a, b, c) { + var x1 = a.x, y1 = a.y, r1 = a.r, + x2 = b.x, y2 = b.y, r2 = b.r, + x3 = c.x, y3 = c.y, r3 = c.r, + a2 = x1 - x2, + a3 = x1 - x3, + b2 = y1 - y2, + b3 = y1 - y3, + c2 = r2 - r1, + c3 = r3 - r1, + d1 = x1 * x1 + y1 * y1 - r1 * 
r1, + d2 = d1 - x2 * x2 - y2 * y2 + r2 * r2, + d3 = d1 - x3 * x3 - y3 * y3 + r3 * r3, + ab = a3 * b2 - a2 * b3, + xa = (b2 * d3 - b3 * d2) / (ab * 2) - x1, + xb = (b3 * c2 - b2 * c3) / ab, + ya = (a3 * d2 - a2 * d3) / (ab * 2) - y1, + yb = (a2 * c3 - a3 * c2) / ab, + A = xb * xb + yb * yb - 1, + B = 2 * (r1 + xa * xb + ya * yb), + C = xa * xa + ya * ya - r1 * r1, + r = -(A ? (B + Math.sqrt(B * B - 4 * A * C)) / (2 * A) : C / B); + return { + x: x1 + xa + xb * r, + y: y1 + ya + yb * r, + r: r + }; +} + +function place(b, a, c) { + var dx = b.x - a.x, x, a2, + dy = b.y - a.y, y, b2, + d2 = dx * dx + dy * dy; + if (d2) { + a2 = a.r + c.r, a2 *= a2; + b2 = b.r + c.r, b2 *= b2; + if (a2 > b2) { + x = (d2 + b2 - a2) / (2 * d2); + y = Math.sqrt(Math.max(0, b2 / d2 - x * x)); + c.x = b.x - x * dx - y * dy; + c.y = b.y - x * dy + y * dx; + } else { + x = (d2 + a2 - b2) / (2 * d2); + y = Math.sqrt(Math.max(0, a2 / d2 - x * x)); + c.x = a.x + x * dx - y * dy; + c.y = a.y + x * dy + y * dx; + } + } else { + c.x = a.x + c.r; + c.y = a.y; + } +} + +function intersects(a, b) { + var dr = a.r + b.r - 1e-6, dx = b.x - a.x, dy = b.y - a.y; + return dr > 0 && dr * dr > dx * dx + dy * dy; +} + +function score(node) { + var a = node._, + b = node.next._, + ab = a.r + b.r, + dx = (a.x * b.r + b.x * a.r) / ab, + dy = (a.y * b.r + b.y * a.r) / ab; + return dx * dx + dy * dy; +} + +function Node$1(circle) { + this._ = circle; + this.next = null; + this.previous = null; +} + +function packEnclose(circles) { + if (!(n = circles.length)) return 0; + + var a, b, c, n, aa, ca, i, j, k, sj, sk; + + // Place the first circle. + a = circles[0], a.x = 0, a.y = 0; + if (!(n > 1)) return a.r; + + // Place the second circle. + b = circles[1], a.x = -b.r, b.x = a.r, b.y = 0; + if (!(n > 2)) return a.r + b.r; + + // Place the third circle. + place(b, a, c = circles[2]); + + // Initialize the front-chain using the first three circles a, b and c. 
+ a = new Node$1(a), b = new Node$1(b), c = new Node$1(c); + a.next = c.previous = b; + b.next = a.previous = c; + c.next = b.previous = a; + + // Attempt to place each remaining circle… + pack: for (i = 3; i < n; ++i) { + place(a._, b._, c = circles[i]), c = new Node$1(c); + + // Find the closest intersecting circle on the front-chain, if any. + // “Closeness” is determined by linear distance along the front-chain. + // “Ahead” or “behind” is likewise determined by linear distance. + j = b.next, k = a.previous, sj = b._.r, sk = a._.r; + do { + if (sj <= sk) { + if (intersects(j._, c._)) { + b = j, a.next = b, b.previous = a, --i; + continue pack; + } + sj += j._.r, j = j.next; + } else { + if (intersects(k._, c._)) { + a = k, a.next = b, b.previous = a, --i; + continue pack; + } + sk += k._.r, k = k.previous; + } + } while (j !== k.next); + + // Success! Insert the new circle c between a and b. + c.previous = a, c.next = b, a.next = b.previous = b = c; + + // Compute the new closest circle pair to the centroid. + aa = score(a); + while ((c = c.next) !== b) { + if ((ca = score(c)) < aa) { + a = c, aa = ca; + } + } + b = a.next; + } + + // Compute the enclosing circle of the front chain. + a = [b._], c = b; while ((c = c.next) !== b) a.push(c._); c = enclose(a); + + // Translate the circles to put the enclosing circle around the origin. + for (i = 0; i < n; ++i) a = circles[i], a.x -= c.x, a.y -= c.y; + + return c.r; +} + +function siblings(circles) { + packEnclose(circles); + return circles; +} + +function optional(f) { + return f == null ? 
null : required(f); +} + +function required(f) { + if (typeof f !== "function") throw new Error; + return f; +} + +function constantZero() { + return 0; +} + +function constant$9(x) { + return function() { + return x; + }; +} + +function defaultRadius$1(d) { + return Math.sqrt(d.value); +} + +function index$2() { + var radius = null, + dx = 1, + dy = 1, + padding = constantZero; + + function pack(root) { + root.x = dx / 2, root.y = dy / 2; + if (radius) { + root.eachBefore(radiusLeaf(radius)) + .eachAfter(packChildren(padding, 0.5)) + .eachBefore(translateChild(1)); + } else { + root.eachBefore(radiusLeaf(defaultRadius$1)) + .eachAfter(packChildren(constantZero, 1)) + .eachAfter(packChildren(padding, root.r / Math.min(dx, dy))) + .eachBefore(translateChild(Math.min(dx, dy) / (2 * root.r))); + } + return root; + } + + pack.radius = function(x) { + return arguments.length ? (radius = optional(x), pack) : radius; + }; + + pack.size = function(x) { + return arguments.length ? (dx = +x[0], dy = +x[1], pack) : [dx, dy]; + }; + + pack.padding = function(x) { + return arguments.length ? (padding = typeof x === "function" ? 
x : constant$9(+x), pack) : padding; + }; + + return pack; +} + +function radiusLeaf(radius) { + return function(node) { + if (!node.children) { + node.r = Math.max(0, +radius(node) || 0); + } + }; +} + +function packChildren(padding, k) { + return function(node) { + if (children = node.children) { + var children, + i, + n = children.length, + r = padding(node) * k || 0, + e; + + if (r) for (i = 0; i < n; ++i) children[i].r += r; + e = packEnclose(children); + if (r) for (i = 0; i < n; ++i) children[i].r -= r; + node.r = e + r; + } + }; +} + +function translateChild(k) { + return function(node) { + var parent = node.parent; + node.r *= k; + if (parent) { + node.x = parent.x + k * node.x; + node.y = parent.y + k * node.y; + } + }; +} + +function roundNode(node) { + node.x0 = Math.round(node.x0); + node.y0 = Math.round(node.y0); + node.x1 = Math.round(node.x1); + node.y1 = Math.round(node.y1); +} + +function treemapDice(parent, x0, y0, x1, y1) { + var nodes = parent.children, + node, + i = -1, + n = nodes.length, + k = parent.value && (x1 - x0) / parent.value; + + while (++i < n) { + node = nodes[i], node.y0 = y0, node.y1 = y1; + node.x0 = x0, node.x1 = x0 += node.value * k; + } +} + +function partition() { + var dx = 1, + dy = 1, + padding = 0, + round = false; + + function partition(root) { + var n = root.height + 1; + root.x0 = + root.y0 = padding; + root.x1 = dx; + root.y1 = dy / n; + root.eachBefore(positionNode(dy, n)); + if (round) root.eachBefore(roundNode); + return root; + } + + function positionNode(dy, n) { + return function(node) { + if (node.children) { + treemapDice(node, node.x0, dy * (node.depth + 1) / n, node.x1, dy * (node.depth + 2) / n); + } + var x0 = node.x0, + y0 = node.y0, + x1 = node.x1 - padding, + y1 = node.y1 - padding; + if (x1 < x0) x0 = x1 = (x0 + x1) / 2; + if (y1 < y0) y0 = y1 = (y0 + y1) / 2; + node.x0 = x0; + node.y0 = y0; + node.x1 = x1; + node.y1 = y1; + }; + } + + partition.round = function(x) { + return arguments.length ? 
(round = !!x, partition) : round; + }; + + partition.size = function(x) { + return arguments.length ? (dx = +x[0], dy = +x[1], partition) : [dx, dy]; + }; + + partition.padding = function(x) { + return arguments.length ? (padding = +x, partition) : padding; + }; + + return partition; +} + +var keyPrefix$1 = "$", // Protect against keys like “__proto__”. + preroot = {depth: -1}, + ambiguous = {}; + +function defaultId(d) { + return d.id; +} + +function defaultParentId(d) { + return d.parentId; +} + +function stratify() { + var id = defaultId, + parentId = defaultParentId; + + function stratify(data) { + var d, + i, + n = data.length, + root, + parent, + node, + nodes = new Array(n), + nodeId, + nodeKey, + nodeByKey = {}; + + for (i = 0; i < n; ++i) { + d = data[i], node = nodes[i] = new Node(d); + if ((nodeId = id(d, i, data)) != null && (nodeId += "")) { + nodeKey = keyPrefix$1 + (node.id = nodeId); + nodeByKey[nodeKey] = nodeKey in nodeByKey ? ambiguous : node; + } + } + + for (i = 0; i < n; ++i) { + node = nodes[i], nodeId = parentId(data[i], i, data); + if (nodeId == null || !(nodeId += "")) { + if (root) throw new Error("multiple roots"); + root = node; + } else { + parent = nodeByKey[keyPrefix$1 + nodeId]; + if (!parent) throw new Error("missing: " + nodeId); + if (parent === ambiguous) throw new Error("ambiguous: " + nodeId); + if (parent.children) parent.children.push(node); + else parent.children = [node]; + node.parent = parent; + } + } + + if (!root) throw new Error("no root"); + root.parent = preroot; + root.eachBefore(function(node) { node.depth = node.parent.depth + 1; --n; }).eachBefore(computeHeight); + root.parent = null; + if (n > 0) throw new Error("cycle"); + + return root; + } + + stratify.id = function(x) { + return arguments.length ? (id = required(x), stratify) : id; + }; + + stratify.parentId = function(x) { + return arguments.length ? 
(parentId = required(x), stratify) : parentId; + }; + + return stratify; +} + +function defaultSeparation$1(a, b) { + return a.parent === b.parent ? 1 : 2; +} + +// function radialSeparation(a, b) { +// return (a.parent === b.parent ? 1 : 2) / a.depth; +// } + +// This function is used to traverse the left contour of a subtree (or +// subforest). It returns the successor of v on this contour. This successor is +// either given by the leftmost child of v or by the thread of v. The function +// returns null if and only if v is on the highest level of its subtree. +function nextLeft(v) { + var children = v.children; + return children ? children[0] : v.t; +} + +// This function works analogously to nextLeft. +function nextRight(v) { + var children = v.children; + return children ? children[children.length - 1] : v.t; +} + +// Shifts the current subtree rooted at w+. This is done by increasing +// prelim(w+) and mod(w+) by shift. +function moveSubtree(wm, wp, shift) { + var change = shift / (wp.i - wm.i); + wp.c -= change; + wp.s += shift; + wm.c += change; + wp.z += shift; + wp.m += shift; +} + +// All other shifts, applied to the smaller subtrees between w- and w+, are +// performed by this function. To prepare the shifts, we have to adjust +// change(w+), shift(w+), and change(w-). +function executeShifts(v) { + var shift = 0, + change = 0, + children = v.children, + i = children.length, + w; + while (--i >= 0) { + w = children[i]; + w.z += shift; + w.m += shift; + shift += w.s + (change += w.c); + } +} + +// If vi-’s ancestor is a sibling of v, returns vi-’s ancestor. Otherwise, +// returns the specified (default) ancestor. +function nextAncestor(vim, v, ancestor) { + return vim.a.parent === v.parent ? 
vim.a : ancestor; +} + +function TreeNode(node, i) { + this._ = node; + this.parent = null; + this.children = null; + this.A = null; // default ancestor + this.a = this; // ancestor + this.z = 0; // prelim + this.m = 0; // mod + this.c = 0; // change + this.s = 0; // shift + this.t = null; // thread + this.i = i; // number +} + +TreeNode.prototype = Object.create(Node.prototype); + +function treeRoot(root) { + var tree = new TreeNode(root, 0), + node, + nodes = [tree], + child, + children, + i, + n; + + while (node = nodes.pop()) { + if (children = node._.children) { + node.children = new Array(n = children.length); + for (i = n - 1; i >= 0; --i) { + nodes.push(child = node.children[i] = new TreeNode(children[i], i)); + child.parent = node; + } + } + } + + (tree.parent = new TreeNode(null, 0)).children = [tree]; + return tree; +} + +// Node-link tree diagram using the Reingold-Tilford "tidy" algorithm +function tree() { + var separation = defaultSeparation$1, + dx = 1, + dy = 1, + nodeSize = null; + + function tree(root) { + var t = treeRoot(root); + + // Compute the layout using Buchheim et al.’s algorithm. + t.eachAfter(firstWalk), t.parent.m = -t.z; + t.eachBefore(secondWalk); + + // If a fixed node size is specified, scale x and y. + if (nodeSize) root.eachBefore(sizeNode); + + // If a fixed tree size is specified, scale x and y based on the extent. + // Compute the left-most, right-most, and depth-most nodes for extents. + else { + var left = root, + right = root, + bottom = root; + root.eachBefore(function(node) { + if (node.x < left.x) left = node; + if (node.x > right.x) right = node; + if (node.depth > bottom.depth) bottom = node; + }); + var s = left === right ? 1 : separation(left, right) / 2, + tx = s - left.x, + kx = dx / (right.x + s + tx), + ky = dy / (bottom.depth || 1); + root.eachBefore(function(node) { + node.x = (node.x + tx) * kx; + node.y = node.depth * ky; + }); + } + + return root; + } + + // Computes a preliminary x-coordinate for v. 
Before that, FIRST WALK is + // applied recursively to the children of v, as well as the function + // APPORTION. After spacing out the children by calling EXECUTE SHIFTS, the + // node v is placed to the midpoint of its outermost children. + function firstWalk(v) { + var children = v.children, + siblings = v.parent.children, + w = v.i ? siblings[v.i - 1] : null; + if (children) { + executeShifts(v); + var midpoint = (children[0].z + children[children.length - 1].z) / 2; + if (w) { + v.z = w.z + separation(v._, w._); + v.m = v.z - midpoint; + } else { + v.z = midpoint; + } + } else if (w) { + v.z = w.z + separation(v._, w._); + } + v.parent.A = apportion(v, w, v.parent.A || siblings[0]); + } + + // Computes all real x-coordinates by summing up the modifiers recursively. + function secondWalk(v) { + v._.x = v.z + v.parent.m; + v.m += v.parent.m; + } + + // The core of the algorithm. Here, a new subtree is combined with the + // previous subtrees. Threads are used to traverse the inside and outside + // contours of the left and right subtree up to the highest common level. The + // vertices used for the traversals are vi+, vi-, vo-, and vo+, where the + // superscript o means outside and i means inside, the subscript - means left + // subtree and + means right subtree. For summing up the modifiers along the + // contour, we use respective variables si+, si-, so-, and so+. Whenever two + // nodes of the inside contours conflict, we compute the left one of the + // greatest uncommon ancestors using the function ANCESTOR and call MOVE + // SUBTREE to shift the subtree and prepare the shifts of smaller subtrees. + // Finally, we add a new thread (if necessary). 
+ function apportion(v, w, ancestor) { + if (w) { + var vip = v, + vop = v, + vim = w, + vom = vip.parent.children[0], + sip = vip.m, + sop = vop.m, + sim = vim.m, + som = vom.m, + shift; + while (vim = nextRight(vim), vip = nextLeft(vip), vim && vip) { + vom = nextLeft(vom); + vop = nextRight(vop); + vop.a = v; + shift = vim.z + sim - vip.z - sip + separation(vim._, vip._); + if (shift > 0) { + moveSubtree(nextAncestor(vim, v, ancestor), v, shift); + sip += shift; + sop += shift; + } + sim += vim.m; + sip += vip.m; + som += vom.m; + sop += vop.m; + } + if (vim && !nextRight(vop)) { + vop.t = vim; + vop.m += sim - sop; + } + if (vip && !nextLeft(vom)) { + vom.t = vip; + vom.m += sip - som; + ancestor = v; + } + } + return ancestor; + } + + function sizeNode(node) { + node.x *= dx; + node.y = node.depth * dy; + } + + tree.separation = function(x) { + return arguments.length ? (separation = x, tree) : separation; + }; + + tree.size = function(x) { + return arguments.length ? (nodeSize = false, dx = +x[0], dy = +x[1], tree) : (nodeSize ? null : [dx, dy]); + }; + + tree.nodeSize = function(x) { + return arguments.length ? (nodeSize = true, dx = +x[0], dy = +x[1], tree) : (nodeSize ? [dx, dy] : null); + }; + + return tree; +} + +function treemapSlice(parent, x0, y0, x1, y1) { + var nodes = parent.children, + node, + i = -1, + n = nodes.length, + k = parent.value && (y1 - y0) / parent.value; + + while (++i < n) { + node = nodes[i], node.x0 = x0, node.x1 = x1; + node.y0 = y0, node.y1 = y0 += node.value * k; + } +} + +var phi = (1 + Math.sqrt(5)) / 2; + +function squarifyRatio(ratio, parent, x0, y0, x1, y1) { + var rows = [], + nodes = parent.children, + row, + nodeValue, + i0 = 0, + i1 = 0, + n = nodes.length, + dx, dy, + value = parent.value, + sumValue, + minValue, + maxValue, + newRatio, + minRatio, + alpha, + beta; + + while (i0 < n) { + dx = x1 - x0, dy = y1 - y0; + + // Find the next non-empty node. 
+ do sumValue = nodes[i1++].value; while (!sumValue && i1 < n); + minValue = maxValue = sumValue; + alpha = Math.max(dy / dx, dx / dy) / (value * ratio); + beta = sumValue * sumValue * alpha; + minRatio = Math.max(maxValue / beta, beta / minValue); + + // Keep adding nodes while the aspect ratio maintains or improves. + for (; i1 < n; ++i1) { + sumValue += nodeValue = nodes[i1].value; + if (nodeValue < minValue) minValue = nodeValue; + if (nodeValue > maxValue) maxValue = nodeValue; + beta = sumValue * sumValue * alpha; + newRatio = Math.max(maxValue / beta, beta / minValue); + if (newRatio > minRatio) { sumValue -= nodeValue; break; } + minRatio = newRatio; + } + + // Position and record the row orientation. + rows.push(row = {value: sumValue, dice: dx < dy, children: nodes.slice(i0, i1)}); + if (row.dice) treemapDice(row, x0, y0, x1, value ? y0 += dy * sumValue / value : y1); + else treemapSlice(row, x0, y0, value ? x0 += dx * sumValue / value : x1, y1); + value -= sumValue, i0 = i1; + } + + return rows; +} + +var squarify = (function custom(ratio) { + + function squarify(parent, x0, y0, x1, y1) { + squarifyRatio(ratio, parent, x0, y0, x1, y1); + } + + squarify.ratio = function(x) { + return custom((x = +x) > 1 ? 
x : 1); + }; + + return squarify; +})(phi); + +function index$3() { + var tile = squarify, + round = false, + dx = 1, + dy = 1, + paddingStack = [0], + paddingInner = constantZero, + paddingTop = constantZero, + paddingRight = constantZero, + paddingBottom = constantZero, + paddingLeft = constantZero; + + function treemap(root) { + root.x0 = + root.y0 = 0; + root.x1 = dx; + root.y1 = dy; + root.eachBefore(positionNode); + paddingStack = [0]; + if (round) root.eachBefore(roundNode); + return root; + } + + function positionNode(node) { + var p = paddingStack[node.depth], + x0 = node.x0 + p, + y0 = node.y0 + p, + x1 = node.x1 - p, + y1 = node.y1 - p; + if (x1 < x0) x0 = x1 = (x0 + x1) / 2; + if (y1 < y0) y0 = y1 = (y0 + y1) / 2; + node.x0 = x0; + node.y0 = y0; + node.x1 = x1; + node.y1 = y1; + if (node.children) { + p = paddingStack[node.depth + 1] = paddingInner(node) / 2; + x0 += paddingLeft(node) - p; + y0 += paddingTop(node) - p; + x1 -= paddingRight(node) - p; + y1 -= paddingBottom(node) - p; + if (x1 < x0) x0 = x1 = (x0 + x1) / 2; + if (y1 < y0) y0 = y1 = (y0 + y1) / 2; + tile(node, x0, y0, x1, y1); + } + } + + treemap.round = function(x) { + return arguments.length ? (round = !!x, treemap) : round; + }; + + treemap.size = function(x) { + return arguments.length ? (dx = +x[0], dy = +x[1], treemap) : [dx, dy]; + }; + + treemap.tile = function(x) { + return arguments.length ? (tile = required(x), treemap) : tile; + }; + + treemap.padding = function(x) { + return arguments.length ? treemap.paddingInner(x).paddingOuter(x) : treemap.paddingInner(); + }; + + treemap.paddingInner = function(x) { + return arguments.length ? (paddingInner = typeof x === "function" ? x : constant$9(+x), treemap) : paddingInner; + }; + + treemap.paddingOuter = function(x) { + return arguments.length ? treemap.paddingTop(x).paddingRight(x).paddingBottom(x).paddingLeft(x) : treemap.paddingTop(); + }; + + treemap.paddingTop = function(x) { + return arguments.length ? 
(paddingTop = typeof x === "function" ? x : constant$9(+x), treemap) : paddingTop; + }; + + treemap.paddingRight = function(x) { + return arguments.length ? (paddingRight = typeof x === "function" ? x : constant$9(+x), treemap) : paddingRight; + }; + + treemap.paddingBottom = function(x) { + return arguments.length ? (paddingBottom = typeof x === "function" ? x : constant$9(+x), treemap) : paddingBottom; + }; + + treemap.paddingLeft = function(x) { + return arguments.length ? (paddingLeft = typeof x === "function" ? x : constant$9(+x), treemap) : paddingLeft; + }; + + return treemap; +} + +function binary(parent, x0, y0, x1, y1) { + var nodes = parent.children, + i, n = nodes.length, + sum, sums = new Array(n + 1); + + for (sums[0] = sum = i = 0; i < n; ++i) { + sums[i + 1] = sum += nodes[i].value; + } + + partition(0, n, parent.value, x0, y0, x1, y1); + + function partition(i, j, value, x0, y0, x1, y1) { + if (i >= j - 1) { + var node = nodes[i]; + node.x0 = x0, node.y0 = y0; + node.x1 = x1, node.y1 = y1; + return; + } + + var valueOffset = sums[i], + valueTarget = (value / 2) + valueOffset, + k = i + 1, + hi = j - 1; + + while (k < hi) { + var mid = k + hi >>> 1; + if (sums[mid] < valueTarget) k = mid + 1; + else hi = mid; + } + + if ((valueTarget - sums[k - 1]) < (sums[k] - valueTarget) && i + 1 < k) --k; + + var valueLeft = sums[k] - valueOffset, + valueRight = value - valueLeft; + + if ((x1 - x0) > (y1 - y0)) { + var xk = (x0 * valueRight + x1 * valueLeft) / value; + partition(i, k, valueLeft, x0, y0, xk, y1); + partition(k, j, valueRight, xk, y0, x1, y1); + } else { + var yk = (y0 * valueRight + y1 * valueLeft) / value; + partition(i, k, valueLeft, x0, y0, x1, yk); + partition(k, j, valueRight, x0, yk, x1, y1); + } + } +} + +function sliceDice(parent, x0, y0, x1, y1) { + (parent.depth & 1 ? 
treemapSlice : treemapDice)(parent, x0, y0, x1, y1); +} + +var resquarify = (function custom(ratio) { + + function resquarify(parent, x0, y0, x1, y1) { + if ((rows = parent._squarify) && (rows.ratio === ratio)) { + var rows, + row, + nodes, + i, + j = -1, + n, + m = rows.length, + value = parent.value; + + while (++j < m) { + row = rows[j], nodes = row.children; + for (i = row.value = 0, n = nodes.length; i < n; ++i) row.value += nodes[i].value; + if (row.dice) treemapDice(row, x0, y0, x1, y0 += (y1 - y0) * row.value / value); + else treemapSlice(row, x0, y0, x0 += (x1 - x0) * row.value / value, y1); + value -= row.value; + } + } else { + parent._squarify = rows = squarifyRatio(ratio, parent, x0, y0, x1, y1); + rows.ratio = ratio; + } + } + + resquarify.ratio = function(x) { + return custom((x = +x) > 1 ? x : 1); + }; + + return resquarify; +})(phi); + +function area$2(polygon) { + var i = -1, + n = polygon.length, + a, + b = polygon[n - 1], + area = 0; + + while (++i < n) { + a = b; + b = polygon[i]; + area += a[1] * b[0] - a[0] * b[1]; + } + + return area / 2; +} + +function centroid$1(polygon) { + var i = -1, + n = polygon.length, + x = 0, + y = 0, + a, + b = polygon[n - 1], + c, + k = 0; + + while (++i < n) { + a = b; + b = polygon[i]; + k += c = a[0] * b[1] - b[0] * a[1]; + x += (a[0] + b[0]) * c; + y += (a[1] + b[1]) * c; + } + + return k *= 3, [x / k, y / k]; +} + +// Returns the 2D cross product of AB and AC vectors, i.e., the z-component of +// the 3D cross product in a quadrant I Cartesian coordinate system (+x is +// right, +y is up). Returns a positive value if ABC is counter-clockwise, +// negative if clockwise, and zero if the points are collinear. +function cross$1(a, b, c) { + return (b[0] - a[0]) * (c[1] - a[1]) - (b[1] - a[1]) * (c[0] - a[0]); +} + +function lexicographicOrder(a, b) { + return a[0] - b[0] || a[1] - b[1]; +} + +// Computes the upper convex hull per the monotone chain algorithm. 
+// Assumes points.length >= 3, is sorted by x, unique in y. +// Returns an array of indices into points in left-to-right order. +function computeUpperHullIndexes(points) { + var n = points.length, + indexes = [0, 1], + size = 2; + + for (var i = 2; i < n; ++i) { + while (size > 1 && cross$1(points[indexes[size - 2]], points[indexes[size - 1]], points[i]) <= 0) --size; + indexes[size++] = i; + } + + return indexes.slice(0, size); // remove popped points +} + +function hull(points) { + if ((n = points.length) < 3) return null; + + var i, + n, + sortedPoints = new Array(n), + flippedPoints = new Array(n); + + for (i = 0; i < n; ++i) sortedPoints[i] = [+points[i][0], +points[i][1], i]; + sortedPoints.sort(lexicographicOrder); + for (i = 0; i < n; ++i) flippedPoints[i] = [sortedPoints[i][0], -sortedPoints[i][1]]; + + var upperIndexes = computeUpperHullIndexes(sortedPoints), + lowerIndexes = computeUpperHullIndexes(flippedPoints); + + // Construct the hull polygon, removing possible duplicate endpoints. + var skipLeft = lowerIndexes[0] === upperIndexes[0], + skipRight = lowerIndexes[lowerIndexes.length - 1] === upperIndexes[upperIndexes.length - 1], + hull = []; + + // Add upper hull in right-to-l order. + // Then add lower hull in left-to-right order. 
+ for (i = upperIndexes.length - 1; i >= 0; --i) hull.push(points[sortedPoints[upperIndexes[i]][2]]); + for (i = +skipLeft; i < lowerIndexes.length - skipRight; ++i) hull.push(points[sortedPoints[lowerIndexes[i]][2]]); + + return hull; +} + +function contains$2(polygon, point) { + var n = polygon.length, + p = polygon[n - 1], + x = point[0], y = point[1], + x0 = p[0], y0 = p[1], + x1, y1, + inside = false; + + for (var i = 0; i < n; ++i) { + p = polygon[i], x1 = p[0], y1 = p[1]; + if (((y1 > y) !== (y0 > y)) && (x < (x0 - x1) * (y - y1) / (y0 - y1) + x1)) inside = !inside; + x0 = x1, y0 = y1; + } + + return inside; +} + +function length$2(polygon) { + var i = -1, + n = polygon.length, + b = polygon[n - 1], + xa, + ya, + xb = b[0], + yb = b[1], + perimeter = 0; + + while (++i < n) { + xa = xb; + ya = yb; + b = polygon[i]; + xb = b[0]; + yb = b[1]; + xa -= xb; + ya -= yb; + perimeter += Math.sqrt(xa * xa + ya * ya); + } + + return perimeter; +} + +function defaultSource$1() { + return Math.random(); +} + +var uniform = (function sourceRandomUniform(source) { + function randomUniform(min, max) { + min = min == null ? 0 : +min; + max = max == null ? 1 : +max; + if (arguments.length === 1) max = min, min = 0; + else max -= min; + return function() { + return source() * max + min; + }; + } + + randomUniform.source = sourceRandomUniform; + + return randomUniform; +})(defaultSource$1); + +var normal = (function sourceRandomNormal(source) { + function randomNormal(mu, sigma) { + var x, r; + mu = mu == null ? 0 : +mu; + sigma = sigma == null ? 1 : +sigma; + return function() { + var y; + + // If available, use the second previously-generated uniform random. + if (x != null) y = x, x = null; + + // Otherwise, generate a new x and y. 
+ else do { + x = source() * 2 - 1; + y = source() * 2 - 1; + r = x * x + y * y; + } while (!r || r > 1); + + return mu + sigma * y * Math.sqrt(-2 * Math.log(r) / r); + }; + } + + randomNormal.source = sourceRandomNormal; + + return randomNormal; +})(defaultSource$1); + +var logNormal = (function sourceRandomLogNormal(source) { + function randomLogNormal() { + var randomNormal = normal.source(source).apply(this, arguments); + return function() { + return Math.exp(randomNormal()); + }; + } + + randomLogNormal.source = sourceRandomLogNormal; + + return randomLogNormal; +})(defaultSource$1); + +var irwinHall = (function sourceRandomIrwinHall(source) { + function randomIrwinHall(n) { + return function() { + for (var sum = 0, i = 0; i < n; ++i) sum += source(); + return sum; + }; + } + + randomIrwinHall.source = sourceRandomIrwinHall; + + return randomIrwinHall; +})(defaultSource$1); + +var bates = (function sourceRandomBates(source) { + function randomBates(n) { + var randomIrwinHall = irwinHall.source(source)(n); + return function() { + return randomIrwinHall() / n; + }; + } + + randomBates.source = sourceRandomBates; + + return randomBates; +})(defaultSource$1); + +var exponential$1 = (function sourceRandomExponential(source) { + function randomExponential(lambda) { + return function() { + return -Math.log(1 - source()) / lambda; + }; + } + + randomExponential.source = sourceRandomExponential; + + return randomExponential; +})(defaultSource$1); + +function initRange(domain, range) { + switch (arguments.length) { + case 0: break; + case 1: this.range(domain); break; + default: this.range(range).domain(domain); break; + } + return this; +} + +function initInterpolator(domain, interpolator) { + switch (arguments.length) { + case 0: break; + case 1: this.interpolator(domain); break; + default: this.interpolator(interpolator).domain(domain); break; + } + return this; +} + +var array$3 = Array.prototype; + +var map$3 = array$3.map; +var slice$5 = array$3.slice; + +var 
implicit = {name: "implicit"}; + +function ordinal() { + var index = map$1(), + domain = [], + range = [], + unknown = implicit; + + function scale(d) { + var key = d + "", i = index.get(key); + if (!i) { + if (unknown !== implicit) return unknown; + index.set(key, i = domain.push(d)); + } + return range[(i - 1) % range.length]; + } + + scale.domain = function(_) { + if (!arguments.length) return domain.slice(); + domain = [], index = map$1(); + var i = -1, n = _.length, d, key; + while (++i < n) if (!index.has(key = (d = _[i]) + "")) index.set(key, domain.push(d)); + return scale; + }; + + scale.range = function(_) { + return arguments.length ? (range = slice$5.call(_), scale) : range.slice(); + }; + + scale.unknown = function(_) { + return arguments.length ? (unknown = _, scale) : unknown; + }; + + scale.copy = function() { + return ordinal(domain, range).unknown(unknown); + }; + + initRange.apply(scale, arguments); + + return scale; +} + +function band() { + var scale = ordinal().unknown(undefined), + domain = scale.domain, + ordinalRange = scale.range, + range = [0, 1], + step, + bandwidth, + round = false, + paddingInner = 0, + paddingOuter = 0, + align = 0.5; + + delete scale.unknown; + + function rescale() { + var n = domain().length, + reverse = range[1] < range[0], + start = range[reverse - 0], + stop = range[1 - reverse]; + step = (stop - start) / Math.max(1, n - paddingInner + paddingOuter * 2); + if (round) step = Math.floor(step); + start += (stop - start - step * (n - paddingInner)) * align; + bandwidth = step * (1 - paddingInner); + if (round) start = Math.round(start), bandwidth = Math.round(bandwidth); + var values = sequence(n).map(function(i) { return start + step * i; }); + return ordinalRange(reverse ? values.reverse() : values); + } + + scale.domain = function(_) { + return arguments.length ? (domain(_), rescale()) : domain(); + }; + + scale.range = function(_) { + return arguments.length ? 
(range = [+_[0], +_[1]], rescale()) : range.slice(); + }; + + scale.rangeRound = function(_) { + return range = [+_[0], +_[1]], round = true, rescale(); + }; + + scale.bandwidth = function() { + return bandwidth; + }; + + scale.step = function() { + return step; + }; + + scale.round = function(_) { + return arguments.length ? (round = !!_, rescale()) : round; + }; + + scale.padding = function(_) { + return arguments.length ? (paddingInner = Math.min(1, paddingOuter = +_), rescale()) : paddingInner; + }; + + scale.paddingInner = function(_) { + return arguments.length ? (paddingInner = Math.min(1, _), rescale()) : paddingInner; + }; + + scale.paddingOuter = function(_) { + return arguments.length ? (paddingOuter = +_, rescale()) : paddingOuter; + }; + + scale.align = function(_) { + return arguments.length ? (align = Math.max(0, Math.min(1, _)), rescale()) : align; + }; + + scale.copy = function() { + return band(domain(), range) + .round(round) + .paddingInner(paddingInner) + .paddingOuter(paddingOuter) + .align(align); + }; + + return initRange.apply(rescale(), arguments); +} + +function pointish(scale) { + var copy = scale.copy; + + scale.padding = scale.paddingOuter; + delete scale.paddingInner; + delete scale.paddingOuter; + + scale.copy = function() { + return pointish(copy()); + }; + + return scale; +} + +function point$1() { + return pointish(band.apply(null, arguments).paddingInner(1)); +} + +function constant$a(x) { + return function() { + return x; + }; +} + +function number$2(x) { + return +x; +} + +var unit = [0, 1]; + +function identity$6(x) { + return x; +} + +function normalize(a, b) { + return (b -= (a = +a)) + ? function(x) { return (x - a) / b; } + : constant$a(isNaN(b) ? 
NaN : 0.5); +} + +function clamper(domain) { + var a = domain[0], b = domain[domain.length - 1], t; + if (a > b) t = a, a = b, b = t; + return function(x) { return Math.max(a, Math.min(b, x)); }; +} + +// normalize(a, b)(x) takes a domain value x in [a,b] and returns the corresponding parameter t in [0,1]. +// interpolate(a, b)(t) takes a parameter t in [0,1] and returns the corresponding range value x in [a,b]. +function bimap(domain, range, interpolate) { + var d0 = domain[0], d1 = domain[1], r0 = range[0], r1 = range[1]; + if (d1 < d0) d0 = normalize(d1, d0), r0 = interpolate(r1, r0); + else d0 = normalize(d0, d1), r0 = interpolate(r0, r1); + return function(x) { return r0(d0(x)); }; +} + +function polymap(domain, range, interpolate) { + var j = Math.min(domain.length, range.length) - 1, + d = new Array(j), + r = new Array(j), + i = -1; + + // Reverse descending domains. + if (domain[j] < domain[0]) { + domain = domain.slice().reverse(); + range = range.slice().reverse(); + } + + while (++i < j) { + d[i] = normalize(domain[i], domain[i + 1]); + r[i] = interpolate(range[i], range[i + 1]); + } + + return function(x) { + var i = bisectRight(domain, x, 1, j) - 1; + return r[i](d[i](x)); + }; +} + +function copy(source, target) { + return target + .domain(source.domain()) + .range(source.range()) + .interpolate(source.interpolate()) + .clamp(source.clamp()) + .unknown(source.unknown()); +} + +function transformer$1() { + var domain = unit, + range = unit, + interpolate = interpolateValue, + transform, + untransform, + unknown, + clamp = identity$6, + piecewise, + output, + input; + + function rescale() { + piecewise = Math.min(domain.length, range.length) > 2 ? polymap : bimap; + output = input = null; + return scale; + } + + function scale(x) { + return isNaN(x = +x) ? 
unknown : (output || (output = piecewise(domain.map(transform), range, interpolate)))(transform(clamp(x))); + } + + scale.invert = function(y) { + return clamp(untransform((input || (input = piecewise(range, domain.map(transform), interpolateNumber)))(y))); + }; + + scale.domain = function(_) { + return arguments.length ? (domain = map$3.call(_, number$2), clamp === identity$6 || (clamp = clamper(domain)), rescale()) : domain.slice(); + }; + + scale.range = function(_) { + return arguments.length ? (range = slice$5.call(_), rescale()) : range.slice(); + }; + + scale.rangeRound = function(_) { + return range = slice$5.call(_), interpolate = interpolateRound, rescale(); + }; + + scale.clamp = function(_) { + return arguments.length ? (clamp = _ ? clamper(domain) : identity$6, scale) : clamp !== identity$6; + }; + + scale.interpolate = function(_) { + return arguments.length ? (interpolate = _, rescale()) : interpolate; + }; + + scale.unknown = function(_) { + return arguments.length ? (unknown = _, scale) : unknown; + }; + + return function(t, u) { + transform = t, untransform = u; + return rescale(); + }; +} + +function continuous(transform, untransform) { + return transformer$1()(transform, untransform); +} + +function tickFormat(start, stop, count, specifier) { + var step = tickStep(start, stop, count), + precision; + specifier = formatSpecifier(specifier == null ? 
",f" : specifier); + switch (specifier.type) { + case "s": { + var value = Math.max(Math.abs(start), Math.abs(stop)); + if (specifier.precision == null && !isNaN(precision = precisionPrefix(step, value))) specifier.precision = precision; + return exports.formatPrefix(specifier, value); + } + case "": + case "e": + case "g": + case "p": + case "r": { + if (specifier.precision == null && !isNaN(precision = precisionRound(step, Math.max(Math.abs(start), Math.abs(stop))))) specifier.precision = precision - (specifier.type === "e"); + break; + } + case "f": + case "%": { + if (specifier.precision == null && !isNaN(precision = precisionFixed(step))) specifier.precision = precision - (specifier.type === "%") * 2; + break; + } + } + return exports.format(specifier); +} + +function linearish(scale) { + var domain = scale.domain; + + scale.ticks = function(count) { + var d = domain(); + return ticks(d[0], d[d.length - 1], count == null ? 10 : count); + }; + + scale.tickFormat = function(count, specifier) { + var d = domain(); + return tickFormat(d[0], d[d.length - 1], count == null ? 
10 : count, specifier); + }; + + scale.nice = function(count) { + if (count == null) count = 10; + + var d = domain(), + i0 = 0, + i1 = d.length - 1, + start = d[i0], + stop = d[i1], + step; + + if (stop < start) { + step = start, start = stop, stop = step; + step = i0, i0 = i1, i1 = step; + } + + step = tickIncrement(start, stop, count); + + if (step > 0) { + start = Math.floor(start / step) * step; + stop = Math.ceil(stop / step) * step; + step = tickIncrement(start, stop, count); + } else if (step < 0) { + start = Math.ceil(start * step) / step; + stop = Math.floor(stop * step) / step; + step = tickIncrement(start, stop, count); + } + + if (step > 0) { + d[i0] = Math.floor(start / step) * step; + d[i1] = Math.ceil(stop / step) * step; + domain(d); + } else if (step < 0) { + d[i0] = Math.ceil(start * step) / step; + d[i1] = Math.floor(stop * step) / step; + domain(d); + } + + return scale; + }; + + return scale; +} + +function linear$2() { + var scale = continuous(identity$6, identity$6); + + scale.copy = function() { + return copy(scale, linear$2()); + }; + + initRange.apply(scale, arguments); + + return linearish(scale); +} + +function identity$7(domain) { + var unknown; + + function scale(x) { + return isNaN(x = +x) ? unknown : x; + } + + scale.invert = scale; + + scale.domain = scale.range = function(_) { + return arguments.length ? (domain = map$3.call(_, number$2), scale) : domain.slice(); + }; + + scale.unknown = function(_) { + return arguments.length ? (unknown = _, scale) : unknown; + }; + + scale.copy = function() { + return identity$7(domain).unknown(unknown); + }; + + domain = arguments.length ? 
map$3.call(domain, number$2) : [0, 1]; + + return linearish(scale); +} + +function nice(domain, interval) { + domain = domain.slice(); + + var i0 = 0, + i1 = domain.length - 1, + x0 = domain[i0], + x1 = domain[i1], + t; + + if (x1 < x0) { + t = i0, i0 = i1, i1 = t; + t = x0, x0 = x1, x1 = t; + } + + domain[i0] = interval.floor(x0); + domain[i1] = interval.ceil(x1); + return domain; +} + +function transformLog(x) { + return Math.log(x); +} + +function transformExp(x) { + return Math.exp(x); +} + +function transformLogn(x) { + return -Math.log(-x); +} + +function transformExpn(x) { + return -Math.exp(-x); +} + +function pow10(x) { + return isFinite(x) ? +("1e" + x) : x < 0 ? 0 : x; +} + +function powp(base) { + return base === 10 ? pow10 + : base === Math.E ? Math.exp + : function(x) { return Math.pow(base, x); }; +} + +function logp(base) { + return base === Math.E ? Math.log + : base === 10 && Math.log10 + || base === 2 && Math.log2 + || (base = Math.log(base), function(x) { return Math.log(x) / base; }); +} + +function reflect(f) { + return function(x) { + return -f(-x); + }; +} + +function loggish(transform) { + var scale = transform(transformLog, transformExp), + domain = scale.domain, + base = 10, + logs, + pows; + + function rescale() { + logs = logp(base), pows = powp(base); + if (domain()[0] < 0) { + logs = reflect(logs), pows = reflect(pows); + transform(transformLogn, transformExpn); + } else { + transform(transformLog, transformExp); + } + return scale; + } + + scale.base = function(_) { + return arguments.length ? (base = +_, rescale()) : base; + }; + + scale.domain = function(_) { + return arguments.length ? (domain(_), rescale()) : domain(); + }; + + scale.ticks = function(count) { + var d = domain(), + u = d[0], + v = d[d.length - 1], + r; + + if (r = v < u) i = u, u = v, v = i; + + var i = logs(u), + j = logs(v), + p, + k, + t, + n = count == null ? 
10 : +count, + z = []; + + if (!(base % 1) && j - i < n) { + i = Math.round(i) - 1, j = Math.round(j) + 1; + if (u > 0) for (; i < j; ++i) { + for (k = 1, p = pows(i); k < base; ++k) { + t = p * k; + if (t < u) continue; + if (t > v) break; + z.push(t); + } + } else for (; i < j; ++i) { + for (k = base - 1, p = pows(i); k >= 1; --k) { + t = p * k; + if (t < u) continue; + if (t > v) break; + z.push(t); + } + } + } else { + z = ticks(i, j, Math.min(j - i, n)).map(pows); + } + + return r ? z.reverse() : z; + }; + + scale.tickFormat = function(count, specifier) { + if (specifier == null) specifier = base === 10 ? ".0e" : ","; + if (typeof specifier !== "function") specifier = exports.format(specifier); + if (count === Infinity) return specifier; + if (count == null) count = 10; + var k = Math.max(1, base * count / scale.ticks().length); // TODO fast estimate? + return function(d) { + var i = d / pows(Math.round(logs(d))); + if (i * base < base - 0.5) i *= base; + return i <= k ? specifier(d) : ""; + }; + }; + + scale.nice = function() { + return domain(nice(domain(), { + floor: function(x) { return pows(Math.floor(logs(x))); }, + ceil: function(x) { return pows(Math.ceil(logs(x))); } + })); + }; + + return scale; +} + +function log$1() { + var scale = loggish(transformer$1()).domain([1, 10]); + + scale.copy = function() { + return copy(scale, log$1()).base(scale.base()); + }; + + initRange.apply(scale, arguments); + + return scale; +} + +function transformSymlog(c) { + return function(x) { + return Math.sign(x) * Math.log1p(Math.abs(x / c)); + }; +} + +function transformSymexp(c) { + return function(x) { + return Math.sign(x) * Math.expm1(Math.abs(x)) * c; + }; +} + +function symlogish(transform) { + var c = 1, scale = transform(transformSymlog(c), transformSymexp(c)); + + scale.constant = function(_) { + return arguments.length ? 
transform(transformSymlog(c = +_), transformSymexp(c)) : c; + }; + + return linearish(scale); +} + +function symlog() { + var scale = symlogish(transformer$1()); + + scale.copy = function() { + return copy(scale, symlog()).constant(scale.constant()); + }; + + return initRange.apply(scale, arguments); +} + +function transformPow(exponent) { + return function(x) { + return x < 0 ? -Math.pow(-x, exponent) : Math.pow(x, exponent); + }; +} + +function transformSqrt(x) { + return x < 0 ? -Math.sqrt(-x) : Math.sqrt(x); +} + +function transformSquare(x) { + return x < 0 ? -x * x : x * x; +} + +function powish(transform) { + var scale = transform(identity$6, identity$6), + exponent = 1; + + function rescale() { + return exponent === 1 ? transform(identity$6, identity$6) + : exponent === 0.5 ? transform(transformSqrt, transformSquare) + : transform(transformPow(exponent), transformPow(1 / exponent)); + } + + scale.exponent = function(_) { + return arguments.length ? (exponent = +_, rescale()) : exponent; + }; + + return linearish(scale); +} + +function pow$1() { + var scale = powish(transformer$1()); + + scale.copy = function() { + return copy(scale, pow$1()).exponent(scale.exponent()); + }; + + initRange.apply(scale, arguments); + + return scale; +} + +function sqrt$1() { + return pow$1.apply(null, arguments).exponent(0.5); +} + +function quantile() { + var domain = [], + range = [], + thresholds = [], + unknown; + + function rescale() { + var i = 0, n = Math.max(1, range.length); + thresholds = new Array(n - 1); + while (++i < n) thresholds[i - 1] = threshold(domain, i / n); + return scale; + } + + function scale(x) { + return isNaN(x = +x) ? unknown : range[bisectRight(thresholds, x)]; + } + + scale.invertExtent = function(y) { + var i = range.indexOf(y); + return i < 0 ? [NaN, NaN] : [ + i > 0 ? thresholds[i - 1] : domain[0], + i < thresholds.length ? 
thresholds[i] : domain[domain.length - 1] + ]; + }; + + scale.domain = function(_) { + if (!arguments.length) return domain.slice(); + domain = []; + for (var i = 0, n = _.length, d; i < n; ++i) if (d = _[i], d != null && !isNaN(d = +d)) domain.push(d); + domain.sort(ascending); + return rescale(); + }; + + scale.range = function(_) { + return arguments.length ? (range = slice$5.call(_), rescale()) : range.slice(); + }; + + scale.unknown = function(_) { + return arguments.length ? (unknown = _, scale) : unknown; + }; + + scale.quantiles = function() { + return thresholds.slice(); + }; + + scale.copy = function() { + return quantile() + .domain(domain) + .range(range) + .unknown(unknown); + }; + + return initRange.apply(scale, arguments); +} + +function quantize$1() { + var x0 = 0, + x1 = 1, + n = 1, + domain = [0.5], + range = [0, 1], + unknown; + + function scale(x) { + return x <= x ? range[bisectRight(domain, x, 0, n)] : unknown; + } + + function rescale() { + var i = -1; + domain = new Array(n); + while (++i < n) domain[i] = ((i + 1) * x1 - (i - n) * x0) / (n + 1); + return scale; + } + + scale.domain = function(_) { + return arguments.length ? (x0 = +_[0], x1 = +_[1], rescale()) : [x0, x1]; + }; + + scale.range = function(_) { + return arguments.length ? (n = (range = slice$5.call(_)).length - 1, rescale()) : range.slice(); + }; + + scale.invertExtent = function(y) { + var i = range.indexOf(y); + return i < 0 ? [NaN, NaN] + : i < 1 ? [x0, domain[0]] + : i >= n ? [domain[n - 1], x1] + : [domain[i - 1], domain[i]]; + }; + + scale.unknown = function(_) { + return arguments.length ? 
(unknown = _, scale) : scale; + }; + + scale.thresholds = function() { + return domain.slice(); + }; + + scale.copy = function() { + return quantize$1() + .domain([x0, x1]) + .range(range) + .unknown(unknown); + }; + + return initRange.apply(linearish(scale), arguments); +} + +function threshold$1() { + var domain = [0.5], + range = [0, 1], + unknown, + n = 1; + + function scale(x) { + return x <= x ? range[bisectRight(domain, x, 0, n)] : unknown; + } + + scale.domain = function(_) { + return arguments.length ? (domain = slice$5.call(_), n = Math.min(domain.length, range.length - 1), scale) : domain.slice(); + }; + + scale.range = function(_) { + return arguments.length ? (range = slice$5.call(_), n = Math.min(domain.length, range.length - 1), scale) : range.slice(); + }; + + scale.invertExtent = function(y) { + var i = range.indexOf(y); + return [domain[i - 1], domain[i]]; + }; + + scale.unknown = function(_) { + return arguments.length ? (unknown = _, scale) : unknown; + }; + + scale.copy = function() { + return threshold$1() + .domain(domain) + .range(range) + .unknown(unknown); + }; + + return initRange.apply(scale, arguments); +} + +var t0$1 = new Date, + t1$1 = new Date; + +function newInterval(floori, offseti, count, field) { + + function interval(date) { + return floori(date = arguments.length === 0 ? new Date : new Date(+date)), date; + } + + interval.floor = function(date) { + return floori(date = new Date(+date)), date; + }; + + interval.ceil = function(date) { + return floori(date = new Date(date - 1)), offseti(date, 1), floori(date), date; + }; + + interval.round = function(date) { + var d0 = interval(date), + d1 = interval.ceil(date); + return date - d0 < d1 - date ? d0 : d1; + }; + + interval.offset = function(date, step) { + return offseti(date = new Date(+date), step == null ? 1 : Math.floor(step)), date; + }; + + interval.range = function(start, stop, step) { + var range = [], previous; + start = interval.ceil(start); + step = step == null ? 
1 : Math.floor(step); + if (!(start < stop) || !(step > 0)) return range; // also handles Invalid Date + do range.push(previous = new Date(+start)), offseti(start, step), floori(start); + while (previous < start && start < stop); + return range; + }; + + interval.filter = function(test) { + return newInterval(function(date) { + if (date >= date) while (floori(date), !test(date)) date.setTime(date - 1); + }, function(date, step) { + if (date >= date) { + if (step < 0) while (++step <= 0) { + while (offseti(date, -1), !test(date)) {} // eslint-disable-line no-empty + } else while (--step >= 0) { + while (offseti(date, +1), !test(date)) {} // eslint-disable-line no-empty + } + } + }); + }; + + if (count) { + interval.count = function(start, end) { + t0$1.setTime(+start), t1$1.setTime(+end); + floori(t0$1), floori(t1$1); + return Math.floor(count(t0$1, t1$1)); + }; + + interval.every = function(step) { + step = Math.floor(step); + return !isFinite(step) || !(step > 0) ? null + : !(step > 1) ? interval + : interval.filter(field + ? function(d) { return field(d) % step === 0; } + : function(d) { return interval.count(0, d) % step === 0; }); + }; + } + + return interval; +} + +var millisecond = newInterval(function() { + // noop +}, function(date, step) { + date.setTime(+date + step); +}, function(start, end) { + return end - start; +}); + +// An optimized implementation for this simple case. 
+millisecond.every = function(k) { + k = Math.floor(k); + if (!isFinite(k) || !(k > 0)) return null; + if (!(k > 1)) return millisecond; + return newInterval(function(date) { + date.setTime(Math.floor(date / k) * k); + }, function(date, step) { + date.setTime(+date + step * k); + }, function(start, end) { + return (end - start) / k; + }); +}; +var milliseconds = millisecond.range; + +var durationSecond = 1e3; +var durationMinute = 6e4; +var durationHour = 36e5; +var durationDay = 864e5; +var durationWeek = 6048e5; + +var second = newInterval(function(date) { + date.setTime(date - date.getMilliseconds()); +}, function(date, step) { + date.setTime(+date + step * durationSecond); +}, function(start, end) { + return (end - start) / durationSecond; +}, function(date) { + return date.getUTCSeconds(); +}); +var seconds = second.range; + +var minute = newInterval(function(date) { + date.setTime(date - date.getMilliseconds() - date.getSeconds() * durationSecond); +}, function(date, step) { + date.setTime(+date + step * durationMinute); +}, function(start, end) { + return (end - start) / durationMinute; +}, function(date) { + return date.getMinutes(); +}); +var minutes = minute.range; + +var hour = newInterval(function(date) { + date.setTime(date - date.getMilliseconds() - date.getSeconds() * durationSecond - date.getMinutes() * durationMinute); +}, function(date, step) { + date.setTime(+date + step * durationHour); +}, function(start, end) { + return (end - start) / durationHour; +}, function(date) { + return date.getHours(); +}); +var hours = hour.range; + +var day = newInterval(function(date) { + date.setHours(0, 0, 0, 0); +}, function(date, step) { + date.setDate(date.getDate() + step); +}, function(start, end) { + return (end - start - (end.getTimezoneOffset() - start.getTimezoneOffset()) * durationMinute) / durationDay; +}, function(date) { + return date.getDate() - 1; +}); +var days = day.range; + +function weekday(i) { + return newInterval(function(date) { + 
date.setDate(date.getDate() - (date.getDay() + 7 - i) % 7); + date.setHours(0, 0, 0, 0); + }, function(date, step) { + date.setDate(date.getDate() + step * 7); + }, function(start, end) { + return (end - start - (end.getTimezoneOffset() - start.getTimezoneOffset()) * durationMinute) / durationWeek; + }); +} + +var sunday = weekday(0); +var monday = weekday(1); +var tuesday = weekday(2); +var wednesday = weekday(3); +var thursday = weekday(4); +var friday = weekday(5); +var saturday = weekday(6); + +var sundays = sunday.range; +var mondays = monday.range; +var tuesdays = tuesday.range; +var wednesdays = wednesday.range; +var thursdays = thursday.range; +var fridays = friday.range; +var saturdays = saturday.range; + +var month = newInterval(function(date) { + date.setDate(1); + date.setHours(0, 0, 0, 0); +}, function(date, step) { + date.setMonth(date.getMonth() + step); +}, function(start, end) { + return end.getMonth() - start.getMonth() + (end.getFullYear() - start.getFullYear()) * 12; +}, function(date) { + return date.getMonth(); +}); +var months = month.range; + +var year = newInterval(function(date) { + date.setMonth(0, 1); + date.setHours(0, 0, 0, 0); +}, function(date, step) { + date.setFullYear(date.getFullYear() + step); +}, function(start, end) { + return end.getFullYear() - start.getFullYear(); +}, function(date) { + return date.getFullYear(); +}); + +// An optimized implementation for this simple case. +year.every = function(k) { + return !isFinite(k = Math.floor(k)) || !(k > 0) ? 
null : newInterval(function(date) { + date.setFullYear(Math.floor(date.getFullYear() / k) * k); + date.setMonth(0, 1); + date.setHours(0, 0, 0, 0); + }, function(date, step) { + date.setFullYear(date.getFullYear() + step * k); + }); +}; +var years = year.range; + +var utcMinute = newInterval(function(date) { + date.setUTCSeconds(0, 0); +}, function(date, step) { + date.setTime(+date + step * durationMinute); +}, function(start, end) { + return (end - start) / durationMinute; +}, function(date) { + return date.getUTCMinutes(); +}); +var utcMinutes = utcMinute.range; + +var utcHour = newInterval(function(date) { + date.setUTCMinutes(0, 0, 0); +}, function(date, step) { + date.setTime(+date + step * durationHour); +}, function(start, end) { + return (end - start) / durationHour; +}, function(date) { + return date.getUTCHours(); +}); +var utcHours = utcHour.range; + +var utcDay = newInterval(function(date) { + date.setUTCHours(0, 0, 0, 0); +}, function(date, step) { + date.setUTCDate(date.getUTCDate() + step); +}, function(start, end) { + return (end - start) / durationDay; +}, function(date) { + return date.getUTCDate() - 1; +}); +var utcDays = utcDay.range; + +function utcWeekday(i) { + return newInterval(function(date) { + date.setUTCDate(date.getUTCDate() - (date.getUTCDay() + 7 - i) % 7); + date.setUTCHours(0, 0, 0, 0); + }, function(date, step) { + date.setUTCDate(date.getUTCDate() + step * 7); + }, function(start, end) { + return (end - start) / durationWeek; + }); +} + +var utcSunday = utcWeekday(0); +var utcMonday = utcWeekday(1); +var utcTuesday = utcWeekday(2); +var utcWednesday = utcWeekday(3); +var utcThursday = utcWeekday(4); +var utcFriday = utcWeekday(5); +var utcSaturday = utcWeekday(6); + +var utcSundays = utcSunday.range; +var utcMondays = utcMonday.range; +var utcTuesdays = utcTuesday.range; +var utcWednesdays = utcWednesday.range; +var utcThursdays = utcThursday.range; +var utcFridays = utcFriday.range; +var utcSaturdays = utcSaturday.range; + +var 
utcMonth = newInterval(function(date) { + date.setUTCDate(1); + date.setUTCHours(0, 0, 0, 0); +}, function(date, step) { + date.setUTCMonth(date.getUTCMonth() + step); +}, function(start, end) { + return end.getUTCMonth() - start.getUTCMonth() + (end.getUTCFullYear() - start.getUTCFullYear()) * 12; +}, function(date) { + return date.getUTCMonth(); +}); +var utcMonths = utcMonth.range; + +var utcYear = newInterval(function(date) { + date.setUTCMonth(0, 1); + date.setUTCHours(0, 0, 0, 0); +}, function(date, step) { + date.setUTCFullYear(date.getUTCFullYear() + step); +}, function(start, end) { + return end.getUTCFullYear() - start.getUTCFullYear(); +}, function(date) { + return date.getUTCFullYear(); +}); + +// An optimized implementation for this simple case. +utcYear.every = function(k) { + return !isFinite(k = Math.floor(k)) || !(k > 0) ? null : newInterval(function(date) { + date.setUTCFullYear(Math.floor(date.getUTCFullYear() / k) * k); + date.setUTCMonth(0, 1); + date.setUTCHours(0, 0, 0, 0); + }, function(date, step) { + date.setUTCFullYear(date.getUTCFullYear() + step * k); + }); +}; +var utcYears = utcYear.range; + +function localDate(d) { + if (0 <= d.y && d.y < 100) { + var date = new Date(-1, d.m, d.d, d.H, d.M, d.S, d.L); + date.setFullYear(d.y); + return date; + } + return new Date(d.y, d.m, d.d, d.H, d.M, d.S, d.L); +} + +function utcDate(d) { + if (0 <= d.y && d.y < 100) { + var date = new Date(Date.UTC(-1, d.m, d.d, d.H, d.M, d.S, d.L)); + date.setUTCFullYear(d.y); + return date; + } + return new Date(Date.UTC(d.y, d.m, d.d, d.H, d.M, d.S, d.L)); +} + +function newDate(y, m, d) { + return {y: y, m: m, d: d, H: 0, M: 0, S: 0, L: 0}; +} + +function formatLocale$1(locale) { + var locale_dateTime = locale.dateTime, + locale_date = locale.date, + locale_time = locale.time, + locale_periods = locale.periods, + locale_weekdays = locale.days, + locale_shortWeekdays = locale.shortDays, + locale_months = locale.months, + locale_shortMonths = 
locale.shortMonths; + + var periodRe = formatRe(locale_periods), + periodLookup = formatLookup(locale_periods), + weekdayRe = formatRe(locale_weekdays), + weekdayLookup = formatLookup(locale_weekdays), + shortWeekdayRe = formatRe(locale_shortWeekdays), + shortWeekdayLookup = formatLookup(locale_shortWeekdays), + monthRe = formatRe(locale_months), + monthLookup = formatLookup(locale_months), + shortMonthRe = formatRe(locale_shortMonths), + shortMonthLookup = formatLookup(locale_shortMonths); + + var formats = { + "a": formatShortWeekday, + "A": formatWeekday, + "b": formatShortMonth, + "B": formatMonth, + "c": null, + "d": formatDayOfMonth, + "e": formatDayOfMonth, + "f": formatMicroseconds, + "H": formatHour24, + "I": formatHour12, + "j": formatDayOfYear, + "L": formatMilliseconds, + "m": formatMonthNumber, + "M": formatMinutes, + "p": formatPeriod, + "q": formatQuarter, + "Q": formatUnixTimestamp, + "s": formatUnixTimestampSeconds, + "S": formatSeconds, + "u": formatWeekdayNumberMonday, + "U": formatWeekNumberSunday, + "V": formatWeekNumberISO, + "w": formatWeekdayNumberSunday, + "W": formatWeekNumberMonday, + "x": null, + "X": null, + "y": formatYear$1, + "Y": formatFullYear, + "Z": formatZone, + "%": formatLiteralPercent + }; + + var utcFormats = { + "a": formatUTCShortWeekday, + "A": formatUTCWeekday, + "b": formatUTCShortMonth, + "B": formatUTCMonth, + "c": null, + "d": formatUTCDayOfMonth, + "e": formatUTCDayOfMonth, + "f": formatUTCMicroseconds, + "H": formatUTCHour24, + "I": formatUTCHour12, + "j": formatUTCDayOfYear, + "L": formatUTCMilliseconds, + "m": formatUTCMonthNumber, + "M": formatUTCMinutes, + "p": formatUTCPeriod, + "q": formatUTCQuarter, + "Q": formatUnixTimestamp, + "s": formatUnixTimestampSeconds, + "S": formatUTCSeconds, + "u": formatUTCWeekdayNumberMonday, + "U": formatUTCWeekNumberSunday, + "V": formatUTCWeekNumberISO, + "w": formatUTCWeekdayNumberSunday, + "W": formatUTCWeekNumberMonday, + "x": null, + "X": null, + "y": formatUTCYear, + 
"Y": formatUTCFullYear, + "Z": formatUTCZone, + "%": formatLiteralPercent + }; + + var parses = { + "a": parseShortWeekday, + "A": parseWeekday, + "b": parseShortMonth, + "B": parseMonth, + "c": parseLocaleDateTime, + "d": parseDayOfMonth, + "e": parseDayOfMonth, + "f": parseMicroseconds, + "H": parseHour24, + "I": parseHour24, + "j": parseDayOfYear, + "L": parseMilliseconds, + "m": parseMonthNumber, + "M": parseMinutes, + "p": parsePeriod, + "q": parseQuarter, + "Q": parseUnixTimestamp, + "s": parseUnixTimestampSeconds, + "S": parseSeconds, + "u": parseWeekdayNumberMonday, + "U": parseWeekNumberSunday, + "V": parseWeekNumberISO, + "w": parseWeekdayNumberSunday, + "W": parseWeekNumberMonday, + "x": parseLocaleDate, + "X": parseLocaleTime, + "y": parseYear, + "Y": parseFullYear, + "Z": parseZone, + "%": parseLiteralPercent + }; + + // These recursive directive definitions must be deferred. + formats.x = newFormat(locale_date, formats); + formats.X = newFormat(locale_time, formats); + formats.c = newFormat(locale_dateTime, formats); + utcFormats.x = newFormat(locale_date, utcFormats); + utcFormats.X = newFormat(locale_time, utcFormats); + utcFormats.c = newFormat(locale_dateTime, utcFormats); + + function newFormat(specifier, formats) { + return function(date) { + var string = [], + i = -1, + j = 0, + n = specifier.length, + c, + pad, + format; + + if (!(date instanceof Date)) date = new Date(+date); + + while (++i < n) { + if (specifier.charCodeAt(i) === 37) { + string.push(specifier.slice(j, i)); + if ((pad = pads[c = specifier.charAt(++i)]) != null) c = specifier.charAt(++i); + else pad = c === "e" ? 
" " : "0"; + if (format = formats[c]) c = format(date, pad); + string.push(c); + j = i + 1; + } + } + + string.push(specifier.slice(j, i)); + return string.join(""); + }; + } + + function newParse(specifier, Z) { + return function(string) { + var d = newDate(1900, undefined, 1), + i = parseSpecifier(d, specifier, string += "", 0), + week, day$1; + if (i != string.length) return null; + + // If a UNIX timestamp is specified, return it. + if ("Q" in d) return new Date(d.Q); + if ("s" in d) return new Date(d.s * 1000 + ("L" in d ? d.L : 0)); + + // If this is utcParse, never use the local timezone. + if (Z && !("Z" in d)) d.Z = 0; + + // The am-pm flag is 0 for AM, and 1 for PM. + if ("p" in d) d.H = d.H % 12 + d.p * 12; + + // If the month was not specified, inherit from the quarter. + if (d.m === undefined) d.m = "q" in d ? d.q : 0; + + // Convert day-of-week and week-of-year to day-of-year. + if ("V" in d) { + if (d.V < 1 || d.V > 53) return null; + if (!("w" in d)) d.w = 1; + if ("Z" in d) { + week = utcDate(newDate(d.y, 0, 1)), day$1 = week.getUTCDay(); + week = day$1 > 4 || day$1 === 0 ? utcMonday.ceil(week) : utcMonday(week); + week = utcDay.offset(week, (d.V - 1) * 7); + d.y = week.getUTCFullYear(); + d.m = week.getUTCMonth(); + d.d = week.getUTCDate() + (d.w + 6) % 7; + } else { + week = localDate(newDate(d.y, 0, 1)), day$1 = week.getDay(); + week = day$1 > 4 || day$1 === 0 ? monday.ceil(week) : monday(week); + week = day.offset(week, (d.V - 1) * 7); + d.y = week.getFullYear(); + d.m = week.getMonth(); + d.d = week.getDate() + (d.w + 6) % 7; + } + } else if ("W" in d || "U" in d) { + if (!("w" in d)) d.w = "u" in d ? d.u % 7 : "W" in d ? 1 : 0; + day$1 = "Z" in d ? utcDate(newDate(d.y, 0, 1)).getUTCDay() : localDate(newDate(d.y, 0, 1)).getDay(); + d.m = 0; + d.d = "W" in d ? 
(d.w + 6) % 7 + d.W * 7 - (day$1 + 5) % 7 : d.w + d.U * 7 - (day$1 + 6) % 7; + } + + // If a time zone is specified, all fields are interpreted as UTC and then + // offset according to the specified time zone. + if ("Z" in d) { + d.H += d.Z / 100 | 0; + d.M += d.Z % 100; + return utcDate(d); + } + + // Otherwise, all fields are in local time. + return localDate(d); + }; + } + + function parseSpecifier(d, specifier, string, j) { + var i = 0, + n = specifier.length, + m = string.length, + c, + parse; + + while (i < n) { + if (j >= m) return -1; + c = specifier.charCodeAt(i++); + if (c === 37) { + c = specifier.charAt(i++); + parse = parses[c in pads ? specifier.charAt(i++) : c]; + if (!parse || ((j = parse(d, string, j)) < 0)) return -1; + } else if (c != string.charCodeAt(j++)) { + return -1; + } + } + + return j; + } + + function parsePeriod(d, string, i) { + var n = periodRe.exec(string.slice(i)); + return n ? (d.p = periodLookup[n[0].toLowerCase()], i + n[0].length) : -1; + } + + function parseShortWeekday(d, string, i) { + var n = shortWeekdayRe.exec(string.slice(i)); + return n ? (d.w = shortWeekdayLookup[n[0].toLowerCase()], i + n[0].length) : -1; + } + + function parseWeekday(d, string, i) { + var n = weekdayRe.exec(string.slice(i)); + return n ? (d.w = weekdayLookup[n[0].toLowerCase()], i + n[0].length) : -1; + } + + function parseShortMonth(d, string, i) { + var n = shortMonthRe.exec(string.slice(i)); + return n ? (d.m = shortMonthLookup[n[0].toLowerCase()], i + n[0].length) : -1; + } + + function parseMonth(d, string, i) { + var n = monthRe.exec(string.slice(i)); + return n ? 
(d.m = monthLookup[n[0].toLowerCase()], i + n[0].length) : -1; + } + + function parseLocaleDateTime(d, string, i) { + return parseSpecifier(d, locale_dateTime, string, i); + } + + function parseLocaleDate(d, string, i) { + return parseSpecifier(d, locale_date, string, i); + } + + function parseLocaleTime(d, string, i) { + return parseSpecifier(d, locale_time, string, i); + } + + function formatShortWeekday(d) { + return locale_shortWeekdays[d.getDay()]; + } + + function formatWeekday(d) { + return locale_weekdays[d.getDay()]; + } + + function formatShortMonth(d) { + return locale_shortMonths[d.getMonth()]; + } + + function formatMonth(d) { + return locale_months[d.getMonth()]; + } + + function formatPeriod(d) { + return locale_periods[+(d.getHours() >= 12)]; + } + + function formatQuarter(d) { + return 1 + ~~(d.getMonth() / 3); + } + + function formatUTCShortWeekday(d) { + return locale_shortWeekdays[d.getUTCDay()]; + } + + function formatUTCWeekday(d) { + return locale_weekdays[d.getUTCDay()]; + } + + function formatUTCShortMonth(d) { + return locale_shortMonths[d.getUTCMonth()]; + } + + function formatUTCMonth(d) { + return locale_months[d.getUTCMonth()]; + } + + function formatUTCPeriod(d) { + return locale_periods[+(d.getUTCHours() >= 12)]; + } + + function formatUTCQuarter(d) { + return 1 + ~~(d.getUTCMonth() / 3); + } + + return { + format: function(specifier) { + var f = newFormat(specifier += "", formats); + f.toString = function() { return specifier; }; + return f; + }, + parse: function(specifier) { + var p = newParse(specifier += "", false); + p.toString = function() { return specifier; }; + return p; + }, + utcFormat: function(specifier) { + var f = newFormat(specifier += "", utcFormats); + f.toString = function() { return specifier; }; + return f; + }, + utcParse: function(specifier) { + var p = newParse(specifier += "", true); + p.toString = function() { return specifier; }; + return p; + } + }; +} + +var pads = {"-": "", "_": " ", "0": "0"}, + 
numberRe = /^\s*\d+/, // note: ignores next directive + percentRe = /^%/, + requoteRe = /[\\^$*+?|[\]().{}]/g; + +function pad$1(value, fill, width) { + var sign = value < 0 ? "-" : "", + string = (sign ? -value : value) + "", + length = string.length; + return sign + (length < width ? new Array(width - length + 1).join(fill) + string : string); +} + +function requote(s) { + return s.replace(requoteRe, "\\$&"); +} + +function formatRe(names) { + return new RegExp("^(?:" + names.map(requote).join("|") + ")", "i"); +} + +function formatLookup(names) { + var map = {}, i = -1, n = names.length; + while (++i < n) map[names[i].toLowerCase()] = i; + return map; +} + +function parseWeekdayNumberSunday(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 1)); + return n ? (d.w = +n[0], i + n[0].length) : -1; +} + +function parseWeekdayNumberMonday(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 1)); + return n ? (d.u = +n[0], i + n[0].length) : -1; +} + +function parseWeekNumberSunday(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.U = +n[0], i + n[0].length) : -1; +} + +function parseWeekNumberISO(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.V = +n[0], i + n[0].length) : -1; +} + +function parseWeekNumberMonday(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.W = +n[0], i + n[0].length) : -1; +} + +function parseFullYear(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 4)); + return n ? (d.y = +n[0], i + n[0].length) : -1; +} + +function parseYear(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.y = +n[0] + (+n[0] > 68 ? 1900 : 2000), i + n[0].length) : -1; +} + +function parseZone(d, string, i) { + var n = /^(Z)|([+-]\d\d)(?::?(\d\d))?/.exec(string.slice(i, i + 6)); + return n ? (d.Z = n[1] ? 
0 : -(n[2] + (n[3] || "00")), i + n[0].length) : -1; +} + +function parseQuarter(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 1)); + return n ? (d.q = n[0] * 3 - 3, i + n[0].length) : -1; +} + +function parseMonthNumber(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.m = n[0] - 1, i + n[0].length) : -1; +} + +function parseDayOfMonth(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.d = +n[0], i + n[0].length) : -1; +} + +function parseDayOfYear(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 3)); + return n ? (d.m = 0, d.d = +n[0], i + n[0].length) : -1; +} + +function parseHour24(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.H = +n[0], i + n[0].length) : -1; +} + +function parseMinutes(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.M = +n[0], i + n[0].length) : -1; +} + +function parseSeconds(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 2)); + return n ? (d.S = +n[0], i + n[0].length) : -1; +} + +function parseMilliseconds(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 3)); + return n ? (d.L = +n[0], i + n[0].length) : -1; +} + +function parseMicroseconds(d, string, i) { + var n = numberRe.exec(string.slice(i, i + 6)); + return n ? (d.L = Math.floor(n[0] / 1000), i + n[0].length) : -1; +} + +function parseLiteralPercent(d, string, i) { + var n = percentRe.exec(string.slice(i, i + 1)); + return n ? i + n[0].length : -1; +} + +function parseUnixTimestamp(d, string, i) { + var n = numberRe.exec(string.slice(i)); + return n ? (d.Q = +n[0], i + n[0].length) : -1; +} + +function parseUnixTimestampSeconds(d, string, i) { + var n = numberRe.exec(string.slice(i)); + return n ? 
(d.s = +n[0], i + n[0].length) : -1; +} + +function formatDayOfMonth(d, p) { + return pad$1(d.getDate(), p, 2); +} + +function formatHour24(d, p) { + return pad$1(d.getHours(), p, 2); +} + +function formatHour12(d, p) { + return pad$1(d.getHours() % 12 || 12, p, 2); +} + +function formatDayOfYear(d, p) { + return pad$1(1 + day.count(year(d), d), p, 3); +} + +function formatMilliseconds(d, p) { + return pad$1(d.getMilliseconds(), p, 3); +} + +function formatMicroseconds(d, p) { + return formatMilliseconds(d, p) + "000"; +} + +function formatMonthNumber(d, p) { + return pad$1(d.getMonth() + 1, p, 2); +} + +function formatMinutes(d, p) { + return pad$1(d.getMinutes(), p, 2); +} + +function formatSeconds(d, p) { + return pad$1(d.getSeconds(), p, 2); +} + +function formatWeekdayNumberMonday(d) { + var day = d.getDay(); + return day === 0 ? 7 : day; +} + +function formatWeekNumberSunday(d, p) { + return pad$1(sunday.count(year(d) - 1, d), p, 2); +} + +function formatWeekNumberISO(d, p) { + var day = d.getDay(); + d = (day >= 4 || day === 0) ? thursday(d) : thursday.ceil(d); + return pad$1(thursday.count(year(d), d) + (year(d).getDay() === 4), p, 2); +} + +function formatWeekdayNumberSunday(d) { + return d.getDay(); +} + +function formatWeekNumberMonday(d, p) { + return pad$1(monday.count(year(d) - 1, d), p, 2); +} + +function formatYear$1(d, p) { + return pad$1(d.getFullYear() % 100, p, 2); +} + +function formatFullYear(d, p) { + return pad$1(d.getFullYear() % 10000, p, 4); +} + +function formatZone(d) { + var z = d.getTimezoneOffset(); + return (z > 0 ? 
"-" : (z *= -1, "+")) + + pad$1(z / 60 | 0, "0", 2) + + pad$1(z % 60, "0", 2); +} + +function formatUTCDayOfMonth(d, p) { + return pad$1(d.getUTCDate(), p, 2); +} + +function formatUTCHour24(d, p) { + return pad$1(d.getUTCHours(), p, 2); +} + +function formatUTCHour12(d, p) { + return pad$1(d.getUTCHours() % 12 || 12, p, 2); +} + +function formatUTCDayOfYear(d, p) { + return pad$1(1 + utcDay.count(utcYear(d), d), p, 3); +} + +function formatUTCMilliseconds(d, p) { + return pad$1(d.getUTCMilliseconds(), p, 3); +} + +function formatUTCMicroseconds(d, p) { + return formatUTCMilliseconds(d, p) + "000"; +} + +function formatUTCMonthNumber(d, p) { + return pad$1(d.getUTCMonth() + 1, p, 2); +} + +function formatUTCMinutes(d, p) { + return pad$1(d.getUTCMinutes(), p, 2); +} + +function formatUTCSeconds(d, p) { + return pad$1(d.getUTCSeconds(), p, 2); +} + +function formatUTCWeekdayNumberMonday(d) { + var dow = d.getUTCDay(); + return dow === 0 ? 7 : dow; +} + +function formatUTCWeekNumberSunday(d, p) { + return pad$1(utcSunday.count(utcYear(d) - 1, d), p, 2); +} + +function formatUTCWeekNumberISO(d, p) { + var day = d.getUTCDay(); + d = (day >= 4 || day === 0) ? 
utcThursday(d) : utcThursday.ceil(d); + return pad$1(utcThursday.count(utcYear(d), d) + (utcYear(d).getUTCDay() === 4), p, 2); +} + +function formatUTCWeekdayNumberSunday(d) { + return d.getUTCDay(); +} + +function formatUTCWeekNumberMonday(d, p) { + return pad$1(utcMonday.count(utcYear(d) - 1, d), p, 2); +} + +function formatUTCYear(d, p) { + return pad$1(d.getUTCFullYear() % 100, p, 2); +} + +function formatUTCFullYear(d, p) { + return pad$1(d.getUTCFullYear() % 10000, p, 4); +} + +function formatUTCZone() { + return "+0000"; +} + +function formatLiteralPercent() { + return "%"; +} + +function formatUnixTimestamp(d) { + return +d; +} + +function formatUnixTimestampSeconds(d) { + return Math.floor(+d / 1000); +} + +var locale$1; + +defaultLocale$1({ + dateTime: "%x, %X", + date: "%-m/%-d/%Y", + time: "%-I:%M:%S %p", + periods: ["AM", "PM"], + days: ["Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday"], + shortDays: ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"], + months: ["January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"], + shortMonths: ["Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"] +}); + +function defaultLocale$1(definition) { + locale$1 = formatLocale$1(definition); + exports.timeFormat = locale$1.format; + exports.timeParse = locale$1.parse; + exports.utcFormat = locale$1.utcFormat; + exports.utcParse = locale$1.utcParse; + return locale$1; +} + +var isoSpecifier = "%Y-%m-%dT%H:%M:%S.%LZ"; + +function formatIsoNative(date) { + return date.toISOString(); +} + +var formatIso = Date.prototype.toISOString + ? formatIsoNative + : exports.utcFormat(isoSpecifier); + +function parseIsoNative(string) { + var date = new Date(string); + return isNaN(date) ? null : date; +} + +var parseIso = +new Date("2000-01-01T00:00:00.000Z") + ? 
parseIsoNative + : exports.utcParse(isoSpecifier); + +var durationSecond$1 = 1000, + durationMinute$1 = durationSecond$1 * 60, + durationHour$1 = durationMinute$1 * 60, + durationDay$1 = durationHour$1 * 24, + durationWeek$1 = durationDay$1 * 7, + durationMonth = durationDay$1 * 30, + durationYear = durationDay$1 * 365; + +function date$1(t) { + return new Date(t); +} + +function number$3(t) { + return t instanceof Date ? +t : +new Date(+t); +} + +function calendar(year, month, week, day, hour, minute, second, millisecond, format) { + var scale = continuous(identity$6, identity$6), + invert = scale.invert, + domain = scale.domain; + + var formatMillisecond = format(".%L"), + formatSecond = format(":%S"), + formatMinute = format("%I:%M"), + formatHour = format("%I %p"), + formatDay = format("%a %d"), + formatWeek = format("%b %d"), + formatMonth = format("%B"), + formatYear = format("%Y"); + + var tickIntervals = [ + [second, 1, durationSecond$1], + [second, 5, 5 * durationSecond$1], + [second, 15, 15 * durationSecond$1], + [second, 30, 30 * durationSecond$1], + [minute, 1, durationMinute$1], + [minute, 5, 5 * durationMinute$1], + [minute, 15, 15 * durationMinute$1], + [minute, 30, 30 * durationMinute$1], + [ hour, 1, durationHour$1 ], + [ hour, 3, 3 * durationHour$1 ], + [ hour, 6, 6 * durationHour$1 ], + [ hour, 12, 12 * durationHour$1 ], + [ day, 1, durationDay$1 ], + [ day, 2, 2 * durationDay$1 ], + [ week, 1, durationWeek$1 ], + [ month, 1, durationMonth ], + [ month, 3, 3 * durationMonth ], + [ year, 1, durationYear ] + ]; + + function tickFormat(date) { + return (second(date) < date ? formatMillisecond + : minute(date) < date ? formatSecond + : hour(date) < date ? formatMinute + : day(date) < date ? formatHour + : month(date) < date ? (week(date) < date ? formatDay : formatWeek) + : year(date) < date ? 
formatMonth + : formatYear)(date); + } + + function tickInterval(interval, start, stop, step) { + if (interval == null) interval = 10; + + // If a desired tick count is specified, pick a reasonable tick interval + // based on the extent of the domain and a rough estimate of tick size. + // Otherwise, assume interval is already a time interval and use it. + if (typeof interval === "number") { + var target = Math.abs(stop - start) / interval, + i = bisector(function(i) { return i[2]; }).right(tickIntervals, target); + if (i === tickIntervals.length) { + step = tickStep(start / durationYear, stop / durationYear, interval); + interval = year; + } else if (i) { + i = tickIntervals[target / tickIntervals[i - 1][2] < tickIntervals[i][2] / target ? i - 1 : i]; + step = i[1]; + interval = i[0]; + } else { + step = Math.max(tickStep(start, stop, interval), 1); + interval = millisecond; + } + } + + return step == null ? interval : interval.every(step); + } + + scale.invert = function(y) { + return new Date(invert(y)); + }; + + scale.domain = function(_) { + return arguments.length ? domain(map$3.call(_, number$3)) : domain().map(date$1); + }; + + scale.ticks = function(interval, step) { + var d = domain(), + t0 = d[0], + t1 = d[d.length - 1], + r = t1 < t0, + t; + if (r) t = t0, t0 = t1, t1 = t; + t = tickInterval(interval, t0, t1, step); + t = t ? t.range(t0, t1 + 1) : []; // inclusive stop + return r ? t.reverse() : t; + }; + + scale.tickFormat = function(count, specifier) { + return specifier == null ? tickFormat : format(specifier); + }; + + scale.nice = function(interval, step) { + var d = domain(); + return (interval = tickInterval(interval, d[0], d[d.length - 1], step)) + ? 
domain(nice(d, interval)) + : scale; + }; + + scale.copy = function() { + return copy(scale, calendar(year, month, week, day, hour, minute, second, millisecond, format)); + }; + + return scale; +} + +function time() { + return initRange.apply(calendar(year, month, sunday, day, hour, minute, second, millisecond, exports.timeFormat).domain([new Date(2000, 0, 1), new Date(2000, 0, 2)]), arguments); +} + +function utcTime() { + return initRange.apply(calendar(utcYear, utcMonth, utcSunday, utcDay, utcHour, utcMinute, second, millisecond, exports.utcFormat).domain([Date.UTC(2000, 0, 1), Date.UTC(2000, 0, 2)]), arguments); +} + +function transformer$2() { + var x0 = 0, + x1 = 1, + t0, + t1, + k10, + transform, + interpolator = identity$6, + clamp = false, + unknown; + + function scale(x) { + return isNaN(x = +x) ? unknown : interpolator(k10 === 0 ? 0.5 : (x = (transform(x) - t0) * k10, clamp ? Math.max(0, Math.min(1, x)) : x)); + } + + scale.domain = function(_) { + return arguments.length ? (t0 = transform(x0 = +_[0]), t1 = transform(x1 = +_[1]), k10 = t0 === t1 ? 0 : 1 / (t1 - t0), scale) : [x0, x1]; + }; + + scale.clamp = function(_) { + return arguments.length ? (clamp = !!_, scale) : clamp; + }; + + scale.interpolator = function(_) { + return arguments.length ? (interpolator = _, scale) : interpolator; + }; + + scale.unknown = function(_) { + return arguments.length ? (unknown = _, scale) : unknown; + }; + + return function(t) { + transform = t, t0 = t(x0), t1 = t(x1), k10 = t0 === t1 ? 
0 : 1 / (t1 - t0); + return scale; + }; +} + +function copy$1(source, target) { + return target + .domain(source.domain()) + .interpolator(source.interpolator()) + .clamp(source.clamp()) + .unknown(source.unknown()); +} + +function sequential() { + var scale = linearish(transformer$2()(identity$6)); + + scale.copy = function() { + return copy$1(scale, sequential()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function sequentialLog() { + var scale = loggish(transformer$2()).domain([1, 10]); + + scale.copy = function() { + return copy$1(scale, sequentialLog()).base(scale.base()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function sequentialSymlog() { + var scale = symlogish(transformer$2()); + + scale.copy = function() { + return copy$1(scale, sequentialSymlog()).constant(scale.constant()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function sequentialPow() { + var scale = powish(transformer$2()); + + scale.copy = function() { + return copy$1(scale, sequentialPow()).exponent(scale.exponent()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function sequentialSqrt() { + return sequentialPow.apply(null, arguments).exponent(0.5); +} + +function sequentialQuantile() { + var domain = [], + interpolator = identity$6; + + function scale(x) { + if (!isNaN(x = +x)) return interpolator((bisectRight(domain, x) - 1) / (domain.length - 1)); + } + + scale.domain = function(_) { + if (!arguments.length) return domain.slice(); + domain = []; + for (var i = 0, n = _.length, d; i < n; ++i) if (d = _[i], d != null && !isNaN(d = +d)) domain.push(d); + domain.sort(ascending); + return scale; + }; + + scale.interpolator = function(_) { + return arguments.length ? 
(interpolator = _, scale) : interpolator; + }; + + scale.copy = function() { + return sequentialQuantile(interpolator).domain(domain); + }; + + return initInterpolator.apply(scale, arguments); +} + +function transformer$3() { + var x0 = 0, + x1 = 0.5, + x2 = 1, + t0, + t1, + t2, + k10, + k21, + interpolator = identity$6, + transform, + clamp = false, + unknown; + + function scale(x) { + return isNaN(x = +x) ? unknown : (x = 0.5 + ((x = +transform(x)) - t1) * (x < t1 ? k10 : k21), interpolator(clamp ? Math.max(0, Math.min(1, x)) : x)); + } + + scale.domain = function(_) { + return arguments.length ? (t0 = transform(x0 = +_[0]), t1 = transform(x1 = +_[1]), t2 = transform(x2 = +_[2]), k10 = t0 === t1 ? 0 : 0.5 / (t1 - t0), k21 = t1 === t2 ? 0 : 0.5 / (t2 - t1), scale) : [x0, x1, x2]; + }; + + scale.clamp = function(_) { + return arguments.length ? (clamp = !!_, scale) : clamp; + }; + + scale.interpolator = function(_) { + return arguments.length ? (interpolator = _, scale) : interpolator; + }; + + scale.unknown = function(_) { + return arguments.length ? (unknown = _, scale) : unknown; + }; + + return function(t) { + transform = t, t0 = t(x0), t1 = t(x1), t2 = t(x2), k10 = t0 === t1 ? 0 : 0.5 / (t1 - t0), k21 = t1 === t2 ? 
0 : 0.5 / (t2 - t1); + return scale; + }; +} + +function diverging() { + var scale = linearish(transformer$3()(identity$6)); + + scale.copy = function() { + return copy$1(scale, diverging()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function divergingLog() { + var scale = loggish(transformer$3()).domain([0.1, 1, 10]); + + scale.copy = function() { + return copy$1(scale, divergingLog()).base(scale.base()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function divergingSymlog() { + var scale = symlogish(transformer$3()); + + scale.copy = function() { + return copy$1(scale, divergingSymlog()).constant(scale.constant()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function divergingPow() { + var scale = powish(transformer$3()); + + scale.copy = function() { + return copy$1(scale, divergingPow()).exponent(scale.exponent()); + }; + + return initInterpolator.apply(scale, arguments); +} + +function divergingSqrt() { + return divergingPow.apply(null, arguments).exponent(0.5); +} + +function colors(specifier) { + var n = specifier.length / 6 | 0, colors = new Array(n), i = 0; + while (i < n) colors[i] = "#" + specifier.slice(i * 6, ++i * 6); + return colors; +} + +var category10 = colors("1f77b4ff7f0e2ca02cd627289467bd8c564be377c27f7f7fbcbd2217becf"); + +var Accent = colors("7fc97fbeaed4fdc086ffff99386cb0f0027fbf5b17666666"); + +var Dark2 = colors("1b9e77d95f027570b3e7298a66a61ee6ab02a6761d666666"); + +var Paired = colors("a6cee31f78b4b2df8a33a02cfb9a99e31a1cfdbf6fff7f00cab2d66a3d9affff99b15928"); + +var Pastel1 = colors("fbb4aeb3cde3ccebc5decbe4fed9a6ffffcce5d8bdfddaecf2f2f2"); + +var Pastel2 = colors("b3e2cdfdcdaccbd5e8f4cae4e6f5c9fff2aef1e2cccccccc"); + +var Set1 = colors("e41a1c377eb84daf4a984ea3ff7f00ffff33a65628f781bf999999"); + +var Set2 = colors("66c2a5fc8d628da0cbe78ac3a6d854ffd92fe5c494b3b3b3"); + +var Set3 = colors("8dd3c7ffffb3bebadafb807280b1d3fdb462b3de69fccde5d9d9d9bc80bdccebc5ffed6f"); + +var 
Tableau10 = colors("4e79a7f28e2ce1575976b7b259a14fedc949af7aa1ff9da79c755fbab0ab"); + +function ramp(scheme) { + return rgbBasis(scheme[scheme.length - 1]); +} + +var scheme = new Array(3).concat( + "d8b365f5f5f55ab4ac", + "a6611adfc27d80cdc1018571", + "a6611adfc27df5f5f580cdc1018571", + "8c510ad8b365f6e8c3c7eae55ab4ac01665e", + "8c510ad8b365f6e8c3f5f5f5c7eae55ab4ac01665e", + "8c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e", + "8c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e", + "5430058c510abf812ddfc27df6e8c3c7eae580cdc135978f01665e003c30", + "5430058c510abf812ddfc27df6e8c3f5f5f5c7eae580cdc135978f01665e003c30" +).map(colors); + +var BrBG = ramp(scheme); + +var scheme$1 = new Array(3).concat( + "af8dc3f7f7f77fbf7b", + "7b3294c2a5cfa6dba0008837", + "7b3294c2a5cff7f7f7a6dba0008837", + "762a83af8dc3e7d4e8d9f0d37fbf7b1b7837", + "762a83af8dc3e7d4e8f7f7f7d9f0d37fbf7b1b7837", + "762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b7837", + "762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b7837", + "40004b762a839970abc2a5cfe7d4e8d9f0d3a6dba05aae611b783700441b", + "40004b762a839970abc2a5cfe7d4e8f7f7f7d9f0d3a6dba05aae611b783700441b" +).map(colors); + +var PRGn = ramp(scheme$1); + +var scheme$2 = new Array(3).concat( + "e9a3c9f7f7f7a1d76a", + "d01c8bf1b6dab8e1864dac26", + "d01c8bf1b6daf7f7f7b8e1864dac26", + "c51b7de9a3c9fde0efe6f5d0a1d76a4d9221", + "c51b7de9a3c9fde0eff7f7f7e6f5d0a1d76a4d9221", + "c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221", + "c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221", + "8e0152c51b7dde77aef1b6dafde0efe6f5d0b8e1867fbc414d9221276419", + "8e0152c51b7dde77aef1b6dafde0eff7f7f7e6f5d0b8e1867fbc414d9221276419" +).map(colors); + +var PiYG = ramp(scheme$2); + +var scheme$3 = new Array(3).concat( + "998ec3f7f7f7f1a340", + "5e3c99b2abd2fdb863e66101", + "5e3c99b2abd2f7f7f7fdb863e66101", + "542788998ec3d8daebfee0b6f1a340b35806", + "542788998ec3d8daebf7f7f7fee0b6f1a340b35806", + "5427888073acb2abd2d8daebfee0b6fdb863e08214b35806", + 
"5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b35806", + "2d004b5427888073acb2abd2d8daebfee0b6fdb863e08214b358067f3b08", + "2d004b5427888073acb2abd2d8daebf7f7f7fee0b6fdb863e08214b358067f3b08" +).map(colors); + +var PuOr = ramp(scheme$3); + +var scheme$4 = new Array(3).concat( + "ef8a62f7f7f767a9cf", + "ca0020f4a58292c5de0571b0", + "ca0020f4a582f7f7f792c5de0571b0", + "b2182bef8a62fddbc7d1e5f067a9cf2166ac", + "b2182bef8a62fddbc7f7f7f7d1e5f067a9cf2166ac", + "b2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac", + "b2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac", + "67001fb2182bd6604df4a582fddbc7d1e5f092c5de4393c32166ac053061", + "67001fb2182bd6604df4a582fddbc7f7f7f7d1e5f092c5de4393c32166ac053061" +).map(colors); + +var RdBu = ramp(scheme$4); + +var scheme$5 = new Array(3).concat( + "ef8a62ffffff999999", + "ca0020f4a582bababa404040", + "ca0020f4a582ffffffbababa404040", + "b2182bef8a62fddbc7e0e0e09999994d4d4d", + "b2182bef8a62fddbc7ffffffe0e0e09999994d4d4d", + "b2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d", + "b2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d", + "67001fb2182bd6604df4a582fddbc7e0e0e0bababa8787874d4d4d1a1a1a", + "67001fb2182bd6604df4a582fddbc7ffffffe0e0e0bababa8787874d4d4d1a1a1a" +).map(colors); + +var RdGy = ramp(scheme$5); + +var scheme$6 = new Array(3).concat( + "fc8d59ffffbf91bfdb", + "d7191cfdae61abd9e92c7bb6", + "d7191cfdae61ffffbfabd9e92c7bb6", + "d73027fc8d59fee090e0f3f891bfdb4575b4", + "d73027fc8d59fee090ffffbfe0f3f891bfdb4575b4", + "d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4", + "d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4", + "a50026d73027f46d43fdae61fee090e0f3f8abd9e974add14575b4313695", + "a50026d73027f46d43fdae61fee090ffffbfe0f3f8abd9e974add14575b4313695" +).map(colors); + +var RdYlBu = ramp(scheme$6); + +var scheme$7 = new Array(3).concat( + "fc8d59ffffbf91cf60", + "d7191cfdae61a6d96a1a9641", + "d7191cfdae61ffffbfa6d96a1a9641", + "d73027fc8d59fee08bd9ef8b91cf601a9850", + 
"d73027fc8d59fee08bffffbfd9ef8b91cf601a9850", + "d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850", + "d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850", + "a50026d73027f46d43fdae61fee08bd9ef8ba6d96a66bd631a9850006837", + "a50026d73027f46d43fdae61fee08bffffbfd9ef8ba6d96a66bd631a9850006837" +).map(colors); + +var RdYlGn = ramp(scheme$7); + +var scheme$8 = new Array(3).concat( + "fc8d59ffffbf99d594", + "d7191cfdae61abdda42b83ba", + "d7191cfdae61ffffbfabdda42b83ba", + "d53e4ffc8d59fee08be6f59899d5943288bd", + "d53e4ffc8d59fee08bffffbfe6f59899d5943288bd", + "d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd", + "d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd", + "9e0142d53e4ff46d43fdae61fee08be6f598abdda466c2a53288bd5e4fa2", + "9e0142d53e4ff46d43fdae61fee08bffffbfe6f598abdda466c2a53288bd5e4fa2" +).map(colors); + +var Spectral = ramp(scheme$8); + +var scheme$9 = new Array(3).concat( + "e5f5f999d8c92ca25f", + "edf8fbb2e2e266c2a4238b45", + "edf8fbb2e2e266c2a42ca25f006d2c", + "edf8fbccece699d8c966c2a42ca25f006d2c", + "edf8fbccece699d8c966c2a441ae76238b45005824", + "f7fcfde5f5f9ccece699d8c966c2a441ae76238b45005824", + "f7fcfde5f5f9ccece699d8c966c2a441ae76238b45006d2c00441b" +).map(colors); + +var BuGn = ramp(scheme$9); + +var scheme$a = new Array(3).concat( + "e0ecf49ebcda8856a7", + "edf8fbb3cde38c96c688419d", + "edf8fbb3cde38c96c68856a7810f7c", + "edf8fbbfd3e69ebcda8c96c68856a7810f7c", + "edf8fbbfd3e69ebcda8c96c68c6bb188419d6e016b", + "f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d6e016b", + "f7fcfde0ecf4bfd3e69ebcda8c96c68c6bb188419d810f7c4d004b" +).map(colors); + +var BuPu = ramp(scheme$a); + +var scheme$b = new Array(3).concat( + "e0f3dba8ddb543a2ca", + "f0f9e8bae4bc7bccc42b8cbe", + "f0f9e8bae4bc7bccc443a2ca0868ac", + "f0f9e8ccebc5a8ddb57bccc443a2ca0868ac", + "f0f9e8ccebc5a8ddb57bccc44eb3d32b8cbe08589e", + "f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe08589e", + "f7fcf0e0f3dbccebc5a8ddb57bccc44eb3d32b8cbe0868ac084081" +).map(colors); + +var GnBu = ramp(scheme$b); 
+ +var scheme$c = new Array(3).concat( + "fee8c8fdbb84e34a33", + "fef0d9fdcc8afc8d59d7301f", + "fef0d9fdcc8afc8d59e34a33b30000", + "fef0d9fdd49efdbb84fc8d59e34a33b30000", + "fef0d9fdd49efdbb84fc8d59ef6548d7301f990000", + "fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301f990000", + "fff7ecfee8c8fdd49efdbb84fc8d59ef6548d7301fb300007f0000" +).map(colors); + +var OrRd = ramp(scheme$c); + +var scheme$d = new Array(3).concat( + "ece2f0a6bddb1c9099", + "f6eff7bdc9e167a9cf02818a", + "f6eff7bdc9e167a9cf1c9099016c59", + "f6eff7d0d1e6a6bddb67a9cf1c9099016c59", + "f6eff7d0d1e6a6bddb67a9cf3690c002818a016450", + "fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016450", + "fff7fbece2f0d0d1e6a6bddb67a9cf3690c002818a016c59014636" +).map(colors); + +var PuBuGn = ramp(scheme$d); + +var scheme$e = new Array(3).concat( + "ece7f2a6bddb2b8cbe", + "f1eef6bdc9e174a9cf0570b0", + "f1eef6bdc9e174a9cf2b8cbe045a8d", + "f1eef6d0d1e6a6bddb74a9cf2b8cbe045a8d", + "f1eef6d0d1e6a6bddb74a9cf3690c00570b0034e7b", + "fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0034e7b", + "fff7fbece7f2d0d1e6a6bddb74a9cf3690c00570b0045a8d023858" +).map(colors); + +var PuBu = ramp(scheme$e); + +var scheme$f = new Array(3).concat( + "e7e1efc994c7dd1c77", + "f1eef6d7b5d8df65b0ce1256", + "f1eef6d7b5d8df65b0dd1c77980043", + "f1eef6d4b9dac994c7df65b0dd1c77980043", + "f1eef6d4b9dac994c7df65b0e7298ace125691003f", + "f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125691003f", + "f7f4f9e7e1efd4b9dac994c7df65b0e7298ace125698004367001f" +).map(colors); + +var PuRd = ramp(scheme$f); + +var scheme$g = new Array(3).concat( + "fde0ddfa9fb5c51b8a", + "feebe2fbb4b9f768a1ae017e", + "feebe2fbb4b9f768a1c51b8a7a0177", + "feebe2fcc5c0fa9fb5f768a1c51b8a7a0177", + "feebe2fcc5c0fa9fb5f768a1dd3497ae017e7a0177", + "fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a0177", + "fff7f3fde0ddfcc5c0fa9fb5f768a1dd3497ae017e7a017749006a" +).map(colors); + +var RdPu = ramp(scheme$g); + +var scheme$h = new Array(3).concat( + "edf8b17fcdbb2c7fb8", + "ffffcca1dab441b6c4225ea8", + 
"ffffcca1dab441b6c42c7fb8253494", + "ffffccc7e9b47fcdbb41b6c42c7fb8253494", + "ffffccc7e9b47fcdbb41b6c41d91c0225ea80c2c84", + "ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea80c2c84", + "ffffd9edf8b1c7e9b47fcdbb41b6c41d91c0225ea8253494081d58" +).map(colors); + +var YlGnBu = ramp(scheme$h); + +var scheme$i = new Array(3).concat( + "f7fcb9addd8e31a354", + "ffffccc2e69978c679238443", + "ffffccc2e69978c67931a354006837", + "ffffccd9f0a3addd8e78c67931a354006837", + "ffffccd9f0a3addd8e78c67941ab5d238443005a32", + "ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443005a32", + "ffffe5f7fcb9d9f0a3addd8e78c67941ab5d238443006837004529" +).map(colors); + +var YlGn = ramp(scheme$i); + +var scheme$j = new Array(3).concat( + "fff7bcfec44fd95f0e", + "ffffd4fed98efe9929cc4c02", + "ffffd4fed98efe9929d95f0e993404", + "ffffd4fee391fec44ffe9929d95f0e993404", + "ffffd4fee391fec44ffe9929ec7014cc4c028c2d04", + "ffffe5fff7bcfee391fec44ffe9929ec7014cc4c028c2d04", + "ffffe5fff7bcfee391fec44ffe9929ec7014cc4c02993404662506" +).map(colors); + +var YlOrBr = ramp(scheme$j); + +var scheme$k = new Array(3).concat( + "ffeda0feb24cf03b20", + "ffffb2fecc5cfd8d3ce31a1c", + "ffffb2fecc5cfd8d3cf03b20bd0026", + "ffffb2fed976feb24cfd8d3cf03b20bd0026", + "ffffb2fed976feb24cfd8d3cfc4e2ae31a1cb10026", + "ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cb10026", + "ffffccffeda0fed976feb24cfd8d3cfc4e2ae31a1cbd0026800026" +).map(colors); + +var YlOrRd = ramp(scheme$k); + +var scheme$l = new Array(3).concat( + "deebf79ecae13182bd", + "eff3ffbdd7e76baed62171b5", + "eff3ffbdd7e76baed63182bd08519c", + "eff3ffc6dbef9ecae16baed63182bd08519c", + "eff3ffc6dbef9ecae16baed64292c62171b5084594", + "f7fbffdeebf7c6dbef9ecae16baed64292c62171b5084594", + "f7fbffdeebf7c6dbef9ecae16baed64292c62171b508519c08306b" +).map(colors); + +var Blues = ramp(scheme$l); + +var scheme$m = new Array(3).concat( + "e5f5e0a1d99b31a354", + "edf8e9bae4b374c476238b45", + "edf8e9bae4b374c47631a354006d2c", + "edf8e9c7e9c0a1d99b74c47631a354006d2c", + 
"edf8e9c7e9c0a1d99b74c47641ab5d238b45005a32", + "f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45005a32", + "f7fcf5e5f5e0c7e9c0a1d99b74c47641ab5d238b45006d2c00441b" +).map(colors); + +var Greens = ramp(scheme$m); + +var scheme$n = new Array(3).concat( + "f0f0f0bdbdbd636363", + "f7f7f7cccccc969696525252", + "f7f7f7cccccc969696636363252525", + "f7f7f7d9d9d9bdbdbd969696636363252525", + "f7f7f7d9d9d9bdbdbd969696737373525252252525", + "fffffff0f0f0d9d9d9bdbdbd969696737373525252252525", + "fffffff0f0f0d9d9d9bdbdbd969696737373525252252525000000" +).map(colors); + +var Greys = ramp(scheme$n); + +var scheme$o = new Array(3).concat( + "efedf5bcbddc756bb1", + "f2f0f7cbc9e29e9ac86a51a3", + "f2f0f7cbc9e29e9ac8756bb154278f", + "f2f0f7dadaebbcbddc9e9ac8756bb154278f", + "f2f0f7dadaebbcbddc9e9ac8807dba6a51a34a1486", + "fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a34a1486", + "fcfbfdefedf5dadaebbcbddc9e9ac8807dba6a51a354278f3f007d" +).map(colors); + +var Purples = ramp(scheme$o); + +var scheme$p = new Array(3).concat( + "fee0d2fc9272de2d26", + "fee5d9fcae91fb6a4acb181d", + "fee5d9fcae91fb6a4ade2d26a50f15", + "fee5d9fcbba1fc9272fb6a4ade2d26a50f15", + "fee5d9fcbba1fc9272fb6a4aef3b2ccb181d99000d", + "fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181d99000d", + "fff5f0fee0d2fcbba1fc9272fb6a4aef3b2ccb181da50f1567000d" +).map(colors); + +var Reds = ramp(scheme$p); + +var scheme$q = new Array(3).concat( + "fee6cefdae6be6550d", + "feeddefdbe85fd8d3cd94701", + "feeddefdbe85fd8d3ce6550da63603", + "feeddefdd0a2fdae6bfd8d3ce6550da63603", + "feeddefdd0a2fdae6bfd8d3cf16913d948018c2d04", + "fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d948018c2d04", + "fff5ebfee6cefdd0a2fdae6bfd8d3cf16913d94801a636037f2704" +).map(colors); + +var Oranges = ramp(scheme$q); + +function cividis(t) { + t = Math.max(0, Math.min(1, t)); + return "rgb(" + + Math.max(0, Math.min(255, Math.round(-4.54 - t * (35.34 - t * (2381.73 - t * (6402.7 - t * (7024.72 - t * 2710.57))))))) + ", " + + Math.max(0, Math.min(255, Math.round(32.49 + t * (170.73 + t 
* (52.82 - t * (131.46 - t * (176.58 - t * 67.37))))))) + ", " + + Math.max(0, Math.min(255, Math.round(81.24 + t * (442.36 - t * (2482.43 - t * (6167.24 - t * (6614.94 - t * 2475.67))))))) + + ")"; +} + +var cubehelix$3 = cubehelixLong(cubehelix(300, 0.5, 0.0), cubehelix(-240, 0.5, 1.0)); + +var warm = cubehelixLong(cubehelix(-100, 0.75, 0.35), cubehelix(80, 1.50, 0.8)); + +var cool = cubehelixLong(cubehelix(260, 0.75, 0.35), cubehelix(80, 1.50, 0.8)); + +var c = cubehelix(); + +function rainbow(t) { + if (t < 0 || t > 1) t -= Math.floor(t); + var ts = Math.abs(t - 0.5); + c.h = 360 * t - 100; + c.s = 1.5 - 1.5 * ts; + c.l = 0.8 - 0.9 * ts; + return c + ""; +} + +var c$1 = rgb(), + pi_1_3 = Math.PI / 3, + pi_2_3 = Math.PI * 2 / 3; + +function sinebow(t) { + var x; + t = (0.5 - t) * Math.PI; + c$1.r = 255 * (x = Math.sin(t)) * x; + c$1.g = 255 * (x = Math.sin(t + pi_1_3)) * x; + c$1.b = 255 * (x = Math.sin(t + pi_2_3)) * x; + return c$1 + ""; +} + +function turbo(t) { + t = Math.max(0, Math.min(1, t)); + return "rgb(" + + Math.max(0, Math.min(255, Math.round(34.61 + t * (1172.33 - t * (10793.56 - t * (33300.12 - t * (38394.49 - t * 14825.05))))))) + ", " + + Math.max(0, Math.min(255, Math.round(23.31 + t * (557.33 + t * (1225.33 - t * (3574.96 - t * (1073.77 + t * 707.56))))))) + ", " + + Math.max(0, Math.min(255, Math.round(27.2 + t * (3211.1 - t * (15327.97 - t * (27814 - t * (22569.18 - t * 6838.66))))))) + + ")"; +} + +function ramp$1(range) { + var n = range.length; + return function(t) { + return range[Math.max(0, Math.min(n - 1, Math.floor(t * n)))]; + }; +} + +var viridis = 
ramp$1(colors("44015444025645045745055946075a46085c460a5d460b5e470d60470e6147106347116447136548146748166848176948186a481a6c481b6d481c6e481d6f481f70482071482173482374482475482576482677482878482979472a7a472c7a472d7b472e7c472f7d46307e46327e46337f463480453581453781453882443983443a83443b84433d84433e85423f854240864241864142874144874045884046883f47883f48893e49893e4a893e4c8a3d4d8a3d4e8a3c4f8a3c508b3b518b3b528b3a538b3a548c39558c39568c38588c38598c375a8c375b8d365c8d365d8d355e8d355f8d34608d34618d33628d33638d32648e32658e31668e31678e31688e30698e306a8e2f6b8e2f6c8e2e6d8e2e6e8e2e6f8e2d708e2d718e2c718e2c728e2c738e2b748e2b758e2a768e2a778e2a788e29798e297a8e297b8e287c8e287d8e277e8e277f8e27808e26818e26828e26828e25838e25848e25858e24868e24878e23888e23898e238a8d228b8d228c8d228d8d218e8d218f8d21908d21918c20928c20928c20938c1f948c1f958b1f968b1f978b1f988b1f998a1f9a8a1e9b8a1e9c891e9d891f9e891f9f881fa0881fa1881fa1871fa28720a38620a48621a58521a68522a78522a88423a98324aa8325ab8225ac8226ad8127ad8128ae8029af7f2ab07f2cb17e2db27d2eb37c2fb47c31b57b32b67a34b67935b77937b87838b9773aba763bbb753dbc743fbc7340bd7242be7144bf7046c06f48c16e4ac16d4cc26c4ec36b50c46a52c56954c56856c66758c7655ac8645cc8635ec96260ca6063cb5f65cb5e67cc5c69cd5b6ccd5a6ece5870cf5773d05675d05477d1537ad1517cd2507fd34e81d34d84d44b86d54989d5488bd6468ed64590d74393d74195d84098d83e9bd93c9dd93ba0da39a2da37a5db36a8db34aadc32addc30b0dd2fb2dd2db5de2bb8de29bade28bddf26c0df25c2df23c5e021c8e020cae11fcde11dd0e11cd2e21bd5e21ad8e219dae319dde318dfe318e2e418e5e419e7e419eae51aece51befe51cf1e51df4e61ef6e620f8e621fbe723fde725")); + +var magma = 
ramp$1(colors("00000401000501010601010802010902020b02020d03030f03031204041405041606051806051a07061c08071e0907200a08220b09240c09260d0a290e0b2b100b2d110c2f120d31130d34140e36150e38160f3b180f3d19103f1a10421c10441d11471e114920114b21114e22115024125325125527125829115a2a115c2c115f2d11612f116331116533106734106936106b38106c390f6e3b0f703d0f713f0f72400f74420f75440f764510774710784910784a10794c117a4e117b4f127b51127c52137c54137d56147d57157e59157e5a167e5c167f5d177f5f187f601880621980641a80651a80671b80681c816a1c816b1d816d1d816e1e81701f81721f817320817521817621817822817922827b23827c23827e24828025828125818326818426818627818827818928818b29818c29818e2a81902a81912b81932b80942c80962c80982d80992d809b2e7f9c2e7f9e2f7fa02f7fa1307ea3307ea5317ea6317da8327daa337dab337cad347cae347bb0357bb2357bb3367ab5367ab73779b83779ba3878bc3978bd3977bf3a77c03a76c23b75c43c75c53c74c73d73c83e73ca3e72cc3f71cd4071cf4070d0416fd2426fd3436ed5446dd6456cd8456cd9466bdb476adc4869de4968df4a68e04c67e24d66e34e65e44f64e55064e75263e85362e95462ea5661eb5760ec5860ed5a5fee5b5eef5d5ef05f5ef1605df2625df2645cf3655cf4675cf4695cf56b5cf66c5cf66e5cf7705cf7725cf8745cf8765cf9785df9795df97b5dfa7d5efa7f5efa815ffb835ffb8560fb8761fc8961fc8a62fc8c63fc8e64fc9065fd9266fd9467fd9668fd9869fd9a6afd9b6bfe9d6cfe9f6dfea16efea36ffea571fea772fea973feaa74feac76feae77feb078feb27afeb47bfeb67cfeb77efeb97ffebb81febd82febf84fec185fec287fec488fec68afec88cfeca8dfecc8ffecd90fecf92fed194fed395fed597fed799fed89afdda9cfddc9efddea0fde0a1fde2a3fde3a5fde5a7fde7a9fde9aafdebacfcecaefceeb0fcf0b2fcf2b4fcf4b6fcf6b8fcf7b9fcf9bbfcfbbdfcfdbf")); + +var inferno = 
ramp$1(colors("00000401000501010601010802010a02020c02020e03021004031204031405041706041907051b08051d09061f0a07220b07240c08260d08290e092b10092d110a30120a32140b34150b37160b39180c3c190c3e1b0c411c0c431e0c451f0c48210c4a230c4c240c4f260c51280b53290b552b0b572d0b592f0a5b310a5c320a5e340a5f3609613809623909633b09643d09653e0966400a67420a68440a68450a69470b6a490b6a4a0c6b4c0c6b4d0d6c4f0d6c510e6c520e6d540f6d550f6d57106e59106e5a116e5c126e5d126e5f136e61136e62146e64156e65156e67166e69166e6a176e6c186e6d186e6f196e71196e721a6e741a6e751b6e771c6d781c6d7a1d6d7c1d6d7d1e6d7f1e6c801f6c82206c84206b85216b87216b88226a8a226a8c23698d23698f24699025689225689326679526679727669827669a28659b29649d29649f2a63a02a63a22b62a32c61a52c60a62d60a82e5fa92e5eab2f5ead305dae305cb0315bb1325ab3325ab43359b63458b73557b93556ba3655bc3754bd3853bf3952c03a51c13a50c33b4fc43c4ec63d4dc73e4cc83f4bca404acb4149cc4248ce4347cf4446d04545d24644d34743d44842d54a41d74b3fd84c3ed94d3dda4e3cdb503bdd513ade5238df5337e05536e15635e25734e35933e45a31e55c30e65d2fe75e2ee8602de9612bea632aeb6429eb6628ec6726ed6925ee6a24ef6c23ef6e21f06f20f1711ff1731df2741cf3761bf37819f47918f57b17f57d15f67e14f68013f78212f78410f8850ff8870ef8890cf98b0bf98c0af98e09fa9008fa9207fa9407fb9606fb9706fb9906fb9b06fb9d07fc9f07fca108fca309fca50afca60cfca80dfcaa0ffcac11fcae12fcb014fcb216fcb418fbb61afbb81dfbba1ffbbc21fbbe23fac026fac228fac42afac62df9c72ff9c932f9cb35f8cd37f8cf3af7d13df7d340f6d543f6d746f5d949f5db4cf4dd4ff4df53f4e156f3e35af3e55df2e661f2e865f2ea69f1ec6df1ed71f1ef75f1f179f2f27df2f482f3f586f3f68af4f88ef5f992f6fa96f8fb9af9fc9dfafda1fcffa4")); + +var plasma = 
ramp$1(colors("0d088710078813078916078a19068c1b068d1d068e20068f2206902406912605912805922a05932c05942e05952f059631059733059735049837049938049a3a049a3c049b3e049c3f049c41049d43039e44039e46039f48039f4903a04b03a14c02a14e02a25002a25102a35302a35502a45601a45801a45901a55b01a55c01a65e01a66001a66100a76300a76400a76600a76700a86900a86a00a86c00a86e00a86f00a87100a87201a87401a87501a87701a87801a87a02a87b02a87d03a87e03a88004a88104a78305a78405a78606a68707a68808a68a09a58b0aa58d0ba58e0ca48f0da4910ea3920fa39410a29511a19613a19814a099159f9a169f9c179e9d189d9e199da01a9ca11b9ba21d9aa31e9aa51f99a62098a72197a82296aa2395ab2494ac2694ad2793ae2892b02991b12a90b22b8fb32c8eb42e8db52f8cb6308bb7318ab83289ba3388bb3488bc3587bd3786be3885bf3984c03a83c13b82c23c81c33d80c43e7fc5407ec6417dc7427cc8437bc9447aca457acb4679cc4778cc4977cd4a76ce4b75cf4c74d04d73d14e72d24f71d35171d45270d5536fd5546ed6556dd7566cd8576bd9586ada5a6ada5b69db5c68dc5d67dd5e66de5f65de6164df6263e06363e16462e26561e26660e3685fe4695ee56a5de56b5de66c5ce76e5be76f5ae87059e97158e97257ea7457eb7556eb7655ec7754ed7953ed7a52ee7b51ef7c51ef7e50f07f4ff0804ef1814df1834cf2844bf3854bf3874af48849f48948f58b47f58c46f68d45f68f44f79044f79143f79342f89441f89540f9973ff9983ef99a3efa9b3dfa9c3cfa9e3bfb9f3afba139fba238fca338fca537fca636fca835fca934fdab33fdac33fdae32fdaf31fdb130fdb22ffdb42ffdb52efeb72dfeb82cfeba2cfebb2bfebd2afebe2afec029fdc229fdc328fdc527fdc627fdc827fdca26fdcb26fccd25fcce25fcd025fcd225fbd324fbd524fbd724fad824fada24f9dc24f9dd25f8df25f8e125f7e225f7e425f6e626f6e826f5e926f5eb27f4ed27f3ee27f3f027f2f227f1f426f1f525f0f724f0f921")); + +function constant$b(x) { + return function constant() { + return x; + }; +} + +var abs$1 = Math.abs; +var atan2$1 = Math.atan2; +var cos$2 = Math.cos; +var max$2 = Math.max; +var min$1 = Math.min; +var sin$2 = Math.sin; +var sqrt$2 = Math.sqrt; + +var epsilon$3 = 1e-12; +var pi$4 = Math.PI; +var halfPi$3 = pi$4 / 2; +var tau$4 = 2 * pi$4; + +function acos$1(x) { + return x > 1 ? 0 : x < -1 ? 
pi$4 : Math.acos(x); +} + +function asin$1(x) { + return x >= 1 ? halfPi$3 : x <= -1 ? -halfPi$3 : Math.asin(x); +} + +function arcInnerRadius(d) { + return d.innerRadius; +} + +function arcOuterRadius(d) { + return d.outerRadius; +} + +function arcStartAngle(d) { + return d.startAngle; +} + +function arcEndAngle(d) { + return d.endAngle; +} + +function arcPadAngle(d) { + return d && d.padAngle; // Note: optional! +} + +function intersect(x0, y0, x1, y1, x2, y2, x3, y3) { + var x10 = x1 - x0, y10 = y1 - y0, + x32 = x3 - x2, y32 = y3 - y2, + t = y32 * x10 - x32 * y10; + if (t * t < epsilon$3) return; + t = (x32 * (y0 - y2) - y32 * (x0 - x2)) / t; + return [x0 + t * x10, y0 + t * y10]; +} + +// Compute perpendicular offset line of length rc. +// http://mathworld.wolfram.com/Circle-LineIntersection.html +function cornerTangents(x0, y0, x1, y1, r1, rc, cw) { + var x01 = x0 - x1, + y01 = y0 - y1, + lo = (cw ? rc : -rc) / sqrt$2(x01 * x01 + y01 * y01), + ox = lo * y01, + oy = -lo * x01, + x11 = x0 + ox, + y11 = y0 + oy, + x10 = x1 + ox, + y10 = y1 + oy, + x00 = (x11 + x10) / 2, + y00 = (y11 + y10) / 2, + dx = x10 - x11, + dy = y10 - y11, + d2 = dx * dx + dy * dy, + r = r1 - rc, + D = x11 * y10 - x10 * y11, + d = (dy < 0 ? -1 : 1) * sqrt$2(max$2(0, r * r * d2 - D * D)), + cx0 = (D * dy - dx * d) / d2, + cy0 = (-D * dx - dy * d) / d2, + cx1 = (D * dy + dx * d) / d2, + cy1 = (-D * dx + dy * d) / d2, + dx0 = cx0 - x00, + dy0 = cy0 - y00, + dx1 = cx1 - x00, + dy1 = cy1 - y00; + + // Pick the closer of the two intersection points. + // TODO Is there a faster way to determine which intersection to use? 
+ if (dx0 * dx0 + dy0 * dy0 > dx1 * dx1 + dy1 * dy1) cx0 = cx1, cy0 = cy1; + + return { + cx: cx0, + cy: cy0, + x01: -ox, + y01: -oy, + x11: cx0 * (r1 / r - 1), + y11: cy0 * (r1 / r - 1) + }; +} + +function arc() { + var innerRadius = arcInnerRadius, + outerRadius = arcOuterRadius, + cornerRadius = constant$b(0), + padRadius = null, + startAngle = arcStartAngle, + endAngle = arcEndAngle, + padAngle = arcPadAngle, + context = null; + + function arc() { + var buffer, + r, + r0 = +innerRadius.apply(this, arguments), + r1 = +outerRadius.apply(this, arguments), + a0 = startAngle.apply(this, arguments) - halfPi$3, + a1 = endAngle.apply(this, arguments) - halfPi$3, + da = abs$1(a1 - a0), + cw = a1 > a0; + + if (!context) context = buffer = path(); + + // Ensure that the outer radius is always larger than the inner radius. + if (r1 < r0) r = r1, r1 = r0, r0 = r; + + // Is it a point? + if (!(r1 > epsilon$3)) context.moveTo(0, 0); + + // Or is it a circle or annulus? + else if (da > tau$4 - epsilon$3) { + context.moveTo(r1 * cos$2(a0), r1 * sin$2(a0)); + context.arc(0, 0, r1, a0, a1, !cw); + if (r0 > epsilon$3) { + context.moveTo(r0 * cos$2(a1), r0 * sin$2(a1)); + context.arc(0, 0, r0, a1, a0, cw); + } + } + + // Or is it a circular or annular sector? + else { + var a01 = a0, + a11 = a1, + a00 = a0, + a10 = a1, + da0 = da, + da1 = da, + ap = padAngle.apply(this, arguments) / 2, + rp = (ap > epsilon$3) && (padRadius ? +padRadius.apply(this, arguments) : sqrt$2(r0 * r0 + r1 * r1)), + rc = min$1(abs$1(r1 - r0) / 2, +cornerRadius.apply(this, arguments)), + rc0 = rc, + rc1 = rc, + t0, + t1; + + // Apply padding? Note that since r1 ≥ r0, da1 ≥ da0. + if (rp > epsilon$3) { + var p0 = asin$1(rp / r0 * sin$2(ap)), + p1 = asin$1(rp / r1 * sin$2(ap)); + if ((da0 -= p0 * 2) > epsilon$3) p0 *= (cw ? 1 : -1), a00 += p0, a10 -= p0; + else da0 = 0, a00 = a10 = (a0 + a1) / 2; + if ((da1 -= p1 * 2) > epsilon$3) p1 *= (cw ? 
1 : -1), a01 += p1, a11 -= p1; + else da1 = 0, a01 = a11 = (a0 + a1) / 2; + } + + var x01 = r1 * cos$2(a01), + y01 = r1 * sin$2(a01), + x10 = r0 * cos$2(a10), + y10 = r0 * sin$2(a10); + + // Apply rounded corners? + if (rc > epsilon$3) { + var x11 = r1 * cos$2(a11), + y11 = r1 * sin$2(a11), + x00 = r0 * cos$2(a00), + y00 = r0 * sin$2(a00), + oc; + + // Restrict the corner radius according to the sector angle. + if (da < pi$4 && (oc = intersect(x01, y01, x00, y00, x11, y11, x10, y10))) { + var ax = x01 - oc[0], + ay = y01 - oc[1], + bx = x11 - oc[0], + by = y11 - oc[1], + kc = 1 / sin$2(acos$1((ax * bx + ay * by) / (sqrt$2(ax * ax + ay * ay) * sqrt$2(bx * bx + by * by))) / 2), + lc = sqrt$2(oc[0] * oc[0] + oc[1] * oc[1]); + rc0 = min$1(rc, (r0 - lc) / (kc - 1)); + rc1 = min$1(rc, (r1 - lc) / (kc + 1)); + } + } + + // Is the sector collapsed to a line? + if (!(da1 > epsilon$3)) context.moveTo(x01, y01); + + // Does the sector’s outer ring have rounded corners? + else if (rc1 > epsilon$3) { + t0 = cornerTangents(x00, y00, x01, y01, r1, rc1, cw); + t1 = cornerTangents(x11, y11, x10, y10, r1, rc1, cw); + + context.moveTo(t0.cx + t0.x01, t0.cy + t0.y01); + + // Have the corners merged? + if (rc1 < rc) context.arc(t0.cx, t0.cy, rc1, atan2$1(t0.y01, t0.x01), atan2$1(t1.y01, t1.x01), !cw); + + // Otherwise, draw the two corners and the ring. + else { + context.arc(t0.cx, t0.cy, rc1, atan2$1(t0.y01, t0.x01), atan2$1(t0.y11, t0.x11), !cw); + context.arc(0, 0, r1, atan2$1(t0.cy + t0.y11, t0.cx + t0.x11), atan2$1(t1.cy + t1.y11, t1.cx + t1.x11), !cw); + context.arc(t1.cx, t1.cy, rc1, atan2$1(t1.y11, t1.x11), atan2$1(t1.y01, t1.x01), !cw); + } + } + + // Or is the outer ring just a circular arc? + else context.moveTo(x01, y01), context.arc(0, 0, r1, a01, a11, !cw); + + // Is there no inner ring, and it’s a circular sector? + // Or perhaps it’s an annular sector collapsed due to padding? 
+ if (!(r0 > epsilon$3) || !(da0 > epsilon$3)) context.lineTo(x10, y10); + + // Does the sector’s inner ring (or point) have rounded corners? + else if (rc0 > epsilon$3) { + t0 = cornerTangents(x10, y10, x11, y11, r0, -rc0, cw); + t1 = cornerTangents(x01, y01, x00, y00, r0, -rc0, cw); + + context.lineTo(t0.cx + t0.x01, t0.cy + t0.y01); + + // Have the corners merged? + if (rc0 < rc) context.arc(t0.cx, t0.cy, rc0, atan2$1(t0.y01, t0.x01), atan2$1(t1.y01, t1.x01), !cw); + + // Otherwise, draw the two corners and the ring. + else { + context.arc(t0.cx, t0.cy, rc0, atan2$1(t0.y01, t0.x01), atan2$1(t0.y11, t0.x11), !cw); + context.arc(0, 0, r0, atan2$1(t0.cy + t0.y11, t0.cx + t0.x11), atan2$1(t1.cy + t1.y11, t1.cx + t1.x11), cw); + context.arc(t1.cx, t1.cy, rc0, atan2$1(t1.y11, t1.x11), atan2$1(t1.y01, t1.x01), !cw); + } + } + + // Or is the inner ring just a circular arc? + else context.arc(0, 0, r0, a10, a00, cw); + } + + context.closePath(); + + if (buffer) return context = null, buffer + "" || null; + } + + arc.centroid = function() { + var r = (+innerRadius.apply(this, arguments) + +outerRadius.apply(this, arguments)) / 2, + a = (+startAngle.apply(this, arguments) + +endAngle.apply(this, arguments)) / 2 - pi$4 / 2; + return [cos$2(a) * r, sin$2(a) * r]; + }; + + arc.innerRadius = function(_) { + return arguments.length ? (innerRadius = typeof _ === "function" ? _ : constant$b(+_), arc) : innerRadius; + }; + + arc.outerRadius = function(_) { + return arguments.length ? (outerRadius = typeof _ === "function" ? _ : constant$b(+_), arc) : outerRadius; + }; + + arc.cornerRadius = function(_) { + return arguments.length ? (cornerRadius = typeof _ === "function" ? _ : constant$b(+_), arc) : cornerRadius; + }; + + arc.padRadius = function(_) { + return arguments.length ? (padRadius = _ == null ? null : typeof _ === "function" ? _ : constant$b(+_), arc) : padRadius; + }; + + arc.startAngle = function(_) { + return arguments.length ? (startAngle = typeof _ === "function" ? 
_ : constant$b(+_), arc) : startAngle; + }; + + arc.endAngle = function(_) { + return arguments.length ? (endAngle = typeof _ === "function" ? _ : constant$b(+_), arc) : endAngle; + }; + + arc.padAngle = function(_) { + return arguments.length ? (padAngle = typeof _ === "function" ? _ : constant$b(+_), arc) : padAngle; + }; + + arc.context = function(_) { + return arguments.length ? ((context = _ == null ? null : _), arc) : context; + }; + + return arc; +} + +function Linear(context) { + this._context = context; +} + +Linear.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._point = 0; + }, + lineEnd: function() { + if (this._line || (this._line !== 0 && this._point === 1)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; this._line ? this._context.lineTo(x, y) : this._context.moveTo(x, y); break; + case 1: this._point = 2; // proceed + default: this._context.lineTo(x, y); break; + } + } +}; + +function curveLinear(context) { + return new Linear(context); +} + +function x$3(p) { + return p[0]; +} + +function y$3(p) { + return p[1]; +} + +function line() { + var x = x$3, + y = y$3, + defined = constant$b(true), + context = null, + curve = curveLinear, + output = null; + + function line(data) { + var i, + n = data.length, + d, + defined0 = false, + buffer; + + if (context == null) output = curve(buffer = path()); + + for (i = 0; i <= n; ++i) { + if (!(i < n && defined(d = data[i], i, data)) === defined0) { + if (defined0 = !defined0) output.lineStart(); + else output.lineEnd(); + } + if (defined0) output.point(+x(d, i, data), +y(d, i, data)); + } + + if (buffer) return output = null, buffer + "" || null; + } + + line.x = function(_) { + return arguments.length ? (x = typeof _ === "function" ? 
_ : constant$b(+_), line) : x; + }; + + line.y = function(_) { + return arguments.length ? (y = typeof _ === "function" ? _ : constant$b(+_), line) : y; + }; + + line.defined = function(_) { + return arguments.length ? (defined = typeof _ === "function" ? _ : constant$b(!!_), line) : defined; + }; + + line.curve = function(_) { + return arguments.length ? (curve = _, context != null && (output = curve(context)), line) : curve; + }; + + line.context = function(_) { + return arguments.length ? (_ == null ? context = output = null : output = curve(context = _), line) : context; + }; + + return line; +} + +function area$3() { + var x0 = x$3, + x1 = null, + y0 = constant$b(0), + y1 = y$3, + defined = constant$b(true), + context = null, + curve = curveLinear, + output = null; + + function area(data) { + var i, + j, + k, + n = data.length, + d, + defined0 = false, + buffer, + x0z = new Array(n), + y0z = new Array(n); + + if (context == null) output = curve(buffer = path()); + + for (i = 0; i <= n; ++i) { + if (!(i < n && defined(d = data[i], i, data)) === defined0) { + if (defined0 = !defined0) { + j = i; + output.areaStart(); + output.lineStart(); + } else { + output.lineEnd(); + output.lineStart(); + for (k = i - 1; k >= j; --k) { + output.point(x0z[k], y0z[k]); + } + output.lineEnd(); + output.areaEnd(); + } + } + if (defined0) { + x0z[i] = +x0(d, i, data), y0z[i] = +y0(d, i, data); + output.point(x1 ? +x1(d, i, data) : x0z[i], y1 ? +y1(d, i, data) : y0z[i]); + } + } + + if (buffer) return output = null, buffer + "" || null; + } + + function arealine() { + return line().defined(defined).curve(curve).context(context); + } + + area.x = function(_) { + return arguments.length ? (x0 = typeof _ === "function" ? _ : constant$b(+_), x1 = null, area) : x0; + }; + + area.x0 = function(_) { + return arguments.length ? (x0 = typeof _ === "function" ? _ : constant$b(+_), area) : x0; + }; + + area.x1 = function(_) { + return arguments.length ? (x1 = _ == null ? 
null : typeof _ === "function" ? _ : constant$b(+_), area) : x1; + }; + + area.y = function(_) { + return arguments.length ? (y0 = typeof _ === "function" ? _ : constant$b(+_), y1 = null, area) : y0; + }; + + area.y0 = function(_) { + return arguments.length ? (y0 = typeof _ === "function" ? _ : constant$b(+_), area) : y0; + }; + + area.y1 = function(_) { + return arguments.length ? (y1 = _ == null ? null : typeof _ === "function" ? _ : constant$b(+_), area) : y1; + }; + + area.lineX0 = + area.lineY0 = function() { + return arealine().x(x0).y(y0); + }; + + area.lineY1 = function() { + return arealine().x(x0).y(y1); + }; + + area.lineX1 = function() { + return arealine().x(x1).y(y0); + }; + + area.defined = function(_) { + return arguments.length ? (defined = typeof _ === "function" ? _ : constant$b(!!_), area) : defined; + }; + + area.curve = function(_) { + return arguments.length ? (curve = _, context != null && (output = curve(context)), area) : curve; + }; + + area.context = function(_) { + return arguments.length ? (_ == null ? context = output = null : output = curve(context = _), area) : context; + }; + + return area; +} + +function descending$1(a, b) { + return b < a ? -1 : b > a ? 1 : b >= a ? 0 : NaN; +} + +function identity$8(d) { + return d; +} + +function pie() { + var value = identity$8, + sortValues = descending$1, + sort = null, + startAngle = constant$b(0), + endAngle = constant$b(tau$4), + padAngle = constant$b(0); + + function pie(data) { + var i, + n = data.length, + j, + k, + sum = 0, + index = new Array(n), + arcs = new Array(n), + a0 = +startAngle.apply(this, arguments), + da = Math.min(tau$4, Math.max(-tau$4, endAngle.apply(this, arguments) - a0)), + a1, + p = Math.min(Math.abs(da) / n, padAngle.apply(this, arguments)), + pa = p * (da < 0 ? -1 : 1), + v; + + for (i = 0; i < n; ++i) { + if ((v = arcs[index[i] = i] = +value(data[i], i, data)) > 0) { + sum += v; + } + } + + // Optionally sort the arcs by previously-computed values or by data. 
+ if (sortValues != null) index.sort(function(i, j) { return sortValues(arcs[i], arcs[j]); }); + else if (sort != null) index.sort(function(i, j) { return sort(data[i], data[j]); }); + + // Compute the arcs! They are stored in the original data's order. + for (i = 0, k = sum ? (da - n * pa) / sum : 0; i < n; ++i, a0 = a1) { + j = index[i], v = arcs[j], a1 = a0 + (v > 0 ? v * k : 0) + pa, arcs[j] = { + data: data[j], + index: i, + value: v, + startAngle: a0, + endAngle: a1, + padAngle: p + }; + } + + return arcs; + } + + pie.value = function(_) { + return arguments.length ? (value = typeof _ === "function" ? _ : constant$b(+_), pie) : value; + }; + + pie.sortValues = function(_) { + return arguments.length ? (sortValues = _, sort = null, pie) : sortValues; + }; + + pie.sort = function(_) { + return arguments.length ? (sort = _, sortValues = null, pie) : sort; + }; + + pie.startAngle = function(_) { + return arguments.length ? (startAngle = typeof _ === "function" ? _ : constant$b(+_), pie) : startAngle; + }; + + pie.endAngle = function(_) { + return arguments.length ? (endAngle = typeof _ === "function" ? _ : constant$b(+_), pie) : endAngle; + }; + + pie.padAngle = function(_) { + return arguments.length ? (padAngle = typeof _ === "function" ? 
_ : constant$b(+_), pie) : padAngle; + }; + + return pie; +} + +var curveRadialLinear = curveRadial(curveLinear); + +function Radial(curve) { + this._curve = curve; +} + +Radial.prototype = { + areaStart: function() { + this._curve.areaStart(); + }, + areaEnd: function() { + this._curve.areaEnd(); + }, + lineStart: function() { + this._curve.lineStart(); + }, + lineEnd: function() { + this._curve.lineEnd(); + }, + point: function(a, r) { + this._curve.point(r * Math.sin(a), r * -Math.cos(a)); + } +}; + +function curveRadial(curve) { + + function radial(context) { + return new Radial(curve(context)); + } + + radial._curve = curve; + + return radial; +} + +function lineRadial(l) { + var c = l.curve; + + l.angle = l.x, delete l.x; + l.radius = l.y, delete l.y; + + l.curve = function(_) { + return arguments.length ? c(curveRadial(_)) : c()._curve; + }; + + return l; +} + +function lineRadial$1() { + return lineRadial(line().curve(curveRadialLinear)); +} + +function areaRadial() { + var a = area$3().curve(curveRadialLinear), + c = a.curve, + x0 = a.lineX0, + x1 = a.lineX1, + y0 = a.lineY0, + y1 = a.lineY1; + + a.angle = a.x, delete a.x; + a.startAngle = a.x0, delete a.x0; + a.endAngle = a.x1, delete a.x1; + a.radius = a.y, delete a.y; + a.innerRadius = a.y0, delete a.y0; + a.outerRadius = a.y1, delete a.y1; + a.lineStartAngle = function() { return lineRadial(x0()); }, delete a.lineX0; + a.lineEndAngle = function() { return lineRadial(x1()); }, delete a.lineX1; + a.lineInnerRadius = function() { return lineRadial(y0()); }, delete a.lineY0; + a.lineOuterRadius = function() { return lineRadial(y1()); }, delete a.lineY1; + + a.curve = function(_) { + return arguments.length ? 
c(curveRadial(_)) : c()._curve; + }; + + return a; +} + +function pointRadial(x, y) { + return [(y = +y) * Math.cos(x -= Math.PI / 2), y * Math.sin(x)]; +} + +var slice$6 = Array.prototype.slice; + +function linkSource(d) { + return d.source; +} + +function linkTarget(d) { + return d.target; +} + +function link$2(curve) { + var source = linkSource, + target = linkTarget, + x = x$3, + y = y$3, + context = null; + + function link() { + var buffer, argv = slice$6.call(arguments), s = source.apply(this, argv), t = target.apply(this, argv); + if (!context) context = buffer = path(); + curve(context, +x.apply(this, (argv[0] = s, argv)), +y.apply(this, argv), +x.apply(this, (argv[0] = t, argv)), +y.apply(this, argv)); + if (buffer) return context = null, buffer + "" || null; + } + + link.source = function(_) { + return arguments.length ? (source = _, link) : source; + }; + + link.target = function(_) { + return arguments.length ? (target = _, link) : target; + }; + + link.x = function(_) { + return arguments.length ? (x = typeof _ === "function" ? _ : constant$b(+_), link) : x; + }; + + link.y = function(_) { + return arguments.length ? (y = typeof _ === "function" ? _ : constant$b(+_), link) : y; + }; + + link.context = function(_) { + return arguments.length ? ((context = _ == null ? 
null : _), link) : context; + }; + + return link; +} + +function curveHorizontal(context, x0, y0, x1, y1) { + context.moveTo(x0, y0); + context.bezierCurveTo(x0 = (x0 + x1) / 2, y0, x0, y1, x1, y1); +} + +function curveVertical(context, x0, y0, x1, y1) { + context.moveTo(x0, y0); + context.bezierCurveTo(x0, y0 = (y0 + y1) / 2, x1, y0, x1, y1); +} + +function curveRadial$1(context, x0, y0, x1, y1) { + var p0 = pointRadial(x0, y0), + p1 = pointRadial(x0, y0 = (y0 + y1) / 2), + p2 = pointRadial(x1, y0), + p3 = pointRadial(x1, y1); + context.moveTo(p0[0], p0[1]); + context.bezierCurveTo(p1[0], p1[1], p2[0], p2[1], p3[0], p3[1]); +} + +function linkHorizontal() { + return link$2(curveHorizontal); +} + +function linkVertical() { + return link$2(curveVertical); +} + +function linkRadial() { + var l = link$2(curveRadial$1); + l.angle = l.x, delete l.x; + l.radius = l.y, delete l.y; + return l; +} + +var circle$2 = { + draw: function(context, size) { + var r = Math.sqrt(size / pi$4); + context.moveTo(r, 0); + context.arc(0, 0, r, 0, tau$4); + } +}; + +var cross$2 = { + draw: function(context, size) { + var r = Math.sqrt(size / 5) / 2; + context.moveTo(-3 * r, -r); + context.lineTo(-r, -r); + context.lineTo(-r, -3 * r); + context.lineTo(r, -3 * r); + context.lineTo(r, -r); + context.lineTo(3 * r, -r); + context.lineTo(3 * r, r); + context.lineTo(r, r); + context.lineTo(r, 3 * r); + context.lineTo(-r, 3 * r); + context.lineTo(-r, r); + context.lineTo(-3 * r, r); + context.closePath(); + } +}; + +var tan30 = Math.sqrt(1 / 3), + tan30_2 = tan30 * 2; + +var diamond = { + draw: function(context, size) { + var y = Math.sqrt(size / tan30_2), + x = y * tan30; + context.moveTo(0, -y); + context.lineTo(x, 0); + context.lineTo(0, y); + context.lineTo(-x, 0); + context.closePath(); + } +}; + +var ka = 0.89081309152928522810, + kr = Math.sin(pi$4 / 10) / Math.sin(7 * pi$4 / 10), + kx = Math.sin(tau$4 / 10) * kr, + ky = -Math.cos(tau$4 / 10) * kr; + +var star = { + draw: function(context, 
size) { + var r = Math.sqrt(size * ka), + x = kx * r, + y = ky * r; + context.moveTo(0, -r); + context.lineTo(x, y); + for (var i = 1; i < 5; ++i) { + var a = tau$4 * i / 5, + c = Math.cos(a), + s = Math.sin(a); + context.lineTo(s * r, -c * r); + context.lineTo(c * x - s * y, s * x + c * y); + } + context.closePath(); + } +}; + +var square = { + draw: function(context, size) { + var w = Math.sqrt(size), + x = -w / 2; + context.rect(x, x, w, w); + } +}; + +var sqrt3 = Math.sqrt(3); + +var triangle = { + draw: function(context, size) { + var y = -Math.sqrt(size / (sqrt3 * 3)); + context.moveTo(0, y * 2); + context.lineTo(-sqrt3 * y, -y); + context.lineTo(sqrt3 * y, -y); + context.closePath(); + } +}; + +var c$2 = -0.5, + s = Math.sqrt(3) / 2, + k = 1 / Math.sqrt(12), + a = (k / 2 + 1) * 3; + +var wye = { + draw: function(context, size) { + var r = Math.sqrt(size / a), + x0 = r / 2, + y0 = r * k, + x1 = x0, + y1 = r * k + r, + x2 = -x1, + y2 = y1; + context.moveTo(x0, y0); + context.lineTo(x1, y1); + context.lineTo(x2, y2); + context.lineTo(c$2 * x0 - s * y0, s * x0 + c$2 * y0); + context.lineTo(c$2 * x1 - s * y1, s * x1 + c$2 * y1); + context.lineTo(c$2 * x2 - s * y2, s * x2 + c$2 * y2); + context.lineTo(c$2 * x0 + s * y0, c$2 * y0 - s * x0); + context.lineTo(c$2 * x1 + s * y1, c$2 * y1 - s * x1); + context.lineTo(c$2 * x2 + s * y2, c$2 * y2 - s * x2); + context.closePath(); + } +}; + +var symbols = [ + circle$2, + cross$2, + diamond, + square, + star, + triangle, + wye +]; + +function symbol() { + var type = constant$b(circle$2), + size = constant$b(64), + context = null; + + function symbol() { + var buffer; + if (!context) context = buffer = path(); + type.apply(this, arguments).draw(context, +size.apply(this, arguments)); + if (buffer) return context = null, buffer + "" || null; + } + + symbol.type = function(_) { + return arguments.length ? (type = typeof _ === "function" ? 
_ : constant$b(_), symbol) : type; + }; + + symbol.size = function(_) { + return arguments.length ? (size = typeof _ === "function" ? _ : constant$b(+_), symbol) : size; + }; + + symbol.context = function(_) { + return arguments.length ? (context = _ == null ? null : _, symbol) : context; + }; + + return symbol; +} + +function noop$3() {} + +function point$2(that, x, y) { + that._context.bezierCurveTo( + (2 * that._x0 + that._x1) / 3, + (2 * that._y0 + that._y1) / 3, + (that._x0 + 2 * that._x1) / 3, + (that._y0 + 2 * that._y1) / 3, + (that._x0 + 4 * that._x1 + x) / 6, + (that._y0 + 4 * that._y1 + y) / 6 + ); +} + +function Basis(context) { + this._context = context; +} + +Basis.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x0 = this._x1 = + this._y0 = this._y1 = NaN; + this._point = 0; + }, + lineEnd: function() { + switch (this._point) { + case 3: point$2(this, this._x1, this._y1); // proceed + case 2: this._context.lineTo(this._x1, this._y1); break; + } + if (this._line || (this._line !== 0 && this._point === 1)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; this._line ? 
this._context.lineTo(x, y) : this._context.moveTo(x, y); break; + case 1: this._point = 2; break; + case 2: this._point = 3; this._context.lineTo((5 * this._x0 + this._x1) / 6, (5 * this._y0 + this._y1) / 6); // proceed + default: point$2(this, x, y); break; + } + this._x0 = this._x1, this._x1 = x; + this._y0 = this._y1, this._y1 = y; + } +}; + +function basis$2(context) { + return new Basis(context); +} + +function BasisClosed(context) { + this._context = context; +} + +BasisClosed.prototype = { + areaStart: noop$3, + areaEnd: noop$3, + lineStart: function() { + this._x0 = this._x1 = this._x2 = this._x3 = this._x4 = + this._y0 = this._y1 = this._y2 = this._y3 = this._y4 = NaN; + this._point = 0; + }, + lineEnd: function() { + switch (this._point) { + case 1: { + this._context.moveTo(this._x2, this._y2); + this._context.closePath(); + break; + } + case 2: { + this._context.moveTo((this._x2 + 2 * this._x3) / 3, (this._y2 + 2 * this._y3) / 3); + this._context.lineTo((this._x3 + 2 * this._x2) / 3, (this._y3 + 2 * this._y2) / 3); + this._context.closePath(); + break; + } + case 3: { + this.point(this._x2, this._y2); + this.point(this._x3, this._y3); + this.point(this._x4, this._y4); + break; + } + } + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; this._x2 = x, this._y2 = y; break; + case 1: this._point = 2; this._x3 = x, this._y3 = y; break; + case 2: this._point = 3; this._x4 = x, this._y4 = y; this._context.moveTo((this._x0 + 4 * this._x1 + x) / 6, (this._y0 + 4 * this._y1 + y) / 6); break; + default: point$2(this, x, y); break; + } + this._x0 = this._x1, this._x1 = x; + this._y0 = this._y1, this._y1 = y; + } +}; + +function basisClosed$1(context) { + return new BasisClosed(context); +} + +function BasisOpen(context) { + this._context = context; +} + +BasisOpen.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x0 = 
this._x1 = + this._y0 = this._y1 = NaN; + this._point = 0; + }, + lineEnd: function() { + if (this._line || (this._line !== 0 && this._point === 3)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; break; + case 1: this._point = 2; break; + case 2: this._point = 3; var x0 = (this._x0 + 4 * this._x1 + x) / 6, y0 = (this._y0 + 4 * this._y1 + y) / 6; this._line ? this._context.lineTo(x0, y0) : this._context.moveTo(x0, y0); break; + case 3: this._point = 4; // proceed + default: point$2(this, x, y); break; + } + this._x0 = this._x1, this._x1 = x; + this._y0 = this._y1, this._y1 = y; + } +}; + +function basisOpen(context) { + return new BasisOpen(context); +} + +function Bundle(context, beta) { + this._basis = new Basis(context); + this._beta = beta; +} + +Bundle.prototype = { + lineStart: function() { + this._x = []; + this._y = []; + this._basis.lineStart(); + }, + lineEnd: function() { + var x = this._x, + y = this._y, + j = x.length - 1; + + if (j > 0) { + var x0 = x[0], + y0 = y[0], + dx = x[j] - x0, + dy = y[j] - y0, + i = -1, + t; + + while (++i <= j) { + t = i / j; + this._basis.point( + this._beta * x[i] + (1 - this._beta) * (x0 + t * dx), + this._beta * y[i] + (1 - this._beta) * (y0 + t * dy) + ); + } + } + + this._x = this._y = null; + this._basis.lineEnd(); + }, + point: function(x, y) { + this._x.push(+x); + this._y.push(+y); + } +}; + +var bundle = (function custom(beta) { + + function bundle(context) { + return beta === 1 ? 
new Basis(context) : new Bundle(context, beta); + } + + bundle.beta = function(beta) { + return custom(+beta); + }; + + return bundle; +})(0.85); + +function point$3(that, x, y) { + that._context.bezierCurveTo( + that._x1 + that._k * (that._x2 - that._x0), + that._y1 + that._k * (that._y2 - that._y0), + that._x2 + that._k * (that._x1 - x), + that._y2 + that._k * (that._y1 - y), + that._x2, + that._y2 + ); +} + +function Cardinal(context, tension) { + this._context = context; + this._k = (1 - tension) / 6; +} + +Cardinal.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x0 = this._x1 = this._x2 = + this._y0 = this._y1 = this._y2 = NaN; + this._point = 0; + }, + lineEnd: function() { + switch (this._point) { + case 2: this._context.lineTo(this._x2, this._y2); break; + case 3: point$3(this, this._x1, this._y1); break; + } + if (this._line || (this._line !== 0 && this._point === 1)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; this._line ? 
this._context.lineTo(x, y) : this._context.moveTo(x, y); break; + case 1: this._point = 2; this._x1 = x, this._y1 = y; break; + case 2: this._point = 3; // proceed + default: point$3(this, x, y); break; + } + this._x0 = this._x1, this._x1 = this._x2, this._x2 = x; + this._y0 = this._y1, this._y1 = this._y2, this._y2 = y; + } +}; + +var cardinal = (function custom(tension) { + + function cardinal(context) { + return new Cardinal(context, tension); + } + + cardinal.tension = function(tension) { + return custom(+tension); + }; + + return cardinal; +})(0); + +function CardinalClosed(context, tension) { + this._context = context; + this._k = (1 - tension) / 6; +} + +CardinalClosed.prototype = { + areaStart: noop$3, + areaEnd: noop$3, + lineStart: function() { + this._x0 = this._x1 = this._x2 = this._x3 = this._x4 = this._x5 = + this._y0 = this._y1 = this._y2 = this._y3 = this._y4 = this._y5 = NaN; + this._point = 0; + }, + lineEnd: function() { + switch (this._point) { + case 1: { + this._context.moveTo(this._x3, this._y3); + this._context.closePath(); + break; + } + case 2: { + this._context.lineTo(this._x3, this._y3); + this._context.closePath(); + break; + } + case 3: { + this.point(this._x3, this._y3); + this.point(this._x4, this._y4); + this.point(this._x5, this._y5); + break; + } + } + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; this._x3 = x, this._y3 = y; break; + case 1: this._point = 2; this._context.moveTo(this._x4 = x, this._y4 = y); break; + case 2: this._point = 3; this._x5 = x, this._y5 = y; break; + default: point$3(this, x, y); break; + } + this._x0 = this._x1, this._x1 = this._x2, this._x2 = x; + this._y0 = this._y1, this._y1 = this._y2, this._y2 = y; + } +}; + +var cardinalClosed = (function custom(tension) { + + function cardinal(context) { + return new CardinalClosed(context, tension); + } + + cardinal.tension = function(tension) { + return custom(+tension); + }; + + return cardinal; +})(0); + 
+function CardinalOpen(context, tension) { + this._context = context; + this._k = (1 - tension) / 6; +} + +CardinalOpen.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x0 = this._x1 = this._x2 = + this._y0 = this._y1 = this._y2 = NaN; + this._point = 0; + }, + lineEnd: function() { + if (this._line || (this._line !== 0 && this._point === 3)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; break; + case 1: this._point = 2; break; + case 2: this._point = 3; this._line ? this._context.lineTo(this._x2, this._y2) : this._context.moveTo(this._x2, this._y2); break; + case 3: this._point = 4; // proceed + default: point$3(this, x, y); break; + } + this._x0 = this._x1, this._x1 = this._x2, this._x2 = x; + this._y0 = this._y1, this._y1 = this._y2, this._y2 = y; + } +}; + +var cardinalOpen = (function custom(tension) { + + function cardinal(context) { + return new CardinalOpen(context, tension); + } + + cardinal.tension = function(tension) { + return custom(+tension); + }; + + return cardinal; +})(0); + +function point$4(that, x, y) { + var x1 = that._x1, + y1 = that._y1, + x2 = that._x2, + y2 = that._y2; + + if (that._l01_a > epsilon$3) { + var a = 2 * that._l01_2a + 3 * that._l01_a * that._l12_a + that._l12_2a, + n = 3 * that._l01_a * (that._l01_a + that._l12_a); + x1 = (x1 * a - that._x0 * that._l12_2a + that._x2 * that._l01_2a) / n; + y1 = (y1 * a - that._y0 * that._l12_2a + that._y2 * that._l01_2a) / n; + } + + if (that._l23_a > epsilon$3) { + var b = 2 * that._l23_2a + 3 * that._l23_a * that._l12_a + that._l12_2a, + m = 3 * that._l23_a * (that._l23_a + that._l12_a); + x2 = (x2 * b + that._x1 * that._l23_2a - x * that._l12_2a) / m; + y2 = (y2 * b + that._y1 * that._l23_2a - y * that._l12_2a) / m; + } + + that._context.bezierCurveTo(x1, y1, x2, y2, that._x2, 
that._y2); +} + +function CatmullRom(context, alpha) { + this._context = context; + this._alpha = alpha; +} + +CatmullRom.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x0 = this._x1 = this._x2 = + this._y0 = this._y1 = this._y2 = NaN; + this._l01_a = this._l12_a = this._l23_a = + this._l01_2a = this._l12_2a = this._l23_2a = + this._point = 0; + }, + lineEnd: function() { + switch (this._point) { + case 2: this._context.lineTo(this._x2, this._y2); break; + case 3: this.point(this._x2, this._y2); break; + } + if (this._line || (this._line !== 0 && this._point === 1)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + + if (this._point) { + var x23 = this._x2 - x, + y23 = this._y2 - y; + this._l23_a = Math.sqrt(this._l23_2a = Math.pow(x23 * x23 + y23 * y23, this._alpha)); + } + + switch (this._point) { + case 0: this._point = 1; this._line ? this._context.lineTo(x, y) : this._context.moveTo(x, y); break; + case 1: this._point = 2; break; + case 2: this._point = 3; // proceed + default: point$4(this, x, y); break; + } + + this._l01_a = this._l12_a, this._l12_a = this._l23_a; + this._l01_2a = this._l12_2a, this._l12_2a = this._l23_2a; + this._x0 = this._x1, this._x1 = this._x2, this._x2 = x; + this._y0 = this._y1, this._y1 = this._y2, this._y2 = y; + } +}; + +var catmullRom = (function custom(alpha) { + + function catmullRom(context) { + return alpha ? 
new CatmullRom(context, alpha) : new Cardinal(context, 0); + } + + catmullRom.alpha = function(alpha) { + return custom(+alpha); + }; + + return catmullRom; +})(0.5); + +function CatmullRomClosed(context, alpha) { + this._context = context; + this._alpha = alpha; +} + +CatmullRomClosed.prototype = { + areaStart: noop$3, + areaEnd: noop$3, + lineStart: function() { + this._x0 = this._x1 = this._x2 = this._x3 = this._x4 = this._x5 = + this._y0 = this._y1 = this._y2 = this._y3 = this._y4 = this._y5 = NaN; + this._l01_a = this._l12_a = this._l23_a = + this._l01_2a = this._l12_2a = this._l23_2a = + this._point = 0; + }, + lineEnd: function() { + switch (this._point) { + case 1: { + this._context.moveTo(this._x3, this._y3); + this._context.closePath(); + break; + } + case 2: { + this._context.lineTo(this._x3, this._y3); + this._context.closePath(); + break; + } + case 3: { + this.point(this._x3, this._y3); + this.point(this._x4, this._y4); + this.point(this._x5, this._y5); + break; + } + } + }, + point: function(x, y) { + x = +x, y = +y; + + if (this._point) { + var x23 = this._x2 - x, + y23 = this._y2 - y; + this._l23_a = Math.sqrt(this._l23_2a = Math.pow(x23 * x23 + y23 * y23, this._alpha)); + } + + switch (this._point) { + case 0: this._point = 1; this._x3 = x, this._y3 = y; break; + case 1: this._point = 2; this._context.moveTo(this._x4 = x, this._y4 = y); break; + case 2: this._point = 3; this._x5 = x, this._y5 = y; break; + default: point$4(this, x, y); break; + } + + this._l01_a = this._l12_a, this._l12_a = this._l23_a; + this._l01_2a = this._l12_2a, this._l12_2a = this._l23_2a; + this._x0 = this._x1, this._x1 = this._x2, this._x2 = x; + this._y0 = this._y1, this._y1 = this._y2, this._y2 = y; + } +}; + +var catmullRomClosed = (function custom(alpha) { + + function catmullRom(context) { + return alpha ? 
new CatmullRomClosed(context, alpha) : new CardinalClosed(context, 0); + } + + catmullRom.alpha = function(alpha) { + return custom(+alpha); + }; + + return catmullRom; +})(0.5); + +function CatmullRomOpen(context, alpha) { + this._context = context; + this._alpha = alpha; +} + +CatmullRomOpen.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x0 = this._x1 = this._x2 = + this._y0 = this._y1 = this._y2 = NaN; + this._l01_a = this._l12_a = this._l23_a = + this._l01_2a = this._l12_2a = this._l23_2a = + this._point = 0; + }, + lineEnd: function() { + if (this._line || (this._line !== 0 && this._point === 3)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + + if (this._point) { + var x23 = this._x2 - x, + y23 = this._y2 - y; + this._l23_a = Math.sqrt(this._l23_2a = Math.pow(x23 * x23 + y23 * y23, this._alpha)); + } + + switch (this._point) { + case 0: this._point = 1; break; + case 1: this._point = 2; break; + case 2: this._point = 3; this._line ? this._context.lineTo(this._x2, this._y2) : this._context.moveTo(this._x2, this._y2); break; + case 3: this._point = 4; // proceed + default: point$4(this, x, y); break; + } + + this._l01_a = this._l12_a, this._l12_a = this._l23_a; + this._l01_2a = this._l12_2a, this._l12_2a = this._l23_2a; + this._x0 = this._x1, this._x1 = this._x2, this._x2 = x; + this._y0 = this._y1, this._y1 = this._y2, this._y2 = y; + } +}; + +var catmullRomOpen = (function custom(alpha) { + + function catmullRom(context) { + return alpha ? 
new CatmullRomOpen(context, alpha) : new CardinalOpen(context, 0); + } + + catmullRom.alpha = function(alpha) { + return custom(+alpha); + }; + + return catmullRom; +})(0.5); + +function LinearClosed(context) { + this._context = context; +} + +LinearClosed.prototype = { + areaStart: noop$3, + areaEnd: noop$3, + lineStart: function() { + this._point = 0; + }, + lineEnd: function() { + if (this._point) this._context.closePath(); + }, + point: function(x, y) { + x = +x, y = +y; + if (this._point) this._context.lineTo(x, y); + else this._point = 1, this._context.moveTo(x, y); + } +}; + +function linearClosed(context) { + return new LinearClosed(context); +} + +function sign$1(x) { + return x < 0 ? -1 : 1; +} + +// Calculate the slopes of the tangents (Hermite-type interpolation) based on +// the following paper: Steffen, M. 1990. A Simple Method for Monotonic +// Interpolation in One Dimension. Astronomy and Astrophysics, Vol. 239, NO. +// NOV(II), P. 443, 1990. +function slope3(that, x2, y2) { + var h0 = that._x1 - that._x0, + h1 = x2 - that._x1, + s0 = (that._y1 - that._y0) / (h0 || h1 < 0 && -0), + s1 = (y2 - that._y1) / (h1 || h0 < 0 && -0), + p = (s0 * h1 + s1 * h0) / (h0 + h1); + return (sign$1(s0) + sign$1(s1)) * Math.min(Math.abs(s0), Math.abs(s1), 0.5 * Math.abs(p)) || 0; +} + +// Calculate a one-sided slope. +function slope2(that, t) { + var h = that._x1 - that._x0; + return h ? (3 * (that._y1 - that._y0) / h - t) / 2 : t; +} + +// According to https://en.wikipedia.org/wiki/Cubic_Hermite_spline#Representations +// "you can express cubic Hermite interpolation in terms of cubic Bézier curves +// with respect to the four values p0, p0 + m0 / 3, p1 - m1 / 3, p1". 
+function point$5(that, t0, t1) { + var x0 = that._x0, + y0 = that._y0, + x1 = that._x1, + y1 = that._y1, + dx = (x1 - x0) / 3; + that._context.bezierCurveTo(x0 + dx, y0 + dx * t0, x1 - dx, y1 - dx * t1, x1, y1); +} + +function MonotoneX(context) { + this._context = context; +} + +MonotoneX.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x0 = this._x1 = + this._y0 = this._y1 = + this._t0 = NaN; + this._point = 0; + }, + lineEnd: function() { + switch (this._point) { + case 2: this._context.lineTo(this._x1, this._y1); break; + case 3: point$5(this, this._t0, slope2(this, this._t0)); break; + } + if (this._line || (this._line !== 0 && this._point === 1)) this._context.closePath(); + this._line = 1 - this._line; + }, + point: function(x, y) { + var t1 = NaN; + + x = +x, y = +y; + if (x === this._x1 && y === this._y1) return; // Ignore coincident points. + switch (this._point) { + case 0: this._point = 1; this._line ? 
this._context.lineTo(x, y) : this._context.moveTo(x, y); break; + case 1: this._point = 2; break; + case 2: this._point = 3; point$5(this, slope2(this, t1 = slope3(this, x, y)), t1); break; + default: point$5(this, this._t0, t1 = slope3(this, x, y)); break; + } + + this._x0 = this._x1, this._x1 = x; + this._y0 = this._y1, this._y1 = y; + this._t0 = t1; + } +}; + +function MonotoneY(context) { + this._context = new ReflectContext(context); +} + +(MonotoneY.prototype = Object.create(MonotoneX.prototype)).point = function(x, y) { + MonotoneX.prototype.point.call(this, y, x); +}; + +function ReflectContext(context) { + this._context = context; +} + +ReflectContext.prototype = { + moveTo: function(x, y) { this._context.moveTo(y, x); }, + closePath: function() { this._context.closePath(); }, + lineTo: function(x, y) { this._context.lineTo(y, x); }, + bezierCurveTo: function(x1, y1, x2, y2, x, y) { this._context.bezierCurveTo(y1, x1, y2, x2, y, x); } +}; + +function monotoneX(context) { + return new MonotoneX(context); +} + +function monotoneY(context) { + return new MonotoneY(context); +} + +function Natural(context) { + this._context = context; +} + +Natural.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x = []; + this._y = []; + }, + lineEnd: function() { + var x = this._x, + y = this._y, + n = x.length; + + if (n) { + this._line ? 
this._context.lineTo(x[0], y[0]) : this._context.moveTo(x[0], y[0]); + if (n === 2) { + this._context.lineTo(x[1], y[1]); + } else { + var px = controlPoints(x), + py = controlPoints(y); + for (var i0 = 0, i1 = 1; i1 < n; ++i0, ++i1) { + this._context.bezierCurveTo(px[0][i0], py[0][i0], px[1][i0], py[1][i0], x[i1], y[i1]); + } + } + } + + if (this._line || (this._line !== 0 && n === 1)) this._context.closePath(); + this._line = 1 - this._line; + this._x = this._y = null; + }, + point: function(x, y) { + this._x.push(+x); + this._y.push(+y); + } +}; + +// See https://www.particleincell.com/2012/bezier-splines/ for derivation. +function controlPoints(x) { + var i, + n = x.length - 1, + m, + a = new Array(n), + b = new Array(n), + r = new Array(n); + a[0] = 0, b[0] = 2, r[0] = x[0] + 2 * x[1]; + for (i = 1; i < n - 1; ++i) a[i] = 1, b[i] = 4, r[i] = 4 * x[i] + 2 * x[i + 1]; + a[n - 1] = 2, b[n - 1] = 7, r[n - 1] = 8 * x[n - 1] + x[n]; + for (i = 1; i < n; ++i) m = a[i] / b[i - 1], b[i] -= m, r[i] -= m * r[i - 1]; + a[n - 1] = r[n - 1] / b[n - 1]; + for (i = n - 2; i >= 0; --i) a[i] = (r[i] - a[i + 1]) / b[i]; + b[n - 1] = (x[n] + a[n - 1]) / 2; + for (i = 0; i < n - 1; ++i) b[i] = 2 * x[i + 1] - a[i + 1]; + return [a, b]; +} + +function natural(context) { + return new Natural(context); +} + +function Step(context, t) { + this._context = context; + this._t = t; +} + +Step.prototype = { + areaStart: function() { + this._line = 0; + }, + areaEnd: function() { + this._line = NaN; + }, + lineStart: function() { + this._x = this._y = NaN; + this._point = 0; + }, + lineEnd: function() { + if (0 < this._t && this._t < 1 && this._point === 2) this._context.lineTo(this._x, this._y); + if (this._line || (this._line !== 0 && this._point === 1)) this._context.closePath(); + if (this._line >= 0) this._t = 1 - this._t, this._line = 1 - this._line; + }, + point: function(x, y) { + x = +x, y = +y; + switch (this._point) { + case 0: this._point = 1; this._line ? 
this._context.lineTo(x, y) : this._context.moveTo(x, y); break; + case 1: this._point = 2; // proceed + default: { + if (this._t <= 0) { + this._context.lineTo(this._x, y); + this._context.lineTo(x, y); + } else { + var x1 = this._x * (1 - this._t) + x * this._t; + this._context.lineTo(x1, this._y); + this._context.lineTo(x1, y); + } + break; + } + } + this._x = x, this._y = y; + } +}; + +function step(context) { + return new Step(context, 0.5); +} + +function stepBefore(context) { + return new Step(context, 0); +} + +function stepAfter(context) { + return new Step(context, 1); +} + +function none$1(series, order) { + if (!((n = series.length) > 1)) return; + for (var i = 1, j, s0, s1 = series[order[0]], n, m = s1.length; i < n; ++i) { + s0 = s1, s1 = series[order[i]]; + for (j = 0; j < m; ++j) { + s1[j][1] += s1[j][0] = isNaN(s0[j][1]) ? s0[j][0] : s0[j][1]; + } + } +} + +function none$2(series) { + var n = series.length, o = new Array(n); + while (--n >= 0) o[n] = n; + return o; +} + +function stackValue(d, key) { + return d[key]; +} + +function stack() { + var keys = constant$b([]), + order = none$2, + offset = none$1, + value = stackValue; + + function stack(data) { + var kz = keys.apply(this, arguments), + i, + m = data.length, + n = kz.length, + sz = new Array(n), + oz; + + for (i = 0; i < n; ++i) { + for (var ki = kz[i], si = sz[i] = new Array(m), j = 0, sij; j < m; ++j) { + si[j] = sij = [0, +value(data[j], ki, j, data)]; + sij.data = data[j]; + } + si.key = ki; + } + + for (i = 0, oz = order(sz); i < n; ++i) { + sz[oz[i]].index = i; + } + + offset(sz, oz); + return sz; + } + + stack.keys = function(_) { + return arguments.length ? (keys = typeof _ === "function" ? _ : constant$b(slice$6.call(_)), stack) : keys; + }; + + stack.value = function(_) { + return arguments.length ? (value = typeof _ === "function" ? _ : constant$b(+_), stack) : value; + }; + + stack.order = function(_) { + return arguments.length ? (order = _ == null ? 
none$2 : typeof _ === "function" ? _ : constant$b(slice$6.call(_)), stack) : order; + }; + + stack.offset = function(_) { + return arguments.length ? (offset = _ == null ? none$1 : _, stack) : offset; + }; + + return stack; +} + +function expand(series, order) { + if (!((n = series.length) > 0)) return; + for (var i, n, j = 0, m = series[0].length, y; j < m; ++j) { + for (y = i = 0; i < n; ++i) y += series[i][j][1] || 0; + if (y) for (i = 0; i < n; ++i) series[i][j][1] /= y; + } + none$1(series, order); +} + +function diverging$1(series, order) { + if (!((n = series.length) > 0)) return; + for (var i, j = 0, d, dy, yp, yn, n, m = series[order[0]].length; j < m; ++j) { + for (yp = yn = 0, i = 0; i < n; ++i) { + if ((dy = (d = series[order[i]][j])[1] - d[0]) > 0) { + d[0] = yp, d[1] = yp += dy; + } else if (dy < 0) { + d[1] = yn, d[0] = yn += dy; + } else { + d[0] = 0, d[1] = dy; + } + } + } +} + +function silhouette(series, order) { + if (!((n = series.length) > 0)) return; + for (var j = 0, s0 = series[order[0]], n, m = s0.length; j < m; ++j) { + for (var i = 0, y = 0; i < n; ++i) y += series[i][j][1] || 0; + s0[j][1] += s0[j][0] = -y / 2; + } + none$1(series, order); +} + +function wiggle(series, order) { + if (!((n = series.length) > 0) || !((m = (s0 = series[order[0]]).length) > 0)) return; + for (var y = 0, j = 1, s0, m, n; j < m; ++j) { + for (var i = 0, s1 = 0, s2 = 0; i < n; ++i) { + var si = series[order[i]], + sij0 = si[j][1] || 0, + sij1 = si[j - 1][1] || 0, + s3 = (sij0 - sij1) / 2; + for (var k = 0; k < i; ++k) { + var sk = series[order[k]], + skj0 = sk[j][1] || 0, + skj1 = sk[j - 1][1] || 0; + s3 += skj0 - skj1; + } + s1 += sij0, s2 += s3 * sij0; + } + s0[j - 1][1] += s0[j - 1][0] = y; + if (s1) y -= s2 / s1; + } + s0[j - 1][1] += s0[j - 1][0] = y; + none$1(series, order); +} + +function appearance(series) { + var peaks = series.map(peak); + return none$2(series).sort(function(a, b) { return peaks[a] - peaks[b]; }); +} + +function peak(series) { + var 
i = -1, j = 0, n = series.length, vi, vj = -Infinity; + while (++i < n) if ((vi = +series[i][1]) > vj) vj = vi, j = i; + return j; +} + +function ascending$3(series) { + var sums = series.map(sum$2); + return none$2(series).sort(function(a, b) { return sums[a] - sums[b]; }); +} + +function sum$2(series) { + var s = 0, i = -1, n = series.length, v; + while (++i < n) if (v = +series[i][1]) s += v; + return s; +} + +function descending$2(series) { + return ascending$3(series).reverse(); +} + +function insideOut(series) { + var n = series.length, + i, + j, + sums = series.map(sum$2), + order = appearance(series), + top = 0, + bottom = 0, + tops = [], + bottoms = []; + + for (i = 0; i < n; ++i) { + j = order[i]; + if (top < bottom) { + top += sums[j]; + tops.push(j); + } else { + bottom += sums[j]; + bottoms.push(j); + } + } + + return bottoms.reverse().concat(tops); +} + +function reverse(series) { + return none$2(series).reverse(); +} + +function constant$c(x) { + return function() { + return x; + }; +} + +function x$4(d) { + return d[0]; +} + +function y$4(d) { + return d[1]; +} + +function RedBlackTree() { + this._ = null; // root node +} + +function RedBlackNode(node) { + node.U = // parent node + node.C = // color - true for red, false for black + node.L = // left node + node.R = // right node + node.P = // previous node + node.N = null; // next node +} + +RedBlackTree.prototype = { + constructor: RedBlackTree, + + insert: function(after, node) { + var parent, grandpa, uncle; + + if (after) { + node.P = after; + node.N = after.N; + if (after.N) after.N.P = node; + after.N = node; + if (after.R) { + after = after.R; + while (after.L) after = after.L; + after.L = node; + } else { + after.R = node; + } + parent = after; + } else if (this._) { + after = RedBlackFirst(this._); + node.P = null; + node.N = after; + after.P = after.L = node; + parent = after; + } else { + node.P = node.N = null; + this._ = node; + parent = null; + } + node.L = node.R = null; + node.U = 
parent; + node.C = true; + + after = node; + while (parent && parent.C) { + grandpa = parent.U; + if (parent === grandpa.L) { + uncle = grandpa.R; + if (uncle && uncle.C) { + parent.C = uncle.C = false; + grandpa.C = true; + after = grandpa; + } else { + if (after === parent.R) { + RedBlackRotateLeft(this, parent); + after = parent; + parent = after.U; + } + parent.C = false; + grandpa.C = true; + RedBlackRotateRight(this, grandpa); + } + } else { + uncle = grandpa.L; + if (uncle && uncle.C) { + parent.C = uncle.C = false; + grandpa.C = true; + after = grandpa; + } else { + if (after === parent.L) { + RedBlackRotateRight(this, parent); + after = parent; + parent = after.U; + } + parent.C = false; + grandpa.C = true; + RedBlackRotateLeft(this, grandpa); + } + } + parent = after.U; + } + this._.C = false; + }, + + remove: function(node) { + if (node.N) node.N.P = node.P; + if (node.P) node.P.N = node.N; + node.N = node.P = null; + + var parent = node.U, + sibling, + left = node.L, + right = node.R, + next, + red; + + if (!left) next = right; + else if (!right) next = left; + else next = RedBlackFirst(right); + + if (parent) { + if (parent.L === node) parent.L = next; + else parent.R = next; + } else { + this._ = next; + } + + if (left && right) { + red = next.C; + next.C = node.C; + next.L = left; + left.U = next; + if (next !== right) { + parent = next.U; + next.U = node.U; + node = next.R; + parent.L = node; + next.R = right; + right.U = next; + } else { + next.U = parent; + parent = next; + node = next.R; + } + } else { + red = node.C; + node = next; + } + + if (node) node.U = parent; + if (red) return; + if (node && node.C) { node.C = false; return; } + + do { + if (node === this._) break; + if (node === parent.L) { + sibling = parent.R; + if (sibling.C) { + sibling.C = false; + parent.C = true; + RedBlackRotateLeft(this, parent); + sibling = parent.R; + } + if ((sibling.L && sibling.L.C) + || (sibling.R && sibling.R.C)) { + if (!sibling.R || !sibling.R.C) { + 
sibling.L.C = false; + sibling.C = true; + RedBlackRotateRight(this, sibling); + sibling = parent.R; + } + sibling.C = parent.C; + parent.C = sibling.R.C = false; + RedBlackRotateLeft(this, parent); + node = this._; + break; + } + } else { + sibling = parent.L; + if (sibling.C) { + sibling.C = false; + parent.C = true; + RedBlackRotateRight(this, parent); + sibling = parent.L; + } + if ((sibling.L && sibling.L.C) + || (sibling.R && sibling.R.C)) { + if (!sibling.L || !sibling.L.C) { + sibling.R.C = false; + sibling.C = true; + RedBlackRotateLeft(this, sibling); + sibling = parent.L; + } + sibling.C = parent.C; + parent.C = sibling.L.C = false; + RedBlackRotateRight(this, parent); + node = this._; + break; + } + } + sibling.C = true; + node = parent; + parent = parent.U; + } while (!node.C); + + if (node) node.C = false; + } +}; + +function RedBlackRotateLeft(tree, node) { + var p = node, + q = node.R, + parent = p.U; + + if (parent) { + if (parent.L === p) parent.L = q; + else parent.R = q; + } else { + tree._ = q; + } + + q.U = parent; + p.U = q; + p.R = q.L; + if (p.R) p.R.U = p; + q.L = p; +} + +function RedBlackRotateRight(tree, node) { + var p = node, + q = node.L, + parent = p.U; + + if (parent) { + if (parent.L === p) parent.L = q; + else parent.R = q; + } else { + tree._ = q; + } + + q.U = parent; + p.U = q; + p.L = q.R; + if (p.L) p.L.U = p; + q.R = p; +} + +function RedBlackFirst(node) { + while (node.L) node = node.L; + return node; +} + +function createEdge(left, right, v0, v1) { + var edge = [null, null], + index = edges.push(edge) - 1; + edge.left = left; + edge.right = right; + if (v0) setEdgeEnd(edge, left, right, v0); + if (v1) setEdgeEnd(edge, right, left, v1); + cells[left.index].halfedges.push(index); + cells[right.index].halfedges.push(index); + return edge; +} + +function createBorderEdge(left, v0, v1) { + var edge = [v0, v1]; + edge.left = left; + return edge; +} + +function setEdgeEnd(edge, left, right, vertex) { + if (!edge[0] && !edge[1]) 
{ + edge[0] = vertex; + edge.left = left; + edge.right = right; + } else if (edge.left === right) { + edge[1] = vertex; + } else { + edge[0] = vertex; + } +} + +// Liang–Barsky line clipping. +function clipEdge(edge, x0, y0, x1, y1) { + var a = edge[0], + b = edge[1], + ax = a[0], + ay = a[1], + bx = b[0], + by = b[1], + t0 = 0, + t1 = 1, + dx = bx - ax, + dy = by - ay, + r; + + r = x0 - ax; + if (!dx && r > 0) return; + r /= dx; + if (dx < 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } else if (dx > 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } + + r = x1 - ax; + if (!dx && r < 0) return; + r /= dx; + if (dx < 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } else if (dx > 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } + + r = y0 - ay; + if (!dy && r > 0) return; + r /= dy; + if (dy < 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } else if (dy > 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } + + r = y1 - ay; + if (!dy && r < 0) return; + r /= dy; + if (dy < 0) { + if (r > t1) return; + if (r > t0) t0 = r; + } else if (dy > 0) { + if (r < t0) return; + if (r < t1) t1 = r; + } + + if (!(t0 > 0) && !(t1 < 1)) return true; // TODO Better check? 
+ + if (t0 > 0) edge[0] = [ax + t0 * dx, ay + t0 * dy]; + if (t1 < 1) edge[1] = [ax + t1 * dx, ay + t1 * dy]; + return true; +} + +function connectEdge(edge, x0, y0, x1, y1) { + var v1 = edge[1]; + if (v1) return true; + + var v0 = edge[0], + left = edge.left, + right = edge.right, + lx = left[0], + ly = left[1], + rx = right[0], + ry = right[1], + fx = (lx + rx) / 2, + fy = (ly + ry) / 2, + fm, + fb; + + if (ry === ly) { + if (fx < x0 || fx >= x1) return; + if (lx > rx) { + if (!v0) v0 = [fx, y0]; + else if (v0[1] >= y1) return; + v1 = [fx, y1]; + } else { + if (!v0) v0 = [fx, y1]; + else if (v0[1] < y0) return; + v1 = [fx, y0]; + } + } else { + fm = (lx - rx) / (ry - ly); + fb = fy - fm * fx; + if (fm < -1 || fm > 1) { + if (lx > rx) { + if (!v0) v0 = [(y0 - fb) / fm, y0]; + else if (v0[1] >= y1) return; + v1 = [(y1 - fb) / fm, y1]; + } else { + if (!v0) v0 = [(y1 - fb) / fm, y1]; + else if (v0[1] < y0) return; + v1 = [(y0 - fb) / fm, y0]; + } + } else { + if (ly < ry) { + if (!v0) v0 = [x0, fm * x0 + fb]; + else if (v0[0] >= x1) return; + v1 = [x1, fm * x1 + fb]; + } else { + if (!v0) v0 = [x1, fm * x1 + fb]; + else if (v0[0] < x0) return; + v1 = [x0, fm * x0 + fb]; + } + } + } + + edge[0] = v0; + edge[1] = v1; + return true; +} + +function clipEdges(x0, y0, x1, y1) { + var i = edges.length, + edge; + + while (i--) { + if (!connectEdge(edge = edges[i], x0, y0, x1, y1) + || !clipEdge(edge, x0, y0, x1, y1) + || !(Math.abs(edge[0][0] - edge[1][0]) > epsilon$4 + || Math.abs(edge[0][1] - edge[1][1]) > epsilon$4)) { + delete edges[i]; + } + } +} + +function createCell(site) { + return cells[site.index] = { + site: site, + halfedges: [] + }; +} + +function cellHalfedgeAngle(cell, edge) { + var site = cell.site, + va = edge.left, + vb = edge.right; + if (site === vb) vb = va, va = site; + if (vb) return Math.atan2(vb[1] - va[1], vb[0] - va[0]); + if (site === va) va = edge[1], vb = edge[0]; + else va = edge[0], vb = edge[1]; + return Math.atan2(va[0] - vb[0], vb[1] - 
va[1]); +} + +function cellHalfedgeStart(cell, edge) { + return edge[+(edge.left !== cell.site)]; +} + +function cellHalfedgeEnd(cell, edge) { + return edge[+(edge.left === cell.site)]; +} + +function sortCellHalfedges() { + for (var i = 0, n = cells.length, cell, halfedges, j, m; i < n; ++i) { + if ((cell = cells[i]) && (m = (halfedges = cell.halfedges).length)) { + var index = new Array(m), + array = new Array(m); + for (j = 0; j < m; ++j) index[j] = j, array[j] = cellHalfedgeAngle(cell, edges[halfedges[j]]); + index.sort(function(i, j) { return array[j] - array[i]; }); + for (j = 0; j < m; ++j) array[j] = halfedges[index[j]]; + for (j = 0; j < m; ++j) halfedges[j] = array[j]; + } + } +} + +function clipCells(x0, y0, x1, y1) { + var nCells = cells.length, + iCell, + cell, + site, + iHalfedge, + halfedges, + nHalfedges, + start, + startX, + startY, + end, + endX, + endY, + cover = true; + + for (iCell = 0; iCell < nCells; ++iCell) { + if (cell = cells[iCell]) { + site = cell.site; + halfedges = cell.halfedges; + iHalfedge = halfedges.length; + + // Remove any dangling clipped edges. + while (iHalfedge--) { + if (!edges[halfedges[iHalfedge]]) { + halfedges.splice(iHalfedge, 1); + } + } + + // Insert any border edges as necessary. + iHalfedge = 0, nHalfedges = halfedges.length; + while (iHalfedge < nHalfedges) { + end = cellHalfedgeEnd(cell, edges[halfedges[iHalfedge]]), endX = end[0], endY = end[1]; + start = cellHalfedgeStart(cell, edges[halfedges[++iHalfedge % nHalfedges]]), startX = start[0], startY = start[1]; + if (Math.abs(endX - startX) > epsilon$4 || Math.abs(endY - startY) > epsilon$4) { + halfedges.splice(iHalfedge, 0, edges.push(createBorderEdge(site, end, + Math.abs(endX - x0) < epsilon$4 && y1 - endY > epsilon$4 ? [x0, Math.abs(startX - x0) < epsilon$4 ? startY : y1] + : Math.abs(endY - y1) < epsilon$4 && x1 - endX > epsilon$4 ? [Math.abs(startY - y1) < epsilon$4 ? startX : x1, y1] + : Math.abs(endX - x1) < epsilon$4 && endY - y0 > epsilon$4 ? 
[x1, Math.abs(startX - x1) < epsilon$4 ? startY : y0] + : Math.abs(endY - y0) < epsilon$4 && endX - x0 > epsilon$4 ? [Math.abs(startY - y0) < epsilon$4 ? startX : x0, y0] + : null)) - 1); + ++nHalfedges; + } + } + + if (nHalfedges) cover = false; + } + } + + // If there weren’t any edges, have the closest site cover the extent. + // It doesn’t matter which corner of the extent we measure! + if (cover) { + var dx, dy, d2, dc = Infinity; + + for (iCell = 0, cover = null; iCell < nCells; ++iCell) { + if (cell = cells[iCell]) { + site = cell.site; + dx = site[0] - x0; + dy = site[1] - y0; + d2 = dx * dx + dy * dy; + if (d2 < dc) dc = d2, cover = cell; + } + } + + if (cover) { + var v00 = [x0, y0], v01 = [x0, y1], v11 = [x1, y1], v10 = [x1, y0]; + cover.halfedges.push( + edges.push(createBorderEdge(site = cover.site, v00, v01)) - 1, + edges.push(createBorderEdge(site, v01, v11)) - 1, + edges.push(createBorderEdge(site, v11, v10)) - 1, + edges.push(createBorderEdge(site, v10, v00)) - 1 + ); + } + } + + // Lastly delete any cells with no edges; these were entirely clipped. 
+ for (iCell = 0; iCell < nCells; ++iCell) { + if (cell = cells[iCell]) { + if (!cell.halfedges.length) { + delete cells[iCell]; + } + } + } +} + +var circlePool = []; + +var firstCircle; + +function Circle() { + RedBlackNode(this); + this.x = + this.y = + this.arc = + this.site = + this.cy = null; +} + +function attachCircle(arc) { + var lArc = arc.P, + rArc = arc.N; + + if (!lArc || !rArc) return; + + var lSite = lArc.site, + cSite = arc.site, + rSite = rArc.site; + + if (lSite === rSite) return; + + var bx = cSite[0], + by = cSite[1], + ax = lSite[0] - bx, + ay = lSite[1] - by, + cx = rSite[0] - bx, + cy = rSite[1] - by; + + var d = 2 * (ax * cy - ay * cx); + if (d >= -epsilon2$2) return; + + var ha = ax * ax + ay * ay, + hc = cx * cx + cy * cy, + x = (cy * ha - ay * hc) / d, + y = (ax * hc - cx * ha) / d; + + var circle = circlePool.pop() || new Circle; + circle.arc = arc; + circle.site = cSite; + circle.x = x + bx; + circle.y = (circle.cy = y + by) + Math.sqrt(x * x + y * y); // y bottom + + arc.circle = circle; + + var before = null, + node = circles._; + + while (node) { + if (circle.y < node.y || (circle.y === node.y && circle.x <= node.x)) { + if (node.L) node = node.L; + else { before = node.P; break; } + } else { + if (node.R) node = node.R; + else { before = node; break; } + } + } + + circles.insert(before, circle); + if (!before) firstCircle = circle; +} + +function detachCircle(arc) { + var circle = arc.circle; + if (circle) { + if (!circle.P) firstCircle = circle.N; + circles.remove(circle); + circlePool.push(circle); + RedBlackNode(circle); + arc.circle = null; + } +} + +var beachPool = []; + +function Beach() { + RedBlackNode(this); + this.edge = + this.site = + this.circle = null; +} + +function createBeach(site) { + var beach = beachPool.pop() || new Beach; + beach.site = site; + return beach; +} + +function detachBeach(beach) { + detachCircle(beach); + beaches.remove(beach); + beachPool.push(beach); + RedBlackNode(beach); +} + +function 
removeBeach(beach) { + var circle = beach.circle, + x = circle.x, + y = circle.cy, + vertex = [x, y], + previous = beach.P, + next = beach.N, + disappearing = [beach]; + + detachBeach(beach); + + var lArc = previous; + while (lArc.circle + && Math.abs(x - lArc.circle.x) < epsilon$4 + && Math.abs(y - lArc.circle.cy) < epsilon$4) { + previous = lArc.P; + disappearing.unshift(lArc); + detachBeach(lArc); + lArc = previous; + } + + disappearing.unshift(lArc); + detachCircle(lArc); + + var rArc = next; + while (rArc.circle + && Math.abs(x - rArc.circle.x) < epsilon$4 + && Math.abs(y - rArc.circle.cy) < epsilon$4) { + next = rArc.N; + disappearing.push(rArc); + detachBeach(rArc); + rArc = next; + } + + disappearing.push(rArc); + detachCircle(rArc); + + var nArcs = disappearing.length, + iArc; + for (iArc = 1; iArc < nArcs; ++iArc) { + rArc = disappearing[iArc]; + lArc = disappearing[iArc - 1]; + setEdgeEnd(rArc.edge, lArc.site, rArc.site, vertex); + } + + lArc = disappearing[0]; + rArc = disappearing[nArcs - 1]; + rArc.edge = createEdge(lArc.site, rArc.site, null, vertex); + + attachCircle(lArc); + attachCircle(rArc); +} + +function addBeach(site) { + var x = site[0], + directrix = site[1], + lArc, + rArc, + dxl, + dxr, + node = beaches._; + + while (node) { + dxl = leftBreakPoint(node, directrix) - x; + if (dxl > epsilon$4) node = node.L; else { + dxr = x - rightBreakPoint(node, directrix); + if (dxr > epsilon$4) { + if (!node.R) { + lArc = node; + break; + } + node = node.R; + } else { + if (dxl > -epsilon$4) { + lArc = node.P; + rArc = node; + } else if (dxr > -epsilon$4) { + lArc = node; + rArc = node.N; + } else { + lArc = rArc = node; + } + break; + } + } + } + + createCell(site); + var newArc = createBeach(site); + beaches.insert(lArc, newArc); + + if (!lArc && !rArc) return; + + if (lArc === rArc) { + detachCircle(lArc); + rArc = createBeach(lArc.site); + beaches.insert(newArc, rArc); + newArc.edge = rArc.edge = createEdge(lArc.site, newArc.site); + 
attachCircle(lArc); + attachCircle(rArc); + return; + } + + if (!rArc) { // && lArc + newArc.edge = createEdge(lArc.site, newArc.site); + return; + } + + // else lArc !== rArc + detachCircle(lArc); + detachCircle(rArc); + + var lSite = lArc.site, + ax = lSite[0], + ay = lSite[1], + bx = site[0] - ax, + by = site[1] - ay, + rSite = rArc.site, + cx = rSite[0] - ax, + cy = rSite[1] - ay, + d = 2 * (bx * cy - by * cx), + hb = bx * bx + by * by, + hc = cx * cx + cy * cy, + vertex = [(cy * hb - by * hc) / d + ax, (bx * hc - cx * hb) / d + ay]; + + setEdgeEnd(rArc.edge, lSite, rSite, vertex); + newArc.edge = createEdge(lSite, site, null, vertex); + rArc.edge = createEdge(site, rSite, null, vertex); + attachCircle(lArc); + attachCircle(rArc); +} + +function leftBreakPoint(arc, directrix) { + var site = arc.site, + rfocx = site[0], + rfocy = site[1], + pby2 = rfocy - directrix; + + if (!pby2) return rfocx; + + var lArc = arc.P; + if (!lArc) return -Infinity; + + site = lArc.site; + var lfocx = site[0], + lfocy = site[1], + plby2 = lfocy - directrix; + + if (!plby2) return lfocx; + + var hl = lfocx - rfocx, + aby2 = 1 / pby2 - 1 / plby2, + b = hl / plby2; + + if (aby2) return (-b + Math.sqrt(b * b - 2 * aby2 * (hl * hl / (-2 * plby2) - lfocy + plby2 / 2 + rfocy - pby2 / 2))) / aby2 + rfocx; + + return (rfocx + lfocx) / 2; +} + +function rightBreakPoint(arc, directrix) { + var rArc = arc.N; + if (rArc) return leftBreakPoint(rArc, directrix); + var site = arc.site; + return site[1] === directrix ? 
site[0] : Infinity; +} + +var epsilon$4 = 1e-6; +var epsilon2$2 = 1e-12; +var beaches; +var cells; +var circles; +var edges; + +function triangleArea(a, b, c) { + return (a[0] - c[0]) * (b[1] - a[1]) - (a[0] - b[0]) * (c[1] - a[1]); +} + +function lexicographic(a, b) { + return b[1] - a[1] + || b[0] - a[0]; +} + +function Diagram(sites, extent) { + var site = sites.sort(lexicographic).pop(), + x, + y, + circle; + + edges = []; + cells = new Array(sites.length); + beaches = new RedBlackTree; + circles = new RedBlackTree; + + while (true) { + circle = firstCircle; + if (site && (!circle || site[1] < circle.y || (site[1] === circle.y && site[0] < circle.x))) { + if (site[0] !== x || site[1] !== y) { + addBeach(site); + x = site[0], y = site[1]; + } + site = sites.pop(); + } else if (circle) { + removeBeach(circle.arc); + } else { + break; + } + } + + sortCellHalfedges(); + + if (extent) { + var x0 = +extent[0][0], + y0 = +extent[0][1], + x1 = +extent[1][0], + y1 = +extent[1][1]; + clipEdges(x0, y0, x1, y1); + clipCells(x0, y0, x1, y1); + } + + this.edges = edges; + this.cells = cells; + + beaches = + circles = + edges = + cells = null; +} + +Diagram.prototype = { + constructor: Diagram, + + polygons: function() { + var edges = this.edges; + + return this.cells.map(function(cell) { + var polygon = cell.halfedges.map(function(i) { return cellHalfedgeStart(cell, edges[i]); }); + polygon.data = cell.site.data; + return polygon; + }); + }, + + triangles: function() { + var triangles = [], + edges = this.edges; + + this.cells.forEach(function(cell, i) { + if (!(m = (halfedges = cell.halfedges).length)) return; + var site = cell.site, + halfedges, + j = -1, + m, + s0, + e1 = edges[halfedges[m - 1]], + s1 = e1.left === site ? e1.right : e1.left; + + while (++j < m) { + s0 = s1; + e1 = edges[halfedges[j]]; + s1 = e1.left === site ? 
e1.right : e1.left; + if (s0 && s1 && i < s0.index && i < s1.index && triangleArea(site, s0, s1) < 0) { + triangles.push([site.data, s0.data, s1.data]); + } + } + }); + + return triangles; + }, + + links: function() { + return this.edges.filter(function(edge) { + return edge.right; + }).map(function(edge) { + return { + source: edge.left.data, + target: edge.right.data + }; + }); + }, + + find: function(x, y, radius) { + var that = this, i0, i1 = that._found || 0, n = that.cells.length, cell; + + // Use the previously-found cell, or start with an arbitrary one. + while (!(cell = that.cells[i1])) if (++i1 >= n) return null; + var dx = x - cell.site[0], dy = y - cell.site[1], d2 = dx * dx + dy * dy; + + // Traverse the half-edges to find a closer cell, if any. + do { + cell = that.cells[i0 = i1], i1 = null; + cell.halfedges.forEach(function(e) { + var edge = that.edges[e], v = edge.left; + if ((v === cell.site || !v) && !(v = edge.right)) return; + var vx = x - v[0], vy = y - v[1], v2 = vx * vx + vy * vy; + if (v2 < d2) d2 = v2, i1 = v.index; + }); + } while (i1 !== null); + + that._found = i0; + + return radius == null || d2 <= radius * radius ? cell.site : null; + } +}; + +function voronoi() { + var x = x$4, + y = y$4, + extent = null; + + function voronoi(data) { + return new Diagram(data.map(function(d, i) { + var s = [Math.round(x(d, i, data) / epsilon$4) * epsilon$4, Math.round(y(d, i, data) / epsilon$4) * epsilon$4]; + s.index = i; + s.data = d; + return s; + }), extent); + } + + voronoi.polygons = function(data) { + return voronoi(data).polygons(); + }; + + voronoi.links = function(data) { + return voronoi(data).links(); + }; + + voronoi.triangles = function(data) { + return voronoi(data).triangles(); + }; + + voronoi.x = function(_) { + return arguments.length ? (x = typeof _ === "function" ? _ : constant$c(+_), voronoi) : x; + }; + + voronoi.y = function(_) { + return arguments.length ? (y = typeof _ === "function" ? 
_ : constant$c(+_), voronoi) : y; + }; + + voronoi.extent = function(_) { + return arguments.length ? (extent = _ == null ? null : [[+_[0][0], +_[0][1]], [+_[1][0], +_[1][1]]], voronoi) : extent && [[extent[0][0], extent[0][1]], [extent[1][0], extent[1][1]]]; + }; + + voronoi.size = function(_) { + return arguments.length ? (extent = _ == null ? null : [[0, 0], [+_[0], +_[1]]], voronoi) : extent && [extent[1][0] - extent[0][0], extent[1][1] - extent[0][1]]; + }; + + return voronoi; +} + +function constant$d(x) { + return function() { + return x; + }; +} + +function ZoomEvent(target, type, transform) { + this.target = target; + this.type = type; + this.transform = transform; +} + +function Transform(k, x, y) { + this.k = k; + this.x = x; + this.y = y; +} + +Transform.prototype = { + constructor: Transform, + scale: function(k) { + return k === 1 ? this : new Transform(this.k * k, this.x, this.y); + }, + translate: function(x, y) { + return x === 0 & y === 0 ? this : new Transform(this.k, this.x + this.k * x, this.y + this.k * y); + }, + apply: function(point) { + return [point[0] * this.k + this.x, point[1] * this.k + this.y]; + }, + applyX: function(x) { + return x * this.k + this.x; + }, + applyY: function(y) { + return y * this.k + this.y; + }, + invert: function(location) { + return [(location[0] - this.x) / this.k, (location[1] - this.y) / this.k]; + }, + invertX: function(x) { + return (x - this.x) / this.k; + }, + invertY: function(y) { + return (y - this.y) / this.k; + }, + rescaleX: function(x) { + return x.copy().domain(x.range().map(this.invertX, this).map(x.invert, x)); + }, + rescaleY: function(y) { + return y.copy().domain(y.range().map(this.invertY, this).map(y.invert, y)); + }, + toString: function() { + return "translate(" + this.x + "," + this.y + ") scale(" + this.k + ")"; + } +}; + +var identity$9 = new Transform(1, 0, 0); + +transform$1.prototype = Transform.prototype; + +function transform$1(node) { + while (!node.__zoom) if (!(node = 
node.parentNode)) return identity$9; + return node.__zoom; +} + +function nopropagation$2() { + exports.event.stopImmediatePropagation(); +} + +function noevent$2() { + exports.event.preventDefault(); + exports.event.stopImmediatePropagation(); +} + +// Ignore right-click, since that should open the context menu. +function defaultFilter$2() { + return !exports.event.ctrlKey && !exports.event.button; +} + +function defaultExtent$1() { + var e = this; + if (e instanceof SVGElement) { + e = e.ownerSVGElement || e; + if (e.hasAttribute("viewBox")) { + e = e.viewBox.baseVal; + return [[e.x, e.y], [e.x + e.width, e.y + e.height]]; + } + return [[0, 0], [e.width.baseVal.value, e.height.baseVal.value]]; + } + return [[0, 0], [e.clientWidth, e.clientHeight]]; +} + +function defaultTransform() { + return this.__zoom || identity$9; +} + +function defaultWheelDelta() { + return -exports.event.deltaY * (exports.event.deltaMode === 1 ? 0.05 : exports.event.deltaMode ? 1 : 0.002); +} + +function defaultTouchable$2() { + return navigator.maxTouchPoints || ("ontouchstart" in this); +} + +function defaultConstrain(transform, extent, translateExtent) { + var dx0 = transform.invertX(extent[0][0]) - translateExtent[0][0], + dx1 = transform.invertX(extent[1][0]) - translateExtent[1][0], + dy0 = transform.invertY(extent[0][1]) - translateExtent[0][1], + dy1 = transform.invertY(extent[1][1]) - translateExtent[1][1]; + return transform.translate( + dx1 > dx0 ? (dx0 + dx1) / 2 : Math.min(0, dx0) || Math.max(0, dx1), + dy1 > dy0 ? 
(dy0 + dy1) / 2 : Math.min(0, dy0) || Math.max(0, dy1) + ); +} + +function zoom() { + var filter = defaultFilter$2, + extent = defaultExtent$1, + constrain = defaultConstrain, + wheelDelta = defaultWheelDelta, + touchable = defaultTouchable$2, + scaleExtent = [0, Infinity], + translateExtent = [[-Infinity, -Infinity], [Infinity, Infinity]], + duration = 250, + interpolate = interpolateZoom, + listeners = dispatch("start", "zoom", "end"), + touchstarting, + touchending, + touchDelay = 500, + wheelDelay = 150, + clickDistance2 = 0; + + function zoom(selection) { + selection + .property("__zoom", defaultTransform) + .on("wheel.zoom", wheeled) + .on("mousedown.zoom", mousedowned) + .on("dblclick.zoom", dblclicked) + .filter(touchable) + .on("touchstart.zoom", touchstarted) + .on("touchmove.zoom", touchmoved) + .on("touchend.zoom touchcancel.zoom", touchended) + .style("touch-action", "none") + .style("-webkit-tap-highlight-color", "rgba(0,0,0,0)"); + } + + zoom.transform = function(collection, transform, point) { + var selection = collection.selection ? collection.selection() : collection; + selection.property("__zoom", defaultTransform); + if (collection !== selection) { + schedule(collection, transform, point); + } else { + selection.interrupt().each(function() { + gesture(this, arguments) + .start() + .zoom(null, typeof transform === "function" ? transform.apply(this, arguments) : transform) + .end(); + }); + } + }; + + zoom.scaleBy = function(selection, k, p) { + zoom.scaleTo(selection, function() { + var k0 = this.__zoom.k, + k1 = typeof k === "function" ? k.apply(this, arguments) : k; + return k0 * k1; + }, p); + }; + + zoom.scaleTo = function(selection, k, p) { + zoom.transform(selection, function() { + var e = extent.apply(this, arguments), + t0 = this.__zoom, + p0 = p == null ? centroid(e) : typeof p === "function" ? p.apply(this, arguments) : p, + p1 = t0.invert(p0), + k1 = typeof k === "function" ? 
k.apply(this, arguments) : k; + return constrain(translate(scale(t0, k1), p0, p1), e, translateExtent); + }, p); + }; + + zoom.translateBy = function(selection, x, y) { + zoom.transform(selection, function() { + return constrain(this.__zoom.translate( + typeof x === "function" ? x.apply(this, arguments) : x, + typeof y === "function" ? y.apply(this, arguments) : y + ), extent.apply(this, arguments), translateExtent); + }); + }; + + zoom.translateTo = function(selection, x, y, p) { + zoom.transform(selection, function() { + var e = extent.apply(this, arguments), + t = this.__zoom, + p0 = p == null ? centroid(e) : typeof p === "function" ? p.apply(this, arguments) : p; + return constrain(identity$9.translate(p0[0], p0[1]).scale(t.k).translate( + typeof x === "function" ? -x.apply(this, arguments) : -x, + typeof y === "function" ? -y.apply(this, arguments) : -y + ), e, translateExtent); + }, p); + }; + + function scale(transform, k) { + k = Math.max(scaleExtent[0], Math.min(scaleExtent[1], k)); + return k === transform.k ? transform : new Transform(k, transform.x, transform.y); + } + + function translate(transform, p0, p1) { + var x = p0[0] - p1[0] * transform.k, y = p0[1] - p1[1] * transform.k; + return x === transform.x && y === transform.y ? transform : new Transform(transform.k, x, y); + } + + function centroid(extent) { + return [(+extent[0][0] + +extent[1][0]) / 2, (+extent[0][1] + +extent[1][1]) / 2]; + } + + function schedule(transition, transform, point) { + transition + .on("start.zoom", function() { gesture(this, arguments).start(); }) + .on("interrupt.zoom end.zoom", function() { gesture(this, arguments).end(); }) + .tween("zoom", function() { + var that = this, + args = arguments, + g = gesture(that, args), + e = extent.apply(that, args), + p = point == null ? centroid(e) : typeof point === "function" ? point.apply(that, args) : point, + w = Math.max(e[1][0] - e[0][0], e[1][1] - e[0][1]), + a = that.__zoom, + b = typeof transform === "function" ? 
transform.apply(that, args) : transform, + i = interpolate(a.invert(p).concat(w / a.k), b.invert(p).concat(w / b.k)); + return function(t) { + if (t === 1) t = b; // Avoid rounding error on end. + else { var l = i(t), k = w / l[2]; t = new Transform(k, p[0] - l[0] * k, p[1] - l[1] * k); } + g.zoom(null, t); + }; + }); + } + + function gesture(that, args, clean) { + return (!clean && that.__zooming) || new Gesture(that, args); + } + + function Gesture(that, args) { + this.that = that; + this.args = args; + this.active = 0; + this.extent = extent.apply(that, args); + this.taps = 0; + } + + Gesture.prototype = { + start: function() { + if (++this.active === 1) { + this.that.__zooming = this; + this.emit("start"); + } + return this; + }, + zoom: function(key, transform) { + if (this.mouse && key !== "mouse") this.mouse[1] = transform.invert(this.mouse[0]); + if (this.touch0 && key !== "touch") this.touch0[1] = transform.invert(this.touch0[0]); + if (this.touch1 && key !== "touch") this.touch1[1] = transform.invert(this.touch1[0]); + this.that.__zoom = transform; + this.emit("zoom"); + return this; + }, + end: function() { + if (--this.active === 0) { + delete this.that.__zooming; + this.emit("end"); + } + return this; + }, + emit: function(type) { + customEvent(new ZoomEvent(zoom, type, this.that.__zoom), listeners.apply, listeners, [type, this.that, this.args]); + } + }; + + function wheeled() { + if (!filter.apply(this, arguments)) return; + var g = gesture(this, arguments), + t = this.__zoom, + k = Math.max(scaleExtent[0], Math.min(scaleExtent[1], t.k * Math.pow(2, wheelDelta.apply(this, arguments)))), + p = mouse(this); + + // If the mouse is in the same location as before, reuse it. + // If there were recent wheel events, reset the wheel idle timeout. 
+ if (g.wheel) { + if (g.mouse[0][0] !== p[0] || g.mouse[0][1] !== p[1]) { + g.mouse[1] = t.invert(g.mouse[0] = p); + } + clearTimeout(g.wheel); + } + + // If this wheel event won’t trigger a transform change, ignore it. + else if (t.k === k) return; + + // Otherwise, capture the mouse point and location at the start. + else { + g.mouse = [p, t.invert(p)]; + interrupt(this); + g.start(); + } + + noevent$2(); + g.wheel = setTimeout(wheelidled, wheelDelay); + g.zoom("mouse", constrain(translate(scale(t, k), g.mouse[0], g.mouse[1]), g.extent, translateExtent)); + + function wheelidled() { + g.wheel = null; + g.end(); + } + } + + function mousedowned() { + if (touchending || !filter.apply(this, arguments)) return; + var g = gesture(this, arguments, true), + v = select(exports.event.view).on("mousemove.zoom", mousemoved, true).on("mouseup.zoom", mouseupped, true), + p = mouse(this), + x0 = exports.event.clientX, + y0 = exports.event.clientY; + + dragDisable(exports.event.view); + nopropagation$2(); + g.mouse = [p, this.__zoom.invert(p)]; + interrupt(this); + g.start(); + + function mousemoved() { + noevent$2(); + if (!g.moved) { + var dx = exports.event.clientX - x0, dy = exports.event.clientY - y0; + g.moved = dx * dx + dy * dy > clickDistance2; + } + g.zoom("mouse", constrain(translate(g.that.__zoom, g.mouse[0] = mouse(g.that), g.mouse[1]), g.extent, translateExtent)); + } + + function mouseupped() { + v.on("mousemove.zoom mouseup.zoom", null); + yesdrag(exports.event.view, g.moved); + noevent$2(); + g.end(); + } + } + + function dblclicked() { + if (!filter.apply(this, arguments)) return; + var t0 = this.__zoom, + p0 = mouse(this), + p1 = t0.invert(p0), + k1 = t0.k * (exports.event.shiftKey ? 
0.5 : 2), + t1 = constrain(translate(scale(t0, k1), p0, p1), extent.apply(this, arguments), translateExtent); + + noevent$2(); + if (duration > 0) select(this).transition().duration(duration).call(schedule, t1, p0); + else select(this).call(zoom.transform, t1); + } + + function touchstarted() { + if (!filter.apply(this, arguments)) return; + var touches = exports.event.touches, + n = touches.length, + g = gesture(this, arguments, exports.event.changedTouches.length === n), + started, i, t, p; + + nopropagation$2(); + for (i = 0; i < n; ++i) { + t = touches[i], p = touch(this, touches, t.identifier); + p = [p, this.__zoom.invert(p), t.identifier]; + if (!g.touch0) g.touch0 = p, started = true, g.taps = 1 + !!touchstarting; + else if (!g.touch1 && g.touch0[2] !== p[2]) g.touch1 = p, g.taps = 0; + } + + if (touchstarting) touchstarting = clearTimeout(touchstarting); + + if (started) { + if (g.taps < 2) touchstarting = setTimeout(function() { touchstarting = null; }, touchDelay); + interrupt(this); + g.start(); + } + } + + function touchmoved() { + if (!this.__zooming) return; + var g = gesture(this, arguments), + touches = exports.event.changedTouches, + n = touches.length, i, t, p, l; + + noevent$2(); + if (touchstarting) touchstarting = clearTimeout(touchstarting); + g.taps = 0; + for (i = 0; i < n; ++i) { + t = touches[i], p = touch(this, touches, t.identifier); + if (g.touch0 && g.touch0[2] === t.identifier) g.touch0[0] = p; + else if (g.touch1 && g.touch1[2] === t.identifier) g.touch1[0] = p; + } + t = g.that.__zoom; + if (g.touch1) { + var p0 = g.touch0[0], l0 = g.touch0[1], + p1 = g.touch1[0], l1 = g.touch1[1], + dp = (dp = p1[0] - p0[0]) * dp + (dp = p1[1] - p0[1]) * dp, + dl = (dl = l1[0] - l0[0]) * dl + (dl = l1[1] - l0[1]) * dl; + t = scale(t, Math.sqrt(dp / dl)); + p = [(p0[0] + p1[0]) / 2, (p0[1] + p1[1]) / 2]; + l = [(l0[0] + l1[0]) / 2, (l0[1] + l1[1]) / 2]; + } + else if (g.touch0) p = g.touch0[0], l = g.touch0[1]; + else return; + g.zoom("touch", 
constrain(translate(t, p, l), g.extent, translateExtent)); + } + + function touchended() { + if (!this.__zooming) return; + var g = gesture(this, arguments), + touches = exports.event.changedTouches, + n = touches.length, i, t; + + nopropagation$2(); + if (touchending) clearTimeout(touchending); + touchending = setTimeout(function() { touchending = null; }, touchDelay); + for (i = 0; i < n; ++i) { + t = touches[i]; + if (g.touch0 && g.touch0[2] === t.identifier) delete g.touch0; + else if (g.touch1 && g.touch1[2] === t.identifier) delete g.touch1; + } + if (g.touch1 && !g.touch0) g.touch0 = g.touch1, delete g.touch1; + if (g.touch0) g.touch0[1] = this.__zoom.invert(g.touch0[0]); + else { + g.end(); + // If this was a dbltap, reroute to the (optional) dblclick.zoom handler. + if (g.taps === 2) { + var p = select(this).on("dblclick.zoom"); + if (p) p.apply(this, arguments); + } + } + } + + zoom.wheelDelta = function(_) { + return arguments.length ? (wheelDelta = typeof _ === "function" ? _ : constant$d(+_), zoom) : wheelDelta; + }; + + zoom.filter = function(_) { + return arguments.length ? (filter = typeof _ === "function" ? _ : constant$d(!!_), zoom) : filter; + }; + + zoom.touchable = function(_) { + return arguments.length ? (touchable = typeof _ === "function" ? _ : constant$d(!!_), zoom) : touchable; + }; + + zoom.extent = function(_) { + return arguments.length ? (extent = typeof _ === "function" ? _ : constant$d([[+_[0][0], +_[0][1]], [+_[1][0], +_[1][1]]]), zoom) : extent; + }; + + zoom.scaleExtent = function(_) { + return arguments.length ? (scaleExtent[0] = +_[0], scaleExtent[1] = +_[1], zoom) : [scaleExtent[0], scaleExtent[1]]; + }; + + zoom.translateExtent = function(_) { + return arguments.length ? 
(translateExtent[0][0] = +_[0][0], translateExtent[1][0] = +_[1][0], translateExtent[0][1] = +_[0][1], translateExtent[1][1] = +_[1][1], zoom) : [[translateExtent[0][0], translateExtent[0][1]], [translateExtent[1][0], translateExtent[1][1]]]; + }; + + zoom.constrain = function(_) { + return arguments.length ? (constrain = _, zoom) : constrain; + }; + + zoom.duration = function(_) { + return arguments.length ? (duration = +_, zoom) : duration; + }; + + zoom.interpolate = function(_) { + return arguments.length ? (interpolate = _, zoom) : interpolate; + }; + + zoom.on = function() { + var value = listeners.on.apply(listeners, arguments); + return value === listeners ? zoom : value; + }; + + zoom.clickDistance = function(_) { + return arguments.length ? (clickDistance2 = (_ = +_) * _, zoom) : Math.sqrt(clickDistance2); + }; + + return zoom; +} + +exports.FormatSpecifier = FormatSpecifier; +exports.active = active; +exports.arc = arc; +exports.area = area$3; +exports.areaRadial = areaRadial; +exports.ascending = ascending; +exports.autoType = autoType; +exports.axisBottom = axisBottom; +exports.axisLeft = axisLeft; +exports.axisRight = axisRight; +exports.axisTop = axisTop; +exports.bisect = bisectRight; +exports.bisectLeft = bisectLeft; +exports.bisectRight = bisectRight; +exports.bisector = bisector; +exports.blob = blob; +exports.brush = brush; +exports.brushSelection = brushSelection; +exports.brushX = brushX; +exports.brushY = brushY; +exports.buffer = buffer; +exports.chord = chord; +exports.clientPoint = point; +exports.cluster = cluster; +exports.color = color; +exports.contourDensity = density; +exports.contours = contours; +exports.create = create; +exports.creator = creator; +exports.cross = cross; +exports.csv = csv$1; +exports.csvFormat = csvFormat; +exports.csvFormatBody = csvFormatBody; +exports.csvFormatRow = csvFormatRow; +exports.csvFormatRows = csvFormatRows; +exports.csvFormatValue = csvFormatValue; +exports.csvParse = csvParse; 
+exports.csvParseRows = csvParseRows; +exports.cubehelix = cubehelix; +exports.curveBasis = basis$2; +exports.curveBasisClosed = basisClosed$1; +exports.curveBasisOpen = basisOpen; +exports.curveBundle = bundle; +exports.curveCardinal = cardinal; +exports.curveCardinalClosed = cardinalClosed; +exports.curveCardinalOpen = cardinalOpen; +exports.curveCatmullRom = catmullRom; +exports.curveCatmullRomClosed = catmullRomClosed; +exports.curveCatmullRomOpen = catmullRomOpen; +exports.curveLinear = curveLinear; +exports.curveLinearClosed = linearClosed; +exports.curveMonotoneX = monotoneX; +exports.curveMonotoneY = monotoneY; +exports.curveNatural = natural; +exports.curveStep = step; +exports.curveStepAfter = stepAfter; +exports.curveStepBefore = stepBefore; +exports.customEvent = customEvent; +exports.descending = descending; +exports.deviation = deviation; +exports.dispatch = dispatch; +exports.drag = drag; +exports.dragDisable = dragDisable; +exports.dragEnable = yesdrag; +exports.dsv = dsv; +exports.dsvFormat = dsvFormat; +exports.easeBack = backInOut; +exports.easeBackIn = backIn; +exports.easeBackInOut = backInOut; +exports.easeBackOut = backOut; +exports.easeBounce = bounceOut; +exports.easeBounceIn = bounceIn; +exports.easeBounceInOut = bounceInOut; +exports.easeBounceOut = bounceOut; +exports.easeCircle = circleInOut; +exports.easeCircleIn = circleIn; +exports.easeCircleInOut = circleInOut; +exports.easeCircleOut = circleOut; +exports.easeCubic = cubicInOut; +exports.easeCubicIn = cubicIn; +exports.easeCubicInOut = cubicInOut; +exports.easeCubicOut = cubicOut; +exports.easeElastic = elasticOut; +exports.easeElasticIn = elasticIn; +exports.easeElasticInOut = elasticInOut; +exports.easeElasticOut = elasticOut; +exports.easeExp = expInOut; +exports.easeExpIn = expIn; +exports.easeExpInOut = expInOut; +exports.easeExpOut = expOut; +exports.easeLinear = linear$1; +exports.easePoly = polyInOut; +exports.easePolyIn = polyIn; +exports.easePolyInOut = polyInOut; 
+exports.easePolyOut = polyOut; +exports.easeQuad = quadInOut; +exports.easeQuadIn = quadIn; +exports.easeQuadInOut = quadInOut; +exports.easeQuadOut = quadOut; +exports.easeSin = sinInOut; +exports.easeSinIn = sinIn; +exports.easeSinInOut = sinInOut; +exports.easeSinOut = sinOut; +exports.entries = entries; +exports.extent = extent; +exports.forceCenter = center$1; +exports.forceCollide = collide; +exports.forceLink = link; +exports.forceManyBody = manyBody; +exports.forceRadial = radial; +exports.forceSimulation = simulation; +exports.forceX = x$2; +exports.forceY = y$2; +exports.formatDefaultLocale = defaultLocale; +exports.formatLocale = formatLocale; +exports.formatSpecifier = formatSpecifier; +exports.geoAlbers = albers; +exports.geoAlbersUsa = albersUsa; +exports.geoArea = area$1; +exports.geoAzimuthalEqualArea = azimuthalEqualArea; +exports.geoAzimuthalEqualAreaRaw = azimuthalEqualAreaRaw; +exports.geoAzimuthalEquidistant = azimuthalEquidistant; +exports.geoAzimuthalEquidistantRaw = azimuthalEquidistantRaw; +exports.geoBounds = bounds; +exports.geoCentroid = centroid; +exports.geoCircle = circle; +exports.geoClipAntimeridian = clipAntimeridian; +exports.geoClipCircle = clipCircle; +exports.geoClipExtent = extent$1; +exports.geoClipRectangle = clipRectangle; +exports.geoConicConformal = conicConformal; +exports.geoConicConformalRaw = conicConformalRaw; +exports.geoConicEqualArea = conicEqualArea; +exports.geoConicEqualAreaRaw = conicEqualAreaRaw; +exports.geoConicEquidistant = conicEquidistant; +exports.geoConicEquidistantRaw = conicEquidistantRaw; +exports.geoContains = contains$1; +exports.geoDistance = distance; +exports.geoEqualEarth = equalEarth; +exports.geoEqualEarthRaw = equalEarthRaw; +exports.geoEquirectangular = equirectangular; +exports.geoEquirectangularRaw = equirectangularRaw; +exports.geoGnomonic = gnomonic; +exports.geoGnomonicRaw = gnomonicRaw; +exports.geoGraticule = graticule; +exports.geoGraticule10 = graticule10; +exports.geoIdentity = 
identity$5; +exports.geoInterpolate = interpolate$1; +exports.geoLength = length$1; +exports.geoMercator = mercator; +exports.geoMercatorRaw = mercatorRaw; +exports.geoNaturalEarth1 = naturalEarth1; +exports.geoNaturalEarth1Raw = naturalEarth1Raw; +exports.geoOrthographic = orthographic; +exports.geoOrthographicRaw = orthographicRaw; +exports.geoPath = index$1; +exports.geoProjection = projection; +exports.geoProjectionMutator = projectionMutator; +exports.geoRotation = rotation; +exports.geoStereographic = stereographic; +exports.geoStereographicRaw = stereographicRaw; +exports.geoStream = geoStream; +exports.geoTransform = transform; +exports.geoTransverseMercator = transverseMercator; +exports.geoTransverseMercatorRaw = transverseMercatorRaw; +exports.gray = gray; +exports.hcl = hcl; +exports.hierarchy = hierarchy; +exports.histogram = histogram; +exports.hsl = hsl; +exports.html = html; +exports.image = image; +exports.interpolate = interpolateValue; +exports.interpolateArray = array$1; +exports.interpolateBasis = basis$1; +exports.interpolateBasisClosed = basisClosed; +exports.interpolateBlues = Blues; +exports.interpolateBrBG = BrBG; +exports.interpolateBuGn = BuGn; +exports.interpolateBuPu = BuPu; +exports.interpolateCividis = cividis; +exports.interpolateCool = cool; +exports.interpolateCubehelix = cubehelix$2; +exports.interpolateCubehelixDefault = cubehelix$3; +exports.interpolateCubehelixLong = cubehelixLong; +exports.interpolateDate = date; +exports.interpolateDiscrete = discrete; +exports.interpolateGnBu = GnBu; +exports.interpolateGreens = Greens; +exports.interpolateGreys = Greys; +exports.interpolateHcl = hcl$2; +exports.interpolateHclLong = hclLong; +exports.interpolateHsl = hsl$2; +exports.interpolateHslLong = hslLong; +exports.interpolateHue = hue$1; +exports.interpolateInferno = inferno; +exports.interpolateLab = lab$1; +exports.interpolateMagma = magma; +exports.interpolateNumber = interpolateNumber; +exports.interpolateNumberArray = 
numberArray; +exports.interpolateObject = object; +exports.interpolateOrRd = OrRd; +exports.interpolateOranges = Oranges; +exports.interpolatePRGn = PRGn; +exports.interpolatePiYG = PiYG; +exports.interpolatePlasma = plasma; +exports.interpolatePuBu = PuBu; +exports.interpolatePuBuGn = PuBuGn; +exports.interpolatePuOr = PuOr; +exports.interpolatePuRd = PuRd; +exports.interpolatePurples = Purples; +exports.interpolateRainbow = rainbow; +exports.interpolateRdBu = RdBu; +exports.interpolateRdGy = RdGy; +exports.interpolateRdPu = RdPu; +exports.interpolateRdYlBu = RdYlBu; +exports.interpolateRdYlGn = RdYlGn; +exports.interpolateReds = Reds; +exports.interpolateRgb = interpolateRgb; +exports.interpolateRgbBasis = rgbBasis; +exports.interpolateRgbBasisClosed = rgbBasisClosed; +exports.interpolateRound = interpolateRound; +exports.interpolateSinebow = sinebow; +exports.interpolateSpectral = Spectral; +exports.interpolateString = interpolateString; +exports.interpolateTransformCss = interpolateTransformCss; +exports.interpolateTransformSvg = interpolateTransformSvg; +exports.interpolateTurbo = turbo; +exports.interpolateViridis = viridis; +exports.interpolateWarm = warm; +exports.interpolateYlGn = YlGn; +exports.interpolateYlGnBu = YlGnBu; +exports.interpolateYlOrBr = YlOrBr; +exports.interpolateYlOrRd = YlOrRd; +exports.interpolateZoom = interpolateZoom; +exports.interrupt = interrupt; +exports.interval = interval$1; +exports.isoFormat = formatIso; +exports.isoParse = parseIso; +exports.json = json; +exports.keys = keys; +exports.lab = lab; +exports.lch = lch; +exports.line = line; +exports.lineRadial = lineRadial$1; +exports.linkHorizontal = linkHorizontal; +exports.linkRadial = linkRadial; +exports.linkVertical = linkVertical; +exports.local = local; +exports.map = map$1; +exports.matcher = matcher; +exports.max = max; +exports.mean = mean; +exports.median = median; +exports.merge = merge; +exports.min = min; +exports.mouse = mouse; +exports.namespace = namespace; 
+exports.namespaces = namespaces; +exports.nest = nest; +exports.now = now; +exports.pack = index$2; +exports.packEnclose = enclose; +exports.packSiblings = siblings; +exports.pairs = pairs; +exports.partition = partition; +exports.path = path; +exports.permute = permute; +exports.pie = pie; +exports.piecewise = piecewise; +exports.pointRadial = pointRadial; +exports.polygonArea = area$2; +exports.polygonCentroid = centroid$1; +exports.polygonContains = contains$2; +exports.polygonHull = hull; +exports.polygonLength = length$2; +exports.precisionFixed = precisionFixed; +exports.precisionPrefix = precisionPrefix; +exports.precisionRound = precisionRound; +exports.quadtree = quadtree; +exports.quantile = threshold; +exports.quantize = quantize; +exports.radialArea = areaRadial; +exports.radialLine = lineRadial$1; +exports.randomBates = bates; +exports.randomExponential = exponential$1; +exports.randomIrwinHall = irwinHall; +exports.randomLogNormal = logNormal; +exports.randomNormal = normal; +exports.randomUniform = uniform; +exports.range = sequence; +exports.rgb = rgb; +exports.ribbon = ribbon; +exports.scaleBand = band; +exports.scaleDiverging = diverging; +exports.scaleDivergingLog = divergingLog; +exports.scaleDivergingPow = divergingPow; +exports.scaleDivergingSqrt = divergingSqrt; +exports.scaleDivergingSymlog = divergingSymlog; +exports.scaleIdentity = identity$7; +exports.scaleImplicit = implicit; +exports.scaleLinear = linear$2; +exports.scaleLog = log$1; +exports.scaleOrdinal = ordinal; +exports.scalePoint = point$1; +exports.scalePow = pow$1; +exports.scaleQuantile = quantile; +exports.scaleQuantize = quantize$1; +exports.scaleSequential = sequential; +exports.scaleSequentialLog = sequentialLog; +exports.scaleSequentialPow = sequentialPow; +exports.scaleSequentialQuantile = sequentialQuantile; +exports.scaleSequentialSqrt = sequentialSqrt; +exports.scaleSequentialSymlog = sequentialSymlog; +exports.scaleSqrt = sqrt$1; +exports.scaleSymlog = symlog; 
+exports.scaleThreshold = threshold$1; +exports.scaleTime = time; +exports.scaleUtc = utcTime; +exports.scan = scan; +exports.schemeAccent = Accent; +exports.schemeBlues = scheme$l; +exports.schemeBrBG = scheme; +exports.schemeBuGn = scheme$9; +exports.schemeBuPu = scheme$a; +exports.schemeCategory10 = category10; +exports.schemeDark2 = Dark2; +exports.schemeGnBu = scheme$b; +exports.schemeGreens = scheme$m; +exports.schemeGreys = scheme$n; +exports.schemeOrRd = scheme$c; +exports.schemeOranges = scheme$q; +exports.schemePRGn = scheme$1; +exports.schemePaired = Paired; +exports.schemePastel1 = Pastel1; +exports.schemePastel2 = Pastel2; +exports.schemePiYG = scheme$2; +exports.schemePuBu = scheme$e; +exports.schemePuBuGn = scheme$d; +exports.schemePuOr = scheme$3; +exports.schemePuRd = scheme$f; +exports.schemePurples = scheme$o; +exports.schemeRdBu = scheme$4; +exports.schemeRdGy = scheme$5; +exports.schemeRdPu = scheme$g; +exports.schemeRdYlBu = scheme$6; +exports.schemeRdYlGn = scheme$7; +exports.schemeReds = scheme$p; +exports.schemeSet1 = Set1; +exports.schemeSet2 = Set2; +exports.schemeSet3 = Set3; +exports.schemeSpectral = scheme$8; +exports.schemeTableau10 = Tableau10; +exports.schemeYlGn = scheme$i; +exports.schemeYlGnBu = scheme$h; +exports.schemeYlOrBr = scheme$j; +exports.schemeYlOrRd = scheme$k; +exports.select = select; +exports.selectAll = selectAll; +exports.selection = selection; +exports.selector = selector; +exports.selectorAll = selectorAll; +exports.set = set$2; +exports.shuffle = shuffle; +exports.stack = stack; +exports.stackOffsetDiverging = diverging$1; +exports.stackOffsetExpand = expand; +exports.stackOffsetNone = none$1; +exports.stackOffsetSilhouette = silhouette; +exports.stackOffsetWiggle = wiggle; +exports.stackOrderAppearance = appearance; +exports.stackOrderAscending = ascending$3; +exports.stackOrderDescending = descending$2; +exports.stackOrderInsideOut = insideOut; +exports.stackOrderNone = none$2; +exports.stackOrderReverse = 
reverse; +exports.stratify = stratify; +exports.style = styleValue; +exports.sum = sum; +exports.svg = svg; +exports.symbol = symbol; +exports.symbolCircle = circle$2; +exports.symbolCross = cross$2; +exports.symbolDiamond = diamond; +exports.symbolSquare = square; +exports.symbolStar = star; +exports.symbolTriangle = triangle; +exports.symbolWye = wye; +exports.symbols = symbols; +exports.text = text; +exports.thresholdFreedmanDiaconis = freedmanDiaconis; +exports.thresholdScott = scott; +exports.thresholdSturges = thresholdSturges; +exports.tickFormat = tickFormat; +exports.tickIncrement = tickIncrement; +exports.tickStep = tickStep; +exports.ticks = ticks; +exports.timeDay = day; +exports.timeDays = days; +exports.timeFormatDefaultLocale = defaultLocale$1; +exports.timeFormatLocale = formatLocale$1; +exports.timeFriday = friday; +exports.timeFridays = fridays; +exports.timeHour = hour; +exports.timeHours = hours; +exports.timeInterval = newInterval; +exports.timeMillisecond = millisecond; +exports.timeMilliseconds = milliseconds; +exports.timeMinute = minute; +exports.timeMinutes = minutes; +exports.timeMonday = monday; +exports.timeMondays = mondays; +exports.timeMonth = month; +exports.timeMonths = months; +exports.timeSaturday = saturday; +exports.timeSaturdays = saturdays; +exports.timeSecond = second; +exports.timeSeconds = seconds; +exports.timeSunday = sunday; +exports.timeSundays = sundays; +exports.timeThursday = thursday; +exports.timeThursdays = thursdays; +exports.timeTuesday = tuesday; +exports.timeTuesdays = tuesdays; +exports.timeWednesday = wednesday; +exports.timeWednesdays = wednesdays; +exports.timeWeek = sunday; +exports.timeWeeks = sundays; +exports.timeYear = year; +exports.timeYears = years; +exports.timeout = timeout$1; +exports.timer = timer; +exports.timerFlush = timerFlush; +exports.touch = touch; +exports.touches = touches; +exports.transition = transition; +exports.transpose = transpose; +exports.tree = tree; +exports.treemap = 
index$3; +exports.treemapBinary = binary; +exports.treemapDice = treemapDice; +exports.treemapResquarify = resquarify; +exports.treemapSlice = treemapSlice; +exports.treemapSliceDice = sliceDice; +exports.treemapSquarify = squarify; +exports.tsv = tsv$1; +exports.tsvFormat = tsvFormat; +exports.tsvFormatBody = tsvFormatBody; +exports.tsvFormatRow = tsvFormatRow; +exports.tsvFormatRows = tsvFormatRows; +exports.tsvFormatValue = tsvFormatValue; +exports.tsvParse = tsvParse; +exports.tsvParseRows = tsvParseRows; +exports.utcDay = utcDay; +exports.utcDays = utcDays; +exports.utcFriday = utcFriday; +exports.utcFridays = utcFridays; +exports.utcHour = utcHour; +exports.utcHours = utcHours; +exports.utcMillisecond = millisecond; +exports.utcMilliseconds = milliseconds; +exports.utcMinute = utcMinute; +exports.utcMinutes = utcMinutes; +exports.utcMonday = utcMonday; +exports.utcMondays = utcMondays; +exports.utcMonth = utcMonth; +exports.utcMonths = utcMonths; +exports.utcSaturday = utcSaturday; +exports.utcSaturdays = utcSaturdays; +exports.utcSecond = second; +exports.utcSeconds = seconds; +exports.utcSunday = utcSunday; +exports.utcSundays = utcSundays; +exports.utcThursday = utcThursday; +exports.utcThursdays = utcThursdays; +exports.utcTuesday = utcTuesday; +exports.utcTuesdays = utcTuesdays; +exports.utcWednesday = utcWednesday; +exports.utcWednesdays = utcWednesdays; +exports.utcWeek = utcSunday; +exports.utcWeeks = utcSundays; +exports.utcYear = utcYear; +exports.utcYears = utcYears; +exports.values = values; +exports.variance = variance; +exports.version = version; +exports.voronoi = voronoi; +exports.window = defaultView; +exports.xml = xml; +exports.zip = zip; +exports.zoom = zoom; +exports.zoomIdentity = identity$9; +exports.zoomTransform = transform$1; + +Object.defineProperty(exports, '__esModule', { value: true }); + +})); diff --git a/alluvial/index.css b/alluvial/index.css new file mode 100644 index 0000000..f195eee --- /dev/null +++ b/alluvial/index.css 
@@ -0,0 +1,176 @@ + +/* First Line */ +.titleContainer{ + float: left; + width: 90%; + font-size: 40px; + text-align: center; + vertical-align: middle; + margin: auto; +} + +#hideButton{ + float: left; + width: 10%; +} + +.selector{ + margin-top: 10%; + margin-bottom: 10%; +} + +/* Second Line */ + +#selectSankeyOptions{ + float: left; + margin-left: auto; + margin-right: auto; + height: 100%; + width: 100%; +} + +/* Third Line */ + +#selectStationOptions{ + margin-left: 1%; + margin-right: 1%; + margin-top: 2.5%; + margin-bottom: 2.5%; + width: 7%; + float: left; +} + +#selectModeButton{ + margin-top: 50%; +} + +.sankeyRange{ + width: 550px; +} + +.sankeyContainer{ + z-index: 0; + align-items: center; + border:1px solid black; + height: 500px; + width: 85%; + overflow: hidden; + float: left; + margin-top: 15px; + margin-bottom: 15px; + margin-left: 15px; + margin-right: 15px; +} + +.stationDetails{ + /*display: none;*/ + float: left; + width: 98%; + margin-left: 1%; + margin-right: 1%; + overflow: auto; +} + +.lineContainer{ + display: inline-block; + vertical-align:top; + font-size: 11px; + line-height: 0.2cm; + margin: 1%; +} + +.lineTitleContainer{ + font-weight: bold; +} + + +.svg-background{ + z-index: 1; + background-color: white; + height: 90%; + margin: 0 auto; + display: block; +} + +.nodes{ + stroke-width: 5; +} + + +.tooltip{ + z-index: 2; + background-color: rgb(245, 245, 245); + border: solid; + border-width: 2px; + border-radius: 5px; + padding: 5px; +} + +/* Fourth Line */ +.routemapTitle{ + width: 45%; + float: left; + margin: 2.5%; + text-align: center; + vertical-align: middle; +} + + +/* Fifth Line */ +#routemapSelector{ + width: 100%; + float: left; + text-align: center; +} + +#routemapSelect{ + width: 60%; + float: left; +} +#routemapTimestepRange{ + width: 90%; +} + +#selectCluster{ + width: 20%; + float: left; +} + +#fullGammaButton{ + width: 15%; + float: left; +} + +/* Sixth Line */ +.routemapContainer{ + width: 42%; + float: left; + 
margin:auto; + overflow: hidden; +} +.routemapImage{ + max-width: 100%; + max-height: 100%; +} + +#gamma{ + width: 12%; + float: left; + vertical-align: middle; + margin-top: 5%; + margin-bottom: auto; + margin-right: 1%; + margin-left: 1%; +} + +#gammaImage{ + max-width: 100%; + max-height: 100%; + float: left; + vertical-align:top; + margin-top: 5%; + margin-bottom: 5%; + margin-left: 1%; + margin-right: 1%; +} + diff --git a/alluvial/index.html b/alluvial/index.html new file mode 100644 index 0000000..bcff095 --- /dev/null +++ b/alluvial/index.html @@ -0,0 +1,133 @@ + + +Class Transitions + + + + + + + + + + + + Hide Sankey settings + + + + + Clusters of entry stations over time + + + + + Select Sankey options : + + node width = + + + + node padding = + + + + width = + + + + height = + + + + tick font size = + + + + + + + Select Station + + + Or select Line + + + + Select analysis mode: + + entry + exit + block + + + + + + + + + + + + + + + + + + + + Select entry cluster   + + + + + + Show full cluster interactions + + + + + + Clusters of entry stations + + + + Clusters of exit stations + + + + + + + + + + + + + + + + + + + + + diff --git a/clean_package.py b/clean_package.py new file mode 100644 index 0000000..e89d767 --- /dev/null +++ b/clean_package.py @@ -0,0 +1,53 @@ +"""Function to clean all temporary files from the repo""" + +import shutil +import os + + +DEBUG_DIR = './dcblockmodels/model_debug_output/' +NOTEBOOK_DIR = './notebooks/' + + +def delete_dir(dirpath): + if os.path.exists(dirpath) and os.path.isdir(dirpath): + shutil.rmtree(dirpath) + + +def delete_file(filename): + if os.path.exists(filename): + os.remove(filename) + + +def clean(): + if os.path.exists(DEBUG_DIR): + for file in os.listdir(DEBUG_DIR): + shutil.rmtree(DEBUG_DIR + file) + delete_dir(DEBUG_DIR) + + for file in os.listdir(NOTEBOOK_DIR): + ext = file.split('.')[-1] + if ext in ['npy', 'npz']: + os.remove(NOTEBOOK_DIR + file) + + delete_dir('./datasets/classic') + 
delete_dir('./saved_models') + + delete_file('sparsebm.log') + delete_file(f'{NOTEBOOK_DIR}sparsebm.log') + delete_file('./dcblockmodels/sparsebm.log') + + delete_dir('.ipynb_checkpoints') + delete_dir(f'{NOTEBOOK_DIR}/.ipynb_checkpoints') + + delete_dir('./dcblockmodels/tests/.pytest_cache') + delete_dir('./dcblockmodels/models/.pytest_cache') + delete_dir('.pytest_cache') + + delete_dir('./dcblockmodels/__pycache__') + delete_dir('./dcblockmodels/models/__pycache__') + delete_dir('./dcblockmodels/models/utils/__pycache__') + delete_dir('./dcblockmodels/tests/__pycache__') + + +if __name__ == '__main__': + clean() diff --git a/dcblockmodels/__init__.py b/dcblockmodels/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dcblockmodels/data.py b/dcblockmodels/data.py new file mode 100644 index 0000000..1e8dc82 --- /dev/null +++ b/dcblockmodels/data.py @@ -0,0 +1,407 @@ +"""Methods to sample data from static or dynamic LBM/SBM""" + +import numpy as np + + +def sample_edge(mu_it, nu_it, gamma_tkl, block_sparsity_matrix_tkl): + """ + Sample a given edge with paramters mu_it, nu_it, gamma_tkl + in a Poisson distribution, while keeping a sparsity of block_sparsity_matrix_tkl + (defined in [0, 1)) + """ + if np.random.rand() < block_sparsity_matrix_tkl: + return 0 + return np.random.poisson(lam=mu_it * nu_it * gamma_tkl) + + +def generate_data( + T, + model_type, + dimensions, + n_clusters, + prior_init, + prior_trans, + gamma, + with_margins, + margins, + self_loops, + directed, + noise_level, + with_absent_nodes, + absent_nodes, + dtype, + block_sparsity_matrix): + """ + noise_level is the std of gaussian noise + block_sparsity_matrix is 1 - connectivity_matrix + in the binary case + """ + assert model_type in ['SBM', 'LBM'] + assert with_margins in [False, True] + assert dtype in ['int32', 'int64'] + + # parses the inputs + N = dimensions['N'] + Kz = n_clusters['Kz'] + alpha = prior_init['alpha'] + if model_type == 'LBM': + beta = prior_init['beta'] + D 
= dimensions['D'] + Kw = n_clusters['Kw'] + elif model_type == 'SBM': + D = N + Kw = Kz + W = None # for consistency + + print('Data characteristics: ') + print(f' - T, N, D = {T, N, D}') + if T == 1: + print(' - Static ' + model_type) + else: + print(' - Dynamic ' + model_type) + + if T > 1: + pi = prior_trans['pi'] + if model_type == 'LBM': + rho = prior_trans['rho'] + + if not directed: + assert model_type == 'SBM' + print(' - Undirected') + else: + if model_type == 'SBM': + print(' - Directed') + + if not self_loops: + assert model_type == 'SBM' or N == D + print(' - Without self-loops') + else: + print(' - With self-loops') + + if block_sparsity_matrix is not None: + assert block_sparsity_matrix.shape == gamma.shape + else: + block_sparsity_matrix = np.zeros_like(gamma, dtype=dtype) + + # the margins are either generated in generate_margins() + # or we generate constant margins equal to 1 + if with_margins: + print(' - With margins') + mu = margins['mu'] + nu = margins['nu'] + else: + print(' - Without margins') + if T > 1: + mu = np.ones((T, N), dtype=dtype) + if directed: + nu = np.ones((T, D), dtype=dtype) + else: + mu = np.ones((N), dtype=dtype) + if directed: + nu = np.ones((D), dtype=dtype) + + if with_absent_nodes and T > 1: + absent_row_nodes = absent_nodes['absent_row_nodes'] + res_row = {t: 0 for t in range(T)} + for t, i in absent_row_nodes: + res_row[t] += 1 + print(f' - absent row nodes {res_row}') + if directed: + absent_col_nodes = absent_nodes['absent_col_nodes'] + res_col = {t: 0 for t in range(T)} + for t, i in absent_col_nodes: + res_col[t] += 1 + print(f' - absent col nodes {res_col}') + + # converts the static parameters to + # dynamic ones for genericity + if gamma.ndim == 2 and T > 1: + gamma = np.stack([gamma for _ in range(T)]) + + if mu.ndim == 1 and T > 1: + mu = np.stack([mu for _ in range(T)]) + + if directed: + if nu.ndim == 1 and T > 1: + nu = np.stack([nu for _ in range(T)]) + + if block_sparsity_matrix.ndim == 2 and T > 1: + 
block_sparsity_matrix = np.stack([block_sparsity_matrix for _ in range(T)]) + + # generates the latent processes + Z0 = np.random.choice(Kz, size=(N), p=alpha) + + if model_type == 'LBM': + W0 = np.random.choice(Kw, size=(D), p=beta) + + if T > 1: + Z = np.zeros((T, N), dtype=dtype) + Z[0] = Z0.copy() + for t in range(1, T): + for i in range(N): + k = Z[t - 1, i] + Z[t, i] = np.random.choice(Kz, p=pi[k, :]) + + if model_type == 'LBM': + W = np.zeros((T, D), dtype=dtype) + W[0] = W0.copy() + for t in range(1, T): + for j in range(D): + k = W[t - 1, j] + W[t, j] = np.random.choice(Kw, p=rho[k, :]) + else: + Z = Z0 + if model_type == 'LBM': + W = W0 + + # Generate the data matrix with the latent processes + if T > 1: # pylint: disable=R1702 + X = np.zeros((T, N, D), dtype=dtype) + if not directed: + # we have model_type == SBM + for t in range(T): + for i in range(N): + for j in range(i + 1): + k, l = Z[t, i], Z[t, j] + val = sample_edge( + mu[t, i], mu[t, j], + gamma[t, k, l], + block_sparsity_matrix[t, k, l] + ) + X[t, i, j] = val + X[t, j, i] = val + else: + for t in range(T): + for i in range(N): + for j in range(D): + if model_type == 'LBM': + k, l = Z[t, i], W[t, j] + else: + k, l = Z[t, i], Z[t, j] + val = sample_edge( + mu[t, i], nu[t, j], + gamma[t, k, l], + block_sparsity_matrix[t, k, l] + ) + X[t, i, j] = val + else: + X = np.zeros((N, D), dtype=dtype) + if not directed: + for i in range(N): + for j in range(i + 1): + k, l = Z[i], Z[j] + val = sample_edge( + mu[i], mu[j], + gamma[k, l], + block_sparsity_matrix[k, l] + ) + X[i, j] = val + X[j, i] = val + else: + for i in range(N): + for j in range(D): + if model_type == 'LBM': + k, l = Z[i], W[j] + else: + k, l = Z[i], Z[j] + val = sample_edge( + mu[i], nu[j], + gamma[k, l], + block_sparsity_matrix[k, l] + ) + X[i, j] = val + + if not self_loops: + if T > 1: + for t in range(T): + np.fill_diagonal(X[t], 0) + else: + np.fill_diagonal(X, 0) + + # adds noise to the resulting matrix + noise = 
np.random.normal(loc=0., scale=noise_level, size=X.shape).astype(dtype) + + if not self_loops: + if T == 1: + np.fill_diagonal(noise, 0) + else: + for t in range(T): + np.fill_diagonal(noise[t], 0) + + np.add(X, noise, out=X) + np.clip(X, a_min=0, a_max=None, out=X) + + if with_absent_nodes and T > 1: + for t, i in absent_row_nodes: + X[t, i, :] = 0 + if not (directed and model_type == 'SBM'): + Z[t, i] = -1 + if directed: + for t, j in absent_col_nodes: + X[t, :, j] = 0 + if model_type == 'SBM': + # see below + pass + elif model_type == 'LBM': + W[t, j] = -1 + + # in the directed case with SBM and different row and col absent nodes a node is + # considered absent in Z if it is absent in the row AND the col + if model_type == 'SBM': + for t, i in absent_row_nodes: + if (t, i) in absent_col_nodes: + Z[t, i] = -1 + + if T == 1: + sparsity = (X == 0).sum() / (N * D) + print(f' - Sparsity {100 * sparsity: .2f}%') + else: + print(' - Sparsity', end=' ') + for t in range(T): + sparsity = (X[t] == 0).sum() / (N * D) + print(f't = {t} {100 * sparsity: .2f}%', end='|') + + if not with_absent_nodes: + dynamic_offset = int(T > 1) + if (X.sum(dynamic_offset) == 0).any() or (X.sum(dynamic_offset + 1) == 0).any(): + print( + 'Warning : with_absent_nodes is False but there are ' + 'nodes with a zero in- or out-degree. They will be ' + 'classified to cluster -1, which is different from' + 'their true cluster. The model parameters are probably ' + 'too low' + ) + return X, Z, W + + +def AR1_process_margins(x0, a, sigma2, T): + """ + given x0, array of margins at time t=0, generates + margins with T samples for each margin + x[t+1] = N(a * x[t] + c, sigma2) + where c is adapted so that the margins are + increasing over time + """ + max_trials = 5000 + n_trials = 0 + + def sample(x0, a, sigma2, T): + c = .1 + x0 * (1. - a) + # c = 0. 
+ res = np.zeros(shape=(T, x0.shape[0]), dtype='float32') + res[0] = x0 + for t in range(1, T): + mu = a * res[t - 1] + c + cov = sigma2 * np.eye(x0.shape[0]) + res[t] = np.random.multivariate_normal(mu, cov, 1)[0] + return res + + res = np.full((T, x0.shape[0]), -1.) + while (res <= 0.).any(): + res = sample(x0, a, sigma2, T) + n_trials += 1 + if n_trials == max_trials: + raise Exception('Could not sample all non negative values from AR1(x0, a, sigma2)') + return res + + +def generate_margins( + T, N, D, constant_margins, start, stop, step, + directed, order_power_law, + ar_margins=None, a_ar=None, sigma2_ar=None +): + """ + Generates margins : arrays of shape (T, N) or (N) + values x sampled in np.arange(start, stop, step) + with probability propto x ^ order_power_law + """ + assert order_power_law < 0 + if D is None: + D = N + nax = np.newaxis + + if constant_margins: + print('Constant margins') + else: + print('Variable margins') + + values_margins = np.arange(start, stop, step) + proba_distrib = values_margins ** order_power_law + np.divide(proba_distrib, proba_distrib.sum(), out=proba_distrib) + if T == 1 or constant_margins or ar_margins: + mu = np.random.choice( + a=values_margins, + size=(N), + p=proba_distrib + ) + if directed: + nu = np.random.choice( + a=values_margins, + size=(D), + p=proba_distrib + ) + else: + mu = np.random.choice( + a=values_margins, + size=(T, N), + p=proba_distrib + ) + if directed: + nu = np.random.choice( + a=values_margins, + size=(T, D), + p=proba_distrib + ) + if T > 1 and constant_margins: + mu = np.concatenate([mu[nax, :] for t in range(T)], axis=0) + if directed: + nu = np.concatenate([nu[nax, :] for t in range(T)], axis=0) + + if T > 1 and ar_margins: + mu = AR1_process_margins(mu, a_ar, sigma2_ar, T) + if directed: + nu = AR1_process_margins(nu, a_ar, sigma2_ar, T) + + if directed: + return mu, nu + return mu, None + + +def generate_diag_transition_matrix(Kzw, val_diag): + """ + val_diag : the proba of transition to the 
same state + """ + val_off_diag = (1. - val_diag) / (Kzw - 1) + return (val_diag - val_off_diag) * np.eye((Kzw)) + val_off_diag + + +def generate_initial_proportions(Kzw, alpha_dirichlet): + """ + generates a np.array of shape (Kzw, ) from a dirichlet + distribution with constant parameter alpha_dirichlet + """ + return np.random.dirichlet(np.full(Kzw, alpha_dirichlet), size=1)[0] + + +def sample_absent_nodes(T, ND, min_proba_t=None, max_proba_t=None, proba_absent=None): + """ + Samples the time and node index of absent nodes. + If min_proba_t and max_proba_t are given, samples for each + time step t a probability p_t from Unif(min_proba_t, max_proba_t), + then samples the number n_abs_t of absent nodes at time t + from Binom(ND, p_t), then samples n_abs_t nodes without + replacement. + If min_proba_t=None, max_proba_t=None and proba_absent is not + None, then all p_t = proba_absent + """ + if min_proba_t is not None and max_proba_t is not None: + assert proba_absent is None + proba_absent = np.random.uniform(low=min_proba_t, high=max_proba_t, size=T) + + absent = [] + n_absent_t = np.random.binomial(ND, proba_absent, size=(T)) + for t, n_abs in enumerate(n_absent_t): + i_absent_t = np.sort(np.random.choice(ND, size=n_abs, replace=False)) + absent += list(zip([t] * n_abs, list(i_absent_t))) + + return absent diff --git a/dcblockmodels/metrics.py b/dcblockmodels/metrics.py new file mode 100644 index 0000000..37b7344 --- /dev/null +++ b/dcblockmodels/metrics.py @@ -0,0 +1,473 @@ +""" +Methods to measure the quality of a partition of the data +or to measure the class separability given the ground truth +""" + +import warnings + +import numpy as np +import prince +import pandas as pd + +from sklearn.metrics import (adjusted_rand_score, + normalized_mutual_info_score, + confusion_matrix) +from scipy.optimize import linear_sum_assignment +from sparsebm import CARI + + +def AFD(X, Z, n_factors=5): + """ + Discriminative Factorial Analysis + 
https://marie-chavent.perso.math.cnrs.fr/wp-content/uploads/2013/10/AFD.pdf + """ + assert X.ndim == 2 + + n, d = X.shape + K = np.unique(Z).shape[0] + + X_centered = X - X.mean(axis=0) + V = 1 / n * X_centered.T @ X_centered + + B = np.zeros((d, d)) + for k in range(K): + ind_k = np.where(Z == k)[0] + if ind_k.shape[0] == 0: + continue + X_k = X_centered[ind_k, :] + group_gravity = X_k.mean(axis=0) + B += ind_k.shape[0] * np.outer(group_gravity, group_gravity) + + B = B / n + + # just to check that V = W + B + # W = np.zeros((d, d)) + # for k in range(K): + # X_k = X_centered[np.where(Z == k)[0], :] + # X_g = X_k - X_k.mean(axis=0) + # for i in range(X_g.shape[0]): + # W += np.outer(X_g[i], X_g[i]) + # + # W = W / n + + _eigen_values, eigen_vectors = np.linalg.eigh(np.linalg.inv(V) @ B) + eigen_vectors = eigen_vectors[:, ::-1] + eigen_vectors = eigen_vectors[:, :n_factors] + + discriminant_power = np.zeros((n_factors)) + for k in range(n_factors): + u = eigen_vectors[:, k] + discriminant_power[k] = (u.T.dot(B).dot(u) / u.T.dot(V).dot(u)) + + return eigen_vectors, discriminant_power + + +def AFD_CA_linear_separation(X, Z, W, n_components, absent_row_nodes=None, absent_col_nodes=None): + """ + Level of linear separability of the classes after projection onto R^N using correspondence + analysis + """ + warnings.filterwarnings("ignore", category=FutureWarning) + + assert X.ndim == 2 + N, D = X.shape + present_row = np.setdiff1d(np.arange(N), absent_row_nodes) + present_col = np.setdiff1d(np.arange(D), absent_col_nodes) + X_ = X[np.ix_(present_row, present_col)] + X_ = X_ + 1e-10 + # N_, D_ = X_.shape + + if N == D and not np.array_equal(present_row, present_col): + print('Special case: SBM with different absent row and col nodes.') + + # for directed SBM with different absent + # row and col nodes, X_ is not square + # and W_ is taken from Z with absent col nodes + Z_ = Z[present_row] + if W is not None: + W_ = W[present_col] + else: + W_ = Z[present_col] + + ca = 
prince.CA( + n_components=n_components, + n_iter=30, + copy=True, + check_input=True, + engine='auto', + random_state=42 + ) + df = pd.DataFrame(X_) + ca = ca.fit(df) + row_factor_score = ca.row_coordinates(df).values + col_factor_score = ca.column_coordinates(df).values + + lambdas_row = AFD(row_factor_score, Z_, n_factors=n_components)[1] + lambdas_col = AFD(col_factor_score, W_, n_factors=n_components)[1] + return lambdas_row, lambdas_col + + +def sort_partitions(criteria, partitions, n_first): + """ + Sorts multiple partitions based on their log likelihoods + and returns the n_first + criteria: list of lists, criteria[k]: list of + log likelihoods at each iteration for initialization k + of the model + partitions: list of lists: len(partitions) = 1 for dSBM/SBM + and len(partitions) = 2 for dLBM/LBM. + partitions[0][k] is the row partition obtained at the kth + initialization of the model. + """ + assert n_first <= len(criteria) + assert len(partitions[0]) == len(criteria) + + sorted_partitions = sorted( + zip(criteria, *(part for part in partitions)), + key=lambda x: x[0][-1], + reverse=True + ) + return [x[1:] for x in sorted_partitions[:n_first]] + + +def always_absent_nodes(ZW): + """ + ZW : where absent nodes have already + been put in cluster -1 + """ + return np.where((ZW == -1).all(axis=0))[0] + + +def cmat_clustering(cmat): + """ + input : np.array shape (n,n) : a confusion matrix + returns : np.array shape (n,n) : a confusion matrix for + clustering, with the permutation leading to the + maximal diagonal + """ + indexes = linear_sum_assignment(-cmat) + return cmat[:, indexes[1]] + + +def accuracy(cmat): + """ + Accuracy of a confusion matrix (diagonal sum over whole matrix sum) + """ + return np.trace(cmat) / cmat.sum() + + +def get_metrics(Z, W, Z_true, W_true, absent_nodes=None): + """ + If absent_nodes is not None : returns the metrics only + considering present nodes, i.e. nodes in + cluster K do not bias the performance metrics. 
+ We nevertheless consider appearing nodes + and the metrics are given for {always_present_nodes + appearing_nodes} + """ + def clustering_accuracy(p1, p2): + if len(p1) == 0: + return np.nan + return accuracy(cmat_clustering(confusion_matrix(p1, p2))) + + def ari_(p1, p2): + if len(p1) == 0: + return np.nan + return adjusted_rand_score(p1, p2) + + def nmi_(p1, p2): + if len(p1) == 0: + return np.nan + return normalized_mutual_info_score(p1, p2, average_method='arithmetic') + + if Z.ndim == 1: + T = 1 + elif Z.ndim == 2: + T = Z.shape[0] + else: + raise ValueError + + res_dic = {} + + if absent_nodes is not None: + absent_row_nodes = absent_nodes.absent_row_nodes + absent_col_nodes = absent_nodes.absent_col_nodes + appearing_row_nodes = absent_nodes.appearing_row_nodes + appearing_col_nodes = absent_nodes.appearing_col_nodes + else: + absent_row_nodes, absent_col_nodes = None, None + appearing_row_nodes, appearing_col_nodes = None, None + + if Z is not None and W is not None and Z_true is not None and W_true is not None: + if T == 1: + if Z is not None and W is not None: + cari = CARI(Z_true, W_true, Z, W) + res_dic['cari'] = cari + else: + Z_without_absent = partition_without_absent(Z, absent_row_nodes) + Z_true_without_absent = partition_without_absent(Z_true, absent_row_nodes) + W_without_absent = partition_without_absent(W, absent_col_nodes) + W_true_without_absent = partition_without_absent(W_true, absent_col_nodes) + caris = np.array([ + CARI( + Z_true_without_absent[t], + W_true_without_absent[t], + Z_without_absent[t], + W_without_absent[t] + ) + for t in range(T)]) + cari_avg = np.nanmean(caris) + + cari_f = CARI( + np.concatenate([Z_true_without_absent[t] for t in range(T)]), + np.concatenate([W_true_without_absent[t] for t in range(T)]), + np.concatenate([Z_without_absent[t] for t in range(T)]), + np.concatenate([W_without_absent[t] for t in range(T)]) + ) + res_dic['cari_f_without_absent'] = cari_f + res_dic['cari_avg_without_absent'] = cari_avg + 
res_dic['caris_without_absent'] = caris + + list_dims = [] + if Z is not None and Z_true is not None: + list_dims.append(('Z', Z, Z_true, absent_row_nodes, appearing_row_nodes)) + if W is not None and W_true is not None: + list_dims.append(('W', W, W_true, absent_col_nodes, appearing_col_nodes)) + + for name, ZW, ZW_true, absent_nodes_, appearing_nodes in list_dims: + if ZW is not None: + assert len(ZW) == len(ZW_true) + + if T == 1: + ari = ari_(ZW_true, ZW) + nmi = nmi_(ZW_true, ZW) + acc = clustering_accuracy(ZW_true, ZW) + + res_dic['ari_' + name] = ari + res_dic['nmi_' + name] = nmi + res_dic['acc_' + name] = acc + else: + ZW_without_absent = partition_without_absent(ZW, absent_nodes_) + ZW_true_without_absent = partition_without_absent(ZW_true, absent_nodes_) + + ZW_appearing = partition_apearing_nodes(ZW, appearing_nodes) + ZW_true_appearing = partition_apearing_nodes(ZW_true, appearing_nodes) + + flat_ZW_without_absent = np.concatenate( + [ZW_without_absent[t] for t in range(T)] + ) + flat_ZW_true_without_absent = np.concatenate( + [ZW_true_without_absent[t] for t in range(T)] + ) + flat_ZW_appearing = np.concatenate( + [ZW_appearing[t - 1] for t in range(1, T)] + ) + flat_ZW_true_appearing = np.concatenate( + [ZW_true_appearing[t - 1] for t in range(1, T)] + ) + aris_without_absent = np.array([ + ari_(ZW_true_without_absent[t], ZW_without_absent[t]) + for t in range(T) + ]) + nmis_without_absent = np.array([ + nmi_(ZW_true_without_absent[t], ZW_without_absent[t]) + for t in range(T) + ]) + accs_without_absent = np.array([ + clustering_accuracy(ZW_true_without_absent[t], ZW_without_absent[t]) + for t in range(T) + ]) + ari_avg_without_absent = np.nanmean(aris_without_absent) + nmi_avg_without_absent = np.nanmean(nmis_without_absent) + acc_avg_without_absent = np.nanmean(accs_without_absent) + + ari_f_without_absent = ari_(flat_ZW_true_without_absent, flat_ZW_without_absent) + nmi_f_without_absent = nmi_(flat_ZW_true_without_absent, flat_ZW_without_absent) + 
acc_f_without_absent = clustering_accuracy( + flat_ZW_true_without_absent, + flat_ZW_without_absent + ) + res_dic['aris_without_absent_' + name] = aris_without_absent + res_dic['nmis_without_absent_' + name] = nmis_without_absent + res_dic['accs_without_absent_' + name] = accs_without_absent + res_dic['ari_avg_without_absent_' + name] = ari_avg_without_absent + res_dic['nmi_avg_without_absent_' + name] = nmi_avg_without_absent + res_dic['acc_avg_without_absent_' + name] = acc_avg_without_absent + res_dic['ari_f_without_absent_' + name] = ari_f_without_absent + res_dic['nmi_f_without_absent_' + name] = nmi_f_without_absent + res_dic['acc_f_without_absent_' + name] = acc_f_without_absent + + if absent_nodes_ is not None: + aris_appearing = np.array([ + ari_(ZW_true_appearing[t - 1], ZW_appearing[t - 1]) + for t in range(1, T) + ]) + nmis_appearing = np.array([ + nmi_(ZW_true_appearing[t - 1], ZW_appearing[t - 1]) + for t in range(1, T) + ]) + accs_appearing = np.array([ + clustering_accuracy(ZW_true_appearing[t - 1], ZW_appearing[t - 1]) + for t in range(1, T) + ]) + ari_avg_appearing = np.nanmean(aris_appearing) + nmi_avg_appearing = np.nanmean(nmis_appearing) + acc_avg_appearing = np.nanmean(accs_appearing) + + ari_f_appearing = ari_(flat_ZW_true_appearing, flat_ZW_appearing) + nmi_f_appearing = nmi_(flat_ZW_true_appearing, flat_ZW_appearing) + acc_f_appearing = clustering_accuracy( + flat_ZW_true_appearing, flat_ZW_appearing) + + res_dic['aris_appearing_' + name] = aris_appearing + res_dic['nmis_appearing_' + name] = nmis_appearing + res_dic['accs_appearing_' + name] = accs_appearing + res_dic['ari_avg_appearing_' + name] = ari_avg_appearing + res_dic['nmi_avg_appearing_' + name] = nmi_avg_appearing + res_dic['acc_avg_appearing_' + name] = acc_avg_appearing + res_dic['ari_f_appearing_' + name] = ari_f_appearing + res_dic['nmi_f_appearing_' + name] = nmi_f_appearing + res_dic['acc_f_appearing_' + name] = acc_f_appearing + + return res_dic + + +def print_metrics(Z, 
W, Z_true, W_true, absent_nodes=None, print_each_timestep=False): + """Print all metrics in terminal""" + if Z.ndim == 1: + T = 1 + elif Z.ndim == 2: + T, _ = Z.shape + else: + raise ValueError + + metrics_dic = get_metrics(Z, W, Z_true, W_true, absent_nodes) + + if T > 1: + if 'cari_f_without_absent' in metrics_dic: + print(f'global CARI : {(100 * metrics_dic["cari_f_without_absent"]):.2f}') + print(f'local AVG CARI : {(100 * metrics_dic["cari_avg_without_absent"]):.2f}') + if print_each_timestep: + print('\n\nAt each timestep: ') + for t in range(T): + print(f't = {t} CARI : {100 * metrics_dic["caris_without_absent"][t]:.2f}') + print('\n') + else: + if 'cari' in metrics_dic: + print(f'CARI : {100 * metrics_dic["cari"]:.2f}\n') + + if absent_nodes is not None: + absent_row_nodes = absent_nodes.absent_row_nodes + absent_col_nodes = absent_nodes.absent_col_nodes + n_absent_row = absent_nodes.n_absent_row_tot + n_absent_col = absent_nodes.n_absent_col_tot + else: + absent_row_nodes, absent_col_nodes = None, None + n_absent_row, n_absent_col = None, None + + list_dims = [] + if Z is not None and Z_true is not None: + list_dims.append(('Z', absent_row_nodes, n_absent_row)) + if W is not None and W_true is not None: + list_dims.append(('W', absent_col_nodes, n_absent_col)) + + for name, absent_nodes_, n_absent in list_dims: + print(' ' + name, end='\n\n') + + if absent_nodes_ is not None: + print_for_absent_nodes = n_absent > 0 + else: + print_for_absent_nodes = False + + if T == 1: + print( + f'ARI : {100 * metrics_dic["ari_" + name]:.2f}, ' + f'NMI : {100 * metrics_dic["nmi_" + name]:.2f}, ' + f'ACC : {100 * metrics_dic["acc_" + name]:.2f}' + ) + else: + print('Without absent nodes:') + print(f'local AVG ' + f'ARI : {100 * metrics_dic["ari_avg_without_absent_" + name]:.2f}, ' + f'NMI : {100 * metrics_dic["nmi_avg_without_absent_" + name]:.2f}, ' + f'ACC : {100 * metrics_dic["acc_avg_without_absent_" + name]:.2f}' + ) + print('global ' + f'ARI : {100 * 
metrics_dic["ari_f_without_absent_" + name]:.2f}, ' + f'NMI : {100 * metrics_dic["nmi_f_without_absent_" + name]:.2f}, ' + f'ACC : {100 * metrics_dic["acc_f_without_absent_" + name]:.2f}' + ) + if print_for_absent_nodes: + print() + print('Appearing nodes: ') + print('local AVG ' + f'ARI : {100 * metrics_dic["ari_avg_appearing_" + name]:.2f}, ' + f'NMI : {100 * metrics_dic["nmi_avg_appearing_" + name]:.2f}, ' + f'ACC : {100 * metrics_dic["acc_avg_appearing_" + name]:.2f}' + ) + print('global ' + f'ARI : {100 * metrics_dic["ari_f_appearing_" + name]:.2f}, ' + f'NMI : {100 * metrics_dic["nmi_f_appearing_" + name]:.2f}, ' + f'ACC : {100 * metrics_dic["acc_f_appearing_" + name]:.2f}' + ) + + if print_each_timestep: + print('\n\nAt each timestep: ') + print('Without absent nodes:') + for t in range(T): + print(f't = {t} ' + f'ARI : {100 * metrics_dic["aris_without_absent_" + name][t]:.2f}, ' + f'NMI : {100 * metrics_dic["nmis_without_absent_" + name][t]:.2f}, ' + f'ACC : {100 * metrics_dic["accs_without_absent_" + name][t]:.2f}' + ) + print('\n') + + if print_for_absent_nodes: + print('Appearing nodes: ') + for t in range(1, T): + print(f't = {t} ' + f'ARI : {100 * metrics_dic["aris_appearing_" + name][t - 1]:.2f}, ' + f'NMI : {100 * metrics_dic["nmis_appearing_" + name][t - 1]:.2f}, ' + f'ACC : {100 * metrics_dic["accs_appearing_" + name][t - 1]:.2f}' + ) + print('\n') + + +def partition_apearing_nodes(ZW, appearing_nodes): + """ + returns a list of arrays containing the elements of + the rows of ZW that correspond to absent nodes + """ + if appearing_nodes is not None: + return [ZW[t][appearing_nodes[t]] for t in range(1, len(ZW))] + + return [ZW[t] for t in range(1, len(ZW))] + + +def partition_without_absent(ZW, absent_nodes): + """ + returns the partition Z with absent nodes removed + as a list of T ndarray of different sizes + Z : (T, N) + """ + if absent_nodes is not None: + return [np.delete(ZW[t], absent_nodes[t]) for t in range(ZW.shape[0])] + + return [ZW[t] 
for t in range(ZW.shape[0])] + + +def get_prop_clusters(ZW): + """ + returns a np.array of size T x Kzw + containing the proportion of points in + each cluster at each time step + """ + T = len(ZW) + flat_ZW = np.concatenate([ZW[t] for t in range(T)]) + Kzw = np.unique(flat_ZW).shape[0] + + res = np.zeros((T, Kzw)) + for t in range(T): + for k in range(Kzw): + res[t, k] = (ZW[t] == k).sum() + res = res / res.sum(axis=1, keepdims=True) + return res diff --git a/dcblockmodels/models/__init__.py b/dcblockmodels/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dcblockmodels/models/blockmodel.py b/dcblockmodels/models/blockmodel.py new file mode 100644 index 0000000..18d047e --- /dev/null +++ b/dcblockmodels/models/blockmodel.py @@ -0,0 +1,367 @@ +"""Base class for the Block Models""" + +import time +import os +import sys +import warnings +import pickle + +import numpy as np +import scipy as sp +from numba import NumbaPendingDeprecationWarning +from sklearn.base import BaseEstimator +from sklearn.utils import check_random_state + +np.seterr(all='raise') + +# FIXME Probably a way to remove warnings? 
(problem in the whole program) +sys.stderr = open(os.devnull, "w") # pylint: disable=R1732,W1514 + +warnings.filterwarnings("ignore", category=NumbaPendingDeprecationWarning) +warnings.filterwarnings("ignore", category=DeprecationWarning) + + +class BlockModel(BaseEstimator): + """Base class for the dynamic and semi-supervised LBM""" + + model_parameters = [] # filled in subclasses + + # The default blockmodel parameters + # overwritten by the params given in dic blockmodel_params + # in the construction of BlockModel + n_iter_min = 5 + n_init_clustering_consensus = 100 + loc_random_margins = 1e-8 # mean and std used for the initialization + scale_random_margins = 1e-3 # of the margins if self.type_init_margins == 'random' + + # checking the inputs + MODEL_TYPES = [ + 'with_margins', # static & dynamic + 'without_margins' # static & dynamic + ] + EM_TYPES = ['CEM', 'VEM'] + INITS_TYPES = [ + 'random', + 'given', + 'kmeans', + 'skmeans' + ] + TYPES_INIT_MARGINS = ['ones', 'X.', 'random', 'given'] + DTYPES = ['float32', 'float64'] + VERBOSE_LEVELS = [0, 1, 2] + + def __init__( + self, + Kz, + Kw, + init_type, + em_type, + n_init, + n_init_clustering, + node_perturbation_rate, + model_type, + type_init_margins, + min_float, + min_gamma, + min_proba_Z, + min_proba_W, + min_margin, + min_proba_mixture_proportions, + threshold_absent_nodes, + dtype, + random_state, + max_iter, + tol_iter, + n_jobs, + verbose, + blockmodel_params, + model_id, + debug_list, + debug_output + ): + super().__init__() + + for arg in ['Kz', 'Kw', 'init_type', 'em_type', 'n_init', 'model_type', 'min_gamma']: + if arg is None: + raise (f'Argument {arg} for class {type(self).__name__} must be initialized ' + 'explicitly. 
See documentation for possible values.') + + assert Kz > 1 + assert Kw > 1 + + assert init_type in self.INITS_TYPES + assert em_type in self.EM_TYPES + assert model_type in self.MODEL_TYPES + + assert dtype in self.DTYPES + assert verbose in self.VERBOSE_LEVELS + assert type_init_margins in self.TYPES_INIT_MARGINS + + assert min_float > 0. + assert min_gamma >= 0. + assert 0. < min_proba_mixture_proportions < 1. + + self.Kz = Kz + self.Kw = Kw + + self.init_type = init_type + self.em_type = em_type + self.n_init = n_init + self.n_init_clustering = n_init_clustering + self.node_perturbation_rate = node_perturbation_rate + self.type_init_margins = type_init_margins + + self.min_float = min_float + self.min_gamma = min_gamma + self.min_proba_Z = min_proba_Z + self.min_proba_W = min_proba_W + self.min_margin = min_margin + self.min_proba_mixture_proportions = min_proba_mixture_proportions + self.threshold_absent_nodes = threshold_absent_nodes + + self.dtype = dtype + self.random_state = check_random_state(random_state) + + self.max_iter = max_iter + self.tol_iter = tol_iter + + self.n_jobs = n_jobs + self.verbose = verbose + + self.blockmodel_params = blockmodel_params + self._set_blockmodel_params(blockmodel_params) + + self.model_id = int(time.time()) if model_id is None else model_id + self.debug_list = debug_list if debug_list is not None else [] + self.debug_output = debug_output + + self.cluster_perturbation_rate = None + + self.N, self.D, self.T = None, None, None + self.X = None + self.W, self.Z = None, None + self.alpha, self.beta = None, None + self.gamma, self.mu, self.nu = None, None, None + self.log_alpha, self.log_beta, self.log_pi, self.log_rho = None, None, None, None + self.Xi_, self.X_j = None, None + + self.qz, self.qw = None, None + self.current_init_W, self.current_init_Z = None, None + self.global_init_W, self.global_init_Z = None, None + + self.best_parameters, self.best_criterion = None, None + + self.model_type = model_type + + 
self.debug_counts = None + self.debug_path = None + + self.fitted = False + + self.all_row_partitions = None + self.all_col_partitions = None + self.all_iter_criterions = None + self.all_intermediate_iter_criterions = None + self.all_icls = None + self.all_regularizations = None + self.all_intermediate_regularizations = None + + def _set_blockmodel_params(self, blockmodel_params): + """ + overwrites the default params with given params in __init__ + """ + params_names = [ + "min_proba_mixture_proportions", + "min_float_margins", + "n_iter_min", + "loc_random_margins", + "scale_random_margins", + ] + if blockmodel_params is not None: + for param_name, param_value in blockmodel_params.items(): + if param_name in params_names: + setattr(self, param_name, param_value) + + # ################### Verbose #################### # + + def _print_verbose_msg_iter(self, n_iter): + if self.verbose == 1: + if (n_iter % 5 == 4) or (n_iter == self.max_iter - 1): + print(f' Iteration {n_iter + 1} on {self.max_iter}') + elif self.verbose == 2: + print(f' Iteration {n_iter + 1} on {self.max_iter}') + + def _print_verbose_msg_init(self, n_init): + if self.verbose >= 1: + print() + print(f'*** Model {self.model_id}: ' + f'initialization {n_init + 1} on {self.n_init} ***') + + def _print_verbose_converged(self, n_iter_tot, ind_smoothing): + if self.verbose >= 1: + if ind_smoothing is None: + print(f' {n_iter_tot + 1} Iterations are enough') + else: + if ind_smoothing % 5 == 4: + print(f' smoothing step {ind_smoothing + 1} at iter {n_iter_tot + 1}') + + def _print_verbose_smoothing(self, smoothing_schedule, n_iter_supp_smoothing): + if self.verbose >= 1: + n_steps = len(smoothing_schedule) + n_iter_supp = n_steps * n_iter_supp_smoothing + print( + f' Smoothing: {n_iter_supp} new iter max ({n_steps} steps, ' + f'{n_iter_supp_smoothing} iter max per tau)' + ) + + # ################### Debug #################### # + + def _init_debug(self): + """ + creates appropriate directories where + the 
parameters we want to debug will be saved + """ + self.debug_output.mkdir(exist_ok=True) + + # FIXME Verify that the path is a Pathlib.path! + + # FIXME And create directory dynamically! + def _unique_dir(base_directory, name_pattern): + c = 0 + while True: + c += 1 + directory = base_directory / name_pattern.format(c) + if not directory.is_dir(): + return directory + + # for each init, the number of times debug() was called + self.debug_counts = np.zeros((self.n_init), dtype='int') + + # name_pattern = 'debug_model_{}'.format(self.model_id) + '_{:03d}' + # TODO verify behaviour + name_pattern = f'debug_model_{self.model_id}_{{:03d}}' + self.debug_path = _unique_dir(self.debug_output, name_pattern) + + if len(self.debug_list) > 0: + os.mkdir(self.debug_path) + for debug_item in self.debug_list: + os.mkdir(self.debug_path / debug_item) + + def _debug(self, init_nb): + """ + e.g. debug_list = ['Z', 'gamma'] + adds Z_0.npy in dir /Z and gamma_0.npy in /gamma + then adds Z_1.npy and gamma_1.npy + etc ... + + call the method every time you want to log + the state of a given variable + """ + for debug_item in self.debug_list: + item = getattr(self, debug_item, None) + item_id = f'{debug_item}_init_{init_nb}_{self.debug_counts[init_nb]}' + item_path = self.debug_path / debug_item / item_id + _save_item(item, item_path) + + self.debug_counts[init_nb] += 1 + + def has_sparse_ZW(self): + """Check whether self.Z is either sparse or a list""" + return sp.sparse.issparse(self.Z) or isinstance(self.Z, list) + + def get_debug(self, sublist=None): + """ + returns a dictionnary containing the values of each + item in self.debug_list (e.g. 
debug_list = ['Z', 'gamma']) + at each iteration of the algorithm + if sublist is not None, only returns the elements that are + both in self.debug_list and sublist + """ + if sublist is not None: + debug_list = [x for x in sublist if x in self.debug_list] + else: + debug_list = self.debug_list + + res = {} + for debug_item in debug_list: + if debug_item in ['Z', 'W']: + ext = '.npz' if self.has_sparse_ZW() else '.npy' + else: + ext = '.npy' + res_item = [] # one list for a given item, e.g. gamma + for init_nb in range(self.n_init): + res_init = [] # one list for each initialization of an item + for debug_count in range(self.debug_counts[init_nb]): + item_id = (debug_item + f'_init_{init_nb}_{debug_count}' + ext) + item_path = self.debug_path / debug_item / item_id + item = _load_item(item_path) + res_init.append(item) + res_item.append(res_init) + res[debug_item] = res_item + return res + + # ################### Model utils #################### # + + def _set_best_parameters(self, criterion, cur_init, cur_iter): + if cur_init == 0 and cur_iter == 0: + self.best_parameters = [[] for _ in range(self.n_init)] + + if cur_iter == 0: + self.best_criterion = criterion + self._set_new_best_params(cur_init) + else: + if criterion > self.best_criterion: + self.best_criterion = criterion + self._set_new_best_params(cur_init) + + def _set_new_best_params(self, cur_init): + best_params = [self.best_criterion] + for param_name in self.model_parameters: + best_params.append(getattr(self, param_name)) + self.best_parameters[cur_init] = best_params + + def _write_best_parameters(self): + """ + writes attribute best_parameters + [(criterion(init_i), + {'param_name': best_value_param_init_i, ...}), ...] 
+ ordered by criterion + """ + self.best_parameters = [ + (self.best_parameters[i][0], + dict(zip(self.model_parameters, self.best_parameters[i][1:]))) + for i in range(self.n_init) + ] + self.best_parameters = sorted(self.best_parameters, key=lambda x: x[0], reverse=True) + + def save(self, path='.', modelname=None): + """ + Saves the model at path path with the name modelname as a pickle file. + """ + if modelname is None: + modelname = f'{type(self).__name__}_saved_at_{int(time.time())}' + + full_path = path + '/' + modelname + os.makedirs(full_path, exist_ok=True) + + model_path = full_path + '/' + 'model.pickle' + with open(model_path, 'wb') as f: + pickle.dump(self, f) + print('Model saved at : ' + model_path) + + +def _save_item(item, path): + if isinstance(item, np.ndarray): + np.save(path.with_suffix('.npy'), item) + elif sp.sparse.issparse(item): + sp.sparse.save_npz(path.with_suffix('.npz'), item) + else: + raise TypeError + + +def _load_item(path): + if path.suffix == '.npy': + return np.load(path, allow_pickle=True) + if path.suffix == '.npz': + return sp.sparse.load_npz(path) + raise TypeError diff --git a/dcblockmodels/models/dlbm.py b/dcblockmodels/models/dlbm.py new file mode 100644 index 0000000..006ad3c --- /dev/null +++ b/dcblockmodels/models/dlbm.py @@ -0,0 +1,1217 @@ +"""Dynamic Latent Block Model""" + +import os +import sys + +import warnings +import numpy as np +from numba import NumbaPendingDeprecationWarning + +from dcblockmodels import metrics +from .blockmodel import BlockModel +from .utils import ( + init, + e_step, + m_step, + general, + consensus, + absent_nodes +) + +sys.stderr = open(os.devnull, "w") # pylint: disable=R1732,W1514 + +warnings.filterwarnings("ignore", category=NumbaPendingDeprecationWarning) +warnings.filterwarnings("ignore", category=DeprecationWarning) + + +class dLBM(BlockModel): + """Dynamic Latent Block Model + + Block Model General Parameters + ---------- + Kz : int + Number of row clusters + Kw : int + Number 
of column clusters + init_type : {'random', 'kmeans', 'skmeans', 'given'}, optional, + by default 'skmeans'. The methods used for the initialization of + the algorithm. It is fit once for each side (rows and columns) + and gives two global initialization partitions. These partitions + are then used to initialization-specific partitions using the + parameters `node_perturbation_rate` and `cluster_perturbation_rate`: + - '`random'` randomly initialized clusters, + - `'kmeans'` clusters initialized using `sklearn.cluster.KMeans`, + - `'skmeans'` clusters initialized using `spherecluster.SphericalKMeans`, + - `'given'` given in the `fit()` method + em_type : {'VEM', 'CEM'} + The EM algorithm used + n_init : int + Number of initializations of the model. Each initialization + will start from different initial row and col partitions + and will converge to a partition and parameter set with + a corresponding complete-data log likelihood. One can then + select one of these partitions using the `best_partition()` + method. + n_init_clustering : int, optional + the number of initializations of the clustering algorithm + chosen in `'init_type'`, by default 100 + node_perturbation_rate : float, optional + the fraction of nodes (row or cols) that are reassgined to + a random cluster at each new intitialization of the model, by default .1 + model_type : {'with_margins', 'without_margins'} + The dynamic model used: + - `'with_margins'` : a model with dynamic margins + and static connectivity matrix gamma + - `'without_margins'` : a model with only a dynamic + connectivity matrix as presented in Matias & Miele + type_init_margins : {'ones', 'X.', 'random', 'given'}, optional + How the margins are initialized, by default 'ones': + - `'ones'` : mu[t, i] = nu[t_, j] = 1 + - `'X.'` : mu = X.sum((2)) nu = X.sum((1)) + - `'random'` : mu and nu sampled from normal distribution + followed by an absolute value. 
+ - `'given'` : mu an nu are given in the `fit()` method + min_float : float, optional + The minimum float used to avoid numerical issues, by default 1e-15 + min_gamma : float + The minimum value of connectivity parameter gamma + especially important in CEM to avoid empty clusters + min_proba_Z : float, optional + The probability (between 0 and 1) at which the variational probabilities + for the row memberships are clipped, by default .05 + min_proba_W : float, optional + The probability (between 0 and 1) at which the variational probabilities + for the column memberships are clipped, by default .05 + min_margin : float, optional + The value at which the margins are clipped, by default 1e-10 + min_proba_mixture_proportions : float, optional + The probability (between 0 and 1) at which the mixture proportions + are clipped, by default .05 + threshold_absent_nodes : int, optional + Row or column nodes that have, at a given time step, a degree below + `threshold_absent_nodes` are considered absent and do not contribute + to the observed data log likelihood, by default 0 + This value can be set this to -1 to avoid considering 0-degree nodes as absent + # TODO Verify compatibility with the rest of the code + dtype : str, optional + The dtype of the floats used in the model, by default 'float32' + random_state : int | np.random.RandomState | None, optional + Creates a random state generator from a seed or uses the given + random state generator, by default None + max_iter : int, optional + The maximum number of EM iterations for a single + initialization of the model, by default 500 + tol_iter : float, optional + The decrease ratio below which we consider the algorithm + has converged, by default 1e-5 + n_jobs : int, optional + The number of jobs used in the initialization clustering algorithm + in sklearn or spherecluster, by default -1 i.e. 
all cores + verbose : {0, 1, 2}, optional + 0 is silent, 1 is normal verbose and 2 is very verbose, by default 1 + blockmodel_params : [type], optional + A dictionnary of parameters can can overwrite the class parameters + if non empty or None, by default None. The parameters that can be changed + are the following: + - `n_iter_min` + - `n_init_clustering_consensus` + - `loc_random_margins` + - `scale_random_margins` + model_id : int | None, optional + The numerical id of the model, used for debugging purposes, by default None + debug_list : list, optional + a list of strings that correspond to the names of the model attributes + whose value we wish to keep track of during the iterations of the + EM algorithm, by default []. `debug_list` must be a sublist of + `self.model_parameters`. + debug_output : str, optional + The directory where the values of the parameters in `debug_list` + will be outputed in the form of .npy files, by default '.' + + dLBM Specific Parameters + ---------- + n_iter_supp_smoothing : int, optional + The maximum number of smoothing iterations for a given step of + the smoothing schedule, by default 5 + parameter_smoothing : bool + Whether we apply parameter smoothing or not + smoothing_schedule : class SmoothingSchedule + Describes how the parameters will be smoothed during the + iterations of the EM algorithm + diag_pi_init : float, optional + The value of each entry of the diagonal of the transition + matrix pi at initialization, by default None which results + in pi being first estimated using a M-step + diag_rho_init : float, optional + The value of each entry of the diagonal of the transition + matrix pi at initialization, by default None which results + in rho being first estimated using a M-step + prior_diagonal_pi : float, optional + Used as an informative prior for the diagonal terms of pi, + and produces pseudocounts for intr-cluster transitions, + by default 0. 
+ prior_diagonal_rho : float, optional + Used as an informative prior for the diagonal terms of rho, + and produces pseudocounts for intr-cluster transitions, + by default 0. + cluster_perturbation_rate : float, optional + The probability of applying a new permutation between two + clusters at a given time step in the intial_partition. This + sampling of two-cycles continues until False is sampled or + every cluster has been permutated once, by default .1 + """ + + def __init__( + self, + # Global blockmodel arguments + Kz=None, + Kw=None, + init_type=None, + em_type=None, + n_init=None, + n_init_clustering=100, + node_perturbation_rate=.1, + model_type=None, + type_init_margins='ones', + min_float=1e-15, + min_gamma=None, + min_proba_Z=.05, + min_proba_W=.05, + min_margin=1e-10, + min_proba_mixture_proportions=.05, + threshold_absent_nodes=0, + dtype='float32', + random_state=None, + max_iter=500, + tol_iter=1e-5, + n_jobs=-1, + verbose=1, + blockmodel_params=None, + model_id=None, + debug_list=None, + debug_output='.', + # Specific DLBM arguments + n_iter_supp_smoothing=5, + parameter_smoothing=None, + smoothing_schedule=None, + diag_pi_init=None, + diag_rho_init=None, + prior_diagonal_pi=0., + prior_diagonal_rho=0., + cluster_perturbation_rate=.1, + ): + super().__init__( + Kz=Kz, + Kw=Kw, + init_type=init_type, + em_type=em_type, + n_init=n_init, + n_init_clustering=n_init_clustering, + node_perturbation_rate=node_perturbation_rate, + model_type=model_type, + type_init_margins=type_init_margins, + min_float=min_float, + min_gamma=min_gamma, + min_proba_Z=.05, + min_proba_W=.05, + min_margin=1e-10, + min_proba_mixture_proportions=min_proba_mixture_proportions, + threshold_absent_nodes=threshold_absent_nodes, + dtype=dtype, + random_state=random_state, + max_iter=max_iter, + tol_iter=tol_iter, + n_jobs=n_jobs, + verbose=verbose, + blockmodel_params=blockmodel_params, + model_id=model_id, + debug_list=debug_list, + debug_output=debug_output + ) + + for arg in 
['parameter_smoothing', 'smoothing_schedule']: + if arg is None: + raise (f'Argument {arg} for class {type(self).__name__} must be initialized ' + 'explicitly. See documentation for possible values.') + + assert isinstance(parameter_smoothing, bool) + assert ((smoothing_schedule[1:] - smoothing_schedule[:-1]) > 0).all() + assert n_iter_supp_smoothing > 0 + + self.directed = True # for compatibility with dsbm + + self.parameter_smoothing = parameter_smoothing + self.smoothing_schedule = smoothing_schedule if self.parameter_smoothing else [] + + self.diag_pi_init = diag_pi_init + self.diag_rho_init = diag_rho_init + self.prior_diagonal_pi = prior_diagonal_pi + self.prior_diagonal_rho = prior_diagonal_rho + + self.n_iter_supp_smoothing = n_iter_supp_smoothing + self.cluster_perturbation_rate = cluster_perturbation_rate + + self.pi, self.rho = None, None + self.pi_mask, self.rho_mask = None, None + self.prior_pi, self.prior_rho = None, None + + # In the dynamic case, if there is margins, they are estimated + self.estimated_margins = (self.model_type == 'with_margins') + self.model_parameters = ['log_alpha', 'log_beta', 'log_pi', 'log_rho', 'gamma'] + if self.model_type == 'with_margins': + self.model_parameters += ['mu', 'nu'] + + self.absent_nodes = None + + self.tau = None + + self.density_part_Lc = None + + def fit( + self, X, + given_Z=None, given_W=None, + given_mu=None, given_nu=None + ): + """Fits the model to the given data + + Parameters + ---------- + X : np.ndarray with ndim == 3, first axis representing the + time, second reprenting the rows and third the columns | + list of scipy.sparse matrices, where each matrix is a snapshot + of the graph. + The discrete-time dynamic bi-partite graph to fit the data to. + given_Z : np.ndarray of shape (, n_rows) or shape (n_timesteps, n_rows) + such that each values of the array indicates the cluster of the row. + The values must be in {0, ..., Kz-1}, as returned by sklearn api + e.g. 
KMeans().fit(X).labels_ + This parameter must not be None if the `init_type` parameter is + `'given'`, otherwise it will not be used. By default None + given_W : np.ndarray of shape (, n_cols) or shape (n_timesteps, n_cols) + such that each values of the array indicates the cluster of the row. + The values must be in {0, ..., Kz-1}, as returned by sklearn api + e.g. KMeans().fit(X).labels_ + This parameter must not be None if the `init_type` parameter is + `'given'`, otherwise it will not be used. By default None + given_mu : np.ndarray of shape (n_timesteps, n_rows), optional + Gives an initial value for the margin parameter mu of the model. + This parameter must not be None if the `type_init_margins` parameter is + `'given'`, otherwise it will not be used. By default None + given_nu : np.ndarray of shape (n_timesteps, n_cols), optional + Gives an initial value for the margin parameter nu of the model. + This parameter must not be None if the `type_init_margins` parameter is + `'given'`, otherwise it will not be used. By default None + + Returns + ------- + self : object + Fitted estimator. + """ + general.check_X( + X=X, + is_graph=False, + self_loops=True, + directed=True + ) + self.X = X + self.T = len(self.X) + self.N, self.D = self.X[0].shape + self.absent_nodes = absent_nodes.AbsentNodes( + self.X, + self.threshold_absent_nodes, + 'LBM' + ) + self._set_transition_masks() + self._set_global_init_partition(given_Z, given_W) + self._init_debug() + self._set_data_margins() + + (all_iter_criterions, + all_intermediate_iter_criterions, + all_icls, + all_row_partitions, + all_col_partitions) = [], [], [], [], [] + + # each initialization of the model + for init_number in range(self.n_init): + self._print_verbose_msg_init(init_number) + self._set_current_init_partition() + self._init_q() + self._init_margins(given_mu, given_nu) + self.tau = 0. # the smoothing parameter, \in [0., 1.] 
+ self._full_m_step() + self._debug(init_number) + + old_iter_criterion = - np.finfo(np.float32).max + new_iter_criterion = - np.finfo(np.float32).max + iter_criterions, intermediate_iter_criterions = [], [] + iter_number = 0 + + # each iteration of the model, before smoothing + for _ in range(self.max_iter): + self._print_verbose_msg_iter(iter_number) + self._debug(init_number) + + # E + M steps + old_iter_criterion = new_iter_criterion + interm_criterion, new_iter_criterion = self._fit_single() + + iter_criterions.append(new_iter_criterion) + intermediate_iter_criterions.append(interm_criterion) + + self._set_best_parameters(new_iter_criterion, init_number, iter_number) + delta_iter = general.get_delta(old_iter_criterion, new_iter_criterion) + + if (iter_number >= self.n_iter_min) and (delta_iter < self.tol_iter): + self._print_verbose_converged(iter_number, None) + break + iter_number += 1 + + self._print_verbose_smoothing(self.smoothing_schedule, self.n_iter_supp_smoothing) + + # each iteration of the model, with smoothing + for ind_tau, tau in enumerate(self.smoothing_schedule): + self.tau = tau + for _ in range(self.n_iter_supp_smoothing): + self._debug(init_number) + + # E + M steps + old_iter_criterion = new_iter_criterion + interm_criterion, new_iter_criterion = self._fit_single() + + iter_criterions.append(new_iter_criterion) + intermediate_iter_criterions.append(interm_criterion) + + self._set_best_parameters(new_iter_criterion, init_number, iter_number) + delta_iter = general.get_delta(old_iter_criterion, new_iter_criterion) + + if delta_iter < self.tol_iter: + self._print_verbose_converged(iter_number, ind_tau) + break + iter_number += 1 + if self.verbose >= 1: + print('Done') + + all_row_partitions.append(self.Z.argmax(axis=2).copy()) + all_col_partitions.append(self.W.argmax(axis=2).copy()) + + all_iter_criterions.append(iter_criterions) + all_intermediate_iter_criterions.append(intermediate_iter_criterions) + all_icls.append(self.icl()) + + 
self.all_row_partitions = all_row_partitions + self.all_col_partitions = all_col_partitions + self.all_iter_criterions = all_iter_criterions + self.all_intermediate_iter_criterions = all_intermediate_iter_criterions + self.all_icls = all_icls + + self.fitted = True + self._set_mixture_proportions() + self._write_best_parameters() + return self + + def _fit_single(self): + """ + a single iteration of EM: + - computes the log density + - applies a row E-step + - applies an M-step on row parameters + - computes the log density + - applies a col E-step + - applies an M-step on col parameters + - returns a tuple containing the likelihoods + after row E+M step and after col E+M step + """ + L1 = self._fit_single_one_sided(mode='row') + L2 = self._fit_single_one_sided(mode='col') + return L1, L2 + + def _fit_single_one_sided(self, mode): + """ + a single iteration of EM : + - applies a row/col E-step + - applies an M-step on row/col parameters + - returns the complete data loglikelihood after row/col iteration + """ + L = 0. + num_gamma, den_gamma, den_mu, den_nu = self._init_num_den_parameters() + + for t in range(self.T): + # e step + X_red_t = self._e_step(t, mode) + + # sufficient statistics for the m step at time t + (num_gamma[t], den_gamma[t], + den_mu[t], den_nu[t], Lc_t) = self._m_step_t(t, X_red_t, mode) + + # local part of the loglikelihhod + L += Lc_t + self.density_part_Lc = L + + # m step + self._set_parameters(num_gamma, den_gamma, den_mu, den_nu) + self._update_mixture_proportions(mode) + L += self._mixture_part_complete_data_loglikelihood() + L += self.entropy() + return L + + def entropy(self): + """Compute entropy""" + if self.em_type == 'VEM': + Hz = m_step.entropy_dynamic( + self.Z, + self.qz, + self.absent_nodes.appearing_row_nodes, + self.min_float + ) + Hw = m_step.entropy_dynamic( + self.W, + self.qw, + self.absent_nodes.appearing_col_nodes, + self.min_float + ) + return Hz + Hw + + return 0. 
+ + def _m_step_t(self, t, X_red_t, mode): + """ + Fills the numerators and denominators of + the parameters of the model at a given time step t + + We fill num_gamma, den_gamma, den_mu, den_nu + time step by time step. We then smooth theses quantities if needed. + + Note that we keep time variying numerators and denominators + even if the parameter does not depend on time in the model + since we compute sufficient statistics at time t to + estimate gamma, mu and nu + + Note that, contrary to the static case, the complete + data log likelihood is computed after the E step. In fact, + otherwise, we would have to compute num_gamma[t] and + den_gamma[t] for each timestep, to obtain gamma. Then, + we would have to compute Lc_t for each t, requiring to + recompute reduced matrices for each time step + """ + if self.model_type == 'with_margins': + mu_t, nu_t = self.mu[t], self.nu[t] + elif self.model_type == 'without_margins': + mu_t, nu_t = None, None + + # num_gamma_t = X_ZW + num_gamma_t, den_gamma_t = m_step.update_gamma( + mode, self.estimated_margins, + self.X[t], self.Z[t], self.W[t], + X_red_t, mu_t, nu_t, + self.dtype, self.model_type + ) + gamma_no_smooth_t = m_step.get_gamma( + num_gamma_t, + den_gamma_t, + self.em_type, + self.min_float, + self.min_gamma + ) + if self.model_type == 'with_margins': + den_mu_t = m_step.get_denominator_mu( + self.Z[t], + self.W[t], + self.nu[t], + gamma_no_smooth_t + ) + mu_no_smooth_t = self.Xi_[t] / (den_mu_t + self.min_float) + mu_no_smooth_t[self.absent_nodes.absent_row_nodes[t]] = self.min_float + den_nu_t = m_step.get_denominator_nu( + self.Z[t], + self.W[t], + mu_no_smooth_t, + gamma_no_smooth_t + ) + + elif self.model_type == 'without_margins': + den_mu_t, den_nu_t = None, None + + # in mode 'init', at initialization, there + # is no self.gamma yet. 
Ideally, we should first + # estimate gamma, and then compute Lc, but + # it is less expensive to compute an approximation + # of Lc using the local estimate of gamma + if mode == 'init': + gamma_t = gamma_no_smooth_t + else: + gamma_t = self.gamma if self.model_type == 'with_margins' else self.gamma[t] + + Lc_t = m_step.compute_Lc_static_density( + self.model_type, + self.estimated_margins, + gamma_t, + mu_t, nu_t, + self.Xi_[t], self.X_j[t], + num_gamma_t, + self.min_float + ) + return num_gamma_t, den_gamma_t, den_mu_t, den_nu_t, Lc_t + + def _update_pi_rho(self, ZW, qzw, prior, mask): + if self.em_type == 'VEM': + pi_rho = m_step.update_pi_rho(ZW, qzw, prior, mask) + elif self.em_type == 'CEM': + pi_rho = m_step.update_pi_rho_cem(ZW, prior, mask, self.dtype) + + pi_rho = m_step.correct_pi_rho( + pi_rho, + self.min_proba_mixture_proportions, + self.min_float + ) + return np.log(pi_rho + self.min_float) + + def _e_step(self, t, mode): + if self.em_type == 'VEM': + return self._e_step_vem(t, mode) + if self.em_type == 'CEM': + return self._e_step_cem(t, mode) + raise ValueError(self, "em_type", self.em_type) + + def _e_step_cem(self, t, mode): + mu_t, nu_t = (self.mu[t], self.nu[t]) if self.model_type == 'with_margins' else (None, None) + # gamma_t = self.gamma if self.model_type == 'with_margins' else self.gamma[t] + if mode == 'row': + self.Z[t], X_red_t = e_step.e_step_t_dynamic_cem( + mode='row', + model_type=self.model_type, + estimated_margins=self.estimated_margins, + X_t=self.X[t], + gamma=self.gamma, + dtype=self.dtype, + min_float=self.min_float, + log_pi_rho=self.log_pi, + log_alpha_beta=self.log_alpha, + ind_appearing_nodes_t=self.absent_nodes.appearing_row_nodes[t], + ind_absent_nodes_t=self.absent_nodes.absent_row_nodes[t], + ZW_tm1=None if t == 0 else self.Z[t - 1], + ZW_tp1=None if t == (self.T - 1) else self.Z[t + 1], + Z=None, + W=self.W[t], + nu_t=nu_t, + mu_t=mu_t + ) + elif mode == 'col': + self.W[t], X_red_t = e_step.e_step_t_dynamic_cem( + 
mode='col', + model_type=self.model_type, + estimated_margins=self.estimated_margins, + X_t=self.X[t], + gamma=self.gamma, + dtype=self.dtype, + min_float=self.min_float, + log_pi_rho=self.log_rho, + log_alpha_beta=self.log_beta, + ind_appearing_nodes_t=self.absent_nodes.appearing_col_nodes[t], + ind_absent_nodes_t=self.absent_nodes.absent_col_nodes[t], + ZW_tm1=None if t == 0 else self.W[t - 1], + ZW_tp1=None if t == (self.T - 1) else self.W[t + 1], + Z=self.Z[t], + W=None, + nu_t=nu_t, + mu_t=mu_t + ) + return X_red_t + + def _e_step_vem(self, t, mode): + """ + Applies an E-step for a given time step t + on the row or column posterior distributions. + If t == 0: + updates the initial posterior proba (ZW[0]) + else: + in VEM: + updates the transition posterior proba (qzw[t]) + then updates posterior proba of appearing nodes (ZW_app) + then updates ZW[t] with qzw[t] and ZW[t-1] + in CEM: + computes an unormalized posterior proba vector zw_crit + then updates ZW[t] with zw_crit and ZW[t-1] + mode : row or col e-step + """ + mu_t, nu_t = (self.mu[t], self.nu[t]) if self.model_type == 'with_margins' else (None, None) + # gamma_t = self.gamma if self.model_type == 'with_margins' else self.gamma[t] + if mode == 'row': + self.Z[t], X_red_t = e_step.e_step_t_dynamic_vem( + mode='row', + first_ts=(t == 0), + model_type=self.model_type, + estimated_margins=self.estimated_margins, + X_t=self.X[t], + gamma=self.gamma, + dtype=self.dtype, + min_float=self.min_float, + qzw_tp1=self.qz[t + 1] if (t != self.T - 1) else None, + log_pi_rho=self.log_pi, + log_alpha_beta=self.log_alpha, + ind_appearing_nodes_t=self.absent_nodes.appearing_row_nodes[t], + ind_absent_nodes_t=self.absent_nodes.absent_row_nodes[t], + min_proba=self.min_proba_Z, + ZW_tm1=None if t == 0 else self.Z[t - 1], + Z=None, + W=self.W[t], + nu_t=nu_t, + mu_t=mu_t + ) + elif mode == 'col': + self.W[t], X_red_t = e_step.e_step_t_dynamic_vem( + mode='col', + first_ts=(t == 0), + model_type=self.model_type, + 
estimated_margins=self.estimated_margins, + X_t=self.X[t], + gamma=self.gamma, + dtype=self.dtype, + min_float=self.min_float, + qzw_tp1=self.qw[t + 1] if (t != self.T - 1) else None, + log_pi_rho=self.log_rho, + log_alpha_beta=self.log_beta, + ind_appearing_nodes_t=self.absent_nodes.appearing_col_nodes[t], + ind_absent_nodes_t=self.absent_nodes.absent_col_nodes[t], + min_proba=self.min_proba_W, + ZW_tm1=None if t == 0 else self.W[t - 1], + Z=self.Z[t], + W=None, + nu_t=nu_t, + mu_t=mu_t + ) + return X_red_t + + def _update_mixture_proportions(self, mode): + """ + Updates the mixture proportions + """ + if mode == 'init': + self._update_alpha() + self._update_beta() + self._update_pi() + self._update_rho() + elif mode == 'row': + self._update_alpha() + self._update_pi() + elif mode == 'col': + self._update_beta() + self._update_rho() + + def _set_parameters(self, num_gamma, den_gamma, den_mu, den_nu): + """ + Updates gamma, mu and nu with their num_... and den_... + The smoothing is applied here. + Gamma, mu, nu can be constant or not. Anyway, + we deal with num_... and den_... 
as temporal signals + """ + # gamma + if self.model_type == 'with_margins': + self.gamma = m_step.get_gamma( + num_gamma.sum(0), + den_gamma.sum(0), + self.em_type, + self.min_float, + self.min_gamma + ) + elif self.model_type == 'without_margins': + if self.parameter_smoothing: + W_tau = m_step.smoothing_matrix(self.T, self.tau, self.dtype) + num_gamma = num_gamma.reshape((self.T, self.Kz * self.Kw)) + den_gamma = den_gamma.reshape((self.T, self.Kz * self.Kw)) + + with np.errstate(under='ignore'): + smoothed_num_gamma = (W_tau @ num_gamma).reshape((self.T, self.Kz, self.Kw)) + smoothed_den_gamma = (W_tau @ den_gamma).reshape((self.T, self.Kz, self.Kw)) + self.gamma = m_step.get_gamma( + smoothed_num_gamma, + smoothed_den_gamma, + self.em_type, + self.min_float, + self.min_gamma + ) + else: + self.gamma = m_step.get_gamma( + num_gamma, + den_gamma, + self.em_type, + self.min_float, + self.min_gamma + ) + # margins + if self.model_type == 'with_margins': + if self.parameter_smoothing: + W_tau = m_step.smoothing_matrix(self.T, self.tau, self.dtype) + if self.absent_nodes.n_absent_row_tot > 0: + absent_nodes.replace_vals_absent( + den_mu, + self.absent_nodes.inds_prev_rows, + self.absent_nodes.ts_absent_rows + ) + if self.absent_nodes.n_absent_col_tot > 0: + absent_nodes.replace_vals_absent( + den_nu, + self.absent_nodes.inds_prev_cols, + self.absent_nodes.ts_absent_cols + ) + with np.errstate(under='ignore'): + smoothed_num_mu = W_tau @ self.Xi_ + smoothed_den_mu = W_tau @ den_mu + self.mu = smoothed_num_mu / (smoothed_den_mu + self.min_float) + + smoothed_num_nu = W_tau @ self.X_j + smoothed_den_nu = W_tau @ den_nu + self.nu = smoothed_num_nu / (smoothed_den_nu + self.min_float) + else: + self.mu = self.Xi_ / (den_mu + self.min_float) + self.nu = self.X_j / (den_nu + self.min_float) + + self._correct_margins() + + def _update_alpha(self): + self.log_alpha = m_step.update_alpha_beta_dynamic( + self.Z, + self.absent_nodes.n_absent_row_tot, + 
self.absent_nodes.appearing_row_nodes, + self.absent_nodes.absent_row_nodes[0], + self.min_float, + self.min_proba_mixture_proportions, + self.dtype + ) + + def _update_beta(self): + self.log_beta = m_step.update_alpha_beta_dynamic( + self.W, + self.absent_nodes.n_absent_col_tot, + self.absent_nodes.appearing_col_nodes, + self.absent_nodes.absent_col_nodes[0], + self.min_float, + self.min_proba_mixture_proportions, + self.dtype + ) + + def _update_pi(self): + self.log_pi = self._update_pi_rho( + self.Z, + self.qz, + self.prior_pi, + self.pi_mask + ) + + def _update_rho(self): + self.log_rho = self._update_pi_rho( + self.W, + self.qw, + self.prior_rho, + self.rho_mask + ) + + def _correct_margins(self): + """ + Sets margins to zero for absent nodes. + Useful if smoothing is applied + + It should be noted that num_mu and den_mu are 0 for absent nodes. + When the smoothing is applied, the margin of + an absent node does not contribute to the + other margins. Moreover, for an absent node, + the smoothed margin can be non zero : it must then + be corrected. 
+ """ + np.clip(self.mu, self.min_margin, None, self.mu) + np.clip(self.nu, self.min_margin, None, self.nu) + + # important + if self.model_type == 'with_margins': + for t in range(self.T): + self.mu[t][self.absent_nodes.absent_row_nodes[t]] = self.min_float + self.nu[t][self.absent_nodes.absent_col_nodes[t]] = self.min_float + + def _init_num_den_parameters(self): + """ + Initializes the time-dependant vectors + that will be filled in _m_step_t() + """ + num_gamma = np.zeros((self.T, self.Kz, self.Kw), dtype=self.dtype) + den_gamma = np.zeros((self.T, self.Kz, self.Kw), dtype=self.dtype) + + # we initialize den_mu, den_nu even if the model + # is without margins, for consistency + den_mu = np.zeros((self.T, self.N), dtype=self.dtype) + den_nu = np.zeros((self.T, self.D), dtype=self.dtype) + + return num_gamma, den_gamma, den_mu, den_nu + + def _init_q(self): + """ + Initializes the variational probability distributions: + qz, Z, Z_app, qw, W, W_app, + where qz and qw are initialized only in VEM + """ + if self.em_type == 'VEM': + self.Z = np.zeros((self.T, self.N, self.Kz), dtype=self.dtype) + self.Z[0] = init.init_ZW0( + self.Kz, + self.current_init_Z[0], + self.min_proba_Z, + self.dtype + ) + self.qz = init.init_qzw( + self.Kz, + self.current_init_Z, + self.min_proba_Z, + self.dtype + ) + for t in range(1, self.T): + self.Z[t] = e_step.update_ZW_t_vem( + self.Z[t - 1], + self.qz[t - 1], + None, None, None, + self.min_proba_Z + ) + self.Z = e_step.correct_ZW(self.Z, self.min_proba_Z) + + self.W = np.zeros((self.T, self.D, self.Kw), dtype=self.dtype) + self.W[0] = init.init_ZW0( + self.Kw, + self.current_init_W[0], + self.min_proba_W, + self.dtype + ) + self.qw = init.init_qzw( + self.Kw, + self.current_init_W, + self.min_proba_W, + self.dtype + ) + for t in range(1, self.T): + self.W[t] = e_step.update_ZW_t_vem( + self.W[t - 1], + self.qw[t - 1], + None, None, None, + self.min_proba_W + ) + self.W = e_step.correct_ZW(self.W, self.min_proba_W) + + elif 
self.em_type == 'CEM': + self.qz = None + self.Z = np.zeros((self.T, self.N, self.Kz), dtype='bool') + for t in range(self.T): + self.Z[t] = init.init_ZW0( + self.Kz, + self.current_init_Z[t], + None, 'bool' + ) + self.W = np.zeros((self.T, self.D, self.Kw), dtype='bool') + for t in range(self.T): + self.W[t] = init.init_ZW0( + self.Kw, + self.current_init_W[t], + None, 'bool' + ) + self.qw = None + + def _set_current_init_partition(self): + """ + Creates the current init partition by + applying noise to the global init partition + """ + self.current_init_Z = init.apply_perturbation_to_init_clustering( + init_partition=self.global_init_Z, + random_state=self.random_state, + node_perturbation_rate=self.node_perturbation_rate, + cluster_perturbation_rate=self.cluster_perturbation_rate, + dynamic=True + ) + self.current_init_W = init.apply_perturbation_to_init_clustering( + init_partition=self.global_init_W, + random_state=self.random_state, + node_perturbation_rate=self.node_perturbation_rate, + cluster_perturbation_rate=self.cluster_perturbation_rate, + dynamic=True + ) + + def _set_global_init_partition(self, given_Z, given_W): + """ + Sets the global row and column partitions of the model. 
+ At each new initialization of the model, noise will be applied to + this partition to create the current init partition + """ + # wether we convert the obtained (N) partition to a T x N matrix + if given_Z is not None or given_W is not None: + assert (given_Z is not None) and (given_W is not None) + assert self.init_type == 'given' + assert given_Z.ndim == given_W.ndim + + # X_init : data matrix used for initial clustering + X_init_row = init.get_X_init( + self.X, + mode='row', + absent_nodes=self.absent_nodes.absent_row_nodes + ) + self.global_init_Z = init.get_init_partition( + X_init=X_init_row, + Kzw=self.Kz, + init_type=self.init_type, + T=self.T, + random_state=self.random_state, + n_jobs=self.n_jobs, + n_init=self.n_init_clustering, + given_partition=given_Z + ) + X_init_col = init.get_X_init( + self.X, + mode='col', + absent_nodes=self.absent_nodes.absent_col_nodes + ) + self.global_init_W = init.get_init_partition( + X_init=X_init_col, + Kzw=self.Kw, + init_type=self.init_type, + T=self.T, + random_state=self.random_state, + n_jobs=self.n_jobs, + n_init=self.n_init_clustering, + given_partition=given_W + ) + + def _mixture_part_complete_data_loglikelihood(self): + """ + Computes the terms in alpha, beta, rho and pi + of the log likelihood of the expected complete data + """ + # alpha and beta part + alpha_part = m_step.complete_data_loglikelihood_alpha_beta( + self.log_alpha, + self.Z, + self.absent_nodes.absent_row_nodes[0], + self.absent_nodes.appearing_row_nodes + ) + beta_part = m_step.complete_data_loglikelihood_alpha_beta( + self.log_beta, + self.W, + self.absent_nodes.absent_col_nodes[0], + self.absent_nodes.appearing_col_nodes + ) + pi_part = m_step.complete_data_loglikelihood_pi_rho( + self.em_type, + self.log_pi, + self.Z, self.qz, + self.pi_mask + ) + rho_part = m_step.complete_data_loglikelihood_pi_rho( + self.em_type, + self.log_rho, + self.W, self.qw, + self.rho_mask + ) + return alpha_part + beta_part + pi_part + rho_part + + def 
_set_data_margins(self): + if isinstance(self.X, list): + self.Xi_ = np.array([self.X[t].sum(1).A1 for t in range(self.T)]) + self.X_j = np.array([self.X[t].sum(0).A1 for t in range(self.T)]) + else: + self.Xi_ = self.X.sum(2) + self.X_j = self.X.sum(1) + + if self.absent_nodes.n_absent_row_tot > 0: + absent_nodes.replace_vals_absent( + self.Xi_, + self.absent_nodes.inds_prev_rows, + self.absent_nodes.ts_absent_rows + ) + if self.absent_nodes.n_absent_col_tot > 0: + absent_nodes.replace_vals_absent( + self.X_j, + self.absent_nodes.inds_prev_cols, + self.absent_nodes.ts_absent_cols + ) + + def _init_margins(self, given_mu=None, given_nu=None): + """ + Initializes the margins mu and nu, that + can not be initialized from a given partition + as it is the case for alpha, pi, gamma, ... + """ + # time dependant margins + if self.model_type == 'with_margins': + if self.type_init_margins == 'ones': + self.mu = np.ones((self.T, self.N), dtype=self.dtype) + self.nu = np.ones((self.T, self.D), dtype=self.dtype) + elif self.type_init_margins == 'X.': + self.mu = self.Xi_.copy() + self.nu = self.X_j.copy() + elif self.type_init_margins == 'random': + self.mu = np.abs(self.random_state.normal( + loc=self.loc_random_margins, + scale=self.scale_random_margins, + size=(self.T, self.N) + ).astype(self.dtype)) + self.nu = np.abs(self.random_state.normal( + loc=self.loc_random_margins, + scale=self.scale_random_margins, + size=(self.T, self.D) + ).astype(self.dtype)) + elif self.type_init_margins == 'given': + assert given_mu is not None and given_nu is not None + self.mu = given_mu.astype(self.dtype) + self.nu = given_nu.astype(self.dtype) + + def _set_transition_masks(self): + # masks used to update pi and rho efficiently + # by only selecting present and non-appearing nodes + self.pi_mask = init.pi_rho_update_mask( + self.T, self.N, + self.absent_nodes.absent_row_nodes, + self.absent_nodes.appearing_row_nodes + ) + self.rho_mask = init.pi_rho_update_mask( + self.T, self.D, + 
self.absent_nodes.absent_col_nodes, + self.absent_nodes.appearing_col_nodes + ) + + def _set_mixture_proportions(self): + self.alpha = np.exp(self.log_alpha) + self.beta = np.exp(self.log_beta) + self.pi = np.exp(self.log_pi) + self.rho = np.exp(self.log_rho) + + def set_prior_transition_matrices(self): + """ + For pi and rho, we consider an informatiVe dirichlet prior + for each row of the transition matrices. + The prior is Kzw x Kzw and st prior[k, l] = 1 + and prior[k, k] = prior_diagonal > 1. + Its role is to favor partitions with less class transitions. + During the M-step, it adds the pseudocounts, with eg + pi_kl propto n[k, l] + prior[k, l] - 1 + where n[k, l] is the number of class transitions, as in + a classical M step. + """ + assert 0. <= self.prior_diagonal_pi <= 1. + assert 0. <= self.prior_diagonal_rho <= 1. + + prior_pi_val = self.prior_diagonal_pi * self.N + 1 + prior_rho_val = self.prior_diagonal_rho * self.D + 1 + + IZ = np.eye((self.Kz), dtype=self.dtype) + OZ = np.ones((self.Kz, self.Kz), dtype=self.dtype) + self.prior_pi = (prior_pi_val - 1.) * IZ + OZ + + IW = np.eye((self.Kw), dtype=self.dtype) + OW = np.ones((self.Kw, self.Kw), dtype=self.dtype) + self.prior_rho = (prior_rho_val - 1.) * IW + OW + + def _full_m_step(self): + """ + Applies an M-step to all the current parameters. + Used after the initialization of the posterior probabilities. + """ + # update all mixture prop + self.set_prior_transition_matrices() + self._update_mixture_proportions(mode='init') + + # update gamma, mu, nu + # by filling num_mu, num_nu, num_gamma, den... 
+ num_gamma, den_gamma, den_mu, den_nu = self._init_num_den_parameters() + + for t in range(self.T): + (num_gamma[t], den_gamma[t], + den_mu[t], den_nu[t], _) = self._m_step_t(t=t, X_red_t=None, mode='init') + + # now that the num and den of the params have + # been set, we set the params values (with or without smoothing) + self._set_parameters(num_gamma, den_gamma, den_mu, den_nu) + + def _init_pi_rho(self): + """ + Initializes pi and rho, the transition matrices, + with a M step or with a matrix with diagonal + value diag_pi_rho_init + """ + if self.diag_pi_init is not None: + val_off_diag = (1. - self.diag_pi_init) / (self.Kz - 1) + OZ = np.full((self.Kz, self.Kz), val_off_diag) + IZ = np.eye(self.Kz) + pi = OZ + (self.diag_pi_init - val_off_diag) * IZ + self.log_pi = np.log(pi) + else: + self._update_pi() + + if self.diag_rho_init is not None: + val_off_diag = (1. - self.diag_rho_init) / (self.Kw - 1) + OW = np.full((self.Kw, self.Kw), val_off_diag) + IW = np.eye(self.Kw) + rho = OW + (self.diag_rho_init - val_off_diag) * IW + self.log_rho = np.log(rho) + else: + self._update_rho() + + def icl(self): + """Compute ICL""" + icl_z = general.compute_mixture_exact_icl(self.Z, self.Kz, self.N) + icl_w = general.compute_mixture_exact_icl(self.W, self.Kw, self.D) + bic_penalty = ( + -.5 * (self.Kz * self.Kw + self.T * (self.N + self.D)) * + np.log(self.T * self.N * self.D) + ) + val = ( + icl_z + icl_w + + self.density_part_Lc + + bic_penalty + ) + return val + + def best_partition(self, mode, n_first=1): + """ + returns a list of tuple of partitions + if 1 partition: [(part_1), ..., (part_n_first)] + if 2 partitions: [(row_part_1, col_part_1), ..., + (row_part_n_first, col_part_n_first)] + + mode == 'likelihood' returns the n_first best partitions + in terms of likelihood + + if mode == 'consensus: hbgf' or mode == 'consensus: cspa' + returns 1 consensus partition over n_first partitions + """ + assert self.fitted + best_partitions = metrics.sort_partitions( + 
self.all_iter_criterions, + [self.all_row_partitions, self.all_col_partitions], + n_first + ) + # put absent nodes in cluster -1 + for p1, p2 in best_partitions: + for t in range(self.T): + for i in self.absent_nodes.absent_row_nodes[t]: + p1[t, i] = -1 + for j in self.absent_nodes.absent_col_nodes[t]: + p2[t, j] = -1 + + if mode == 'likelihood': + return best_partitions + if mode in ['consensus: hbgf', 'consensus: cspa']: + best_row_partitions = [x[0] for x in best_partitions] + best_col_partitions = [x[1] for x in best_partitions] + if mode == 'consensus: hbgf': + Z_consensus = consensus.hbgf( + best_row_partitions, + self.n_init_clustering_consensus + ).reshape(self.T, self.N) + W_consensus = consensus.hbgf( + best_col_partitions, + self.n_init_clustering_consensus + ).reshape(self.T, self.D) + elif mode == 'consensus: cspa': + Z_consensus = consensus.cspa( + best_row_partitions, self.n_init_clustering_consensus + ).reshape(self.T, self.N) + W_consensus = consensus.cspa( + best_col_partitions, + self.n_init_clustering_consensus + ).reshape(self.T, self.D) + return [(Z_consensus, W_consensus)] + raise ValueError diff --git a/dcblockmodels/models/hlbm.py b/dcblockmodels/models/hlbm.py new file mode 100644 index 0000000..ebcdbeb --- /dev/null +++ b/dcblockmodels/models/hlbm.py @@ -0,0 +1,675 @@ +"""Pairwise semi-supervised Latent Block Model with a Hidden Markov Random Field""" + + +import os +import sys +import warnings + +import numpy as np +import scipy as sp +from numba import NumbaPendingDeprecationWarning + +from .blockmodel import BlockModel +from .. 
import metrics +from .utils import ( + e_step, + m_step, + init, + general, + similarity_matrices, + consensus +) + +sys.stderr = open(os.devnull, "w") # pylint: disable=R1732,W1514 + +warnings.filterwarnings("ignore", category=NumbaPendingDeprecationWarning) +warnings.filterwarnings("ignore", category=DeprecationWarning) + + +class HLBM(BlockModel): + """Semi-supervised Latent Block Model with a pairwise Hidden Markov Random Field + + Block Model General Parameters + ---------- + Kz : int + Number of row clusters + Kw : int + Number of column clusters + init_type : {'random', 'kmeans', 'skmeans', 'given'}, optional, + by default 'skmeans'. The methods used for the initialization of + the algorithm. It is fit once for each side (rows and columns) + and gives two global initialization partitions. These partitions + are then used to initialization-specific partitions using the + parameters `node_perturbation_rate` and `cluster_perturbation_rate`: + - '`random'` randomly initialized clusters, + - `'kmeans'` clusters initialized using `sklearn.cluster.KMeans`, + - `'skmeans'` clusters initialized using `spherecluster.SphericalKMeans`, + - `'given'` given in the `fit()` method + em_type : {'VEM', 'CEM'} + The EM algorithm used + n_init : int + Number of initializations of the model. Each initialization + will start from different initial row and col partitions + and will converge to a partition and parameter set with + a corresponding complete-data log likelihood. One can then + select one of these partitions using the `best_partition()` + method. 
+ n_init_clustering : int, optional + the number of initializations of the clustering algorithm + chosen in `'init_type'`, by default 100 + node_perturbation_rate : float, optional + the fraction of nodes (row or cols) that are reassgined to + a random cluster at each new intitialization of the model, by default .1 + model_type : {'with_margins', 'without_margins'} + The dynamic model used: + - `'with_margins'` : a model with dynamic margins + and static connectivity matrix gamma + - `'without_margins'` : a model with only a dynamic + connectivity matrix as presented in Matias & Miele + type_init_margins : {'ones', 'X.', 'random', 'given'}, optional + How the margins are initialized, by default 'ones': + - `'ones'` : mu[i] = nu[j] = 1 + - `'X.'` : mu = X.sum((1)) nu = X.sum((0)) + - `'random'` : mu and nu sampled from normal distribution + followed by an absolute value. + - `'given'` : mu an nu are given in the `fit()` method + min_float : float, optional + The minimum float used to avoid numerical issues, by default 1e-15 + min_gamma : float + The minimum value of connectivity parameter gamma + especially important in CEM to avoid empty clusters + min_proba_Z : float, optional + The probability (between 0 and 1) at which the variational probabilities + for the row memberships are clipped, by default .05 + min_proba_W : float, optional + The probability (between 0 and 1) at which the variational probabilities + for the column memberships are clipped, by default .05 + min_margin : float, optional + The value at which the margins are clipped, by default 1e-10 + min_proba_mixture_proportions : float, optional + The probability (between 0 and 1) at which the mixture proportions + are clipped, by default .05 + threshold_absent_nodes : int, optional + Row or column nodes that have, at a given time step, a degree below + `threshold_absent_nodes` are considered absent and do not contribute + to the observed data log likelihood, by default 0 + dtype : str, optional + The 
dtype of the floats used in the model, by default 'float32' + random_state : int | np.random.RandomState | None, optional + Creates a random state generator from a seed or uses the given + random state generator, by default None + max_iter : int, optional + The maximum number of EM iterations for a single + initialization of the model, by default 50 + tol_iter : float, optional + The decrease ratio below which we consider the algorithm + has converged, by default 1e-5 + n_jobs : int, optional + The number of jobs used in the initialization clustering algorithm + in sklearn or spherecluster, by default -1 i.e. all cores + verbose : {0, 1, 2}, optional + 0 is silent, 1 is normal verbose and 2 is very verbose, by default 1 + blockmodel_params : [type], optional + A dictionnary of parameters can can overwrite the class parameters + if non empty or None, by default None. The parameters that can be changed + are the following: + - `n_iter_min` + - `n_init_clustering_consensus` + - `loc_random_margins` + - `scale_random_margins` + model_id : int | None, optional + The numerical id of the model, used for debugging purposes, by default None + debug_list : list, optional + a list of strings that correspond to the names of the model attributes + whose value we wish to keep track of during the iterations of the + EM algorithm, by default []. `debug_list` must be a sublist of + `self.model_parameters`. + debug_output : str, optional + The directory where the values of the parameters in `debug_list` + will be outputed in the form of .npy files, by default '.' 
+ + hLBM Specific Parameters + ---------- + estimated_margins : True | False + Whether the margins mu and nu are estimated or are + set to the observed margins X.sum(1) and X.sum(0) + regularization_mode : {'all', 'mixture'} + Whether we consider the mixture proportions as + an external field in the HMRF (`'all'`) or we consider + the mixture proportions outside the HMRF (`'mixture'`) + compute_regularization : bool, optional + Whether we compute the regularization term in the criterion, + which is computationally costly, by default True + regularize_row : bool, optional + Whether we use the similarity matrix S_r to regularize the + model or not, by default False + regularize_col : bool, optional + Whether we use the similarity matrix S_c to regularize the + model or not, by default False + multiplicative_init_rows : bool, optional + Whether we use the Must-Link relationships in the initialization + of the row partition or not, by default True + multiplicative_init_cols : bool, optional + Whether we use the Must-Link relationships in the initialization + of the column partition or not, by default True + power_multiplicative_init : int if multiplicative_init_rows or + if multiplicative_init_cols | None, optional + The power to which we raise the stochastic matrix created from + the Must-Link relationships in the initialization, + by default None + damping_factor : float if ((regularize_row or regularize_col) and + em_type == 'VEM') | None, optional + The damping factor between 0 and 1 used in VEM, by default None + """ + REGULARIZATION_MODES = ['all', 'mixture'] + model_parameters = ['log_alpha', 'log_beta', 'gamma', 'mu', 'nu'] + + def __init__( + self, + # Global blockmodel arguments + Kz=None, + Kw=None, + init_type=None, + em_type=None, + n_init=None, + n_init_clustering=100, + node_perturbation_rate=.1, + model_type=None, + type_init_margins='ones', + min_float=1e-15, + min_gamma=None, + min_proba_Z=.05, + min_proba_W=.05, + min_margin=1e-10, + 
min_proba_mixture_proportions=.05, + threshold_absent_nodes=0, + dtype='float32', + random_state=None, + max_iter=50, + tol_iter=1e-5, + n_jobs=-1, + verbose=1, + blockmodel_params=None, + model_id=None, + debug_list=None, + debug_output='.', + # Specific HLBM arguments + estimated_margins=None, + regularization_mode=None, + compute_regularization=True, + regularize_row=False, + regularize_col=False, + multiplicative_init_rows=True, + multiplicative_init_cols=True, + power_multiplicative_init=None, + damping_factor=None, + ): + super().__init__( + Kz=Kz, + Kw=Kw, + init_type=init_type, + em_type=em_type, + n_init=n_init, + min_float=min_float, + n_init_clustering=n_init_clustering, + node_perturbation_rate=node_perturbation_rate, + model_type=model_type, + type_init_margins=type_init_margins, + min_gamma=min_gamma, + min_proba_Z=.05, + min_proba_W=.05, + min_margin=1e-10, + min_proba_mixture_proportions=min_proba_mixture_proportions, + threshold_absent_nodes=threshold_absent_nodes, + dtype=dtype, + random_state=random_state, + max_iter=max_iter, + tol_iter=tol_iter, + n_jobs=n_jobs, + verbose=verbose, + blockmodel_params=blockmodel_params, + model_id=model_id, + debug_list=debug_list, + debug_output=debug_output + ) + + for arg in ['regularization_mode']: + if arg is None: + raise (f'Argument {arg} for class {type(self).__name__} must be initialized ' + 'explicitly. See documentation for possible values.') + + assert isinstance(multiplicative_init_rows, bool) + assert isinstance(multiplicative_init_cols, bool) + assert regularization_mode in self.REGULARIZATION_MODES + assert 0. < min_proba_Z < 1. + assert 0. < min_proba_W < 1. + assert min_margin > 0. 
+ + if self.model_type == 'with_margins': + assert isinstance(estimated_margins, bool) + else: + assert not estimated_margins + self.estimated_margins = bool(estimated_margins) + + self.regularization_mode = regularization_mode + self.regularize_row = regularize_row + self.regularize_col = regularize_col + self.compute_regularization = compute_regularization + + if multiplicative_init_rows or multiplicative_init_cols: + assert power_multiplicative_init >= 1 + self.multiplicative_init_rows = multiplicative_init_rows + self.multiplicative_init_cols = multiplicative_init_cols + self.power_multiplicative_init = power_multiplicative_init + + # damping_factor used for damping in HMRF VEM + self.damping_factor = None + if self.em_type == 'VEM' and (self.regularize_row or self.regularize_col): + assert 0. < damping_factor < 1. + self.damping_factor = damping_factor + + self.lambda_r, self.lambda_c = None, None + self.S_r, self.S_c, self.P_r, self.P_c = None, None, None, None + + def fit( + self, + X, + lambda_r=None, + S_r=None, + lambda_c=None, + S_c=None, + given_Z=None, + given_W=None + ): + """Fits the model to the given data""" + self.X = X + self.N, self.D = self.X.shape + self._set_similarity_parameters(lambda_r, S_r, lambda_c, S_c) + self._set_global_init_partition(given_Z, given_W) + self._init_debug() + seeds = self.random_state.randint(np.iinfo(np.int32).max, size=self.n_init) + + (all_iter_criterions, + all_intermediate_iter_criterions, + all_regularizations, + all_intermediate_regularizations, + all_row_partitions, + all_col_partitions) = [], [], [], [], [], [] + + for i, _ in enumerate(seeds): + self._print_verbose_msg_init(i) + self._set_current_init_partition() + self._init_q() + self._init_parameters() + + old_iter_criterion = - np.finfo(np.float32).max + new_iter_criterion = - np.finfo(np.float32).max + iter_criterions, intermediate_iter_criterions = [], [] + regularizations, intermediate_regularizations = [], [] + + for it in range(self.max_iter): + 
self._print_verbose_msg_iter(it) + self._debug(i) + + # E + M steps + _old_interm_criterion, old_iter_criterion = new_iter_criterion, new_iter_criterion + interm_criterion, interm_reg, new_iter_criterion, new_reg = self._fit_single() + + if self.regularize_row or self.regularize_col: + interm_criterion -= interm_reg + new_iter_criterion -= new_reg + + self._set_best_parameters(new_iter_criterion, i, it) + delta_iter = general.get_delta(old_iter_criterion, new_iter_criterion) + + if (it >= self.n_iter_min) and (delta_iter < self.tol_iter): + self._print_verbose_converged(it, None) + break + + iter_criterions.append(new_iter_criterion) + intermediate_iter_criterions.append(interm_criterion) + regularizations.append(new_reg) + intermediate_regularizations.append(interm_reg) + + all_row_partitions.append(general.to_dense(self.Z.copy()).argmax(1)) + all_col_partitions.append(general.to_dense(self.W.copy()).argmax(1)) + all_iter_criterions.append(iter_criterions) + all_intermediate_iter_criterions.append(intermediate_iter_criterions) + all_regularizations.append(regularizations) + all_intermediate_regularizations.append(intermediate_regularizations) + + self.all_row_partitions = all_row_partitions + self.all_col_partitions = all_col_partitions + self.all_iter_criterions = all_iter_criterions + self.all_intermediate_iter_criterions = all_intermediate_iter_criterions + self.all_regularizations = all_regularizations + self.all_intermediate_regularizations = all_intermediate_regularizations + self.fitted = True + self._set_mixture_proportions() + self._write_best_parameters() + + return self + + def _m_step(self, X_red, mode): + # update mixture proportions (alpha, beta) + Z_ = general.get_class_counts(self.Z) + W_ = general.get_class_counts(self.W) + + # alpha, beta + self.update_mixture_proportions(mode, Z_, W_) + + # gamma + X_kl, den_gamma = m_step.update_gamma( + mode, self.estimated_margins, + self.X, self.Z, self.W, X_red, self.mu, self.nu, + self.dtype, 
self.model_type + ) + self.gamma = m_step.get_gamma( + X_kl, den_gamma, + self.em_type, self.min_float, self.min_gamma + ) + + # mu, nu + if self.estimated_margins: + self.mu = m_step.update_mu( + self.Z, self.W, self.Xi_, self.nu, self.gamma, self.min_margin + ) + self.nu = m_step.update_nu( + self.Z, self.W, self.X_j, self.mu, self.gamma, self.min_margin + ) + + # complete data log likelihood + Lc = m_step.compute_Lc_static( + self.model_type, self.estimated_margins, + self.regularization_mode, self.regularize_row, self.regularize_col, + self.P_r, self.P_c, + self.log_alpha, self.log_beta, self.gamma, + self.mu, self.nu, self.Xi_, self.X_j, + Z_, W_, X_kl, self.min_float + ) + + # regulariation term of the complete data log likelihood + # can be not computed for faster computations + if self.compute_regularization: + regularization = m_step.get_regularization( + self.regularize_row, self.regularize_col, + self.lambda_r, self.lambda_c, + self.S_r, self.S_c, self.Z, self.W + ) + else: + regularization = 0. 
+ return Lc, regularization + + def _e_step(self, mode): + if mode == 'row': + self.Z, X_W = e_step.e_step_static( + X=self.X, gamma=self.gamma, + mode='row', + model_type=self.model_type, em_type=self.em_type, + estimated_margins=self.estimated_margins, + regularization_mode=self.regularization_mode, + regularize=self.regularize_row, + lambda_=self.lambda_r, S=self.S_r, P=self.P_r, + Z=self.Z, W=self.W, + log_alpha_beta=self.log_alpha, + mu=self.mu, nu=self.nu, + damping_factor=self.damping_factor, + dtype=self.dtype, + min_proba=self.min_proba_Z, min_float=self.min_float + ) + return X_W + if mode == 'col': + self.W, X_Z = e_step.e_step_static( + X=self.X, gamma=self.gamma, + mode=mode, + model_type=self.model_type, em_type=self.em_type, + estimated_margins=self.estimated_margins, + regularization_mode=self.regularization_mode, + regularize=self.regularize_col, + lambda_=self.lambda_c, S=self.S_c, P=self.P_c, + Z=self.Z, W=self.W, + log_alpha_beta=self.log_beta, + mu=self.mu, nu=self.nu, + damping_factor=self.damping_factor, + dtype=self.dtype, + min_proba=self.min_proba_Z, min_float=self.min_float + ) + return X_Z + raise ValueError(self, "mode", mode) + + def _fit_single(self): + """ + A single iteration of EM + X_Z and X_W are the reduced matrices + the M step returns the complete data log likelihood + and the regularization term + """ + X_W = self._e_step(mode='row') + L1, reg1 = self._m_step(X_W, 'row') + + X_Z = self._e_step(mode='col') + L2, reg2 = self._m_step(X_Z, 'col') + return L1, reg1, L2, reg2 + + def _init_parameters(self): + # log_alpha, beta, gamma initialized in the first M-step + self.gamma = np.zeros((self.Kz, self.Kw), dtype=self.dtype) + self.log_alpha = np.zeros((self.Kz), dtype=self.dtype) + self.log_beta = np.zeros((self.Kw), dtype=self.dtype) + + if sp.sparse.issparse(self.X): + self.Xi_ = self.X.sum(1).A1 + self.X_j = self.X.sum(0).A1 + else: + self.Xi_ = self.X.sum(1) + self.X_j = self.X.sum(0) + + if self.estimated_margins: + self.mu 
= np.ones((self.N), dtype=self.dtype) + self.nu = np.ones((self.D), dtype=self.dtype) + else: + self.mu, self.nu = self.Xi_, self.X_j + np.clip(self.mu, self.min_margin, None, self.mu) + np.clip(self.nu, self.min_margin, None, self.nu) + + # first M step + self._m_step(None, mode='init') + + def _set_global_init_partition(self, given_Z, given_W): + # X_init : data matrix used for initial clustering of the rows + X_init = init.get_X_init( + self.X, + mode='row', + absent_nodes=None + ) + if self.regularize_row and self.multiplicative_init_rows: + X_init = similarity_matrices.init_transform( + X_init, self.S_r, self.power_multiplicative_init + ) + self.global_init_Z = init.get_init_partition( + X_init=X_init, + Kzw=self.Kz, + init_type=self.init_type, + T=1, + random_state=self.random_state, + n_jobs=self.n_jobs, + n_init=self.n_init_clustering, + given_partition=given_Z + ) + # X_init : data matrix used for initial clustering of the cols + X_init = init.get_X_init( + self.X, + mode='col', + absent_nodes=None + ) + if self.regularize_col and self.multiplicative_init_cols: + X_init = similarity_matrices.init_transform( + X_init, self.S_c, self.power_multiplicative_init + ) + self.global_init_W = init.get_init_partition( + X_init=X_init, + Kzw=self.Kw, + init_type=self.init_type, + T=1, + random_state=self.random_state, + n_jobs=self.n_jobs, + n_init=self.n_init_clustering, + given_partition=given_W + ) + + def _set_current_init_partition(self): + self.current_init_Z = init.apply_perturbation_to_init_clustering( + init_partition=self.global_init_Z, + random_state=self.random_state, + node_perturbation_rate=self.node_perturbation_rate, + dynamic=False + ) + self.current_init_W = init.apply_perturbation_to_init_clustering( + init_partition=self.global_init_W, + random_state=self.random_state, + node_perturbation_rate=self.node_perturbation_rate, + dynamic=False + ) + + def _init_q(self): + if self.em_type == 'VEM': + self.Z = init.init_ZW0( + Kzw=self.Kz, + 
partition=self.current_init_Z, + min_proba=self.min_proba_Z, + dtype=self.dtype + ) + self.W = init.init_ZW0( + Kzw=self.Kw, + partition=self.current_init_W, + min_proba=self.min_proba_W, + dtype=self.dtype + ) + elif self.em_type == 'CEM': + self.Z = init.init_ZW0( + Kzw=self.Kz, + partition=self.current_init_Z, + min_proba=0, + dtype='bool' + ) + self.Z = general.to_sparse(self.Z) + + self.W = init.init_ZW0( + Kzw=self.Kw, + partition=self.current_init_W, + min_proba=0, + dtype='bool' + ) + self.W = general.to_sparse(self.W) + + def update_mixture_proportions(self, mode, Z_, W_): + """Update mixture proportions (alpha, beta) for rows and columns""" + if mode in ['row', 'all']: + self.log_alpha = Z_ / self.N + np.clip(self.log_alpha, self.min_proba_mixture_proportions, None, self.log_alpha) + np.divide(self.log_alpha, self.log_alpha.sum(), self.log_alpha) + self.log_alpha = np.log(self.log_alpha + self.min_float) + if mode in ['col', 'all']: + self.log_beta = W_ / self.D + np.clip(self.log_beta, self.min_proba_mixture_proportions, None, self.log_beta) + np.divide(self.log_beta, self.log_beta.sum(), self.log_beta) + self.log_beta = np.log(self.log_beta + self.min_float) + + def icl(self): + """ + Returns the ICL values for each init of the model, + sorted from best to worse complete data log likelihood + """ + assert hasattr(self, 'best_parameters') + penality = ( + 0.5 * (self.Kz - 1) * np.log(self.N) + + 0.5 * (self.Kw - 1) * np.log(self.D) + + 0.5 * self.Kz * self.Kw * np.log(self.N * self.D) + ) + icls = [params[0] - penality for params in self.best_parameters] + return icls + + def _set_similarity_parameters(self, lambda_r, S_r, lambda_c, S_c): + """ + P_r and P_c : ndarray of length N and D such that + P_r[i] = True if we have prior knowledge about node i + that is there exists i' such that S_r[i, i'] > 0 + """ + if self.regularize_row: + assert lambda_r > 0 + similarity_matrices.check_similarity_matrix(S_r) + + self.lambda_r = lambda_r + self.S_r = 
general.to_sparse(S_r) + + # since S is symmetric + self.P_r = np.asarray((self.S_r != 0.).sum(0))[0] > 0 + else: + self.lambda_r = None + self.S_r = None + self.P_r = np.zeros(self.N, dtype='bool') + + if self.regularize_col: + assert lambda_c > 0 + similarity_matrices.check_similarity_matrix(S_c) + + self.lambda_c = lambda_c + self.S_c = general.to_sparse(S_c) + self.P_c = np.asarray((self.S_c != 0.).sum(0))[0] > 0 + else: + self.lambda_c = None + self.S_c = None + self.P_c = np.zeros(self.D, dtype='bool') + + def _set_mixture_proportions(self): + self.alpha = np.exp(self.log_alpha) + self.beta = np.exp(self.log_beta) + + def best_partition(self, mode, n_first=1): + """ + returns a list of tuple of partitions + if 1 partition: [(part_1), ..., (part_n_first)] + if 2 partitions: [(row_part_1, col_part_1), ..., + (row_part_n_first, col_part_n_first)] + + mode == 'likeelihood' returns the n_first best partitions + in terms of likelihood + + if mode == 'consensus: hbgf' or mode == 'consensus: cspa' + returns 1 consensus partition + """ + assert self.fitted + + best_partitions = metrics.sort_partitions( + self.all_iter_criterions, + [self.all_row_partitions, self.all_col_partitions], + n_first + ) + if mode == 'likelihood': + return best_partitions + + if mode in ['consensus: hbgf', 'consensus: cspa']: + best_row_partitions = [x[0] for x in best_partitions] + best_col_partitions = [x[1] for x in best_partitions] + + if mode == 'consensus: hbgf': + Z_consensus = consensus.hbgf( + best_row_partitions, + self.n_init_clustering_consensus + ) + W_consensus = consensus.hbgf( + best_col_partitions, + self.n_init_clustering_consensus + ) + elif mode == 'consensus: cspa': + Z_consensus = consensus.cspa( + best_row_partitions, + self.n_init_clustering_consensus + ) + W_consensus = consensus.cspa( + best_col_partitions, + self.n_init_clustering_consensus + ) + return [(Z_consensus, W_consensus)] + + raise ValueError diff --git a/dcblockmodels/models/utils/__init__.py 
b/dcblockmodels/models/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dcblockmodels/models/utils/absent_nodes.py b/dcblockmodels/models/utils/absent_nodes.py new file mode 100644 index 0000000..eb6c2d9 --- /dev/null +++ b/dcblockmodels/models/utils/absent_nodes.py @@ -0,0 +1,202 @@ +"""class to efficiently deal with the indexes of absent and appearing nodes""" + +import functools + +import numpy as np +from numba import njit + + +class AbsentNodes: + """ + class of nodes that dont interract with others at a given + time step, i.e. their in and out degrees are below/above threshold + + both : True for dSBM, False for dLBM + """ + + def __init__(self, X, threshold, mode): + + self.T = len(X) + + if mode == 'SBM': + (n_absent_tot, n_appearing_tot, + absent, appearing, n_abs, n_app) = present_and_absent_nodes(X, threshold, 'both') + + (self.n_absent_row_tot, + self.n_appearing_row_tot, + self.absent_row_nodes, + self.appearing_row_nodes, + self.n_absent_row_nodes, + self.n_appearing_row_nodes) = (n_absent_tot, n_appearing_tot, + absent, appearing, n_abs, n_app) + + (self.n_absent_col_tot, + self.n_appearing_col_tot, + self.absent_col_nodes, + self.appearing_col_nodes, + self.n_absent_col_nodes, + self.n_appearing_col_nodes) = (n_absent_tot, n_appearing_tot, + absent, appearing, n_abs, n_app) + + elif mode == 'LBM': + (self.n_absent_row_tot, + self.n_appearing_row_tot, + self.absent_row_nodes, + self.appearing_row_nodes, + self.n_absent_row_nodes, + self.n_appearing_row_nodes) = present_and_absent_nodes(X, threshold, 'row') + + (self.n_absent_col_tot, + self.n_appearing_col_tot, + self.absent_col_nodes, + self.appearing_col_nodes, + self.n_absent_col_nodes, + self.n_appearing_col_nodes) = present_and_absent_nodes(X, threshold, 'col') + + self.set_replace_vals_absent() + + def __str__(self): + return ('Row nodes : \n' + f'n_absent_row_nodes = {self.n_absent_row_nodes}\n' + f'absent_row_nodes = {self.absent_row_nodes}\n' + 
f'n_appearing_row_nodes = {self.n_appearing_row_nodes}\n' + f'appearing_row_nodes = {self.appearing_row_nodes}\n\n' + 'Column nodes : \n' + f'n_absent_col_nodes = {self.n_absent_col_nodes}\n' + f'absent_col_nodes = {self.absent_col_nodes}\n' + f'n_appearing_col_nodes = {self.n_appearing_col_nodes}\n' + f'appearing_col_nodes = {self.appearing_col_nodes}\n') + + def __repr__(self): + return self.__str__() + + def set_replace_vals_absent(self): + """ + previous timestep at which + node i was present + """ + self.inds_prev_rows = {} + self.inds_prev_cols = {} + self.ts_absent_rows = {} + self.ts_absent_cols = {} + + for absent_dic, inds_prev_dic, timesteps_absent in zip( + [self.absent_row_nodes, self.absent_col_nodes], + [self.inds_prev_rows, self.inds_prev_cols], + [self.ts_absent_rows, self.ts_absent_cols] + ): + inds_absent = functools.reduce( + np.union1d, (absent_dic[t] for t in range(self.T)) + ).astype('int64') + + for i in inds_absent: + ts_absent = [t for t in range(self.T) if i in absent_dic[t]] + inds_prev_dic[i] = inds_prev_absent(self.T, ts_absent) + timesteps_absent[i] = np.array(ts_absent) + + +def present_and_absent_nodes(X, threshold, axis): + """ + Assess nodes that are absent at each time, deduce present & appearing nodes at each time + + Absent nodes are nodes that dont interact with others at a given time step, i.e. their in and + out degrees are below/above threshold. 
+ """ + + if isinstance(X, np.ndarray): + T = X.shape[0] + if axis == 'row': + nodes_degrees = X.sum(2) + elif axis == 'col': + nodes_degrees = X.sum(1) + elif axis == 'both': + nodes_out_degrees = X.sum(1) + nodes_in_degrees = X.sum(2) + nodes_degrees = nodes_in_degrees + nodes_out_degrees + elif isinstance(X, list): + T = len(X) + if axis == 'row': + nodes_degrees = np.array([X[t].sum(1).A1 for t in range(T)]) + elif axis == 'col': + nodes_degrees = np.array([X[t].sum(0).A1 for t in range(T)]) + elif axis == 'both': + nodes_out_degrees = np.array([X[t].sum(0).A1 for t in range(T)]) + nodes_in_degrees = np.array([X[t].sum(1).A1 for t in range(T)]) + nodes_degrees = nodes_in_degrees + nodes_out_degrees + + absent = np.where(nodes_degrees <= threshold) + + absent_dic = {} + appearing_dic = {0: np.array([], dtype='int')} + + n_absent_tot, n_appearing_tot = 0, 0 + n_absent_dic, n_appearing_dic = {}, {} + for t in range(T): + ind0_t = np.where(absent[0] == t) + absent_dic[t] = absent[1][ind0_t] + n_absent_tot += len(absent_dic[t]) + if t >= 1: + appearing_t = [] + for i in absent_dic[t - 1]: + if i not in absent_dic[t]: + appearing_t.append(i) + n_appearing_tot += 1 + appearing_dic[t] = np.array(appearing_t, dtype='int') + + n_absent_dic[t] = len(absent_dic[t]) + n_appearing_dic[t] = len(appearing_dic[t]) + + return (n_absent_tot, n_appearing_tot, absent_dic, + appearing_dic, n_absent_dic, n_appearing_dic) + + +def replace_vals_absent(arr, inds_prev, ts_absent): + """ + Replace values for an absent node by previous values at which it was present + """ + for i, ts_absent_i, inds_prev_i in zip( + ts_absent.keys(), + ts_absent.values(), + inds_prev.values() + ): + arr[ts_absent_i, i] = arr[inds_prev_i, i] + + +@njit +def inds_prev_absent(T, inds): + """Computes the timesteps used to replace the values + of the smoothed arrays for absent nodes + + Parameters + ---------- + T : int + number timesteps + inds : list + indexes of the timesteps at which a a given node is absent 
+ + Returns + ------- + np.ndarray + array of same size as inds that maps each timestep + at which a a given node is absent to the previous timestep + at which the node was present + """ + inds_prev = [] + i_prev = -1 + len_inds = 0 + for i in range(T): + if i not in inds: + i_next = i + break + for i in range(T): + if i in inds: + len_inds += 1 + if i_prev != -1: + inds_prev.append(i_prev) + else: + inds_prev.append(i_next) + else: + i_prev = i + if len_inds == T: + break + return np.array(inds_prev, dtype=np.int64) diff --git a/dcblockmodels/models/utils/consensus.py b/dcblockmodels/models/utils/consensus.py new file mode 100644 index 0000000..b154e34 --- /dev/null +++ b/dcblockmodels/models/utils/consensus.py @@ -0,0 +1,63 @@ +"""Different kinds of consensus clustering based on a series of partitions of the data""" + +import numpy as np +from sklearn.cluster import SpectralCoclustering, SpectralClustering + + +def build_partition_matrix(partition_list): + """ + partition_list : list of hard partitions Z np.array of shape (T, N) + returns : (N*T, sum_r K_r) np.array + K_r : nb of clusters in partition r + """ + partitions = [(p.flatten() == k)[:, None] + for p in partition_list + for k in np.unique(p)] + return np.concatenate(partitions, axis=1).astype(int) + + +def hbgf(partition_list, n_init_clustering_consensus): + """ + consensus with bipartite graph W_{ij} = 1 if + item i is in cluster j. 
Then applies co-clustering + """ + H = build_partition_matrix(partition_list) + K = np.array([np.unique(Z).shape[0] for Z in partition_list]).max() + + spcl = SpectralCoclustering( + n_clusters=K, + svd_method='randomized', + n_svd_vecs=None, + mini_batch=False, + init='k-means++', + n_init=n_init_clustering_consensus, + n_jobs=-1, + random_state=None + ) + spcl.fit(H) + return spcl.row_labels_ + + +def cspa(partition_list, n_init_clustering_consensus): + """ + item based consensus : builds matrix C, + where c_{ij} is the number of times i and j + have been put in the same cluster in the partitions + in partition_list. Then clusters the items + using spectral clustering on C seen as an affinity matrix + """ + H = build_partition_matrix(partition_list) + K = np.array([np.unique(Z).shape[0] for Z in partition_list]).max() + R = len(partition_list) + C = (H.dot(H.T)) / R + spcl = SpectralClustering( + n_clusters=K, + eigen_solver=None, + random_state=None, + n_init=n_init_clustering_consensus, + affinity='precomputed', + assign_labels='kmeans', + n_jobs=-1 + ) + spcl.fit(C) + return spcl.labels_ diff --git a/dcblockmodels/models/utils/e_step.py b/dcblockmodels/models/utils/e_step.py new file mode 100644 index 0000000..cb9b50b --- /dev/null +++ b/dcblockmodels/models/utils/e_step.py @@ -0,0 +1,437 @@ +"""E-step-related general functions""" + +import numpy as np + +from . 
import general + +nax = np.newaxis + + +def log_density_t( + X_t, gamma, mode, + model_type, estimated_margins, dtype, min_float, + Z=None, W=None, + mu_t=None, nu_t=None +): + """ + deals with static and dynamic cases + """ + log_gamma = np.log(gamma + min_float) + if mode == 'row': + X_W = (X_t @ W).astype(dtype) + dens = X_W @ log_gamma.T + if model_type in ['without_margins', 'without_margins+free_gamma']: + W_ = general.get_class_counts(W).astype(dtype) + np.add(dens, - (gamma @ W_.T)[nax, :], out=dens) + elif ((model_type == 'with_margins' and estimated_margins) + or model_type == 'free_margins+constant_gamma'): + nu_t_W = (nu_t @ W).astype(dtype) + np.add(dens, - mu_t[:, nax] * (gamma @ nu_t_W.T)[nax, :], out=dens) + X_red = X_W + + elif mode == 'col': + X_Z = (X_t.T @ Z).astype(dtype) + dens = X_Z @ log_gamma + if model_type in ['without_margins', 'without_margins+free_gamma']: + Z_ = general.get_class_counts(Z).astype(dtype) + np.add(dens, - (Z_ @ gamma)[nax, :], out=dens) + elif ((model_type == 'with_margins' and estimated_margins) + or model_type == 'free_margins+constant_gamma'): + mu_t_Z = (mu_t @ Z).astype(dtype) + np.add(dens, - nu_t[:, nax] * (mu_t_Z @ gamma)[nax, :], out=dens) + X_red = X_Z + return dens, X_red + + +def _get_ZW_t_app( + dens, + em_type, + ind_appearing_nodes_t, + qzw_tp1, + ZW_tm1, + ZW_tp1, + log_alpha_beta, + log_pi_rho, + min_float, + min_proba +): + appearing = len(ind_appearing_nodes_t) > 0 + if not appearing: + return None + + ND = dens.shape[0] + dens_app = dens[ind_appearing_nodes_t, :] + if em_type == 'VEM': + log_prop_app = _get_log_prop_dynamic_vem( + 'appearing', + qzw_tp1, + ND, + log_pi_rho, + log_alpha_beta, + min_float + ) + ZW_app = dens_app + log_prop_app[ind_appearing_nodes_t, :] + ZW_app = _normalize_posterior( + ZW_app, + min_proba, + ind_absent_nodes_t=None, + dynamic=False + ) + elif em_type == 'CEM': + log_prop_app = _get_log_prop_dynamic_cem( + ZW_tm1, ZW_tp1, + log_pi_rho, log_alpha_beta + ) + ZW_app = 
dens_app + log_prop_app[ind_appearing_nodes_t, :] + ZW_app = _ce_step_static(ZW_app) + return ZW_app + + +def _get_log_prop_dynamic_vem( + mode, + qzw_tp1, + ND, + log_pi_rho, + log_alpha_beta, + min_float +): + """ + """ + if qzw_tp1 is not None: + # dkl_tp1[i, k] + dkl_tp1 = (qzw_tp1 * (np.log(qzw_tp1 + min_float) - log_pi_rho[nax, :, :])).sum(2) + + if mode in ['appearing', 't0']: + log_prop = log_alpha_beta[nax, :] - dkl_tp1 + else: + log_prop = log_pi_rho[nax, :, :] - dkl_tp1[:, :, nax] + else: + # case t = T + if mode in ['appearing', 't0']: + log_prop = np.tile(log_alpha_beta[nax, :], (ND, 1)) + else: + log_prop = log_pi_rho[nax, :, :] + return log_prop + + +def e_step_t_dynamic_vem( + mode, first_ts, model_type, estimated_margins, + X_t, gamma, + dtype, min_float, + qzw_tp1, log_pi_rho, log_alpha_beta, + ind_appearing_nodes_t, ind_absent_nodes_t, + min_proba, ZW_tm1, + Z=None, W=None, nu_t=None, mu_t=None +): + """ + E step at time t on either column or row for VEM + """ + dens, X_red = log_density_t( + X_t, gamma, mode, + model_type, estimated_margins, + dtype, min_float, + Z=Z, W=W, nu_t=nu_t, mu_t=mu_t + ) + ZW_t_app = _get_ZW_t_app( + dens, + 'VEM', + ind_appearing_nodes_t, + qzw_tp1, + None, + None, + log_alpha_beta, + log_pi_rho, + min_float, + min_proba + ) + log_prop_mode = 't0' if first_ts else 'regular' + ND = X_red.shape[0] + + log_prop = _get_log_prop_dynamic_vem( + log_prop_mode, + qzw_tp1, + ND, + log_pi_rho, log_alpha_beta, + min_float + ) + if first_ts: + ZW = log_prop + dens + ZW = _normalize_posterior( + ZW, min_proba, + ind_absent_nodes_t=ind_absent_nodes_t, + dynamic=False + ) + else: + qzw_t = log_prop + dens[:, nax, :] + qzw_t = _normalize_posterior( + qzw_t, min_proba, + ind_absent_nodes_t=ind_absent_nodes_t, + dynamic=True + ) + ZW = update_ZW_t_vem( + ZW_tm1, + qzw_t, + ind_appearing_nodes_t, + ZW_t_app, + ind_absent_nodes_t, + min_proba + ) + return ZW, X_red + + +def e_step_t_dynamic_cem( + mode, model_type, estimated_margins, + 
X_t, gamma, + ZW_tm1, ZW_tp1, + dtype, min_float, + log_pi_rho, log_alpha_beta, + ind_appearing_nodes_t, ind_absent_nodes_t, + Z=None, W=None, nu_t=None, mu_t=None +): + """ + E step at time t on either column or row for CEM + """ + dens, X_red = log_density_t( + X_t, gamma, mode, + model_type, estimated_margins, + dtype, min_float, + Z=Z, W=W, nu_t=nu_t, mu_t=mu_t + ) + ZW_t_app = _get_ZW_t_app( + dens, + 'CEM', + ind_appearing_nodes_t, + None, + ZW_tm1, + ZW_tp1, + log_alpha_beta, + log_pi_rho, + min_float, + None + ) + log_prop = _get_log_prop_dynamic_cem( + ZW_tm1, ZW_tp1, + log_pi_rho, log_alpha_beta + ) + ZW_t = log_prop + dens + ZW_t = _ce_step_dynamic_t( + ZW_t, + ind_appearing_nodes_t, + ZW_t_app, + ind_absent_nodes_t + ) + return ZW_t, X_red + + +def update_ZW_t_vem( + ZW_tm1, + qzw_t, + ind_appearing_t, + ZW_t_app, + ind_absent_nodes_t, + min_proba +): + """ + marginal posterior probas + ZW, qzw and ZW_t_app must be proba not log proba + """ + # here ZW_t are the posterior probabilities + # at the previous iteration of EM + with np.errstate(under='ignore'): + ZW_t = (ZW_tm1[:, :, nax] * qzw_t).sum(axis=1) + + # we replace the previous calculations for + # the appearing nodes with ZW_t_app + if ZW_t_app is not None: + ZW_t[ind_appearing_t] = ZW_t_app + + # to avoid numerical issues + ZW_t[ind_absent_nodes_t] = 1. + + # normalize + np.divide(ZW_t, ZW_t.sum(1, keepdims=True), ZW_t) + np.clip(ZW_t, min_proba, None, ZW_t) + np.divide(ZW_t, ZW_t.sum(1, keepdims=True), ZW_t) + + # set zero proba for absent nodes + # so that when we compute Z.T @ X @ W + # the absent nodes are not counted + # which is necessary to correctly update + # gamma (in case thr_absent_nodes != 0) + # and to compute the complete data log + # likelihood + ZW_t[ind_absent_nodes_t] = 0. 
+ return ZW_t + + +def e_step_static( + log_alpha_beta, + regularize, + regularization_mode, + lambda_, S, P, + damping_factor, + em_type, + X, gamma, mode, + model_type, estimated_margins, + dtype, min_float, min_proba, + Z, W, nu, mu +): + """ + E step on either column or row in static mode + """ + ZW = Z if mode == 'row' else W + log_prop = _get_log_prop_static( + log_alpha_beta, + regularize, + regularization_mode, + lambda_, S, P, ZW + ) + dens, X_red = log_density_t( + X, gamma, mode, + model_type, estimated_margins, + dtype, min_float, + Z=Z, W=W, nu_t=nu, mu_t=mu + ) + ZW_new = log_prop + dens + + if em_type == 'VEM': + ZW_new = _normalize_posterior( + ZW_new, + min_proba, + ind_absent_nodes_t=None, + dynamic=False + ) + ZW_new = _apply_damping(ZW, ZW_new, damping_factor) + elif em_type == 'CEM': + ZW_new = _ce_step_static(ZW_new) + ZW_new = general.to_sparse(ZW_new) + + return ZW_new, X_red + + +def _normalize_posterior( + qzw_ZW, min_proba, + ind_absent_nodes_t=None, dynamic=False +): + """ + qzw_ZW : could be qzw or ZW + """ + axis = 2 if dynamic else 1 + max_log = np.max(qzw_ZW, axis=axis, keepdims=True) + with np.errstate(divide='ignore', under='ignore'): + np.exp(qzw_ZW - max_log, qzw_ZW) + np.divide(qzw_ZW, qzw_ZW.sum(axis=axis, keepdims=True), qzw_ZW) + + # only fill un informative values + # for absent nodes to avoid numerical isssues + if ind_absent_nodes_t is not None: + qzw_ZW[ind_absent_nodes_t] = 1. + + np.clip(qzw_ZW, min_proba, None, qzw_ZW) + with np.errstate(divide='ignore', under='ignore'): + np.divide(qzw_ZW, qzw_ZW.sum(axis=axis, keepdims=True), qzw_ZW) + + return qzw_ZW + + +def _apply_damping(ZW_old, ZW_new, damping_factor): + if damping_factor is None: + return ZW_new + return damping_factor * ZW_new + (1. 
- damping_factor) * ZW_old + + +def _get_log_prop_static( + log_alpha_beta, + regularize, + regularization_mode, + lambda_, S, P, ZW +): + """ + returns a ND x Kzw array containing the log proportions + that is, for instance for the rows, log_alpha[i, k] if there + is no regularization, or S_r[i].dot(Z[:, k]) + log_alpha[i, k] + if there is regularization + """ + if regularize: + reg = general.to_dense(lambda_ * (S @ ZW)) + if regularization_mode == 'mixture': + not_P = ~ P + log_prop = not_P[:, nax] * log_alpha_beta[nax, :] + P[:, nax] * reg + elif regularization_mode == 'all': + log_prop = log_alpha_beta[nax, :] + reg + else: + log_prop = log_alpha_beta[nax, :] + + return log_prop + + +def _ce_step_dynamic_t(crit, ind_appearing_t, ZW_t_app, ind_absent_nodes_t): + """ + Computes the Classification E step in dynamic + where z_i = argmax_k crit + """ + indexes = np.asarray(crit.argmax(1)).T + ZW_t_new = np.zeros_like(crit, dtype='bool') + ND = crit.shape[0] + ZW_t_new[np.arange(ND), indexes] = True + + # we replace the previous calculations for + # the appearing nodes with ZW_t_app + if ZW_t_app is not None: + ZW_t_new[ind_appearing_t] = ZW_t_app + + # put constant proba for absent nodes + # for we do not care + ZW_t_new[ind_absent_nodes_t] = False + + return ZW_t_new + + +def _ce_step_static(crit): + """ + Computes the Classification E step in static + where z_i = argmax_k crit + """ + indexes = np.asarray(crit.argmax(1)).T + ZW_t_new = np.zeros_like(crit, dtype='bool') + ZW_t_new[np.arange(crit.shape[0]), indexes] = True + return ZW_t_new + + +def _get_log_prop_dynamic_cem( + ZW_tm1, ZW_tp1, + log_pi_rho, log_alpha_beta +): + """ + """ + ND = ZW_tm1.shape[0] if ZW_tm1 is not None else ZW_tp1.shape[0] + log_pi_rho = np.tile(log_pi_rho[nax], (ND, 1, 1)) + arange = np.arange(ND) + + if ZW_tm1 is not None: + ind_tm1 = ZW_tm1.argmax(1) + log_pi_prev = log_pi_rho[arange, ind_tm1, :] + log_prop = log_pi_prev + else: + # case t == 1 + log_prop = log_alpha_beta[nax] + + if 
ZW_tp1 is not None: + ind_tp1 = ZW_tp1.argmax(1) + log_pi_next = log_pi_rho[arange, :, ind_tp1] + log_prop = log_prop + log_pi_next + else: + # case t=T + pass + + return log_prop + + +def correct_ZW(ZW, min_proba): + """ + ZW must be a proba (not a log proba) + """ + np.clip(ZW, min_proba, None, ZW) + with np.errstate(divide='ignore', under='ignore'): + np.divide(ZW, ZW.sum(axis=2, keepdims=True), ZW) + return ZW diff --git a/dcblockmodels/models/utils/general.py b/dcblockmodels/models/utils/general.py new file mode 100644 index 0000000..7a0d42a --- /dev/null +++ b/dcblockmodels/models/utils/general.py @@ -0,0 +1,107 @@ +"""General functions that can be used in all modules""" + +import pickle + +import numpy as np +import scipy as sp + + +def get_class_counts(ZW): + """Get number of nodes in each cluster""" + res = ZW.sum(0) + if sp.sparse.issparse(ZW): + res = res.A1 + return res + + +def encode(ZW, Kzw): + """Encode cluster membership in a boolean matrix""" + ND = ZW.shape[0] + ZW_ = np.zeros((ND, Kzw), dtype='bool') + ZW_[np.arange(ND), ZW] = True + return ZW_ + + +def load_model(path): + """Load a given model from its directory""" + with open(path + '/model.pickle', 'rb') as f: + model = pickle.load(f) + return model + + +# TODO Only usable on dynamic X, check if keep specific in dlbm.py or generalize and keep here. +def check_X(X, is_graph, self_loops, directed=True): + """ + is_graph: square adjacency matrix? + directed: whether the graph is directed or not + this requires a symetric adjacency matrix. 
+ Only useful for (d)SBM + """ + T = len(X) + + for t in range(T): + X_dens_t = to_dense(X[t]) + assert (X_dens_t >= 0).all() + assert (X_dens_t > 0).any() + # assert X_dens_t.dtype in [np.int32, np.int64] + assert X_dens_t.ndim == 2 + + if is_graph: + assert X_dens_t.shape[1] == X_dens_t.shape[0] + + if not self_loops: + assert (np.diag(X_dens_t) == 0).all() + + if not directed: + assert np.array_equal(X_dens_t, symmetrize(X_dens_t)) + + +def symmetrize(a): + """Symmetrize a matrix by keeping only triangular part and copying it on lower triangular""" + a = np.tril(a) + return a + a.T - np.diag(a.diagonal()) + + +def get_delta(old, new): + """ + returns the criterion used to determine + if the model has converged from the + old and new likelihoods + """ + return abs((new - old) / new) + + +def to_dense(mat): + """From sp.sparse sparse matrix format to dense Numpy arrays""" + if sp.sparse.issparse(mat): + return mat.toarray() + + return mat + + +def to_sparse(mat): + """From dense Numpy arrays to sp.sparse sparse matrix format""" + if not sp.sparse.issparse(mat): + return sp.sparse.csr_matrix(mat) + + return mat + + +def compute_mixture_exact_icl(ZW, Kzw, ND): + """Compute ICL for either lines or columns""" + from scipy.special import loggamma + nax = np.newaxis + + cst = ( + loggamma(.5 * Kzw) * (1 - Kzw) - + .5 * Kzw * (Kzw + 1) * np.log(np.pi) - + loggamma(.5 * Kzw + ND) + ) + + icl_zw_t1 = loggamma(ZW[0].sum(0) + .5).sum() + + trans = (ZW[:-1, :, :, nax] * ZW[1:, :, nax, :]).sum((0, 1)) + trans_ = trans.sum(1) + icl_zw_t2 = - loggamma(trans_ + .5 * Kzw).sum() + loggamma(trans + .5).sum() + + return cst + icl_zw_t1 + icl_zw_t2 diff --git a/dcblockmodels/models/utils/init.py b/dcblockmodels/models/utils/init.py new file mode 100644 index 0000000..62b2893 --- /dev/null +++ b/dcblockmodels/models/utils/init.py @@ -0,0 +1,239 @@ +"""Functions linked to initialization of the model""" + +import numpy as np +from sklearn.cluster import KMeans +try: + from spherecluster 
import SphericalKMeans +except ImportError: + pass + +from . import general + +nax = np.newaxis + + +def init_ZW0(Kzw, partition, min_proba, dtype): + """ + Initializes Z or W with a static partition that is + not encoded (i.e. partition[i] = k). + Can be used in the static case or in dynamic CEM, + where we do not deal with transitions probabilites qzw + """ + ND = partition.shape[0] + ZW0 = np.full((ND, Kzw), min_proba, dtype=dtype) + if min_proba is not None: + val = 1. - min_proba + else: + val = True + ZW0[np.arange(ND), partition] = val + return ZW0 + + +def init_qzw(Kzw, partition, min_proba, dtype): + """ + Initializes qz or qw with a dynamic partition that is + not encoded (i.e. partition[t, i] = k). + Can be used in the dynamic case. + """ + T, ND = partition.shape + qzw = np.full((T, ND, Kzw, Kzw), min_proba, dtype=dtype) + arr_ND = np.arange(ND) + for t in range(1, T): + qzw[t, arr_ND, :, partition[t]] = 1. - min_proba + return qzw + + +def apply_perturbation_to_init_clustering( + init_partition, + random_state, + node_perturbation_rate, + cluster_perturbation_rate=0., + dynamic=False +): + """Apply some noise to the initial partitions""" + res_init_partition = init_partition.copy() + + if node_perturbation_rate > 0.: + if dynamic: + T, ND = init_partition.shape + Kzw = np.unique(init_partition.flatten()).shape[0] + n_nodes_shuffled = int(node_perturbation_rate * ND) + for t in range(T): + nodes_replaced = random_state.choice( + ND, size=n_nodes_shuffled, replace=False + ) + res_init_partition[t, nodes_replaced] = random_state.choice( + Kzw, size=n_nodes_shuffled + ) + if cluster_perturbation_rate > 0.: + res_init_partition[t] = _apply_cluster_perturbation( + res_init_partition[t], + Kzw, + random_state, + cluster_perturbation_rate + ) + else: + ND = init_partition.shape[0] + Kzw = np.unique(init_partition.flatten()).shape[0] + res_init_partition = init_partition.copy() + if node_perturbation_rate > 0.: + n_nodes_shuffled = int(node_perturbation_rate * 
ND) + nodes_replaced = random_state.choice( + ND, size=n_nodes_shuffled, replace=False + ) + res_init_partition[nodes_replaced] = random_state.choice( + Kzw, size=n_nodes_shuffled + ) + return res_init_partition + + +def _apply_cluster_perturbation( + init_partition, + Kzw, + random_state, + cluster_perturbation_rate +): + def permute_clusters(Z, k, l): + Z[Z == k] = -1 + Z[Z == l] = k + Z[Z == -1] = l + return Z + + partition = init_partition.copy() + + seen = [] + u = random_state.rand() + while u <= cluster_perturbation_rate: + not_permutated_clusters = np.setdiff1d(np.arange(Kzw), np.array(seen)) + if not_permutated_clusters.shape[0] >= 2: + k, k_ = random_state.choice(not_permutated_clusters, size=2, replace=False) + partition = permute_clusters(partition, k, k_) + seen += [k, k_] + u = random_state.rand() + else: + break + return partition + + +def get_X_init(X, mode, absent_nodes=None): + """ + returns the data matrix used for the initialization + of the clusters. Could be dynamic or not and provides + row or col concatenation for init of Z or W. The profiles + X[t, i, :] (or X[t, :, j]) of absent nodes at time t are + replaced with their mean profile over time to avoid + initialization issues due to absent nodes. 
+ """ + dynamic = True if isinstance(X, list) else (X.ndim == 3) + + if not dynamic: + if mode == 'row': + res = X + elif mode == 'col': + res = X.T + else: + T = len(X) + if isinstance(X, list): + # when X is a list of sparse matrices + X_ = np.concatenate([ + general.to_dense(X[t])[nax] for t in range(T)], + axis=0 + ) + else: + X_ = X.copy() + + # replaces row os cols of absent nodes by + # their mean values + # then stacks the columns/rows + if mode == 'row': + if absent_nodes is not None: + X_mean = X_.mean(0) + for t in range(T): + X_[t, absent_nodes[t], :] = X_mean[absent_nodes[t], :] + res = np.hstack([X_[t] for t in range(T)]) + elif mode == 'col': + if absent_nodes is not None: + X_mean = X_.mean(0) + for t in range(T): + X_[t, :, absent_nodes[t]] = X_mean[:, absent_nodes[t]].T + res = np.vstack([X_[t] for t in range(T)]).T + return res + + +def _random_init(Kzw, ND, random_state): + return random_state.randint(0, Kzw, size=(ND)) + + +def _given_partition_init(Kzw, given_partition): + clusters = np.unique(given_partition.flatten()) + assert clusters.shape[0] <= Kzw + assert np.in1d(clusters, np.arange(Kzw)).all() + # absent nodes can not be directly put in cluster K + # : put it instead in a random cluster for given_partition + # it will then be managed with threshold_absent_nodes + return given_partition + + +def _kmeans_init(X, K, n_init, random_state, n_jobs): + clustering = KMeans( + n_clusters=K, + random_state=random_state, + n_init=n_init) + return clustering.fit(X).labels_ + + +def _skmeans_init(X, K, n_init, random_state, n_jobs): + clustering = SphericalKMeans( + n_clusters=K, + init="k-means++", + n_init=n_init, + max_iter=150, + tol=1e-6, + n_jobs=n_jobs, + verbose=0, + random_state=random_state, + copy_x=True, + normalize=True) + return clustering.fit(X).labels_ + + +def get_init_partition( + X_init, Kzw, init_type, T, random_state, n_jobs, + n_init, given_partition): + """ + Returns the row or col partition used + for the first 
initialization of the algo. + init_partition is not one-hot-encoded: ie + init_partition[t, i] = k + Noise will be apllied later + """ + ND = X_init.shape[0] + if init_type == 'random': + part = _random_init(Kzw, ND, random_state) + elif init_type == 'kmeans': + part = _kmeans_init(X_init, Kzw, n_init, random_state, n_jobs) + elif init_type == 'skmeans': + part = _skmeans_init(X_init, Kzw, n_init, random_state, n_jobs) + elif init_type == 'given': + part = _given_partition_init(Kzw, given_partition) + else: + raise ValueError + if T > 1: + part = np.concatenate( + [part[nax] for _ in range(T)], + axis=0 + ) + return part + + +def pi_rho_update_mask(T, ND, absent_nodes, appearing_nodes): + """ + returns a (T-1) x ND boolean mask, set to False + for absent nodes and appearing nodes. + """ + mask = np.ones((T, ND), dtype='bool') + for t in range(T): + mask[t][absent_nodes[t]] = False + mask[t][appearing_nodes[t]] = False + mask = mask[1:] + return mask diff --git a/dcblockmodels/models/utils/m_step.py b/dcblockmodels/models/utils/m_step.py new file mode 100644 index 0000000..1fe5cff --- /dev/null +++ b/dcblockmodels/models/utils/m_step.py @@ -0,0 +1,364 @@ +"""M-step-related general functions""" + +import numpy as np + +from . import general + +nax = np.newaxis + + +def update_gamma( + mode, estimated_margins, + X, Z, W, X_red, mu, nu, + dtype, model_type +): + """ + In the static case or for parameters at a given time step + + Note that here, mu and nu can be the estimated margins or + the non estimated margins xi_, x_j + + X_red is the reduced matrix (X_W or X_Z), that has been computed + in the e step (row or col), where row or col is + determined by the 'mode' keyword. 
+ + For the first m step, at initialization, + there has not been any e step + previously, so the computation is different, which + is indicated by mode == 'init' + """ + # computes the numerator and denominator of gamma + if mode == 'init': + X_kl = general.to_dense(Z.T @ X @ W) + if model_type == 'with_margins': + mu_Z = Z.T @ mu + nu_W = W.T @ nu + margins_kl = np.outer(mu_Z, nu_W).astype(dtype) + + elif mode == 'row': + X_W = X_red + X_kl = general.to_dense(Z.T @ X_W) + if model_type == 'with_margins': + mu_Z = Z.T @ mu + if estimated_margins: + nu_W = W.T @ nu + else: + nu_W = X_W.sum(0) + nu_W = general.to_dense(nu_W) + margins_kl = np.outer(mu_Z, nu_W) + elif mode == 'col': + X_Z = X_red + X_kl = general.to_dense(X_Z.T @ W) + if model_type == 'with_margins': + nu_W = W.T @ nu + if estimated_margins: + mu_Z = Z.T @ mu + else: + mu_Z = X_Z.sum(0) + mu_Z = general.to_dense(mu_Z) + margins_kl = np.outer(mu_Z, nu_W) + + if model_type == 'without_margins': + Z_ = general.get_class_counts(Z) + W_ = general.get_class_counts(W) + ZW_kl = np.outer(Z_, W_) + den_gamma = ZW_kl + elif model_type == 'with_margins': + den_gamma = margins_kl + + return X_kl, den_gamma + + +def get_gamma(num_gamma, den_gamma, em_type, min_float, min_gamma): + """Get usable gamma depending on EM type, with minimum values for denominator & gamma itself""" + gamma = num_gamma / (den_gamma + min_float) + if em_type == 'CEM': + # very important in CEM to avoid empty clusters + np.clip(gamma, min_gamma, None, gamma) + return gamma + + +def get_denominator_mu(Z, W, nu, gamma): + """Compute denominator of mu""" + nu_W = W.T @ nu + den_mu = Z @ gamma @ nu_W + return den_mu + + +def get_denominator_nu(Z, W, mu, gamma): + """Compute denominator of nu""" + mu_Z = Z.T @ mu + den_nu = W @ gamma.T @ mu_Z + return den_nu + + +def update_pi_rho(ZW, qzw, prior, mask): + """ + Update pi/rho in VEM mode + """ + res = prior - 1. 
+ ( + ZW[:-1, :, :, nax] * + qzw[1:, :, :, :] * + mask[:, :, nax, nax] + ).sum((0, 1)) + return res + + +def correct_pi_rho(mat, min_proba, min_float): + """ + for pi/rho + """ + np.divide(mat, mat.sum(axis=1, keepdims=True) + min_float, mat) + np.clip(mat, min_proba, None, mat) + np.divide(mat, mat.sum(axis=1, keepdims=True) + min_float, mat) + return mat + + +def update_alpha_beta_dynamic( + ZW, n_absent_nodes, appearing_nodes, + absent_nodes_t0, min_float, min_proba, dtype +): + """ + Update alpha/beta in dynamic mode + """ + if n_absent_nodes > 0: + present_t0 = np.setdiff1d( + np.arange(ZW.shape[1]), + absent_nodes_t0 + ) + res = ZW[0][present_t0].sum(0).astype(dtype) + + T = len(appearing_nodes) + for t in range(1, T): + np.add(res, ZW[t][appearing_nodes[t]].sum(0).astype(dtype), res) + else: + res = ZW[0].sum(0).astype(dtype) + + np.divide(res, res.sum(), res) + np.clip(res, min_proba, None, res) + np.divide(res, res.sum(), res) + return np.log(res + min_float) + + +def update_pi_rho_cem(ZW, prior, mask, dtype): + """ + Update pi/rho in CEM mode + """ + pi_rho = (prior - 1.) + ( + ZW[:-1, :, :, nax] * + ZW[1:, :, nax, :] * + mask[:, :, nax, nax] + ).sum((0, 1)).astype(dtype) + return pi_rho + + +def smoothing_matrix(T, tau, dtype): + """ + Return the weights used for temporal smoothing + """ + if tau == 0: + return np.ones((T, T), dtype=dtype) / T + if tau == 1: + return np.eye((T), dtype=dtype) + + arr = np.arange(T) + t = arr[:, nax] + t_ = arr[nax, :] + delta = (t_ - t).astype(dtype) + + with np.errstate(under='ignore'): + # W = np.exp(- (tau / (1 - tau)) * delta ** 2) + W = np.exp(- (1. / ((1. / tau) - 1.)) * delta ** 2) + np.divide(W, W.sum(axis=1, keepdims=True), W) + return W + + +def compute_Lc_static_density( + model_type, estimated_margins, + gamma, mu, nu, Xi_, X_j, + X_kl, min_float +): + """ + Computes the terms in gamma, mu,nu in complete data log likelihood + """ + Lc = 0. 
+    log_gamma = np.log(gamma + min_float)
+    if model_type == 'without_margins':
+        Lc += (X_kl * log_gamma).sum()
+    elif model_type == 'with_margins':
+        if estimated_margins:
+            log_mu = np.log(mu + min_float)
+            log_nu = np.log(nu + min_float)
+            Lc += (
+                (X_kl * log_gamma).sum() +
+                Xi_.dot(log_mu) + X_j.dot(log_nu)
+            )
+        else:
+            Lc += (X_kl * log_gamma).sum()
+    return Lc
+
+
+def complete_data_loglikelihood_alpha_beta(
+        log_alpha_beta, ZW,
+        absent_nodes_t0,
+        appearing_nodes
+):
+    """
+    Computes the terms in alpha or beta of the log likelihood of the expected complete data
+    """
+    T = len(ZW)
+    ND = ZW[0].shape[0]
+
+    present_t0 = np.setdiff1d(
+        np.arange(ND),
+        absent_nodes_t0
+    )
+    nzw = ZW[0][present_t0].sum(0)
+
+    for t in range(1, T):
+        np.add(nzw, ZW[t][appearing_nodes[t], :].sum(0), out=nzw)
+
+    return nzw.dot(log_alpha_beta)
+
+
+def complete_data_loglikelihood_pi_rho(
+        em_type,
+        log_pi_rho,
+        ZW, qzw,
+        mask
+):
+    """
+    Computes the terms in pi or rho of the log likelihood of the expected complete data
+    """
+    if em_type == 'VEM':
+        res = (
+            ZW[:-1, :, :, nax] *
+            qzw[1:, :, :, :] *
+            log_pi_rho[nax, nax, :, :]
+        ).sum((2, 3))
+
+    elif em_type == 'CEM':
+        res = (
+            ZW[:-1, :, :, nax] *
+            ZW[1:, :, nax, :] *
+            log_pi_rho[nax, nax, :, :]
+        ).sum((2, 3))
+
+    return res.sum(where=mask)
+
+
+def get_regularization(
+        regularize_row, regularize_col, lambda_r, lambda_c, S_r, S_c, Z, W):
+    """
+    Returns the regularization term : lambda_c R_c + lambda_r R_r
+    """
+    regularization = 0.
+    if regularize_row:
+        regularization += (.5 * lambda_r * (S_r.sum() - S_r.multiply(Z @ Z.T).sum()))
+    if regularize_col:
+        regularization += (.5 * lambda_c * (S_c.sum() - S_c.multiply(W @ W.T).sum()))
+    return regularization
+
+
+def update_mu(Z, W, Xi_, nu, gamma, min_margin):
+    """
+    Update mu by combining numerator (X_i.)
and denominator + """ + den_mu = get_denominator_mu(Z, W, nu, gamma) + mu = Xi_ / den_mu + np.clip(mu, min_margin, None, mu) + return mu + + +def update_nu(Z, W, X_j, mu, gamma, min_margin): + """ + Update nu by combining numerator (X_.j) and denominator + """ + den_nu = get_denominator_nu(Z, W, mu, gamma) + nu = X_j / den_nu + np.clip(nu, min_margin, None, nu) + return nu + + +def _compute_static_mixture_part_Lc( + regularization_mode, regularize_row, regularize_col, P_r, P_c, + Z_, W_, log_alpha, log_beta +): + """ + Computes the terms in alpha and beta in complete data log likelihood + """ + # the terms in alpha and beta in Lc + if (not (regularize_row or regularize_col) or regularization_mode == 'all'): + Lc_part = Z_.dot(log_alpha) + W_.dot(log_beta) + elif regularization_mode == 'mixture': + not_P_r = ~ P_r + not_P_c = ~ P_c + Lc_part = ((not_P_r[:, nax] * log_alpha[nax, :]).sum() + + (not_P_c[:, nax] * log_beta[nax, :]).sum()) + return Lc_part + + +def compute_Lc_static( + model_type, estimated_margins, + regularization_mode, regularize_row, regularize_col, + P_r, P_c, log_alpha, log_beta, gamma, mu, nu, Xi_, X_j, + Z_, W_, X_kl, min_float +): + """ + computes the complete data log likelihood for a static HLBM (T = 1) + """ + # the terms in alpha and beta in Lc + Lc_mixture = _compute_static_mixture_part_Lc( + regularization_mode, regularize_row, regularize_col, + P_r, P_c, Z_, W_, log_alpha, log_beta + ) + # the terms in gamma (and mu, nu) in Lc + Lc_density = compute_Lc_static_density( + model_type, estimated_margins, + gamma, mu, nu, Xi_, X_j, + X_kl, min_float + ) + return Lc_mixture + Lc_density + + +def entropy_static(ZW, min_float): + """ + Entropy, static part + """ + # ZW[absent_nodes] = 0, so no pb + return - (ZW * np.log(ZW + min_float)).sum() + + +def entropy_appearing(ZW, appearing_nodes, min_float): + """ + Entropy, appearing nodes part (if any) + """ + entr = 0. 
+    for t, ind_app_t in appearing_nodes.items():
+        entr -= (ZW[t][ind_app_t] * np.log(ZW[t][ind_app_t] + min_float)).sum()
+    return entr
+
+
+def entropy_present(ZW, qzw, min_float):
+    """
+    Entropy, present nodes part
+    """
+    # ZW[absent_nodes] = 0, so no pb
+    return (ZW[:-1, :, :, nax] * qzw[1:] * np.log(qzw[1:] + min_float)).sum()
+
+
+def entropy_dynamic(
+        ZW,
+        qzw,
+        appearing_nodes,
+        min_float
+):
+    """
+    Total entropy
+    """
+    H = 0.
+    H += entropy_static(ZW, min_float)
+    if appearing_nodes is not None:
+        H += entropy_appearing(ZW, appearing_nodes, min_float)
+    H += entropy_present(ZW, qzw, min_float)
+    return H
diff --git a/dcblockmodels/models/utils/similarity_matrices.py b/dcblockmodels/models/utils/similarity_matrices.py
new file mode 100644
index 0000000..4ac42fd
--- /dev/null
+++ b/dcblockmodels/models/utils/similarity_matrices.py
@@ -0,0 +1,425 @@
+"""
+Methods that deal with the pairwise similarity matrices (i.e. the semi-supervision) used for HLBM
+"""
+
+from itertools import combinations, product
+
+import numpy as np
+import scipy as sp
+from numba import jit, prange
+
+
+def check_similarity_matrix(S):
+    """Checks on similarity matrix correctness"""
+    def is_sparse_mat_symetric(S):
+        S_res = S - S.T
+        return np.all(np.abs(S_res.data) < 1e-3)
+
+    assert is_sparse_mat_symetric(S)
+    assert (S[np.diag_indices_from(S)] == 0.).all()
+
+
+def init_transform(X, S, p):
+    """
+    Returns the np.array X transformed according
+    to a multiplicative initialization technique
+    that uses the similarity matrix S. The order p
+    can be used to propagate the similarity relationship
+    to p-neighborhoods.
+    Note that X_transformed is a dense array, such that
+    running kmeans or skmeans on it can be inefficient
+    in a high-dimensional setting
+    """
+    # transform similarity matrix S into a stochastic matrix W
+    W = np.clip(S.copy().toarray(), 0., None)
+    np.fill_diagonal(W, 1.)
+ + with np.errstate(divide='ignore'): + W_ = W.sum(1) + np.divide(W, W_, where=(W_ > 0.), out=W) + np.nan_to_num(W, copy=False, nan=0., posinf=0., neginf=None) + + if p > 1: + with np.errstate(under='ignore'): + W = np.linalg.matrix_power(W, p) + + X_transformed = W @ X + + np.clip(X_transformed, 1e-10, None, out=X_transformed) + return X_transformed + + +def build_S_mask(N, frac): + """ + Returns a random N x N symmetric boolean matrix + without self loops such that int(frac * N * (N - 1)) + of its elements are equal to True + Used for sampling a given fraction of all the + possible constraints + """ + def i_max(k): + return (k + 1) * N - int((k + 1) * (k + 2) / 2) - 1 + i_max_ = [i_max(k) for k in range(N)] + + S = np.zeros((N, N), dtype='bool') + n_pairs = int(N * (N - 1) / 2) + n_pairs_sampled = int(frac * n_pairs) + indexes = np.random.choice(n_pairs, size=n_pairs_sampled, replace=False) + indexes = sorted(indexes) + + # k, l the row and columns indexes corresponding + # to the edge number 'ind' in S + # k_0 the current row index, to speed up + # computations, since 'indexes' is sorted + k_0 = 0 + for ind in indexes: + for k in range(k_0, N): + i_min_k = i_max_[k - 1] + 1 if k != 0 else 0 + i_max_k = i_max_[k] + if i_min_k <= ind <= i_max_k: + l = ind - i_min_k + (k + 1) + k_0 = k + # print(ind, k, l, i_max_k, i_min_k) + S[k, l], S[l, k] = True, True + break + return S + + +def build_S_strat(ZW, frac, frac_noise=0., path_only=False): + """ + returns a similarity matrix build from the + true classes ZW, by sampling a fraction frac + of the nodes of each class and adding noise + to a fraction frac_noise of the nodes + """ + ND = ZW.shape[0] + Kzw = np.unique(ZW).shape[0] + S = np.zeros((ND, ND)) + for kzw in range(Kzw): + ind_kzw = np.where(ZW == kzw)[0] + nb_nodes = int(frac * ind_kzw.shape[0]) + ind_sampled = np.random.choice(ind_kzw, nb_nodes, replace=False) + if not path_only: + S[np.ix_(ind_sampled, ind_sampled)] = 1 + else: + for i_1, i_2 in 
zip(ind_sampled[:-1], ind_sampled[1:]): + S[i_1, i_2] = 1 + S[i_2, i_1] = 1 + if frac_noise > 0.: + nb_nodes_random = int(frac_noise * ND) + ind_rnd = np.random.choice(ND, nb_nodes_random, replace=False) + S[np.ix_(ind_rnd, ind_rnd)] = 1 - S[np.ix_(ind_rnd, ind_rnd)] + + S[np.diag_indices(ND)] = 0. + assert ((S - S.T) == 0).all() + return S + + +def build_S(ZW, frac, frac_noise=None): + """ + returns a similarity matrix build from the + true classes ZW, by sampling a fraction frac + of the nodes of each class + + consider using build_S_sparse + """ + ND = ZW.shape[0] + clusters = np.unique(ZW) + + # S_ZW[i, j] = 1 if i and j are in the same + # cluster in the partition ZW otherwise S_ZW[i, j] = -1 + S_ZW = np.zeros((ND, ND), dtype='int') + for k in clusters: + ind_k = np.where(ZW == k)[0] + z_k = np.zeros((ND, 1), dtype='int') + z_k[ind_k, :] = True + S_ZW += z_k.dot(z_k.T) + + S_ZW[S_ZW == 0] = -1 + S_mask = build_S_mask(ND, frac) + S = S_ZW * S_mask + + if frac_noise is not None: + S_mask_noise = build_S_mask(ND, frac_noise) + S = (S * (~S_mask_noise) - S_mask_noise * S) + + S[np.diag_indices(ND)] = 0. + assert ((S - S.T) == 0).all() + return S + + +def build_S_ssl(y): + """ + Given an array y of size N with integer values, where + y[i] = k : means that observation i is in cluster k + and y[i] = -1 : means that we have no information about + the cluster of observation i; + returns a N x N array S where: + S[i, j] = 1 if i and j are in the same cluster, + S[i, j] = -1 if they are in different clusters + S[i, j] = 0 if we have no information + Allows to compare classical semi-supervised + approaches to pairwise semi-supervised clustering. 
+ """ + N = y.shape[0] + clusters = np.unique(y) + + S = np.zeros((N, N), dtype='int') + for k in clusters: + if k >= 0: + ind_k = np.where(y == k)[0] + y_k = np.zeros((N, 1), dtype='int') + y_k[ind_k, :] = True + S += y_k.dot(y_k.T) + + S[S == 0] = -1 + ind_masked = np.where(y == -1)[0] + S[ind_masked] = 0 + S[:, ind_masked] = 0 + S[np.diag_indices(N)] = 0. + assert ((S - S.T) == 0).all() + return S + + +@jit(nopython=True) +def i_max_sparse(N, k): + """Temporary value computation for i_max_ (for numba parallel computation)""" + return (k + 1) * N - int((k + 1) * (k + 2) / 2) - 1 + + +@jit(nopython=True) +def indexes_S_mask_sparse(N, frac): + """ + returns a random N x N symmetric boolean matrix + without self loops such that int(frac * N * (N - 1)) + of its elements are equal to True + """ + i_max_ = [i_max_sparse(N, k) for k in prange(N)] # pylint: disable=not-an-iterable + + n_pairs = int(N * (N - 1) / 2) + n_pairs_sampled = int(frac * n_pairs) + indexes = np.random.choice(n_pairs, size=n_pairs_sampled, replace=False) + indexes = sorted(indexes) + + row_indexes, col_indexes = [], [] + + # k, l the row and columns indexes corresponding + # to the edge number 'ind' in S + # k_0 the current row index, to speed up + # computations, since 'indexes' is sorted + k_0 = 0 + for ind in indexes: + for k in range(k_0, N): + i_min_k = i_max_[k - 1] + 1 if k != 0 else 0 + i_max_k = i_max_[k] + if i_min_k <= ind <= i_max_k: + l = ind - i_min_k + (k + 1) + k_0 = k + # print(ind, k, l, i_max_k, i_min_k) + # S[k, l], S[l, k] = True, True + row_indexes.append(k) + col_indexes.append(l) + + row_indexes.append(l) + col_indexes.append(k) + break + + return row_indexes, col_indexes + + +def build_S_mask_sparse(n, row_indexes, col_indexes): + """Build a mask on a sparse matrix""" + S = sp.sparse.dok_matrix((n, n), dtype='bool') + dic_indexes = zip(row_indexes, col_indexes) + dic_values = (1 for _ in range(len(row_indexes))) + update_dic = dict(zip(dic_indexes, dic_values)) + 
S._update(update_dic) # pylint: disable=W0212 + return S.tocsr() + + +def build_S_sparse(ZW, frac, stratified=False): + """ + returns a similarity matrix build from the + true classes ZW, by sampling a fraction frac + of all the possible ML and CL relationships (stratified = True) + or by sampling a fraction frac of the nodes of each class + + Caution : the scales for frac are very different depending + on the value of wether stratified is true or not + """ + ND = ZW.shape[0] + clusters = np.unique(ZW) + + # S_ZW[i, j] = 1 if i and j are in the same + # cluster in the partition ZW otherwise S_ZW[i, j] = -1 + S_ML = sp.sparse.csr_matrix((ND, ND), dtype='bool') + S_CL = sp.sparse.csr_matrix((ND, ND), dtype='bool') + + # build S_ML and S_CL that conatain all relationships + if stratified: + # stores sampled indices for S + inds = {} + for k in clusters: + ind_k = np.where(ZW == k)[0] + n_nodes_k = len(ind_k) + n_nodes_sampled = int(frac * n_nodes_k) + ind_k = np.random.choice(ind_k, size=n_nodes_sampled, replace=False) + inds[k] = ind_k + + # build S_ML and S_CL + for k, ind_k in inds.items(): + z_k = np.zeros((ND, 1), dtype='bool') + z_k[ind_k, :] = True + + # z_k_bar[i] = 1 if i is known not to be in cluster k, 0 otherwise + z_k_bar = np.zeros((ND, 1), dtype='bool') + for k_, ind_k_ in inds.items(): + if k_ != k: + z_k_bar[ind_k_, :] = True + + z_k = sp.sparse.csr_matrix(z_k) + z_k_bar = sp.sparse.csr_matrix(z_k_bar) + S_ML += z_k * z_k.T + S_CL += z_k * z_k_bar.T + + S = (S_ML.astype('int') - S_CL.astype('int')) + S[np.diag_indices_from(S)] = 0 + else: + for k in clusters: + ind_k = np.where(ZW == k)[0] + z_k_ = np.zeros((ND, 1), dtype='bool') + z_k_[ind_k, :] = True + + # z_k[i] = 1 if i is known to be in cluster k, 0 otherwise + # z_k_bar[i] = 1 if i is known not to be in cluster k, 0 otherwise + z_k = sp.sparse.csr_matrix(z_k_) + z_k_bar = sp.sparse.csr_matrix((1 - z_k_).astype('bool')) + S_ML += z_k * z_k.T + S_CL += z_k * z_k_bar.T + + # sample a fraction frac 
of the relatioships + # and builds the integer valued matrix S + # S_mask determines which relatiships are sampled + row_indexes, col_indexes = indexes_S_mask_sparse(ND, frac) + S_mask = build_S_mask_sparse(ND, row_indexes, col_indexes) + S = sp.sparse.csr_matrix.multiply( + S_ML.astype('int') - S_CL.astype('int'), + S_mask + ) + return S + + +def must_link_and_cannot_link_closure(S): + """ + Given a must link and cannot link matrix S + such that S[i, j] = 1 for ML, S[i, j] = -1 for CL + and S[i, j] = 0 otherwise, returns a matrix with + the transitive and reflexive closure of the ML + relationship and the closure of the CL relationship. + """ + def transitive_closure(list_tuples): + closure = set(list_tuples) + while True: + new_relations = set((x, w) for x, y in closure for q, w in closure if q == y) + closure_until_now = closure | new_relations + if closure_until_now == closure: + break + closure = closure_until_now + return closure + + if sp.sparse.issparse(S): + S = S.toarray() + + assert S.shape[0] == S.shape[1] + assert (S.T == S).all() + assert np.isin(np.unique(S), np.array([-1, 0, 1])).all() + + # builds S_res that contains the transitive closure + # of the ML relationship + S_tu = np.triu(S) + t_clos_ml = transitive_closure(list(zip(*np.where(S_tu > 0.)))) + S_res = np.zeros_like(S) + for i, j in t_clos_ml: + S_res[i, j], S_res[j, i] = 1., 1. 
+ + # builds the neighborhoods given by the transitive + # closure of the ML relationship + N = S.shape[0] + S_ = S_res + np.eye(N) + neigh_ml = [] + for i in range(N): + neigh = np.where(S_[i] > 0.)[0] + new_neigh = np.array([len(np.intersect1d(n, neigh)) == 0 for n in neigh_ml]).all() + if new_neigh: + neigh_ml.append(neigh) + + # add the CL relatioships between all pairs + # of nodes of two neighborhoods if there is + # a CL relationship between two nodes of + # these neighborhoods + for n1, n2 in combinations(neigh_ml, 2): + if (S[np.ix_(n1, n2)] < 0.).any(): + for i, j in product(n1, n2): + S_res[i, j], S_res[j, i] = -1, -1 + + return S_res + + +def normalize_S(S): + """ + Normalizes the similarity matrix S + as presented in the paper + """ + S_ = S.sum(0) + S_inv = np.zeros((S.shape[0])) + np.divide(1, S_, where=(S_ > 0.), out=S_inv) + root_D_inv = np.diag(np.sqrt(S_inv)) + return root_D_inv @ S @ root_D_inv + + +def similarity_discordance(ZW, S, weighted): + """ + Counts the proportion of ML or CL constraints + that are not satisfied in the partition ZW. + If ZW is the true partition, the returned value + represents the discordance between the true classes + and the given similarity information. + If ZW is the partition returned by the algorithm + without similarity matrix, the returned value + represents the information brought by the given + similarity matrix. + If ZW is the partition returned by the algorithm + with similarity matrix, the returned value + represents the proportion of constraints from the + similarity matrix that are not respected after + regularisation. 
+ """ + clusters = np.unique(ZW) + ND = ZW.shape[0] + not_S_ZW = sp.sparse.csr.csr_matrix((ND, ND), dtype='bool') + # not_S_ZW[i, j] = True if ZW[i] != ZW[j] else False + for k in clusters: + ind_k = np.where(ZW == k)[0] + z_k0 = np.zeros((ND, 1), dtype='bool') + z_k0[ind_k, :] = True + z_k = sp.sparse.csr.csr_matrix(z_k0) + not_z_k = sp.sparse.csr.csr_matrix(~z_k0) + not_S_ZW += z_k.dot(not_z_k.T) + + S_ZW = sp.sparse.csr.csr_matrix(~not_S_ZW.toarray()) # pylint: disable=E1130 + + S_ml = S.copy() + S_ml[S_ml < 0.] = 0. + S_cl = S.copy() + S_cl[S_cl > 0.] = 0. + S_cl.data = np.abs(S_cl.data) + + if weighted: + total_edges = np.abs(S.data).sum() + S_contraints = sp.sparse.csr_matrix.multiply(S_ml, not_S_ZW.astype('float')) + S_contraints += sp.sparse.csr_matrix.multiply(S_cl, S_ZW.astype('float')) + else: + total_edges = S.data.shape[0] + S_contraints = sp.sparse.csr_matrix.multiply(S_ml.astype('bool'), not_S_ZW) + S_contraints += sp.sparse.csr_matrix.multiply(S_cl.astype('bool'), S_ZW) + return S_contraints.data.sum() / total_edges diff --git a/dcblockmodels/models/utils/smoothing_schedule.py b/dcblockmodels/models/utils/smoothing_schedule.py new file mode 100644 index 0000000..8dfaed2 --- /dev/null +++ b/dcblockmodels/models/utils/smoothing_schedule.py @@ -0,0 +1,54 @@ +"""Smoothing schedule for dLBM""" + +import numpy as np +import matplotlib.pyplot as plt + + +class SmoothingSchedule: + """Smoothing schedule for dLBM""" + + def __init__(self, schedule_type, length, tau0=1e-3, x0=-6., x1=6.): + self.schedule_type = schedule_type + self.length = length + + self.x0 = x0 + self.x1 = x1 + self.tau0 = tau0 + + if self.schedule_type == 'sigmoid': + schedule = np.linspace(x0, x1, self.length) + # TODO check + # func = lambda x: 1. / (1. + np.exp(- x)) + # schedule = func(schedule) + schedule = 1. / (1. 
+ np.exp(- schedule)) + + elif self.schedule_type == 'linear': + schedule = np.linspace(tau0, 1., self.length) + + # TODO check whether this is desirable (keeping a last schedule value != 1 amounts to a sort + # of regularization) + schedule[-1] = 1. + + self.schedule = schedule + + def plot(self): + """Plot the schedule in Matplotlib""" + f, ax = plt.subplots() + ax.plot(self.schedule) + f.suptitle('Smoothing Schedule') + return ax + + def __str__(self): + s = ( + 'Smoothing schedule:\n' + f'schedule type : {self.schedule_type}\n' + f'{self.length} steps\n' + ) + if self.schedule_type == 'linear': + s += f'from {self.tau0} to 1.' + else: + s += f'sigmoid on [{self.x0}, {self.x1}]' + return s + + def __repr__(self): + return self.__str__() diff --git a/dcblockmodels/plot.py b/dcblockmodels/plot.py new file mode 100644 index 0000000..f656e55 --- /dev/null +++ b/dcblockmodels/plot.py @@ -0,0 +1,542 @@ +"""Plotting functions for model results""" + +import time +import warnings + +import numpy as np +import pandas as pd +import networkx as nx + +import prince + +import matplotlib.pyplot as plt +import seaborn as sns +import plotly.graph_objects as go + + +def plot_criterions(model, thr_decrease, i_start=0, i_end=-1, legend=True): + """ + Plots the criterions as a function of the iteration + number for each initialization of a model. + + In the case of (d)LBM models, two criteria are + computed for a given E step. We thus plot the + 2 * n_iter_tot values of this criterion. + + The iterations steps where there is a "decrease" in + the criterion are highlighed with a 'o' marker. 
+ thr_decrease determines by how much an iteration + must increase the likelihood to be considered "decreasing" + """ + assert thr_decrease > 0 + _f, ax = plt.subplots(figsize=(16, 5)) + pal = sns.color_palette('colorblind', n_colors=model.n_init) + + for init in range(model.n_init): + crits = model.all_iter_criterions[init] + if hasattr(model, 'all_intermediate_iter_criterions'): + interm_crits = model.all_intermediate_iter_criterions[init] + # all_crits = [interm[0], crit[0], interm[1], crit[1],...] + all_crits = np.array([j + for i in zip(interm_crits, crits) + for j in i]) + else: + all_crits = np.array(crits) + all_crits = all_crits[i_start: i_end] + + diff = all_crits[1:] - all_crits[:-1] + increase = (diff > -thr_decrease) + if not increase.all(): + iters_pb = np.where(~increase)[0] + 1 + ax.plot(iters_pb, all_crits[iters_pb], marker='o', color=pal[init], lw=0) + + ax.plot(all_crits, label=f'{init}', alpha=.9, color=pal[init], lw=2.) + if legend: + ax.legend(loc='best', title='initializations', fancybox=True) + return ax + + +def CA_plot(X, Z, W, absent_row_nodes=None, absent_col_nodes=None, ax=None): + """ + plots the projction of the rows and the columns of + the matrix X onto the factorial plane found by + correspondance analysis. 
+ """ + warnings.filterwarnings("ignore", category=FutureWarning) + + assert X.ndim == 2 + + N, D = X.shape + present_row = np.setdiff1d(np.arange(N), absent_row_nodes) + present_col = np.setdiff1d(np.arange(D), absent_col_nodes) + X_ = X[np.ix_(present_row, present_col)] + X_ = X_ + 1e-10 + N_, D_ = X_.shape + + if N == D and not np.array_equal(present_row, present_col): + print('Special case: SBM with different absent row and col nodes.') + + # for directed SBM with different absent + # row and col nodes, X_ is not square + # and W_ is taken from Z with absent col nodes + Z_ = Z[present_row] + if W is not None: + W_ = W[present_col] + else: + W_ = Z[present_col] + + if ax is None: + _f, ax = plt.subplots(1, 2, figsize=(10, 5)) + row_clusters = np.unique(Z_) + col_clusters = np.unique(W_) + n_clusters_tot = row_clusters.shape[0] + col_clusters.shape[0] + + ca = prince.CA( + n_components=10, + n_iter=30, + copy=True, + check_input=True, + engine='auto', + random_state=42 + ) + + df = pd.DataFrame(X_) + ca = ca.fit(df) + row_factor_score = ca.row_coordinates(df).values + col_factor_score = ca.column_coordinates(df).values + pal = sns.color_palette('colorblind', n_colors=n_clusters_tot) + pal_rows = pal[:row_clusters.shape[0]] + pal_cols = pal[row_clusters.shape[0]:] + + for k in row_clusters: + ix = np.where(Z_ == k)[0] + prop = 100 * ix.shape[0] / N_ + ax[0].scatter(row_factor_score[ix, 0], + row_factor_score[ix, 1], + c=[pal_rows[k]], + label=f'{k}: {prop:.0f}%', + edgecolors='black', + alpha=.8, s=200) + for l in col_clusters: + ix = np.where(W_ == l)[0] + prop = 100 * ix.shape[0] / D_ + ax[1].scatter(col_factor_score[ix, 0], + col_factor_score[ix, 1], + c=[pal_cols[l]], + label=f'{l}: {prop:.0f}%', + edgecolors='black', + alpha=.8, s=200) + + exp_var = 100 * np.array(ca.explained_inertia_) + s = 'explained inertia {:.2f}%' + for i in range(2): + ax[i].set_xlabel(s.format(exp_var[0])) + ax[i].set_ylabel(s.format(exp_var[1])) + + for x, y, name in 
zip(row_factor_score[:, 0], + row_factor_score[:, 1], + df.index.values): + ax[0].text(x, y, name) + for x, y, name in zip(col_factor_score[:, 0], + col_factor_score[:, 1], + df.columns.values): + ax[1].text(x, y, name) + + ax[0].legend(title='clusters') + ax[1].legend(title='clusters') + warnings.filterwarnings('default', category=FutureWarning) + return ax + + +def plot_reorganised_matrix(X, Z, logscale=False, light=1.3, snapshot_titles=None): + """ + Reorganized matrix with line/column permutations + """ + T = X.shape[0] + _f, ax = plt.subplots(T, 1, figsize=(6, 6 * T)) + cmap = sns.cubehelix_palette(light=light, as_cmap=True) + for t in range(T): + if snapshot_titles is not None: + ax[t].title.set_text(snapshot_titles[t]) + indices = np.argsort(Z[t].astype(int)) + X_reorg = X[t, indices, :] + X_reorg = X_reorg[:, indices] + df_plot = pd.DataFrame(X_reorg) + if logscale: + df_plot = np.log10(df_plot + 1) # pour logscale + sns.heatmap(df_plot, ax=ax[t], cmap=cmap, linewidths=.005) + # plots the lines that separates the blocks + _, unique_indices = np.unique(Z[t], return_counts=True) + x_indices = np.cumsum(unique_indices) + for x in x_indices: + ax[t].axvline(x, linewidth=2.5) + ax[t].axhline(x, linewidth=2.5) + + +def plot_connectivity_matrix(gamma, subclusters=None): + """ + Row and column normalized connectivity matrix + at a given time step (or constant connectivity) + """ + if subclusters is not None: + gamma = gamma[np.ix_(subclusters, subclusters)] + + Kz, Kw = gamma.shape + _f, ax = plt.subplots(1, 2, figsize=(2 * 6, 5)) + cmap = sns.cubehelix_palette(light=1, as_cmap=True) + + g1 = gamma / gamma.sum(axis=1, keepdims=True) + g2 = gamma / gamma.sum(axis=0, keepdims=True) + + sns.heatmap(g1, cmap=cmap, ax=ax[0], annot=True, fmt='.2f') + sns.heatmap(g2, cmap=cmap, ax=ax[1], annot=True, fmt='.2f') + + for y in range(Kz + 2): + ax[0].axhline(y, linewidth=2.5) + for x in range(Kw + 2): + ax[1].axvline(x, linewidth=2.5) + + if subclusters is not None: + for i in 
range(2): + ax[i].set_xticklabels(subclusters) + ax[i].set_yticklabels(subclusters) + + for i in range(2): + ax[i].set_xlabel('destination cluster') + ax[i].set_ylabel('origin cluster') + ax[i].set_title('destination connection profiles in %') + + +def to_gephi(X, Z=None, filename=None, model=None): + """ + Plot the matrix in the Gephi (Open Graph Viz Platform) format + """ + assert X.ndim == 2 + + if not filename: + filename = f'graph_{time.time()}' + + G = nx.from_numpy_matrix(np.matrix(X, dtype=[('weight', int)])) + deg = X.sum(axis=0) + for i in G.nodes: + G.node[i]['node_degree'] = deg[i] + + if Z is not None: + assert Z.ndim == 1 + for i in G.nodes: + G.node[i]['true_cluster'] = str(int(Z[i])) + if model: + for i in G.nodes: + G.node[i]['found_cluster'] = str(int(model.best_Z[0, i])) + + nx.write_gexf(G, filename + '.gexf') + + +def plot_alluvial(Z, df_stations=None, dates=None, dataset='mtr'): + """ + Plot the clusters in an alluvial graph (only lines or columns at a time) + """ + + def parse_stations_bart(stat_numbers): + stat_numbers = np.array(list(stat_numbers)).astype('int') + quadrigrams = df_stations.iloc[stat_numbers]['quadrigram'].values + res = 'Stations : ' + " ".join(quadrigrams) + "" + return res + + def parse_stations_mtr(stat_numbers): + """ + from a set of stations to an htlm + formated string that describes the nodes + transitionning between two clusters + """ + stat_numbers = np.array(list(stat_numbers)).astype('int') + d = pd.DataFrame({'station_number': stat_numbers}) + + d = df_stations.merge( + d, + on='station_number', + how='inner' + ).groupby('line_').agg({'station_code': list}).reset_index().values + + res = 'Stations :' + for l in range(d.shape[0]): + res += '' + d[l, 0] + '' + '' + trigrams = np.unique(d[l, 1]) + for i, x in enumerate(trigrams): + if (i > 0) and (i % 5 == 0): + res += '' + res += ' ' + x + res += '' + return res + + if dates is not None: + assert len(dates) == Z.shape[0] + + T = Z.shape[0] + K_values = 
np.sort(np.unique(Z.ravel()).astype(int)) + K = K_values.shape[0] + + trans = [set(np.where(np.logical_and(Z[t] == s, Z[t + 1] == d))[0]) + for t in range(T - 1) + for s in K_values + for d in K_values] + + # colors_plotly = ['red', 'green', 'blue', 'yellow', 'orange'] + colors_plotly = np.array(['aliceblue', 'antiquewhite', 'aqua', 'aquamarine', 'azure', + 'beige', 'bisque', 'black', 'blanchedalmond', 'blue', + 'blueviolet', 'brown', 'burlywood', 'cadetblue', + 'chartreuse', 'chocolate', 'coral', 'cornflowerblue', + 'cornsilk', 'crimson', 'cyan', 'darkcyan', + 'darkmagenta', 'darkolivegreen', 'darkorange', + 'darkorchid', 'darkred', 'darksalmon', 'darkseagreen']) + + # Nodes + if dates is not None: + if dataset == 'mtr': + label = [f"K{k}, {dates[t]}" + for t in range(T) + for k in K_values] + elif dataset == 'bart': + label = [f"K{k} {dates[t][1]}" + for t in range(T) + for k in K_values] + else: + label = [f"K{k} t{t}" + for t in range(T) + for k in K_values] + node_color = np.random.choice(colors_plotly, K, replace=False).tolist() * T + + # Links + source = [s + t * K + for t in range(T - 1) + for s in range(K) + for d in range(K)] + target = [d + (t + 1) * K + for t in range(T - 1) + for s in range(K) + for d in range(K)] + + value = np.zeros((K * K * (T - 1))) + for t in range(T - 1): + for s in range(K): + for d in range(K): + flow_tsd = len(trans[t * K**2 + s * K + d]) + value[t * K**2 + s * K + d] = flow_tsd if flow_tsd > 0 else -1 + value = value.astype(int) + + link_colors = ['grey' if s == d else 'black' + for t in range(T - 1) + for s in range(K) + for d in range(K)] + + if df_stations is not None: + if dataset == 'mtr': + link_label = [parse_stations_mtr(x) + if len(x) > 0 else 'None' + for x in trans] + elif dataset == 'bart': + link_label = [parse_stations_bart(x) + if len(x) > 0 else 'None' + for x in trans] + else: + link_label = [str(x) if len(x) > 0 else 'None' for x in trans] + + if dates is not None: + if dataset == 'mtr': + title = ('MTR 
Network sum-up Station groups evolution ' + f'from {dates[0]} to {dates[-1]}' + ) + elif dataset == 'bart': + title = ('BART Network sum-up Station' + ' groups evolution ' + f'from {dates[0][0]} {dates[0][1]}h ' + f'to {dates[-1][0]} {dates[-1][1]}h' + ) + else: + title = f'Clusters evolution over {T} snapshots' + + # Layout + layout = dict(title=title, font=dict(size=12), height=700, width=1200) + + # Figure + f = go.Figure(data=[go.Sankey( + + valueformat=".f", + valuesuffix=" stations", + + node=dict( + pad=30, + thickness=10, + line=dict(color="black", width=0.5), + label=label, + color=node_color, + hoverlabel=dict( + bgcolor=node_color, + font=dict( + size=15 + ) + ) + ), + link=dict( + source=source, + target=target, + value=value, + label=link_label, + color=link_colors, + hoverlabel=dict( + bgcolor='white', + font=dict( + size=12 + ) + ) + ), + textfont=dict( + size=7, + color='black' + ) + + )], + layout=layout + ) + + return f + + +def plot_gamma(gamma, dates=None, step=None, show_cluster_ids=True): + """ + Plot the gamma matrix in Matplotlib + """ + assert gamma.ndim == 3 + if dates is not None: + assert gamma.shape[0] == len(dates) + if step is None: + step = 1 + + K = gamma.shape[1] + T = gamma.shape[0] + time_range = np.arange(0, T, step) + dates = [d for i, d in enumerate(dates) if i % step == 0] + _f, ax = plt.subplots(K, K, figsize=(5 * K, 5 * K), sharex=True, sharey=True) + + for k in range(K): + if show_cluster_ids: + ax[k, 0].set_ylabel(f'from cluster {k}') + ax[K - 1, k].set_xlabel(f'to cluster {k}') + ax[0, k].set_xlabel(f'to cluster {k}') + ax[0, k].xaxis.set_label_position('top') + for l in range(K): + ax[k, l].plot(gamma[:, k, l]) + if dates is not None: + ax[k, l].set_xticks(time_range) + ax[k, l].set_xticklabels(dates) + + +def plot_margins(model, always_present, dates=None, subset=None): + """ + always_present = utils.get_always_present_nodes() + subset : subset of nodes to be considered + dates : ndarray of strings of dates + """ + _f, 
ax = plt.subplots(2, 1, figsize=(16, 12)) + + if subset is not None: + always_present = np.intersect1d(subset, always_present) + + ax[0].plot(model.best_mu[:, always_present]) + ax[1].plot(model.best_nu[:, always_present]) + + if dates is not None: + ax[0].set_xticklabels(dates) + ax[1].set_xticklabels(dates) + + +def plot_mu_nu_during_optim(debug_values_mu_nu, indexes=None): + """ + Plots mu_{i}^t^(c) as a function of (c), + for each timestep t, each i in indexes and each run + """ + n_init = len(debug_values_mu_nu) + T = debug_values_mu_nu[0][0].shape[0] + _f, ax = plt.subplots(n_init, 1, figsize=(8, 4 * n_init)) + + if indexes is None: + indexes = range(debug_values_mu_nu[0][0].shape[1]) + for init in range(n_init): + mu_nu = np.array(debug_values_mu_nu[init]) + mu_nu = mu_nu[:, :, ] if indexes is not None else mu_nu + for t in range(T): + for i in indexes: + ax[init].plot( + mu_nu[:, t, i], + label=f'mu/nu_t={t},i={i}' + ) + ax[init].legend() + ax[init].title.set_text(f'Run {init + 1}') + plt.tight_layout() + + +def plot_alphas_during_optim(debug_values_alpha_beta): + """ + Plots \alpha_{k}^(c) as a function of (c), + for each cluster and run + """ + n_init = len(debug_values_alpha_beta) + Kzw = debug_values_alpha_beta[0][0].shape[0] + _f, ax = plt.subplots(n_init, 1, figsize=(8, 4 * n_init)) + + for init in range(n_init): + alpha_beta = np.exp(debug_values_alpha_beta[init]) + for k in range(Kzw): + ax[init].plot( + alpha_beta[:, k], + label=f'alpha/beta_{k}' + ) + ax[init].legend() + ax[init].title.set_text(f'Run {init + 1}') + plt.tight_layout() + + +def plot_pi_rho_during_optim(debug_values_pi_rho): + """ + Plots \alpha_{k}^(c) as a function of (c), + for each cluster and run + """ + n_init = len(debug_values_pi_rho) + Kzw = debug_values_pi_rho[0][0].shape[0] + _f, ax = plt.subplots(n_init, 1, figsize=(8, 4 * n_init)) + + for init in range(n_init): + pi_rho = np.exp(debug_values_pi_rho[init]) + for k in range(Kzw): + for l in range(Kzw): + ax[init].plot( + 
pi_rho[:, k, l], + label=f'pi_rho_{(k, l)}' + ) + ax[init].legend() + ax[init].title.set_text(f'Run {init + 1}') + plt.tight_layout() + + +def plot_gamma_during_optim(debug_values_gamma): + """ + Plots gamma_{kl}^(c) as a function of (c), + for each pair of cluster k and l and run + """ + assert debug_values_gamma[0][0].ndim == 2 + + n_init = len(debug_values_gamma) + Kzw = debug_values_gamma[0][0].shape[0] + _f, ax = plt.subplots(n_init, 1, figsize=(8, 4 * n_init)) + + for init in range(n_init): + gamma = np.exp(debug_values_gamma[init]) + for k in range(Kzw): + for l in range(Kzw): + ax[init].plot( + gamma[:, k, l], + label=f'gamma_{(k, l)}' + ) + ax[init].legend() + ax[init].title.set_text(f'Run {init + 1}') + plt.tight_layout() diff --git a/dcblockmodels/tests/__init__.py b/dcblockmodels/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dcblockmodels/tests/test_dlbm.py b/dcblockmodels/tests/test_dlbm.py new file mode 100644 index 0000000..86b2dd2 --- /dev/null +++ b/dcblockmodels/tests/test_dlbm.py @@ -0,0 +1,303 @@ +import os +import sys +import warnings + +import pathlib +import pytest + +import numpy as np +from sklearn.metrics import adjusted_rand_score + +from ..models.dlbm import dLBM +from .. import data +from ..models.utils import general +from ..models.utils.smoothing_schedule import SmoothingSchedule + + +sys.stderr = open(os.devnull, "w") + +warnings.filterwarnings("ignore", category=DeprecationWarning) + + +# Model test thresholds +ARI_ROW_F = { + 'easy': .4, + 'medium': .15, + 'hard': .1 +} +ARI_COL_F = ARI_ROW_F.copy() +ARI_ROW_T = ARI_ROW_F.copy() +ARI_COL_T = ARI_ROW_T.copy() + +# Model fixed params +smoothing_schedule = SmoothingSchedule('sigmoid', 20) + +debug_output = pathlib.Path(r'./dcblockmodels/model_debug_output') +random_state = None +n_jobs = -1 +verbose = 0 +debug_list = [] +n_iter_supp_smoothing = 5 + +diag_pi_init = 0.7 +diag_rho_init = 0.7 +prior_diagonal_pi = 0. +prior_diagonal_rho = 0. 
+ +max_iter = 100 +tol_iter = 1e-6 +min_float = 1e-15 +min_proba_Z, min_proba_W = .05, .05 +min_proba_mixture_proportions = 1e-2 # to avoid empty clusters +min_margin = 1e-10 +min_gamma = 1e-8 + +init_type = 'skmeans' # 'skmeans', 'kmeans' +given_mu, given_nu = None, None +given_Z, given_W = None, None +n_init_clustering = 10 +node_perturbation_rate = .15 +cluster_perturbation_rate = 0. +threshold_absent_nodes = 0 +blockmodel_params = { + "n_iter_min": 5, + "loc_random_margins": 1e-8, # mean and std used for the initialization + "scale_random_margins": 1e-3, # of the margins if self.type_init_margins == 'random' + "n_init_clustering_consensus": 100 # for best_partition() method of model +} +parameter_smoothing = True +type_init_margins = 'ones' + + +class TestDLBM: + + # Data variable params + # T, N, D, Kz, Kw, level, gamma_0, with_margins, with_absent_nodes + test_data_setups = [ + (10, 100, 120, 3, 4, 'easy', True, False), # incresing difficulty + (10, 100, 120, 3, 4, 'medium', True, False), # incresing difficulty + (10, 100, 120, 3, 4, 'hard', True, False), # incresing difficulty + (10, 100, 100, 3, 3, 'medium', True, False), # N = D, Kz = Kw + (10, 100, 120, 3, 4, 'medium', True, True), # Absent nodes + (100, 100, 120, 3, 4, 'medium', True, False), # T big + (10, 500, 800, 3, 4, 'medium', True, False) # N and D big + ] + + n_init = 10 + n_first = 2 # n_first partitions that will be checked in the test, must be <= n_int + + def fit_model( + self, + test_data, + sparse_X, + em_type, + dtype='float64'): + + (X, Z, W, T, N, D, Kz, Kw, + level, with_margins, with_absent_nodes) = test_data + + print('Test data = \n') + print('level', level) + print('with_margins', with_margins) + print('with_absent_nodes', with_absent_nodes) + + model_type = 'with_margins' if with_margins else 'without_margins' + + if sparse_X: + X_ = [general.to_sparse(X[t]) for t in range(T)] + else: + X_ = X.copy() + + model = dLBM( + model_type=model_type, + em_type=em_type, + 
parameter_smoothing=parameter_smoothing, + Kz=Kz, Kw=Kw, + n_init=TestDLBM.n_init, + model_id=1, + max_iter=max_iter, + type_init_margins=type_init_margins, + smoothing_schedule=smoothing_schedule.schedule, + n_iter_supp_smoothing=n_iter_supp_smoothing, + prior_diagonal_pi=prior_diagonal_pi, + prior_diagonal_rho=prior_diagonal_rho, + diag_pi_init=diag_pi_init, + diag_rho_init=diag_rho_init, + init_type=init_type, + n_init_clustering=n_init_clustering, + node_perturbation_rate=node_perturbation_rate, + cluster_perturbation_rate=cluster_perturbation_rate, + threshold_absent_nodes=threshold_absent_nodes, + min_proba_mixture_proportions=min_proba_mixture_proportions, + min_gamma=min_gamma, + min_margin=min_margin, + min_proba_Z=min_proba_Z, + min_proba_W=min_proba_W, + dtype=dtype, + blockmodel_params=blockmodel_params, + random_state=random_state, + tol_iter=tol_iter, + n_jobs=n_jobs, verbose=verbose, + debug_list=debug_list, + debug_output=debug_output + ) + model.fit( + X_, + given_Z=given_Z, given_W=given_W, + given_mu=given_mu, given_nu=given_nu + ) + return model, Z, W, level + + def assert_metrics(self, model, Z, W, level, n_first): + for init, (Z_model, W_model) in enumerate(model.best_partition(mode='likelihood', + n_first=n_first)): + ari_row_f = adjusted_rand_score(Z.flatten(), Z_model.flatten()) + ari_col_f = adjusted_rand_score(W.flatten(), W_model.flatten()) + + print(f'level = {level}, ari global = {(ari_row_f, ari_col_f)}') + + assert ari_row_f > ARI_ROW_F[level] + assert ari_col_f > ARI_COL_F[level] + + for t in range(model.T): + ari_row_t = adjusted_rand_score(Z[t], Z_model[t]) + ari_col_t = adjusted_rand_score(W[t], W_model[t]) + assert ari_row_t > ARI_ROW_T[level] + assert ari_col_t > ARI_COL_T[level] + + @pytest.fixture + def test_data(self, request): + (T, N, D, Kz, Kw, + level, with_margins, with_absent_nodes) = request.param + + model_type = 'LBM' + dimensions = {'N': N, 'D': D} + n_clusters = {'Kz': Kz, 'Kw': Kw} + + alphas_dirichlet = { + 
'very_easy': 10, + 'easy': 8, + 'medium': 6, + 'hard': 4 + } + diag_vals = { + 'diag': 0, + 'easy': .9, + 'medium': .75, + 'hard': .6 + } + + gamma0_level_dic = { + 'easy': 0.02, + 'medium': 0.01, + 'hard': 0.005 + } + + alpha = data.generate_initial_proportions(Kz, alphas_dirichlet[level]) + beta = data.generate_initial_proportions(Kw, alphas_dirichlet[level]) + prior_init = {'alpha': alpha, 'beta': beta} + + pi = data.generate_diag_transition_matrix(Kz, diag_vals[level]) + rho = data.generate_diag_transition_matrix(Kw, diag_vals[level]) + prior_trans = {'pi': pi, 'rho': rho} + + gamma_0 = gamma0_level_dic[level] + + # Data fixed params + + block_sparsity_matrix = None # \beta_{kl}^t of Matias + # block_sparsity_matrix = 0.1 * np.ones((Kz, Kw), dtype='float') + + constant_margins = True # True, False + start, stop, step = 1, 50, .1 + order_power_law = -1.5 # margins ~ Unif(start, stop)^order_power_law + # mu ~ AR1 : mu_{t+1} = N(a mu_t + c, sigma2) + # (c s.t. mu increasing if sigma2 = 0) + ar_margins, a_ar, sigma2_ar = False, 1.1, .1 + + # absent nodes + min_proba_t = .0 + max_proba_t = .2 + proba_absent = None + + directed = True + self_loops = True + dtype = 'int32' + + if with_margins: + mu, nu = data.generate_margins( + T, N, D, constant_margins, start, stop, step, + directed, order_power_law, + ar_margins, a_ar, sigma2_ar + ) + margins = {'mu': mu, 'nu': nu} + else: + margins = None + + noise_level_ = 0. 
+ + if Kz == 3 and Kw == 4: + gamma = gamma_0 * np.array([ + [1, 2, 4, 1], + [3, 1, 2, 3], + [2, 3, 1, 3] + ]) + elif Kz == 3 and Kw == 3: + gamma = gamma_0 * np.array([ + [1, 2, 3], + [3, 1, 2], + [2, 3, 1] + ]) + else: + raise ValueError + + if T > 1: + gamma = np.stack([gamma for _ in range(T)], axis=0) + if block_sparsity_matrix is not None: + block_sparsity_matrix = np.stack([block_sparsity_matrix for _ in range(T)], axis=0) + + if with_absent_nodes: + absent_row_nodes = data.sample_absent_nodes( + T, N, + min_proba_t=min_proba_t, + max_proba_t=max_proba_t, + proba_absent=proba_absent + ) + if not directed: + absent_col_nodes = absent_row_nodes.copy() + else: + absent_col_nodes = data.sample_absent_nodes( + T, D, + min_proba_t=min_proba_t, + max_proba_t=max_proba_t, + proba_absent=proba_absent + ) + else: + absent_row_nodes, absent_col_nodes = [], [] + + absent_nodes = { + 'absent_row_nodes': absent_row_nodes, + 'absent_col_nodes': absent_col_nodes + } + + X, Z, W = data.generate_data( + T, + model_type, + dimensions, + n_clusters, + prior_init, + prior_trans, + gamma, + with_margins, + margins, + self_loops, + directed, + noise_level_, + with_absent_nodes, + absent_nodes, + dtype, + block_sparsity_matrix + ) + + return (X, Z, W, T, N, D, Kz, Kw, + level, with_margins, with_absent_nodes) diff --git a/dcblockmodels/tests/test_dlbm_long.py b/dcblockmodels/tests/test_dlbm_long.py new file mode 100644 index 0000000..32eca5e --- /dev/null +++ b/dcblockmodels/tests/test_dlbm_long.py @@ -0,0 +1,21 @@ +import pytest +from .test_dlbm import TestDLBM + + +class TestLong(TestDLBM): + + @pytest.mark.parametrize('test_data', TestDLBM.test_data_setups, indirect=True) + @pytest.mark.parametrize('sparse_X', [False, True]) + @pytest.mark.parametrize('em_type', ['VEM', 'CEM']) + def test_fitted_model( + self, + test_data, + sparse_X, + em_type): + + model, Z, W, level = self.fit_model( + test_data, + sparse_X, + em_type, + ) + self.assert_metrics(model, Z, W, level, 
TestDLBM.n_first) diff --git a/dcblockmodels/tests/test_dlbm_short.py b/dcblockmodels/tests/test_dlbm_short.py new file mode 100644 index 0000000..53bf451 --- /dev/null +++ b/dcblockmodels/tests/test_dlbm_short.py @@ -0,0 +1,45 @@ +import pytest +from .test_dlbm import TestDLBM + + +class TestShort(TestDLBM): + + @pytest.mark.parametrize('test_data', [TestDLBM.test_data_setups[1]], indirect=True) + @pytest.mark.parametrize('sparse_X', [False, True]) + def test_sparse(self, test_data, sparse_X): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=sparse_X, + em_type='VEM' + ) + self.assert_metrics(model, Z, W, level, TestDLBM.n_first) + + @pytest.mark.parametrize('test_data', [TestDLBM.test_data_setups[1]], indirect=True) + @pytest.mark.parametrize('em_type', ['VEM', 'CEM']) + def test_em_type(self, test_data, em_type): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=False, + em_type=em_type + ) + self.assert_metrics(model, Z, W, level, TestDLBM.n_first) + + @pytest.mark.parametrize('test_data', [TestDLBM.test_data_setups[4]], indirect=True) + def test_absent(self, test_data): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=False, + em_type='VEM' + ) + self.assert_metrics(model, Z, W, level, TestDLBM.n_first) + + @pytest.mark.parametrize('test_data', [TestDLBM.test_data_setups[1]], indirect=True) + @pytest.mark.parametrize('dtype', ['float32', 'float64']) + def test_dtype(self, test_data, dtype): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=False, + em_type='VEM', + dtype=dtype + ) + self.assert_metrics(model, Z, W, level, TestDLBM.n_first) diff --git a/dcblockmodels/tests/test_hlbm.py b/dcblockmodels/tests/test_hlbm.py new file mode 100644 index 0000000..270686e --- /dev/null +++ b/dcblockmodels/tests/test_hlbm.py @@ -0,0 +1,266 @@ +import os +import sys +import warnings + +import pathlib +import pytest + +import numpy as np +from sklearn.metrics import 
adjusted_rand_score + +from ..models.hlbm import HLBM +from .. import data +from ..models.utils import general, similarity_matrices + + +sys.stderr = open(os.devnull, "w") + +warnings.filterwarnings("ignore", category=DeprecationWarning) + +# Model test thresholds +ARI_ROW = { + 'easy': .4, + 'medium': .3, + 'hard': .1 +} +ARI_COL = ARI_ROW.copy() + + +# Model fixed params +debug_output = pathlib.Path(r'./dcblockmodels/model_debug_output') +random_state = None +n_jobs = -1 +verbose = 0 +debug_list = [] +power_multiplicative_init = 1 # True, False +n_iter_supp_smoothing = 5 + +frac_r, frac_c = .01, .01 +frac_noise = 0. +regularization_mode = 'all' +lambda_0 = 2. +lambda_r, lambda_c = lambda_0, lambda_0 +S_r, S_c = None, None +damping_factor = .7 + +max_iter = 100 +tol_iter = 1e-6 +min_float = 1e-15 +min_proba_Z, min_proba_W = .05, .05 +min_proba_mixture_proportions = 1e-2 # to avoid empty clusters +min_margin = 1e-10 +min_gamma = 1e-8 + +init_type = 'skmeans' # 'skmeans', 'kmeans' +given_mu, given_nu = None, None +given_Z, given_W = None, None +n_init_clustering = 20 +node_perturbation_rate = .15 +cluster_perturbation_rate = 0. 
+threshold_absent_nodes = 0 +blockmodel_params = { + "n_iter_min": 5, + "loc_random_margins": 1e-8, # mean and std used for the initialization + "scale_random_margins": 1e-3, # of the margins if self.type_init_margins == 'random' + "n_init_clustering_consensus": 100 # for best_partition() method of model +} +parameter_smoothing = True +type_init_margins = 'ones' +dtype = 'float64' +self_loops = True + + +class TestHLBM: + # Data variable params + # T, N, D, Kz, Kw, level, gamma_0, with_margins + test_data_setups = [ + (100, 120, 3, 4, 'easy', True), # incresing difficulty + (100, 120, 3, 4, 'medium', True), # incresing difficulty + (100, 120, 3, 4, 'hard', True), # incresing difficulty + (100, 100, 3, 3, 'medium', True), # N = D + (500, 520, 3, 4, 'medium', True) # N and D big + ] + + n_init = 20 + n_first = 10 # n_first partitions that will be checked in the test, must be <= n_int + + def fit_model( + self, + test_data, + sparse_X, + estimated_margins, + regularize, + em_type, + multiplicative_init + ): + (X, Z, W, N, D, Kz, Kw, + level, with_margins) = test_data + + print('Test data = \n') + print('level', level) + print('with_margins', with_margins) + + model_type = 'with_margins' if with_margins else 'without_margins' + regularize_row, regularize_col = regularize, regularize + multiplicative_init_rows, multiplicative_init_cols = ( + multiplicative_init, + multiplicative_init + ) + S_r = general.to_sparse( + similarity_matrices.build_S(Z, frac_r, frac_noise).astype(dtype) + ) + S_c = general.to_sparse( + similarity_matrices.build_S(W, frac_c, frac_noise).astype(dtype) + ) + if sparse_X: + X_ = general.to_sparse(X) + else: + X_ = X.copy() + + model = HLBM( + Kz=Kz, Kw=Kw, + model_type=model_type, + estimated_margins=estimated_margins, + regularization_mode=regularization_mode, + regularize_row=regularize_row, regularize_col=regularize_col, + n_init=TestHLBM.n_init, + max_iter=max_iter, + em_type=em_type, + damping_factor=damping_factor, + 
multiplicative_init_rows=multiplicative_init_rows, + multiplicative_init_cols=multiplicative_init_cols, + power_multiplicative_init=power_multiplicative_init, + min_float=min_float, + min_proba_Z=min_proba_Z, + min_proba_W=min_proba_W, + min_proba_mixture_proportions=min_proba_mixture_proportions, + min_margin=min_margin, + min_gamma=min_gamma, + init_type=init_type, + n_init_clustering=n_init_clustering, + node_perturbation_rate=node_perturbation_rate, + model_id=1, + dtype='float64', + threshold_absent_nodes=threshold_absent_nodes, + blockmodel_params=blockmodel_params, + random_state=None, # np.random.RandomState(42) + tol_iter=tol_iter, + n_jobs=-1, + verbose=0, debug_list=[], + debug_output=debug_output + ) + model.fit( + X_, + given_Z=given_Z, given_W=given_W, + S_r=S_r, lambda_r=lambda_r, S_c=S_c, lambda_c=lambda_c + ) + return model, Z, W, level + + def assert_metrics(self, model, Z, W, level, n_first): + for init, (Z_model, W_model) in enumerate(model.best_partition(mode='likelihood', + n_first=n_first)): + ari_row_f = adjusted_rand_score(Z.flatten(), Z_model.flatten()) + ari_col_f = adjusted_rand_score(W.flatten(), W_model.flatten()) + + print(f'level = {level}, ari global = {(ari_row_f, ari_col_f)}') + + assert ari_row_f > ARI_ROW[level] + assert ari_col_f > ARI_COL[level] + + @pytest.fixture + def test_data(self, request): + (N, D, Kz, Kw, + level, with_margins) = request.param + + model_type = 'LBM' + dimensions = {'N': N, 'D': D} + n_clusters = {'Kz': Kz, 'Kw': Kw} + T = 1 + directed = True + dtype = 'int32' + + alphas_dirichlet = { + 'very_easy': 10, + 'easy': 8, + 'medium': 6, + 'hard': 4 + } + gamma0_level_dic = { + 'easy': 0.02, + 'medium': 0.01, + 'hard': 0.005 + } + alpha = data.generate_initial_proportions(Kz, alphas_dirichlet[level]) + beta = data.generate_initial_proportions(Kw, alphas_dirichlet[level]) + prior_init = {'alpha': alpha, 'beta': beta} + + prior_trans = {'pi': None, 'rho': None} + + gamma_0 = gamma0_level_dic[level] + + # Data 
fixed params + + block_sparsity_matrix = None # \beta_{kl}^t of Matias + # block_sparsity_matrix = 0.1 * np.ones((Kz, Kw), dtype='float') + + constant_margins = True # True, False + start, stop, step = 1, 50, .1 + order_power_law = -1.5 # margins ~ Unif(start, stop)^order_power_law + + if with_margins: + mu, nu = data.generate_margins( + T, N, D, constant_margins, start, stop, step, + directed, order_power_law, + ar_margins=None, a_ar=None, sigma2_ar=None + ) + margins = {'mu': mu, 'nu': nu} + else: + margins = None + + noise_level_ = 0. + + if Kz == 3 and Kw == 4: + gamma = gamma_0 * np.array([ + [1, 2, 4, 1], + [3, 1, 2, 3], + [2, 3, 1, 3] + ]) + elif Kz == 3 and Kw == 3: + gamma = gamma_0 * np.array([ + [1, 2, 3], + [3, 1, 2], + [2, 3, 1] + ]) + else: + raise ValueError + + if T > 1: + gamma = np.stack([gamma for _ in range(T)], axis=0) + if block_sparsity_matrix is not None: + block_sparsity_matrix = np.stack([block_sparsity_matrix for _ in range(T)], axis=0) + + with_absent_nodes = False + absent_nodes = { + 'absent_row_nodes': [], + 'absent_col_nodes': [] + } + X, Z, W = data.generate_data( + T, + model_type, + dimensions, + n_clusters, + prior_init, + prior_trans, + gamma, + with_margins, + margins, + self_loops, + directed, + noise_level_, + with_absent_nodes, + absent_nodes, + dtype, + block_sparsity_matrix + ) + return (X, Z, W, N, D, Kz, Kw, + level, with_margins) diff --git a/dcblockmodels/tests/test_hlbm_long.py b/dcblockmodels/tests/test_hlbm_long.py new file mode 100644 index 0000000..0ba4583 --- /dev/null +++ b/dcblockmodels/tests/test_hlbm_long.py @@ -0,0 +1,31 @@ +import pytest + +from .test_hlbm import TestHLBM + + +class TestLong(TestHLBM): + + @pytest.mark.parametrize('test_data', TestHLBM.test_data_setups, indirect=True) + @pytest.mark.parametrize('sparse_X', [False, True]) + @pytest.mark.parametrize('estimated_margins', [False, True]) + @pytest.mark.parametrize('regularize', [False, True]) + @pytest.mark.parametrize('em_type', ['VEM', 'CEM']) 
+ @pytest.mark.parametrize('multiplicative_init', [False, True]) + def test_fitted_model( + self, + test_data, + sparse_X, + estimated_margins, + regularize, + em_type, + multiplicative_init): + + model, Z, W, level = self.fit_model( + test_data, + sparse_X, + estimated_margins, + regularize, + em_type, + multiplicative_init + ) + self.assert_metrics(model, Z, W, level, TestHLBM.n_first) diff --git a/dcblockmodels/tests/test_hlbm_short.py b/dcblockmodels/tests/test_hlbm_short.py new file mode 100644 index 0000000..d087622 --- /dev/null +++ b/dcblockmodels/tests/test_hlbm_short.py @@ -0,0 +1,61 @@ +import warnings +import pytest + +from .test_hlbm import TestHLBM + +warnings.filterwarnings("ignore", category=DeprecationWarning) + + +class TestShort(TestHLBM): + + @pytest.mark.parametrize('test_data', [TestHLBM.test_data_setups[1]], indirect=True) + @pytest.mark.parametrize('sparse_X', [False, True]) + def test_sparse(self, test_data, sparse_X): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=sparse_X, + estimated_margins=True, + regularize=False, + em_type='VEM', + multiplicative_init=False + ) + self.assert_metrics(model, Z, W, level, TestHLBM.n_first) + + @pytest.mark.parametrize('test_data', [TestHLBM.test_data_setups[1]], indirect=True) + @pytest.mark.parametrize('estimated_margins', [False, True]) + def test_estimated_margins(self, test_data, estimated_margins): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=False, + estimated_margins=estimated_margins, + regularize=False, + em_type='VEM', + multiplicative_init=False + ) + self.assert_metrics(model, Z, W, level, TestHLBM.n_first) + + @pytest.mark.parametrize('test_data', [TestHLBM.test_data_setups[1]], indirect=True) + @pytest.mark.parametrize('regularize', [False, True]) + def test_regularize(self, test_data, regularize): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=False, + estimated_margins=True, + regularize=regularize, + 
em_type='VEM', + multiplicative_init=False + ) + self.assert_metrics(model, Z, W, level, TestHLBM.n_first) + + @pytest.mark.parametrize('test_data', [TestHLBM.test_data_setups[1]], indirect=True) + @pytest.mark.parametrize('em_type', ['VEM', 'CEM']) + def test_em_type(self, test_data, em_type): + model, Z, W, level = self.fit_model( + test_data=test_data, + sparse_X=False, + estimated_margins=True, + regularize=False, + em_type=em_type, + multiplicative_init=False + ) + self.assert_metrics(model, Z, W, level, TestHLBM.n_first) diff --git a/docs/codestyle.md b/docs/codestyle.md new file mode 100644 index 0000000..36dac15 --- /dev/null +++ b/docs/codestyle.md @@ -0,0 +1,19 @@ +# Code style specifics + +This codebase strives to follow PEP8 standards, but some specifics have been needed for code readability and easier links to the articles describing the algorithm. + +## Variable names + +Since a lot of long equations are implemented, shortened variable names need to be accepted. Also, the maths involve a lot of upper case variables that describe matrices, functions, which cannot be lower case'd without loss of understandability in the code. Therefore, there is some tolerance to the following variables: + +`ND`, `Kz`, `Kw`, `nax`, `ax`, `N`, `D`, `N_`, `D_`, `T`, `G`, `H`, `a`, `c`, `d`, `f`, `df`, `ix`, `ca`, `g1`, `g2`, `i`, `j`, `k`, `k_`, `l`, `n`, `n1`, `n2`, `p`, `s`, `t`, `t_`, `u`, `y`, `it`, `mu`, `nu`, `pi`, `p1`, `p2`, `L1`, `L2`, `dLBM`, `L`, `N_`, `V`, `B`, `D_`, `Hz`, `Hw`, `qw`, `qz`, `root_D_inv`, `x`, `x0`, `x1`, `R`, `C`, `CA_plot` + +and any variables that respect the following regular expressions: + +`Z.*`, `W.*`, `.*Z`, `.*W`, `.*X.*`, `K.*`, `.*S.*`, `.*P.*`, `.*Lc.*`, `.*ND`, `AFD.*`, `AR.*` + +## Code structure + +Some leeway has been left to line length for modules, functions, classes, and for number of arguments/methods. + +Line length is set to 100 for equation readability. 
diff --git a/notebooks/dlbm_example.ipynb b/notebooks/dlbm_example.ipynb new file mode 100644 index 0000000..0aedae0 --- /dev/null +++ b/notebooks/dlbm_example.ipynb @@ -0,0 +1,1416 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "ac617c7b", + "metadata": {}, + "source": [ + "**This notebook allows to simulate data, classify it using a DLBM model and evaluate the model in a controlled environnement. The model is a dynamic LBM `dLBM` for data represented as a series of adjacency matrices.**" + ] + }, + { + "cell_type": "markdown", + "id": "3bf00e47", + "metadata": {}, + "source": [ + "# Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f3666b39", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:12:31.205763Z", + "start_time": "2022-02-03T14:12:31.190085Z" + } + }, + "outputs": [], + "source": [ + "import sys, os\n", + "os.path.dirname(sys.executable), sys.version, sys.path" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "da11811b", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:12:32.532417Z", + "start_time": "2022-02-03T14:12:31.799891Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "%config Completer.use_jedi = False\n", + "#!jt -t onedork -fs 100 -altp -tfs 11 -nfs 100 -cellw 60% -T -N\n", + "%pip list" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "bec266b8", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:31.751414Z", + "start_time": "2022-02-10T09:23:28.903594Z" + } + }, + "outputs": [], + "source": [ + "import sys\n", + "import pathlib\n", + "import numpy as np\n", + "import scipy as sp\n", + "import matplotlib.pyplot as plt\n", + "import seaborn as sns\n", + "%matplotlib inline\n", + "\n", + "from sklearn.metrics import confusion_matrix\n", + "\n", + "import warnings\n", + "warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n", + "warnings.filterwarnings(\"ignore\", 
message='Deprecation')\n", + "\n", + "from dcblockmodels.models.dlbm import dLBM\n", + "\n", + "from dcblockmodels import metrics, plot, data\n", + "from dcblockmodels.models.utils import similarity_matrices, general, init\n", + "from dcblockmodels.models.utils.smoothing_schedule import SmoothingSchedule" + ] + }, + { + "cell_type": "markdown", + "id": "8413e799", + "metadata": {}, + "source": [ + "# Data" + ] + }, + { + "cell_type": "markdown", + "id": "7ac65838", + "metadata": {}, + "source": [ + "## Sampling the data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "76075b11", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:31.757910Z", + "start_time": "2022-02-10T09:23:31.753531Z" + } + }, + "outputs": [], + "source": [ + "# whether we sample from a SBM or LBM\n", + "model_type = 'LBM'\n", + "# in case of SBM, whether the graph is directed\n", + "directed = True\n", + "# number of time steps\n", + "T = 10\n", + "# nb of row nodes, nb of column nodes\n", + "N, D = 100, 200\n", + "# nb row clusters, nb of column clusters \n", + "Kz, Kw = 3, 4" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ddd59632", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:31.773339Z", + "start_time": "2022-02-10T09:23:31.759331Z" + } + }, + "outputs": [], + "source": [ + "level_alpha = 'medium'\n", + "level_beta = 'medium'\n", + "level_pi = 'medium'\n", + "level_rho = 'medium'\n", + "\n", + "alphas_dirichlet = {\n", + " 'very_easy': 10,\n", + " 'easy': 8,\n", + " 'medium': 6,\n", + " 'hard': 4\n", + "}\n", + "diag_vals = {\n", + " 'diag': 0,\n", + " 'easy': .9,\n", + " 'medium': .75,\n", + " 'hard': .6\n", + "}\n", + "\n", + "alpha = data.generate_initial_proportions(Kz, alphas_dirichlet[level_alpha])\n", + "beta = data.generate_initial_proportions(Kw, alphas_dirichlet[level_beta])\n", + "prior_init = {'alpha': alpha, 'beta': beta}\n", + "\n", + "pi = data.generate_diag_transition_matrix(Kz, 
diag_vals[level_pi]) \n", + "rho = data.generate_diag_transition_matrix(Kw, diag_vals[level_rho])\n", + "prior_trans = {'pi': pi, 'rho': rho}\n", + "\n", + "alpha, pi, beta, rho" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "68dbbadf", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:32.496038Z", + "start_time": "2022-02-10T09:23:32.332025Z" + } + }, + "outputs": [], + "source": [ + "with_margins = True # True, False\n", + "constant_margins = False # True, False\n", + "start, stop, step = 1, 50, .1\n", + "order_power_law = -1.5 # margins ~ Unif(start, stop)^order_power_law\n", + "ar_margins, a_ar, sigma2_ar = True, 1.1, .05 # mu ~ AR1 : mu_{t+1} = N(a mu_t + c, sigma2) (c s.t. mu increasing if sigma2 = 0)\n", + "\n", + "if with_margins:\n", + " mu, nu = data.generate_margins(\n", + " T, N, D, constant_margins, start, stop, step,\n", + " directed, order_power_law,\n", + " ar_margins, a_ar, sigma2_ar\n", + " )\n", + " margins = {'mu': mu, 'nu': nu}\n", + "else:\n", + " margins = None\n", + " \n", + "margins" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8db1a89", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:49.335157Z", + "start_time": "2022-02-10T09:23:49.310317Z" + } + }, + "outputs": [], + "source": [ + "with_absent_nodes = True # True, False\n", + "min_proba_t = .0\n", + "max_proba_t = .2\n", + "proba_absent = None\n", + "\n", + "if with_absent_nodes:\n", + " absent_row_nodes = data.sample_absent_nodes(\n", + " T, N,\n", + " min_proba_t=min_proba_t,\n", + " max_proba_t=max_proba_t,\n", + " proba_absent=proba_absent\n", + " )\n", + " if not directed:\n", + " absent_col_nodes = absent_row_nodes.copy()\n", + " else:\n", + " absent_col_nodes = data.sample_absent_nodes(\n", + " T, D,\n", + " min_proba_t=min_proba_t,\n", + " max_proba_t=max_proba_t,\n", + " proba_absent=proba_absent\n", + " )\n", + "else:\n", + " absent_row_nodes, absent_col_nodes = [], []\n", + "\n", + 
"absent_nodes = {\n", + " 'absent_row_nodes': absent_row_nodes,\n", + " 'absent_col_nodes': absent_col_nodes\n", + "}\n", + "\n", + "absent_nodes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dab2198a", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:56.082978Z", + "start_time": "2022-02-10T09:23:56.062983Z" + } + }, + "outputs": [], + "source": [ + "# scaling factor for the matrix gamma : determines the separability level\n", + "# lower is harder and more sparse\n", + "gamma_0 = .01\n", + "\n", + "# defines the sparsity in a block\n", + "# block_sparsity_matrix[t, k, l] is the proba of a zero\n", + "# in block (k, l) at time t\n", + "# corresponds to the \\beta_{kl}^t of Matias\n", + "block_sparsity_matrix = None\n", + "# block_sparsity_matrix = 0.1 * np.ones((Kz, Kw), dtype='float')\n", + "\n", + "# if we add gaussian noise to the sampled graph\n", + "# not advised since it can make models with lower\n", + "# complete data log likelihood give better classification results\n", + "# than model with higher complete data log likelihood\n", + "noise_level_ = 0.\n", + "\n", + "\n", + "if Kz == 3 and Kw == 4:\n", + " gamma = gamma_0 * np.array([\n", + " [1, 2, 3, 1 ],\n", + " [3, 1, 2, 3 ],\n", + " [2, 3, 1, 4 ]\n", + " ])\n", + "elif Kz == 3 and Kw == 3:\n", + " gamma = gamma_0 * np.array([\n", + " [1, 2, 3],\n", + " [3, 1, 2],\n", + " [2, 3, 1]\n", + " ])\n", + "else:\n", + " raise ValueError\n", + "\n", + "if T > 1:\n", + " gamma = np.stack([gamma for _ in range(T)], axis=0)\n", + " if block_sparsity_matrix is not None:\n", + " block_sparsity_matrix = np.stack([block_sparsity_matrix for _ in range(T)], axis=0)\n", + " \n", + "gamma" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "05ea268c", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:24:00.099758Z", + "start_time": "2022-02-10T09:23:58.117497Z" + } + }, + "outputs": [], + "source": [ + "dimensions = {'N': N, 'D': D}\n", + 
"n_clusters = {'Kz': Kz, 'Kw': Kw}\n", + "\n", + "self_loops = True\n", + "dtype = 'int32'\n", + "\n", + "X, Z, W = data.generate_data(\n", + " T,\n", + " model_type,\n", + " dimensions,\n", + " n_clusters,\n", + " prior_init,\n", + " prior_trans,\n", + " gamma,\n", + " with_margins,\n", + " margins,\n", + " self_loops,\n", + " directed,\n", + " noise_level_,\n", + " with_absent_nodes,\n", + " absent_nodes,\n", + " dtype,\n", + " block_sparsity_matrix\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "90c020d5", + "metadata": {}, + "source": [ + "## Plot" + ] + }, + { + "cell_type": "markdown", + "id": "c74df45b", + "metadata": {}, + "source": [ + "### Block view & link with matrix factorization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "46f3dc63", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:22.239985Z", + "start_time": "2022-02-03T11:40:21.551891Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "X_ = X[t_plot] if X.ndim == 3 else X\n", + "Z_ = Z[t_plot] if X.ndim == 3 else Z\n", + "W_ = W[t_plot] if X.ndim == 3 else W\n", + "gamma_ = gamma[t_plot] if X.ndim == 3 else gamma\n", + "\n", + "row_indices = np.argsort(Z_.astype(int))\n", + "col_indices = np.argsort(W_.astype(int))\n", + "\n", + "cmap = sns.cubehelix_palette(light=1., as_cmap=True)\n", + "f, ax = plt.subplots(1, 4, figsize=(4 * 5, 5))\n", + "\n", + "sns.heatmap(X_, ax=ax[0], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[0].set_title('Raw data')\n", + "\n", + "sns.heatmap(X_[row_indices, :], ax=ax[1], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[1].set_title('Row-reorganized data')\n", + "\n", + "sns.heatmap(X_[np.ix_(row_indices, col_indices)], ax=ax[2], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[2].set_title('Row and column-reorganized data')\n", + "\n", + "Z_encoded = general.encode(Z_, Kz)\n", + "W_encoded = 
general.encode(W_, Kw)\n", + "X_approx = Z_encoded.dot(gamma_).dot(W_encoded.T)\n", + "sns.heatmap(X_approx[np.ix_(row_indices, col_indices)], ax=ax[3], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[3].set_title('Connectivity-approximized data')\n", + "\n", + "plt.tight_layout()" + ] + }, + { + "cell_type": "markdown", + "id": "38da6976", + "metadata": {}, + "source": [ + "### Dimensionality reduction with Correspondence Analysis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4f7c9930", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:39.150051Z", + "start_time": "2022-02-03T11:40:34.318621Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, T - 1]\n", + "\n", + "if X.ndim == 2:\n", + " plot.CA_plot(X, Z, W)\n", + "else:\n", + " if type(t_plot) == int:\n", + " t_plot = [t_plot]\n", + "\n", + " n_plots = len(t_plot)\n", + " f, ax = plt.subplots(n_plots, 2, figsize=(10, 5 * n_plots))\n", + " for i, t in enumerate(t_plot):\n", + " W_plot = W[t] if W is not None else None\n", + "\n", + " absent_row = [tup[1] for tup in absent_row_nodes if tup[0] == t]\n", + " absent_col = [tup[1] for tup in absent_col_nodes if tup[0] == t]\n", + "\n", + " plot.CA_plot(\n", + " X[t],\n", + " Z[t], W_plot,\n", + " absent_row, absent_col,\n", + " ax=ax[i]\n", + " )\n", + " ax[i, 0].set_title(f't = {t}')\n", + " ax[i, 1].set_title(f't = {t}')\n" + ] + }, + { + "cell_type": "markdown", + "id": "137afcd1", + "metadata": {}, + "source": [ + "### True margins" + ] + }, + { + "cell_type": "markdown", + "id": "2de3e036", + "metadata": {}, + "source": [ + "Plot margins over time, in the dynamic case" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "23024ee1", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:44.260515Z", + "start_time": "2022-02-03T11:40:44.011289Z" + } + }, + "outputs": [], + "source": [ + 
"n_nodes = 20\n", + "f, ax = plt.subplots(1, 2, figsize=(2 * 6, 4))\n", + "\n", + "ax[0].plot(margins['mu'][:, np.random.choice(N, size=n_nodes)]);\n", + "ax[0].set_title('True row margins mu');\n", + "\n", + "ax[1].plot(margins['nu'][:, np.random.choice(D, size=n_nodes)]);\n", + "ax[1].set_title('True col margins nu');" + ] + }, + { + "cell_type": "markdown", + "id": "224c9caf", + "metadata": {}, + "source": [ + "### Factorial Discriminant Analysis" + ] + }, + { + "cell_type": "markdown", + "id": "5bfc8b85", + "metadata": {}, + "source": [ + "Measures the level of linear separability of the classes after projection onto R^N using correspondence analysis\n", + "\n", + "See Discriminative Factorial Analysis : http://www.math.u-bordeaux.fr/~mchave100p/wordpress/wp-content/uploads/2013/10/AFD.pdf" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b8dc5130", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:49.139055Z", + "start_time": "2022-02-03T11:40:47.753899Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, -1]\n", + "\n", + "n_components = 3\n", + "\n", + "f, ax = plt.subplots(len(t_plot), 2, squeeze=False, sharex=True, sharey=True, figsize=(5 * len(t_plot), 8))\n", + "xs = np.arange(n_components, dtype='int')\n", + "\n", + "if X.ndim == 3:\n", + " for i, t in enumerate(t_plot):\n", + " res = metrics.AFD_CA_linear_separation(\n", + " X[t], Z[t], W[t],\n", + " n_components=n_components,\n", + " absent_row_nodes=absent_row_nodes,\n", + " absent_col_nodes=absent_col_nodes\n", + " )\n", + "\n", + " ax[i, 0].bar(xs, res[0])\n", + " ax[i, 1].bar(xs, res[1])\n", + " ax[i, 0].set_xlabel('factorial axis')\n", + " ax[i, 1].set_xlabel('factorial axis')\n", + " ax[i, 0].set_title(f'Rows, T = {t}')\n", + " ax[i, 1].set_title(f'Cols, T = {t}')\n", + "else:\n", + " res = metrics.AFD_CA_linear_separation(\n", + " X, Z, W,\n", + " n_components=n_components,\n", + " 
absent_row_nodes=absent_row_nodes,\n", + " absent_col_nodes=absent_col_nodes\n", + " )\n", + " ax[0, 0].bar(xs, res[0])\n", + " ax[0, 1].bar(xs, res[1])\n", + " ax[0, 0].set_xlabel('factorial axis')\n", + " ax[0, 1].set_xlabel('factorial axis')\n", + " ax[0, 0].set_title(f'Rows')\n", + " ax[0, 1].set_title(f'Cols')\n", + " \n", + "plt.suptitle('CA AFD linear separability', y=1);\n", + "plt.tight_layout()" + ] + }, + { + "cell_type": "markdown", + "id": "fdf4c251", + "metadata": {}, + "source": [ + "### Distribution of the values of the cells of the data matrix" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2371b3cf", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:41:00.800991Z", + "start_time": "2022-02-03T11:41:00.136830Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, -1]\n", + "\n", + "bins = 50\n", + "val_min = 1\n", + "val_max = 100 # int or None\n", + "\n", + "f, ax = plt.subplots(len(t_plot), 1, sharex=True, sharey=True, figsize=(10, 1.5 * len(t_plot)))\n", + "\n", + "for i, t in enumerate(t_plot):\n", + " values = X[t].flatten()\n", + " values = values[values >= val_min]\n", + " if val_max is not None:\n", + " values = values[values < val_max]\n", + " ax[i].hist(values, bins=bins)\n", + " ax[i].set_title(f'time t = {t}')\n", + " \n", + "f.suptitle('Histogram of the values of the cells of the data matrix over time');\n", + "plt.tight_layout();" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8fe333fa", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:41:02.120634Z", + "start_time": "2022-02-03T11:41:01.567850Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, -1]\n", + "\n", + "bins = 50\n", + "val_min = 0\n", + "val_max = None #int or None\n", + "\n", + "f, ax = plt.subplots(len(t_plot), 1, sharex=True, sharey=True, figsize=(10, 1.5 * 
len(t_plot)))\n", + "\n", + "for i, t in enumerate(t_plot):\n", + " values = X[t].sum(0).flatten()\n", + " values = values[values >= val_min]\n", + " if val_max is not None:\n", + " values = values[values < val_max]\n", + " ax[i].hist(values, bins=bins)\n", + " ax[i].set_title(f'time t = {t}')\n", + " \n", + "f.suptitle('Histogram of the degrees of the nodes over time');\n", + "plt.tight_layout();" + ] + }, + { + "cell_type": "markdown", + "id": "eb52f014", + "metadata": {}, + "source": [ + "# Models" + ] + }, + { + "cell_type": "markdown", + "id": "37944c14", + "metadata": {}, + "source": [ + "## DLBM" + ] + }, + { + "cell_type": "markdown", + "id": "3c24ab54", + "metadata": {}, + "source": [ + "### Algo params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a2031578", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:24:05.598531Z", + "start_time": "2022-02-10T09:24:05.416239Z" + } + }, + "outputs": [], + "source": [ + "smoothing_schedule = SmoothingSchedule('sigmoid', 50, tau0=1e-3, x0=-6., x1=5.)\n", + "smoothing_schedule.plot()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "89c67479", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:24:06.496074Z", + "start_time": "2022-02-10T09:24:06.485304Z" + } + }, + "outputs": [], + "source": [ + "model_type = 'with_margins' # # 'with_margins', # 'without_margins'\n", + "parameter_smoothing = True # True, False\n", + "n_iter_supp_smoothing = 10\n", + "sparse_X = True # True, False\n", + "\n", + "n_init = 5\n", + "em_type = 'VEM' # 'VEM', 'CEM'\n", + "max_iter = 500\n", + "tol_iter = 1e-6\n", + "min_float = 1e-15\n", + "min_proba_Z, min_proba_W = .1, .1\n", + "min_proba_mixture_proportions = 1e-1 # to avoid empty clusters\n", + "min_margin = 1e-10\n", + "min_gamma = 1e-10\n", + "prior_diagonal_pi, prior_diagonal_rho = 0., 0. 
#.2, .2\n", + "diag_pi_init, diag_rho_init = .7, .7\n", + "\n", + "init_type = 'kmeans' #'given' # 'skmeans', 'kmeans', 'given'\n", + "type_init_margins = 'ones' # ones, X.\n", + "given_mu, given_nu = None, None\n", + "n_init_clustering = 20\n", + "node_perturbation_rate = .15\n", + "cluster_perturbation_rate = 0.\n", + "threshold_absent_nodes = -1\n", + "\n", + "debug_output = pathlib.Path(r'../dcblockmodels/model_debug_output')\n", + "dtype = 'float64'\n", + "random_state = None\n", + "n_jobs = -1\n", + "verbose = 1\n", + "model_id = 12\n", + "\n", + "# debug_list contains the names of the parameters fo the models\n", + "# or of the variational distribution that we wish to monitor\n", + "# during the fitting of the model\n", + "# This is done by writing the values of the model to disk\n", + "# so it takes time and space. Providing an empty list\n", + "# is the normal behavior\n", + "debug_list = ['log_alpha', 'gamma', 'log_pi', 'Z', 'mu'] # []" + ] + }, + { + "cell_type": "markdown", + "id": "d30d6d4d", + "metadata": {}, + "source": [ + "### Initialization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5fc56de5", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:24:17.200760Z", + "start_time": "2022-02-10T09:24:13.514583Z" + } + }, + "outputs": [], + "source": [ + "if init_type == 'given':\n", + " # one could get initial partitions\n", + " # using any clustering algo\n", + " given_Z = init._skmeans_init(\n", + " np.concatenate([X[t] for t in range(T)], axis=1),\n", + " Kz, n_init_clustering, random_state=None, n_jobs=-1\n", + " )\n", + " given_W = init._skmeans_init(\n", + " np.concatenate([X[t] for t in range(T)], axis=0).T,\n", + " Kw, n_init_clustering, random_state=None, n_jobs=-1\n", + " )\n", + "else:\n", + " given_Z, given_W = None, None\n", + "\n", + "if sparse_X:\n", + " X_ = [general.to_sparse(X[t]) for t in range(T)]\n", + "else:\n", + " X_ = X.copy()" + ] + }, + { + "cell_type": "markdown", + "id": "3540682e", + 
"metadata": {}, + "source": [ + "### Fitting the model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5a1df072", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:24:49.158307Z", + "start_time": "2022-02-10T09:24:18.418387Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "model = dLBM(\n", + " model_type=model_type,\n", + " em_type=em_type,\n", + " parameter_smoothing=parameter_smoothing,\n", + " Kz=Kz, Kw=Kw,\n", + " n_init=n_init,\n", + " model_id=model_id,\n", + " max_iter=max_iter,\n", + " type_init_margins=type_init_margins,\n", + " smoothing_schedule=smoothing_schedule.schedule,\n", + " n_iter_supp_smoothing=n_iter_supp_smoothing,\n", + " prior_diagonal_pi=prior_diagonal_pi,\n", + " prior_diagonal_rho=prior_diagonal_rho,\n", + " diag_pi_init=diag_pi_init,\n", + " diag_rho_init=diag_rho_init,\n", + " init_type=init_type,\n", + " n_init_clustering=n_init_clustering,\n", + " node_perturbation_rate=node_perturbation_rate,\n", + " cluster_perturbation_rate=cluster_perturbation_rate,\n", + " threshold_absent_nodes=threshold_absent_nodes,\n", + " min_proba_mixture_proportions=min_proba_mixture_proportions,\n", + " min_gamma=min_gamma,\n", + " min_margin=min_margin,\n", + " min_proba_Z=min_proba_Z,\n", + " min_proba_W=min_proba_W,\n", + " dtype=dtype,\n", + " blockmodel_params=None,\n", + " random_state=random_state,\n", + " tol_iter=tol_iter,\n", + " n_jobs=n_jobs, verbose=verbose,\n", + " debug_list=debug_list,\n", + " debug_output=debug_output\n", + ")\n", + "model.fit(\n", + " X_, \n", + " given_Z=given_Z, given_W=given_W,\n", + " given_mu=given_mu, given_nu=given_nu\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "23bc1bb2", + "metadata": {}, + "source": [ + "## Load/save model" + ] + }, + { + "cell_type": "markdown", + "id": "4e7b20a3", + "metadata": {}, + "source": [ + "### Save" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "05b2141c", + "metadata": { + "ExecuteTime": { 
+ "end_time": "2022-01-24T16:04:42.325906Z", + "start_time": "2022-01-24T16:04:42.312022Z" + } + }, + "outputs": [], + "source": [ + "model.save(path='../saved_models', modelname='my_model')" + ] + }, + { + "cell_type": "markdown", + "id": "3399143e", + "metadata": {}, + "source": [ + "### Load" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c6118fd1", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T16:04:56.102664Z", + "start_time": "2022-01-24T16:04:56.094125Z" + } + }, + "outputs": [], + "source": [ + "model = general.load_model('../saved_models/my_model')\n", + "model" + ] + }, + { + "cell_type": "markdown", + "id": "7c5a2cbe", + "metadata": {}, + "source": [ + "# Metrics & visualizations" + ] + }, + { + "cell_type": "markdown", + "id": "b53b5c62", + "metadata": {}, + "source": [ + "## Partitions and criterion" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "5d7e162d", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:15:04.857333Z", + "start_time": "2022-02-03T14:15:04.628455Z" + } + }, + "outputs": [], + "source": [ + "Z_model, W_model = model.best_partition(mode='likelihood', n_first=1)[0]\n", + "#Z_model, W_model = model.best_partition(mode='consensus: hbgf', n_first=(model.n_init) // 2)[0]\n", + "\n", + "plot.plot_criterions(\n", + " model,\n", + " thr_decrease=1000,\n", + " i_start=0, i_end=-1,\n", + " legend=True\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "dea43b42", + "metadata": {}, + "source": [ + "## DLBM" + ] + }, + { + "cell_type": "markdown", + "id": "208429c4", + "metadata": {}, + "source": [ + "### Metrics" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "27a56550", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:16:03.021136Z", + "start_time": "2022-02-03T14:16:02.939999Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "metrics.print_metrics(\n", + " Z_model, W_model, Z, W,\n", + " 
absent_nodes=model.absent_nodes,\n", + " print_each_timestep=True\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "fbc3f88a", + "metadata": {}, + "source": [ + "### Distribution of the metrics" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6436214f", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:45:46.181865Z", + "start_time": "2022-02-03T11:45:45.707107Z" + }, + "scrolled": false + }, + "outputs": [], + "source": [ + "f, ax = plt.subplots(figsize=(8, 4.5))\n", + "\n", + "caris = np.array([\n", + " metrics.get_metrics(\n", + " Z_model, W_model,\n", + " Z, W,\n", + " absent_nodes=model.absent_nodes\n", + " )['cari_f_without_absent']\n", + " for Z_model, W_model in model.best_partition(mode='likelihood', n_first=model.n_init)\n", + "])\n", + "sns.kdeplot(data=caris, ax=ax, bw=.2, clip=(caris.min() - .1, caris.max() + .1));\n", + "ax.set_title(f'{model.__class__.__name__}: max CARI = {100 * caris.max():.2f}');\n", + "ax.set_xlabel('global CARI values');" + ] + }, + { + "cell_type": "markdown", + "id": "6a66f1c9", + "metadata": {}, + "source": [ + "### Confusion matrices" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b945db71", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:56:44.940497Z", + "start_time": "2022-02-03T11:56:44.912597Z" + }, + "cell_style": "split" + }, + "outputs": [], + "source": [ + "for t in range(T):\n", + " print('t = {}'.format(t), end='\\n')\n", + " print(metrics.cmat_clustering(confusion_matrix(\n", + " Z_model[t], Z[t])), end='\\n\\n')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e0026a9e", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T16:17:38.322717Z", + "start_time": "2022-01-24T16:17:38.295008Z" + }, + "cell_style": "split" + }, + "outputs": [], + "source": [ + "for t in range(T):\n", + " print('t = {}'.format(t), end='\\n')\n", + " print(metrics.cmat_clustering(confusion_matrix(\n", + " W_model[t], 
W[t])), end='\\n\\n')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9cbf20d0", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T16:17:41.190033Z", + "start_time": "2022-01-24T16:17:40.984682Z" + } + }, + "outputs": [], + "source": [ + "plot.plot_alluvial(Z)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "00373f9f", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T16:17:43.163869Z", + "start_time": "2022-01-24T16:17:43.132552Z" + } + }, + "outputs": [], + "source": [ + "plot.plot_alluvial(Z_model)" + ] + }, + { + "cell_type": "markdown", + "id": "e5d9696b", + "metadata": {}, + "source": [ + "## Model parameters" + ] + }, + { + "cell_type": "markdown", + "id": "918e2bd5", + "metadata": {}, + "source": [ + "`model.best_parameters` : a list of length the number of initializations of the model. Each element of the list is a tuple in the form `(crit, param_dic)`, where `crit` is the best value of the objective criterion of the model of the given init and `param_dic` contains the parameters of the model that gave this `crit` " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4795bdd6", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:01:59.387254Z", + "start_time": "2022-02-03T12:01:59.379744Z" + } + }, + "outputs": [], + "source": [ + "model.best_parameters[0]" + ] + }, + { + "cell_type": "markdown", + "id": "a09928b3", + "metadata": {}, + "source": [ + "Mapping the indexes of the found clusters to the indexes of the true clusters using the Kuhn Munkres/Hungarian algorithm on the confusion matrix" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1d6e8bb4", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:09:17.156827Z", + "start_time": "2022-02-03T12:09:17.148111Z" + } + }, + "outputs": [], + "source": [ + "from scipy.optimize import linear_sum_assignment\n", + "from sklearn.metrics import confusion_matrix\n", + "\n", + 
"Z_model, W_model = model.best_partition(mode='likelihood', n_first=1)[0]\n", + "\n", + "cmat_Z = confusion_matrix(Z_model.flatten(), Z.flatten())\n", + "cmat_W = confusion_matrix(W_model.flatten(), W.flatten())\n", + " \n", + "indexes_Z = linear_sum_assignment(-cmat_Z)[1]\n", + "indexes_W = linear_sum_assignment(-cmat_W)[1]" + ] + }, + { + "cell_type": "markdown", + "id": "c6d18e68", + "metadata": {}, + "source": [ + "### Fixme: Problem with confusion matrix computation & absent nodes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "eea23259", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:09:18.353259Z", + "start_time": "2022-02-03T12:09:17.993155Z" + } + }, + "outputs": [], + "source": [ + "\"\"\"f, ax = plt.subplots(1, 2, figsize=(12, 6))\n", + "\n", + "gamma_model = model.best_parameters[0][1]['gamma']\n", + "reordered_gamma_model = gamma_model[np.ix_(indexes_Z, indexes_W)]\n", + "\n", + "sns.heatmap(reordered_gamma_model, ax=ax[0], square=True)\n", + "sns.heatmap(gamma[0], ax=ax[1], square=True)\n", + "\n", + "ax[0].set_title('Estimated gamma');\n", + "ax[1].set_title('True gamma');\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d0befdee", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:10:29.511205Z", + "start_time": "2022-02-03T12:10:29.485939Z" + } + }, + "outputs": [], + "source": [ + "\"\"\"if isinstance(model, dLBM):\n", + " f, ax = plt.subplots(2, 2, figsize=(12, 6))\n", + "\n", + " pi_model = np.exp(model.best_parameters[0][1]['log_pi'])\n", + " reordered_pi_model = pi_model[np.ix_(indexes_Z, indexes_Z)]\n", + "\n", + " sns.heatmap(reordered_pi_model, ax=ax[0, 0], square=True)\n", + " sns.heatmap(pi, ax=ax[0, 1], square=True)\n", + "\n", + " ax[0, 0].set_title('Estimated pi');\n", + " ax[0, 1].set_title('True pi');\n", + "\n", + " rho_model = np.exp(model.best_parameters[0][1]['log_rho'])\n", + " reordered_rho_model = rho_model[np.ix_(indexes_W, 
indexes_W)]\n", + "\n", + " sns.heatmap(reordered_rho_model, ax=ax[1, 0], square=True)\n", + " sns.heatmap(rho, ax=ax[1, 1], square=True)\n", + "\n", + " ax[1, 0].set_title('Estimated rho');\n", + " ax[1, 1].set_title('True rho');\n", + "\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1a6d1e41", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:10:51.320478Z", + "start_time": "2022-02-03T12:10:51.306552Z" + } + }, + "outputs": [], + "source": [ + "\"\"\"if isinstance(model, dLBM):\n", + " n_nodes = 20\n", + " f, ax = plt.subplots(2, 2, figsize=(2 * 8, 8))\n", + "\n", + " row_nodes = np.random.choice(N, size=n_nodes)\n", + " col_nodes = np.random.choice(D, size=n_nodes)\n", + "\n", + " ax[0, 0].plot(margins['mu'][:, row_nodes]);\n", + " ax[0, 1].plot(model.best_parameters[0][1]['mu'][:, row_nodes]);\n", + " ax[0, 0].set_title('True row margins mu');\n", + " ax[0, 1].set_title('Estimated row margins mu');\n", + "\n", + " ax[1, 0].plot(margins['nu'][:, col_nodes]);\n", + " ax[1, 1].plot(model.best_parameters[0][1]['nu'][:, col_nodes]);\n", + " ax[1, 0].set_title('True row margins nu');\n", + " ax[1, 1].set_title('Estimated row margins nu');\n", + "\"\"\"" + ] + }, + { + "cell_type": "markdown", + "id": "3cc92c2b", + "metadata": {}, + "source": [ + "# Debug : parameters during inference" + ] + }, + { + "cell_type": "markdown", + "id": "0bd09351", + "metadata": {}, + "source": [ + "Get parameter values during the iterations of the algorithm. The parameters we wish to analyze must be given as strings in given in model.debug_list. The parameters are written in the directory `dcblockmodels/model_debug_output`, which should be emptied from time to time." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e40eab9f", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:11:18.472738Z", + "start_time": "2022-02-03T12:11:18.177298Z" + } + }, + "outputs": [], + "source": [ + "debug_dic = model.get_debug()\n", + "debug_dic" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a64d40fd", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-25T12:11:33.462494Z", + "start_time": "2022-01-25T12:11:33.457146Z" + } + }, + "outputs": [], + "source": [ + "# debug_dic['param'][init][iter] : returns the value of the parameter \n", + "# 'param' that was given in self.debug_list\n", + "# for the initialization init\n", + "# and for the iteration iter\n", + "debug_dic['gamma'][0][10]" + ] + }, + { + "cell_type": "markdown", + "id": "1fcac6d5", + "metadata": {}, + "source": [ + "## Alpha and beta" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "393c479c", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-25T12:11:34.378583Z", + "start_time": "2022-01-25T12:11:33.464514Z" + } + }, + "outputs": [], + "source": [ + "plot.plot_alphas_during_optim(debug_dic['log_alpha'])" + ] + }, + { + "cell_type": "markdown", + "id": "226c1bcb", + "metadata": {}, + "source": [ + "## Pi and rho" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d8455add", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T15:59:48.656940Z", + "start_time": "2022-01-24T15:59:47.486307Z" + } + }, + "outputs": [], + "source": [ + "plot.plot_pi_rho_during_optim(debug_dic['log_pi'])" + ] + }, + { + "cell_type": "markdown", + "id": "78f83fdf", + "metadata": {}, + "source": [ + "## Gamma" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2faa8244", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T15:59:58.797714Z", + "start_time": "2022-01-24T15:59:57.843486Z" + }, + "scrolled": false + }, + "outputs": [], + "source": [ + 
"plot.plot_gamma_during_optim(debug_dic['gamma'])" + ] + }, + { + "cell_type": "markdown", + "id": "784d1a3e", + "metadata": {}, + "source": [ + "## Mu and nu" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a81749c5", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T16:00:12.572968Z", + "start_time": "2022-01-24T16:00:10.193151Z" + } + }, + "outputs": [], + "source": [ + "plot.plot_mu_nu_during_optim(debug_dic['mu'], indexes=np.random.choice(N, size=(2)))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "6de8df8f", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "hide_input": false, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.15" + }, + "toc": { + "base_numbering": 1, + "nav_menu": {}, + "number_sections": true, + "sideBar": true, + "skip_h1_title": false, + "title_cell": "Table of Contents", + "title_sidebar": "Contents", + "toc_cell": false, + "toc_position": { + "height": "calc(100% - 180px)", + "left": "10px", + "top": "150px", + "width": "486.4px" + }, + "toc_section_display": true, + "toc_window_display": true + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/hlbm_example.ipynb b/notebooks/hlbm_example.ipynb new file mode 100644 index 0000000..c7e3f87 --- /dev/null +++ b/notebooks/hlbm_example.ipynb @@ -0,0 +1,1170 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "8ab48679", + "metadata": {}, + "source": [ + "**This notebook allows to simulate data, classify it using a HLBM model and evaluate the model in a controlled environnement. 
The model is a semi-supervised (or constrained) LBM `HLBM` using pairwise constraints in both row and column space.**" + ] + }, + { + "cell_type": "markdown", + "id": "3f254231", + "metadata": {}, + "source": [ + "# Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "63dc66e2", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:12:31.205763Z", + "start_time": "2022-02-03T14:12:31.190085Z" + } + }, + "outputs": [], + "source": [ + "import sys, os\n", + "os.path.dirname(sys.executable), sys.version, sys.path" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1f35a7d1", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:12:32.532417Z", + "start_time": "2022-02-03T14:12:31.799891Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "%config Completer.use_jedi = False\n", + "#!jt -t onedork -fs 100 -altp -tfs 11 -nfs 100 -cellw 60% -T -N\n", + "%pip list" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "86c3d00f", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:31.751414Z", + "start_time": "2022-02-10T09:23:28.903594Z" + } + }, + "outputs": [], + "source": [ + "import sys\n", + "import pathlib\n", + "import numpy as np\n", + "import scipy as sp\n", + "import matplotlib.pyplot as plt\n", + "import seaborn as sns\n", + "%matplotlib inline\n", + "\n", + "from sklearn.metrics import confusion_matrix\n", + "\n", + "import warnings\n", + "warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n", + "warnings.filterwarnings(\"ignore\", message='Deprecation')\n", + "\n", + "from dcblockmodels.models.hlbm import HLBM\n", + "\n", + "from dcblockmodels import metrics, plot, data\n", + "from dcblockmodels.models.utils import similarity_matrices, general, init\n", + "from dcblockmodels.models.utils.smoothing_schedule import SmoothingSchedule" + ] + }, + { + "cell_type": "markdown", + "id": "aafeaf5d", + "metadata": {}, + "source": [ + "# Data" + 
] + }, + { + "cell_type": "markdown", + "id": "562c9e8e", + "metadata": {}, + "source": [ + "## Sampling the data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c3793686", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:31.757910Z", + "start_time": "2022-02-10T09:23:31.753531Z" + } + }, + "outputs": [], + "source": [ + "# whether we sample from a SBM or LBM\n", + "model_type = 'LBM'\n", + "# in case of SBM, whether the graph is directed\n", + "directed = True\n", + "# number of time steps\n", + "T = 10\n", + "# nb of row nodes, nb of column nodes\n", + "N, D = 100, 200\n", + "# nb row clusters, nb of column clusters \n", + "Kz, Kw = 3, 4" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ddd4da4c", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:31.773339Z", + "start_time": "2022-02-10T09:23:31.759331Z" + } + }, + "outputs": [], + "source": [ + "level_alpha = 'medium'\n", + "level_beta = 'medium'\n", + "level_pi = 'medium'\n", + "level_rho = 'medium'\n", + "\n", + "alphas_dirichlet = {\n", + " 'very_easy': 10,\n", + " 'easy': 8,\n", + " 'medium': 6,\n", + " 'hard': 4\n", + "}\n", + "diag_vals = {\n", + " 'diag': 0,\n", + " 'easy': .9,\n", + " 'medium': .75,\n", + " 'hard': .6\n", + "}\n", + "\n", + "alpha = data.generate_initial_proportions(Kz, alphas_dirichlet[level_alpha])\n", + "beta = data.generate_initial_proportions(Kw, alphas_dirichlet[level_beta])\n", + "prior_init = {'alpha': alpha, 'beta': beta}\n", + "\n", + "pi = data.generate_diag_transition_matrix(Kz, diag_vals[level_pi]) \n", + "rho = data.generate_diag_transition_matrix(Kw, diag_vals[level_rho])\n", + "prior_trans = {'pi': pi, 'rho': rho}\n", + "\n", + "alpha, pi, beta, rho" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "155d99f1", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:32.496038Z", + "start_time": "2022-02-10T09:23:32.332025Z" + } + }, + "outputs": [], + 
"source": [ + "with_margins = True # True, False\n", + "constant_margins = False # True, False\n", + "start, stop, step = 1, 50, .1\n", + "order_power_law = -1.5 # margins ~ Unif(start, stop)^order_power_law\n", + "ar_margins, a_ar, sigma2_ar = True, 1.1, .05 # mu ~ AR1 : mu_{t+1} = N(a mu_t + c, sigma2) (c s.t. mu increasing if sigma2 = 0)\n", + "\n", + "if with_margins:\n", + " mu, nu = data.generate_margins(\n", + " T, N, D, constant_margins, start, stop, step,\n", + " directed, order_power_law,\n", + " ar_margins, a_ar, sigma2_ar\n", + " )\n", + " margins = {'mu': mu, 'nu': nu}\n", + "else:\n", + " margins = None\n", + " \n", + "margins" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e962e8fd", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:49.335157Z", + "start_time": "2022-02-10T09:23:49.310317Z" + } + }, + "outputs": [], + "source": [ + "with_absent_nodes = False\n", + "min_proba_t = .0\n", + "max_proba_t = .2\n", + "proba_absent = None\n", + "\n", + "if with_absent_nodes:\n", + " absent_row_nodes = data.sample_absent_nodes(\n", + " T, N,\n", + " min_proba_t=min_proba_t,\n", + " max_proba_t=max_proba_t,\n", + " proba_absent=proba_absent\n", + " )\n", + " if not directed:\n", + " absent_col_nodes = absent_row_nodes.copy()\n", + " else:\n", + " absent_col_nodes = data.sample_absent_nodes(\n", + " T, D,\n", + " min_proba_t=min_proba_t,\n", + " max_proba_t=max_proba_t,\n", + " proba_absent=proba_absent\n", + " )\n", + "else:\n", + " absent_row_nodes, absent_col_nodes = [], []\n", + "\n", + "absent_nodes = {\n", + " 'absent_row_nodes': absent_row_nodes,\n", + " 'absent_col_nodes': absent_col_nodes\n", + "}\n", + "\n", + "absent_nodes" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "f1f1669f", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:23:56.082978Z", + "start_time": "2022-02-10T09:23:56.062983Z" + } + }, + "outputs": [], + "source": [ + "# scaling factor for the matrix gamma 
: determines the separability level\n", + "# lower is harder and more sparse\n", + "gamma_0 = .01\n", + "\n", + "# defines the sparsity in a block\n", + "# block_sparsity_matrix[t, k, l] is the proba of a zero\n", + "# in block (k, l) at time t\n", + "# corresponds to the \\beta_{kl}^t of Matias\n", + "block_sparsity_matrix = None\n", + "# block_sparsity_matrix = 0.1 * np.ones((Kz, Kw), dtype='float')\n", + "\n", + "# if we add gaussian noise to the sampled graph\n", + "# not advised since it can make models with lower\n", + "# complete data log likelihood give better classification results\n", + "# than model with higher complete data log likelihood\n", + "noise_level_ = 0.\n", + "\n", + "\n", + "if Kz == 3 and Kw == 4:\n", + " gamma = gamma_0 * np.array([\n", + " [1, 2, 3, 1 ],\n", + " [3, 1, 2, 3 ],\n", + " [2, 3, 1, 4 ]\n", + " ])\n", + "elif Kz == 3 and Kw == 3:\n", + " gamma = gamma_0 * np.array([\n", + " [1, 2, 3],\n", + " [3, 1, 2],\n", + " [2, 3, 1]\n", + " ])\n", + "else:\n", + " raise ValueError\n", + "\n", + "if T > 1:\n", + " gamma = np.stack([gamma for _ in range(T)], axis=0)\n", + " if block_sparsity_matrix is not None:\n", + " block_sparsity_matrix = np.stack([block_sparsity_matrix for _ in range(T)], axis=0)\n", + " \n", + "gamma" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1ab60b66", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-10T09:24:00.099758Z", + "start_time": "2022-02-10T09:23:58.117497Z" + } + }, + "outputs": [], + "source": [ + "dimensions = {'N': N, 'D': D}\n", + "n_clusters = {'Kz': Kz, 'Kw': Kw}\n", + "\n", + "self_loops = True\n", + "dtype = 'int32'\n", + "\n", + "X, Z, W = data.generate_data(\n", + " T,\n", + " model_type,\n", + " dimensions,\n", + " n_clusters,\n", + " prior_init,\n", + " prior_trans,\n", + " gamma,\n", + " with_margins,\n", + " margins,\n", + " self_loops,\n", + " directed,\n", + " noise_level_,\n", + " with_absent_nodes,\n", + " absent_nodes,\n", + " dtype,\n", + " 
block_sparsity_matrix\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "a2f6984a", + "metadata": {}, + "source": [ + "## Plot" + ] + }, + { + "cell_type": "markdown", + "id": "621122f7", + "metadata": {}, + "source": [ + "### Block view & link with matrix factorization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "851d9f4c", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:22.239985Z", + "start_time": "2022-02-03T11:40:21.551891Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "X_ = X[t_plot] if X.ndim == 3 else X\n", + "Z_ = Z[t_plot] if X.ndim == 3 else Z\n", + "W_ = W[t_plot] if X.ndim == 3 else W\n", + "gamma_ = gamma[t_plot] if X.ndim == 3 else gamma\n", + "\n", + "row_indices = np.argsort(Z_.astype(int))\n", + "col_indices = np.argsort(W_.astype(int))\n", + "\n", + "cmap = sns.cubehelix_palette(light=1., as_cmap=True)\n", + "f, ax = plt.subplots(1, 4, figsize=(4 * 5, 5))\n", + "\n", + "sns.heatmap(X_, ax=ax[0], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[0].set_title('Raw data')\n", + "\n", + "sns.heatmap(X_[row_indices, :], ax=ax[1], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[1].set_title('Row-reorganized data')\n", + "\n", + "sns.heatmap(X_[np.ix_(row_indices, col_indices)], ax=ax[2], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[2].set_title('Row and column-reorganized data')\n", + "\n", + "Z_encoded = general.encode(Z_, Kz)\n", + "W_encoded = general.encode(W_, Kw)\n", + "X_approx = Z_encoded.dot(gamma_).dot(W_encoded.T)\n", + "sns.heatmap(X_approx[np.ix_(row_indices, col_indices)], ax=ax[3], cbar=False, square=False, xticklabels=False, yticklabels=False, cmap=cmap)\n", + "ax[3].set_title('Connectivity-approximized data')\n", + "\n", + "plt.tight_layout()" + ] + }, + { + "cell_type": "markdown", + "id": "6f3b3e3c", + "metadata": {}, + "source": [ + "### Dimensionality 
reduction with Correspondence Analysis" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "a4ac827f", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:39.150051Z", + "start_time": "2022-02-03T11:40:34.318621Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, T - 1]\n", + "\n", + "if X.ndim == 2:\n", + " plot.CA_plot(X, Z, W)\n", + "else:\n", + " if type(t_plot) == int:\n", + " t_plot = [t_plot]\n", + "\n", + " n_plots = len(t_plot)\n", + " f, ax = plt.subplots(n_plots, 2, figsize=(10, 5 * n_plots))\n", + " for i, t in enumerate(t_plot):\n", + " W_plot = W[t] if W is not None else None\n", + "\n", + " absent_row = [tup[1] for tup in absent_row_nodes if tup[0] == t]\n", + " absent_col = [tup[1] for tup in absent_col_nodes if tup[0] == t]\n", + "\n", + " plot.CA_plot(\n", + " X[t],\n", + " Z[t], W_plot,\n", + " absent_row, absent_col,\n", + " ax=ax[i]\n", + " )\n", + " ax[i, 0].set_title(f't = {t}')\n", + " ax[i, 1].set_title(f't = {t}')\n" + ] + }, + { + "cell_type": "markdown", + "id": "e3c8e4a2", + "metadata": {}, + "source": [ + "### True margins" + ] + }, + { + "cell_type": "markdown", + "id": "51bdaee3", + "metadata": {}, + "source": [ + "Plot margins over time, in the dynamic case" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "80020334", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:44.260515Z", + "start_time": "2022-02-03T11:40:44.011289Z" + } + }, + "outputs": [], + "source": [ + "n_nodes = 20\n", + "f, ax = plt.subplots(1, 2, figsize=(2 * 6, 4))\n", + "\n", + "ax[0].plot(margins['mu'][:, np.random.choice(N, size=n_nodes)]);\n", + "ax[0].set_title('True row margins mu');\n", + "\n", + "ax[1].plot(margins['nu'][:, np.random.choice(D, size=n_nodes)]);\n", + "ax[1].set_title('True col margins nu');" + ] + }, + { + "cell_type": "markdown", + "id": "b54caf79", + "metadata": {}, + "source": [ + "### Factorial 
Discriminant Analysis" + ] + }, + { + "cell_type": "markdown", + "id": "6cb7cfe0", + "metadata": {}, + "source": [ + "Measures the level of linear separability of the classes after projection onto R^N using correspondence analysis\n", + "\n", + "See Discriminative Factorial Analysis : http://www.math.u-bordeaux.fr/~mchave100p/wordpress/wp-content/uploads/2013/10/AFD.pdf" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dd360456", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:40:49.139055Z", + "start_time": "2022-02-03T11:40:47.753899Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, -1]\n", + "\n", + "n_components = 3\n", + "\n", + "f, ax = plt.subplots(len(t_plot), 2, squeeze=False, sharex=True, sharey=True, figsize=(5 * len(t_plot), 8))\n", + "xs = np.arange(n_components, dtype='int')\n", + "\n", + "if X.ndim == 3:\n", + " for i, t in enumerate(t_plot):\n", + " res = metrics.AFD_CA_linear_separation(\n", + " X[t], Z[t], W[t],\n", + " n_components=n_components,\n", + " absent_row_nodes=absent_row_nodes,\n", + " absent_col_nodes=absent_col_nodes\n", + " )\n", + "\n", + " ax[i, 0].bar(xs, res[0])\n", + " ax[i, 1].bar(xs, res[1])\n", + " ax[i, 0].set_xlabel('factorial axis')\n", + " ax[i, 1].set_xlabel('factorial axis')\n", + " ax[i, 0].set_title(f'Rows, T = {t}')\n", + " ax[i, 1].set_title(f'Cols, T = {t}')\n", + "else:\n", + " res = metrics.AFD_CA_linear_separation(\n", + " X, Z, W,\n", + " n_components=n_components,\n", + " absent_row_nodes=absent_row_nodes,\n", + " absent_col_nodes=absent_col_nodes\n", + " )\n", + " ax[0, 0].bar(xs, res[0])\n", + " ax[0, 1].bar(xs, res[1])\n", + " ax[0, 0].set_xlabel('factorial axis')\n", + " ax[0, 1].set_xlabel('factorial axis')\n", + " ax[0, 0].set_title(f'Rows')\n", + " ax[0, 1].set_title(f'Cols')\n", + " \n", + "plt.suptitle('CA AFD linear separability', y=1);\n", + "plt.tight_layout()" + ] + }, + { + "cell_type": 
"markdown", + "id": "4c8c6ee6", + "metadata": {}, + "source": [ + "### Distribution of the values of the cells of the data matrix" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "52fbeac0", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:41:00.800991Z", + "start_time": "2022-02-03T11:41:00.136830Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, -1]\n", + "\n", + "bins = 50\n", + "val_min = 1\n", + "val_max = 100 # int or None\n", + "\n", + "f, ax = plt.subplots(len(t_plot), 1, sharex=True, sharey=True, figsize=(10, 1.5 * len(t_plot)))\n", + "\n", + "for i, t in enumerate(t_plot):\n", + " values = X[t].flatten()\n", + " values = values[values >= val_min]\n", + " if val_max is not None:\n", + " values = values[values < val_max]\n", + " ax[i].hist(values, bins=bins)\n", + " ax[i].set_title(f'time t = {t}')\n", + " \n", + "f.suptitle('Histogram of the values of the cells of the data matrix over time');\n", + "plt.tight_layout();" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "70363856", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:41:02.120634Z", + "start_time": "2022-02-03T11:41:01.567850Z" + } + }, + "outputs": [], + "source": [ + "t_plot = 0\n", + "t_plot = [t for t in range(T)]\n", + "t_plot = [0, T//2, -1]\n", + "\n", + "bins = 50\n", + "val_min = 0\n", + "val_max = None #int or None\n", + "\n", + "f, ax = plt.subplots(len(t_plot), 1, sharex=True, sharey=True, figsize=(10, 1.5 * len(t_plot)))\n", + "\n", + "for i, t in enumerate(t_plot):\n", + " values = X[t].sum(0).flatten()\n", + " values = values[values >= val_min]\n", + " if val_max is not None:\n", + " values = values[values < val_max]\n", + " ax[i].hist(values, bins=bins)\n", + " ax[i].set_title(f'time t = {t}')\n", + " \n", + "f.suptitle('Histogram of the degrees of the nodes over time');\n", + "plt.tight_layout();" + ] + }, + { + "cell_type": "markdown", 
+ "id": "258338e3", + "metadata": {}, + "source": [ + "# Models" + ] + }, + { + "cell_type": "markdown", + "id": "35d91484", + "metadata": {}, + "source": [ + "## HLBM" + ] + }, + { + "cell_type": "markdown", + "id": "be241d93", + "metadata": {}, + "source": [ + "### Algo params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "931527d8", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:16:34.960156Z", + "start_time": "2022-02-03T12:16:34.952768Z" + } + }, + "outputs": [], + "source": [ + "t_ = 0\n", + "if X.ndim == 3:\n", + " X_ = X[t_]\n", + " Z_, W_ = Z[t_], W[t_]\n", + "else:\n", + " X_ = X\n", + " Z_, W_ = Z, W\n", + "\n", + "sparse_X = True\n", + "\n", + "frac_r, frac_c = .1, .1\n", + "frac_noise = 0.\n", + "\n", + "lambda_0 = 3.\n", + "damping_factor = .7\n", + "\n", + "n_init = 20\n", + "model_type = 'with_margins' # 'with_margins', 'without_margins'\n", + "estimated_margins = True # True, False\n", + "# 'skmeans' requires spherecluster & Python 3.7\n", + "init_type = 'kmeans' # 'skmeans', 'kmeans'\n", + "regularize_row, regularize_col = False, False\n", + "regularization_mode = 'all' # 'all' 'mixture'\n", + "em_type = 'CEM' # 'VEM', 'CEM'\n", + "dtype = 'float64'\n", + "\n", + "n_init_clustering = 1\n", + "node_perturbation_rate = .1\n", + "multiplicative_init_rows = False # True, False\n", + "multiplicative_init_cols = False # True, False\n", + "power_multiplicative_init = 1 # True, False\n", + "given_Z, given_W = None, None\n", + "n_jobs = -1\n", + "random_state = None # or np.random.RandomState(42) \n", + "\n", + "max_iter = 200\n", + "tol_iter = 1e-8\n", + "\n", + "min_float = 1e-15\n", + "min_proba_Z, min_proba_W = .05, .05\n", + "min_proba_mixture_proportions = 1e-2 # to avoid empty clusters\n", + "min_margin = 1e-10\n", + "min_gamma = 1e-8\n", + "threshold_absent_nodes = -1\n", + "\n", + "# debug_list contains the names of the parameters fo the models\n", + "# or of the variational distribution that we wish to 
monitor\n", + "# during the fitting of the model\n", + "# This is done by writing the values of the model to disk\n", + "# so it takes time and space. Providing an empty list\n", + "# is the normal behavior\n", + "debug_list = []\n", + "debug_output = pathlib.Path(r'../dcblockmodels/model_debug_output')\n", + "verbose = 1\n", + "model_id = 1" + ] + }, + { + "cell_type": "markdown", + "id": "3975831e", + "metadata": {}, + "source": [ + "### Initialization" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "abd3c0f0", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:16:38.260411Z", + "start_time": "2022-02-03T12:16:38.246110Z" + } + }, + "outputs": [], + "source": [ + "if sparse_X:\n", + " if not sp.sparse.issparse(X_):\n", + " X_ = general.to_sparse(X_)\n", + "else:\n", + " if sp.sparse.issparse(X):\n", + " X_ = general.to_dense(X_)\n", + "\n", + "S_r = similarity_matrices.build_S(Z_, frac_r, frac_noise)\n", + "S_c = similarity_matrices.build_S(W_, frac_c, frac_noise)\n", + "\n", + "S_r = sp.sparse.csr.csr_matrix(S_r)\n", + "S_c = sp.sparse.csr.csr_matrix(S_c)" + ] + }, + { + "cell_type": "markdown", + "id": "ec0474ce", + "metadata": {}, + "source": [ + "### Fitting the model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "702fc4d2", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:16:44.073088Z", + "start_time": "2022-02-03T12:16:39.589722Z" + } + }, + "outputs": [], + "source": [ + "regularize_row, regularize_col = (lambda_0 != 0), (lambda_0 != 0)\n", + "lambda_r, lambda_c = lambda_0, lambda_0\n", + "\n", + "model = HLBM(\n", + " Kz=Kz, Kw=Kw,\n", + " model_type=model_type,\n", + " estimated_margins=estimated_margins,\n", + " regularization_mode=regularization_mode,\n", + " regularize_row=regularize_row,\n", + " regularize_col=regularize_col,\n", + " n_init=n_init,\n", + " max_iter=max_iter,\n", + " em_type=em_type,\n", + " damping_factor=damping_factor,\n", + " 
multiplicative_init_rows=multiplicative_init_rows,\n", + " multiplicative_init_cols=multiplicative_init_cols,\n", + " power_multiplicative_init=power_multiplicative_init,\n", + " min_float=min_float,\n", + " min_proba_Z=min_proba_Z,\n", + " min_proba_W=min_proba_W,\n", + " min_proba_mixture_proportions=min_proba_mixture_proportions,\n", + " min_margin=min_margin,\n", + " min_gamma=min_gamma,\n", + " init_type=init_type,\n", + " n_init_clustering=n_init_clustering,\n", + " node_perturbation_rate=node_perturbation_rate,\n", + " model_id=model_id,\n", + " dtype=dtype,\n", + " threshold_absent_nodes=threshold_absent_nodes,\n", + " blockmodel_params=None,\n", + " random_state=random_state,\n", + " tol_iter=tol_iter,\n", + " n_jobs=n_jobs,\n", + " verbose=verbose, \n", + " debug_list=debug_list,\n", + " debug_output=debug_output\n", + ")\n", + "\n", + "# Fit model\n", + "model.fit(\n", + " X_,\n", + " given_Z=given_Z,\n", + " given_W=given_W, \n", + " S_r=S_r, lambda_r=lambda_r,\n", + " S_c=S_c, lambda_c=lambda_c\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "91cd4cc9", + "metadata": {}, + "source": [ + "## Load/save model" + ] + }, + { + "cell_type": "markdown", + "id": "426e83bd", + "metadata": {}, + "source": [ + "### Save" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "35c23fc1", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T16:04:42.325906Z", + "start_time": "2022-01-24T16:04:42.312022Z" + } + }, + "outputs": [], + "source": [ + "model.save(path='../saved_models', modelname='my_model')" + ] + }, + { + "cell_type": "markdown", + "id": "25b79fd3", + "metadata": {}, + "source": [ + "### Load" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "895ce56c", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-24T16:04:56.102664Z", + "start_time": "2022-01-24T16:04:56.094125Z" + } + }, + "outputs": [], + "source": [ + "model = general.load_model('../saved_models/my_model')\n", + "model" + ] + }, + 
{ + "cell_type": "markdown", + "id": "4f5ab881", + "metadata": {}, + "source": [ + "# Metrics & visualizations" + ] + }, + { + "cell_type": "markdown", + "id": "0787b309", + "metadata": {}, + "source": [ + "## Partitions and criterion" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "48d5ec9c", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:15:04.857333Z", + "start_time": "2022-02-03T14:15:04.628455Z" + } + }, + "outputs": [], + "source": [ + "Z_model, W_model = model.best_partition(mode='likelihood', n_first=1)[0]\n", + "#Z_model, W_model = model.best_partition(mode='consensus: hbgf', n_first=(model.n_init) // 2)[0]\n", + "\n", + "plot.plot_criterions(\n", + " model,\n", + " thr_decrease=1000,\n", + " i_start=0, i_end=-1,\n", + " legend=True\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "386fad5b", + "metadata": {}, + "source": [ + "## HLBM" + ] + }, + { + "cell_type": "markdown", + "id": "99b7b664", + "metadata": {}, + "source": [ + "### Metrics" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d4b178ac", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:56:32.389344Z", + "start_time": "2022-02-03T11:56:32.369821Z" + } + }, + "outputs": [], + "source": [ + "Z_model, W_model = model.best_partition(mode='likelihood', n_first=1)[0]\n", + "\n", + "metrics.print_metrics(\n", + " Z_model, W_model, Z_, W_,\n", + " absent_nodes=None,\n", + " print_each_timestep=False\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "fbc6bd87", + "metadata": {}, + "source": [ + "### Distribution of the metrics" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c9d61f67", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T11:58:50.615439Z", + "start_time": "2022-02-03T11:58:50.273701Z" + } + }, + "outputs": [], + "source": [ + "f, ax = plt.subplots(figsize=(8, 4.5))\n", + "\n", + "caris = np.array([\n", + " metrics.get_metrics(\n", + " Z_model, W_model,\n", + " 
Z_, W_,\n", + " absent_nodes=None\n", + " )['cari']\n", + " for Z_model, W_model in model.best_partition(mode='likelihood', n_first=model.n_init)\n", + "])\n", + "sns.kdeplot(data=caris, ax=ax, bw=.2, clip=(caris.min() - .1, caris.max() + .1));\n", + "ax.set_title(f'{model.__class__.__name__}: max CARI = {100 * caris.max():.2f}');\n", + "ax.set_xlabel('global CARI values');" + ] + }, + { + "cell_type": "markdown", + "id": "761ec98f", + "metadata": {}, + "source": [ + "### Confusion matrix" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "63beaa4b", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:01:51.629518Z", + "start_time": "2022-02-03T12:01:51.614201Z" + } + }, + "outputs": [], + "source": [ + "print(' Z')\n", + "print(\n", + " metrics.cmat_clustering(\n", + " confusion_matrix(Z_model, Z_)\n", + " ),\n", + " end='\\n\\n'\n", + ")\n", + "print(' W')\n", + "print(\n", + " metrics.cmat_clustering(\n", + " confusion_matrix(W_model, W_)\n", + " ),\n", + " end='\\n\\n'\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "c706dfcf", + "metadata": {}, + "source": [ + "## Model parameters" + ] + }, + { + "cell_type": "markdown", + "id": "871a033d", + "metadata": {}, + "source": [ + "`model.best_parameters` : a list of length the number of initializations of the model. 
Each element of the list is a tuple in the form `(crit, param_dic)`, where `crit` is the best value of the objective criterion of the model of the given init and `param_dic` contains the parameters of the model that gave this `crit` " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7ec82d62", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:01:59.387254Z", + "start_time": "2022-02-03T12:01:59.379744Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "model.best_parameters[0]" + ] + }, + { + "cell_type": "markdown", + "id": "092ed77c", + "metadata": {}, + "source": [ + "Mapping the indexes of the found clusters to the indexes of the true clusters using the Kuhn Munkres/Hungarian algorithm on the confusion matrix" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3696d6ab", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:09:17.156827Z", + "start_time": "2022-02-03T12:09:17.148111Z" + } + }, + "outputs": [], + "source": [ + "from scipy.optimize import linear_sum_assignment\n", + "from sklearn.metrics import confusion_matrix\n", + "\n", + "Z_model, W_model = model.best_partition(mode='likelihood', n_first=1)[0]\n", + "\n", + "cmat_Z = confusion_matrix(Z_model, Z_)\n", + "cmat_W = confusion_matrix(W_model, W_)\n", + " \n", + "indexes_Z = linear_sum_assignment(-cmat_Z)[1]\n", + "indexes_W = linear_sum_assignment(-cmat_W)[1]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8e3122c6", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:09:18.353259Z", + "start_time": "2022-02-03T12:09:17.993155Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "f, ax = plt.subplots(1, 2, figsize=(12, 6))\n", + "\n", + "gamma_model = model.best_parameters[0][1]['gamma']\n", + "print(gamma_model)\n", + "print(indexes_W)\n", + "print(indexes_Z)\n", + "print(Z_model)\n", + "print(Z_)\n", + "print(W_model)\n", + "print(W_)\n", + "print(cmat_Z)\n", + 
"print(cmat_W)\n", + "\n", + "reordered_gamma_model = gamma_model[np.ix_(indexes_Z, indexes_W)]\n", + "\n", + "sns.heatmap(reordered_gamma_model, ax=ax[0], square=True)\n", + "sns.heatmap(gamma[0], ax=ax[1], square=True)\n", + "\n", + "ax[0].set_title('Estimated gamma');\n", + "ax[1].set_title('True gamma');" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6f478f8", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "hide_input": false, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.15" + }, + "toc": { + "base_numbering": 1, + "nav_menu": {}, + "number_sections": true, + "sideBar": true, + "skip_h1_title": false, + "title_cell": "Table of Contents", + "title_sidebar": "Contents", + "toc_cell": false, + "toc_position": { + "height": "calc(100% - 180px)", + "left": "10px", + "top": "150px", + "width": "486.4px" + }, + "toc_section_display": true, + "toc_window_display": true + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/hlbm_on_text_data.ipynb b/notebooks/hlbm_on_text_data.ipynb new file mode 100644 index 0000000..a679540 --- /dev/null +++ b/notebooks/hlbm_on_text_data.ipynb @@ -0,0 +1,961 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "ed4f90bd", + "metadata": {}, + "source": [ + "**This notebook applies the semi-supervised (or constrained) LBM `HLBM` on high-dimensional text data.**" + ] + }, + { + "cell_type": "markdown", + "id": "98daf658", + "metadata": {}, + "source": [ + "# Imports" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "cbd9af3b", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:23:14.066696Z", + "start_time": 
"2022-02-03T14:23:11.977193Z" + } + }, + "outputs": [], + "source": [ + "import os\n", + "import numpy as np\n", + "import scipy as sp\n", + "import pandas as pd\n", + "import matplotlib.pyplot as plt\n", + "import seaborn as sns\n", + "import pathlib\n", + "\n", + "from dcblockmodels.models.hlbm import HLBM\n", + "from dcblockmodels import metrics, plot, data\n", + "from dcblockmodels.models.utils import similarity_matrices, general, init\n", + "\n", + "import os\n", + "import sys\n", + "sys.stderr = open(os.devnull, \"w\")" + ] + }, + { + "cell_type": "markdown", + "id": "683348b4", + "metadata": {}, + "source": [ + "# Data" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "0f4d4dc9", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:14:26.095465Z", + "start_time": "2022-02-03T13:14:25.865931Z" + } + }, + "outputs": [], + "source": [ + "import re\n", + "import nltk \n", + "from sklearn.feature_extraction.text import CountVectorizer\n", + "from nltk.corpus import stopwords\n", + "nltk.download('stopwords')\n", + "\n", + "def handle_spe_char(s):\n", + " \"\"\"\n", + " Removes some special characters considered uniinformative\n", + " The \"''\" character has to be treated separately\n", + " If there is a number in a token, it is replaced\n", + " by =number and spaces are added around it\n", + " \"\"\"\n", + " spe_char = \".,;()'-/:=[]`*+\\_^|\" #‘’\n", + " table = str.maketrans(dict.fromkeys(spe_char))\n", + " s = s.translate(table)\n", + " s = re.sub(\"''\", \"\", s)\n", + " #s = re.sub('|', ' ', s)\n", + " s = re.sub('\\n', ' ', s)\n", + " s = re.sub('\\d+', ' =number ', s)\n", + " return s\n", + "\n", + "class StemTokenizer:\n", + " stop_words = stopwords.words('english')\n", + " \n", + " def __init__(self, min_word_length):\n", + " self.stemmer = nltk.stem.PorterStemmer()\n", + " self.tokenizer = nltk.word_tokenize\n", + " self.min_wl = min_word_length\n", + " \n", + " def __call__(self, doc):\n", + " res_doc = []\n", + " for 
token in self.tokenizer(doc):\n", + " token_ = self.stemmer.stem(token)\n", + " if len(token_) >= self.min_wl:\n", + " if token_ not in self.stop_words:\n", + " res_doc.append(token_)\n", + " return res_doc\n", + " \n", + "\n", + "vectorizer = CountVectorizer(\n", + " input='content', encoding='utf-8',\n", + " decode_error='strict', strip_accents=None,\n", + " lowercase=True, preprocessor=handle_spe_char,\n", + " tokenizer=StemTokenizer(min_word_length=3),\n", + " stop_words=None, token_pattern=r\"(?u)\\b\\w\\w+\\b\",\n", + " ngram_range=(1, 1), analyzer='word',\n", + " max_df=1.0, min_df=1, max_features=None,\n", + " vocabulary=None, binary=False, dtype=np.int64\n", + ")\n" + ] + }, + { + "cell_type": "markdown", + "id": "444e58ee", + "metadata": {}, + "source": [ + "## 20 Newsgroup" + ] + }, + { + "cell_type": "markdown", + "id": "6aef9c1c", + "metadata": {}, + "source": [ + "### Build" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7c94c45c", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:19:39.822609Z", + "start_time": "2022-02-03T12:19:39.388314Z" + } + }, + "outputs": [], + "source": [ + "from sklearn.datasets import fetch_20newsgroups\n", + "\n", + "corpus = fetch_20newsgroups(\n", + " data_home=None, subset='all',\n", + " categories=None, shuffle=True,\n", + " random_state=42, remove=(),\n", + " download_if_missing=True)\n", + "\n", + "corpus.target, corpus.target_names, corpus" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "39b94d74", + "metadata": {}, + "outputs": [], + "source": [ + "nltk.download('punkt')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "605a7122", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:22:13.874435Z", + "start_time": "2022-02-03T12:19:40.726561Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "X0 = vectorizer.fit_transform(corpus.data)\n", + "y_ = corpus.target\n", + "wf = 
np.squeeze(np.asarray(X0.sum(0)))\n", + "\n", + "X0.shape, vectorizer.get_feature_names()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b672bc73", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:22:14.103107Z", + "start_time": "2022-02-03T12:22:13.876815Z" + } + }, + "outputs": [], + "source": [ + "plt.plot([np.where(wf >= k)[0].shape[0] for k in range(20)]);\n", + "plt.plot([19949 for k in range(20)]) # 26214\n", + "plt.xlabel('word frequency threshold')\n", + "plt.ylabel('X.shape[1]')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "79336310", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T12:22:14.519733Z", + "start_time": "2022-02-03T12:22:14.104609Z" + } + }, + "outputs": [], + "source": [ + "n_docs = 10000 # None\n", + "min_word_frequency = 9\n", + "\n", + "features = np.array(vectorizer.get_feature_names())\n", + "selected_features_ind = np.where(wf >= min_word_frequency)[0]\n", + "selected_features = features[selected_features_ind]\n", + "unselected_features = features[~selected_features_ind]\n", + "\n", + "X = X0[:, selected_features_ind]\n", + "\n", + "if n_docs is not None:\n", + " docs = np.random.choice(X.shape[0], size=n_docs, replace=False)\n", + " X = X[docs]\n", + " y_ = y_[docs]\n", + " del docs\n", + "\n", + "del X0, features, wf, corpus\n", + "\n", + "X.shape, list(unselected_features)" + ] + }, + { + "cell_type": "markdown", + "id": "d736c296", + "metadata": {}, + "source": [ + "### Save/load" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "3affb526", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T10:55:00.672514Z", + "start_time": "2022-01-28T10:55:00.588742Z" + } + }, + "outputs": [], + "source": [ + "sp.sparse.save_npz('data_ng20', X)\n", + "np.save('labels_ng20', y_)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4a59d6fb", + "metadata": { + "ExecuteTime": { + "end_time": 
"2022-01-28T11:04:10.716543Z", + "start_time": "2022-01-28T11:04:10.683613Z" + } + }, + "outputs": [], + "source": [ + "X = sp.sparse.load_npz('data_ng20.npz')\n", + "y_ = np.load('labels_ng20.npy')" + ] + }, + { + "cell_type": "markdown", + "id": "076b894f", + "metadata": {}, + "source": [ + "## Reuters NLTK" + ] + }, + { + "cell_type": "markdown", + "id": "1e681d2c", + "metadata": {}, + "source": [ + "### Build" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "8e9f6ace", + "metadata": {}, + "outputs": [], + "source": [ + "import nltk\n", + "nltk.download('reuters')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "467e6b33", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T13:38:05.458909Z", + "start_time": "2022-01-28T13:38:03.752041Z" + } + }, + "outputs": [], + "source": [ + "from nltk.corpus import reuters\n", + "\n", + "fileids = reuters.fileids()\n", + "\n", + "corpus = []\n", + "y = []\n", + "for fileid in fileids[:]:\n", + " labels = reuters.categories(fileid)\n", + " if len(labels) == 1:\n", + " corpus.append(reuters.raw(fileid))\n", + " y.append(labels[0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4568bf1a", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T13:38:05.473853Z", + "start_time": "2022-01-28T13:38:05.460877Z" + } + }, + "outputs": [], + "source": [ + "import pandas as pd\n", + "n_largest_classes = 10\n", + "classes, counts = np.unique(np.asarray(y), return_counts=True)\n", + "df_classes = pd.DataFrame({'classes': classes, 'counts': counts}).sort_values(by='counts', ascending=False)\n", + "categories = df_classes['classes'].values[:n_largest_classes]\n", + "categories" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "79b5de5a", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T13:38:05.964017Z", + "start_time": "2022-01-28T13:38:05.475299Z" + } + }, + "outputs": [], + "source": [ + "fileids = 
reuters.fileids(categories=categories)\n", + "\n", + "corpus = []\n", + "y = []\n", + "for fileid in fileids[:]:\n", + " labels = reuters.categories(fileid)\n", + " if len(labels) == 1:\n", + " corpus.append(reuters.raw(fileid))\n", + " y.append(labels[0])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "438156c1", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T13:38:30.324862Z", + "start_time": "2022-01-28T13:38:05.965851Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "X0 = vectorizer.fit_transform(corpus)\n", + "\n", + "y = np.array(y)\n", + "classes = np.unique(y)\n", + "map_classes = lambda c : np.argmax(classes == c)\n", + "vmap_classes = np.vectorize(map_classes)\n", + "y_ = vmap_classes(y)\n", + "wf = np.squeeze(np.asarray(X0.sum(0)))\n", + "\n", + "X0.shape, vectorizer.get_feature_names()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "62c1725d", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T13:38:30.533417Z", + "start_time": "2022-01-28T13:38:30.326426Z" + } + }, + "outputs": [], + "source": [ + "plt.plot([np.where(wf >= k)[0].shape[0] for k in range(20)]);\n", + "plt.plot([18900 for k in range(20)])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b36dfcd5", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T13:38:30.589577Z", + "start_time": "2022-01-28T13:38:30.535001Z" + } + }, + "outputs": [], + "source": [ + "min_word_frequency = 5\n", + "\n", + "features = np.array(vectorizer.get_feature_names())\n", + "selected_features_ind = np.where(wf >= min_word_frequency)[0]\n", + "selected_features = features[selected_features_ind]\n", + "unselected_features = features[~selected_features_ind]\n", + "\n", + "X = X0[:, selected_features_ind]\n", + "\n", + "X.shape, list(unselected_features)" + ] + }, + { + "cell_type": "markdown", + "id": "077adbbc", + "metadata": {}, + "source": [ + "### Save/load" + ] + }, + { + "cell_type": "code", 
+ "execution_count": null, + "id": "570aba3b", + "metadata": { + "ExecuteTime": { + "end_time": "2022-01-28T13:38:30.803326Z", + "start_time": "2022-01-28T13:38:30.591513Z" + } + }, + "outputs": [], + "source": [ + "sp.sparse.save_npz('data_reuters', X)\n", + "np.save('labels_reuters', y_)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4b0145cc", + "metadata": {}, + "outputs": [], + "source": [ + "X = sp.sparse.load_npz('data_reuters.npz')\n", + "y_ = np.load('labels_reuters.npy')" + ] + }, + { + "cell_type": "markdown", + "id": "63cd7af3", + "metadata": {}, + "source": [ + "## Classic" + ] + }, + { + "cell_type": "markdown", + "id": "4412be51", + "metadata": {}, + "source": [ + "### Build" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fbcc32ad", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T14:23:18.696625Z", + "start_time": "2022-02-03T14:23:17.648900Z" + } + }, + "outputs": [], + "source": [ + "import zipfile\n", + "\n", + "dataset_dir = '../datasets/'\n", + "if 'classic' not in os.listdir(dataset_dir):\n", + " with zipfile.ZipFile(dataset_dir + 'classic.zip', 'r') as zip_ref:\n", + " zip_ref.extractall(dataset_dir)\n", + "os.listdir(dataset_dir)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d87cb71a", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:14:53.423619Z", + "start_time": "2022-02-03T13:14:33.044322Z" + } + }, + "outputs": [], + "source": [ + "path = dataset_dir + 'classic/'\n", + "\n", + "def get_X_classic(path, vectorizer):\n", + " files = os.listdir(path)\n", + " corpus = []\n", + " y = []\n", + " for file in files:\n", + " y.append(file.split('.')[0]) # cluster in file name\n", + " with open(path + file) as f:\n", + " doc = ''.join(f.readlines())\n", + " corpus.append(doc)\n", + "\n", + " X0 = vectorizer.fit_transform(corpus)\n", + " return X0, y\n", + "\n", + "X0, y = get_X_classic(path, vectorizer)\n", + "\n", + "y = np.array(y)\n", + 
"classes = np.unique(y)\n", + "map_classes = lambda c : np.argmax(classes == c)\n", + "vmap_classes = np.vectorize(map_classes)\n", + "y_ = vmap_classes(y)\n", + "\n", + "X0.shape, vectorizer.get_feature_names()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "b7ce53ac", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:14:53.507717Z", + "start_time": "2022-02-03T13:14:53.425845Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "min_word_frequency = 4 #4\n", + "wf = np.squeeze(np.asarray(X0.sum(0)))\n", + "\n", + "features = np.array(vectorizer.get_feature_names())\n", + "selected_features_ind = np.where(wf >= min_word_frequency)[0]\n", + "selected_features = features[selected_features_ind]\n", + "unselected_features = features[~selected_features_ind]\n", + "\n", + "X = X0[:, selected_features_ind]\n", + "\n", + "X.shape, list(unselected_features)" + ] + }, + { + "cell_type": "markdown", + "id": "4c9e5cad", + "metadata": {}, + "source": [ + "### Save/load" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2c7da18a", + "metadata": { + "ExecuteTime": { + "end_time": "2021-07-21T15:57:22.082456Z", + "start_time": "2021-07-21T15:57:19.406120Z" + } + }, + "outputs": [], + "source": [ + "sp.sparse.save_npz('data_classic', X)\n", + "np.save('labels_classic', y_)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e6e16f9e", + "metadata": {}, + "outputs": [], + "source": [ + "X = sp.sparse.load_npz('data_classic.npz')\n", + "y_ = np.load('labels_classic.npy')" + ] + }, + { + "cell_type": "markdown", + "id": "b519acfe", + "metadata": {}, + "source": [ + "# Model" + ] + }, + { + "cell_type": "markdown", + "id": "8dd6b845", + "metadata": {}, + "source": [ + "## Model params" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ac416815", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:15:02.119890Z", + "start_time": "2022-02-03T13:15:02.115223Z" + 
} + }, + "outputs": [], + "source": [ + "X.shape, np.unique(y_).shape[0]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "c90f3fac", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:15:02.423069Z", + "start_time": "2022-02-03T13:15:02.409397Z" + } + }, + "outputs": [], + "source": [ + "np.unique(X.data, return_counts=True)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "dbf8dec7", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:15:02.997189Z", + "start_time": "2022-02-03T13:15:02.641958Z" + } + }, + "outputs": [], + "source": [ + "f, ax = plt.subplots(figsize=(10, 10))\n", + "ax.spy(X, markersize=.1, precision=1)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "e57d244d", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:15:03.134598Z", + "start_time": "2022-02-03T13:15:03.125542Z" + } + }, + "outputs": [], + "source": [ + "Kz = np.unique(y_).shape[0]\n", + "Kw = 10\n", + "\n", + "max_iter = 100\n", + "tol_iter = 1e-5\n", + "\n", + "frac_r, frac_c = .01, None\n", + "frac_noise = 0.\n", + "\n", + "n_init = 10\n", + "model_type = 'with_margins' # 'with_margins', 'without_margins'\n", + "estimated_margins = False # True, False\n", + "init_type = 'kmeans' #'skmeans' # 'skmeans', 'kmeans'\n", + "regularize_row, regularize_col = True, False\n", + "regularization_mode = 'all' # 'all' 'mixture'\n", + "em_type = 'VEM' # 'VEM', 'CEM'\n", + "compute_regularization = True\n", + "\n", + "lambda_r = 1.\n", + "lambda_c = None\n", + "damping_factor = None if em_type == 'CEM' else .7\n", + "\n", + "multiplicative_init_rows, multiplicative_init_cols = False, False # True, False\n", + "power_multiplicative_init = 1\n", + "given_Z, given_W = None, None\n", + "\n", + "min_float = 1e-15\n", + "min_proba_Z, min_proba_W = .005, .005\n", + "min_proba_mixture_proportions = .1 * (1 / Kz) # to avoid empty clusters\n", + "min_margin = 1e-12\n", + "min_gamma = 1e-12\n", + 
"threshold_absent_nodes = 0\n", + "dtype = 'float32'\n", + "debug_output = pathlib.Path(r'../dcblockmodels/model_debug_output')\n", + "\n", + "n_init_clustering = 7 * 1\n", + "node_perturbation_rate = .2" + ] + }, + { + "cell_type": "markdown", + "id": "ef7916de", + "metadata": {}, + "source": [ + "## Similarity" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "1b50fc4d", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:15:14.298175Z", + "start_time": "2022-02-03T13:15:04.292928Z" + } + }, + "outputs": [], + "source": [ + "if regularize_row:\n", + " S_r = similarity_matrices.build_S_sparse(y_, frac_r, stratified=False)\n", + "else:\n", + " S_r = None\n", + "S_c = None\n", + "\n", + "S_r, S_c" + ] + }, + { + "cell_type": "markdown", + "id": "6c1be724", + "metadata": {}, + "source": [ + "## Fitting the model" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "fd998f3b", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:17:54.921904Z", + "start_time": "2022-02-03T13:15:14.300575Z" + }, + "scrolled": true + }, + "outputs": [], + "source": [ + "model = HLBM(\n", + " Kz=Kz, Kw=Kw,\n", + " model_type=model_type,\n", + " estimated_margins=estimated_margins,\n", + " regularization_mode=regularization_mode,\n", + " regularize_row=regularize_row, regularize_col=regularize_col,\n", + " n_init=n_init,\n", + " max_iter=max_iter,\n", + " em_type=em_type,\n", + " damping_factor=damping_factor,\n", + " multiplicative_init_rows=multiplicative_init_rows,\n", + " multiplicative_init_cols=multiplicative_init_cols,\n", + " power_multiplicative_init=power_multiplicative_init,\n", + " min_float=min_float,\n", + " min_proba_Z=min_proba_Z,\n", + " min_proba_W=min_proba_W,\n", + " min_proba_mixture_proportions=min_proba_mixture_proportions,\n", + " min_margin=min_margin,\n", + " min_gamma=min_gamma,\n", + " init_type=init_type,\n", + " n_init_clustering=n_init_clustering,\n", + " 
node_perturbation_rate=node_perturbation_rate,\n", + " compute_regularization=compute_regularization,\n", + " model_id=1,\n", + " dtype=dtype,\n", + " threshold_absent_nodes=threshold_absent_nodes,\n", + " blockmodel_params=None,\n", + " random_state=None, #np.random.RandomState(42) \n", + " tol_iter=tol_iter,\n", + " n_jobs=-1,\n", + " verbose=1, debug_list=[], #'Z', 'W'\n", + " debug_output=debug_output\n", + ")\n", + "model.fit(\n", + " X,\n", + " given_Z=given_Z,\n", + " given_W=given_W, \n", + " S_r=S_r, lambda_r=lambda_r,\n", + " S_c=S_c, lambda_c=lambda_c\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "471faf4d", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:17:55.204754Z", + "start_time": "2022-02-03T13:17:54.923929Z" + } + }, + "outputs": [], + "source": [ + "plot.plot_criterions(\n", + " model,\n", + " thr_decrease=1000,\n", + " i_start=0, i_end=-1,\n", + " legend=True\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "292eb55f", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:17:55.223934Z", + "start_time": "2022-02-03T13:17:55.207349Z" + } + }, + "outputs": [], + "source": [ + "Z_model, W_model = model.best_partition(mode='likelihood', n_first=1)[0]\n", + "\n", + "metrics.print_metrics(\n", + " Z_model, W_model, y_, None,\n", + " absent_nodes=None,\n", + " print_each_timestep=False\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "31cd63ba", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:17:55.695263Z", + "start_time": "2022-02-03T13:17:55.226095Z" + } + }, + "outputs": [], + "source": [ + "from sklearn.metrics import confusion_matrix\n", + "\n", + "Z_model, W_model = model.best_partition(mode='likelihood', n_first=1)[0]\n", + "cmat = metrics.cmat_clustering(confusion_matrix(Z_model, y_))\n", + "\n", + "f, ax = plt.subplots(figsize=(10, 10))\n", + "sns.heatmap(cmat, annot=True, fmt='.0f', ax=ax, square=True, 
cmap=sns.light_palette(\"red\"))\n", + "ax.set_title('confusion matrix');\n", + "ax.set_xlabel('predicted');\n", + "ax.set_ylabel('true');" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d2e4e37b", + "metadata": { + "ExecuteTime": { + "end_time": "2022-02-03T13:17:56.792808Z", + "start_time": "2022-02-03T13:17:55.697194Z" + } + }, + "outputs": [], + "source": [ + "lw_cluster = 2.\n", + "\n", + "X_reorg = X.toarray()[np.ix_(np.argsort(Z_model), np.argsort(W_model))]\n", + "\n", + "f, ax = plt.subplots(figsize=(10, 10))\n", + "ax.spy(X_reorg, markersize=.1, precision=1)\n", + "\n", + "# plots the lines that separates the blocks\n", + "row_clusters, unique_row_indices = np.unique(Z_model, return_counts=True)\n", + "x_indices = np.cumsum(unique_row_indices)\n", + "for x in x_indices[:-1]:\n", + " ax.axhline(x, linewidth=lw_cluster)\n", + "\n", + "col_clusters, unique_col_indices = np.unique(W_model, return_counts=True)\n", + "y_indices = np.cumsum(unique_col_indices)\n", + "for x in y_indices[:-1]:\n", + " ax.axvline(x, linewidth=lw_cluster)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "ecf49d8a", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "hide_input": false, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.15" + }, + "toc": { + "base_numbering": 1, + "nav_menu": {}, + "number_sections": true, + "sideBar": true, + "skip_h1_title": false, + "title_cell": "Table of Contents", + "title_sidebar": "Contents", + "toc_cell": false, + "toc_position": { + "height": "calc(100% - 180px)", + "left": "10px", + "top": "150px", + "width": "480px" + }, + "toc_section_display": true, + 
"toc_window_display": true + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..cd84611 --- /dev/null +++ b/setup.py @@ -0,0 +1,64 @@ +import sys +import warnings +from setuptools import setup, find_packages + +with open("README.md", "r", encoding="utf-8") as fh: + long_description = fh.read() + +interactive = [ + 'notebook==5.7.10', + 'jupyter_contrib_nbextensions', + 'jupyter_nbextensions_configurator', + 'matplotlib', + 'networkx', + 'seaborn', + 'plotly', + 'pandas', + 'prince', # for Correspondence Analysis + 'nltk' # for notebook of text processing +] +metrics = ['sparsebm'] # for Co-clustering ARI (CARI) +tests = ['pytest', 'jinja2<3.1'] + +if (sys.version_info.major, sys.version_info.minor) != (3, 7): + warnings.warn( + 'Python version is different from 3.7 -> spherecluster package ' + 'will not be installed, so initializations with spherical k-means ' + 'will not be possible.' + ) + initialization = ['scikit-learn'] + base = [ + 'numpy', + 'scipy', + 'numba' + ] +else: + warnings.warn( + 'Python version is 3.7 -> spherecluster package can be installed ' + 'so initializations with spherical k-means are possible' + ) + initialization = ['spherecluster', 'scikit-learn==0.20'] + base = [ + 'numpy==1.21', + 'scipy', + 'numba' + ] + +all_extras = interactive + initialization + metrics + tests + +setup( + name='dcblockmodels', + version='1.0.0', + description='Dynamic and constrained block models', + install_requires=base, + extras_require={ + 'interactive': interactive, + 'intialization': initialization, + 'metrics': metrics, + 'all': all_extras + }, + long_description=long_description, + long_description_content_type='text/markdown', + tests_require=tests, + packages=find_packages() +)
Select Sankey options :