{ "info": { "author": "Kazuaki Tanida", "author_email": "UNKNOWN", "bugtrack_url": null, "classifiers": [ "Intended Audience :: Science/Research", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2", "Programming Language :: Python :: 3", "Topic :: Scientific/Engineering" ], "description": "universal-divergence\n--------------------\n\nuniversal-divergence is a Python module for estimating divergence of two sets of samples generated from the two underlying distributions.\nThe theory of the estimator is based on `a paper\n`_ written by Q.Wang et al [1]_.\n\nInstall\n-------\n\n::\n\n pip install universal-divergence\n\nExample\n-------\n\n::\n\n from __future__ import print_function\n\n import numpy as np\n from universal_divergence import estimate\n\n mean = [0, 0]\n cov = [[1, 0], [0, 10]]\n x = np.random.multivariate_normal(mean, cov, 100)\n y = np.random.multivariate_normal(mean, cov, 100)\n print(estimate(x, y)) # will be close to 0.0\n\n mean2 = [10, 0]\n cov2 = [[5, 0], [0, 5]]\n z = np.random.multivariate_normal(mean2, cov2, 100)\n print(estimate(x, z)) # will be bigger than 0.0\n\nReferences\n----------\n\n.. [1] Qing Wang, Sanjeev R. Kulkarni, and Sergio Verd\u00fa. \"Divergence estimation for multidimensional densities via k-nearest-neighbor distances.\" Information Theory, IEEE Transactions on 55.5 (2009): 2392-2405.\n", "description_content_type": null, "docs_url": null, "download_url": "UNKNOWN", "downloads": { "last_day": -1, "last_month": -1, "last_week": -1 }, "home_page": "https://github.com/slaypni/universal-divergence", "keywords": "KL,Kullback-Leibler,divergence,information measure", "license": "MIT", "maintainer": null, "maintainer_email": null, "name": "universal-divergence", "package_url": "https://pypi.org/project/universal-divergence/", "platform": "UNKNOWN", "project_url": "https://pypi.org/project/universal-divergence/", "project_urls": { "Download": "UNKNOWN", "Homepage": "https://github.com/slaypni/universal-divergence" }, "release_url": "https://pypi.org/project/universal-divergence/0.2.0/", "requires_dist": null, "requires_python": null, "summary": "A divergence estimator of two sets of samples.", "version": "0.2.0" }, "last_serial": 2010823, "releases": { "0.1.0": [ { "comment_text": "", "digests": { "md5": "18263ea4eeffdf14e5bc7cf80fd33f33", "sha256": "461839cc794f9e668ad373f48aed147cd2767bd42e6a5167715d86d336cb5447" }, "downloads": -1, "filename": "universal-divergence-0.1.0.tar.gz", "has_sig": false, "md5_digest": "18263ea4eeffdf14e5bc7cf80fd33f33", "packagetype": "sdist", "python_version": "source", "requires_python": null, "size": 2472, "upload_time": "2016-03-14T18:52:02", "url": "https://files.pythonhosted.org/packages/fb/44/01e2fb3b3aa6534bbe3b8d3c18d9b3a0284739b5b74d03c105d823a5df53/universal-divergence-0.1.0.tar.gz" } ], "0.2.0": [ { "comment_text": "", "digests": { "md5": "7c324fb82b10832c3e924441ad5bcc69", "sha256": "812fb206a02ca21a00038403d412138578ee048848b32526bd2b1f95daaaebce" }, "downloads": -1, "filename": "universal-divergence-0.2.0.tar.gz", "has_sig": false, "md5_digest": "7c324fb82b10832c3e924441ad5bcc69", "packagetype": "sdist", "python_version": "source", "requires_python": null, "size": 2689, "upload_time": "2016-03-16T18:19:24", "url": "https://files.pythonhosted.org/packages/58/57/5f5e30e6ff1c6d63fb925517fcc467325bfa82f9a2e81a39bc69197e76d1/universal-divergence-0.2.0.tar.gz" } ] }, "urls": [ { "comment_text": "", "digests": { "md5": "7c324fb82b10832c3e924441ad5bcc69", "sha256": 
"812fb206a02ca21a00038403d412138578ee048848b32526bd2b1f95daaaebce" }, "downloads": -1, "filename": "universal-divergence-0.2.0.tar.gz", "has_sig": false, "md5_digest": "7c324fb82b10832c3e924441ad5bcc69", "packagetype": "sdist", "python_version": "source", "requires_python": null, "size": 2689, "upload_time": "2016-03-16T18:19:24", "url": "https://files.pythonhosted.org/packages/58/57/5f5e30e6ff1c6d63fb925517fcc467325bfa82f9a2e81a39bc69197e76d1/universal-divergence-0.2.0.tar.gz" } ] }