{ "info": { "author": "Ceasar Bautista", "author_email": "cbautista2010@gmail.com", "bugtrack_url": null, "classifiers": [ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Topic :: Software Development :: Libraries :: Python Modules" ], "description": "words\n=====\n\nGet large collection of words from books.\n\nUsage\n-----\n\n1. Make a directory called ``books/``\n2. Put ``.txt`` files in it. Presumably large ones, i.e. books, which are easily available from Gutenberg_.\n3. Run ``words.py`` and wait a moment.\n4. Receive ``dictionary.txt``.\n\nDescription\n-----------\n\n`words.py` simply reads each of the books and puts each of the words in a book filtering any that obviously aren't words (anything containing numbers or punctuation).\n\nNOTE: This method may generate non-words which look like words. It is worth reviewing the lexicon before any serious use.\n\n.. _Gutenberg: http://www.gutenberg.org/\n", "description_content_type": null, "docs_url": null, "download_url": "UNKNOWN", "downloads": { "last_day": -1, "last_month": -1, "last_week": -1 }, "home_page": "https://github.com/Ceasar/words", "keywords": "words,dictionary,lexicon", "license": "MIT", "maintainer": null, "maintainer_email": null, "name": "words", "package_url": "https://pypi.org/project/words/", "platform": "UNKNOWN", "project_url": "https://pypi.org/project/words/", "project_urls": { "Download": "UNKNOWN", "Homepage": "https://github.com/Ceasar/words" }, "release_url": "https://pypi.org/project/words/0.0.1/", "requires_dist": null, "requires_python": null, "summary": "Tool to get a large collection of words.", "version": "0.0.1" }, "last_serial": 932883, "releases": { "0.0.1": [ { "comment_text": "", "digests": { "md5": "e7bbdb79189b91a7c0e22cfcd8cc68d8", "sha256": "6c26114adb50256e1ec2d6a09a50fc611200438558f899d72dd51a2694b02467" }, "downloads": -1, "filename": "words-0.0.1.tar.gz", "has_sig": false, "md5_digest": "e7bbdb79189b91a7c0e22cfcd8cc68d8", "packagetype": "sdist", "python_version": "source", "requires_python": null, "size": 1979, "upload_time": "2013-12-01T04:31:10", "url": "https://files.pythonhosted.org/packages/c0/97/a9d2f6e51455cc2fcf54ead4897bd3aef5bb16728a2b631808552a29bec6/words-0.0.1.tar.gz" } ] }, "urls": [ { "comment_text": "", "digests": { "md5": "e7bbdb79189b91a7c0e22cfcd8cc68d8", "sha256": "6c26114adb50256e1ec2d6a09a50fc611200438558f899d72dd51a2694b02467" }, "downloads": -1, "filename": "words-0.0.1.tar.gz", "has_sig": false, "md5_digest": "e7bbdb79189b91a7c0e22cfcd8cc68d8", "packagetype": "sdist", "python_version": "source", "requires_python": null, "size": 1979, "upload_time": "2013-12-01T04:31:10", "url": "https://files.pythonhosted.org/packages/c0/97/a9d2f6e51455cc2fcf54ead4897bd3aef5bb16728a2b631808552a29bec6/words-0.0.1.tar.gz" } ] }