PK 6{LD D turbogears/errorhandling.pyimport sys
from itertools import izip, islice
from inspect import getargspec
import cherrypy
from dispatch import generic, NoApplicableMethods, strategy
from turbogears.util import inject_args, adapt_call, call_on_stack, has_arg, \
remove_keys, Enum, combine_contexts
from turbogears.decorator import func_eq
from turbogears.genericfunctions import MultiorderGenericFunction
# Convenience re-export of the dispatch default strategy.
default = strategy.default


def dispatch_error(controller, tg_source, tg_errors, tg_exceptions,
                   *args, **kw):
    """Dispatch error.

    Error handler is a function registered via register_handler or if no
    such decorator was applied, the method triggering the error.
    """
# Turn dispatch_error into a multi-order generic function; concrete
# handlers are attached later via dispatch_error.when(...).
dispatch_error = generic(MultiorderGenericFunction)(dispatch_error)
def _register_implicit_errh(controller, tg_source, tg_errors,
                            tg_exceptions, *args, **kw):
    """Register implicitly declared error handler and re-dispatch.

    Any method declaring a tg_errors parameter is considered an implicitly
    declared error handler.
    """
    # Register the method as its own error handler, then dispatch again so
    # the freshly registered rule can match.
    error_handler(tg_source)(tg_source)
    return dispatch_error(controller, tg_source, tg_errors, tg_exceptions,
                          *args, **kw)
# order=3 keeps this fallback below explicitly registered handlers.
_register_implicit_errh = dispatch_error.when(
    "(tg_errors and has_arg(tg_source, 'tg_errors'))", order=3)(
    _register_implicit_errh)
def _register_implicit_exch(controller, tg_source, tg_errors,
                            tg_exceptions, *args, **kw):
    """Register implicitly declared exception handler and re-dispatch.

    Any method declaring a tg_exceptions parameter is considered an
    implicitly declared exception handler.
    """
    # Register the method as its own exception handler, then dispatch again
    # so the freshly registered rule can match.
    exception_handler(tg_source)(tg_source)
    return dispatch_error(controller, tg_source, tg_errors, tg_exceptions,
                          *args, **kw)
# order=3 keeps this fallback below explicitly registered handlers.
_register_implicit_exch = dispatch_error.when(
    "(tg_exceptions and has_arg(tg_source, 'tg_exceptions'))", order=3)(
    _register_implicit_exch)
def dispatch_error_adaptor(func):
    """Construct a signature isomorphic to dispatch_error.

    The actual handler will receive only arguments explicitly
    declared in its signature.
    """
    def adaptor(controller, tg_source, tg_errors, tg_exceptions, *args, **kw):
        # Offer the special tg_* values to the handler; inject_args only
        # passes along the ones the handler actually declares.
        special_args = {"tg_source": tg_source,
                        "tg_errors": tg_errors,
                        "tg_exceptions": tg_exceptions}
        args, kw = inject_args(func, special_args, args, kw, 1)
        args, kw = adapt_call(func, args, kw, 1)
        return func(controller, *args, **kw)
    return adaptor
def try_call(func, self, *args, **kw):
    """Call function, catch and dispatch any resulting exception."""
    # turbogears.database import here to avoid circular imports
    from turbogears.database import _use_sa
    try:
        return func(self, *args, **kw)
    except Exception, e:
        # Redirects are normal control flow; and if a dispatch_error frame
        # is already on the call stack we are inside an error handler.
        # In both cases re-raise untouched.
        # NOTE(review): the guard passes key "tg_exception" (singular)
        # while handlers receive "tg_exceptions" - confirm this is what
        # call_on_stack() expects.
        if isinstance(e, cherrypy.HTTPRedirect) or \
                call_on_stack("dispatch_error",
                              {"tg_source":func, "tg_exception":e}, 4):
            raise
        else:
            # Save the exception info so the original traceback can be
            # re-raised if no handler is applicable.
            exc_type, exc_value, exc_trace = sys.exc_info()
            remove_keys(kw, ("tg_source", "tg_errors", "tg_exceptions"))
            try:
                output = dispatch_error(self, func, None, e, *args, **kw)
            except NoApplicableMethods:
                # No handler registered: propagate the original exception.
                raise exc_type, exc_value, exc_trace
            else:
                # Drop the traceback reference to avoid a reference cycle.
                del exc_trace
            if _use_sa() and getattr(cherrypy.request, "in_transaction", None):
                # using SA ?
                # Caught pants down in the middle of a transaction :) ?
                # rollback the transaction for this whole request
                cherrypy.request.sa_transaction.rollback()
            return output
def run_with_errors(errors, func, self, *args, **kw):
    """Branch execution depending on presence of errors."""
    # No validation errors: run the controller method as-is.
    if not errors:
        return func(self, *args, **kw)
    # Deprecated hook: a controller-wide validation_error() method takes
    # precedence over registered error handlers.
    if hasattr(self, "validation_error"):
        import warnings
        warnings.warn(
            "Use decorator error_handler() on per-method base "
            "rather than defining a validation_error() method.",
            DeprecationWarning, 2)
        return self.validation_error(func.__name__, kw, errors)
    # Strip the special tg_* keys before handing off to a handler.
    remove_keys(kw, ("tg_source", "tg_errors", "tg_exceptions"))
    try:
        return dispatch_error(self, func, errors, None, *args, **kw)
    except NoApplicableMethods:
        raise NotImplementedError("Method %s.%s() has no applicable "
            "error handler." % (self.__class__.__name__, func.__name__))
def register_handler(handler=None, rules=None):
    """Register handler as an error handler for decorated method.

    If handler is not given, method is considered it's own error handler.
    rules can be a string containing an arbitrary logical Python expression
    to be used as dispatch rule allowing multiple error handlers for a
    single method.

    register_handler decorator is an invariant.
    """
    def register(func):
        when = "func_eq(tg_source, func)"
        if rules:
            when += " and (%s)" % rules
        # The rule is parsed in the combined namespaces of this frame and
        # its caller so names used in the expression resolve; the depth
        # argument makes this frame-sensitive - do not restructure.
        dispatch_error.when(dispatch_error.parse(when, *combine_contexts(
            depth=[0, 1])), order=1)(dispatch_error_adaptor(handler or func))
        return func
    return register
def bind_rules(pre_rules):
    """Prepend rules to error handler specialisation."""
    def registrant(handler=None, rules=None):
        # Combine the fixed predicate with any caller-supplied rules.
        if rules:
            combined = pre_rules + " and (%s)" % rules
        else:
            combined = pre_rules
        return register_handler(handler, combined)
    return registrant
# Ready-made registrants: error_handler fires on validation errors,
# exception_handler on exceptions raised by the controller method.
error_handler = bind_rules("tg_errors")
exception_handler = bind_rules("tg_exceptions")

# Fail-safe strategies for substituting failed input values.
FailsafeSchema = Enum("none", "values", "map_errors", "defaults")
def dispatch_failsafe(schema, values, errors, source, kw):
    """Dispatch fail-safe mechanism for failed inputs.

    Returns the (possibly updated) kw dict for the request.
    """
# Generic function; the concrete strategies are registered below.
dispatch_failsafe = generic()(dispatch_failsafe)
def _failsafe_none(schema, values, errors, source, kw):
    """No fail-safe values."""
    # Leave the inputs untouched.
    return kw
# Default strategy when no other rule matches.
_failsafe_none = dispatch_failsafe.when(strategy.default)(_failsafe_none)
def _failsafe_values_dict(schema, values, errors, source, kw):
    """Map erroneous inputs to values."""
    # Only the inputs that failed validation and have a replacement in
    # the values dict get overwritten.
    kw.update((bad_field, values[bad_field])
              for bad_field in errors if bad_field in values)
    return kw
_failsafe_values_dict = dispatch_failsafe.when(
    "schema is FailsafeSchema.values and isinstance(values, dict) and "
    "isinstance(errors, dict)")(_failsafe_values_dict)
def _failsafe_values_atom(schema, values, errors, source, kw):
    """Map all erroneous inputs to a single value."""
    # Here values is a single replacement used for every failed input.
    kw.update((bad_field, values) for bad_field in errors)
    return kw
_failsafe_values_atom = dispatch_failsafe.when(
    "schema is FailsafeSchema.values and isinstance(errors, dict)")(
    _failsafe_values_atom)
def _failsafe_map_errors(schema, values, errors, source, kw):
    """Map erroneous inputs to coresponding exceptions."""
    # Each failed input is replaced by its validation exception.
    for bad_field in errors:
        kw[bad_field] = errors[bad_field]
    return kw
_failsafe_map_errors = dispatch_failsafe.when(
    "schema is FailsafeSchema.map_errors and isinstance(errors, dict)")(
    _failsafe_map_errors)
def _failsafe_defaults(schema, values, errors, source, kw):
    """Map erroneous inputs to method defaults.

    Replaces each failed input in kw with the default value declared
    for the corresponding parameter of the source method, if any.
    """
    # getargspec() returns (args, varargs, varkw, defaults); [::3] picks
    # out (args, defaults).
    argnames, defaultvals = getargspec(source)[::3]
    # getargspec() returns None, not an empty tuple, when the method
    # declares no defaults - normalise so len() below cannot blow up.
    defaultvals = defaultvals or ()
    # Defaults align with the *last* len(defaultvals) argument names.
    defaults = dict(izip(islice(argnames, len(argnames) - len(defaultvals),
                                None), defaultvals))
    for key in errors:
        if key in defaults:
            kw[key] = defaults[key]
    return kw
_failsafe_defaults = dispatch_failsafe.when(
    "schema is FailsafeSchema.defaults and isinstance(errors, dict)")(
    _failsafe_defaults)
# Public API of this module.
__all__ = ["dispatch_error", "dispatch_error_adaptor", "try_call",
           "run_with_errors", "default", "register_handler", "FailsafeSchema",
           "dispatch_failsafe", "error_handler", "exception_handler",
           ]
PK 6?ݰ98 98 turbogears/paginate.pyimport re
import types
from math import ceil
import logging
import cherrypy
try:
import sqlobject
from sqlobject.main import SelectResults
except ImportError:
SelectResults = None
sqlobject = None
try:
# Can't depend on sqlalchemy being available.
import sqlalchemy
from sqlalchemy.ext.selectresults import SelectResults as SASelectResults
from sqlalchemy.orm.query import Query
except ImportError:
SASelectResults = None
sqlalchemy = None
Query = None
import turbogears
from turbogears.decorator import weak_signature_decorator
from turbogears.view import variable_providers
from formencode.variabledecode import variable_encode, variable_decode
log = logging.getLogger("turbogears.paginate")
def paginate(var_name, default_order='', default_reversed=False, limit=10,
             allow_limit_override=False, max_pages=5, dynamic_limit=None):
    '''Decorator that paginates one variable of a controller's output dict.

    @param var_name: the variable name that the paginate decorator will try
    to control. This key must be present in the dictionnary returned from
    your controller in order for the paginate decorator to be able to handle
    it.
    @type var_name: string
    @param default_order: Needs work! XXX
    @type default_order: string
    @param default_reversed: Needs work! XXX
    @type default_reversed: Boolean
    @param limit: the hard coded limit that the paginate decorator will
    impose on the number of "var_name" to display at the same time.
    This value can be overridden by the use of the dynamic_limit keyword
    argument
    @type limit: integer
    @param allow_limit_override: A boolean that indicates if the parameters
    passed in the calling URL can modify the imposed limit. By default it is
    set to False. If you want to be able to control the limit by using an
    URL parameter then you need to set this to True.
    @type allow_limit_override: Boolean
    @param max_pages: Needs work! XXX
    @type max_pages: integer
    @param dynamic_limit: If specified, this parameter must be the name
    of a key present in the dictionnary returned by your decorated
    controller. The value found for this key will be used as the limit
    for our pagination and will override the other settings, the hard-coded
    one declared in the decorator itself AND the URL parameter one.
    This enables the programmer to store a limit settings inside the
    application preferences and then let the user manage it.
    @type dynamic_limit: string
    '''
    def entangle(func):
        def decorated(func, *args, **kw):
            # Pagination state arrives as special tg_paginate_* request
            # parameters; pop them so the controller never sees them.
            page = int(kw.pop('tg_paginate_no', 1))
            limit_ = int(kw.pop('tg_paginate_limit', limit))
            order = kw.pop('tg_paginate_order', None)
            ordering = kw.pop('tg_paginate_ordering', {})
            # Convert ordering str to a dict.
            if ordering:
                ordering = convert_ordering(ordering)
            if not allow_limit_override:
                limit_ = limit
            log.debug("Pagination params: page=%s, limit=%s, order=%s "
                      "", page, limit_, order)
            # get the output from the decorated function
            output = func(*args, **kw)
            if not isinstance(output, dict):
                # Not a template dict (e.g. redirect); nothing to paginate.
                return output
            try:
                var_data = output[var_name]
            except KeyError:
                raise StandardError("Didn't get expected variable")
            if dynamic_limit:
                # The limit stored in the output dict trumps everything.
                try:
                    dyn_limit = output[dynamic_limit]
                except KeyError:
                    msg = "dynamic_limit: %s not found in output dict" % (
                        dynamic_limit)
                    raise StandardError(msg)
                limit_ = dyn_limit
            if order and not default_order:
                msg = "If you want to enable ordering you need "
                msg += "to provide a default_order"
                raise StandardError(msg)
            elif default_order and not ordering:
                # Seed the ordering with the default column; the second
                # element is the "ascending" flag.
                ordering = {default_order: [0, not default_reversed]}
            elif ordering and order:
                # A column header was clicked: promote/flip that column.
                sort_ordering(ordering, order)
            log.debug('ordering %s' % ordering)
            row_count = 0
            if (SelectResults and isinstance(var_data, SelectResults)) or \
                    (SASelectResults and isinstance(var_data, SASelectResults)) or \
                    (Query and isinstance(var_data, Query)):
                # Database-backed result: count and order in the database.
                row_count = var_data.count()
                if ordering:
                    # Build order_by list.
                    order_cols = range(len(ordering))
                    for (colname, order_opts) in ordering.items():
                        col = sql_get_column(colname, var_data)
                        if not col:
                            msg = "The order column (%s) doesn't exist" % colname
                            raise StandardError(msg)
                        order_by_expr = sql_order_col(col, order_opts[1])
                        # order_opts[0] is the column's sort priority.
                        order_cols[order_opts[0]] = order_by_expr
                    # May need to address potential of ordering already
                    # existing in var_data.
                    # SO and SA differ on this method name.
                    if hasattr(var_data, 'orderBy'):
                        var_data = var_data.orderBy(order_cols)
                    else:
                        var_data = var_data.order_by(order_cols)
            elif isinstance(var_data, list) or (sqlalchemy and isinstance(
                    var_data, sqlalchemy.orm.attributes.InstrumentedList)):
                row_count = len(var_data)
            else:
                raise StandardError(
                    'Variable is not a list or SelectResults or Query (%s)' % type(
                        var_data))
            offset = (page - 1) * limit_
            page_count = int(ceil(float(row_count) / limit_))
            # if it's possible display every page
            if page_count <= max_pages:
                pages_to_show = range(1, page_count + 1)
            else:
                pages_to_show = _select_pages_to_show(page_count=page_count,
                                                      current_page=page,
                                                      max_pages=max_pages)
            # which one should we use? cherrypy.request.input_values or kw?
            #input_values = cherrypy.request.input_values.copy()
            ##input_values = kw.copy()
            input_values = variable_encode(cherrypy.request.params.copy())
            input_values.pop('self', None)
            # Drop our own control parameters so they don't leak into the
            # navigation links.  (keys() returns a list in Python 2, so
            # deleting while iterating is safe here.)
            for input_key in input_values.keys():
                if input_key.startswith('tg_paginate'):
                    del input_values[input_key]
            # Publish pagination state for templates via the request.
            cherrypy.request.paginate = Paginate(current_page=page,
                                                 limit=limit_,
                                                 pages=pages_to_show,
                                                 page_count=page_count,
                                                 input_values=input_values,
                                                 order=order,
                                                 ordering=ordering,
                                                 row_count=row_count)
            # we replace the var with the sliced one
            endpoint = offset + limit_
            log.debug("slicing data between %d and %d", offset, endpoint)
            output[var_name] = var_data[offset:endpoint]
            return output
        return decorated
    return weak_signature_decorator(entangle)
def _paginate_var_provider(d):
    """Expose the request's Paginate instance (if any) to templates."""
    # replaced cherrypy.thread_data for cherrypy.request - thanks alberto!
    current = getattr(cherrypy.request, 'paginate', None)
    if current:
        d['paginate'] = current
variable_providers.append(_paginate_var_provider)
class Paginate:
    """Variable provider exposing pagination state to templates.

    An instance is stored on cherrypy.request.paginate by the paginate()
    decorator and published to templates by _paginate_var_provider().
    """
    def __init__(self, current_page, pages, page_count, input_values,
                 limit, order, ordering, row_count):
        self.pages = pages
        self.limit = limit
        self.page_count = page_count
        self.current_page = current_page
        self.input_values = input_values
        self.order = order
        self.ordering = ordering
        self.row_count = row_count
        # 1-based positions of the first/last item shown on this page.
        self.first_item = (current_page - 1) * limit + 1
        self.last_item = min(current_page * limit, row_count)
        self.reversed = False
        # Should reversed be true?  The view counts as reversed when the
        # primary sort column (priority 0) is not ascending.
        for (field_name, ordering_values) in ordering.items():
            if ordering_values[0] == 0 and not ordering_values[1]:
                self.reversed = True
        # If ordering is empty, don't add it.  Note: this rebinds the
        # *local* name input_values; the constructor argument was already
        # saved on self above.
        input_values = dict(tg_paginate_limit=limit)
        if ordering:
            input_values['tg_paginate_ordering'] = ordering
        self.input_values.update(input_values)
        # Precompute navigation URLs.  The same dict is mutated before
        # each url() call, so the order of these updates matters.
        if current_page < page_count:
            self.input_values.update(dict(
                tg_paginate_no=current_page + 1,
                tg_paginate_limit=limit))
            self.href_next = turbogears.url(cherrypy.request.path,
                                            self.input_values)
            self.input_values.update(dict(
                tg_paginate_no=page_count,
                tg_paginate_limit=limit))
            self.href_last = turbogears.url(cherrypy.request.path,
                                            self.input_values)
        else:
            self.href_next = None
            self.href_last = None
        if current_page > 1:
            self.input_values.update(dict(
                tg_paginate_no=current_page - 1,
                tg_paginate_limit=limit))
            self.href_prev = turbogears.url(cherrypy.request.path,
                                            self.input_values)
            self.input_values.update(dict(
                tg_paginate_no=1,
                tg_paginate_limit=limit))
            self.href_first = turbogears.url(cherrypy.request.path,
                                             self.input_values)
        else:
            self.href_prev = None
            self.href_first = None

    def get_href(self, page, order=None, reverse_order=None):
        """Return a URL pointing at the given page, optionally re-ordered."""
        # Note that reverse_order is not used. It should be cleaned up here
        # and in the template. I'm not removing it now because I don't want
        # to break the API.
        order = order or None
        self.input_values['tg_paginate_no'] = page
        if order:
            self.input_values['tg_paginate_order'] = order
        return turbogears.url('', self.input_values)
def _select_pages_to_show(current_page, page_count, max_pages):
pages_to_show = []
if max_pages < 3:
msg = "The minimun value for max_pages on this algorithm is 3"
raise StandardError(msg)
if page_count <= max_pages:
pages_to_show = range(1,page_count+1)
pad = 0
if not max_pages % 2:
pad = 1
start = current_page - (max_pages / 2) + pad
end = current_page + (max_pages / 2)
if start < 1:
end = end + (start * -1) + 1
start = 1
if end > page_count:
start = start - (end - page_count)
end = page_count
return range(start, end+1)
def sort_ordering(ordering, sort_name):
    """Rearrange ordering based on sort_name."""
    log.debug('sort called with %s and %s' % (ordering, sort_name))
    # Each entry is [priority, ascending]; unseen columns start off-list.
    entry = ordering.setdefault(sort_name, [-1, True])
    if entry[0] == 0:
        # Already the primary column: just flip the direction.
        entry[1] = not entry[1]
    else:
        # Promote this column to primary and demote the others by one,
        # capping priorities at len(ordering) - 1.
        entry[0] = 0
        cap = len(ordering) - 1
        for other in ordering.keys():
            if other != sort_name and ordering[other][0] < cap:
                ordering[other][0] += 1
    log.debug('sort results is %s and %s' % (ordering, sort_name))
def sql_get_column(colname, var_data):
    """Return a column from var_data based on colname.

    Returns None when the column cannot be found.
    """
    if isinstance(var_data, SelectResults):
        # SQLObject: columns hang off the .q namespace of the source class.
        col = getattr(var_data.sourceClass.q, colname, None)
    elif isinstance(var_data, SASelectResults) or isinstance(var_data, Query):
        # SQLAlchemy: look the column up on the mapper, stripping any
        # configured column_prefix from the requested name first.
        col = getattr(
            var_data._query.mapper.c,
            colname[len(var_data._query.mapper.column_prefix or ''):],
            None)
    else:
        raise StandardError, 'expected SelectResults'
    return col
def sql_order_col(col, ascending=True):
    """Return an ordered col for col.

    Wraps col in the ascending/descending construct appropriate for the
    ORM (SQLAlchemy or SQLObject) it came from.
    """
    if sqlalchemy and isinstance(col, sqlalchemy.sql.ColumnElement):
        if ascending:
            order_col = sqlalchemy.sql.asc(col)
        else:
            order_col = sqlalchemy.sql.desc(col)
    elif sqlobject and isinstance(col, types.InstanceType):
        # I don't like using InstanceType, but that's what sqlobject col type
        # is.
        if ascending:
            # SQLObject columns sort ascending by default.
            order_col = col
        else:
            order_col = sqlobject.DESC(col)
    else:
        raise StandardError, 'expected Column, but got %s' % str(type(col))
    return order_col
# Ordering re: matches entries like 'colname': [0, True] in the string
# representation of an ordering dict.
ordering_expr = re.compile(r"('\w+'): ?\[(\d+), ?(True|False)\]")


def convert_ordering(ordering):
    """Convert ordering unicode string to dict.

    Parses the repr-style string produced when the ordering dict round-trips
    through the tg_paginate_ordering request parameter.  Malformed input
    yields an empty dict rather than an error.
    """
    log.debug('ordering received %s' % str(ordering))
    # eval would be simple, but insecure.
    if not isinstance(ordering, (str, unicode)):
        raise ValueError, "ordering should be string or unicode."
    new_ordering = {}
    if ordering == u"{}":
        pass
    else:
        try:
            ordering_info_find = ordering_expr.findall(ordering)
            emsg = "Didn't match ordering for %s." % str(ordering)
            assert len(ordering_info_find) > 0, emsg
            for ordering_info in ordering_info_find:
                # Groups: quoted key, priority (int), ascending flag.
                ordering_key = str(ordering_info[0]).strip("'")
                ordering_order = int(ordering_info[1])
                ordering_reverse = bool(ordering_info[2] == 'True')
                new_ordering[ordering_key] = [ordering_order,
                                              ordering_reverse]
        except StandardError, e:
            # Best effort: fall back to no ordering on any parse failure.
            log.debug('FAILED to convert ordering.')
            new_ordering = {}
    log.debug('ordering converted to %s' % str(new_ordering))
    return new_ordering
PK 6D^qV qV turbogears/controllers.py"""Classes and methods for TurboGears controllers."""
import logging
import re
import urllib
import types
from itertools import izip
import cherrypy
from dispatch import generic, strategy, functions
import turbogears.util as tg_util
import turbogears
from inspect import isclass
from turbogears import view, database, errorhandling, config
from turbogears.decorator import weak_signature_decorator
from turbogears.validators import Invalid
from turbogears.errorhandling import error_handler, exception_handler
log = logging.getLogger("turbogears.controllers")
unicodechars = re.compile(r"([^\x00-\x7F])")
# Wire up the DB connection used by CherryPy's session filter when
# PostgreSQL-backed session storage is configured.
if config.get("session_filter.on", None) == True:
    if config.get("session_filter.storage_type", None) == "PostgreSQL":
        import psycopg2
        config.update(
            {'session_filter.get_db': psycopg2.connect(
                # The DSN comes from the TurboGears config; the original
                # code called psycopg2.get(), which does not exist.
                config.get('sessions.postgres.dsn'))
            })
    # support for mysql/sqlite/etc here
def _process_output(output, template, format, content_type,
                    mapping, fragment=False):
    """
    Produces final output form from the data returned from a
    controller method.

    See the expose() arguments for more info in theses ones since
    they are the same.
    """
    if isinstance(output, dict):
        # Dict output: enrich with widget resources and flash message,
        # then render through the template engine.
        from turbogears.widgets import js_location
        css = tg_util.setlike()
        # One fresh setlike per js_location value (iter with a sentinel
        # keeps calling tg_util.setlike to produce them).
        js = dict(izip(js_location, iter(tg_util.setlike, None)))
        include_widgets = {}
        include_widgets_lst = config.get("tg.include_widgets", [])
        if config.get("tg.mochikit_all", False):
            include_widgets_lst.insert(0, 'turbogears.mochikit')
        # Collect JS/CSS from globally configured widgets.
        for i in include_widgets_lst:
            widget = tg_util.load_class(i)
            if isclass(widget):
                widget = widget()
            include_widgets["tg_%s" % i.split(".")[-1]] = widget
            for script in widget.retrieve_javascript():
                if hasattr(script, "location"):
                    js[script.location].add(script)
                else:
                    js[js_location.head].add(script)
            css.add_all(widget.retrieve_css())
        # Collect JS/CSS from any widgets the controller put in its output.
        for value in output.itervalues():
            if hasattr(value, "retrieve_css"):
                retrieve = getattr(value, "retrieve_css")
                if callable(retrieve):
                    css.add_all(value.retrieve_css())
            if hasattr(value, "retrieve_javascript"):
                retrieve = getattr(value, "retrieve_javascript")
                if callable(retrieve):
                    for script in value.retrieve_javascript():
                        if hasattr(script, "location"):
                            js[script.location].add(script)
                        else:
                            js[js_location.head].add(script)
        output.update(include_widgets)
        output["tg_css"] = css
        #output.update([("tg_js_%s" % str(l), js[l]) for l in js_location])
        for l in iter(js_location):
            output["tg_js_%s" % str(l)] = js[l]
        tg_flash = _get_flash()
        if not tg_flash == None:
            output["tg_flash"] = tg_flash
        output = view.render(output, template=template, format=format,
                             mapping=mapping, content_type=content_type,
                             fragment=fragment)
    else:
        # Non-dict output is passed through; only the content type is set.
        if content_type:
            cherrypy.response.headers["Content-Type"] = content_type
    # fix the Safari XMLHttpRequest encoding problem
    try:
        contentType = cherrypy.response.headers["Content-Type"]
        ua = cherrypy.request.headers["User-Agent"]
    except KeyError:
        return output
    if not contentType.startswith("text/"):
        return output
    ua = view.UserAgent(ua)
    enc = tg_util.get_template_encoding_default()
    if ua.browser == "safari":
        # Escape all non-ASCII characters as numeric entities for Safari.
        if isinstance(output, str):
            output = output.decode(enc)
        elif isinstance(output, types.GeneratorType):
            output = "".join(output)
        output = unicodechars.sub(
            lambda m: "&#x%x;" % ord(m.group(1)), output).encode("ascii")
    if isinstance(output, unicode):
        output = output.encode(enc)
    return output
class BadFormatError(Exception):
    """Output-format exception."""
def validate(form=None, validators=None,
             failsafe_schema=errorhandling.FailsafeSchema.none,
             failsafe_values=None, state_factory=None):
    """Validate input.

    @param form form to validate input from
    @param validators individual validators to use for parameters
    @param failsafe_schema fail-safe schema
    @param failsafe_values replacements for erroneous inputs
    @param state_factory callable which returns the initial state instance for
    validation
    """
    def entangle(func):
        # Used by call_on_stack() to detect re-entrant validation.
        recursion_guard = dict(func=func)
        if callable(form) and not hasattr(form, "validate"):
            # A form factory was passed: instantiate per controller.
            init_form = lambda self: form(self)
        else:
            init_form = lambda self: form
        def validate(func, *args, **kw):
            # Avoid validating twice when handlers re-invoke the method.
            if tg_util.call_on_stack("validate", recursion_guard, 4):
                return func(*args, **kw)
            form = init_form(args and args[0] or kw["self"])
            # Normalise everything to keyword arguments for validation.
            args, kw = tg_util.to_kw(func, args, kw)
            errors = {}
            if state_factory is not None:
                state = state_factory()
            else:
                state = None
            if form:
                value = kw.copy()
                try:
                    kw.update(form.validate(value, state))
                except Invalid, e:
                    errors = e.unpack_errors()
                    cherrypy.request.validation_exception = e
                cherrypy.request.validated_form = form
            if validators:
                if isinstance(validators, dict):
                    # Per-field validators.
                    for field, validator in validators.iteritems():
                        try:
                            kw[field] = validator.to_python(
                                kw.get(field, None), state
                            )
                        except Invalid, error:
                            errors[field] = error
                else:
                    # A single schema validating the whole kw dict.
                    try:
                        value = kw.copy()
                        kw.update(validators.to_python(value, state))
                    except Invalid, e:
                        errors = e.unpack_errors()
                        cherrypy.request.validation_exception = e
            # Publish validation results on the request for handlers.
            cherrypy.request.validation_errors = errors
            cherrypy.request.input_values = kw.copy()
            cherrypy.request.validation_state = state
            if errors:
                # Substitute failed inputs per the chosen fail-safe schema.
                kw = errorhandling.dispatch_failsafe(failsafe_schema,
                    failsafe_values, errors, func, kw)
            args, kw = tg_util.from_kw(func, args, kw)
            return errorhandling.run_with_errors(errors, func, *args, **kw)
        return validate
    return weak_signature_decorator(entangle)
class CustomDispatch(functions.GenericFunction):
    """Generic function variant used for expose() format dispatch."""
    def combine(self, cases):
        strict = [strategy.ordered_signatures, strategy.safe_methods]
        cases = strategy.separate_qualifiers(
            cases,
            primary = strict,
        )
        primary = strategy.method_chain(cases.get('primary', []))
        # method_chain may return either a plain function or a nested
        # iterable of (signature, method) pairs; in the latter case the
        # first method wins.
        if type(primary) != types.FunctionType:
            for i in primary:
                for y in i:
                    return y[1]
        return primary
def _add_rule(_expose, found_default, as_format, accept_format, template,
              rulefunc):
    """Register one format-dispatch rule on _expose.

    Returns the (possibly updated) found_default flag.
    """
    if as_format == "default":
        if found_default:
            # A default rule already exists: derive an implicit format name
            # from the template - everything before the ":" engine
            # separator, or the whole name when there is none.
            colon = template.find(":")
            if colon == -1:
                as_format = template
            else:
                as_format = template[:colon]
        else:
            found_default = True
    conditions = ['kw.get("tg_format", "default") == "%s"'
                  % as_format]
    if accept_format:
        # Also match on the Accept header when no explicit format is asked.
        conditions.append('(accept == "%s" and kw.get("tg_format", '
                          '"default") == "default")' % accept_format)
    rule = " or ".join(conditions)
    log.debug("Generated rule %s", rule)
    _expose.when(rule)(rulefunc)
    return found_default
def _build_rules(func):
    """Build the format-dispatch generic function for an exposed method."""
    def _expose(func, accept, allow_json, *args, **kw):
        pass
    _expose = generic(CustomDispatch)(_expose)
    if func._allow_json:
        # JSON is selected by tg_format=json or a text/javascript Accept.
        log.debug("Adding allow_json rule: "
                  'allow_json and '
                  '(kw.get("tg_format", None) == "json" or accept '
                  '=="text/javascript")')
        _expose.when('allow_json '
                     'and (kw.get("tg_format", None) == "json" or accept'
                     ' =="text/javascript")')(
            lambda _func, accept, allow_json,
                *args, **kw: _execute_func(
                    _func, "json", None, None, None, False, args, kw))
    found_default = False
    # Register one rule per expose() decoration, in declaration order.
    for ruleinfo in func._ruleinfo:
        found_default = _add_rule(_expose, found_default, **ruleinfo)
    func._expose = _expose
def expose(template=None, validators=None, allow_json=None, html=None,
           format=None, content_type=None, inputform=None, fragment=False,
           as_format="default", mapping=None, accept_format=None):
    """Exposes a method to the web.

    By putting the expose decorator on a method, you tell TurboGears that
    the method should be accessible via URL traversal. Additionally, expose
    handles the output processing (turning a dictionary into finished
    output) and is also responsible for ensuring that the request is
    wrapped in a database transaction.

    You can apply multiple expose decorators to a method, if
    you'd like to support multiple output formats. The decorator that's
    listed first in your code without as_format or accept_format is
    the default that is chosen when no format is specifically asked for.
    Any other expose calls that are missing as_format and accept_format
    will have as_format implicitly set to the whatever comes before
    the ":" in the template name (or the whole template name if there
    is no ":". For example, expose("json"), if it's not
    the default expose, will have as_format set to "json".

    When as_format is set, passing the same value in the tg_format
    parameter in a request will choose the options for that expose
    decorator. Similarly, accept_format will watch for matching
    Accept headers. You can also use both. expose("json", as_format="json",
    accept_format="text/javascript") will choose JSON output for either
    case: tg_format=json as a parameter or Accept: text/javascript as a
    request header.

    Passing allow_json=True to an expose decorator
    is equivalent to adding the decorator just mentioned.

    Each expose decorator has its own set of options, and each one
    can choose a different template or even template engine (you can
    use Kid for HTML output and Cheetah for plain text, for example).
    See the other expose parameters below to learn about the options
    you can pass to the template engine.

    Take a look at the
    test_expose.py suite
    for more examples.

    @param template "templateengine:dotted.reference" reference along the
    Python path for the template and the template engine. For
    example, "kid:foo.bar" will have Kid render the bar template in
    the foo package.
    @keyparam format format for the template engine to output (if the
    template engine can render different formats. Kid, for example,
    can render "html", "xml" or "xhtml")
    @keyparam content_type sets the content-type http header
    @keyparam allow_json allow the function to be exposed as json
    @keyparam fragment for template engines (like Kid) that generate
    DOCTYPE declarations and the like, this is a signal to
    just generate the immediate template fragment. Use this
    if you're building up a page from multiple templates or
    going to put something onto a page with .innerHTML.
    @keyparam mapping mapping with options that are sent to the template
    engine
    @keyparam as_format designates which value of tg_format will choose
    this expose.
    @keyparam accept_format which value of an Accept: header will
    choose this expose.
    @keyparam html deprecated in favor of template
    @keyparam validators deprecated. Maps argument names to validator
    applied to that arg
    @keyparam inputform deprecated. A form object that generates the
    input to this method
    """
    # Normalise the deprecated/shorthand arguments.
    if html:
        template = html
    if not template:
        template = format
    if format == "json" or (format == None and template == None):
        template = "json"
        allow_json = True
    if content_type is None:
        content_type = config.get("tg.content_type", None)
    if config.get("tg.session.automatic_lock", None) == True:
        cherrypy.session.acquire_lock()
    def entangle(func):
        log.debug("Exposing %s", func)
        log.debug("template: %s, format: %s, allow_json: %s, "
                  "content-type: %s", template, format, allow_json, content_type)
        if not getattr(func, "exposed", False):
            # First expose() on this method: install the real wrapper.
            def expose(func, *args, **kw):
                accept = cherrypy.request.headers.get('Accept', "").lower()
                # Dispatch rules are built lazily on first request.
                if not hasattr(func, "_expose"):
                    _build_rules(func)
                if hasattr(cherrypy.request, "in_transaction"):
                    # Already inside a transaction (nested call).
                    output = func._expose(func, accept, func._allow_json,
                                          *args, **kw)
                else:
                    cherrypy.request.in_transaction = True
                    output = database.run_with_transaction(
                        func._expose, func, accept, func._allow_json,
                        *args, **kw)
                return output
            func.exposed = True
            func._ruleinfo = []
            allow_json_from_config = config.get(
                "tg.allow_json", False)
            func._allow_json = allow_json_from_config
        else:
            # Subsequent expose() decorations only record another rule.
            expose = lambda func, *args, **kw: func(*args, **kw)
        # Prepend so the first decorator in source order wins as default.
        func._ruleinfo.insert(0, dict(as_format = as_format,
            accept_format = accept_format, template = template,
            rulefunc = lambda _func, accept, allow_json,
                *args, **kw:
                _execute_func(_func, template, format, content_type,
                              mapping, fragment, args, kw)))
        if allow_json:
            func._allow_json = True
        if inputform or validators:
            import warnings
            warnings.warn(
                "Use a separate decorator validate() rather than passing "
                "arguments validators and/or inputform to decorator "
                "expose().",
                DeprecationWarning, 2)
            func = validate(form=inputform, validators=validators)(func)
        return expose
    return weak_signature_decorator(entangle)
def _execute_func(func, template, format, content_type, mapping, fragment, args, kw):
    """Call controller method and process its output."""
    if config.get("tg.strict_parameters", False):
        # Strict mode: only strip TurboGears' own bookkeeping parameters.
        tg_util.remove_keys(kw, ["tg_random", "tg_format"])
    else:
        # Lenient mode: drop any parameters the method does not accept.
        args, kw = tg_util.adapt_call(func, args, kw)
    if config.get('server.environment', 'development') == 'development':
        # Only output this in development mode: If it's a field storage object,
        # this means big memory usage, and we don't want that in production
        log.debug("Calling %s with *(%s), **(%s)", func, args, kw)
    output = errorhandling.try_call(func, *args, **kw)
    if isinstance(output, list):
        return output
    assert isinstance(output, basestring) or isinstance(output, dict) \
        or isinstance(output, types.GeneratorType), \
        "Method %s.%s() returned unexpected output. Output should " \
        "be of type basestring, dict or generator." % (
        args[0].__class__.__name__, func.__name__)
    if isinstance(output, dict):
        # The output dict may override the template/format chosen by expose.
        template = output.pop("tg_template", template)
        format = output.pop("tg_format", format)
    if template and template.startswith("."):
        # Relative template reference: resolve against the method's package.
        template = func.__module__[:func.__module__.rfind('.')] + template
    return _process_output(output, template, format, content_type, mapping, fragment)
def flash(message):
    """Set a message to be displayed in the browser on next page display."""
    # The message travels to the next request via a site-wide cookie.
    cookie = cherrypy.response.simple_cookie
    cookie['tg_flash'] = tg_util.to_utf8(message)
    cookie['tg_flash']['path'] = '/'
def _get_flash():
    """Retrieve the flash message (if one is set), clearing the message."""
    request_cookie = cherrypy.request.simple_cookie
    response_cookie = cherrypy.response.simple_cookie

    def clearcookie():
        # Expire the cookie in the browser.
        response_cookie["tg_flash"] = ""
        response_cookie["tg_flash"]['expires'] = 0
        response_cookie['tg_flash']['path'] = '/'

    # A flash set during *this* request (response cookie) wins over one
    # carried in from the previous request (request cookie).
    if response_cookie.has_key("tg_flash"):
        message = response_cookie["tg_flash"].value
        response_cookie.pop("tg_flash")
        if request_cookie.has_key("tg_flash"):
            # New flash overrided old one sitting in cookie. Clear that old cookie.
            clearcookie()
    elif request_cookie.has_key("tg_flash"):
        message = request_cookie["tg_flash"].value
        if not response_cookie.has_key("tg_flash"):
            clearcookie()
    else:
        message = None
    if message:
        message = unicode(message, 'utf-8')
    return message
class Controller(object):
    """Base class for a web application's controller.

    Currently, this provides positional parameters functionality
    via a standard default method.
    """
class RootController(Controller):
    """Base class for the root of a web application.

    Your web application should have one of these. The root of
    your application is used to compute URLs used by your app.
    """
    # Marks this object as the URL root for turbogears.url() computation.
    is_app_root = True

    msglog = logging.getLogger('cherrypy.msg')
    # Maps CherryPy severity levels to logger methods.
    msglogfunc = {0: msglog.info, 1: msglog.warning, 2: msglog.error}

    def _cp_log_message(self, msg, context = 'nocontext', severity = 0):
        """Route CherryPy's internal messages into the logging module."""
        log = self.msglogfunc[severity]
        text = ''.join((context, ': ', msg))
        log(text)

    accesslog = logging.getLogger("turbogears.access")

    def _cp_log_access(self):
        """Write one access-log line per request (combined-log style)."""
        tmpl = '%(h)s %(l)s %(u)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s"'
        try:
            username = cherrypy.request.user_name
            if not username:
                username = "-"
        except AttributeError:
            username = "-"
        s = tmpl % {'h': cherrypy.request.remote_host,
                    'l': '-',
                    'u': username,
                    'r': cherrypy.request.requestLine,
                    's': cherrypy.response.status.split(" ", 1)[0],
                    'b': cherrypy.response.headers.get('Content-Length',
                                                       '') or "-",
                    'f': cherrypy.request.headers.get('referer', ''),
                    'a': cherrypy.request.headers.get('user-agent', ''),
                    }
        self.accesslog.info(s)
Root = RootController
def url(tgpath, tgparams=None, **kw):
    """Computes URLs.

    tgpath can be a list or a string. If the path is absolute (starts
    with a "/"), the server.webpath and the approot of the application
    are prepended to the path. In order for the approot to be
    detected properly, the root object should extend
    controllers.RootController.

    Query parameters for the URL can be passed in as a dictionary in
    the second argument *or* as keyword parameters.  Parameters whose
    value is None are omitted from the query string.
    """
    if not isinstance(tgpath, basestring):
        tgpath = "/".join(list(tgpath))
    if tgpath.startswith("/"):
        if tg_util.request_available():
            # Prepend the app mount point when we are inside a request.
            check_app_root()
            tgpath = cherrypy.request.app_root + tgpath
        result = config.get("server.webpath", "") + tgpath
    else:
        result = tgpath
    if tgparams is None:
        tgparams = kw
    else:
        # Work on a copy so the caller's dict is not mutated by the
        # keyword-argument merge (the old code updated it in place).
        tgparams = dict(tgparams)
        tgparams.update(kw)
    args = []
    for key, value in tgparams.iteritems():
        if value is None:
            # None means "omit this parameter", not "?key=None".
            continue
        if isinstance(value, unicode):
            value = value.encode("utf8")
        args.append("%s=%s" % (key, urllib.quote(str(value))))
    if args:
        result += "?" + "&".join(args)
    return result
def check_app_root():
    """Sets cherrypy.request.app_root if needed."""
    if hasattr(cherrypy.request, "app_root"):
        # Already computed for this request.
        return
    found_root = False
    trail = cherrypy.request.object_trail
    top = len(trail) - 1
    # compute the app_root by stepping back through the object
    # trail and collecting up the path elements after the first
    # root we find
    # we can eliminate this if we find a way to use
    # CherryPy's mounting mechanism whenever a new root
    # is hit.
    rootlist = []
    for i in xrange(len(trail) - 1, -1, -1):
        path, obj = trail[i]
        if not found_root and isinstance(obj, RootController):
            if i == top:
                # The deepest object in the trail is itself the root,
                # i.e. the app is mounted at "/": nothing to collect.
                break
            found_root = True
        if found_root and i > 0:
            # Collect path segments above the root (i > 0 skips the
            # first entry of the trail).
            rootlist.insert(0, path)
    app_root = "/".join(rootlist)
    # Normalize to a leading slash and no trailing slash.
    if not app_root.startswith("/"):
        app_root = "/" + app_root
    if app_root.endswith("/"):
        app_root = app_root[:-1]
    cherrypy.request.app_root = app_root
def redirect(redirect_path, redirect_params=None, **kw):
    """Redirect to the given path (via cherrypy.HTTPRedirect).

    The exception is raised rather than returned, so callers may either
    invoke this as a plain function or raise it as an exception.
    """
    target = url(tgpath=redirect_path, tgparams=redirect_params, **kw)
    raise cherrypy.HTTPRedirect(target)
__all__ = ["expose", "validate", "redirect", "flash",
"Root", "RootController", "Controller",
"error_handler", "exception_handler",
]
PK 6't+ + turbogears/docgen.pyimport os
import sys
import shutil
import time
from glob import glob
from setuptools import Command
import pkg_resources
pkg_resources.require("Kid >= 0.6.4")
import kid
import re
from distutils import log
class GenSite(Command):
    "setuptools command to generate the TurboGears website"
    user_options = [
        ("srcdirs=", "s", "directories containing the source files (default: docs)"),
        ("destdir=", "d", "destination output directory (default: dist/site)"),
        ("encoding=", "e", "encoding for output (default: utf8)"),
        ("force", "f", "regenerate all files"),
        ("ignoredirs=", "i", "directories to ignore (default: ['.svn', '.cvs'])"),
        ("ignorefiles=", "x", "files to ignore (default: ['.*\\.pyc', '.DS_Store'])"),
        ("nodelete=", "l", "directories to leave alone rather than delete"),
        ("templates=", "t", "mapping of templates to load (format: name=templatefile,name=templatefile)"),
        ("copydirs=", "c", "copy files from these directories without template proc. (destdir=srcdir,...)"),
        ("noprintable", "N", "don't make printable version of tutorials"),
        ("eggdir=", "g", "which directory has the eggs in it (default: '../thirdparty/eggs')")
        ]
    boolean_options=["force"]
    # Option defaults; distutils overwrites these from the command line.
    srcdirs = None
    destdir = "dist/site"
    encoding = "utf8"
    force = False
    ignoredirs = None
    ignorefiles = None
    nodelete = None
    templates = None
    copydirs = None
    eggdir = "../thirdparty/eggs"
    noprintable = False
    def initialize_options(self):
        # Defaults are the class attributes above; nothing to initialize.
        pass
    def finalize_options(self):
        # Normalize all options into their final list/dict/regex forms.
        if self.srcdirs is None:
            self.srcdirs = ["docs"]
        if self.srcdirs == "":
            self.srcdirs = []
        self.ensure_string_list("srcdirs")
        self.ensure_string("destdir", "dist/site")
        self.ensure_string("encoding", "utf8")
        if self.ignoredirs is None:
            self.ignoredirs = [".svn", ".cvs"]
        self.ensure_string_list("ignoredirs")
        if self.ignorefiles is None:
            self.ignorefiles = ['.*\\.pyc', '.DS_Store']
        if self.nodelete is None:
            self.nodelete = ["dist/site/preview"]
        self.ensure_string_list("nodelete")
        self.ensure_string_list("ignorefiles")
        # Pre-compile the ignore patterns once; they are matched per file.
        regexes = []
        for pat in self.ignorefiles:
            regexes.append(re.compile(pat))
        self.ignorepatterns = regexes
        self.templates, self.templates_order = self._split_mapping(self.templates, True)
        self.copydirs = self._split_mapping(self.copydirs)
    def _split_mapping(self, valToSplit, preserve_order=False):
        # Parse "name=file,name=file" into {name: abspath}; optionally also
        # return the names in their original order.
        mapping = {}
        order = []
        if valToSplit and isinstance(valToSplit, basestring):
            pairs = re.split(",\s*", valToSplit)
            for pair in pairs:
                name, filename = re.split("\s*=\s*", pair)
                mapping[name] = os.path.abspath(filename)
                order.append(name)
        if preserve_order:
            return mapping, order
        return mapping
    def check_if_newer(self, src, dest):
        # True when src was modified more recently than dest; a missing
        # dest counts as infinitely old.
        srcmtime = os.path.getmtime(src)
        if os.path.exists(dest):
            destmtime = os.path.getmtime(dest)
        else:
            destmtime = 0
        return srcmtime > destmtime
    def copy_if_newer(self, src, dest):
        # Copy src to dest (creating parent directories) unless dest is
        # already up to date and --force was not given.
        if self.force or self.check_if_newer(src, dest):
            d = os.path.dirname(dest)
            if not os.path.exists(d):
                os.makedirs(d)
            self.copy_file(src, dest)
    def render_template(self, src, dest, depth):
        # Render a Kid template to dest; depth is the number of path
        # segments below the site root, used to build relative links.
        if not self.force and not self.check_if_newer(src, dest):
            return
        if not self.dry_run:
            log.info("rendering %s" % dest)
        else:
            log.info("skipping rendering %s" % dest)
            return
        template = kid.load_template(src, cache=False)
        template.Template.serializer = self.serializer
        toroot = "../" * depth
        destfile = dest[len(self.destdir)+1:]
        updated = time.strftime("%b %d, %Y", time.localtime(os.path.getmtime(src)))
        output = template.serialize(encoding=self.encoding, root=toroot, updated=updated,
                                    destfile=destfile, eggs=self.eggs)
        # "$$" is the site templates' escape for a literal "$".
        output = output.replace("$$", "$")
        destfile = open(dest, "w")
        destfile.write(output)
        destfile.close()
    def update_site_files(self, srcdir, processTemplates = True, destroot=None):
        # Walk srcdir, copying plain files and (optionally) rendering
        # .html files as Kid templates into destroot.
        if not destroot:
            destroot = self.destdir
        for root, dirs, files in os.walk(srcdir):
            if root != srcdir:
                fromroot = root[len(srcdir)+1:]
                segments = fromroot.split(os.sep)
                # Skip anything under an ignored directory (.svn etc.).
                if set(segments).intersection(self.ignoredirs):
                    continue
                depth = len(segments)
            else:
                fromroot = ""
                depth = 0
            destdir = os.path.join(destroot, fromroot)
            if not os.path.exists(destdir):
                if not self.dry_run:
                    log.info("creating directory %s" % (destdir))
                    os.makedirs(destdir)
                else:
                    log.info("skipping creating directory %s" % (destdir))
            for file in files:
                ignore = False
                abs = os.path.abspath(file)
                for pat in self.ignorepatterns:
                    if pat.match(file):
                        ignore = True
                        break
                if ignore:
                    continue
                # Never copy the master template files themselves.
                for tempfile in self.templates.values():
                    if tempfile == abs:
                        ignore = True
                        break
                if ignore:
                    continue
                ext = os.path.splitext(file)[1]
                dest = os.path.join(destdir, file)
                # Track every produced file so delete_excess_files() can
                # prune anything no longer generated.
                self.currentfiles.add(dest)
                if not processTemplates or ext != ".html":
                    self.copy_if_newer(os.path.join(root, file),
                                       dest)
                else:
                    self.render_template(os.path.join(root, file),
                                         dest, depth)
    def delete_excess_files(self):
        # Remove output files that were not produced by this run, except
        # under the configured "nodelete" directories.
        for root, dirs, files in os.walk(self.destdir):
            leavealone = False
            for dirname in self.nodelete:
                if root.startswith(dirname):
                    leavealone = True
                    break
            if leavealone:
                continue
            for file in files:
                dest = os.path.join(root, file)
                if dest not in self.currentfiles:
                    if not self.dry_run:
                        log.info("deleting %s" % dest)
                        os.unlink(dest)
                    else:
                        log.info("skipping deleting %s" % dest)
    def run(self):
        # Entry point: load named templates, collect egg filenames,
        # generate the site, build printable tutorials, prune stale files.
        destdir = self.destdir
        log.info("generating website to %s" % destdir)
        if not os.path.exists(destdir):
            log.info("creating %s" % destdir)
            os.makedirs(destdir)
        for name in self.templates_order:
            filename = self.templates[name]
            log.info("template %s loaded as %s" % (filename, name))
            kid.load_template(filename, name=name)
        if self.eggdir:
            if not self.eggdir.endswith("/"):
                self.eggdir += "/"
            choplen = len(self.eggdir)
            # Bare egg filenames (directory prefix chopped off), sorted.
            self.eggs = [fn[choplen:] for fn in glob(self.eggdir + "*")]
            self.eggs.sort()
        self.currentfiles = set()
        self.serializer = kid.HTMLSerializer(encoding=self.encoding)
        for d in self.srcdirs:
            self.update_site_files(d)
        for dest, src in self.copydirs.items():
            if os.path.isdir(src):
                self.update_site_files(src, processTemplates=False,
                                       destroot=os.path.join(self.destdir, dest))
            else:
                destfile = os.path.join(self.destdir, os.path.normpath(dest))
                self.copy_if_newer(src, destfile)
                self.currentfiles.add(destfile)
        self.printable_tutorial()
        self.delete_excess_files()
    def printable_tutorial(self):
        # Build single-page "printable" versions of the tutorials.
        if self.noprintable:
            return
        self._make_printable(os.path.join("docs", "tutorials", "wiki20"), 3)
        self._make_printable(os.path.join("docs", "wiki20"))
    def _make_printable(self, tutdir, up_to_root=2):
        # Concatenate the body of every .html page in tutdir (sorted by
        # filename) into a single document rendered as printable.html.
        endpath = tutdir
        tutdir = os.path.join(self.srcdirs[0], tutdir)
        import cElementTree as elementtree
        masterdoc = """
TurboGears: 20 Minute Wiki Tutorial
"""
        docs = os.listdir(tutdir)
        docs.sort()
        for doc in docs:
            if not doc.endswith(".html"):
                continue
            log.info("combining %s" % doc)
            tree = elementtree.parse(os.path.join(tutdir, doc))
            body = tree.find("{http://www.w3.org/1999/xhtml}body")
            # Scripts are useless in a printable page; strip them.
            map(body.remove, body.findall("{http://www.w3.org/1999/xhtml}script"))
            bodytext = elementtree.tostring(body)
            bodytext = bodytext.replace("", "")
            bodytext = bodytext.replace('', "")
            masterdoc += bodytext
            masterdoc += """
"""
        masterdoc = masterdoc.replace("html:", "")
        template = kid.Template(source=masterdoc, root="../" * up_to_root)
        template.serializer = self.serializer
        destend = os.path.join(self.destdir, endpath)
        if not os.path.exists(destend):
            os.makedirs(destend)
        outfn = os.path.join(destend, "printable.html")
        print "combined output: %s" % outfn
        outfile = open(outfn, "w")
        masterdoc = template.serialize(encoding=self.encoding)
        # Undo the template's "$" escaping (see render_template).
        masterdoc = masterdoc.replace("$${", "${")
        outfile.write(masterdoc)
        outfile.close()
        self.currentfiles.add(outfn)
PK 6v?1, turbogears/__init__.py"TurboGears Front-to-Back Web Framework"
import pkg_resources
from turbogears import config
from turbogears.controllers import expose, flash, validate, redirect, \
error_handler, exception_handler, url
from turbogears import controllers, view, database, validators, command, \
i18n, widgets, startup, scheduler
from turbogears.release import version as __version__, author as __author__, \
email as __email__, license as __license__, \
copyright as __copyright__
from turbogears.widgets import mochikit
from turbogears.config import update_config
from turbogears.paginate import paginate
from turbogears.startup import start_server
# Load every registered TurboGears extension and pull its exported
# symbols into the turbogears namespace.
extensions = pkg_resources.iter_entry_points("turbogears.extensions")
for entrypoint in extensions:
    ext = entrypoint.load()
    if hasattr(ext, "tgsymbols"):
        globals().update(ext.tgsymbols())
i18n.install() # adds _ (gettext) to builtins namespace
__all__ = ["url", "expose", "redirect", "validate", "flash",
"error_handler", "exception_handler",
"view", "controllers", "update_config",
"database", "command", "validators", "mochikit", "widgets",
"config", "start_server", "scheduler"]
PK 6^1* * turbogears/startup.py"Things to do when TurboGears is imported."
import os
import errno
import logging
import sys
import time
import atexit
import signal
import pkg_resources
import cherrypy
from cherrypy import _cputil
from formencode.variabledecode import NestedVariables
from cherrypy._cpwsgi import wsgiApp, CPHTTPRequest
from cherrypy._cpwsgiserver import CherryPyWSGIServer
from turbogears import config, scheduler, database
from turbogears import view
from turbogears.database import hub_registry, EndTransactionsFilter
log = logging.getLogger("turbogears.startup")
pkg_resources.require("TurboGears")
def reloader_thread(freq):
    """Monkeypatch for the reloader provided by CherryPy.

    This reloader is designed to reload a single package. This is
    more efficient and, more important, compatible with zipped
    libraries that may not provide access to the individual files.

    freq is the polling interval in seconds.  The thread exits the
    process with status 3 when a watched file changes, which signals
    the wrapper script to restart the server.
    """
    def archive_selector(module):
        # For zipimported modules, watch the zip archive file instead of
        # the (inaccessible) individual source files.
        if hasattr(module, '__loader__'):
            if hasattr(module.__loader__, 'archive'):
                return module.__loader__.archive
        return module
    mtimes = {}
    package = config.get("autoreload.package", None)
    if package is None:
        print \
"""TurboGears requires autoreload.package to be set. It can be an empty
value, which will use CherryPy's default behavior which is to check
every module. Setting an actual package makes the check much faster."""
        return
    while cherrypy.lib.autoreload.RUN_RELOADER:
        if package:
            # Only watch modules belonging to the configured package.
            modnames = filter(lambda modname: modname.startswith(package),
                              sys.modules.keys())
            modlist = [sys.modules[modname] for modname in modnames]
        else:
            modlist = map(archive_selector, sys.modules.values())
        for filename in filter(lambda v: v,
                map(lambda m: getattr(m, "__file__", None), modlist)):
            if filename.endswith(".kid") or filename == "":
                continue
            orig_filename = filename
            if filename.endswith(".pyc"):
                # Watch the .py source rather than the compiled file.
                filename = filename[:-1]
            try:
                mtime = os.stat(filename).st_mtime
            except OSError, e:
                if orig_filename.endswith('.pyc') and e[0] == errno.ENOENT:
                    # This prevents us from endlessly restarting
                    # if there is an old .pyc lying around
                    # after a .py file has been deleted
                    try: os.unlink(orig_filename)
                    except: pass
                sys.exit(3) # force reload
            if filename not in mtimes:
                # First sighting: record the baseline mtime.
                mtimes[filename] = mtime
                continue
            if mtime > mtimes[filename]:
                sys.exit(3) # force reload
        time.sleep(freq)
cherrypy.lib.autoreload.reloader_thread = reloader_thread
webpath = ""
DNS_SD_PID = None
def start_bonjour(package=None):
    """Publish the server via zeroconf (Bonjour/Avahi).

    Spawns either avahi-publish-service or dns-sd, whichever executable
    exists on this system; at most one child process is started and its
    pid is kept in the module-global DNS_SD_PID.
    """
    global DNS_SD_PID
    if DNS_SD_PID:
        # Already announcing.
        return
    if (not hasattr(cherrypy, "root")) or (not cherrypy.root):
        return
    if not package:
        # Default the service name to the root controller's top package.
        package = cherrypy.root.__module__
        package = package[:package.find(".")]
    host = config.get('server.socket_host', '')
    port = str(config.get('server.socket_port'))
    env = config.get('server.environment')
    name = package + ": " + env
    type = "_http._tcp"
    cmds = [['/usr/bin/avahi-publish-service', ["-H", host, name, type, port]],
            ['/usr/bin/dns-sd', ['-R', name, type, "."+host, port, "path=/"]]]
    for cmd, args in cmds:
        # TODO:. This check is flawed. If one has both services installed and
        # avahi isn't the one running, then this won't work. We should either
        # try registering with both or checking what service is running and use
        # that. Program availability on the filesystem was never enough...
        if os.path.exists(cmd):
            DNS_SD_PID = os.spawnv(os.P_NOWAIT, cmd, [cmd]+args)
            atexit.register(stop_bonjour)
            break
def stop_bonjour():
    """Terminate the zeroconf announcement child started by start_bonjour."""
    if DNS_SD_PID:
        try:
            os.kill(DNS_SD_PID, signal.SIGTERM)
        except OSError:
            pass  # the process is already gone; nothing to clean up
class VirtualPathFilter(object):
    """Filter that makes CherryPy ignorant of a URL root path.

    That is, you can mount your app so the URI "/users/~rdel/myapp/"
    maps to the root object "/".
    """
    def on_start_resource(self):
        # Strip the configured server.webpath prefix from the request
        # path; any request outside the prefix is a 404.
        prefix = config.get('server.webpath', False)
        if prefix:
            path = cherrypy.request.object_path
            if path == prefix:
                cherrypy.request.object_path = '/'
            elif path.startswith(prefix):
                cherrypy.request.object_path = path[len(prefix):]
            else:
                raise cherrypy.NotFound(path)
class NestedVariablesFilter(object):
    """Filter that runs request parameters through FormEncode's
    NestedVariables, turning dotted/indexed names into nested
    dicts and lists before the controller sees them."""
    def before_main(self):
        if hasattr(cherrypy.request, "params"):
            cherrypy.request.params = \
                NestedVariables.to_python(cherrypy.request.params or {})
def startTurboGears():
    """Handles TurboGears tasks when the CherryPy server starts.

    This adds the "tg_js" configuration to make MochiKit accessible.
    It also turns on stdlib logging when in development mode.
    """
    # Serve the bundled static files and JavaScript with the static
    # filter, keeping the debug-info filter off for those paths.
    config.update({"/tg_static":
        {
            "static_filter.on": True,
            "static_filter.dir":
                os.path.abspath(pkg_resources.resource_filename(__name__, "static")),
            'log_debug_info_filter.on' : False,
        }
    })
    config.update({"/tg_js" :
        {
            "static_filter.on" : True,
            "static_filter.dir" :
                os.path.abspath(pkg_resources.resource_filename(__name__, "static/js")),
            'log_debug_info_filter.on' : False,
        }
    })
    cherrypy.config.environments['development']['log_debug_info_filter.on'] = False
    # Default the request decoding filter on, using the template encoding.
    if config.get("decoding_filter.on", path="/") is None:
        config.update({"/": {
            "decoding_filter.on" : True,
            "decoding_filter.encoding" : config.get(
                "kid.encoding", "utf8")
        }})
    view.load_engines()
    view.loadBaseTemplates()
    global webpath
    webpath = config.get("server.webpath", "")
    if hasattr(cherrypy, "root") and cherrypy.root:
        if not hasattr(cherrypy.root, "_cp_filters"):
            cherrypy.root._cp_filters= []
        morefilters = [EndTransactionsFilter(),
                       NestedVariablesFilter()]
        if webpath:
            # Strip the mount prefix from paths before any other filter.
            morefilters.insert(0, VirtualPathFilter())
        cherrypy.root._cp_filters.extend(morefilters)
    # Normalize webpath to "segment/" form for URL building.
    if webpath.startswith("/"):
        webpath = webpath[1:]
    if webpath and not webpath.endswith("/"):
        webpath = webpath + "/"
    isdev = config.get('server.environment') == 'development'
    if not config.get("tg.new_style_logging"):
        # Old-style logging: wire up root and access loggers directly
        # from simple config options.
        if config.get('server.log_to_screen'):
            setuplog = logging.getLogger()
            setuplog.setLevel(logging.DEBUG)
            fmt = logging.Formatter("%(asctime)s %(name)s "
                "%(levelname)s %(message)s")
            handler = logging.StreamHandler(sys.stdout)
            handler.setLevel(logging.DEBUG)
            handler.setFormatter(fmt)
            setuplog.addHandler(handler)
        logfile = config.get("server.log_file")
        if logfile:
            setuplog = logging.getLogger("turbogears.access")
            setuplog.propagate = 0
            fmt = logging.Formatter("%(message)s")
            handler = logging.FileHandler(logfile)
            handler.setLevel(logging.INFO)
            handler.setFormatter(fmt)
            setuplog.addHandler(handler)
    bonjoursetting = config.get("tg.bonjour", None)
    if bonjoursetting or isdev:
        start_bonjour(bonjoursetting)
    if config.get("sqlalchemy.dburi"):
        database.bind_meta_data()
    # Start all TurboGears extensions
    extensions = pkg_resources.iter_entry_points("turbogears.extensions")
    for entrypoint in extensions:
        ext = entrypoint.load()
        if hasattr(ext, "start_extension"):
            ext.start_extension()
    # User-registered startup callbacks.
    for item in call_on_startup:
        item()
    if config.get("tg.scheduler", False):
        scheduler._start_scheduler()
        log.info("Scheduler started")
def stopTurboGears():
    """Handles TurboGears tasks when the CherryPy server stops."""
    # end all transactions and clear out the hubs to
    # help ensure proper reloading in autoreload situations
    for hub in hub_registry:
        hub.end()
    hub_registry.clear()
    stop_bonjour()
    # Shut down all TurboGears extensions
    extensions= pkg_resources.iter_entry_points( "turbogears.extensions" )
    for entrypoint in extensions:
        ext= entrypoint.load()
        if hasattr(ext, "shutdown_extension"):
            ext.shutdown_extension()
    # User-registered shutdown callbacks.
    for item in call_on_shutdown:
        item()
    if config.get("tg.scheduler", False):
        scheduler._stop_scheduler()
        log.info("Scheduler stopped")
old_object_trail = _cputil.get_object_trail
# hang on to object trail to use it to find an app root if need be
def get_object_trail(object_path=None):
    """Wrapper for CherryPy's get_object_trail that stashes the trail on
    the request so check_app_root() can locate the RootController."""
    trail = old_object_trail(object_path)
    try:
        cherrypy.request.object_trail = trail
    except AttributeError:
        # No active request (e.g. during startup); nothing to stash.
        pass
    return trail
_cputil.get_object_trail = get_object_trail
class SimpleWSGIServer(CherryPyWSGIServer):
    """A WSGI server that accepts a WSGI application as a parameter."""
    RequestHandlerClass = CPHTTPRequest
    def __init__(self):
        conf = cherrypy.config.get
        wsgi_app = wsgiApp
        if conf('server.environment') == 'development':
            # In development, wrap the app in Paste's interactive
            # exception debugger when Paste is installed.
            try:
                from paste.evalexception.middleware import EvalException
            except ImportError:
                pass
            else:
                wsgi_app = EvalException(wsgi_app, global_conf={})
                # EvalException needs raw exceptions, not CherryPy's
                # rendered error pages.
                cherrypy.config.update({'server.throw_errors':True})
        bind_addr = (conf("server.socket_host"), conf("server.socket_port"))
        CherryPyWSGIServer.__init__(self, bind_addr, wsgi_app,
            conf("server.thread_pool"),
            conf("server.socket_host"),
            request_queue_size = conf(
                "server.socket_queue_size"),
            )
def start_server(root):
    """Mount root as the application root object and start CherryPy.

    With tg.fancy_exception enabled, the WSGI server variant is used so
    Paste's interactive debugger can be applied (see SimpleWSGIServer).
    """
    cherrypy.root = root
    if config.get("tg.fancy_exception", False):
        cherrypy.server.start(server=SimpleWSGIServer())
    else:
        cherrypy.server.start()
# Register the TurboGears start/stop hooks once at import time.
if startTurboGears not in cherrypy.server.on_start_server_list:
    cherrypy.server.on_start_server_list.append(startTurboGears)
if stopTurboGears not in cherrypy.server.on_stop_server_list:
    cherrypy.server.on_stop_server_list.append(stopTurboGears)
call_on_startup = []
call_on_shutdown = []
PK 6RW7 7 turbogears/database.py"""Provides convenient access to an SQLObject or SQLAlchemy
managed database."""
import sys
import time
import logging
try:
import sqlobject
from sqlobject.dbconnection import ConnectionHub, Transaction, TheURIOpener
from sqlobject.util.threadinglocal import local as threading_local
except ImportError:
sqlobject = None
import cherrypy
from cherrypy.filters.basefilter import BaseFilter
import dispatch
from turbogears import config
from turbogears.util import remove_keys
from turbogears.genericfunctions import MultiorderGenericFunction
log = logging.getLogger("turbogears.database")
_engine = None
# Provide support for sqlalchemy
try:
import sqlalchemy
from sqlalchemy.ext import activemapper, sessioncontext
from sqlalchemy.exceptions import InvalidRequestError
def get_engine():
    """Retrieve (creating on first use) the SQLAlchemy engine from the
    current configuration, and make sure the thread-local metadata is
    bound to it."""
    global _engine
    if not _engine:
        alch_args = dict()
        # Collect every "sqlalchemy.*" config option; the last dotted
        # component becomes a create_engine() keyword argument.
        for k, v in config.config.configMap["global"].items():
            if "sqlalchemy" in k:
                alch_args[k.split(".")[-1]] = v
        dburi = alch_args.pop('dburi')
        if not dburi:
            raise KeyError("No sqlalchemy database config found!")
        _engine = sqlalchemy.create_engine(dburi, **alch_args)
        metadata.connect(_engine)
    elif not metadata.is_bound():
        # Engine exists but this thread's metadata is unbound: rebind.
        metadata.connect(_engine)
    return _engine
def create_session():
    """Creates a session that uses the engine from thread-local metadata"""
    if not metadata.is_bound():
        # Lazily set up the engine on first use.
        bind_meta_data()
    return sqlalchemy.create_session()
# Shared ActiveMapper metadata, plus an object store whose sessions are
# built by create_session() above; ActiveMapper is pointed at it too.
metadata = activemapper.metadata
session = activemapper.Objectstore(create_session)
activemapper.objectstore = session
def bind_meta_data():
    # Binding the metadata is a side effect of creating/fetching the
    # engine; this name exists as the public entry point for it.
    get_engine()
except ImportError:
sqlalchemy = None
try:
set
except NameError:
from sets import Set as set
hub_registry = set()
# This dictionary stores the AutoConnectHubs used for each
# connection URI
_hubs = dict()
if sqlobject:
def _mysql_timestamp_converter(raw):
    """Convert a MySQL TIMESTAMP to a floating point number representing
    the seconds since the Un*x Epoch. It uses custom code the input seems
    to be the new (MySQL 4.1+) timestamp format, otherwise code from the
    MySQLdb module is used."""
    # In the new ISO-like 'YYYY-MM-DD HH:MM:SS' format the fifth
    # character is always '-'.
    if raw[4] != '-':
        import MySQLdb.converters
        return MySQLdb.converters.mysql_timestamp_converter(raw)
    return time.mktime(time.strptime(raw, '%Y-%m-%d %H:%M:%S'))
class AutoConnectHub(ConnectionHub):
    """Connects to the database once per thread. The AutoConnectHub also
    provides convenient methods for managing transactions."""
    uri = None
    params = {}
    def __init__(self, uri=None, supports_transactions=True):
        if not uri:
            uri = config.get("sqlobject.dburi")
        self.uri = uri
        self.supports_transactions = supports_transactions
        # Register globally so commit_all/rollback_all/end_all reach us.
        hub_registry.add(self)
        ConnectionHub.__init__(self)
    def _is_interesting_version(self):
        "Return True only if version of MySQLdb <= 1.0."
        import MySQLdb
        module_version = MySQLdb.version_info[0:2]
        major = module_version[0]
        minor = module_version[1]
        # we can't use Decimal here because it is only available for Python 2.4
        return (major < 1 or (major == 1 and minor < 2))
    def _enable_timestamp_workaround(self, connection):
        """Enable a workaround for an incompatible timestamp format change
        in MySQL 4.1 when using an old version of MySQLdb. See trac ticket
        #1235 - http://trac.turbogears.org/ticket/1235 for details."""
        # precondition: connection is a MySQLConnection
        import MySQLdb
        import MySQLdb.converters
        if self._is_interesting_version():
            conversions = MySQLdb.converters.conversions.copy()
            conversions[MySQLdb.constants.FIELD_TYPE.TIMESTAMP] = \
                _mysql_timestamp_converter
            # There is no method to use custom keywords when using
            # "connectionForURI" in sqlobject so we have to insert the
            # conversions afterwards.
            connection.kw["conv"] = conversions
    def getConnection(self):
        """Return this thread's connection (wrapped in a transaction),
        creating the connection on first use for the thread."""
        try:
            conn = self.threadingLocal.connection
            return self.begin(conn)
        except AttributeError:
            # First use on this thread: open a new connection.
            if self.uri:
                conn = sqlobject.connectionForURI(self.uri)
                # the following line effectively turns off the DBAPI connection
                # cache. We're already holding on to a connection per thread,
                # and the cache causes problems with sqlite.
                if self.uri.startswith("sqlite"):
                    TheURIOpener.cachedURIs = {}
                elif self.uri.startswith("mysql") and \
                     config.get("turbogears.enable_mysql41_timestamp_workaround", False):
                    self._enable_timestamp_workaround(conn)
                self.threadingLocal.connection = conn
                return self.begin(conn)
            raise AttributeError(
                "No connection has been defined for this thread "
                "or process")
    def reset(self):
        """Used for testing purposes. This drops all of the connections
        that are being held."""
        self.threadingLocal = threading_local()
    def begin(self, conn=None):
        "Starts a transaction."
        if not self.supports_transactions:
            return conn
        if not conn:
            conn = self.getConnection()
        if isinstance(conn, Transaction):
            if conn._obsolete:
                # Reuse the Transaction object after it was previously
                # committed or rolled back.
                conn.begin()
            return conn
        # Remember the plain connection so end() can restore it later.
        self.threadingLocal.old_conn = conn
        trans = conn.transaction()
        self.threadingLocal.connection = trans
        return trans
    def commit(self):
        "Commits the current transaction."
        if not self.supports_transactions:
            return
        try:
            conn = self.threadingLocal.connection
        except AttributeError:
            # Nothing ever connected on this thread; nothing to commit.
            return
        if isinstance(conn, Transaction):
            self.threadingLocal.connection.commit()
    def rollback(self):
        "Rolls back the current transaction."
        if not self.supports_transactions:
            return
        try:
            conn = self.threadingLocal.connection
        except AttributeError:
            return
        if isinstance(conn, Transaction) and not conn._obsolete:
            self.threadingLocal.connection.rollback()
    def end(self):
        "Ends the transaction, returning to a standard connection."
        if not self.supports_transactions:
            return
        try:
            conn = self.threadingLocal.connection
        except AttributeError:
            return
        if not isinstance(conn, Transaction):
            return
        if not conn._obsolete:
            # Anything still pending at this point is discarded.
            conn.rollback()
        # Restore the plain connection saved by begin().
        self.threadingLocal.connection = self.threadingLocal.old_conn
        del self.threadingLocal.old_conn
        self.threadingLocal.connection.cache.clear()
class PackageHub(object):
    """Transparently proxies to an AutoConnectHub for the URI
    that is appropriate for this package. A package URI is
    configured via "packagename.dburi" in the global CherryPy
    settings. If there is no package DB URI configured, the
    default (provided by "sqlobject.dburi") is used.

    The hub is not instantiated until an attempt is made to
    use the database.
    """
    def __init__(self, packagename):
        self.packagename = packagename
        self.hub = None
    def __get__(self, obj, type):
        # Descriptor protocol: resolve the real hub lazily, then delegate.
        if not self.hub:
            self.set_hub()
        return self.hub.__get__(obj, type)
    def __set__(self, obj, type):
        if not self.hub:
            self.set_hub()
        return self.hub.__set__(obj, type)
    def __getattr__(self, name):
        # Any other attribute access also goes to the real hub.
        if not self.hub:
            self.set_hub()
        return getattr(self.hub, name)
    def set_hub(self):
        """Resolve the configured dburi and attach the matching
        AutoConnectHub, sharing hubs between packages with the same URI."""
        dburi = config.get("%s.dburi" % self.packagename, None)
        if not dburi:
            dburi = config.get("sqlobject.dburi", None)
        if not dburi:
            raise KeyError, "No database configuration found!"
        if dburi.startswith("notrans_"):
            # "notrans_" URI prefix disables transaction support.
            dburi = dburi[8:]
            trans = False
        else:
            trans = True
        hub = _hubs.get(dburi, None)
        if not hub:
            hub = AutoConnectHub(dburi, supports_transactions=trans)
            _hubs[dburi] = hub
        self.hub = hub
else:
    # SQLObject is not installed: provide inert stand-ins so that imports
    # of these names from turbogears.database still succeed.
    class AutoConnectHub(object):
        pass
    class PackageHub(object):
        pass
def set_db_uri(dburi, package=None):
    """Sets the database URI to use either globally or for a specific
    package. Note that once the database is accessed, calling
    setDBUri will have no effect.

    @param dburi: database URI to use
    @param package: package name this applies to, or None to set the default.
    """
    if package:
        key = "%s.dburi" % package
    else:
        key = "sqlobject.dburi"
    config.update({'global': {key: dburi}})
def commit_all():
    """Commit the open transactions of every registered hub for the
    current thread."""
    for db_hub in hub_registry:
        db_hub.commit()
def rollback_all():
    """Roll back the open transactions of every registered hub for the
    current thread."""
    for db_hub in hub_registry:
        db_hub.rollback()
def end_all():
    """End the open transactions of every registered hub for the
    current thread."""
    for db_hub in hub_registry:
        db_hub.end()
# PyProtocols-style decoration: run_with_transaction is a generic
# function whose implementation is picked at call time -- see so_rwt
# (SQLObject) and sa_rwt (SQLAlchemy) below.
[dispatch.generic(MultiorderGenericFunction)]
def run_with_transaction(func, *args, **kw):
    pass
def _use_sa(args=None):
    # check to see if sqlalchemy has been imported and configured
    # ("args" is unused; it only exists so the dispatch rule strings can
    # mention it to defeat rule pre-caching)
    return _engine is not None
[run_with_transaction.when("not _use_sa(args)")] # include "args" to avoid call being pre-cached
def so_rwt(func, *args, **kw):
    """Run func inside an SQLObject transaction.

    Commits on success and on HTTP/internal redirects (which are normal
    control flow in CherryPy), rolls back on any other exception, and
    always ends the transactions for this thread.
    """
    log.debug("Starting SQLObject transaction")
    try:
        try:
            retval = func(*args, **kw)
            commit_all()
            return retval
        except cherrypy.HTTPRedirect:
            # Redirects are successful outcomes: keep the work.
            commit_all()
            raise
        except cherrypy.InternalRedirect:
            commit_all()
            raise
        except:
            # No need to "rollback" the sqlalchemy unit of work, because nothing
            # has hit the db yet.
            rollback_all()
            raise
    finally:
        end_all()
def dispatch_exception(exception, args, kw):
    """Route an exception raised by a controller to a registered
    exception handler; re-raise the original exception (with its
    original traceback) when no handler applies."""
    # errorhandling import here to avoid circular imports
    from turbogears.errorhandling import dispatch_error
    # Keep in mind func is not the real func but _expose
    real_func, accept, allow_json, controller = args[:4]
    args = args[4:]
    exc_type, exc_value, exc_trace = sys.exc_info()
    # Strip error-handling control keywords before re-dispatching.
    remove_keys(kw, ("tg_source", "tg_errors", "tg_exceptions"))
    try:
        output = dispatch_error(
            controller, real_func, None, exception, *args, **kw)
    except dispatch.NoApplicableMethods:
        raise exc_type, exc_value, exc_trace
    else:
        # Drop the traceback reference to avoid a reference cycle.
        del exc_trace
        return output
# include "args" to avoid call being pre-cached
[run_with_transaction.when("_use_sa(args)")]
def sa_rwt(func, *args, **kw):
    """Run func inside an SQLAlchemy transaction bound to the request."""
    log.debug("New SA transaction")
    # Drop any session left over on this thread from a previous request.
    del session.context.current
    req = cherrypy.request
    req.sa_transaction = session.create_transaction()
    try:
        retval = func(*args, **kw)
        req.sa_transaction.commit()
    except (cherrypy.HTTPRedirect,cherrypy.InternalRedirect):
        # Redirects are normal control flow: try to keep the work.
        try:
            req.sa_transaction.commit()
        except Exception,e:
            retval = dispatch_exception(e,args,kw)
        else:
            # Commit succeeded; propagate the redirect.
            raise
    except InvalidRequestError, e:
        # do nothing here transaction
        # has been already rolledback and
        # we should just display output
        # NOTE(review): "retval" may be unbound on this path if func()
        # raised before assignment -- confirm against callers.
        pass
    except Exception, e:
        req.sa_transaction.rollback()
        retval = dispatch_exception(e,args,kw)
    return retval
def so_to_dict(sqlobj):
    """Convert an SQLObject instance to a dict keyed by column name.

    The "id" column is added explicitly because it is not listed in
    sqlmeta.columns.  For inheritable objects the parent's columns are
    merged in and the internal 'childName' bookkeeping key is dropped.
    Passing None returns {} (this also stops the _parent recursion).
    """
    d = {}
    # Identity test instead of "== None": equality may be overridden on
    # SQLObject instances, while identity is always safe.
    if sqlobj is None:
        # stops recursion
        return d
    for name in sqlobj.sqlmeta.columns.keys():
        d[name] = getattr(sqlobj, name)
    # id must be added explicitly
    d["id"] = sqlobj.id
    if sqlobj._inheritable:
        d.update(so_to_dict(sqlobj._parent))
        d.pop('childName')
    return d
def so_columns(sqlclass, columns=None):
    """Return a dict of all columns of an SQLObject class, including
    those inherited from InheritableSO bases.

    When a dict is passed in via ``columns`` it is updated in place and
    returned (used by the recursive calls).
    """
    if columns is None:
        columns = {}
    # 'childName' is SQLObject inheritance bookkeeping, not a real column.
    for colname, column in sqlclass.sqlmeta.columns.items():
        if colname != 'childName':
            columns[colname] = column
    if sqlclass._inheritable:
        so_columns(sqlclass.__base__, columns)
    return columns
def so_joins(sqlclass, joins=None):
    """Return a list of all joins of an SQLObject class, including those
    inherited from InheritableSO bases.

    When a list is passed in via ``joins`` it is extended in place and
    returned (used by the recursive calls).
    """
    if joins is None:
        joins = []
    for join in sqlclass.sqlmeta.joins:
        joins.append(join)
    if sqlclass._inheritable:
        so_joins(sqlclass.__base__, joins)
    return joins
class EndTransactionsFilter(BaseFilter):
    """CherryPy filter that closes out database work at the end of each
    request."""
    def on_end_resource(self):
        if _use_sa():
            # Clear the SQLAlchemy session so objects do not leak
            # across requests/threads.
            session.clear()
        end_all()
__all__ = ["PackageHub", "AutoConnectHub", "set_db_uri",
"commit_all", "rollback_all", "end_all", "so_to_dict",
"so_columns", "so_joins", "EndTransactionsFilter"]
if sqlalchemy:
__all__.extend(["metadata", "session", "bind_meta_data"])
PK 6 turbogears/config.pyimport sys, os, glob, re
from cherrypy import config
from configobj import ConfigObj
import pkg_resources
import logging
import logging.handlers
__all__ = ["update_config", "get", "update"]
try:
set
except NameError:
from sets import Set as set
class ConfigError(Exception):
    """Raised when the logging configuration is invalid or incomplete."""
    pass
def _get_formatters(formatters):
    """Replace each formatter config dict in *formatters* (in place)
    with a ready logging.Formatter instance.

    Config format strings use "*(" where logging expects "%(" --
    presumably to dodge config-file interpolation -- so that escape is
    undone here.
    """
    for name, cfg in formatters.items():
        kwargs = {}
        fmt = cfg.get("format", None)
        if fmt:
            kwargs["fmt"] = fmt.replace("*(", "%(")
        datefmt = cfg.get("datefmt", None)
        if datefmt:
            kwargs["datefmt"] = datefmt
        formatters[name] = logging.Formatter(**kwargs)
def _get_handlers(handlers, formatters):
    """Replace each handler config dict in `handlers` (in place) with a
    configured logging handler instance.

    `formatters` maps formatter names to logging.Formatter objects (as
    produced by _get_formatters).  Raises ConfigError for unknown handler
    classes, bad constructor arguments or unknown formatter references.
    """
    for key, handler in handlers.items():
        kw = {}  # NOTE(review): appears unused in this function -- verify
        try:
            cls = handler.get("class")
            args = handler.get("args", tuple())
            level = handler.get("level", None)
            # NOTE: class name, args and level come from the config file
            # and are passed to eval() below -- the config is trusted input.
            try:
                # Resolve the class in the logging module first ...
                cls = eval(cls, logging.__dict__)
            except NameError:
                try:
                    # ... then fall back to logging.handlers.
                    cls = eval(cls, logging.handlers.__dict__)
                except NameError, err:
                    raise ConfigError("Specified class in handler "
                        "%s is not a recognizable logger name" % key)
            try:
                handler_obj = cls(*eval(args, logging.__dict__))
            except IOError,err:
                raise ConfigError("Missing or wrong argument to "
                    "%s in handler %s -> %s " % (cls.__name__,key,err))
            except TypeError,err:
                raise ConfigError("Wrong format for arguments "
                    "to %s in handler %s -> %s" % (cls.__name__,key,err))
            if level:
                # Level is given symbolically, e.g. "DEBUG".
                level = eval(level, logging.__dict__)
                handler_obj.setLevel(level)
        except KeyError:
            # NOTE(review): dict.get() does not raise KeyError, so this
            # branch may be unreachable -- verify the intent.
            raise ConfigError("No class specified for logging "
                "handler %s" % key)
        formatter = handler.get("formatter", None)
        if formatter:
            try:
                formatter = formatters[formatter]
            except KeyError:
                raise ConfigError("Handler %s references unknown "
                    "formatter %s" % (key, formatter))
            handler_obj.setFormatter(formatter)
        handlers[key] = handler_obj
def _get_loggers(loggers, handlers):
    """Configure logger objects from their config sections.

    `handlers` maps handler names to handler instances (as produced by
    _get_handlers).  Raises ConfigError when a logger references an
    unknown handler.
    """
    for key, logger in loggers.items():
        qualname = logger.get("qualname", None)
        if qualname:
            log = logging.getLogger(qualname)
        else:
            # No qualname means the root logger.
            log = logging.getLogger()
        level = logger.get("level", None)
        if level:
            # Level is given symbolically, e.g. "DEBUG".
            level = eval(level, logging.__dict__)
        else:
            level = logging.NOTSET
        log.setLevel(level)
        propagate = logger.get("propagate", None)
        if propagate is not None:
            log.propagate = propagate
        cfghandlers = logger.get("handlers", None)
        if cfghandlers:
            # Accept a single handler name as well as a list of names.
            if isinstance(cfghandlers, basestring):
                cfghandlers = [cfghandlers]
            for handler in cfghandlers:
                try:
                    handler = handlers[handler]
                except KeyError:
                    raise ConfigError("Logger %s references unknown "
                        "handler %s" % (key, handler))
                log.addHandler(handler)
def configure_loggers(config):
    """Configures the Python logging module, using options that are very
    similar to the ones listed in the Python documentation. This also
    removes the logging configuration from the configuration dictionary
    because CherryPy doesn't like it there. Here are some of the Python
    examples converted to the format used here:

    [logging]
    [[loggers]]
    [[[parser]]]
    level="DEBUG"
    handlers="hand01"
    propagate=1
    qualname="compiler.parser"
    [[handlers]]
    [[[hand01]]]
    class="StreamHandler"
    level="NOTSET"
    formatter="form01"
    args="(sys.stdout,)"
    [[formatters]]
    [[[form01]]]
    format="F1 *(asctime)s *(levelname)s *(message)s"
    datefmt=

    One notable format difference is that *() is used in the formatter
    instead of %() because %() is already used for config file
    interpolation.
    """
    # dict.has_key() is deprecated; use the "in" operator instead.
    if "logging" not in config:
        # No [logging] section: old-style (CherryPy) logging stays active.
        config["global"]["tg.new_style_logging"] = False
        return
    logcfg = config["logging"]
    formatters = logcfg.get("formatters", {})
    _get_formatters(formatters)
    handlers = logcfg.get("handlers", {})
    _get_handlers(handlers, formatters)
    loggers = logcfg.get("loggers", {})
    _get_loggers(loggers, handlers)
    # CherryPy chokes on an unknown "logging" section, so drop it.
    del config["logging"]
    config["global"]["tg.new_style_logging"] = True
def config_defaults():
    """Return the default interpolation values for configuration files.

    Currently just 'current_dir_uri': the absolute path of the current
    working directory, forced to begin with a slash (relevant on Windows
    where abspath() yields a drive-letter path).
    """
    current_dir_uri = os.path.abspath(os.getcwd())
    if not current_dir_uri.startswith("/"):
        current_dir_uri = "/" + current_dir_uri
    return {'current_dir_uri': current_dir_uri}
def config_obj(configfile = None, modulename = None):
    """Build a ConfigObj from a module's bundled .cfg file(s) and/or an
    explicit config file.

    When both are given, the module's config is loaded first and the
    explicit file merged on top, so the file's values win.  Interpolation
    defaults (current_dir_uri, top_level_dir, package_dir) are injected
    into the DEFAULT section.
    """
    defaults = config_defaults()
    if modulename:
        mod_globals = dict()  # NOTE(review): appears unused -- verify
        lastdot = modulename.rfind(".")
        firstdot = modulename.find(".")
        packagename = modulename[:lastdot]
        top_level_package = modulename[:firstdot]
        modname = modulename[lastdot+1:]
        # Prefer "<modname>.cfg" inside the package ...
        modfile = pkg_resources.resource_filename(packagename,
            modname + ".cfg")
        if not os.path.exists(modfile):
            # ... otherwise fall back to a resource named like the module,
            # which may be a directory holding several .cfg files.
            modfile = pkg_resources.resource_filename(packagename,
                modname)
        if os.path.isdir(modfile):
            configfiles = glob.glob(os.path.join(modfile, "*.cfg"))
        else:
            configfiles = [modfile]
        configdata = ConfigObj(unrepr=True)
        # Normalize to forward slashes and strip the trailing separator.
        top_level_dir = pkg_resources.resource_filename(
            top_level_package, "")[:-1].replace("\\", "/")
        package_dir = pkg_resources.resource_filename(
            packagename, "")[:-1].replace("\\", "/")
        defaults.update(dict(top_level_dir=top_level_dir,
            package_dir=package_dir))
        configdata.merge(dict(DEFAULT=defaults))
        for file in configfiles:
            configdata2 = ConfigObj(file, unrepr=True)
            configdata2.merge(dict(DEFAULT=defaults))
            configdata.merge(configdata2)
    if configfile:
        if modulename:
            # Merge the explicit config file on top of the module config.
            configdata2 = ConfigObj(configfile, unrepr=True)
            configdata2.merge(dict(DEFAULT=defaults))
            configdata.merge(configdata2)
        else:
            configdata = ConfigObj(configfile, unrepr=True)
    return configdata
def update_config(configfile=None, modulename=None):
    """Update the system configuration from a ConfigObj (INI-style) config
    file, a module name in dotted notation (assumed to have a ".cfg"
    extension), or both.

    When both are specified the module is loaded first, followed by the
    config file, so the config file's options override the module's.
    """
    configdata = config_obj(configfile, modulename)
    configdict = configdata.dict()
    configure_loggers(configdict)
    config.update(configdict)
def get(key, default_value=None, return_section=False, path=None):
    """Retrieve a configuration value for `key`.

    On Windows, drive letters in the 'sqlobject.dburi' value are rewritten
    from '///C:' to '///C|' as SQLObject expects.
    """
    value = config.get(key, default_value, return_section, path)
    if value and key == 'sqlobject.dburi' and os.name == "nt":
        value = re.sub(r'///(\w):', r'///\1|', value)
    return value
def update(configvalues):
    """Merge the given dictionary of values into the global configuration."""
    return config.update(configvalues)
PK 6}D turbogears/genericfunctions.pyimport sys
from itertools import izip, repeat, chain as ichain
from dispatch import strategy, functions
class MultiorderGenericFunction(functions.GenericFunction):
    """Generic function allowing a priori method ordering.

    Extends RuleDispatch's GenericFunction so that 'when' and 'around'
    methods can be registered with an integer order; methods with lower
    order values are chained before those with higher values.
    """
    def __init__(self, func):
        functions.GenericFunction.__init__(self, func)
        # Sorted lists of the distinct order values registered so far.
        self.order_when = []
        self.order_around = []
    def when(self, cond, order=0):
        """Register a primary method for condition `cond` at `order`."""
        if order not in self.order_when:
            self.order_when.append(order)
            self.order_when.sort()
        return self._decorate(cond, "primary%d" % order)
    def around(self, cond, order=0):
        """Register an around method for condition `cond` at `order`."""
        if order not in self.order_around:
            self.order_around.append(order)
            self.order_around.sort()
        return self._decorate(cond, "around%d" % order)
    # Based on dispatch.functions.GenericFunction.combine
    def combine(self, cases):
        """Combine all registered methods into one callable chain."""
        strict = [strategy.ordered_signatures,strategy.safe_methods]
        loose = [strategy.ordered_signatures,strategy.all_methods]
        # One qualifier bucket per registered order, e.g. "primary0".
        primary_names = ['primary%d' % order for order in self.order_when]
        around_names = ['around%d' % order for order in self.order_around]
        cases = strategy.separate_qualifiers(
            cases,
            before = loose, after =loose,
            **dict(izip(ichain(primary_names, around_names), repeat(strict)))
        )
        # Chain primary methods in ascending order value.
        primary = strategy.method_chain(ichain(
            *[cases.get(primary, []) for primary in primary_names]))
        if cases.get('after') or cases.get('before'):
            befores = strategy.method_list(cases.get('before',[]))
            # 'after' methods run in reverse registration order.
            afters = strategy.method_list(list(cases.get('after',[]))[::-1])
            def chain(*args,**kw):
                for tmp in befores(*args,**kw): pass # toss return values
                result = primary(*args,**kw)
                for tmp in afters(*args,**kw): pass # toss return values
                return result
        else:
            chain = primary
        if (self.order_around):
            # Wrap the whole chain in the around methods, ordered chain last.
            chain = strategy.method_chain(ichain(*([cases.get(around, [])
                for around in around_names] + [[chain]])))
        return chain
def getter(var):
"""Create an accessor for given variable."""
frame = sys._getframe(1)
return lambda: var in frame.f_locals and frame.f_locals[var] or \
frame.f_globals[var]
__all__ = ["MultiorderGenericFunction", "getter", ]
PK 6l w" w" turbogears/validators.py"""Convenient validators and converters for data coming in from the web.
This module also imports everything from formencode.validators, so all
common validation routines are available here."""
import pkg_resources
#XXX Remove in 1.0.3 when everyone has already upgraded FE
# so we don't need to keep this in sync with setup.py
pkg_resources.require("FormEncode >= 0.7.1")
import time
import re
from datetime import datetime
import cgi # FieldStorageUploadConverter
import warnings
import simplejson
from formencode.validators import *
from formencode.compound import *
from formencode.api import Invalid, NoDefault
from formencode.schema import Schema
from formencode import ForEach
from turbogears.i18n import format
from turbogears import util
from turbojson import jsonify
from formencode import validators # Needed to disambiguate the Number validator...
import __builtin__
# Dummy gettext marker: lets the _('...') message literals below be
# collected at class-definition time; the real gettext is reinstated
# further down in this module.
def _(s): return s
# FormEncode should call TurboGears' gettext function with the
# domain "FormEncode".
Validator.gettextargs['domain'] = 'FormEncode'
class TgFancyValidator(FancyValidator):
    """Base FancyValidator whose messages are translated in the
    'TurboGears' gettext domain."""
    gettextargs = {'domain':'TurboGears'}
class Money(TgFancyValidator):
    """Validator for monetary amounts, parsed and formatted with the
    current locale's conventions (turbogears.i18n.format)."""
    messages = {
        'badFormat': _('Invalid number format'),
        'empty': _('Empty values not allowed'),
    }
    def __init__(self, allow_empty=None, *args, **kw):
        # allow_empty is deprecated; translate it to FormEncode's
        # not_empty (with inverted meaning).
        if allow_empty is not None:
            warnings.warn("Use not_empty instead of allow_empty",
                DeprecationWarning, 2)
            not_empty = not allow_empty
            kw["not_empty"] = not_empty
        super(Money, self).__init__(*args, **kw)
    def _to_python(self, value, state):
        """Parse a locale-formatted string; return a float or integer."""
        try:
            return format.parse_decimal(value)
        except ValueError:
            raise Invalid(self.message('badFormat', state), value, state)
    def _from_python(self, value, state):
        """Return a string using the locale's currency grouping."""
        return format.format_currency(value)
class Number(TgFancyValidator):
    """Locale-aware wrapper around formencode's Number validator."""
    def _to_python(self, value, state):
        """Parse a locale-formatted string; return a float or integer."""
        if isinstance(value, basestring):
            try:
                value = format.parse_decimal(value)
            except ValueError:
                # Fall through; the base validator will report the error.
                pass
        # NOTE(review): class-level to_python call relies on FormEncode
        # supporting validator classes used without instantiation -- verify.
        return validators.Number.to_python(value, state)
    def _from_python(self, value, state):
        """Return a string using the locale's digit grouping."""
        dec_places = util.find_precision(value)
        if dec_places > 0:
            return format.format_decimal(value, dec_places)
        else:
            return format.format_number(value)
class DateTimeConverter(TgFancyValidator):
    """Converts Python date and datetime objects into string representation
    and back, using a strftime/strptime format string."""
    messages = {
        'badFormat': _('Invalid datetime format'),
        'empty': _('Empty values not allowed'),
    }
    def __init__(self, format = "%Y/%m/%d %H:%M", allow_empty = None,
            *args, **kwargs):
        # allow_empty is deprecated; translate it to FormEncode's
        # not_empty (with inverted meaning).
        if allow_empty is not None:
            warnings.warn("Use not_empty instead of allow_empty",
                DeprecationWarning, 2)
            not_empty = not allow_empty
            kwargs["not_empty"] = not_empty
        # Bug fix: super() must be passed this class, not its base class
        # (the old super(TgFancyValidator, self) skipped any initializer
        # between this class and TgFancyValidator in the MRO).
        super(DateTimeConverter, self).__init__(*args, **kwargs)
        self.format = format
    def _to_python(self, value, state):
        """Parse a string and return a datetime object."""
        if value and isinstance(value, datetime):
            return value
        else:
            try:
                tpl = time.strptime(value, self.format)
            except ValueError:
                raise Invalid(self.message('badFormat', state), value, state)
            # Shouldn't use time.mktime() because it can give OverflowError,
            # depending on the date (e.g. pre 1970) and underlying C library.
            return datetime(year=tpl.tm_year, month=tpl.tm_mon, day=tpl.tm_mday,
                hour=tpl.tm_hour, minute=tpl.tm_min, second=tpl.tm_sec)
    def _from_python(self, value, state):
        """Format a datetime as a string; pass other values through."""
        if not value:
            return None
        elif isinstance(value, datetime):
            # Python stdlib can only handle dates with year greater than 1900
            if value.year <= 1900:
                return strftime_before1900(value, self.format)
            else:
                return value.strftime(self.format)
        else:
            return value
# formencode trunk contains UnicodeString implementation
# but it is different from ours and was broken at the time.
# remove this impl. when formencode.validators.UnicodeString will be identical to ours.
class UnicodeString(String):
    """String validator that decodes input to unicode and encodes output
    to a byte string.

    Kept locally because formencode's own UnicodeString differed from
    this one and was broken at the time; remove once they are identical.
    """
    encoding = 'utf-8'
    messages = {
        'badEncoding' : _("Invalid data or incorrect encoding"),
    }
    def __init__(self, inputEncoding=None, outputEncoding=None, **kw):
        String.__init__(self, **kw)
        # Separate encodings for decoding input and encoding output,
        # each defaulting to the class-level 'utf-8'.
        self.inputEncoding = inputEncoding or self.encoding
        self.outputEncoding = outputEncoding or self.encoding
    def _to_python(self, value, state):
        """Decode value to a unicode string (empty input becomes u'')."""
        if value:
            if isinstance(value, unicode):
                return value
            if hasattr(value, '__unicode__'):
                return unicode(value)
            try:
                return unicode(value, self.inputEncoding)
            except UnicodeDecodeError:
                raise Invalid(self.message('badEncoding', state), value, state)
        return u''
    def _from_python(self, value, state):
        """Encode value to a byte string using outputEncoding."""
        if hasattr(value, '__unicode__'):
            value = unicode(value)
        if isinstance(value, unicode):
            return value.encode(self.outputEncoding)
        return str(value)
# another formencode workaround,
# see #1464357 on FE bugtracker (http://tinyurl.com/lm9ae).
# Custom version of FieldStorage validator that does not break FE schema validator.
class FieldStorageUploadConverter(TgFancyValidator):
    """Validator for file uploads.

    A cgi.FieldStorage is accepted only when it carries a filename, i.e.
    when a file was actually uploaded; any other value passes through
    unchanged.  This is a custom replacement for FormEncode's validator,
    which broke schema validation (FE bug #1464357).
    """
    def to_python(self, value, state=None):
        if not isinstance(value, cgi.FieldStorage):
            return value
        if value.filename:
            return value
        raise Invalid('invalid', value, state)
# For translated messages that are not wrapped in a Validator.messages
# dictionary, we need to reinstate the Turbogears gettext function under
# the name "_", with the "TurboGears" domain, so that the TurboGears.mo
# file is selected.
import turbogears.i18n
_ = lambda s: turbogears.i18n.gettext(s, domain='TurboGears')
class MultipleSelection(ForEach):
    """ForEach validator for multiple-selection fields, giving a single
    translated error message when any contained value is invalid."""
    # A missing field is an error; an empty field means "nothing selected".
    if_missing = NoDefault
    if_empty = []
    def to_python(self, value, state=None):
        try:
            return super(MultipleSelection, self).to_python(value, state)
        except Invalid:
            raise Invalid(_("Please select at least a value"), value, state)
class Schema(Schema):
    """A Schema validator.

    Deliberately shadows the imported formencode Schema with laxer
    defaults: extra fields are allowed but filtered out, and missing
    keys default to None.
    """
    filter_extra_fields = True
    allow_extra_fields = True
    if_key_missing = None
    def from_python(self, value, state=None):
        # The Schema shouldn't do any from_python conversion because
        # adjust_value already takes care of that for all childs.
        return value
class JSONValidator(TgFancyValidator):
    """Validator converting between JSON text and Python values."""
    def _to_python(self, value, state):
        # JSON text from the request -> Python structure.
        return simplejson.loads(value)
    def _from_python(self, value, state):
        # Python structure -> JSON text.
        return jsonify.encode(value)
_illegal_s = re.compile(r"((^|[^%])(%%)*%s)")
def _findall(text, substr):
# Also finds overlaps
sites = []
i = 0
while 1:
j = text.find(substr, i)
if j == -1:
break
sites.append(j)
i = j+1
return sites
def strftime_before1900(dt, fmt):
    """A strftime implementation that supports proleptic Gregorian dates
    before 1900 (which time.strftime rejects on many platforms).

    @see: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/306860
    Raises TypeError for formats containing %s, which cannot be handled.
    """
    import datetime
    if _illegal_s.search(fmt):
        raise TypeError("This strftime implementation does not handle %s")
    if dt.year > 1900:
        return dt.strftime(fmt)
    year = dt.year
    # For every non-leap year century, advance by
    # 6 years to get into the 28-year repeat cycle
    delta = 2000 - year
    off = 6*(delta // 100 + delta // 400)
    year = year + off
    # Move to around the year 2000
    year = year + ((2000 - year)//28)*28
    timetuple = dt.timetuple()
    # Format with two surrogate years 28 years apart; positions where the
    # year digits appear in both outputs are true year fields, and only
    # those get the real year substituted back in.
    s1 = time.strftime(fmt, (year,) + timetuple[1:])
    sites1 = _findall(s1, str(year))
    s2 = time.strftime(fmt, (year+28,) + timetuple[1:])
    sites2 = _findall(s2, str(year+28))
    sites = []
    for site in sites1:
        if site in sites2:
            sites.append(site)
    s = s1
    syear = "%4d" % (dt.year,)
    for site in sites:
        s = s[:site] + syear + s[site+4:]
    return s
PK 6, , turbogears/finddata.py# Note: you may want to copy this into your setup.py file verbatim, as
# you can't import this from another package, when you don't know if
# that package is installed yet.
import os
import sys
from fnmatch import fnmatchcase
from distutils.util import convert_path
# Provided as an attribute, so you can append to these instead
# of replicating them:
standard_exclude = ('*.py', '*.pyc', '*~', '.*', '*.bak', '*.swp*')
standard_exclude_directories = ('.*', 'CVS', '_darcs', './build',
'./dist', 'EGG-INFO', '*.egg-info')
def find_package_data(
        where='.', package='',
        exclude=standard_exclude,
        exclude_directories=standard_exclude_directories,
        only_in_packages=True,
        show_ignored=False):
    """
    Return a dictionary suitable for use in ``package_data``
    in a distutils ``setup.py`` file.

    The dictionary looks like::

        {'package': [files]}

    Where ``files`` is a list of all the files in that package that
    don't match anything in ``exclude``.

    If ``only_in_packages`` is true, then top-level directories that
    are not packages won't be included (but directories under packages
    will).

    Directories matching any pattern in ``exclude_directories`` will
    be ignored; by default directories with leading ``.``, ``CVS``,
    and ``_darcs`` will be ignored.

    If ``show_ignored`` is true, then all the files that aren't
    included in package data are shown on stderr (for debugging
    purposes).

    Note patterns use wildcards, or can be exact paths (including
    leading ``./``), and all searching is case-insensitive.
    """
    out = {}
    # Iterative directory walk: each entry is
    # (directory, data-path prefix, package name, only_in_packages flag).
    stack = [(convert_path(where), '', package, only_in_packages)]
    while stack:
        where, prefix, package, only_in_packages = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if os.path.isdir(fn):
                bad_name = False
                for pattern in exclude_directories:
                    if (fnmatchcase(name, pattern)
                            or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            print >> sys.stderr, (
                                "Directory %s ignored by pattern %s"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                if os.path.isfile(os.path.join(fn, '__init__.py')):
                    # Subdirectory is a package itself: recurse into it
                    # under its own package name, with a fresh prefix.
                    if not package:
                        new_package = name
                    else:
                        new_package = package + '.' + name
                    stack.append((fn, '', new_package, False))
                else:
                    # Plain data directory: keep the current package and
                    # extend the relative data path prefix.
                    stack.append((fn, prefix + name + '/', package, only_in_packages))
            elif package or not only_in_packages:
                # is a file
                bad_name = False
                for pattern in exclude:
                    if (fnmatchcase(name, pattern)
                            or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            print >> sys.stderr, (
                                "File %s ignored by pattern %s"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                out.setdefault(package, []).append(prefix+name)
    return out
if __name__ == '__main__':
    # When run as a script, print what would be collected for the current
    # directory (and, on stderr, what gets ignored).
    import pprint
    pprint.pprint(
        find_package_data(show_ignored=True))
PK 6Z3 3 turbogears/testutil.pyimport types
import inspect
import logging
import unittest
import cStringIO as StringIO
import Cookie
import cherrypy
try:
import sqlobject
from sqlobject.inheritance import InheritableSQLObject
except ImportError:
sqlobject = None
try:
import sqlalchemy
except ImportError:
sqlalchemy = None
from cherrypy import _cphttptools
from turbogears import database, controllers, startup, validators, config, \
update_config
from turbogears.util import get_model
import os
from os.path import *
cwd = os.getcwd()
# For clean tests, remove all compiled Kid templates
for w in os.walk(cwd):
    # NOTE(review): w[0] is the full directory path, so this comparison
    # only excludes a path literally equal to '.svn' -- presumably
    # '.svn' subdirectories were meant to be skipped; verify.
    if w[0] != '.svn':
        for f in w[2]:
            if f.endswith('.kid'):
                # Derive the compiled template name: foo.kid -> foo.pyc
                f = join(w[0], f[:-3] + 'pyc')
                if exists(f):
                    os.remove(f)
# Override config of all applications with test.cfg
if exists(join(cwd, "test.cfg")):
    # Locate the project's config package so its app.cfg is loaded too.
    modulename = None
    for w in os.walk(cwd):
        if w[0].endswith("config"):
            config_dir = w[0].replace(cwd, "")[1:]
            modulename = "%s.app" % config_dir.replace(os.sep, ".")
            break
    update_config(configfile=join(cwd, "test.cfg"), modulename=modulename)
else:
    # No test.cfg: fall back to an in-memory SQLite database.
    database.set_db_uri("sqlite:///:memory:")
# Tests always use new-style logging and never autoreload.
config.update({"global" : {"tg.new_style_logging" : True}})
config.update({"global" : {"autoreload.on" : False}})
def start_cp():
    """Start the CherryPy server once, in init-only mode (no socket)."""
    if config.get("cherrypy_started", False):
        return
    cherrypy.server.start(serverClass=None, initOnly=True)
    config.update({"cherrypy_started" : True})
# User object used by attach_identity(); None means anonymous.
test_user = None

def set_identity_user(user):
    """Set the user that will be used to build the request's identity."""
    global test_user
    test_user = user
def attach_identity(req):
    """Attach an identity to the request when 'identity.on' is enabled:
    the user registered via set_identity_user(), or anonymous."""
    from turbogears.identity import current_provider
    if not config.get("identity.on", False):
        return
    if test_user:
        identity_obj = current_provider.authenticated_identity(test_user)
    else:
        identity_obj = current_provider.anonymous_identity()
    req.identity = identity_obj
def create_request(request, method="GET", protocol="HTTP/1.1",
        headers=None, rfile=None, clientAddress="127.0.0.1",
        remoteHost="localhost", scheme="http"):
    """Run `request` (a path string) through the CherryPy request
    machinery, starting the server and TurboGears on first use.

    `headers` may be a dict or a prebuilt list of (name, value) pairs;
    a "Host: localhost" header is always added.  (The old version used a
    mutable dict default and appended to caller-supplied header lists in
    place -- both fixed here.)
    """
    start_cp()
    if not rfile:
        rfile = StringIO.StringIO("")
    if headers is None:
        headers = {}
    if isinstance(headers, dict):
        headerList = [(key, value) for key, value in headers.items()]
    else:
        # Copy, so the caller's list is not mutated by the append below.
        headerList = list(headers)
    headerList.append(("Host", "localhost"))
    if not hasattr(cherrypy.root, "started"):
        startup.startTurboGears()
        cherrypy.root.started = True
    req = _cphttptools.Request(clientAddress, 80, remoteHost, scheme)
    cherrypy.serving.request = req
    attach_identity(req)
    cherrypy.serving.response = _cphttptools.Response()
    req.run(" ".join((method, request, protocol)), headerList, rfile)

# Backwards-compatible camelCase alias.
createRequest = create_request
class BrowsingSession(object):
    """Stateful test client: issues requests through create_request()
    while carrying cookies from one response to the next."""
    def __init__(self):
        self.visit = None
        # Body and status of the most recent response.
        self.response, self.status = None, None
        self.cookie = Cookie.SimpleCookie()
    def goto(self, *args, **kwargs):
        """Request a path (arguments as for create_request), sending the
        stored cookies and recording body, status and new cookies."""
        if self.cookie:
            headers = kwargs.setdefault('headers', {})
            headers['Cookie'] = self.cookie.output()
        create_request(*args, **kwargs)
        self.response = cherrypy.response.body[0]
        self.status = cherrypy.response.status
        if cherrypy.response.simple_cookie:
            self.cookie.update(cherrypy.response.simple_cookie)
def _return_directly(output, *args):
return output
class DummySession:
    """Minimal stand-in for a CherryPy session holder."""
    session_storage = dict
    to_be_loaded = None

class DummyRequest:
    """A very simple dummy request object for direct controller calls."""
    remote_host = "127.0.0.1"

    def __init__(self, method='GET', path='/', headers=None):
        self.headers = headers or {}
        self.method = method
        self.path = path
        self.base = ''
        self._session = DummySession()

    def purge__(self):
        # Called by CherryPy when tearing down a request; nothing to do.
        pass
def call(method, *args, **kw):
    """Call a controller method through a dummy request and return only
    its output, discarding the response object."""
    start_cp()
    output, _response = call_with_request(method, DummyRequest(), *args, **kw)
    return output
def call_with_request(method, request, *args, **kw):
    """More fine-grained version of the call method, allowing use of a
    specific request/response pair.

    Temporarily replaces the controllers' output processor with an
    identity function so the raw controller output is returned.
    Returns an (output, response) tuple.
    """
    orig_proc_output = controllers._process_output
    controllers._process_output = _return_directly
    cherrypy.serving.response = _cphttptools.Response()
    cherrypy.serving.request = request
    if not hasattr(request, "identity"):
        attach_identity(request)
    output = None
    try:
        output = method(*args, **kw)
    finally:
        del cherrypy.serving.request
        # Bug fix: restore the monkeypatched output processor even when
        # the controller raises; previously an exception left the
        # identity processor installed for all later requests.
        controllers._process_output = orig_proc_output
    response = cherrypy.serving.response
    return output, response
class DBTest(unittest.TestCase):
    """Base test case that creates all SQLObject tables of the project's
    model before each test and drops them afterwards."""
    model = None

    def _model_classes(self):
        """Yield the concrete SQLObject classes defined by the model."""
        for item in self.model.__dict__.values():
            if isinstance(item, types.TypeType) and issubclass(item,
                    sqlobject.SQLObject) and item != sqlobject.SQLObject \
                    and item != InheritableSQLObject:
                yield item

    def setUp(self):
        if not self.model:
            self.model = get_model()
            if not self.model:
                # Bug fix: this used to raise a plain string, which has
                # been invalid since Python 2.6; raise a real exception.
                raise RuntimeError(
                    "Unable to run database tests without a model")
        for item in self._model_classes():
            item.createTable(ifNotExists=True)

    def tearDown(self):
        database.rollback_all()
        for item in self._model_classes():
            item.dropTable(ifExists=True)
def reset_cp():
    """Forget the mounted root controller so a fresh one can be mounted."""
    cherrypy.root = None
def catch_validation_errors(widget, value):
    """Catches and unpacks validation errors. For testing purposes.

    Returns a (value, errors) tuple: `value` is the converted value (or
    the input unchanged on failure) and `errors` is either an unpacked
    error dict or the Invalid exception itself.
    """
    errors = {}
    try:
        value = widget.validate(value)
    except validators.Invalid, e:
        if hasattr(e, 'unpack_errors'):
            errors = e.unpack_errors()
        else:
            errors = e
    return value, errors
class MemoryListHandler(logging.Handler):
    """Logging handler that collects formatted records in a list so that
    tests can inspect what was logged."""
    def __init__(self):
        logging.Handler.__init__(self, level=logging.DEBUG)
        # Formatted messages captured so far.
        self.log = []
    def emit(self, record):
        # Echo to stdout for debugging and store the formatted message.
        print "Got record: %s" % record
        print "formatted as: %s" % self.format(record)
        self.log.append(self.format(record))
    def print_log(self):
        """Print the captured messages and reset the buffer."""
        print "\n".join(self.log)
        self.log = []
    def get_log(self):
        """Return the captured messages and reset the buffer."""
        log = self.log
        self.log = []
        return log
_memhandler = MemoryListHandler()
_currentcat = None
def capture_log(category):
    """Capture log messages for `category` (a string like 'foo.bar', or a
    list/tuple of such strings) in memory.

    You must call print_log() or get_log() afterwards to reset the
    temporarily modified loggers.
    """
    global _currentcat
    assert not _currentcat
    if not isinstance(category, (list, tuple)):
        category = [category]
    _currentcat = category
    for cat in category:
        captured = logging.getLogger(cat)
        captured.setLevel(logging.DEBUG)
        captured.addHandler(_memhandler)
def _reset_logging():
    """Detach the memory handler from every captured logger and clear the
    record of which categories were captured."""
    global _currentcat
    if not _currentcat:
        return
    for cat in _currentcat:
        logging.getLogger(cat).removeHandler(_memhandler)
    _currentcat = None
def print_log():
    """Print the log captured by capture_log() to stdout, resetting both
    the captured log and the temporarily added handlers."""
    _reset_logging()
    _memhandler.print_log()
def get_log():
    """Return the list of messages captured by capture_log(), resetting
    both the captured log and the temporarily added handlers."""
    _reset_logging()
    return _memhandler.get_log()
def sqlalchemy_cleanup():
    """Tear down SQLAlchemy state between tests: drop the cached engine,
    remove all mappers and reset the shared metadata."""
    database._engine = None
    sqlalchemy.clear_mappers()
    database.metadata.clear()
    database.metadata.dispose()
__all__ = ["create_request", "call", "DBTest", "createRequest",
"attach_identity", "set_identity_user",
"capture_log", "print_log", "get_log", "sqlalchemy_cleanup"]
PK 6zbH8 8 turbogears/util.pyimport os
import sys
import re
import htmlentitydefs
from inspect import getargspec, getargvalues
from itertools import izip, islice, chain, imap
from operator import isSequenceType
import pkg_resources
import setuptools
from cherrypy import request
from turbogears.decorator import decorator
from turbogears import config
# This Enum implementation is from the Python Cookbook and is
# written by Zoran Isailovski:
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/413486
def Enum(*names):
    """Create an enumerated type with the given value names.

    From the Python Cookbook recipe 413486 by Zoran Isailovski.  Values
    compare/hash by ordinal, support ~ (reversal) and bool() (the first
    value is falsy).
    """
    ##assert names, "Empty enums are not supported" # <- Don't like empty enums? Uncomment!
    class EnumClass(object):
        # The enum type itself: iterable and indexable over its values.
        __slots__ = names
        def __iter__(self): return iter(constants)
        def __len__(self): return len(constants)
        def __getitem__(self, i): return constants[i]
        def __repr__(self): return 'Enum' + str(names)
        def __str__(self): return 'enum ' + str(constants)
    enumType = EnumClass()
    class EnumValue(object):
        # A single enum value wrapping its ordinal position.
        __slots__ = ('__value')
        def __init__(self, value): self.__value = value
        Value = property(lambda self: self.__value)
        EnumType = property(lambda self: enumType)
        def __hash__(self): return hash(self.__value)
        def __cmp__(self, other):
            # C fans might want to remove the following assertion
            # to make all enums comparable by ordinal value {;))
            assert self.EnumType is other.EnumType, "Only values from the same enum are comparable"
            return cmp(self.__value, other.__value)
        def __invert__(self): return constants[maximum - self.__value]
        def __nonzero__(self): return bool(self.__value)
        def __repr__(self): return str(names[self.__value])
    maximum = len(names) - 1
    constants = [None] * len(names)
    for i, each in enumerate(names):
        val = EnumValue(i)
        setattr(EnumClass, each, val)
        constants[i] = val
    constants = tuple(constants)
    return enumType
class setlike(list):
    """A list that behaves like an ordered set: add() ignores duplicates
    while preserving first-seen order."""
    def add(self, item):
        """Append item unless it is already present."""
        if item not in self:
            self.append(item)
    def add_all(self, iterable):
        """Add every item from iterable, skipping duplicates."""
        for element in iterable:
            self.add(element)
def get_project_meta(name):
    """Return the path of metadata file `name` inside the first egg-info
    directory found in the current working directory, or None when no
    egg-info directory exists."""
    for entry in os.listdir("./"):
        if entry.lower().endswith("egg-info"):
            return os.path.join(entry, name)
def get_project_config():
    """Select the appropriate project configuration file: 'dev.cfg' when
    running from a source checkout (setup.py present), 'prod.cfg'
    otherwise."""
    if os.path.exists("setup.py"):
        return "dev.cfg"
    return "prod.cfg"
def load_project_config(configfile=None):
    """Tries to update the config, loading project settings from the config
    file specified. If config is C{None} uses L{get_project_config} to locate
    one.  Exits the process when the file does not exist.
    """
    if configfile is None:
        configfile = get_project_config()
    if not os.path.isfile(configfile):
        print 'config file %s not found or is not a file.' % os.path.abspath(configfile)
        sys.exit()
    # Also load the project's own "<package>.config" module settings.
    package = get_package_name()
    config.update_config(configfile=configfile,
        modulename = package + ".config")
def get_package_name():
    """Try to find out the package name of the current directory.

    Checks the configuration first, then a --egg command line option,
    then the project's egg-info metadata.  Returns the first top-level
    package that is not 'locales', or None when nothing is found.
    """
    package = config.get("package")
    if package:
        return package
    if "--egg" in sys.argv:
        projectname = sys.argv[sys.argv.index("--egg")+1]
        egg = pkg_resources.get_distribution(projectname)
        top_level = egg._get_metadata("top_level.txt")
    else:
        fname = get_project_meta('top_level.txt')
        top_level = fname and open(fname) or []
    try:
        for package in top_level:
            package = package.rstrip()
            # Skip empty lines and the 'locales' helper package.
            if package and package != 'locales':
                return package
    finally:
        # Bug fix: the open file handle used to be leaked.
        if hasattr(top_level, 'close'):
            top_level.close()
def get_project_name():
    """Read the project name from the egg-info PKG-INFO file.

    Returns None when no PKG-INFO file is found.
    """
    pkg_info = get_project_meta('PKG-INFO')
    if not pkg_info:
        return None
    # Bug fix: the open file handle used to be leaked.
    f = open(pkg_info)
    try:
        lines = f.readlines()
    finally:
        f.close()
    # The second line reads "Name: <project>"; cut the prefix and newline.
    name = lines[1][6:-1]
    return name.strip()
def get_model():
    """Import and return the project's model module, or None when the
    package cannot be determined or has no model submodule."""
    package_name = get_package_name()
    if not package_name:
        return None
    package = __import__(package_name, {}, {}, ["model"])
    return getattr(package, "model", None)
def ensure_sequence(obj):
    """Wrap obj in a sequence: None becomes [], sequences pass through
    unchanged, anything else becomes a single-element list."""
    if obj is None:
        return []
    if isSequenceType(obj):
        return obj
    return [obj]
def to_kw(func, args, kw, start=0):
    """Convert all applicable arguments to keyword arguments.

    Positional args matching func's named non-default parameters (from
    `start` on) are moved into kw; returns the leftover args and the
    updated kw.
    """
    argnames, defaults = getargspec(func)[::3]
    defaults = ensure_sequence(defaults)
    # Pair each non-default parameter name with the next positional arg.
    kv_pairs = izip(
        islice(argnames, start, len(argnames) - len(defaults)),
        args)
    for k, v in kv_pairs:
        kw[k] = v
    # Positional args beyond the named non-default parameters remain.
    return args[len(argnames)-len(defaults)-start:], kw
def from_kw(func, args, kw, start=0):
    """Extract named positional arguments from keyword arguments.

    Values in kw whose names match func's non-default parameters (from
    `start` on) are popped and placed before args; returns the new
    (args, kw).
    """
    argnames, defaults = getargspec(func)[::3]
    defaults = ensure_sequence(defaults)
    # The 'if name in kw' guard runs before kw.pop, so only present
    # names are popped.
    newargs = [kw.pop(name) for name in islice(argnames, start,
        len(argnames) - len(defaults)) if name in kw]
    newargs.extend(args)
    return newargs, kw
def adapt_call(func, args, kw, start=0):
    """Remove excess arguments so that (args, kw) fits func's signature.

    Keyword arguments func does not accept are dropped unless it takes
    **kw; surplus positional arguments are cut off unless it takes
    *args, in which case parameters with defaults are filled from kw.
    """
    argnames, varargs, kwargs, defaults = getargspec(func)
    defaults = ensure_sequence(defaults)
    del argnames[:start]
    # "_decorator__kwargs"/"_decorator__varargs" are the synthetic names
    # used by turbogears.decorator wrappers; treat them as "not declared".
    if kwargs in (None, "_decorator__kwargs"):
        remove_keys(kw, [key for key in kw.iterkeys() if key not in argnames])
    if varargs in (None, "_decorator__varargs"):
        args = args[:len(argnames) - len(defaults)]
    else:
        # Splice values for default parameters (taken from kw) in between
        # the required positional args and the remaining *args.
        pivot = len(argnames) - len(defaults)
        args = tuple(chain(islice(args, pivot), imap(kw.pop, islice(
            argnames, pivot, None)), islice(args, pivot, None)))
    return args, kw
def call_on_stack(func_name, kw, start=0):
    """Check if a call to a function named func_name, with keyword values
    matching kw, is on the current call stack."""
    try:
        frame = sys._getframe(start+1)
    except ValueError:
        # Fewer frames on the stack than requested.
        return False
    while frame.f_back:
        frame = frame.f_back
        if frame.f_code.co_name == func_name:
            # args: the frame's local variable values.
            args = getargvalues(frame)[3]
            for key in kw.iterkeys():
                try:
                    # NOTE(review): 'continue' advances to the next key
                    # even on a value mismatch, and the check below uses
                    # the leaked loop variable -- the matching logic
                    # looks suspect; verify the intended semantics.
                    if kw[key] != args[key]:
                        continue
                except KeyError, TypeError:
                    # NOTE(review): this Python 2 syntax binds the
                    # exception to the name 'TypeError' instead of
                    # catching both -- should likely read
                    # 'except (KeyError, TypeError):'.
                    continue
                if key or not args:
                    return True
    return False
def arg_index(func, argname):
    """Return the positional index of argname in func's declared
    arguments, or None when func has no such argument."""
    if not has_arg(func, argname):
        return None
    return getargspec(func)[0].index(argname)
def has_arg(func, argname):
    """Check whether func declares a positional argument named argname."""
    declared = getargspec(func)[0]
    return argname in declared
def inject_arg(func, argname, argval, args, kw, start=0):
    """Insert a single argument into a call's (args, kw) pair.

    When the target parameter is positional without a default, the value
    is spliced into args at its declared position; otherwise it is passed
    as a keyword argument.  Returns the new (args, kw).
    """
    argnames, defaults = getargspec(func)[::3]
    defaults = ensure_sequence(defaults)
    pos = arg_index(func, argname)
    if pos is None or pos > len(argnames) - len(defaults) - 1:
        # Not declared, or has a default: pass by keyword.
        kw[argname] = argval
    else:
        pos -= start
        args = tuple(chain(islice(args, pos), (argval,),
            islice(args, pos, None)))
    return args, kw
def inject_args(func, injections, args, kw, start=0):
    """Insert every argname->value pair from `injections` into the call
    and return the resulting (args, kw)."""
    for argname, argval in injections.items():
        args, kw = inject_arg(func, argname, argval, args, kw, start)
    return args, kw
def inject_call(func, injections, *args, **kw):
    """Insert the given arguments into the call, then invoke func."""
    new_args, new_kw = inject_args(func, injections, args, kw)
    return func(*new_args, **new_kw)
def bind_args(**add):
    """Decorator factory: call the decorated function with the given
    arguments preset to fixed values, removing them from the visible
    signature."""
    def entagle(func):
        # Wrapper that injects the bound values into every call.
        return lambda func, *args, **kw: inject_call(func, add, *args, **kw)
    def make_decorator(func):
        argnames, varargs, kwargs, defaults = getargspec(func)
        defaults = list(ensure_sequence(defaults))
        # Drop defaults whose parameter is being bound.
        # NOTE(review): defaults.index(d) finds the first equal default
        # value, so duplicate defaults could map to the wrong parameter;
        # verify.
        defaults = [d for d in defaults if
            argnames[-len(defaults) + defaults.index(d)] not in add]
        argnames = [arg for arg in argnames if arg not in add]
        return decorator(entagle, (argnames, varargs, kwargs, defaults))(func)
    return make_decorator
def remove_keys(dict_, seq):
    """Remove every key in seq from dict_, silently ignoring missing
    ones.  Returns the (mutated) dict_ for convenience."""
    for unwanted in seq:
        dict_.pop(unwanted, None)
    return dict_
def recursive_update(to_dict, from_dict):
    """Recursively update nested dicts in to_dict with values from
    from_dict and return to_dict.

    Non-dict values overwrite; a dict value whose key is missing (or
    non-dict) in to_dict is assigned directly.  (The old code indexed
    to_dict[k] unconditionally and raised KeyError for missing keys.)
    """
    for key, value in from_dict.items():
        if isinstance(value, dict) and isinstance(to_dict.get(key), dict):
            recursive_update(to_dict[key], value)
        else:
            to_dict[key] = value
    return to_dict
def combine_contexts(frames=None, depth=None):
    """Combine contexts (globals, locals) of the given frames.

    `frames` is a list of frame objects; `depth` a list of integer stack
    depths relative to the caller.  Returns a (locals, globals) pair,
    with later frames overriding earlier ones.
    """
    locals_ = {}
    globals_ = {}
    if frames is None:
        frames = []
    if depth is not None:
        # d+1 skips this function's own frame.
        # NOTE(review): in CPython 2 the list comprehension executes in
        # this frame, so the offset holds; re-check if ever ported.
        frames.extend([sys._getframe(d+1) for d in depth])
    for frame in frames:
        locals_.update(frame.f_locals)
        globals_.update(frame.f_globals)
    return locals_, globals_
def request_available():
    """Report whether cherrypy.request is usable in the current context.

    Probes by setting a throwaway attribute; outside a request cycle the
    proxy raises AttributeError.
    """
    try:
        request.tg_dumb_attribute = True
    except AttributeError:
        return False
    return True
def flatten_sequence(seq):
    """Yield the items of an arbitrarily nested sequence, flattened.

    Strings are treated as atomic values, not as sequences of characters.
    """
    for item in seq:
        # isSequenceType presumably comes from the Python 2 operator module
        # imported at the top of the file -- verify.
        if isSequenceType(item) and not isinstance(item, basestring):
            for item in flatten_sequence(item):
                yield item
        else:
            yield item
def load_class(dottedpath):
    """Load a class from a module given in dotted-path notation.

    Eg: load_class("package.module.class").  Returns None when the module
    cannot be imported.

    Based on recipe 16.3 from "Python Cookbook, 2ed., by Alex Martelli,
    Anna Martelli Ravenscroft, and David Ascher (O'Reilly Media, 2005)
    0-596-00797-3"
    """
    assert dottedpath is not None, "dottedpath must not be None"
    parts = dottedpath.split('.')
    modulename, classname = '.'.join(parts[:-1]), parts[-1]
    try:
        # fromlist makes __import__ return the leaf module, not the package.
        module = __import__(modulename, globals(), locals(), [classname])
    except ImportError:
        return None
    return getattr(module, classname)
class Bunch(dict):
    """A dict whose entries are also reachable as attributes."""

    def __setattr__(self, name, value):
        # Attribute assignment writes straight into the mapping.
        self[name] = value

    def __delattr__(self, name):
        try:
            del self[name]
        except KeyError:
            raise AttributeError(name)

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError:
            raise AttributeError(name)


#XXX: Should issue Deprecation warning?
DictObj = Bunch
DictWrapper = Bunch
def parse_http_accept_header(accept):
    """Parse an HTTP Accept header into a quality-ordered list of items.

    accept: the raw header value (e.g. "text/html;q=0.5,text/plain");
        None is tolerated and yields an empty list.
    Returns the item strings sorted by descending q-value; items without
    an explicit ";q=" default to quality 1.
    """
    items = []
    if accept is None:
        return items
    for item in accept.split(","):
        pos = item.find(";q=")
        order = 1
        if pos > -1:
            order = float(item[pos+3:].strip())
            item = item[:pos].strip()
        items.append((item, order))
    # Stable descending sort by quality.  Replaces the cmp-style comparator
    # (items.sort(lambda i1, i2: cmp(i2[1], i1[1]))): cmp and comparator
    # sorts were removed in Python 3; key=/reverse= is equivalent because
    # Python's sort keeps the original order of equal elements either way.
    items.sort(key=lambda pair: pair[1], reverse=True)
    return [pair[0] for pair in items]
def to_unicode(value):
    """
    Convert an encoded string to a unicode string.

    Uses get_template_encoding_default() to guess the source string
    encoding.  Handles turbogears.i18n.lazystring correctly.  Non-str
    values (including already-unicode strings) are returned unchanged.
    """
    if isinstance(value, str):
        # try to make sure we won't get UnicodeDecodeError from the template
        # by converting all encoded strings to Unicode strings
        try:
            # First attempt: implicit ASCII decoding.
            value = unicode(value)
        except UnicodeDecodeError:
            try:
                # Second attempt: the configured template encoding.
                value = unicode(value, get_template_encoding_default())
            except UnicodeDecodeError:
                # fail early
                raise ValueError("Non-unicode string: %r" % value)
    return value
def to_utf8(value):
    """Convert a unicode string to a utf-8 encoded plain string.

    Handles turbogears.i18n.lazystring correctly (via its __unicode__
    hook).  Does nothing to an already encoded (str) value.
    """
    if isinstance(value, str):
        # Already encoded -- leave untouched.
        pass
    elif hasattr(value, '__unicode__'):
        # Covers lazystring and other unicode-convertible objects.
        value = unicode(value)
    if isinstance(value, unicode):
        value = value.encode('utf-8')
    return value
def get_template_encoding_default(engine_name=None):
    """Return the default encoding for template files (Kid, Genshi, etc.).

    engine_name: config-key prefix of the template engine; defaults to the
        engine named by the 'tg.defaultview' setting ('kid' if unset).
    Falls back to 'utf-8' when no '<engine>.encoding' setting exists.
    """
    if engine_name is None:
        engine_name = config.get('tg.defaultview', 'kid')
    return config.get('%s.encoding' % engine_name, 'utf-8')
def find_precision(value):
    """Find the number of decimal places of an arbitrary value.

    Mainly intended for use with turbogears.i18n.format.format_decimal(),
    which must be told the wanted precision:
    format_decimal(some_number, find_precision(some_number))

    Values whose string form does not contain exactly one '.' yield 0.
    """
    text = str(value)
    try:
        integral, fractional = text.split('.')
    except ValueError:
        # zero or several dots -> treat as no fractional part
        return 0
    return len(fractional)
def copy_if_mutable(value, feedback=False):
    """Return a shallow copy of a dict or list; other values unchanged.

    With feedback=True a ``(value, was_mutable)`` tuple is returned
    instead of the bare value.
    """
    if isinstance(value, dict):
        result, was_mutable = value.copy(), True
    elif isinstance(value, list):
        result, was_mutable = value[:], True
    else:
        result, was_mutable = value, False
    if feedback:
        return (result, was_mutable)
    return result
def fixentities(htmltext):
    """Replace named HTML character entities with numerical references.

    Unknown entities and the markup-significant characters &<>'" are left
    untouched; multi-character expansions are inlined as-is.
    note: this won't handle CDATA sections properly
    """
    def repl(m):
        entity = htmlentitydefs.entitydefs.get(m.group(1).lower())
        if not entity:
            return m.group(0)
        elif len(entity) == 1:
            if entity in "&<>'\"":
                return m.group(0)
            # Bug fix: a numeric character reference requires the "&#"
            # prefix; the original emitted e.g. "233;" instead of "&#233;".
            return "&#%d;" % ord(entity)
        else:
            return entity
    return re.sub(r"&(\w+);?", repl, htmltext)
# Public API of this module.  Several listed names (Enum, setlike, url,
# get_package_name, ...) are defined in earlier parts of the module.
__all__ = ["Enum", "setlike",
    "get_package_name", "get_model", "load_project_config",
    "url", "ensure_sequence", "has_arg",
    "DictWrapper", "DictObj", "to_kw", "from_kw", "adapt_call",
    "call_on_stack", "remove_keys",
    "arg_index", "inject_arg", "inject_args", "bind_args",
    "recursive_update", "combine_contexts", "request_available",
    "flatten_sequence", "load_class", "Bunch",
    "parse_http_accept_header",
    "to_unicode", "to_utf8", "get_template_encoding_default",
    "find_precision", "copy_if_mutable"]
PK 6BF BF turbogears/scheduler.py"""Module that provides a cron-like task scheduler.
This task scheduler is designed to be used from inside your own program.
You can schedule Python functions to be called at specific intervals or
days. It uses the standard 'sched' module for the actual task scheduling,
but provides much more:
- repeated tasks (at intervals, or on specific days)
- error handling (exceptions in tasks don't kill the scheduler)
- optional to run scheduler in its own thread or separate process
- optional to run a task in its own thread or separate process
If the threading module is available, you can use the various Threaded
variants of the scheduler and associated tasks. If threading is not
available, you could still use the forked variants. If fork is also
not available, all processing is done in a single process, sequentially.
There are three Scheduler classes:
Scheduler ThreadedScheduler ForkedScheduler
You usually add new tasks to a scheduler using the add_interval_task or
add_daytime_task methods, with the appropriate processmethod argument
to select sequential, threaded or forked processing. NOTE: it is impossible
to add new tasks to a ForkedScheduler, after the scheduler has been started!
For more control you could use one of the following Task classes
and use schedule_task or schedule_task_abs:
IntervalTask ThreadedIntervalTask ForkedIntervalTask
WeekdayTask ThreadedWeekdayTask ForkedWeekdayTask
MonthdayTask ThreadedMonthdayTask ForkedMonthdayTask
Kronos is the Greek God of Time.
This module is based on Kronos by Irmen de Jong, but has been modified
to better fit within TurboGears. Additionally, this module appeared to
no longer be supported/in development.
"""
#
# $Id: kronos.py,v 1.5 2004/10/06 22:43:49 irmen Exp $
#
# (c) Irmen de Jong.
# This is open-source software, released under the MIT Software License:
# http://www.opensource.org/licenses/mit-license.php
#
import os, sys
import sched, time
import traceback
import weakref
from turbogears.util import Enum
# Enumeration of the supported task processing methods: run in-line in the
# scheduler ("sequential"), in a forked child process, or in a thread.
method = Enum("sequential", "forked", "threaded")
class Scheduler:
    """The Scheduler itself.

    Runs tasks sequentially in the calling thread; see ThreadedScheduler
    and ForkedScheduler for concurrent variants.  Tasks receive a weakref
    to this scheduler so they can reschedule themselves.
    """
    def __init__(self):
        # running is polled by __delayfunc() and _run() to allow stop().
        self.running=True
        self.sched = sched.scheduler(time.time, self.__delayfunc)
    def __delayfunc(self, delay):
        # This delay function is basically a time.sleep() that is
        # divided up, so that we can check the self.running flag while delaying.
        # there is an additional check in here to ensure that the top item of
        # the queue hasn't changed
        if delay<10:
            time.sleep(delay)
        else:
            toptime = self.sched.queue[0][0]
            endtime = time.time() + delay
            period=5
            stoptime = endtime - period
            # Sleep in short periods, bailing out early if the scheduler is
            # stopped or a new task became the head of the queue.
            while self.running and stoptime > time.time() and \
                  self.sched.queue[0][0] == toptime:
                time.sleep(period)
            if not self.running or self.sched.queue[0][0] != toptime:
                return
            # Sleep off whatever remains of the requested delay.
            now = time.time()
            if endtime > now:
                time.sleep(endtime - now)
    # Locking hooks -- no-ops here, overridden by ThreadedScheduler.
    def _acquire_lock(self): pass
    def _release_lock(self): pass
    def add_interval_task(self, action, taskname, initialdelay, interval, processmethod, args, kw):
        """Add a new Interval Task to the schedule. A very short initialdelay or one of
        zero cannot be honored, you will see a slight delay before the task is first
        executed. This is because the scheduler needs to pick it up in its loop."""
        if initialdelay<0 or interval<1:
            raise ValueError("delay or interval must be >0")
        # Select the correct IntervalTask class. Not all types may be available!
        if processmethod==method.sequential:
            TaskClass=IntervalTask
        elif processmethod==method.threaded:
            TaskClass = ThreadedIntervalTask
        elif processmethod==method.forked:
            TaskClass = ForkedIntervalTask
        else:
            raise ValueError("invalid processmethod")
        if not args:
            args=[]
        if not kw:
            kw={}
        task = TaskClass(taskname, interval, action, args, kw)
        self.schedule_task(task, initialdelay)
        return task
    def add_daytime_task(self, action, taskname, weekdays, monthdays, timeonday, processmethod, args, kw):
        """Add a new Day Task (Weekday or Monthday) to the schedule.

        timeonday is an (hour, minute) pair; exactly one of weekdays or
        monthdays must be given.
        """
        if weekdays and monthdays:
            raise ValueError("you can only specify weekdays or monthdays, not both")
        if not args:
            args=[]
        if not kw:
            kw={}
        if weekdays:
            # Select the correct WeekdayTask class. Not all types may be available!
            if processmethod==method.sequential:
                TaskClass=WeekdayTask
            elif processmethod==method.threaded:
                TaskClass = ThreadedWeekdayTask
            elif processmethod==method.forked:
                TaskClass = ForkedWeekdayTask
            else:
                raise ValueError("invalid processmethod")
            task=TaskClass(taskname, weekdays, timeonday, action, args, kw)
        if monthdays:
            # Select the correct MonthdayTask class. Not all types may be available!
            if processmethod==method.sequential:
                TaskClass=MonthdayTask
            elif processmethod==method.threaded:
                TaskClass = ThreadedMonthdayTask
            elif processmethod==method.forked:
                TaskClass = ForkedMonthdayTask
            else:
                raise ValueError("invalid processmethod")
            task=TaskClass(taskname, monthdays, timeonday, action, args, kw)
        # First run is scheduled for today if the time has not passed yet.
        firsttime=task.get_schedule_time(True)
        self.schedule_task_abs(task, firsttime)
        return task
    def schedule_task(self, task, delay):
        """Low-level method to add a new task to the scheduler with the given delay (seconds)."""
        if self.running:
            self._acquire_lock() # lock the sched queue, if needed
            try:
                # The task is called with a weakref to this scheduler so it
                # can reschedule itself without keeping the scheduler alive.
                task.event = self.sched.enter(delay, 0, task,
                    (weakref.ref(self),) )
            finally:
                self._release_lock()
        else:
            task.event = self.sched.enter(delay, 0, task,
                (weakref.ref(self),) )
    def schedule_task_abs(self, task, abstime):
        """Low-level method to add a new task to the scheduler for the given absolute time value."""
        if self.running:
            self._acquire_lock() # lock the sched queue, if needed
            try:
                task.event = self.sched.enterabs(abstime, 0, task,
                    (weakref.ref(self),) )
            finally:
                self._release_lock()
        else:
            task.event = self.sched.enterabs(abstime, 0, task,
                (weakref.ref(self),) )
    def start(self):
        """Start the scheduler."""
        self._run()
    def stop(self):
        """Remove all pending tasks and stop the Scheduler."""
        self.running=False
        self.sched.queue[:]=[]
    def cancel(self, task):
        # Cancel a task via the event recorded by schedule_task[_abs]().
        self.sched.cancel(task.event)
    def _run(self):
        # Low-level run method to do the actual scheduling loop.
        while self.running:
            try:
                self.sched.run()
            except Exception,x:
                # Errors in tasks must not kill the scheduler loop.
                print >>sys.stderr, "ERROR DURING SCHEDULER EXECUTION",x
                print >>sys.stderr, "".join(traceback.format_exception(*sys.exc_info()))
                print >>sys.stderr, "-"*20
            # queue is empty; sleep a short while before checking again
            if self.running:
                time.sleep(5)
class Task:
    """Abstract base class of all scheduler tasks.

    Subclasses must implement reschedule(); execute() may be overridden
    to add run-conditions (see WeekdayTask/MonthdayTask).
    """
    def __init__(self, name, action, args, kw):
        """This is an abstract class!

        action is the callable to run, invoked as action(*args, **kw).
        """
        self.name=name
        self.action=action
        self.args=args
        self.kw=kw
    def __call__(self, schedulerref):
        """Execute the task action in the scheduler's thread.

        schedulerref is a weakref to the owning scheduler; it is
        dereferenced to reschedule this task after each run.
        """
        try:
            self.execute()
        except Exception,x:
            self.handle_exception(x)
        self.reschedule(schedulerref())
    def reschedule(self, scheduler):
        """This is an abstract class, this method is defined in one of the sub classes!"""
        raise NotImplementedError("you're using the abstract base class 'Task', use a concrete class instead")
    def execute(self):
        """Execute the actual task."""
        self.action(*self.args, **self.kw)
    def handle_exception(self, exc):
        """Handle any exception that occured during task execution."""
        print >>sys.stderr, "ERROR DURING TASK EXECUTION",exc
        print >>sys.stderr,"".join(traceback.format_exception(*sys.exc_info()))
        print >>sys.stderr,"-"*20
class IntervalTask(Task):
    """A task repeated at a fixed interval, given in seconds."""

    def __init__(self, name, interval, action, args=None, kw=None):
        """Remember the interval and delegate the rest to Task."""
        Task.__init__(self, name, action, args, kw)
        self.interval = interval

    def reschedule(self, scheduler):
        """Queue this task to run again after ``interval`` seconds."""
        scheduler.schedule_task(self, self.interval)
class DayTaskRescheduler:
    """A mixin class that contains the reschedule logic for the DayTasks."""
    def __init__(self, timeonday):
        # timeonday is an (hour, minute) pair in local time.
        self.timeonday=timeonday
    def get_schedule_time(self, today):
        """Calculate the time value at which this task is to be scheduled.

        With today=True the time is today if timeonday has not passed yet,
        otherwise tomorrow; with today=False it is always tomorrow.
        """
        now=list(time.localtime())
        if today:
            # schedule for today. let's see if that is still possible
            if (now[3], now[4]) >= self.timeonday:
                now[2]+=1 # too bad, it will be tomorrow
        else:
            now[2]+=1 # tomorrow
        now[3], now[4] = self.timeonday # set new time on day (hour,minute)
        now[5]=0 # seconds
        # NOTE(review): mktime() is handed a *list* here, which Python 2
        # accepts; Python 3 requires a tuple/struct_time -- confirm the
        # targeted interpreter before reuse.  mktime also normalizes the
        # day-of-month overflow introduced above.
        return time.mktime(now)
    def reschedule(self, scheduler):
        # Reschedule this task according to the daytime for the task.
        # The task is scheduled for tomorrow, for the given daytime.
        # (The execute method in the concrete Task classes will check
        # if the current day is a day on which the task must run).
        abstime = self.get_schedule_time(False)
        scheduler.schedule_task_abs(self, abstime)
class WeekdayTask(DayTaskRescheduler, Task):
    """A task that is called at specific days in a week (1-7), at a fixed time on the day."""
    def __init__(self, name, weekdays, timeonday, action, args=None, kw=None):
        # Validate before initializing either base class.
        if type(timeonday) not in (list,tuple) or len(timeonday) != 2:
            raise TypeError("timeonday must be a 2-tuple (hour,minute)")
        if type(weekdays) not in (list,tuple):
            raise TypeError("weekdays must be a sequence of weekday numbers 1-7 (1 is Monday)")
        DayTaskRescheduler.__init__(self, timeonday)
        Task.__init__(self, name, action, args, kw)
        self.days=weekdays
    def execute(self):
        # This is called every day, at the correct time. We only need to
        # check if we should run this task today (this day of the week).
        # tm_wday is 0-based (Monday=0); the API uses 1-7.
        weekday=time.localtime().tm_wday+1
        if weekday in self.days:
            self.action(*self.args, **self.kw)
class MonthdayTask(DayTaskRescheduler, Task):
    """A task that is called at specific days in a month (1-31), at a fixed time on the day."""
    def __init__(self, name, monthdays, timeonday, action, args=None, kw=None):
        # Validate before initializing either base class.
        if type(timeonday) not in (list,tuple) or len(timeonday) != 2:
            raise TypeError("timeonday must be a 2-tuple (hour,minute)")
        if type(monthdays) not in (list,tuple):
            raise TypeError("monthdays must be a sequence of monthdays numbers 1-31")
        DayTaskRescheduler.__init__(self, timeonday)
        Task.__init__(self, name, action, args, kw)
        self.days=monthdays
    def execute(self):
        # This is called every day, at the correct time. We only need to
        # check if we should run this task today (this day of the month).
        if time.localtime().tm_mday in self.days:
            self.action(*self.args, **self.kw)
try:
    import threading

    class ThreadedScheduler(Scheduler):
        """A Scheduler that runs in its own thread."""
        def __init__(self):
            Scheduler.__init__(self)
            self._lock=threading.Lock() # we require a lock around the task queue
        def start(self):
            # Start method that splices off a thread in which the scheduler will run.
            self.thread=threading.Thread(target=self._run)
            # Daemon thread: the interpreter may exit even while it runs.
            self.thread.setDaemon(True)
            self.thread.start()
        def stop(self):
            # Stop method that stops the scheduler and waits for the thread to finish.
            Scheduler.stop(self)
            try:
                self.thread.join()
            except AttributeError:
                # start() was never called, so there is no thread to join.
                pass
        def _acquire_lock(self):
            self._lock.acquire() # lock the thread's task queue
        def _release_lock(self):
            self._lock.release() # release the thread's task queue

    class ThreadedTaskMixin:
        """A mixin class to make a Task execute in a separate thread."""
        def __call__(self, schedulerref):
            # execute the task action in its own thread.
            threading.Thread(target=self.threadedcall).start()
            self.reschedule(schedulerref())
        def threadedcall(self):
            # This method is run within its own thread, so we have to
            # do the execute() call and exception handling here.
            try:
                self.execute()
            except Exception,x:
                self.handle_exception(x)

    class ThreadedIntervalTask(ThreadedTaskMixin, IntervalTask):
        """Interval Task that executes in its own thread."""
        pass

    class ThreadedWeekdayTask(ThreadedTaskMixin, WeekdayTask):
        """Weekday Task that executes in its own thread."""
        pass

    class ThreadedMonthdayTask(ThreadedTaskMixin, MonthdayTask):
        """Monthday Task that executes in its own thread."""
        pass
except ImportError:
    # threading is not available
    pass
if hasattr(os,"fork"):
    import signal

    class ForkedScheduler(Scheduler):
        """A Scheduler that runs in its own forked process."""
        def __del__(self):
            # Make sure the child does not outlive the parent object.
            if hasattr(self, "childpid"):
                os.kill(self.childpid, signal.SIGKILL)
        def start(self):
            # Start method that forks off a new process in which the scheduler will run.
            pid = os.fork()
            if pid==0:
                # we are the child
                signal.signal(signal.SIGUSR1, self.signalhandler)
                self._run()
                os._exit(0)
            else:
                # we are the parent
                self.childpid=pid
                del self.sched # can no longer insert in the scheduler queue
        def stop(self):
            # Stop method that stops the scheduler and waits for the process to finish.
            os.kill(self.childpid, signal.SIGUSR1)
            os.waitpid(self.childpid,0)
        def signalhandler(self, sig, stack):
            # SIGUSR1 in the child triggers a normal scheduler stop.
            Scheduler.stop(self)

    class ForkedTaskMixin:
        """A mixin class to make a Task execute in a separate process."""
        def __call__(self, schedulerref):
            # execute the task action in its own process.
            pid=os.fork()
            if pid==0:
                # we are the child
                try:
                    self.execute()
                except Exception,x:
                    self.handle_exception(x)
                os._exit(0)
            else:
                # we are the parent
                self.reschedule(schedulerref())

    class ForkedIntervalTask(ForkedTaskMixin, IntervalTask):
        """Interval Task that executes in its own process."""
        pass

    class ForkedWeekdayTask(ForkedTaskMixin, WeekdayTask):
        """Weekday Task that executes in its own process."""
        pass

    class ForkedMonthdayTask(ForkedTaskMixin, MonthdayTask):
        """Monthday Task that executes in its own process."""
        pass
# Lazily created, process-wide scheduler shared by the module-level helpers.
_scheduler_instance = None

def _get_scheduler():
    """Return the shared scheduler, creating a ThreadedScheduler on first use."""
    global _scheduler_instance
    if not _scheduler_instance:
        _scheduler_instance = ThreadedScheduler()
    return _scheduler_instance
def _start_scheduler():
    """Start the shared scheduler (creating it if necessary)."""
    _get_scheduler().start()
def _stop_scheduler():
    """Stop the shared scheduler; a no-op when none was ever created."""
    if not _scheduler_instance:
        return
    _get_scheduler().stop()
def add_interval_task(action, interval, args=None, kw=None,
        initialdelay=0, processmethod=method.threaded, taskname=None):
    """Module-level helper: add an interval task to the shared scheduler."""
    scheduler = _get_scheduler()
    return scheduler.add_interval_task(action=action, interval=interval,
        args=args, kw=kw, initialdelay=initialdelay,
        processmethod=processmethod, taskname=taskname)
def add_weekday_task(action, weekdays, timeonday, args=None, kw=None,
        processmethod=method.threaded, taskname=None):
    """Module-level helper: add a weekday task to the shared scheduler."""
    scheduler = _get_scheduler()
    return scheduler.add_daytime_task(action=action, taskname=taskname,
        weekdays=weekdays, monthdays=None, timeonday=timeonday,
        processmethod=processmethod, args=args, kw=kw)
def add_monthday_task(action, monthdays, timeonday,
        args=None, kw=None,
        processmethod=method.threaded, taskname=None):
    """Module-level helper: add a monthday task to the shared scheduler."""
    scheduler = _get_scheduler()
    return scheduler.add_daytime_task(action=action, taskname=taskname,
        weekdays=None, monthdays=monthdays, timeonday=timeonday,
        processmethod=processmethod, args=args, kw=kw)
def cancel(task):
    """Module-level helper: cancel a task on the shared scheduler."""
    _get_scheduler().cancel(task)
PK 6gʌ turbogears/release.pyversion = "1.0.3.2"
# Release metadata for the TurboGears distribution (consumed by setup.py).
author = "Kevin Dangoor"
email = "dangoor+turbogears@gmail.com"
copyright = "Copyright 2005, 2006 Kevin Dangoor and contributors"
license = "MIT"
PK 6@4` ` turbogears/decorator.pyimport itertools
from copy import copy
from inspect import getargspec, formatargspec
from peak.util.decorators import decorate_assignment
# Inspired by Michele Simionato's decorator library
# http://www.phyast.pitt.edu/~micheles/python/documentation.html
def decorate(func, caller, signature=None):
    """Decorate func with caller.

    Builds (via exec) a wrapper with the same signature as ``func`` -- or
    with ``signature``, if given -- whose body forwards every argument to
    ``caller(func, ...)``.  The wrapper inherits __doc__, __dict__ and
    __module__ from ``func`` and records the decoration chain in
    __composition__ (see func_composition/func_original).
    """
    if signature is not None:
        argnames, varargs, kwargs, defaults = signature
    else:
        argnames, varargs, kwargs, defaults = getargspec(func)
    if defaults is None:
        defaults = ()
    # Parameter list as it appears in the generated def (with defaults).
    parameters = formatargspec(argnames, varargs, kwargs, defaults)[1:-1]
    # Argument list for the forwarding call: each defaulted parameter is
    # rendered as "name=name" by walking argnames with a counter.
    defval = itertools.count(len(argnames)-len(defaults))
    args = formatargspec(argnames, varargs, kwargs, defaults,
        formatvalue=lambda value:"=%s" % (
            argnames[defval.next()]))[1:-1]
    func_str = """
def %s(%s):
    return caller(func, %s)
""" % (func.__name__, parameters, args)
    exec_dict = dict(func=func, caller=caller)
    # Python 2 exec statement; materializes the wrapper in exec_dict.
    exec func_str in exec_dict
    newfunc = exec_dict[func.__name__]
    newfunc.__doc__ = func.__doc__
    newfunc.__dict__ = func.__dict__.copy()
    newfunc.__module__ = func.__module__
    if hasattr(func, "__composition__"):
        newfunc.__composition__ = copy(func.__composition__)
    else:
        newfunc.__composition__ = [func]
    newfunc.__composition__.append(newfunc)
    return newfunc
def decorator(entangler, signature=None):
    """Decorate function with entangler.

    Use signature as signature or preserve original signature if signature
    is None.

    Enables alternative decorator syntax for Python 2.3 as seen in PEAK:
    [my_decorator(foo)]
    def baz():
        pass

    Mind, the decorator needs to be a closure for this syntax to work.
    """
    def callback(frame, name, value, old_locals):
        # Swap the just-assigned function for its decorated version.
        return decorate(value, entangler(value), signature)
    return decorate_assignment(callback, 3)
def weak_signature_decorator(entangler):
    """Decorate function with entangler and change signature to accept
    arbitrary additional arguments.

    Enables alternative decorator syntax for Python 2.3 as seen in PEAK:
    [my_decorator(foo)]
    def baz():
        pass

    Mind, the decorator needs to be a closure for this syntax to work.
    """
    def callback(frame, name, value, old_locals):
        # Like decorator(), but widens the signature via make_weak_signature.
        return decorate(value, entangler(value), make_weak_signature(value))
    return decorate_assignment(callback, 3)
def simple_decorator(caller, signature=None):
    """Return a plain decorator that wraps its target with ``caller``.

    ``signature`` overrides the wrapped function's signature when given.
    """
    return lambda func: decorate(func, caller, signature)
def simple_weak_signature_decorator(caller):
    """Return a decorator wrapping with ``caller`` and a signature widened
    to accept arbitrary additional arguments."""
    return lambda func: decorate(func, caller, make_weak_signature(func))
def make_weak_signature(func):
    """Return func's signature widened to swallow extra args and kwargs.

    Missing *args/**kwargs slots are filled with private placeholder names.
    """
    names, star_args, star_kwargs, defaults = getargspec(func)
    if star_args is None:
        star_args = "_decorator__varargs"
    if star_kwargs is None:
        star_kwargs = "_decorator__kwargs"
    return names, star_args, star_kwargs, defaults
def compose(*decorators):
    """Compose decorators: compose(f, g)(func) == g(f(func)).

    Decorators are applied left to right, exactly as the previous
    reduce-based form did.  The explicit loop replaces ``reduce``, which
    is no longer a builtin on Python 3 and is harder to read here.
    """
    def composed(func):
        for dec in decorators:
            func = dec(func)
        return func
    return composed
def func_composition(func):
    """Return the decorator-wise composition chain of ``func``.

    An undecorated function yields a one-element list containing itself.
    """
    try:
        return func.__composition__
    except AttributeError:
        return [func]
def func_original(func):
    """Return the innermost (undecorated) function of a composition."""
    chain = func_composition(func)
    return chain[0]
def func_id(func):
    """Return the identity of a function.

    Identity is invariant under decorator application (if the decorator is
    created with decorator() or weak_signature_decorator()).
    """
    original = func_original(func)
    return id(original)
def func_eq(f, g):
    """Check if two functions are identical, i.e. share one undecorated original."""
    same = func_id(f) == func_id(g)
    return same
# Public API of the decorator module.
__all__ = ["decorator", "compose", "func_id", "func_eq", "func_original",
    "func_composition", "weak_signature_decorator", "decorate",
    "make_weak_signature", "simple_decorator",
    "simple_weak_signature_decorator",]
PK Մ6O+ + turbogears/errorhandling.pyc;
fFc
@ s; d k Z d k l Z l Z d k l Z d k Z d k l Z l Z l
Z
d k l Z l
Z
l Z l Z l Z l Z l Z d k l Z d k l Z e
i Z d Z e e e Z d Z e i d d
d e Z d Z e i d
d
d e Z d Z d Z d Z e e d Z d Z! e! d Z" e! d Z# e d d d d Z$ d Z% e e% Z% d Z&