>>> a,(b,c) = 1,(2,3)
>>> a
1
>>> b
2
>>> c
3
Kind of like "hey guys, check it out you can just duct tape down the dead-man's switch on this power tool and use it one handed". In Python.
Tuesday, December 28, 2010
the confusingly named setdefault
setdefault has the same behavior as the following function:
>>> def setdefault2(my_dict, key, default):
...     my_dict[key] = my_dict.get(key, default)
...     return my_dict[key]
...
>>> a = {}
>>> setdefault2(a, "1", "2")
'2'
>>> a.setdefault(1, 2)
2
>>> setdefault2(a, "1", "3")
'2'
>>> a.setdefault(1, 3)
2
>>> a
{'1': '2', 1: 2}
This function is useful in similar cases to collections.defaultdict. The difference is that with setdefault you can choose what you want the default to be each time you fetch a key from the dictionary, whereas with a defaultdict the default for missing keys must be fixed at construction time.
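For comparison, here's a rough sketch (not from the original post) of the defaultdict side of that trade-off; the factory is chosen once, while setdefault picks its default per call:
>>> import collections
>>> d = collections.defaultdict(lambda: 2)
>>> d["1"]
2
>>> a = {}
>>> a.setdefault("1", 2)
2
>>> a.setdefault("2", 3)
3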
using finally to do something after return
>>> def foo():
...     try:
...         print "returning"
...         return
...     finally:
...         print "after return"
...
>>> foo()
returning
after return
sequence unpacking in argument list
>>> def foo(a, (b,c)): print a,b,c
...
>>> foo(1,(2,3))
1 2 3
>>> foo(1,())
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "<stdin>", line 1, in foo
ValueError: need more than 0 values to unpack
This feature has probably really confused you if you ever mistakenly put parentheses around the parameters of a lambda. In that case the lambda expression takes only one argument, and that argument is a tuple.
>>> a = lambda (a,b): a+b
>>> a(1,2)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: <lambda>() takes exactly 1 argument (2 given)
>>> a((1,2))
3
Monday, December 27, 2010
change class (not instance) __repr__ with metaclass
>>> class PrintMeta(type):
...     def __repr__(cls):
...         return "Hello World!"
...
>>> class A(object):
...     __metaclass__ = PrintMeta
...
>>> A
Hello World!
metaclass abuse: conjoined classes
>>> class ConjoinedMeta(type):
...     def __new__(cls, name, bases, attrs):
...         for k,v in attrs.items():
...             setattr(ConjoinedMeta, k, v)
...         return ConjoinedMeta
...
>>> class A(object):
...     __metaclass__ = ConjoinedMeta
...     a = "a"
...
>>> A
<class '__main__.ConjoinedMeta'>
>>> class B(object):
...     __metaclass__ = ConjoinedMeta
...
>>> B.a
'a'
>>> B.b = 'b'
>>> A.b
'b'
Monday, December 13, 2010
extend list while iterating
>>> a = [1,2]
>>> for n in a:
...     print n, a, [n-1]*n
...     a += [n-1]*n
...
1 [1, 2] [0]
2 [1, 2, 0] [1, 1]
0 [1, 2, 0, 1, 1] []
1 [1, 2, 0, 1, 1] [0]
1 [1, 2, 0, 1, 1, 0] [0]
0 [1, 2, 0, 1, 1, 0, 0] []
0 [1, 2, 0, 1, 1, 0, 0] []
Note that this only works for lists since they have a defined ordering, so the iterator doesn't "lose its place" if the list is modified. For sets and dicts, the same construct raises a RuntimeError along the lines of "dictionary changed size during iteration".
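For example, roughly the same loop over a dict dies right away (a sketch; the exact message may vary between versions):
>>> d = {1: "a"}
>>> for k in d:
...     d[k + 1] = "b"
...
Traceback (most recent call last):
  File "<stdin>", line 1, in <module>
RuntimeError: dictionary changed size during iteration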
Friday, December 10, 2010
finally block can override enclosed return statement
>>> def print_then_return():
...     try:
...         return "returns"
...     finally:
...         print "finally"
...
>>> print_then_return()
finally
'returns'
>>> def override_return():
...     try:
...         return "returns"
...     finally:
...         return "finally"
...
>>> override_return()
'finally'
Thursday, December 9, 2010
how built-in staticmethod (could be) implemented
>>> class MyStatic(object):
...     def __init__(self, f): self.f = f
...     def __get__(self, obj, type=None): return self.f
...
>>> class C(object):
...     @MyStatic
...     def foo(): print "Hello World!"
...
>>> C.foo()
Hello World!
inheriting from instance
>>> class A(object):
...     def __init__(*args): print args
...
>>> class B(A()): pass
...
(<__main__.A object at 0x2071fd0>,)
(<__main__.A object at 0x2077110>, 'B', (<__main__.A object at 0x2071fd0>,), {'__module__': '__main__'})
>>> B
<__main__.A object at 0x2077110>
>>> B()
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
TypeError: 'A' object is not callable
>>> dir(B)
['__class__', '__delattr__', '__dict__', '__doc__', '__format__', '__getattribute__', '__hash__', '__init__', '__module__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__']
>>> B.__dict__
{}
Wednesday, December 8, 2010
self destructing module
#self_destruct.py
def self_destruct():
    import sys
    for k,m in sys.modules.items():
        if getattr(m, "__file__", None) is __file__:
            print "removing self from sys.modules:", repr(k)
            del sys.modules[k]
>>> import self_destruct
>>> reload(self_destruct)
<module 'self_destruct' from 'self_destruct.pyc'>
>>> self_destruct.self_destruct()
removing self from sys.modules: 'self_destruct'
>>> reload(self_destruct)
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: reload(): module self_destruct not in sys.modules
Tuesday, December 7, 2010
attributedict: dictionary whose keys are also attributes
>>> class attributedict(dict):
...     def __init__(self, *a, **kw):
...         self.__dict__ = self
...         dict.__init__(self, *a, **kw)
...
>>> ad = attributedict()
>>> ad["one"] = 1
>>> ad.one
1
>>> ad.two = 2
>>> ad["two"]
2
>>> attributedict(three=3).three
3
Edit: unsurprisingly, I'm not the first one to come up with this construct. Here is one example that predates my post by 5 years: http://code.activestate.com/recipes/361668/
Monday, December 6, 2010
get full inherited __dict__
Edit: whoops, don't do this: use inspect.getmembers()
>>> def get_full_dict(obj):
...     return dict(reduce(lambda a, b: b + a, [cls.__dict__.items() for cls in obj.__class__.__mro__ if cls.__name__ != "object"], obj.__dict__.items()))
...
>>> class A(object):
... testA = "A"
... testB = "A"
... testb = "A"
...
>>> class B(A):
... testB = "B"
... testb = "B"
...
>>> b = B()
>>> b.testb = "b"
>>> get_full_dict(b)
{'__module__': '__main__', 'testA': 'A', 'testb': 'b', 'testB': 'B', '__dict__': <attribute '__dict__' of 'A' objects>, '__weakref__': <attribute '__weakref__' of 'A' objects>, '__doc__': None}
Returns the full dictionary of names that attribute lookup on obj will search through by default, other than a few special-case variables like __doc__ and __class__. It stops short of "object" just to keep the sample output readable; that filter could easily be removed if desired.
Note the reversed order of a,b in the argument list and body of the lambda expression. This is critical in order that the subclass attributes (which will be contained in a) override the superclass attributes (which will be contained in b).
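For reference, the inspect.getmembers() approach mentioned in the edit above does roughly the same job (a sketch, reusing the b instance from the transcript); it returns (name, value) pairs, inherited attributes included:
>>> import inspect
>>> members = dict(inspect.getmembers(b))
>>> members['testb'], members['testB'], members['testA']
('b', 'B', 'A')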
Sunday, December 5, 2010
call a function with all possible inputs
>>> import itertools
>>> def try_all(f, *possible_inputs):
...     inputs = itertools.product(*possible_inputs)
...     while True: yield f(*(inputs.next()))
...
>>> def fp(a,b,c): return a,b,c
...
>>> [a for a in try_all(fp, [1], ["a","b"], [2.75, 3.5, 1.375])]
[(1, 'a', 2.75), (1, 'a', 3.5), (1, 'a', 1.375), (1, 'b', 2.75), (1, 'b', 3.5), (1, 'b', 1.375)]
The output is kept in generator form rather than being returned as a list since it is likely to be extremely large.
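For instance, a rough sketch of pulling just the first few results with itertools.islice instead of materializing everything:
>>> [r for r in itertools.islice(try_all(fp, range(1000), ["x"], range(1000)), 3)]
[(0, 'x', 0), (0, 'x', 1), (0, 'x', 2)]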
zip(*a) is its own inverse
>>> zip( *[(1, 2, 3), ('a', 'b', 'c')] )
[(1, 'a'), (2, 'b'), (3, 'c')]
>>> zip( *[(1, 'a'), (2, 'b'), (3, 'c')] )
[(1, 2, 3), ('a', 'b', 'c')]
Saturday, December 4, 2010
transform object into different (new-style) class
>>> def transform(obj, cls, *a, **kw):
...     obj.__dict__.clear()
...     obj.__class__ = cls
...     cls.__init__(obj, *a, **kw)
...
>>> class A(object): pass
...
>>> a = A()
>>> type(a)
<class '__main__.A'>
>>> import json
>>> transform(a, json.JSONDecoder)
>>> type(a)
<class 'json.decoder.JSONDecoder'>
>>> a.decode('{"foo":1}')
{u'foo': 1}
Transform an instance of one (new-style) class into another:
1- clear instance dictionary
2- set __class__ to point to new class
3- call the new class's __init__() on the cleaned object
Additional *a and **kw arguments are passed straight through to the new class's __init__().
Friday, December 3, 2010
the many uses of type
Use #1: call type to get the type of something
>>> type(5)
<type 'int'>
>>> type([])
<type 'list'>
Use #2: call type to construct a new type, a more dynamic alternative to the class keyword
>>> class A(object): pass
...
>>> bases = (A,)
>>> attributes = {"one": 1}
>>> B = type("B", bases, attributes)
>>> B().one
1
Use #3: type is the type of (new-style) classes, and built-ins
>>> class A(object): pass
...
>>> type(A)
<type 'type'>
>>> type(int)
<type 'type'>
>>> type(int) == type
True
Use #4: inherit from type to make a meta-class
>>> class Meta(type):
...     def __new__(cls, name, bases, dct):
...         print "hello meta world"
...         return type.__new__(cls, name, bases, dct)
...
>>> class A(object):
...     __metaclass__ = Meta
...
hello meta world
change class inheritance
>>> class A(): pass
...
>>> class B(A): pass
...
>>> b = B()
>>> isinstance(b, A)
True
>>> B.__bases__
(<class __main__.A at 0x7f552b3f36b0>,)
>>> B.__bases__ = ()
>>> isinstance(b, A)
False
The super-classes of a class are available under the __bases__ attribute. These can be modified to dynamically change the class inheritance tree.
class keyword assigns to a variable
>>> g = 1
>>> def f():
...     global g
...     class g(): pass
...
>>> g
1
>>> f()
>>> g
<class __main__.g at 0x7f552b3f3650>
The class keyword in Python is essentially a call to type followed by an assignment of the resulting class object to the class name, so it even interacts with the global keyword the way an ordinary assignment would.
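For instance, a rough sketch of the same function written with an explicit type call and assignment (f2 is just an illustrative name, and this builds a new-style class rather than the old-style one above):
>>> def f2():
...     global g
...     g = type("g", (object,), {})
...
>>> f2()
>>> g
<class '__main__.g'>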