Import Upstream version 2.7.18
134
Doc/includes/capsulethunk.h
Normal file
@@ -0,0 +1,134 @@
#ifndef __CAPSULETHUNK_H
#define __CAPSULETHUNK_H

#if ( (PY_VERSION_HEX < 0x02070000) \
     || ((PY_VERSION_HEX >= 0x03000000) \
      && (PY_VERSION_HEX < 0x03010000)) )

#define __PyCapsule_GetField(capsule, field, default_value) \
    ( PyCapsule_CheckExact(capsule) \
        ? (((PyCObject *)capsule)->field) \
        : (default_value) \
    ) \

#define __PyCapsule_SetField(capsule, field, value) \
    ( PyCapsule_CheckExact(capsule) \
        ? (((PyCObject *)capsule)->field = value), 1 \
        : 0 \
    ) \


#define PyCapsule_Type PyCObject_Type

#define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule))
#define PyCapsule_IsValid(capsule, name) (PyCObject_Check(capsule))


#define PyCapsule_New(pointer, name, destructor) \
    (PyCObject_FromVoidPtr(pointer, destructor))


#define PyCapsule_GetPointer(capsule, name) \
    (PyCObject_AsVoidPtr(capsule))

/* Don't call PyCObject_SetPointer here, it fails if there's a destructor */
#define PyCapsule_SetPointer(capsule, pointer) \
    __PyCapsule_SetField(capsule, cobject, pointer)


#define PyCapsule_GetDestructor(capsule) \
    __PyCapsule_GetField(capsule, destructor)

#define PyCapsule_SetDestructor(capsule, dtor) \
    __PyCapsule_SetField(capsule, destructor, dtor)


/*
 * Sorry, there's simply no place
 * to store a Capsule "name" in a CObject.
 */
#define PyCapsule_GetName(capsule) NULL

static int
PyCapsule_SetName(PyObject *capsule, const char *unused)
{
    unused = unused;
    PyErr_SetString(PyExc_NotImplementedError,
        "can't use PyCapsule_SetName with CObjects");
    return 1;
}




#define PyCapsule_GetContext(capsule) \
    __PyCapsule_GetField(capsule, descr)

#define PyCapsule_SetContext(capsule, context) \
    __PyCapsule_SetField(capsule, descr, context)


static void *
PyCapsule_Import(const char *name, int no_block)
{
    PyObject *object = NULL;
    void *return_value = NULL;
    char *trace;
    size_t name_length = (strlen(name) + 1) * sizeof(char);
    char *name_dup = (char *)PyMem_MALLOC(name_length);

    if (!name_dup) {
        return NULL;
    }

    memcpy(name_dup, name, name_length);

    trace = name_dup;
    while (trace) {
        char *dot = strchr(trace, '.');
        if (dot) {
            *dot++ = '\0';
        }

        if (object == NULL) {
            if (no_block) {
                object = PyImport_ImportModuleNoBlock(trace);
            } else {
                object = PyImport_ImportModule(trace);
                if (!object) {
                    PyErr_Format(PyExc_ImportError,
                        "PyCapsule_Import could not "
                        "import module \"%s\"", trace);
                }
            }
        } else {
            PyObject *object2 = PyObject_GetAttrString(object, trace);
            Py_DECREF(object);
            object = object2;
        }
        if (!object) {
            goto EXIT;
        }

        trace = dot;
    }

    if (PyCObject_Check(object)) {
        PyCObject *cobject = (PyCObject *)object;
        return_value = cobject->cobject;
    } else {
        PyErr_Format(PyExc_AttributeError,
            "PyCapsule_Import \"%s\" is not valid",
            name);
    }

EXIT:
    Py_XDECREF(object);
    if (name_dup) {
        PyMem_FREE(name_dup);
    }
    return return_value;
}

#endif /* #if PY_VERSION_HEX < 0x02070000 */

#endif /* __CAPSULETHUNK_H */
48
Doc/includes/email-alternative.py
Executable file
@@ -0,0 +1,48 @@
#!/usr/bin/env python

import smtplib

from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

# me == my email address
# you == recipient's email address
me = "my@email.com"
you = "your@email.com"

# Create message container - the correct MIME type is multipart/alternative.
msg = MIMEMultipart('alternative')
msg['Subject'] = "Link"
msg['From'] = me
msg['To'] = you

# Create the body of the message (a plain-text and an HTML version).
text = "Hi!\nHow are you?\nHere is the link you wanted:\nhttps://www.python.org"
html = """\
<html>
  <head></head>
  <body>
    <p>Hi!<br>
       How are you?<br>
       Here is the <a href="https://www.python.org">link</a> you wanted.
    </p>
  </body>
</html>
"""

# Record the MIME types of both parts - text/plain and text/html.
part1 = MIMEText(text, 'plain')
part2 = MIMEText(html, 'html')

# Attach parts into message container.
# According to RFC 2046, the last part of a multipart message, in this case
# the HTML message, is best and preferred.
msg.attach(part1)
msg.attach(part2)

# Send the message via local SMTP server.
s = smtplib.SMTP('localhost')
# sendmail function takes 3 arguments: sender's address, recipient's address
# and message to send - here it is sent as one string.
s.sendmail(me, you, msg.as_string())
s.quit()
114
Doc/includes/email-dir.py
Normal file
@@ -0,0 +1,114 @@
#!/usr/bin/env python

"""Send the contents of a directory as a MIME message."""

import os
import sys
import smtplib
# For guessing MIME type based on file name extension
import mimetypes

from optparse import OptionParser

from email import encoders
from email.message import Message
from email.mime.audio import MIMEAudio
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

COMMASPACE = ', '


def main():
    parser = OptionParser(usage="""\
Send the contents of a directory as a MIME message.

Usage: %prog [options]

Unless the -o option is given, the email is sent by forwarding to your local
SMTP server, which then does the normal delivery process. Your local machine
must be running an SMTP server.
""")
    parser.add_option('-d', '--directory',
                      type='string', action='store',
                      help="""Mail the contents of the specified directory,
                      otherwise use the current directory. Only the regular
                      files in the directory are sent, and we don't recurse to
                      subdirectories.""")
    parser.add_option('-o', '--output',
                      type='string', action='store', metavar='FILE',
                      help="""Print the composed message to FILE instead of
                      sending the message to the SMTP server.""")
    parser.add_option('-s', '--sender',
                      type='string', action='store', metavar='SENDER',
                      help='The value of the From: header (required)')
    parser.add_option('-r', '--recipient',
                      type='string', action='append', metavar='RECIPIENT',
                      default=[], dest='recipients',
                      help='A To: header value (at least one required)')
    opts, args = parser.parse_args()
    if not opts.sender or not opts.recipients:
        parser.print_help()
        sys.exit(1)
    directory = opts.directory
    if not directory:
        directory = '.'
    # Create the enclosing (outer) message
    outer = MIMEMultipart()
    outer['Subject'] = 'Contents of directory %s' % os.path.abspath(directory)
    outer['To'] = COMMASPACE.join(opts.recipients)
    outer['From'] = opts.sender
    outer.preamble = 'You will not see this in a MIME-aware mail reader.\n'

    for filename in os.listdir(directory):
        path = os.path.join(directory, filename)
        if not os.path.isfile(path):
            continue
        # Guess the content type based on the file's extension. Encoding
        # will be ignored, although we should check for simple things like
        # gzip'd or compressed files.
        ctype, encoding = mimetypes.guess_type(path)
        if ctype is None or encoding is not None:
            # No guess could be made, or the file is encoded (compressed), so
            # use a generic bag-of-bits type.
            ctype = 'application/octet-stream'
        maintype, subtype = ctype.split('/', 1)
        if maintype == 'text':
            fp = open(path)
            # Note: we should handle calculating the charset
            msg = MIMEText(fp.read(), _subtype=subtype)
            fp.close()
        elif maintype == 'image':
            fp = open(path, 'rb')
            msg = MIMEImage(fp.read(), _subtype=subtype)
            fp.close()
        elif maintype == 'audio':
            fp = open(path, 'rb')
            msg = MIMEAudio(fp.read(), _subtype=subtype)
            fp.close()
        else:
            fp = open(path, 'rb')
            msg = MIMEBase(maintype, subtype)
            msg.set_payload(fp.read())
            fp.close()
            # Encode the payload using Base64
            encoders.encode_base64(msg)
        # Set the filename parameter
        msg.add_header('Content-Disposition', 'attachment', filename=filename)
        outer.attach(msg)
    # Now send or store the message
    composed = outer.as_string()
    if opts.output:
        fp = open(opts.output, 'w')
        fp.write(composed)
        fp.close()
    else:
        s = smtplib.SMTP('localhost')
        s.sendmail(opts.sender, opts.recipients, composed)
        s.quit()


if __name__ == '__main__':
    main()
17
Doc/includes/email-headers.py
Normal file
@@ -0,0 +1,17 @@
# Import the email modules we'll need
from email.parser import Parser

# If the e-mail headers are in a file, uncomment this line:
#headers = Parser().parse(open(messagefile, 'r'))

# Or for parsing headers in a string, use:
headers = Parser().parsestr('From: <user@example.com>\n'
        'To: <someone_else@example.com>\n'
        'Subject: Test message\n'
        '\n'
        'Body would go here\n')

# Now the header items can be accessed as a dictionary:
print 'To: %s' % headers['to']
print 'From: %s' % headers['from']
print 'Subject: %s' % headers['subject']
31
Doc/includes/email-mime.py
Normal file
@@ -0,0 +1,31 @@
# Import smtplib for the actual sending function
import smtplib

# Here are the email package modules we'll need
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart

COMMASPACE = ', '

# Create the container (outer) email message.
msg = MIMEMultipart()
msg['Subject'] = 'Our family reunion'
# me == the sender's email address
# family = the list of all recipients' email addresses
msg['From'] = me
msg['To'] = COMMASPACE.join(family)
msg.preamble = 'Our family reunion'

# Assume we know that the image files are all in PNG format
for file in pngfiles:
    # Open the files in binary mode. Let the MIMEImage class automatically
    # guess the specific image type.
    fp = open(file, 'rb')
    img = MIMEImage(fp.read())
    fp.close()
    msg.attach(img)

# Send the email via our own SMTP server.
s = smtplib.SMTP('localhost')
s.sendmail(me, family, msg.as_string())
s.quit()
24
Doc/includes/email-simple.py
Normal file
@@ -0,0 +1,24 @@
# Import smtplib for the actual sending function
import smtplib

# Import the email modules we'll need
from email.mime.text import MIMEText

# Open a plain text file for reading. For this example, assume that
# the text file contains only ASCII characters.
fp = open(textfile, 'rb')
# Create a text/plain message
msg = MIMEText(fp.read())
fp.close()

# me == the sender's email address
# you == the recipient's email address
msg['Subject'] = 'The contents of %s' % textfile
msg['From'] = me
msg['To'] = you

# Send the message via our own SMTP server, but don't include the
# envelope header.
s = smtplib.SMTP('localhost')
s.sendmail(me, [you], msg.as_string())
s.quit()
68
Doc/includes/email-unpack.py
Normal file
@@ -0,0 +1,68 @@
#!/usr/bin/env python

"""Unpack a MIME message into a directory of files."""

import os
import sys
import email
import errno
import mimetypes

from optparse import OptionParser


def main():
    parser = OptionParser(usage="""\
Unpack a MIME message into a directory of files.

Usage: %prog [options] msgfile
""")
    parser.add_option('-d', '--directory',
                      type='string', action='store',
                      help="""Unpack the MIME message into the named
                      directory, which will be created if it doesn't already
                      exist.""")
    opts, args = parser.parse_args()
    if not opts.directory:
        parser.print_help()
        sys.exit(1)

    try:
        msgfile = args[0]
    except IndexError:
        parser.print_help()
        sys.exit(1)

    try:
        os.mkdir(opts.directory)
    except OSError as e:
        # Ignore directory exists error
        if e.errno != errno.EEXIST:
            raise

    fp = open(msgfile)
    msg = email.message_from_file(fp)
    fp.close()

    counter = 1
    for part in msg.walk():
        # multipart/* are just containers
        if part.get_content_maintype() == 'multipart':
            continue
        # Applications should really sanitize the given filename so that an
        # email message can't be used to overwrite important files
        filename = part.get_filename()
        if not filename:
            ext = mimetypes.guess_extension(part.get_content_type())
            if not ext:
                # Use a generic bag-of-bits extension
                ext = '.bin'
            filename = 'part-%03d%s' % (counter, ext)
        counter += 1
        fp = open(os.path.join(opts.directory, filename), 'wb')
        fp.write(part.get_payload(decode=True))
        fp.close()


if __name__ == '__main__':
    main()
64
Doc/includes/minidom-example.py
Normal file
@@ -0,0 +1,64 @@
import xml.dom.minidom

document = """\
<slideshow>
<title>Demo slideshow</title>
<slide><title>Slide title</title>
<point>This is a demo</point>
<point>Of a program for processing slides</point>
</slide>

<slide><title>Another demo slide</title>
<point>It is important</point>
<point>To have more than</point>
<point>one slide</point>
</slide>
</slideshow>
"""

dom = xml.dom.minidom.parseString(document)

def getText(nodelist):
    rc = []
    for node in nodelist:
        if node.nodeType == node.TEXT_NODE:
            rc.append(node.data)
    return ''.join(rc)

def handleSlideshow(slideshow):
    print "<html>"
    handleSlideshowTitle(slideshow.getElementsByTagName("title")[0])
    slides = slideshow.getElementsByTagName("slide")
    handleToc(slides)
    handleSlides(slides)
    print "</html>"

def handleSlides(slides):
    for slide in slides:
        handleSlide(slide)

def handleSlide(slide):
    handleSlideTitle(slide.getElementsByTagName("title")[0])
    handlePoints(slide.getElementsByTagName("point"))

def handleSlideshowTitle(title):
    print "<title>%s</title>" % getText(title.childNodes)

def handleSlideTitle(title):
    print "<h2>%s</h2>" % getText(title.childNodes)

def handlePoints(points):
    print "<ul>"
    for point in points:
        handlePoint(point)
    print "</ul>"

def handlePoint(point):
    print "<li>%s</li>" % getText(point.childNodes)

def handleToc(slides):
    for slide in slides:
        title = slide.getElementsByTagName("title")[0]
        print "<p>%s</p>" % getText(title.childNodes)

handleSlideshow(dom)
238
Doc/includes/mp_benchmarks.py
Normal file
@@ -0,0 +1,238 @@
|
||||
#
|
||||
# Simple benchmarks for the multiprocessing package
|
||||
#
|
||||
# Copyright (c) 2006-2008, R Oudkerk
|
||||
# All rights reserved.
|
||||
#
|
||||
|
||||
import time, sys, multiprocessing, threading, Queue, gc
|
||||
|
||||
if sys.platform == 'win32':
|
||||
_timer = time.clock
|
||||
else:
|
||||
_timer = time.time
|
||||
|
||||
delta = 1
|
||||
|
||||
|
||||
#### TEST_QUEUESPEED
|
||||
|
||||
def queuespeed_func(q, c, iterations):
|
||||
a = '0' * 256
|
||||
c.acquire()
|
||||
c.notify()
|
||||
c.release()
|
||||
|
||||
for i in xrange(iterations):
|
||||
q.put(a)
|
||||
|
||||
q.put('STOP')
|
||||
|
||||
def test_queuespeed(Process, q, c):
|
||||
elapsed = 0
|
||||
iterations = 1
|
||||
|
||||
while elapsed < delta:
|
||||
iterations *= 2
|
||||
|
||||
p = Process(target=queuespeed_func, args=(q, c, iterations))
|
||||
c.acquire()
|
||||
p.start()
|
||||
c.wait()
|
||||
c.release()
|
||||
|
||||
result = None
|
||||
t = _timer()
|
||||
|
||||
while result != 'STOP':
|
||||
result = q.get()
|
||||
|
||||
elapsed = _timer() - t
|
||||
|
||||
p.join()
|
||||
|
||||
print iterations, 'objects passed through the queue in', elapsed, 'seconds'
|
||||
print 'average number/sec:', iterations/elapsed
|
||||
|
||||
|
||||
#### TEST_PIPESPEED
|
||||
|
||||
def pipe_func(c, cond, iterations):
|
||||
a = '0' * 256
|
||||
cond.acquire()
|
||||
cond.notify()
|
||||
cond.release()
|
||||
|
||||
for i in xrange(iterations):
|
||||
c.send(a)
|
||||
|
||||
c.send('STOP')
|
||||
|
||||
def test_pipespeed():
|
||||
c, d = multiprocessing.Pipe()
|
||||
cond = multiprocessing.Condition()
|
||||
elapsed = 0
|
||||
iterations = 1
|
||||
|
||||
while elapsed < delta:
|
||||
iterations *= 2
|
||||
|
||||
p = multiprocessing.Process(target=pipe_func,
|
||||
args=(d, cond, iterations))
|
||||
cond.acquire()
|
||||
p.start()
|
||||
cond.wait()
|
||||
cond.release()
|
||||
|
||||
result = None
|
||||
t = _timer()
|
||||
|
||||
while result != 'STOP':
|
||||
result = c.recv()
|
||||
|
||||
elapsed = _timer() - t
|
||||
p.join()
|
||||
|
||||
print iterations, 'objects passed through connection in',elapsed,'seconds'
|
||||
print 'average number/sec:', iterations/elapsed
|
||||
|
||||
|
||||
#### TEST_SEQSPEED
|
||||
|
||||
def test_seqspeed(seq):
|
||||
elapsed = 0
|
||||
iterations = 1
|
||||
|
||||
while elapsed < delta:
|
||||
iterations *= 2
|
||||
|
||||
t = _timer()
|
||||
|
||||
for i in xrange(iterations):
|
||||
a = seq[5]
|
||||
|
||||
elapsed = _timer()-t
|
||||
|
||||
print iterations, 'iterations in', elapsed, 'seconds'
|
||||
print 'average number/sec:', iterations/elapsed
|
||||
|
||||
|
||||
#### TEST_LOCK
|
||||
|
||||
def test_lockspeed(l):
|
||||
elapsed = 0
|
||||
iterations = 1
|
||||
|
||||
while elapsed < delta:
|
||||
iterations *= 2
|
||||
|
||||
t = _timer()
|
||||
|
||||
for i in xrange(iterations):
|
||||
l.acquire()
|
||||
l.release()
|
||||
|
||||
elapsed = _timer()-t
|
||||
|
||||
print iterations, 'iterations in', elapsed, 'seconds'
|
||||
print 'average number/sec:', iterations/elapsed
|
||||
|
||||
|
||||
#### TEST_CONDITION
|
||||
|
||||
def conditionspeed_func(c, N):
|
||||
c.acquire()
|
||||
c.notify()
|
||||
|
||||
for i in xrange(N):
|
||||
c.wait()
|
||||
c.notify()
|
||||
|
||||
c.release()
|
||||
|
||||
def test_conditionspeed(Process, c):
|
||||
elapsed = 0
|
||||
iterations = 1
|
||||
|
||||
while elapsed < delta:
|
||||
iterations *= 2
|
||||
|
||||
c.acquire()
|
||||
p = Process(target=conditionspeed_func, args=(c, iterations))
|
||||
p.start()
|
||||
|
||||
c.wait()
|
||||
|
||||
t = _timer()
|
||||
|
||||
for i in xrange(iterations):
|
||||
c.notify()
|
||||
c.wait()
|
||||
|
||||
elapsed = _timer()-t
|
||||
|
||||
c.release()
|
||||
p.join()
|
||||
|
||||
print iterations * 2, 'waits in', elapsed, 'seconds'
|
||||
print 'average number/sec:', iterations * 2 / elapsed
|
||||
|
||||
####
|
||||
|
||||
def test():
|
||||
manager = multiprocessing.Manager()
|
||||
|
||||
gc.disable()
|
||||
|
||||
print '\n\t######## testing Queue.Queue\n'
|
||||
test_queuespeed(threading.Thread, Queue.Queue(),
|
||||
threading.Condition())
|
||||
print '\n\t######## testing multiprocessing.Queue\n'
|
||||
test_queuespeed(multiprocessing.Process, multiprocessing.Queue(),
|
||||
multiprocessing.Condition())
|
||||
print '\n\t######## testing Queue managed by server process\n'
|
||||
test_queuespeed(multiprocessing.Process, manager.Queue(),
|
||||
manager.Condition())
|
||||
print '\n\t######## testing multiprocessing.Pipe\n'
|
||||
test_pipespeed()
|
||||
|
||||
print
|
||||
|
||||
print '\n\t######## testing list\n'
|
||||
test_seqspeed(range(10))
|
||||
print '\n\t######## testing list managed by server process\n'
|
||||
test_seqspeed(manager.list(range(10)))
|
||||
print '\n\t######## testing Array("i", ..., lock=False)\n'
|
||||
test_seqspeed(multiprocessing.Array('i', range(10), lock=False))
|
||||
print '\n\t######## testing Array("i", ..., lock=True)\n'
|
||||
test_seqspeed(multiprocessing.Array('i', range(10), lock=True))
|
||||
|
||||
print
|
||||
|
||||
print '\n\t######## testing threading.Lock\n'
|
||||
test_lockspeed(threading.Lock())
|
||||
print '\n\t######## testing threading.RLock\n'
|
||||
test_lockspeed(threading.RLock())
|
||||
print '\n\t######## testing multiprocessing.Lock\n'
|
||||
test_lockspeed(multiprocessing.Lock())
|
||||
print '\n\t######## testing multiprocessing.RLock\n'
|
||||
test_lockspeed(multiprocessing.RLock())
|
||||
print '\n\t######## testing lock managed by server process\n'
|
||||
test_lockspeed(manager.Lock())
|
||||
print '\n\t######## testing rlock managed by server process\n'
|
||||
test_lockspeed(manager.RLock())
|
||||
|
||||
print
|
||||
|
||||
print '\n\t######## testing threading.Condition\n'
|
||||
test_conditionspeed(threading.Thread, threading.Condition())
|
||||
print '\n\t######## testing multiprocessing.Condition\n'
|
||||
test_conditionspeed(multiprocessing.Process, multiprocessing.Condition())
|
||||
print '\n\t######## testing condition managed by a server process\n'
|
||||
test_conditionspeed(multiprocessing.Process, manager.Condition())
|
||||
|
||||
gc.enable()
|
||||
|
||||
if __name__ == '__main__':
|
||||
multiprocessing.freeze_support()
|
||||
test()
|
||||
101
Doc/includes/mp_newtype.py
Normal file
@@ -0,0 +1,101 @@
|
||||
#
|
||||
# This module shows how to use arbitrary callables with a subclass of
|
||||
# `BaseManager`.
|
||||
#
|
||||
# Copyright (c) 2006-2008, R Oudkerk
|
||||
# All rights reserved.
|
||||
#
|
||||
|
||||
from multiprocessing import freeze_support
|
||||
from multiprocessing.managers import BaseManager, BaseProxy
|
||||
import operator
|
||||
|
||||
##
|
||||
|
||||
class Foo(object):
|
||||
def f(self):
|
||||
print 'you called Foo.f()'
|
||||
def g(self):
|
||||
print 'you called Foo.g()'
|
||||
def _h(self):
|
||||
print 'you called Foo._h()'
|
||||
|
||||
# A simple generator function
|
||||
def baz():
|
||||
for i in xrange(10):
|
||||
yield i*i
|
||||
|
||||
# Proxy type for generator objects
|
||||
class GeneratorProxy(BaseProxy):
|
||||
_exposed_ = ('next', '__next__')
|
||||
def __iter__(self):
|
||||
return self
|
||||
def next(self):
|
||||
return self._callmethod('next')
|
||||
def __next__(self):
|
||||
return self._callmethod('__next__')
|
||||
|
||||
# Function to return the operator module
|
||||
def get_operator_module():
|
||||
return operator
|
||||
|
||||
##
|
||||
|
||||
class MyManager(BaseManager):
|
||||
pass
|
||||
|
||||
# register the Foo class; make `f()` and `g()` accessible via proxy
|
||||
MyManager.register('Foo1', Foo)
|
||||
|
||||
# register the Foo class; make `g()` and `_h()` accessible via proxy
|
||||
MyManager.register('Foo2', Foo, exposed=('g', '_h'))
|
||||
|
||||
# register the generator function baz; use `GeneratorProxy` to make proxies
|
||||
MyManager.register('baz', baz, proxytype=GeneratorProxy)
|
||||
|
||||
# register get_operator_module(); make public functions accessible via proxy
|
||||
MyManager.register('operator', get_operator_module)
|
||||
|
||||
##
|
||||
|
||||
def test():
|
||||
manager = MyManager()
|
||||
manager.start()
|
||||
|
||||
print '-' * 20
|
||||
|
||||
f1 = manager.Foo1()
|
||||
f1.f()
|
||||
f1.g()
|
||||
assert not hasattr(f1, '_h')
|
||||
assert sorted(f1._exposed_) == sorted(['f', 'g'])
|
||||
|
||||
print '-' * 20
|
||||
|
||||
f2 = manager.Foo2()
|
||||
f2.g()
|
||||
f2._h()
|
||||
assert not hasattr(f2, 'f')
|
||||
assert sorted(f2._exposed_) == sorted(['g', '_h'])
|
||||
|
||||
print '-' * 20
|
||||
|
||||
it = manager.baz()
|
||||
for i in it:
|
||||
print '<%d>' % i,
|
||||
print
|
||||
|
||||
print '-' * 20
|
||||
|
||||
op = manager.operator()
|
||||
print 'op.add(23, 45) =', op.add(23, 45)
|
||||
print 'op.pow(2, 94) =', op.pow(2, 94)
|
||||
print 'op.getslice(range(10), 2, 6) =', op.getslice(range(10), 2, 6)
|
||||
print 'op.repeat(range(5), 3) =', op.repeat(range(5), 3)
|
||||
print 'op._exposed_ =', op._exposed_
|
||||
|
||||
##
|
||||
|
||||
if __name__ == '__main__':
|
||||
freeze_support()
|
||||
test()
|
||||
314
Doc/includes/mp_pool.py
Normal file
@@ -0,0 +1,314 @@
|
||||
#
|
||||
# A test of `multiprocessing.Pool` class
|
||||
#
|
||||
# Copyright (c) 2006-2008, R Oudkerk
|
||||
# All rights reserved.
|
||||
#
|
||||
|
||||
import multiprocessing
|
||||
import time
|
||||
import random
|
||||
import sys
|
||||
|
||||
#
|
||||
# Functions used by test code
|
||||
#
|
||||
|
||||
def calculate(func, args):
|
||||
result = func(*args)
|
||||
return '%s says that %s%s = %s' % (
|
||||
multiprocessing.current_process().name,
|
||||
func.__name__, args, result
|
||||
)
|
||||
|
||||
def calculatestar(args):
|
||||
return calculate(*args)
|
||||
|
||||
def mul(a, b):
|
||||
time.sleep(0.5*random.random())
|
||||
return a * b
|
||||
|
||||
def plus(a, b):
|
||||
time.sleep(0.5*random.random())
|
||||
return a + b
|
||||
|
||||
def f(x):
|
||||
return 1.0 / (x-5.0)
|
||||
|
||||
def pow3(x):
|
||||
return x**3
|
||||
|
||||
def noop(x):
|
||||
pass
|
||||
|
||||
#
|
||||
# Test code
|
||||
#
|
||||
|
||||
def test():
|
||||
print 'cpu_count() = %d\n' % multiprocessing.cpu_count()
|
||||
|
||||
#
|
||||
# Create pool
|
||||
#
|
||||
|
||||
PROCESSES = 4
|
||||
print 'Creating pool with %d processes\n' % PROCESSES
|
||||
pool = multiprocessing.Pool(PROCESSES)
|
||||
print 'pool = %s' % pool
|
||||
print
|
||||
|
||||
#
|
||||
# Tests
|
||||
#
|
||||
|
||||
TASKS = [(mul, (i, 7)) for i in range(10)] + \
|
||||
[(plus, (i, 8)) for i in range(10)]
|
||||
|
||||
results = [pool.apply_async(calculate, t) for t in TASKS]
|
||||
imap_it = pool.imap(calculatestar, TASKS)
|
||||
imap_unordered_it = pool.imap_unordered(calculatestar, TASKS)
|
||||
|
||||
print 'Ordered results using pool.apply_async():'
|
||||
for r in results:
|
||||
print '\t', r.get()
|
||||
print
|
||||
|
||||
print 'Ordered results using pool.imap():'
|
||||
for x in imap_it:
|
||||
print '\t', x
|
||||
print
|
||||
|
||||
print 'Unordered results using pool.imap_unordered():'
|
||||
for x in imap_unordered_it:
|
||||
print '\t', x
|
||||
print
|
||||
|
||||
print 'Ordered results using pool.map() --- will block till complete:'
|
||||
for x in pool.map(calculatestar, TASKS):
|
||||
print '\t', x
|
||||
print
|
||||
|
||||
#
|
||||
# Simple benchmarks
|
||||
#
|
||||
|
||||
N = 100000
|
||||
print 'def pow3(x): return x**3'
|
||||
|
||||
t = time.time()
|
||||
A = map(pow3, xrange(N))
|
||||
print '\tmap(pow3, xrange(%d)):\n\t\t%s seconds' % \
|
||||
(N, time.time() - t)
|
||||
|
||||
t = time.time()
|
||||
B = pool.map(pow3, xrange(N))
|
||||
print '\tpool.map(pow3, xrange(%d)):\n\t\t%s seconds' % \
|
||||
(N, time.time() - t)
|
||||
|
||||
t = time.time()
|
||||
C = list(pool.imap(pow3, xrange(N), chunksize=N//8))
|
||||
print '\tlist(pool.imap(pow3, xrange(%d), chunksize=%d)):\n\t\t%s' \
|
||||
' seconds' % (N, N//8, time.time() - t)
|
||||
|
||||
assert A == B == C, (len(A), len(B), len(C))
|
||||
print
|
||||
|
||||
L = [None] * 1000000
|
||||
print 'def noop(x): pass'
|
||||
print 'L = [None] * 1000000'
|
||||
|
||||
t = time.time()
|
||||
A = map(noop, L)
|
||||
print '\tmap(noop, L):\n\t\t%s seconds' % \
|
||||
(time.time() - t)
|
||||
|
||||
t = time.time()
|
||||
B = pool.map(noop, L)
|
||||
print '\tpool.map(noop, L):\n\t\t%s seconds' % \
|
||||
(time.time() - t)
|
||||
|
||||
t = time.time()
|
||||
C = list(pool.imap(noop, L, chunksize=len(L)//8))
|
||||
print '\tlist(pool.imap(noop, L, chunksize=%d)):\n\t\t%s seconds' % \
|
||||
(len(L)//8, time.time() - t)
|
||||
|
||||
assert A == B == C, (len(A), len(B), len(C))
|
||||
print
|
||||
|
||||
del A, B, C, L
|
||||
|
||||
#
|
||||
# Test error handling
|
||||
#
|
||||
|
||||
print 'Testing error handling:'
|
||||
|
||||
try:
|
||||
print pool.apply(f, (5,))
|
||||
except ZeroDivisionError:
|
||||
print '\tGot ZeroDivisionError as expected from pool.apply()'
|
||||
else:
|
||||
raise AssertionError('expected ZeroDivisionError')
|
||||
|
||||
try:
|
||||
print pool.map(f, range(10))
|
||||
except ZeroDivisionError:
|
||||
print '\tGot ZeroDivisionError as expected from pool.map()'
|
||||
else:
|
||||
raise AssertionError('expected ZeroDivisionError')
|
||||
|
||||
try:
|
||||
print list(pool.imap(f, range(10)))
|
||||
except ZeroDivisionError:
|
||||
print '\tGot ZeroDivisionError as expected from list(pool.imap())'
|
||||
else:
|
||||
raise AssertionError('expected ZeroDivisionError')
|
||||
|
||||
it = pool.imap(f, range(10))
|
||||
for i in range(10):
|
||||
try:
|
||||
x = it.next()
|
||||
except ZeroDivisionError:
|
||||
if i == 5:
|
||||
pass
|
||||
except StopIteration:
|
||||
break
|
||||
else:
|
||||
if i == 5:
|
||||
raise AssertionError('expected ZeroDivisionError')
|
||||
|
||||
assert i == 9
|
||||
print '\tGot ZeroDivisionError as expected from IMapIterator.next()'
|
||||
print
|
||||
|
||||
#
|
||||
# Testing timeouts
|
||||
#
|
||||
|
||||
print 'Testing ApplyResult.get() with timeout:',
|
||||
res = pool.apply_async(calculate, TASKS[0])
|
||||
while 1:
|
||||
sys.stdout.flush()
|
||||
try:
|
||||
sys.stdout.write('\n\t%s' % res.get(0.02))
|
||||
break
|
||||
except multiprocessing.TimeoutError:
|
||||
sys.stdout.write('.')
|
||||
print
|
||||
print
|
||||
|
||||
print 'Testing IMapIterator.next() with timeout:',
|
||||
it = pool.imap(calculatestar, TASKS)
|
||||
while 1:
|
||||
sys.stdout.flush()
|
||||
try:
|
||||
sys.stdout.write('\n\t%s' % it.next(0.02))
|
||||
except StopIteration:
|
||||
break
|
||||
except multiprocessing.TimeoutError:
|
||||
sys.stdout.write('.')
|
||||
print
|
||||
print
|
||||
|
||||
#
|
||||
# Testing callback
|
||||
#
|
||||
|
||||
print 'Testing callback:'
|
||||
|
||||
A = []
|
||||
B = [56, 0, 1, 8, 27, 64, 125, 216, 343, 512, 729]
|
||||
|
||||
r = pool.apply_async(mul, (7, 8), callback=A.append)
|
||||
r.wait()
|
||||
|
||||
r = pool.map_async(pow3, range(10), callback=A.extend)
|
||||
r.wait()
|
||||
|
||||
if A == B:
|
||||
print '\tcallbacks succeeded\n'
|
||||
else:
|
||||
print '\t*** callbacks failed\n\t\t%s != %s\n' % (A, B)
|
||||
|
||||
#
|
||||
# Check there are no outstanding tasks
|
||||
#
|
||||
|
||||
assert not pool._cache, 'cache = %r' % pool._cache
|
||||
|
||||
#
|
||||
# Check close() methods
|
||||
#
|
||||
|
||||
print 'Testing close():'
|
||||
|
||||
for worker in pool._pool:
|
||||
assert worker.is_alive()
|
||||
|
||||
result = pool.apply_async(time.sleep, [0.5])
|
||||
pool.close()
|
||||
pool.join()
|
||||
|
||||
assert result.get() is None
|
||||
|
||||
for worker in pool._pool:
|
||||
assert not worker.is_alive()
|
||||
|
||||
print '\tclose() succeeded\n'
|
||||
|
||||
#
|
||||
# Check terminate() method
|
||||
#
|
||||
|
||||
print 'Testing terminate():'
|
||||
|
||||
pool = multiprocessing.Pool(2)
|
||||
DELTA = 0.1
|
||||
ignore = pool.apply(pow3, [2])
|
||||
results = [pool.apply_async(time.sleep, [DELTA]) for i in range(100)]
|
||||
pool.terminate()
|
||||
pool.join()
|
||||
|
||||
for worker in pool._pool:
|
||||
assert not worker.is_alive()
|
||||
|
||||
print '\tterminate() succeeded\n'
|
||||
|
||||
#
|
||||
# Check garbage collection
|
||||
#
|
||||
|
||||
print 'Testing garbage collection:'
|
||||
|
||||
pool = multiprocessing.Pool(2)
|
||||
DELTA = 0.1
|
||||
processes = pool._pool
|
||||
ignore = pool.apply(pow3, [2])
|
||||
results = [pool.apply_async(time.sleep, [DELTA]) for i in range(100)]
|
||||
|
||||
results = pool = None
|
||||
|
||||
time.sleep(DELTA * 2)
|
||||
|
||||
for worker in processes:
|
||||
assert not worker.is_alive()
|
||||
|
||||
print '\tgarbage collection succeeded\n'
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
multiprocessing.freeze_support()
|
||||
|
||||
assert len(sys.argv) in (1, 2)
|
||||
|
||||
if len(sys.argv) == 1 or sys.argv[1] == 'processes':
|
||||
print ' Using processes '.center(79, '-')
|
||||
elif sys.argv[1] == 'threads':
|
||||
print ' Using threads '.center(79, '-')
|
||||
import multiprocessing.dummy as multiprocessing
|
||||
else:
|
||||
print 'Usage:\n\t%s [processes | threads]' % sys.argv[0]
|
||||
raise SystemExit(2)
|
||||
|
||||
test()
|
||||
276
Doc/includes/mp_synchronize.py
Normal file
@@ -0,0 +1,276 @@
|
||||
#
|
||||
# A test file for the `multiprocessing` package
|
||||
#
|
||||
# Copyright (c) 2006-2008, R Oudkerk
|
||||
# All rights reserved.
|
||||
#
|
||||
|
||||
import time, sys, random
|
||||
from Queue import Empty
|
||||
|
||||
import multiprocessing # may get overwritten
|
||||
|
||||
|
||||
#### TEST_VALUE
|
||||
|
||||
def value_func(running, mutex):
|
||||
random.seed()
|
||||
time.sleep(random.random()*4)
|
||||
|
||||
mutex.acquire()
|
||||
print '\n\t\t\t' + str(multiprocessing.current_process()) + ' has finished'
|
||||
running.value -= 1
|
||||
mutex.release()
|
||||
|
||||
def test_value():
|
||||
TASKS = 10
|
||||
running = multiprocessing.Value('i', TASKS)
|
||||
mutex = multiprocessing.Lock()
|
||||
|
||||
for i in range(TASKS):
|
||||
p = multiprocessing.Process(target=value_func, args=(running, mutex))
|
||||
p.start()
|
||||
|
||||
while running.value > 0:
|
||||
time.sleep(0.08)
|
||||
mutex.acquire()
|
||||
print running.value,
|
||||
sys.stdout.flush()
|
||||
mutex.release()
|
||||
|
||||
print
|
||||
print 'No more running processes'
|
||||
|
||||
|
||||
#### TEST_QUEUE
|
||||
|
||||
def queue_func(queue):
|
||||
for i in range(30):
|
||||
time.sleep(0.5 * random.random())
|
||||
queue.put(i*i)
|
||||
queue.put('STOP')
|
||||
|
||||
def test_queue():
|
||||
q = multiprocessing.Queue()
|
||||
|
||||
p = multiprocessing.Process(target=queue_func, args=(q,))
|
||||
p.start()
|
||||
|
||||
o = None
|
||||
while o != 'STOP':
|
||||
try:
|
||||
o = q.get(timeout=0.3)
|
||||
print o,
|
||||
sys.stdout.flush()
|
||||
except Empty:
|
||||
print 'TIMEOUT'
|
||||
|
||||
print
|
||||
|
||||
|
||||
#### TEST_CONDITION
|
||||
|
||||
def condition_func(cond):
|
||||
cond.acquire()
|
||||
print '\t' + str(cond)
|
||||
time.sleep(2)
|
||||
print '\tchild is notifying'
|
||||
print '\t' + str(cond)
|
||||
cond.notify()
|
||||
cond.release()
|
||||
|
||||
def test_condition():
|
||||
cond = multiprocessing.Condition()
|
||||
|
||||
p = multiprocessing.Process(target=condition_func, args=(cond,))
|
||||
print cond
|
||||
|
||||
cond.acquire()
|
||||
print cond
|
||||
cond.acquire()
|
||||
print cond
|
||||
|
||||
p.start()
|
||||
|
||||
print 'main is waiting'
|
||||
cond.wait()
|
||||
print 'main has woken up'
|
||||
|
||||
print cond
|
||||
cond.release()
|
||||
print cond
|
||||
cond.release()
|
||||
|
||||
p.join()
|
||||
print cond
|
||||
|
||||
|
||||
#### TEST_SEMAPHORE
|
||||
|
||||
def semaphore_func(sema, mutex, running):
|
||||
sema.acquire()
|
||||
|
||||
mutex.acquire()
|
||||
running.value += 1
|
||||
print running.value, 'tasks are running'
|
||||
mutex.release()
|
||||
|
||||
random.seed()
|
||||
time.sleep(random.random()*2)
|
||||
|
||||
mutex.acquire()
|
||||
running.value -= 1
|
||||
print '%s has finished' % multiprocessing.current_process()
|
||||
mutex.release()
|
||||
|
||||
sema.release()
|
||||
|
||||
def test_semaphore():
|
||||
sema = multiprocessing.Semaphore(3)
|
||||
mutex = multiprocessing.RLock()
|
||||
running = multiprocessing.Value('i', 0)
|
||||
|
||||
processes = [
|
||||
multiprocessing.Process(target=semaphore_func,
|
||||
args=(sema, mutex, running))
|
||||
for i in range(10)
|
||||
]
|
||||
|
||||
for p in processes:
|
||||
p.start()
|
||||
|
||||
for p in processes:
|
||||
p.join()
|
||||
|
||||
|
||||
#### TEST_JOIN_TIMEOUT
|
||||
|
||||
def join_timeout_func():
|
||||
print '\tchild sleeping'
|
||||
time.sleep(5.5)
|
||||
print '\n\tchild terminating'
|
||||
|
||||
def test_join_timeout():
|
||||
p = multiprocessing.Process(target=join_timeout_func)
|
||||
p.start()
|
||||
|
||||
print 'waiting for process to finish'
|
||||
|
||||
while 1:
|
||||
p.join(timeout=1)
|
||||
if not p.is_alive():
|
||||
break
|
||||
print '.',
|
||||
sys.stdout.flush()
|
||||
|
||||
|
||||
#### TEST_EVENT
|
||||
|
||||
def event_func(event):
|
||||
print '\t%r is waiting' % multiprocessing.current_process()
|
||||
event.wait()
|
||||
print '\t%r has woken up' % multiprocessing.current_process()
|
||||
|
||||
def test_event():
|
||||
event = multiprocessing.Event()
|
||||
|
||||
processes = [multiprocessing.Process(target=event_func, args=(event,))
|
||||
for i in range(5)]
|
||||
|
||||
for p in processes:
|
||||
p.start()
|
||||
|
||||
print 'main is sleeping'
|
||||
time.sleep(2)
|
||||
|
||||
print 'main is setting event'
|
||||
event.set()
|
||||
|
||||
for p in processes:
|
||||
p.join()
|
||||
|
||||
|
||||
#### TEST_SHAREDVALUES
|
||||
|
||||
def sharedvalues_func(values, arrays, shared_values, shared_arrays):
|
||||
for i in range(len(values)):
|
||||
v = values[i][1]
|
||||
sv = shared_values[i].value
|
||||
assert v == sv
|
||||
|
||||
for i in range(len(values)):
|
||||
a = arrays[i][1]
|
||||
sa = list(shared_arrays[i][:])
|
||||
assert a == sa
|
||||
|
||||
print 'Tests passed'
|
||||
|
||||
def test_sharedvalues():
|
||||
values = [
|
||||
('i', 10),
|
||||
('h', -2),
|
||||
('d', 1.25)
|
||||
]
|
||||
arrays = [
|
||||
('i', range(100)),
|
||||
('d', [0.25 * i for i in range(100)]),
|
||||
('H', range(1000))
|
||||
]
|
||||
|
||||
shared_values = [multiprocessing.Value(id, v) for id, v in values]
|
||||
shared_arrays = [multiprocessing.Array(id, a) for id, a in arrays]
|
||||
|
||||
p = multiprocessing.Process(
|
||||
target=sharedvalues_func,
|
||||
args=(values, arrays, shared_values, shared_arrays)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
assert p.exitcode == 0
|
||||
|
||||
|
||||
####
|
||||
|
||||
def test(namespace=multiprocessing):
|
||||
global multiprocessing
|
||||
|
||||
multiprocessing = namespace
|
||||
|
||||
for func in [ test_value, test_queue, test_condition,
|
||||
test_semaphore, test_join_timeout, test_event,
|
||||
test_sharedvalues ]:
|
||||
|
||||
print '\n\t######## %s\n' % func.__name__
|
||||
func()
|
||||
|
||||
ignore = multiprocessing.active_children() # cleanup any old processes
|
||||
if hasattr(multiprocessing, '_debug_info'):
|
||||
info = multiprocessing._debug_info()
|
||||
if info:
|
||||
print info
|
||||
raise ValueError('there should be no positive refcounts left')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
multiprocessing.freeze_support()
|
||||
|
||||
assert len(sys.argv) in (1, 2)
|
||||
|
||||
if len(sys.argv) == 1 or sys.argv[1] == 'processes':
|
||||
print ' Using processes '.center(79, '-')
|
||||
namespace = multiprocessing
|
||||
elif sys.argv[1] == 'manager':
|
||||
print ' Using processes and a manager '.center(79, '-')
|
||||
namespace = multiprocessing.Manager()
|
||||
namespace.Process = multiprocessing.Process
|
||||
namespace.current_process = multiprocessing.current_process
|
||||
namespace.active_children = multiprocessing.active_children
|
||||
elif sys.argv[1] == 'threads':
|
||||
print ' Using threads '.center(79, '-')
|
||||
import multiprocessing.dummy as namespace
|
||||
else:
|
||||
print 'Usage:\n\t%s [processes | manager | threads]' % sys.argv[0]
|
||||
raise SystemExit(2)
|
||||
|
||||
test(namespace)
|
||||
70
Doc/includes/mp_webserver.py
Normal file
@@ -0,0 +1,70 @@
|
||||
#
|
||||
# Example where a pool of http servers share a single listening socket
|
||||
#
|
||||
# On Windows this module depends on the ability to pickle a socket
|
||||
# object so that the worker processes can inherit a copy of the server
|
||||
# object. (We import `multiprocessing.reduction` to enable this pickling.)
|
||||
#
|
||||
# Not sure if we should synchronize access to `socket.accept()` method by
|
||||
# using a process-shared lock -- does not seem to be necessary.
|
||||
#
|
||||
# Copyright (c) 2006-2008, R Oudkerk
|
||||
# All rights reserved.
|
||||
#
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
from multiprocessing import Process, current_process, freeze_support
|
||||
from BaseHTTPServer import HTTPServer
|
||||
from SimpleHTTPServer import SimpleHTTPRequestHandler
|
||||
|
||||
if sys.platform == 'win32':
|
||||
import multiprocessing.reduction # make sockets pickable/inheritable
|
||||
|
||||
|
||||
def note(format, *args):
|
||||
sys.stderr.write('[%s]\t%s\n' % (current_process().name, format%args))
|
||||
|
||||
|
||||
class RequestHandler(SimpleHTTPRequestHandler):
|
||||
# we override log_message() to show which process is handling the request
|
||||
def log_message(self, format, *args):
|
||||
note(format, *args)
|
||||
|
||||
def serve_forever(server):
|
||||
note('starting server')
|
||||
try:
|
||||
server.serve_forever()
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
|
||||
|
||||
def runpool(address, number_of_processes):
|
||||
# create a single server object -- children will each inherit a copy
|
||||
server = HTTPServer(address, RequestHandler)
|
||||
|
||||
# create child processes to act as workers
|
||||
for i in range(number_of_processes-1):
|
||||
Process(target=serve_forever, args=(server,)).start()
|
||||
|
||||
# main process also acts as a worker
|
||||
serve_forever(server)
|
||||
|
||||
|
||||
def test():
|
||||
DIR = os.path.join(os.path.dirname(__file__), '..')
|
||||
ADDRESS = ('localhost', 8000)
|
||||
NUMBER_OF_PROCESSES = 4
|
||||
|
||||
print 'Serving at http://%s:%d using %d worker processes' % \
|
||||
(ADDRESS[0], ADDRESS[1], NUMBER_OF_PROCESSES)
|
||||
print 'To exit press Ctrl-' + ['C', 'Break'][sys.platform=='win32']
|
||||
|
||||
os.chdir(DIR)
|
||||
runpool(ADDRESS, NUMBER_OF_PROCESSES)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
freeze_support()
|
||||
test()
|
||||
90
Doc/includes/mp_workers.py
Normal file
@@ -0,0 +1,90 @@
|
||||
#
|
||||
# Simple example which uses a pool of workers to carry out some tasks.
|
||||
#
|
||||
# Notice that the results will probably not come out of the output
|
||||
# queue in the same in the same order as the corresponding tasks were
|
||||
# put on the input queue. If it is important to get the results back
|
||||
# in the original order then consider using `Pool.map()` or
|
||||
# `Pool.imap()` (which will save on the amount of code needed anyway).
|
||||
#
|
||||
# Copyright (c) 2006-2008, R Oudkerk
|
||||
# All rights reserved.
|
||||
#
|
||||
|
||||
import time
|
||||
import random
|
||||
|
||||
from multiprocessing import Process, Queue, current_process, freeze_support
|
||||
|
||||
#
|
||||
# Function run by worker processes
|
||||
#
|
||||
|
||||
def worker(input, output):
|
||||
for func, args in iter(input.get, 'STOP'):
|
||||
result = calculate(func, args)
|
||||
output.put(result)
|
||||
|
||||
#
|
||||
# Function used to calculate result
|
||||
#
|
||||
|
||||
def calculate(func, args):
|
||||
result = func(*args)
|
||||
return '%s says that %s%s = %s' % \
|
||||
(current_process().name, func.__name__, args, result)
|
||||
|
||||
#
|
||||
# Functions referenced by tasks
|
||||
#
|
||||
|
||||
def mul(a, b):
|
||||
time.sleep(0.5*random.random())
|
||||
return a * b
|
||||
|
||||
def plus(a, b):
|
||||
time.sleep(0.5*random.random())
|
||||
return a + b
|
||||
|
||||
#
|
||||
#
|
||||
#
|
||||
|
||||
def test():
|
||||
NUMBER_OF_PROCESSES = 4
|
||||
TASKS1 = [(mul, (i, 7)) for i in range(20)]
|
||||
TASKS2 = [(plus, (i, 8)) for i in range(10)]
|
||||
|
||||
# Create queues
|
||||
task_queue = Queue()
|
||||
done_queue = Queue()
|
||||
|
||||
# Submit tasks
|
||||
for task in TASKS1:
|
||||
task_queue.put(task)
|
||||
|
||||
# Start worker processes
|
||||
for i in range(NUMBER_OF_PROCESSES):
|
||||
Process(target=worker, args=(task_queue, done_queue)).start()
|
||||
|
||||
# Get and print results
|
||||
print 'Unordered results:'
|
||||
for i in range(len(TASKS1)):
|
||||
print '\t', done_queue.get()
|
||||
|
||||
# Add more tasks using `put()`
|
||||
for task in TASKS2:
|
||||
task_queue.put(task)
|
||||
|
||||
# Get and print some more results
|
||||
for i in range(len(TASKS2)):
|
||||
print '\t', done_queue.get()
|
||||
|
||||
# Tell child processes to stop
|
||||
for i in range(NUMBER_OF_PROCESSES):
|
||||
task_queue.put('STOP')
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
freeze_support()
|
||||
test()
|
||||
53
Doc/includes/noddy.c
Normal file
@@ -0,0 +1,53 @@
|
||||
#include <Python.h>
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD
|
||||
/* Type-specific fields go here. */
|
||||
} noddy_NoddyObject;
|
||||
|
||||
static PyTypeObject noddy_NoddyType = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
"noddy.Noddy", /* tp_name */
|
||||
sizeof(noddy_NoddyObject), /* tp_basicsize */
|
||||
0, /* tp_itemsize */
|
||||
0, /* tp_dealloc */
|
||||
0, /* tp_print */
|
||||
0, /* tp_getattr */
|
||||
0, /* tp_setattr */
|
||||
0, /* tp_compare */
|
||||
0, /* tp_repr */
|
||||
0, /* tp_as_number */
|
||||
0, /* tp_as_sequence */
|
||||
0, /* tp_as_mapping */
|
||||
0, /* tp_hash */
|
||||
0, /* tp_call */
|
||||
0, /* tp_str */
|
||||
0, /* tp_getattro */
|
||||
0, /* tp_setattro */
|
||||
0, /* tp_as_buffer */
|
||||
Py_TPFLAGS_DEFAULT, /* tp_flags */
|
||||
"Noddy objects", /* tp_doc */
|
||||
};
|
||||
|
||||
static PyMethodDef noddy_methods[] = {
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
|
||||
#define PyMODINIT_FUNC void
|
||||
#endif
|
||||
PyMODINIT_FUNC
|
||||
initnoddy(void)
|
||||
{
|
||||
PyObject* m;
|
||||
|
||||
noddy_NoddyType.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&noddy_NoddyType) < 0)
|
||||
return;
|
||||
|
||||
m = Py_InitModule3("noddy", noddy_methods,
|
||||
"Example module that creates an extension type.");
|
||||
|
||||
Py_INCREF(&noddy_NoddyType);
|
||||
PyModule_AddObject(m, "Noddy", (PyObject *)&noddy_NoddyType);
|
||||
}
|
||||
188
Doc/includes/noddy2.c
Normal file
@@ -0,0 +1,188 @@
|
||||
#include <Python.h>
|
||||
#include "structmember.h"
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD
|
||||
PyObject *first; /* first name */
|
||||
PyObject *last; /* last name */
|
||||
int number;
|
||||
} Noddy;
|
||||
|
||||
static void
|
||||
Noddy_dealloc(Noddy* self)
|
||||
{
|
||||
Py_XDECREF(self->first);
|
||||
Py_XDECREF(self->last);
|
||||
Py_TYPE(self)->tp_free((PyObject*)self);
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
Noddy_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
Noddy *self;
|
||||
|
||||
self = (Noddy *)type->tp_alloc(type, 0);
|
||||
if (self != NULL) {
|
||||
self->first = PyString_FromString("");
|
||||
if (self->first == NULL) {
|
||||
Py_DECREF(self);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
self->last = PyString_FromString("");
|
||||
if (self->last == NULL) {
|
||||
Py_DECREF(self);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
self->number = 0;
|
||||
}
|
||||
|
||||
return (PyObject *)self;
|
||||
}
|
||||
|
||||
static int
|
||||
Noddy_init(Noddy *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
PyObject *first=NULL, *last=NULL, *tmp;
|
||||
|
||||
static char *kwlist[] = {"first", "last", "number", NULL};
|
||||
|
||||
if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist,
|
||||
&first, &last,
|
||||
&self->number))
|
||||
return -1;
|
||||
|
||||
if (first) {
|
||||
tmp = self->first;
|
||||
Py_INCREF(first);
|
||||
self->first = first;
|
||||
Py_XDECREF(tmp);
|
||||
}
|
||||
|
||||
if (last) {
|
||||
tmp = self->last;
|
||||
Py_INCREF(last);
|
||||
self->last = last;
|
||||
Py_XDECREF(tmp);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static PyMemberDef Noddy_members[] = {
|
||||
{"first", T_OBJECT_EX, offsetof(Noddy, first), 0,
|
||||
"first name"},
|
||||
{"last", T_OBJECT_EX, offsetof(Noddy, last), 0,
|
||||
"last name"},
|
||||
{"number", T_INT, offsetof(Noddy, number), 0,
|
||||
"noddy number"},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyObject *
|
||||
Noddy_name(Noddy* self)
|
||||
{
|
||||
static PyObject *format = NULL;
|
||||
PyObject *args, *result;
|
||||
|
||||
if (format == NULL) {
|
||||
format = PyString_FromString("%s %s");
|
||||
if (format == NULL)
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (self->first == NULL) {
|
||||
PyErr_SetString(PyExc_AttributeError, "first");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (self->last == NULL) {
|
||||
PyErr_SetString(PyExc_AttributeError, "last");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
args = Py_BuildValue("OO", self->first, self->last);
|
||||
if (args == NULL)
|
||||
return NULL;
|
||||
|
||||
result = PyString_Format(format, args);
|
||||
Py_DECREF(args);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static PyMethodDef Noddy_methods[] = {
|
||||
{"name", (PyCFunction)Noddy_name, METH_NOARGS,
|
||||
"Return the name, combining the first and last name"
|
||||
},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyTypeObject NoddyType = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
"noddy.Noddy", /* tp_name */
|
||||
sizeof(Noddy), /* tp_basicsize */
|
||||
0, /* tp_itemsize */
|
||||
(destructor)Noddy_dealloc, /* tp_dealloc */
|
||||
0, /* tp_print */
|
||||
0, /* tp_getattr */
|
||||
0, /* tp_setattr */
|
||||
0, /* tp_compare */
|
||||
0, /* tp_repr */
|
||||
0, /* tp_as_number */
|
||||
0, /* tp_as_sequence */
|
||||
0, /* tp_as_mapping */
|
||||
0, /* tp_hash */
|
||||
0, /* tp_call */
|
||||
0, /* tp_str */
|
||||
0, /* tp_getattro */
|
||||
0, /* tp_setattro */
|
||||
0, /* tp_as_buffer */
|
||||
Py_TPFLAGS_DEFAULT |
|
||||
Py_TPFLAGS_BASETYPE, /* tp_flags */
|
||||
"Noddy objects", /* tp_doc */
|
||||
0, /* tp_traverse */
|
||||
0, /* tp_clear */
|
||||
0, /* tp_richcompare */
|
||||
0, /* tp_weaklistoffset */
|
||||
0, /* tp_iter */
|
||||
0, /* tp_iternext */
|
||||
Noddy_methods, /* tp_methods */
|
||||
Noddy_members, /* tp_members */
|
||||
0, /* tp_getset */
|
||||
0, /* tp_base */
|
||||
0, /* tp_dict */
|
||||
0, /* tp_descr_get */
|
||||
0, /* tp_descr_set */
|
||||
0, /* tp_dictoffset */
|
||||
(initproc)Noddy_init, /* tp_init */
|
||||
0, /* tp_alloc */
|
||||
Noddy_new, /* tp_new */
|
||||
};
|
||||
|
||||
static PyMethodDef module_methods[] = {
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
|
||||
#define PyMODINIT_FUNC void
|
||||
#endif
|
||||
PyMODINIT_FUNC
|
||||
initnoddy2(void)
|
||||
{
|
||||
PyObject* m;
|
||||
|
||||
if (PyType_Ready(&NoddyType) < 0)
|
||||
return;
|
||||
|
||||
m = Py_InitModule3("noddy2", module_methods,
|
||||
"Example module that creates an extension type.");
|
||||
|
||||
if (m == NULL)
|
||||
return;
|
||||
|
||||
Py_INCREF(&NoddyType);
|
||||
PyModule_AddObject(m, "Noddy", (PyObject *)&NoddyType);
|
||||
}
|
||||
241
Doc/includes/noddy3.c
Normal file
@@ -0,0 +1,241 @@
|
||||
#include <Python.h>
|
||||
#include "structmember.h"
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD
|
||||
PyObject *first;
|
||||
PyObject *last;
|
||||
int number;
|
||||
} Noddy;
|
||||
|
||||
static void
|
||||
Noddy_dealloc(Noddy* self)
|
||||
{
|
||||
Py_XDECREF(self->first);
|
||||
Py_XDECREF(self->last);
|
||||
Py_TYPE(self)->tp_free((PyObject*)self);
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
Noddy_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
Noddy *self;
|
||||
|
||||
self = (Noddy *)type->tp_alloc(type, 0);
|
||||
if (self != NULL) {
|
||||
self->first = PyString_FromString("");
|
||||
if (self->first == NULL) {
|
||||
Py_DECREF(self);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
self->last = PyString_FromString("");
|
||||
if (self->last == NULL) {
|
||||
Py_DECREF(self);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
self->number = 0;
|
||||
}
|
||||
|
||||
return (PyObject *)self;
|
||||
}
|
||||
|
||||
static int
|
||||
Noddy_init(Noddy *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
PyObject *first=NULL, *last=NULL, *tmp;
|
||||
|
||||
static char *kwlist[] = {"first", "last", "number", NULL};
|
||||
|
||||
if (! PyArg_ParseTupleAndKeywords(args, kwds, "|SSi", kwlist,
|
||||
&first, &last,
|
||||
&self->number))
|
||||
return -1;
|
||||
|
||||
if (first) {
|
||||
tmp = self->first;
|
||||
Py_INCREF(first);
|
||||
self->first = first;
|
||||
Py_DECREF(tmp);
|
||||
}
|
||||
|
||||
if (last) {
|
||||
tmp = self->last;
|
||||
Py_INCREF(last);
|
||||
self->last = last;
|
||||
Py_DECREF(tmp);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyMemberDef Noddy_members[] = {
|
||||
{"number", T_INT, offsetof(Noddy, number), 0,
|
||||
"noddy number"},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyObject *
|
||||
Noddy_getfirst(Noddy *self, void *closure)
|
||||
{
|
||||
Py_INCREF(self->first);
|
||||
return self->first;
|
||||
}
|
||||
|
||||
static int
|
||||
Noddy_setfirst(Noddy *self, PyObject *value, void *closure)
|
||||
{
|
||||
if (value == NULL) {
|
||||
PyErr_SetString(PyExc_TypeError, "Cannot delete the first attribute");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (! PyString_Check(value)) {
|
||||
PyErr_SetString(PyExc_TypeError,
|
||||
"The first attribute value must be a string");
|
||||
return -1;
|
||||
}
|
||||
|
||||
Py_DECREF(self->first);
|
||||
Py_INCREF(value);
|
||||
self->first = value;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
Noddy_getlast(Noddy *self, void *closure)
|
||||
{
|
||||
Py_INCREF(self->last);
|
||||
return self->last;
|
||||
}
|
||||
|
||||
static int
|
||||
Noddy_setlast(Noddy *self, PyObject *value, void *closure)
|
||||
{
|
||||
if (value == NULL) {
|
||||
PyErr_SetString(PyExc_TypeError, "Cannot delete the last attribute");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (! PyString_Check(value)) {
|
||||
PyErr_SetString(PyExc_TypeError,
|
||||
"The last attribute value must be a string");
|
||||
return -1;
|
||||
}
|
||||
|
||||
Py_DECREF(self->last);
|
||||
Py_INCREF(value);
|
||||
self->last = value;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyGetSetDef Noddy_getseters[] = {
|
||||
{"first",
|
||||
(getter)Noddy_getfirst, (setter)Noddy_setfirst,
|
||||
"first name",
|
||||
NULL},
|
||||
{"last",
|
||||
(getter)Noddy_getlast, (setter)Noddy_setlast,
|
||||
"last name",
|
||||
NULL},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyObject *
|
||||
Noddy_name(Noddy* self)
|
||||
{
|
||||
static PyObject *format = NULL;
|
||||
PyObject *args, *result;
|
||||
|
||||
if (format == NULL) {
|
||||
format = PyString_FromString("%s %s");
|
||||
if (format == NULL)
|
||||
return NULL;
|
||||
}
|
||||
|
||||
args = Py_BuildValue("OO", self->first, self->last);
|
||||
if (args == NULL)
|
||||
return NULL;
|
||||
|
||||
result = PyString_Format(format, args);
|
||||
Py_DECREF(args);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static PyMethodDef Noddy_methods[] = {
|
||||
{"name", (PyCFunction)Noddy_name, METH_NOARGS,
|
||||
"Return the name, combining the first and last name"
|
||||
},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyTypeObject NoddyType = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
"noddy.Noddy", /* tp_name */
|
||||
sizeof(Noddy), /* tp_basicsize */
|
||||
0, /* tp_itemsize */
|
||||
(destructor)Noddy_dealloc, /* tp_dealloc */
|
||||
0, /* tp_print */
|
||||
0, /* tp_getattr */
|
||||
0, /* tp_setattr */
|
||||
0, /* tp_compare */
|
||||
0, /* tp_repr */
|
||||
0, /* tp_as_number */
|
||||
0, /* tp_as_sequence */
|
||||
0, /* tp_as_mapping */
|
||||
0, /* tp_hash */
|
||||
0, /* tp_call */
|
||||
0, /* tp_str */
|
||||
0, /* tp_getattro */
|
||||
0, /* tp_setattro */
|
||||
0, /* tp_as_buffer */
|
||||
Py_TPFLAGS_DEFAULT |
|
||||
Py_TPFLAGS_BASETYPE, /* tp_flags */
|
||||
"Noddy objects", /* tp_doc */
|
||||
0, /* tp_traverse */
|
||||
0, /* tp_clear */
|
||||
0, /* tp_richcompare */
|
||||
0, /* tp_weaklistoffset */
|
||||
0, /* tp_iter */
|
||||
0, /* tp_iternext */
|
||||
Noddy_methods, /* tp_methods */
|
||||
Noddy_members, /* tp_members */
|
||||
Noddy_getseters, /* tp_getset */
|
||||
0, /* tp_base */
|
||||
0, /* tp_dict */
|
||||
0, /* tp_descr_get */
|
||||
0, /* tp_descr_set */
|
||||
0, /* tp_dictoffset */
|
||||
(initproc)Noddy_init, /* tp_init */
|
||||
0, /* tp_alloc */
|
||||
Noddy_new, /* tp_new */
|
||||
};
|
||||
|
||||
static PyMethodDef module_methods[] = {
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
|
||||
#define PyMODINIT_FUNC void
|
||||
#endif
|
||||
PyMODINIT_FUNC
|
||||
initnoddy3(void)
|
||||
{
|
||||
PyObject* m;
|
||||
|
||||
if (PyType_Ready(&NoddyType) < 0)
|
||||
return;
|
||||
|
||||
m = Py_InitModule3("noddy3", module_methods,
|
||||
"Example module that creates an extension type.");
|
||||
|
||||
if (m == NULL)
|
||||
return;
|
||||
|
||||
Py_INCREF(&NoddyType);
|
||||
PyModule_AddObject(m, "Noddy", (PyObject *)&NoddyType);
|
||||
}
|
||||
224
Doc/includes/noddy4.c
Normal file
@@ -0,0 +1,224 @@
|
||||
#include <Python.h>
|
||||
#include "structmember.h"
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD
|
||||
PyObject *first;
|
||||
PyObject *last;
|
||||
int number;
|
||||
} Noddy;
|
||||
|
||||
static int
|
||||
Noddy_traverse(Noddy *self, visitproc visit, void *arg)
|
||||
{
|
||||
int vret;
|
||||
|
||||
if (self->first) {
|
||||
vret = visit(self->first, arg);
|
||||
if (vret != 0)
|
||||
return vret;
|
||||
}
|
||||
if (self->last) {
|
||||
vret = visit(self->last, arg);
|
||||
if (vret != 0)
|
||||
return vret;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int
|
||||
Noddy_clear(Noddy *self)
|
||||
{
|
||||
PyObject *tmp;
|
||||
|
||||
tmp = self->first;
|
||||
self->first = NULL;
|
||||
Py_XDECREF(tmp);
|
||||
|
||||
tmp = self->last;
|
||||
self->last = NULL;
|
||||
Py_XDECREF(tmp);
|
||||
|
||||
return 0;
|
||||
}
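/* Illustrative sketch, not part of the upstream file: CPython also provides
 * the Py_VISIT and Py_CLEAR macros, which wrap exactly the patterns used in
 * Noddy_traverse and Noddy_clear above. With them the two functions could
 * plausibly be written as:
 *
 *     static int
 *     Noddy_traverse(Noddy *self, visitproc visit, void *arg)
 *     {
 *         Py_VISIT(self->first);
 *         Py_VISIT(self->last);
 *         return 0;
 *     }
 *
 *     static int
 *     Noddy_clear(Noddy *self)
 *     {
 *         Py_CLEAR(self->first);
 *         Py_CLEAR(self->last);
 *         return 0;
 *     }
 */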
|
||||
|
||||
static void
|
||||
Noddy_dealloc(Noddy* self)
|
||||
{
|
||||
PyObject_GC_UnTrack(self);
|
||||
Noddy_clear(self);
|
||||
Py_TYPE(self)->tp_free((PyObject*)self);
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
Noddy_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
Noddy *self;
|
||||
|
||||
self = (Noddy *)type->tp_alloc(type, 0);
|
||||
if (self != NULL) {
|
||||
self->first = PyString_FromString("");
|
||||
if (self->first == NULL) {
|
||||
Py_DECREF(self);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
self->last = PyString_FromString("");
|
||||
if (self->last == NULL) {
|
||||
Py_DECREF(self);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
self->number = 0;
|
||||
}
|
||||
|
||||
return (PyObject *)self;
|
||||
}
|
||||
|
||||
static int
|
||||
Noddy_init(Noddy *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
PyObject *first=NULL, *last=NULL, *tmp;
|
||||
|
||||
static char *kwlist[] = {"first", "last", "number", NULL};
|
||||
|
||||
if (! PyArg_ParseTupleAndKeywords(args, kwds, "|OOi", kwlist,
|
||||
&first, &last,
|
||||
&self->number))
|
||||
return -1;
|
||||
|
||||
if (first) {
|
||||
tmp = self->first;
|
||||
Py_INCREF(first);
|
||||
self->first = first;
|
||||
Py_XDECREF(tmp);
|
||||
}
|
||||
|
||||
if (last) {
|
||||
tmp = self->last;
|
||||
Py_INCREF(last);
|
||||
self->last = last;
|
||||
Py_XDECREF(tmp);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static PyMemberDef Noddy_members[] = {
|
||||
{"first", T_OBJECT_EX, offsetof(Noddy, first), 0,
|
||||
"first name"},
|
||||
{"last", T_OBJECT_EX, offsetof(Noddy, last), 0,
|
||||
"last name"},
|
||||
{"number", T_INT, offsetof(Noddy, number), 0,
|
||||
"noddy number"},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyObject *
|
||||
Noddy_name(Noddy* self)
|
||||
{
|
||||
static PyObject *format = NULL;
|
||||
PyObject *args, *result;
|
||||
|
||||
if (format == NULL) {
|
||||
format = PyString_FromString("%s %s");
|
||||
if (format == NULL)
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (self->first == NULL) {
|
||||
PyErr_SetString(PyExc_AttributeError, "first");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (self->last == NULL) {
|
||||
PyErr_SetString(PyExc_AttributeError, "last");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
args = Py_BuildValue("OO", self->first, self->last);
|
||||
if (args == NULL)
|
||||
return NULL;
|
||||
|
||||
result = PyString_Format(format, args);
|
||||
Py_DECREF(args);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static PyMethodDef Noddy_methods[] = {
|
||||
{"name", (PyCFunction)Noddy_name, METH_NOARGS,
|
||||
"Return the name, combining the first and last name"
|
||||
},
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static PyTypeObject NoddyType = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
"noddy.Noddy", /* tp_name */
|
||||
sizeof(Noddy), /* tp_basicsize */
|
||||
0, /* tp_itemsize */
|
||||
(destructor)Noddy_dealloc, /* tp_dealloc */
|
||||
0, /* tp_print */
|
||||
0, /* tp_getattr */
|
||||
0, /* tp_setattr */
|
||||
0, /* tp_compare */
|
||||
0, /* tp_repr */
|
||||
0, /* tp_as_number */
|
||||
0, /* tp_as_sequence */
|
||||
0, /* tp_as_mapping */
|
||||
0, /* tp_hash */
|
||||
0, /* tp_call */
|
||||
0, /* tp_str */
|
||||
0, /* tp_getattro */
|
||||
0, /* tp_setattro */
|
||||
0, /* tp_as_buffer */
|
||||
Py_TPFLAGS_DEFAULT |
|
||||
Py_TPFLAGS_BASETYPE |
|
||||
Py_TPFLAGS_HAVE_GC, /* tp_flags */
|
||||
"Noddy objects", /* tp_doc */
|
||||
(traverseproc)Noddy_traverse, /* tp_traverse */
|
||||
(inquiry)Noddy_clear, /* tp_clear */
|
||||
0, /* tp_richcompare */
|
||||
0, /* tp_weaklistoffset */
|
||||
0, /* tp_iter */
|
||||
0, /* tp_iternext */
|
||||
Noddy_methods, /* tp_methods */
|
||||
Noddy_members, /* tp_members */
|
||||
0, /* tp_getset */
|
||||
0, /* tp_base */
|
||||
0, /* tp_dict */
|
||||
0, /* tp_descr_get */
|
||||
0, /* tp_descr_set */
|
||||
0, /* tp_dictoffset */
|
||||
(initproc)Noddy_init, /* tp_init */
|
||||
0, /* tp_alloc */
|
||||
Noddy_new, /* tp_new */
|
||||
};
|
||||
|
||||
static PyMethodDef module_methods[] = {
|
||||
{NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
#ifndef PyMODINIT_FUNC /* declarations for DLL import/export */
|
||||
#define PyMODINIT_FUNC void
|
||||
#endif
|
||||
PyMODINIT_FUNC
|
||||
initnoddy4(void)
|
||||
{
|
||||
PyObject* m;
|
||||
|
||||
if (PyType_Ready(&NoddyType) < 0)
|
||||
return;
|
||||
|
||||
m = Py_InitModule3("noddy4", module_methods,
|
||||
"Example module that creates an extension type.");
|
||||
|
||||
if (m == NULL)
|
||||
return;
|
||||
|
||||
Py_INCREF(&NoddyType);
|
||||
PyModule_AddObject(m, "Noddy", (PyObject *)&NoddyType);
|
||||
}
|
||||
68
Doc/includes/run-func.c
Normal file
@@ -0,0 +1,68 @@
|
||||
#include <Python.h>
|
||||
|
||||
int
|
||||
main(int argc, char *argv[])
|
||||
{
|
||||
PyObject *pName, *pModule, *pFunc;
|
||||
PyObject *pArgs, *pValue;
|
||||
int i;
|
||||
|
||||
if (argc < 3) {
|
||||
fprintf(stderr,"Usage: call pythonfile funcname [args]\n");
|
||||
return 1;
|
||||
}
|
||||
|
||||
Py_Initialize();
|
||||
pName = PyString_FromString(argv[1]);
|
||||
/* Error checking of pName left out */
|
||||
|
||||
pModule = PyImport_Import(pName);
|
||||
Py_DECREF(pName);
|
||||
|
||||
if (pModule != NULL) {
|
||||
pFunc = PyObject_GetAttrString(pModule, argv[2]);
|
||||
/* pFunc is a new reference */
|
||||
|
||||
if (pFunc && PyCallable_Check(pFunc)) {
|
||||
pArgs = PyTuple_New(argc - 3);
|
||||
for (i = 0; i < argc - 3; ++i) {
|
||||
pValue = PyInt_FromLong(atoi(argv[i + 3]));
|
||||
if (!pValue) {
|
||||
Py_DECREF(pArgs);
|
||||
Py_DECREF(pModule);
|
||||
fprintf(stderr, "Cannot convert argument\n");
|
||||
return 1;
|
||||
}
|
||||
/* pValue reference stolen here: */
|
||||
PyTuple_SetItem(pArgs, i, pValue);
|
||||
}
|
||||
pValue = PyObject_CallObject(pFunc, pArgs);
|
||||
Py_DECREF(pArgs);
|
||||
if (pValue != NULL) {
|
||||
printf("Result of call: %ld\n", PyInt_AsLong(pValue));
|
||||
Py_DECREF(pValue);
|
||||
}
|
||||
else {
|
||||
Py_DECREF(pFunc);
|
||||
Py_DECREF(pModule);
|
||||
PyErr_Print();
|
||||
fprintf(stderr,"Call failed\n");
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (PyErr_Occurred())
|
||||
PyErr_Print();
|
||||
fprintf(stderr, "Cannot find function \"%s\"\n", argv[2]);
|
||||
}
|
||||
Py_XDECREF(pFunc);
|
||||
Py_DECREF(pModule);
|
||||
}
|
||||
else {
|
||||
PyErr_Print();
|
||||
fprintf(stderr, "Failed to load \"%s\"\n", argv[1]);
|
||||
return 1;
|
||||
}
|
||||
Py_Finalize();
|
||||
return 0;
|
||||
}
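/* Illustrative invocation, not part of the upstream file: it assumes the
 * compiled binary is named "call" and that a module multiply.py defining
 * multiply(a, b) -> a*b is importable from the current directory (both
 * names are assumptions introduced here for the example):
 *
 *     $ ./call multiply multiply 3 2
 *     Result of call: 6
 */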
|
||||
9
Doc/includes/setup.py
Normal file
@@ -0,0 +1,9 @@
|
||||
from distutils.core import setup, Extension
|
||||
setup(name="noddy", version="1.0",
|
||||
ext_modules=[
|
||||
Extension("noddy", ["noddy.c"]),
|
||||
Extension("noddy2", ["noddy2.c"]),
|
||||
Extension("noddy3", ["noddy3.c"]),
|
||||
Extension("noddy4", ["noddy4.c"]),
|
||||
Extension("shoddy", ["shoddy.c"]),
|
||||
])
|
||||
90
Doc/includes/shoddy.c
Normal file
@@ -0,0 +1,90 @@
|
||||
#include <Python.h>
|
||||
|
||||
typedef struct {
|
||||
PyListObject list;
|
||||
int state;
|
||||
} Shoddy;
|
||||
|
||||
|
||||
static PyObject *
|
||||
Shoddy_increment(Shoddy *self, PyObject *unused)
|
||||
{
|
||||
self->state++;
|
||||
return PyInt_FromLong(self->state);
|
||||
}
|
||||
|
||||
|
||||
static PyMethodDef Shoddy_methods[] = {
|
||||
{"increment", (PyCFunction)Shoddy_increment, METH_NOARGS,
|
||||
PyDoc_STR("increment state counter")},
|
||||
{NULL, NULL},
|
||||
};
|
||||
|
||||
static int
|
||||
Shoddy_init(Shoddy *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
if (PyList_Type.tp_init((PyObject *)self, args, kwds) < 0)
|
||||
return -1;
|
||||
self->state = 0;
|
||||
return 0;
|
||||
}
|
||||
|
||||
|
||||
static PyTypeObject ShoddyType = {
|
||||
PyVarObject_HEAD_INIT(NULL, 0)
|
||||
"shoddy.Shoddy", /* tp_name */
|
||||
sizeof(Shoddy), /* tp_basicsize */
|
||||
0, /* tp_itemsize */
|
||||
0, /* tp_dealloc */
|
||||
0, /* tp_print */
|
||||
0, /* tp_getattr */
|
||||
0, /* tp_setattr */
|
||||
0, /* tp_compare */
|
||||
0, /* tp_repr */
|
||||
0, /* tp_as_number */
|
||||
0, /* tp_as_sequence */
|
||||
0, /* tp_as_mapping */
|
||||
0, /* tp_hash */
|
||||
0, /* tp_call */
|
||||
0, /* tp_str */
|
||||
0, /* tp_getattro */
|
||||
0, /* tp_setattro */
|
||||
0, /* tp_as_buffer */
|
||||
Py_TPFLAGS_DEFAULT |
|
||||
Py_TPFLAGS_BASETYPE, /* tp_flags */
|
||||
0, /* tp_doc */
|
||||
0, /* tp_traverse */
|
||||
0, /* tp_clear */
|
||||
0, /* tp_richcompare */
|
||||
0, /* tp_weaklistoffset */
|
||||
0, /* tp_iter */
|
||||
0, /* tp_iternext */
|
||||
Shoddy_methods, /* tp_methods */
|
||||
0, /* tp_members */
|
||||
0, /* tp_getset */
|
||||
0, /* tp_base */
|
||||
0, /* tp_dict */
|
||||
0, /* tp_descr_get */
|
||||
0, /* tp_descr_set */
|
||||
0, /* tp_dictoffset */
|
||||
(initproc)Shoddy_init, /* tp_init */
|
||||
0, /* tp_alloc */
|
||||
0, /* tp_new */
|
||||
};
|
||||
|
||||
PyMODINIT_FUNC
|
||||
initshoddy(void)
|
||||
{
|
||||
PyObject *m;
|
||||
|
||||
ShoddyType.tp_base = &PyList_Type;
|
||||
if (PyType_Ready(&ShoddyType) < 0)
|
||||
return;
|
||||
|
||||
m = Py_InitModule3("shoddy", NULL, "Shoddy module");
|
||||
if (m == NULL)
|
||||
return;
|
||||
|
||||
Py_INCREF(&ShoddyType);
|
||||
PyModule_AddObject(m, "Shoddy", (PyObject *) &ShoddyType);
|
||||
}
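/* Illustrative usage from Python, not part of the upstream file; it assumes
 * the extension has been built and is importable (e.g. via the accompanying
 * setup.py):
 *
 *     >>> import shoddy
 *     >>> s = shoddy.Shoddy(range(3))
 *     >>> s.extend(s)
 *     >>> print len(s)
 *     6
 *     >>> print s.increment()
 *     1
 *     >>> print s.increment()
 *     2
 */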
|
||||
14
Doc/includes/sqlite3/adapter_datetime.py
Normal file
@@ -0,0 +1,14 @@
|
||||
import sqlite3
|
||||
import datetime, time
|
||||
|
||||
def adapt_datetime(ts):
|
||||
return time.mktime(ts.timetuple())
|
||||
|
||||
sqlite3.register_adapter(datetime.datetime, adapt_datetime)
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
|
||||
now = datetime.datetime.now()
|
||||
cur.execute("select ?", (now,))
|
||||
print cur.fetchone()[0]
|
||||
16
Doc/includes/sqlite3/adapter_point_1.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import sqlite3
|
||||
|
||||
class Point(object):
|
||||
def __init__(self, x, y):
|
||||
self.x, self.y = x, y
|
||||
|
||||
def __conform__(self, protocol):
|
||||
if protocol is sqlite3.PrepareProtocol:
|
||||
return "%f;%f" % (self.x, self.y)
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
|
||||
p = Point(4.0, -3.2)
|
||||
cur.execute("select ?", (p,))
|
||||
print cur.fetchone()[0]
|
||||
17
Doc/includes/sqlite3/adapter_point_2.py
Normal file
@@ -0,0 +1,17 @@
|
||||
import sqlite3
|
||||
|
||||
class Point(object):
|
||||
def __init__(self, x, y):
|
||||
self.x, self.y = x, y
|
||||
|
||||
def adapt_point(point):
|
||||
return "%f;%f" % (point.x, point.y)
|
||||
|
||||
sqlite3.register_adapter(Point, adapt_point)
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
|
||||
p = Point(4.0, -3.2)
|
||||
cur.execute("select ?", (p,))
|
||||
print cur.fetchone()[0]
|
||||
15
Doc/includes/sqlite3/collation_reverse.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import sqlite3
|
||||
|
||||
def collate_reverse(string1, string2):
|
||||
return -cmp(string1, string2)
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
con.create_collation("reverse", collate_reverse)
|
||||
|
||||
cur = con.cursor()
|
||||
cur.execute("create table test(x)")
|
||||
cur.executemany("insert into test(x) values (?)", [("a",), ("b",)])
|
||||
cur.execute("select x from test order by x collate reverse")
|
||||
for row in cur:
|
||||
print row
|
||||
con.close()
|
||||
30
Doc/includes/sqlite3/complete_statement.py
Normal file
@@ -0,0 +1,30 @@
|
||||
# A minimal SQLite shell for experiments
|
||||
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
con.isolation_level = None
|
||||
cur = con.cursor()
|
||||
|
||||
buffer = ""
|
||||
|
||||
print "Enter your SQL commands to execute in sqlite3."
|
||||
print "Enter a blank line to exit."
|
||||
|
||||
while True:
|
||||
line = raw_input()
|
||||
if line == "":
|
||||
break
|
||||
buffer += line
|
||||
if sqlite3.complete_statement(buffer):
|
||||
try:
|
||||
buffer = buffer.strip()
|
||||
cur.execute(buffer)
|
||||
|
||||
if buffer.lstrip().upper().startswith("SELECT"):
|
||||
print cur.fetchall()
|
||||
except sqlite3.Error as e:
|
||||
print "An error occurred:", e.args[0]
|
||||
buffer = ""
|
||||
|
||||
con.close()
|
||||
3
Doc/includes/sqlite3/connect_db_1.py
Normal file
@@ -0,0 +1,3 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect("mydb")
|
||||
3
Doc/includes/sqlite3/connect_db_2.py
Normal file
@@ -0,0 +1,3 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
47
Doc/includes/sqlite3/converter_point.py
Normal file
@@ -0,0 +1,47 @@
|
||||
import sqlite3
|
||||
|
||||
class Point(object):
|
||||
def __init__(self, x, y):
|
||||
self.x, self.y = x, y
|
||||
|
||||
def __repr__(self):
|
||||
return "(%f;%f)" % (self.x, self.y)
|
||||
|
||||
def adapt_point(point):
|
||||
return "%f;%f" % (point.x, point.y)
|
||||
|
||||
def convert_point(s):
|
||||
x, y = map(float, s.split(";"))
|
||||
return Point(x, y)
|
||||
|
||||
# Register the adapter
|
||||
sqlite3.register_adapter(Point, adapt_point)
|
||||
|
||||
# Register the converter
|
||||
sqlite3.register_converter("point", convert_point)
|
||||
|
||||
p = Point(4.0, -3.2)
|
||||
|
||||
#########################
|
||||
# 1) Using declared types
|
||||
con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES)
|
||||
cur = con.cursor()
|
||||
cur.execute("create table test(p point)")
|
||||
|
||||
cur.execute("insert into test(p) values (?)", (p,))
|
||||
cur.execute("select p from test")
|
||||
print "with declared types:", cur.fetchone()[0]
|
||||
cur.close()
|
||||
con.close()
|
||||
|
||||
#######################
|
||||
# 2) Using column names
|
||||
con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_COLNAMES)
|
||||
cur = con.cursor()
|
||||
cur.execute("create table test(p)")
|
||||
|
||||
cur.execute("insert into test(p) values (?)", (p,))
|
||||
cur.execute('select p as "p [point]" from test')
|
||||
print "with column names:", cur.fetchone()[0]
|
||||
cur.close()
|
||||
con.close()
|
||||
15
Doc/includes/sqlite3/countcursors.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import sqlite3
|
||||
|
||||
class CountCursorsConnection(sqlite3.Connection):
|
||||
def __init__(self, *args, **kwargs):
|
||||
sqlite3.Connection.__init__(self, *args, **kwargs)
|
||||
self.numcursors = 0
|
||||
|
||||
def cursor(self, *args, **kwargs):
|
||||
self.numcursors += 1
|
||||
return sqlite3.Connection.cursor(self, *args, **kwargs)
|
||||
|
||||
con = sqlite3.connect(":memory:", factory=CountCursorsConnection)
|
||||
cur1 = con.cursor()
|
||||
cur2 = con.cursor()
|
||||
print con.numcursors
|
||||
28
Doc/includes/sqlite3/createdb.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# Not referenced from the documentation, but builds the database file the other
|
||||
# code snippets expect.
|
||||
|
||||
import sqlite3
|
||||
import os
|
||||
|
||||
DB_FILE = "mydb"
|
||||
|
||||
if os.path.exists(DB_FILE):
|
||||
os.remove(DB_FILE)
|
||||
|
||||
con = sqlite3.connect(DB_FILE)
|
||||
cur = con.cursor()
|
||||
cur.execute("""
|
||||
create table people
|
||||
(
|
||||
name_last varchar(20),
|
||||
age integer
|
||||
)
|
||||
""")
|
||||
|
||||
cur.execute("insert into people (name_last, age) values ('Yeltsin', 72)")
|
||||
cur.execute("insert into people (name_last, age) values ('Putin', 51)")
|
||||
|
||||
con.commit()
|
||||
|
||||
cur.close()
|
||||
con.close()
|
||||
16
Doc/includes/sqlite3/ctx_manager.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
con.execute("create table person (id integer primary key, firstname varchar unique)")
|
||||
|
||||
# Successful, con.commit() is called automatically afterwards
|
||||
with con:
|
||||
con.execute("insert into person(firstname) values (?)", ("Joe",))
|
||||
|
||||
# con.rollback() is called after the with block finishes with an exception; the
|
||||
# exception is still raised and must be caught
|
||||
try:
|
||||
with con:
|
||||
con.execute("insert into person(firstname) values (?)", ("Joe",))
|
||||
except sqlite3.IntegrityError:
|
||||
print "couldn't add Joe twice"
|
||||
17
Doc/includes/sqlite3/execsql_fetchonerow.py
Normal file
@@ -0,0 +1,17 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect("mydb")
|
||||
|
||||
cur = con.cursor()
|
||||
SELECT = "select name_last, age from people order by age, name_last"
|
||||
|
||||
# 1. Iterate over the rows available from the cursor, unpacking the
|
||||
# resulting sequences to yield their elements (name_last, age):
|
||||
cur.execute(SELECT)
|
||||
for (name_last, age) in cur:
|
||||
print '%s is %d years old.' % (name_last, age)
|
||||
|
||||
# 2. Equivalently:
|
||||
cur.execute(SELECT)
|
||||
for row in cur:
|
||||
print '%s is %d years old.' % (row[0], row[1])
|
||||
13
Doc/includes/sqlite3/execsql_printall_1.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import sqlite3
|
||||
|
||||
# Create a connection to the database file "mydb":
|
||||
con = sqlite3.connect("mydb")
|
||||
|
||||
# Get a Cursor object that operates in the context of Connection con:
|
||||
cur = con.cursor()
|
||||
|
||||
# Execute the SELECT statement:
|
||||
cur.execute("select * from people order by age")
|
||||
|
||||
# Retrieve all rows as a sequence and print that sequence:
|
||||
print cur.fetchall()
|
||||
16
Doc/includes/sqlite3/execute_1.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.execute("create table people (name_last, age)")
|
||||
|
||||
who = "Yeltsin"
|
||||
age = 72
|
||||
|
||||
# This is the qmark style:
|
||||
cur.execute("insert into people values (?, ?)", (who, age))
|
||||
|
||||
# And this is the named style:
|
||||
cur.execute("select * from people where name_last=:who and age=:age", {"who": who, "age": age})
|
||||
|
||||
print cur.fetchone()
|
||||
12
Doc/includes/sqlite3/execute_3.py
Normal file
@@ -0,0 +1,12 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect("mydb")
|
||||
|
||||
cur = con.cursor()
|
||||
|
||||
who = "Yeltsin"
|
||||
age = 72
|
||||
|
||||
cur.execute("select name_last, age from people where name_last=:who and age=:age",
|
||||
locals())
|
||||
print cur.fetchone()
|
||||
24
Doc/includes/sqlite3/executemany_1.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import sqlite3
|
||||
|
||||
class IterChars:
|
||||
def __init__(self):
|
||||
self.count = ord('a')
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
if self.count > ord('z'):
|
||||
raise StopIteration
|
||||
self.count += 1
|
||||
return (chr(self.count - 1),) # this is a 1-tuple
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.execute("create table characters(c)")
|
||||
|
||||
theIter = IterChars()
|
||||
cur.executemany("insert into characters(c) values (?)", theIter)
|
||||
|
||||
cur.execute("select c from characters")
|
||||
print cur.fetchall()
|
||||
15
Doc/includes/sqlite3/executemany_2.py
Normal file
@@ -0,0 +1,15 @@
|
||||
import sqlite3
|
||||
import string
|
||||
|
||||
def char_generator():
|
||||
for c in string.lowercase:
|
||||
yield (c,)
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.execute("create table characters(c)")
|
||||
|
||||
cur.executemany("insert into characters(c) values (?)", char_generator())
|
||||
|
||||
cur.execute("select c from characters")
|
||||
print cur.fetchall()
|
||||
24
Doc/includes/sqlite3/executescript.py
Normal file
@@ -0,0 +1,24 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
cur.executescript("""
|
||||
create table person(
|
||||
firstname,
|
||||
lastname,
|
||||
age
|
||||
);
|
||||
|
||||
create table book(
|
||||
title,
|
||||
author,
|
||||
published
|
||||
);
|
||||
|
||||
insert into book(title, author, published)
|
||||
values (
|
||||
'Dirk Gently''s Holistic Detective Agency',
|
||||
'Douglas Adams',
|
||||
1987
|
||||
);
|
||||
""")
|
||||
16
Doc/includes/sqlite3/insert_more_people.py
Normal file
@@ -0,0 +1,16 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect("mydb")
|
||||
|
||||
cur = con.cursor()
|
||||
|
||||
newPeople = (
|
||||
('Lebed' , 53),
|
||||
('Zhirinovsky' , 57),
|
||||
)
|
||||
|
||||
for person in newPeople:
|
||||
cur.execute("insert into people (name_last, age) values (?, ?)", person)
|
||||
|
||||
# The changes will not be saved unless the transaction is committed explicitly:
|
||||
con.commit()
|
||||
26
Doc/includes/sqlite3/load_extension.py
Normal file
@@ -0,0 +1,26 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
|
||||
# enable extension loading
|
||||
con.enable_load_extension(True)
|
||||
|
||||
# Load the fulltext search extension
|
||||
con.execute("select load_extension('./fts3.so')")
|
||||
|
||||
# alternatively you can load the extension using an API call:
|
||||
# con.load_extension("./fts3.so")
|
||||
|
||||
# disable extension loading again
|
||||
con.enable_load_extension(False)
|
||||
|
||||
# example from SQLite wiki
|
||||
con.execute("create virtual table recipe using fts3(name, ingredients)")
|
||||
con.executescript("""
|
||||
insert into recipe (name, ingredients) values ('broccoli stew', 'broccoli peppers cheese tomatoes');
|
||||
insert into recipe (name, ingredients) values ('pumpkin stew', 'pumpkin onions garlic celery');
|
||||
insert into recipe (name, ingredients) values ('broccoli pie', 'broccoli cheese onions flour');
|
||||
insert into recipe (name, ingredients) values ('pumpkin pie', 'pumpkin sugar flour butter');
|
||||
""")
|
||||
for row in con.execute("select rowid, name, ingredients from recipe where name match 'pie'"):
|
||||
print row
|
||||
11
Doc/includes/sqlite3/md5func.py
Normal file
@@ -0,0 +1,11 @@
|
||||
import sqlite3
|
||||
import md5
|
||||
|
||||
def md5sum(t):
|
||||
return md5.md5(t).hexdigest()
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
con.create_function("md5", 1, md5sum)
|
||||
cur = con.cursor()
|
||||
cur.execute("select md5(?)", ("foo",))
|
||||
print cur.fetchone()[0]
|
||||
20
Doc/includes/sqlite3/mysumaggr.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import sqlite3
|
||||
|
||||
class MySum:
|
||||
def __init__(self):
|
||||
self.count = 0
|
||||
|
||||
def step(self, value):
|
||||
self.count += value
|
||||
|
||||
def finalize(self):
|
||||
return self.count
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
con.create_aggregate("mysum", 1, MySum)
|
||||
cur = con.cursor()
|
||||
cur.execute("create table test(i)")
|
||||
cur.execute("insert into test(i) values (1)")
|
||||
cur.execute("insert into test(i) values (2)")
|
||||
cur.execute("select mysum(i) from test")
|
||||
print cur.fetchone()[0]
|
||||
8
Doc/includes/sqlite3/parse_colnames.py
Normal file
@@ -0,0 +1,8 @@
|
||||
import sqlite3
|
||||
import datetime
|
||||
|
||||
con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_COLNAMES)
|
||||
cur = con.cursor()
|
||||
cur.execute('select ? as "x [timestamp]"', (datetime.datetime.now(),))
|
||||
dt = cur.fetchone()[0]
|
||||
print dt, type(dt)
|
||||
20
Doc/includes/sqlite3/pysqlite_datetime.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import sqlite3
|
||||
import datetime
|
||||
|
||||
con = sqlite3.connect(":memory:", detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
|
||||
cur = con.cursor()
|
||||
cur.execute("create table test(d date, ts timestamp)")
|
||||
|
||||
today = datetime.date.today()
|
||||
now = datetime.datetime.now()
|
||||
|
||||
cur.execute("insert into test(d, ts) values (?, ?)", (today, now))
|
||||
cur.execute("select d, ts from test")
|
||||
row = cur.fetchone()
|
||||
print today, "=>", row[0], type(row[0])
|
||||
print now, "=>", row[1], type(row[1])
|
||||
|
||||
cur.execute('select current_date as "d [date]", current_timestamp as "ts [timestamp]"')
|
||||
row = cur.fetchone()
|
||||
print "current_date", row[0], type(row[0])
|
||||
print "current_timestamp", row[1], type(row[1])
|
||||
13
Doc/includes/sqlite3/row_factory.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import sqlite3
|
||||
|
||||
def dict_factory(cursor, row):
|
||||
d = {}
|
||||
for idx, col in enumerate(cursor.description):
|
||||
d[col[0]] = row[idx]
|
||||
return d
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
con.row_factory = dict_factory
|
||||
cur = con.cursor()
|
||||
cur.execute("select 1 as a")
|
||||
print cur.fetchone()["a"]
|
||||
12
Doc/includes/sqlite3/rowclass.py
Normal file
@@ -0,0 +1,12 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
con.row_factory = sqlite3.Row
|
||||
|
||||
cur = con.cursor()
|
||||
cur.execute("select 'John' as name, 42 as age")
|
||||
for row in cur:
|
||||
assert row[0] == row["name"]
|
||||
assert row["name"] == row["nAmE"]
|
||||
assert row[1] == row["age"]
|
||||
assert row[1] == row["AgE"]
|
||||
6
Doc/includes/sqlite3/shared_cache.py
Normal file
@@ -0,0 +1,6 @@
|
||||
import sqlite3
|
||||
|
||||
# The shared cache is only available in SQLite versions 3.3.3 or later
|
||||
# See the SQLite documentation for details.
|
||||
|
||||
sqlite3.enable_shared_cache(True)
|
||||
20
Doc/includes/sqlite3/shortcut_methods.py
Normal file
@@ -0,0 +1,20 @@
|
||||
import sqlite3
|
||||
|
||||
persons = [
|
||||
("Hugo", "Boss"),
|
||||
("Calvin", "Klein")
|
||||
]
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
|
||||
# Create the table
|
||||
con.execute("create table person(firstname, lastname)")
|
||||
|
||||
# Fill the table
|
||||
con.executemany("insert into person(firstname, lastname) values (?, ?)", persons)
|
||||
|
||||
# Print the table contents
|
||||
for row in con.execute("select firstname, lastname from person"):
|
||||
print row
|
||||
|
||||
print "I just deleted", con.execute("delete from person").rowcount, "rows"
|
||||
26
Doc/includes/sqlite3/simple_tableprinter.py
Normal file
@@ -0,0 +1,26 @@
|
||||
import sqlite3
|
||||
|
||||
FIELD_MAX_WIDTH = 20
|
||||
TABLE_NAME = 'people'
|
||||
SELECT = 'select * from %s order by age, name_last' % TABLE_NAME
|
||||
|
||||
con = sqlite3.connect("mydb")
|
||||
|
||||
cur = con.cursor()
|
||||
cur.execute(SELECT)
|
||||
|
||||
# Print a header.
|
||||
for fieldDesc in cur.description:
|
||||
print fieldDesc[0].ljust(FIELD_MAX_WIDTH) ,
|
||||
print # Finish the header with a newline.
|
||||
print '-' * 78
|
||||
|
||||
# For each row, print the value of each field left-justified within
|
||||
# the maximum possible width of that field.
|
||||
fieldIndices = range(len(cur.description))
|
||||
for row in cur:
|
||||
for fieldIndex in fieldIndices:
|
||||
fieldValue = str(row[fieldIndex])
|
||||
print fieldValue.ljust(FIELD_MAX_WIDTH) ,
|
||||
|
||||
print # Finish the row with a newline.
|
||||
40
Doc/includes/sqlite3/text_factory.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import sqlite3
|
||||
|
||||
con = sqlite3.connect(":memory:")
|
||||
cur = con.cursor()
|
||||
|
||||
AUSTRIA = u"\xd6sterreich"
|
||||
|
||||
# by default, rows are returned as Unicode
|
||||
cur.execute("select ?", (AUSTRIA,))
|
||||
row = cur.fetchone()
|
||||
assert row[0] == AUSTRIA
|
||||
|
||||
# but we can make sqlite3 always return bytestrings ...
|
||||
con.text_factory = str
|
||||
cur.execute("select ?", (AUSTRIA,))
|
||||
row = cur.fetchone()
|
||||
assert type(row[0]) is str
|
||||
# the bytestrings will be encoded in UTF-8, unless you stored garbage in the
|
||||
# database ...
|
||||
assert row[0] == AUSTRIA.encode("utf-8")
|
||||
|
||||
# we can also implement a custom text_factory ...
|
||||
# here we implement one that will ignore Unicode characters that cannot be
|
||||
# decoded from UTF-8
|
||||
con.text_factory = lambda x: unicode(x, "utf-8", "ignore")
|
||||
cur.execute("select ?", ("this is latin1 and would normally create errors" +
|
||||
u"\xe4\xf6\xfc".encode("latin1"),))
|
||||
row = cur.fetchone()
|
||||
assert type(row[0]) is unicode
|
||||
|
||||
# sqlite3 offers a built-in optimized text_factory that will return bytestring
|
||||
# objects, if the data is in ASCII only, and otherwise return unicode objects
|
||||
con.text_factory = sqlite3.OptimizedUnicode
|
||||
cur.execute("select ?", (AUSTRIA,))
|
||||
row = cur.fetchone()
|
||||
assert type(row[0]) is unicode
|
||||
|
||||
cur.execute("select ?", ("Germany",))
|
||||
row = cur.fetchone()
|
||||
assert type(row[0]) is str
|
||||
213
Doc/includes/test.py
Normal file
@@ -0,0 +1,213 @@
|
||||
"""Test module for the noddy examples
|
||||
|
||||
Noddy 1:
|
||||
|
||||
>>> import noddy
|
||||
>>> n1 = noddy.Noddy()
|
||||
>>> n2 = noddy.Noddy()
|
||||
>>> del n1
|
||||
>>> del n2
|
||||
|
||||
|
||||
Noddy 2
|
||||
|
||||
>>> import noddy2
|
||||
>>> n1 = noddy2.Noddy('jim', 'fulton', 42)
|
||||
>>> n1.first
|
||||
'jim'
|
||||
>>> n1.last
|
||||
'fulton'
|
||||
>>> n1.number
|
||||
42
|
||||
>>> n1.name()
|
||||
'jim fulton'
|
||||
>>> n1.first = 'will'
|
||||
>>> n1.name()
|
||||
'will fulton'
|
||||
>>> n1.last = 'tell'
|
||||
>>> n1.name()
|
||||
'will tell'
|
||||
>>> del n1.first
|
||||
>>> n1.name()
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n1.first
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n1.first = 'drew'
|
||||
>>> n1.first
|
||||
'drew'
|
||||
>>> del n1.number
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
TypeError: can't delete numeric/char attribute
|
||||
>>> n1.number=2
|
||||
>>> n1.number
|
||||
2
|
||||
>>> n1.first = 42
|
||||
>>> n1.name()
|
||||
'42 tell'
|
||||
>>> n2 = noddy2.Noddy()
|
||||
>>> n2.name()
|
||||
' '
|
||||
>>> n2.first
|
||||
''
|
||||
>>> n2.last
|
||||
''
|
||||
>>> del n2.first
|
||||
>>> n2.first
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n2.first
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n2.name()
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
AttributeError: first
|
||||
>>> n2.number
|
||||
0
|
||||
>>> n3 = noddy2.Noddy('jim', 'fulton', 'waaa')
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
TypeError: an integer is required
|
||||
>>> del n1
|
||||
>>> del n2
|
||||
|
||||
|
||||
Noddy 3
|
||||
|
||||
>>> import noddy3
|
||||
>>> n1 = noddy3.Noddy('jim', 'fulton', 42)
|
||||
>>> n1 = noddy3.Noddy('jim', 'fulton', 42)
|
||||
>>> n1.name()
|
||||
'jim fulton'
|
||||
>>> del n1.first
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
TypeError: Cannot delete the first attribute
|
||||
>>> n1.first = 42
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
TypeError: The first attribute value must be a string
|
||||
>>> n1.first = 'will'
|
||||
>>> n1.name()
|
||||
'will fulton'
|
||||
>>> n2 = noddy3.Noddy()
|
||||
>>> n2 = noddy3.Noddy()
|
||||
>>> n2 = noddy3.Noddy()
|
||||
>>> n3 = noddy3.Noddy('jim', 'fulton', 'waaa')
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
TypeError: an integer is required
|
||||
>>> del n1
|
||||
>>> del n2
|
||||
|
||||
Noddy 4
|
||||
|
||||
>>> import noddy4
|
||||
>>> n1 = noddy4.Noddy('jim', 'fulton', 42)
|
||||
>>> n1.first
|
||||
'jim'
|
||||
>>> n1.last
|
||||
'fulton'
|
||||
>>> n1.number
|
||||
42
|
||||
>>> n1.name()
|
||||
'jim fulton'
|
||||
>>> n1.first = 'will'
|
||||
>>> n1.name()
|
||||
'will fulton'
|
||||
>>> n1.last = 'tell'
|
||||
>>> n1.name()
|
||||
'will tell'
|
||||
>>> del n1.first
|
||||
>>> n1.name()
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n1.first
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n1.first = 'drew'
|
||||
>>> n1.first
|
||||
'drew'
|
||||
>>> del n1.number
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
TypeError: can't delete numeric/char attribute
|
||||
>>> n1.number=2
|
||||
>>> n1.number
|
||||
2
|
||||
>>> n1.first = 42
|
||||
>>> n1.name()
|
||||
'42 tell'
|
||||
>>> n2 = noddy4.Noddy()
|
||||
>>> n2 = noddy4.Noddy()
|
||||
>>> n2 = noddy4.Noddy()
|
||||
>>> n2 = noddy4.Noddy()
|
||||
>>> n2.name()
|
||||
' '
|
||||
>>> n2.first
|
||||
''
|
||||
>>> n2.last
|
||||
''
|
||||
>>> del n2.first
|
||||
>>> n2.first
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n2.first
|
||||
Traceback (most recent call last):
|
||||
...
|
||||
AttributeError: first
|
||||
>>> n2.name()
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
AttributeError: first
|
||||
>>> n2.number
|
||||
0
|
||||
>>> n3 = noddy4.Noddy('jim', 'fulton', 'waaa')
|
||||
Traceback (most recent call last):
|
||||
File "<stdin>", line 1, in ?
|
||||
TypeError: an integer is required
|
||||
|
||||
|
||||
Test cyclic gc(?)
|
||||
|
||||
>>> import gc
|
||||
>>> gc.disable()
|
||||
|
||||
>>> x = []
|
||||
>>> l = [x]
|
||||
>>> n2.first = l
|
||||
>>> n2.first
|
||||
[[]]
|
||||
>>> l.append(n2)
|
||||
>>> del l
|
||||
>>> del n1
|
||||
>>> del n2
|
||||
>>> sys.getrefcount(x)
|
||||
3
|
||||
>>> ignore = gc.collect()
|
||||
>>> sys.getrefcount(x)
|
||||
2
|
||||
|
||||
>>> gc.enable()
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from distutils.util import get_platform
|
||||
PLAT_SPEC = "%s-%s" % (get_platform(), sys.version[0:3])
|
||||
src = os.path.join("build", "lib.%s" % PLAT_SPEC)
|
||||
sys.path.append(src)
|
||||
|
||||
if __name__ == "__main__":
|
||||
import doctest, __main__
|
||||
doctest.testmod(__main__)
|
||||
76
Doc/includes/typestruct.h
Normal file
@@ -0,0 +1,76 @@
|
||||
typedef struct _typeobject {
|
||||
PyObject_VAR_HEAD
|
||||
char *tp_name; /* For printing, in format "<module>.<name>" */
|
||||
int tp_basicsize, tp_itemsize; /* For allocation */
|
||||
|
||||
/* Methods to implement standard operations */
|
||||
|
||||
destructor tp_dealloc;
|
||||
printfunc tp_print;
|
||||
getattrfunc tp_getattr;
|
||||
setattrfunc tp_setattr;
|
||||
cmpfunc tp_compare;
|
||||
reprfunc tp_repr;
|
||||
|
||||
/* Method suites for standard classes */
|
||||
|
||||
PyNumberMethods *tp_as_number;
|
||||
PySequenceMethods *tp_as_sequence;
|
||||
PyMappingMethods *tp_as_mapping;
|
||||
|
||||
/* More standard operations (here for binary compatibility) */
|
||||
|
||||
hashfunc tp_hash;
|
||||
ternaryfunc tp_call;
|
||||
reprfunc tp_str;
|
||||
getattrofunc tp_getattro;
|
||||
setattrofunc tp_setattro;
|
||||
|
||||
/* Functions to access object as input/output buffer */
|
||||
PyBufferProcs *tp_as_buffer;
|
||||
|
||||
/* Flags to define presence of optional/expanded features */
|
||||
long tp_flags;
|
||||
|
||||
char *tp_doc; /* Documentation string */
|
||||
|
||||
/* Assigned meaning in release 2.0 */
|
||||
/* call function for all accessible objects */
|
||||
traverseproc tp_traverse;
|
||||
|
||||
/* delete references to contained objects */
|
||||
inquiry tp_clear;
|
||||
|
||||
/* Assigned meaning in release 2.1 */
|
||||
/* rich comparisons */
|
||||
richcmpfunc tp_richcompare;
|
||||
|
||||
/* weak reference enabler */
|
||||
long tp_weaklistoffset;
|
||||
|
||||
/* Added in release 2.2 */
|
||||
/* Iterators */
|
||||
getiterfunc tp_iter;
|
||||
iternextfunc tp_iternext;
|
||||
|
||||
/* Attribute descriptor and subclassing stuff */
|
||||
struct PyMethodDef *tp_methods;
|
||||
struct PyMemberDef *tp_members;
|
||||
struct PyGetSetDef *tp_getset;
|
||||
struct _typeobject *tp_base;
|
||||
PyObject *tp_dict;
|
||||
descrgetfunc tp_descr_get;
|
||||
descrsetfunc tp_descr_set;
|
||||
long tp_dictoffset;
|
||||
initproc tp_init;
|
||||
allocfunc tp_alloc;
|
||||
newfunc tp_new;
|
||||
freefunc tp_free; /* Low-level free-memory routine */
|
||||
inquiry tp_is_gc; /* For PyObject_IS_GC */
|
||||
PyObject *tp_bases;
|
||||
PyObject *tp_mro; /* method resolution order */
|
||||
PyObject *tp_cache;
|
||||
PyObject *tp_subclasses;
|
||||
PyObject *tp_weaklist;
|
||||
|
||||
} PyTypeObject;
|
||||
169
Doc/includes/tzinfo-examples.py
Normal file
@@ -0,0 +1,169 @@
|
||||
from datetime import tzinfo, timedelta, datetime
|
||||
|
||||
ZERO = timedelta(0)
|
||||
HOUR = timedelta(hours=1)
|
||||
|
||||
# A UTC class.
|
||||
|
||||
class UTC(tzinfo):
|
||||
"""UTC"""
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return ZERO
|
||||
|
||||
def tzname(self, dt):
|
||||
return "UTC"
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
||||
utc = UTC()
|
||||
|
||||
# A class building tzinfo objects for fixed-offset time zones.
|
||||
# Note that FixedOffset(0, "UTC") is a different way to build a
|
||||
# UTC tzinfo object.
|
||||
|
||||
class FixedOffset(tzinfo):
|
||||
"""Fixed offset in minutes east from UTC."""
|
||||
|
||||
def __init__(self, offset, name):
|
||||
self.__offset = timedelta(minutes = offset)
|
||||
self.__name = name
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self.__offset
|
||||
|
||||
def tzname(self, dt):
|
||||
return self.__name
|
||||
|
||||
def dst(self, dt):
|
||||
return ZERO
|
||||
|
||||
# A class capturing the platform's idea of local time.
|
||||
|
||||
import time as _time
|
||||
|
||||
STDOFFSET = timedelta(seconds = -_time.timezone)
|
||||
if _time.daylight:
|
||||
DSTOFFSET = timedelta(seconds = -_time.altzone)
|
||||
else:
|
||||
DSTOFFSET = STDOFFSET
|
||||
|
||||
DSTDIFF = DSTOFFSET - STDOFFSET
|
||||
|
||||
class LocalTimezone(tzinfo):
|
||||
|
||||
def utcoffset(self, dt):
|
||||
if self._isdst(dt):
|
||||
return DSTOFFSET
|
||||
else:
|
||||
return STDOFFSET
|
||||
|
||||
def dst(self, dt):
|
||||
if self._isdst(dt):
|
||||
return DSTDIFF
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
def tzname(self, dt):
|
||||
return _time.tzname[self._isdst(dt)]
|
||||
|
||||
def _isdst(self, dt):
|
||||
tt = (dt.year, dt.month, dt.day,
|
||||
dt.hour, dt.minute, dt.second,
|
||||
dt.weekday(), 0, 0)
|
||||
stamp = _time.mktime(tt)
|
||||
tt = _time.localtime(stamp)
|
||||
return tt.tm_isdst > 0
|
||||
|
||||
Local = LocalTimezone()
|
||||
|
||||
|
||||
# A complete implementation of current DST rules for major US time zones.
|
||||
|
||||
def first_sunday_on_or_after(dt):
|
||||
days_to_go = 6 - dt.weekday()
|
||||
if days_to_go:
|
||||
dt += timedelta(days_to_go)
|
||||
return dt
|
||||
|
||||
|
||||
# US DST Rules
|
||||
#
|
||||
# This is a simplified (i.e., wrong for a few cases) set of rules for US
|
||||
# DST start and end times. For a complete and up-to-date set of DST rules
|
||||
# and timezone definitions, visit the Olson Database (or try pytz):
|
||||
# http://www.twinsun.com/tz/tz-link.htm
|
||||
# http://sourceforge.net/projects/pytz/ (might not be up-to-date)
|
||||
#
|
||||
# In the US, since 2007, DST starts at 2am (standard time) on the second
|
||||
# Sunday in March, which is the first Sunday on or after Mar 8.
|
||||
DSTSTART_2007 = datetime(1, 3, 8, 2)
|
||||
# and ends at 2am (DST time; 1am standard time) on the first Sunday of Nov.
|
||||
DSTEND_2007 = datetime(1, 11, 1, 1)
|
||||
# From 1987 to 2006, DST used to start at 2am (standard time) on the first
|
||||
# Sunday in April and to end at 2am (DST time; 1am standard time) on the last
|
||||
# Sunday of October, which is the first Sunday on or after Oct 25.
|
||||
DSTSTART_1987_2006 = datetime(1, 4, 1, 2)
|
||||
DSTEND_1987_2006 = datetime(1, 10, 25, 1)
|
||||
# From 1967 to 1986, DST used to start at 2am (standard time) on the last
|
||||
# Sunday in April (the one on or after April 24) and to end at 2am (DST time;
|
||||
# 1am standard time) on the last Sunday of October, which is the first Sunday
|
||||
# on or after Oct 25.
|
||||
DSTSTART_1967_1986 = datetime(1, 4, 24, 2)
|
||||
DSTEND_1967_1986 = DSTEND_1987_2006
|
||||
|
||||
class USTimeZone(tzinfo):
|
||||
|
||||
def __init__(self, hours, reprname, stdname, dstname):
|
||||
self.stdoffset = timedelta(hours=hours)
|
||||
self.reprname = reprname
|
||||
self.stdname = stdname
|
||||
self.dstname = dstname
|
||||
|
||||
def __repr__(self):
|
||||
return self.reprname
|
||||
|
||||
def tzname(self, dt):
|
||||
if self.dst(dt):
|
||||
return self.dstname
|
||||
else:
|
||||
return self.stdname
|
||||
|
||||
def utcoffset(self, dt):
|
||||
return self.stdoffset + self.dst(dt)
|
||||
|
||||
def dst(self, dt):
|
||||
if dt is None or dt.tzinfo is None:
|
||||
# An exception may be sensible here, in one or both cases.
|
||||
# It depends on how you want to treat them. The default
|
||||
# fromutc() implementation (called by the default astimezone()
|
||||
# implementation) passes a datetime with dt.tzinfo is self.
|
||||
return ZERO
|
||||
assert dt.tzinfo is self
|
||||
|
||||
# Find start and end times for US DST. For years before 1967, return
|
||||
# ZERO for no DST.
|
||||
if 2006 < dt.year:
|
||||
dststart, dstend = DSTSTART_2007, DSTEND_2007
|
||||
elif 1986 < dt.year < 2007:
|
||||
dststart, dstend = DSTSTART_1987_2006, DSTEND_1987_2006
|
||||
elif 1966 < dt.year < 1987:
|
||||
dststart, dstend = DSTSTART_1967_1986, DSTEND_1967_1986
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
start = first_sunday_on_or_after(dststart.replace(year=dt.year))
|
||||
end = first_sunday_on_or_after(dstend.replace(year=dt.year))
|
||||
|
||||
# Can't compare naive to aware objects, so strip the timezone from
|
||||
# dt first.
|
||||
if start <= dt.replace(tzinfo=None) < end:
|
||||
return HOUR
|
||||
else:
|
||||
return ZERO
|
||||
|
||||
Eastern = USTimeZone(-5, "Eastern", "EST", "EDT")
|
||||
Central = USTimeZone(-6, "Central", "CST", "CDT")
|
||||
Mountain = USTimeZone(-7, "Mountain", "MST", "MDT")
|
||||
Pacific = USTimeZone(-8, "Pacific", "PST", "PDT")
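# Illustrative check, not part of the upstream file: during US daylight saving
# time the Eastern zone sits at UTC-4, so a summer datetime converts to a UTC
# time four hours later.
#
#     >>> from datetime import datetime
#     >>> summer = datetime(2010, 7, 4, 12, 0, tzinfo=Eastern)
#     >>> summer.utcoffset() == timedelta(hours=-4)
#     True
#     >>> summer.astimezone(utc).hour
#     16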
|
||||